@Override
public CoCombineResult extractOutput(Object[] accumulator) {
  Map<TupleTag<?>, Object> valuesMap = Maps.newHashMap();
  // Extract each component CombineFn's output and key it by that fn's output tag.
  for (int i = 0; i < combineFnCount; ++i) {
    valuesMap.put(outputTags.get(i), combineFns.get(i).extractOutput(accumulator[i]));
  }
  return new CoCombineResult(valuesMap);
}
private static <InputT, AccumT, OutputT> void checkCombineFnShardsWithEmptyAccumulators(
    CombineFn<InputT, AccumT, OutputT> fn,
    Iterable<? extends Iterable<InputT>> shards,
    Matcher<? super OutputT> matcher) {
  List<AccumT> accumulators = combineInputs(fn, shards);
  // Surround the shard accumulators with empty accumulators; they must not change the result.
  accumulators.add(0, fn.createAccumulator());
  accumulators.add(fn.createAccumulator());
  AccumT merged = fn.mergeAccumulators(accumulators);
  assertThat(fn.extractOutput(merged), matcher);
}
@Override
public OutputT read() {
  // Merge the stored accumulator with a fresh empty one before extracting the output.
  return combineFn.extractOutput(
      combineFn.mergeAccumulators(Arrays.asList(combineFn.createAccumulator(), accum)));
}
@Override
public void processElement(WindowedValue<KV<K, Iterable<AccumT>>> element) throws Exception {
  checkState(
      element.getWindows().size() == 1,
      "Expected inputs to %s to be in exactly one window. Got %s",
      MergeAccumulatorsAndExtractOutputEvaluator.class.getSimpleName(),
      element.getWindows().size());
  Iterable<AccumT> inputAccumulators = element.getValue().getValue();
  try {
    // Merge the input accumulators together with two fresh empty accumulators,
    // then extract the final output for the key.
    AccumT first = combineFn.createAccumulator();
    AccumT merged =
        combineFn.mergeAccumulators(
            Iterables.concat(
                Collections.singleton(first),
                inputAccumulators,
                Collections.singleton(combineFn.createAccumulator())));
    OutputT extracted = combineFn.extractOutput(merged);
    output.add(element.withValue(KV.of(element.getValue().getKey(), extracted)));
  } catch (Exception e) {
    throw UserCodeException.wrap(e);
  }
}
@Override
public OutputT read() {
  try {
    org.apache.flink.api.common.state.ValueState<AccumT> state =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);
    AccumT accum = state.value();
    // Fall back to an empty accumulator when no state has been written yet.
    if (accum != null) {
      return combineFn.extractOutput(accum);
    } else {
      return combineFn.extractOutput(combineFn.createAccumulator());
    }
  } catch (Exception e) {
    throw new RuntimeException("Error reading state.", e);
  }
}
private static <InputT, AccumT, OutputT> void checkCombineFnShardsIncrementalMerging(
    CombineFn<InputT, AccumT, OutputT> fn,
    List<? extends Iterable<InputT>> shards,
    Matcher<? super OutputT> matcher) {
  AccumT accumulator = shards.isEmpty() ? fn.createAccumulator() : null;
  for (AccumT inputAccum : combineInputs(fn, shards)) {
    if (accumulator == null) {
      accumulator = inputAccum;
    } else {
      accumulator = fn.mergeAccumulators(Arrays.asList(accumulator, inputAccum));
    }
    fn.extractOutput(accumulator); // Extract output to simulate multiple firings.
  }
  assertThat(fn.extractOutput(accumulator), matcher);
}
/**
 * Applies this {@code CombineFn} to a collection of input values to produce a combined output
 * value.
 *
 * <p>Useful when using a {@code CombineFn} separately from a {@code Combine} transform. Does
 * not invoke the {@link #mergeAccumulators} operation.
 */
public OutputT apply(Iterable<? extends InputT> inputs) {
  AccumT accum = createAccumulator();
  for (InputT input : inputs) {
    accum = addInput(accum, input);
  }
  return extractOutput(accum);
}
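A minimal usage sketch of apply() outside a pipeline, assuming Beam's built-in Sum.ofIntegers() CombineFn (the concrete fn is illustrative; any CombineFn works the same way):

import java.util.Arrays;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.Sum;

public class CombineFnApplySketch {
  public static void main(String[] args) {
    // apply() runs createAccumulator, then addInput per element, then extractOutput;
    // it never calls mergeAccumulators.
    CombineFn<Integer, ?, Integer> sum = Sum.ofIntegers();
    System.out.println(sum.apply(Arrays.asList(1, 2, 3, 4))); // prints 10
  }
}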
@Override
public OutputT extractOutput(AccumT accumulator) {
  return fn.extractOutput(accumulator);
}
@Override
public OutputT extractOutput(AccumT accumulator, Context c) {
  return combineFn.extractOutput(accumulator);
}
@Override
public Object extractOutput(Object accumulator) {
  return combineFn.extractOutput(accumulator);
}
/**
 * {@inheritDoc}
 *
 * <p>By default returns the output extracted from an empty accumulator.
 */
@Override
public OutputT defaultValue() {
  return extractOutput(createAccumulator());
}
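As a hedged illustration of this default, again assuming Sum.ofIntegers(): the empty accumulator extracts to 0, which is the value a globally-windowed Combine.globally() falls back to when its input is empty.

CombineFn<Integer, ?, Integer> sum = Sum.ofIntegers();
Integer fallback = sum.defaultValue(); // 0 == extractOutput(createAccumulator())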
private static <InputT, AccumT, OutputT> void checkCombineFnShardsSingleMerge(
    CombineFn<InputT, AccumT, OutputT> fn,
    Iterable<? extends Iterable<InputT>> shards,
    Matcher<? super OutputT> matcher) {
  List<AccumT> accumulators = combineInputs(fn, shards);
  AccumT merged = fn.mergeAccumulators(accumulators);
  assertThat(fn.extractOutput(merged), matcher);
}
@Override
public OutputT extractOutput(
    AccumT accumulator,
    PipelineOptions options,
    SideInputReader sideInputReader,
    Collection<? extends BoundedWindow> windows) {
  return combineFn.extractOutput(accumulator);
}