@Override
public void addAccum(AccumT accum) {
  try {
    // Resolve the per-key, per-namespace accumulator cell from the Flink state backend.
    org.apache.flink.api.common.state.ValueState<AccumT> accumState =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);

    AccumT existing = accumState.value();
    if (existing != null) {
      // An accumulator is already stored: fold the incoming one into it and write back.
      accumState.update(combineFn.mergeAccumulators(Lists.newArrayList(existing, accum)));
    } else {
      // First accumulator for this key/namespace — store it as-is.
      accumState.update(accum);
    }
  } catch (Exception e) {
    // Flink state access declares checked Exception; rethrow unchecked with cause preserved.
    throw new RuntimeException("Error adding to state.", e);
  }
}
@Override
public void addAccum(AccumT accum) {
  try {
    // Resolve the per-key, per-namespace accumulator cell from the Flink state backend.
    org.apache.flink.api.common.state.ValueState<AccumT> accumState =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);

    AccumT existing = accumState.value();
    if (existing != null) {
      // An accumulator is already stored: merge the incoming one into it (this CombineFn
      // variant takes an explicit context) and write the result back.
      accumState.update(combineFn.mergeAccumulators(Lists.newArrayList(existing, accum), context));
    } else {
      // First accumulator for this key/namespace — store it as-is.
      accumState.update(accum);
    }
  } catch (Exception e) {
    // Flink state access declares checked Exception; rethrow unchecked with cause preserved.
    throw new RuntimeException("Error adding to state.", e);
  }
}
// Collect the coder for each tagged output; loop body continues beyond this view.
// NOTE(review): presumably indexMap orders tags by union index — confirm against the enclosing method.
List<Coder<?>> outputCoders = Lists.newArrayList(); for (TupleTag<?> tag : indexMap.values()) { PValue taggedValue = outputs.get(tag);
// NOTE(review): fragment starts mid-expression — ": null" closes a ternary whose condition is outside this view.
// Gathers every tag except the main output tag (compared by id); loop continues beyond this view.
: null; ArrayList<TupleTag<?>> additionalOutputTags = Lists.newArrayList(); for (TupleTag<?> tupleTag : tagsToCoders.keySet()) { if (!mainOutputTag.getId().equals(tupleTag.getId())) {
// Snapshot all output tags from the output map into a mutable list.
List<TupleTag<?>> additionalOutputTags = Lists.newArrayList(outputMap.keySet());
// Advance synchronized processing time to wall-clock now, then snapshot the output tags.
// NOTE(review): Instant.now() makes this non-deterministic — presumably acceptable in this runner path; confirm.
timerInternals.advanceSynchronizedProcessingTime(Instant.now()); List<TupleTag<?>> additionalOutputTags = Lists.newArrayList(outputMap.keySet());
// Accumulator for the per-output coders that make up the union coder; filled beyond this view.
List<Coder<?>> unionCoders = Lists.newArrayList();
// Flatten multi-window values into one entry per window before sorting; loop body continues beyond this view.
List<WindowedValue<KV<K, InputT>>> sortedInput = Lists.newArrayList(); for (WindowedValue<KV<K, InputT>> inputValue : elements) { for (WindowedValue<KV<K, InputT>> exploded : inputValue.explodeWindows()) {