/** Returns the runner classes made available by this registrar. */
@Override
public Iterable<Class<? extends PipelineRunner<?>>> getPipelineRunners() {
  return ImmutableList.<Class<? extends PipelineRunner<?>>>builder()
      .add(FlinkRunner.class)
      .add(TestFlinkRunner.class)
      .build();
}
}
/** Returns the pipeline-options classes made available by this registrar. */
@Override
public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
  return ImmutableList.<Class<? extends PipelineOptions>>builder()
      .add(FlinkPipelineOptions.class)
      .build();
}
}
/**
 * Returns the coders this composite coder is parameterized by.
 *
 * <p>Components, in order: the key coder, the element coder, and the window coder.
 */
@Override
public List<? extends Coder<?>> getCoderArguments() {
  return ImmutableList.<Coder<?>>of(keyCoder, elemCoder, windowCoder);
}
/**
 * Adds an input to an accumulator.
 *
 * <p>Note that {@code value} is itself of type {@code AccumT}, so "adding" here
 * reduces to merging the two accumulators via {@code combineFnRunner}.
 *
 * @param key the grouping key (unused by the merge itself)
 * @param accumulator the accumulator being built up
 * @param value the incoming value, already in accumulator form
 * @param options pipeline options forwarded to the combine-fn runner
 * @param sideInputReader side-input access forwarded to the combine-fn runner
 * @param windows the windows the merge takes place in
 * @return the merged accumulator
 */
@Override
public AccumT addInput(
    K key,
    AccumT accumulator,
    AccumT value,
    PipelineOptions options,
    SideInputReader sideInputReader,
    Collection<? extends BoundedWindow> windows) {
  return combineFnRunner.mergeAccumulators(
      ImmutableList.<AccumT>of(accumulator, value), options, sideInputReader, windows);
}
@Override public List<BoundedSource<KV<K, V>>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception { // desiredBundleSizeBytes is not being considered as splitting based on this // value is not supported by inputFormat getSplits() method. if (inputSplit != null) { LOGGER.info("Not splitting source {} because source is already split.", this); return ImmutableList.of((BoundedSource<KV<K, V>>) this); } computeSplitsIfNecessary(); LOGGER.info( "Generated {} splits. Size of first split is {} ", inputSplits.size(), inputSplits.get(0).getSplit().getLength()); return inputSplits .stream() .map( serializableInputSplit -> { return new HadoopInputFormatBoundedSource<>( conf, keyCoder, valueCoder, keyTranslationFunction, valueTranslationFunction, serializableInputSplit); }) .collect(Collectors.toList()); }
WindowingStrategy.globalDefault(), ImmutableList.of(view1, view2), /* side inputs */ PipelineOptionsFactory.as(FlinkPipelineOptions.class), null,
WindowingStrategy.of(FixedWindows.of(Duration.millis(100))), ImmutableList.of(view1, view2), /* side inputs */ PipelineOptionsFactory.as(FlinkPipelineOptions.class), null,
WindowingStrategy.of(FixedWindows.of(Duration.millis(100))), ImmutableList.of(view1, view2), /* side inputs */ PipelineOptionsFactory.as(FlinkPipelineOptions.class), keyCoder,
WindowingStrategy.of(FixedWindows.of(Duration.millis(100))), ImmutableList.of(view1, view2), /* side inputs */ PipelineOptionsFactory.as(FlinkPipelineOptions.class), keyCoder,
WindowingStrategy.of(FixedWindows.of(Duration.millis(100))), ImmutableList.of(view1, view2), /* side inputs */ PipelineOptionsFactory.as(FlinkPipelineOptions.class), keyCoder, new RawUnionValue( 1, valuesInWindow(ImmutableList.of("hello", "ciao"), new Instant(0), firstWindow)))); testHarness.processElement2( new StreamRecord<>( new RawUnionValue( 2, valuesInWindow(ImmutableList.of("foo", "bar"), new Instant(0), secondWindow)))); 1, valuesInWindow( ImmutableList.of("hello", "ciao"), new Instant(1000), firstWindow)))); testHarness.processElement2( new StreamRecord<>( new RawUnionValue( 2, valuesInWindow(ImmutableList.of("foo", "bar"), new Instant(1000), secondWindow))));
Collections.emptyMap(), mainOutput, ImmutableList.of(additionalOutput1, additionalOutput2), new DoFnOperator.MultiOutputOutputManagerFactory( mainOutput, tagsToOutputTags, tagsToCoders, tagsToIds),
PAssert.that(result).containsInAnyOrder(KV.of("foo", ImmutableList.of(4L, 3L, 3L)));
getOperator( mainOutput, ImmutableList.of(additionalOutput1, additionalOutput2), outputManagerFactory);