/** Deserializes the pipeline options for this worker when the operator is configured. */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/** Materializes the serialized pipeline options at configuration time. */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/** Restores the pipeline options from their serialized form on configure. */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/**
 * Creates a reader for the bounded source held by the given partition.
 *
 * @param partition the partition whose source should be read
 * @return a started-ready reader for the partition's source
 * @throws RuntimeException wrapping any {@link IOException} raised while creating the reader
 */
private BoundedSource.BoundedReader<T> createReader(SourcePartition<T> partition) {
  BoundedSource<T> boundedSource = (BoundedSource<T>) partition.source;
  try {
    return boundedSource.createReader(options.get());
  } catch (IOException e) {
    throw new RuntimeException("Failed to create reader from a BoundedSource.", e);
  }
}
/**
 * Initializes tuple tracing from the pipeline options, then creates and starts the
 * source reader.
 */
@Override
public void setup(OperatorContext context) {
  ApexPipelineOptions apexOptions = pipelineOptions.get().as(ApexPipelineOptions.class);
  this.traceTuples = ApexStreamTuple.Logging.isDebugEnabled(apexOptions, this);
  try {
    // Second argument is the checkpoint mark; none is restored here — TODO confirm
    // callers never expect resume-from-checkpoint on setup.
    reader = source.createReader(this.pipelineOptions.get(), null);
    available = reader.start();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/** Enables or disables per-tuple debug tracing based on the deserialized pipeline options. */
@Override
public void setup(OperatorContext context) {
  ApexPipelineOptions apexOptions = serializedOptions.get().as(ApexPipelineOptions.class);
  this.traceTuples = ApexStreamTuple.Logging.isDebugEnabled(apexOptions, this);
}
/** Returns the Spark storage level configured in the pipeline options. */
public String storageLevel() {
  SparkPipelineOptions sparkOptions = serializableOptions.get().as(SparkPipelineOptions.class);
  return sparkOptions.getStorageLevel();
}
}
/** Reads the checkpoint duration, in milliseconds, from the Spark pipeline options. */
private static Long getBatchDuration(final SerializablePipelineOptions options) {
  SparkPipelineOptions sparkOptions = options.get().as(SparkPipelineOptions.class);
  return sparkOptions.getCheckpointDurationMillis();
}
/**
 * Opens the source: deserializes the pipeline options, creates the reader, and advances it
 * to the first element.
 *
 * @throws RuntimeException wrapping any failure during reader creation or start; cleanup is
 *     attempted first, and a failure in {@code close()} is attached as a suppressed exception
 *     rather than replacing the original cause
 */
@Override
public void open(TaskContext context, Instant startTime) {
  try {
    PipelineOptions options = serializedOptions.get();
    this.reader = createReader(options);
    this.available = reader.start();
  } catch (Exception e) {
    // Fix: if close() itself throws, the original exception must not be lost —
    // record the cleanup failure as suppressed and rethrow the real cause.
    try {
      close();
    } catch (Exception closeFailure) {
      e.addSuppressed(closeFailure);
    }
    throw new RuntimeException(e);
  }
}
/**
 * Splits the micro-batch source and wraps each resulting sub-source in a checkpointable
 * partition seeded with an empty checkpoint mark.
 *
 * @return one partition per split of the underlying source
 * @throws RuntimeException if splitting the source fails
 */
@Override
public Partition[] getPartitions() {
  try {
    List<? extends Source<T>> splits = microbatchSource.split(options.get());
    int numSplits = splits.size();
    Partition[] result = new CheckpointableSourcePartition[numSplits];
    for (int i = 0; i < numSplits; i++) {
      result[i] =
          new CheckpointableSourcePartition<>(id(), i, splits.get(i), EmptyCheckpointMark.get());
    }
    return result;
  } catch (Exception e) {
    throw new RuntimeException("Failed to create partitions.", e);
  }
}
/**
 * Builds a pushback-aware DoFn runner: a simple runner wrapped so that elements whose side
 * inputs are not yet ready can be held back.
 *
 * @param sideInputReader reader used both by the underlying runner and for readiness checks
 */
public PushbackSideInputDoFnRunner<InputT, OutputT> createRunner(
    ReadyCheckingSideInputReader sideInputReader) {
  DoFnRunner<InputT, OutputT> simpleRunner =
      DoFnRunners.simpleRunner(
          serializedOptions.get(),
          fn,
          sideInputReader,
          outputManager,
          mainOutputTag,
          sideOutputTags,
          stepContext,
          null,
          outputCoders,
          windowingStrategy);
  return SimplePushbackSideInputDoFnRunner.create(simpleRunner, sideInputs, sideInputReader);
}
}
@Override public void setup( StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<WindowedValue<OutputT>>> output) { // make sure that FileSystems is initialized correctly FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class); FileSystems.setDefaultPipelineOptions(options); super.setup(containingTask, config, output); }
@Override public void setup( StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<WindowedValue<OutputT>>> output) { // make sure that FileSystems is initialized correctly FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class); FileSystems.setDefaultPipelineOptions(options); super.setup(containingTask, config, output); }
@Override public void setup( StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<WindowedValue<OutputT>>> output) { // make sure that FileSystems is initialized correctly FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class); FileSystems.setDefaultPipelineOptions(options); super.setup(containingTask, config, output); }
/**
 * Lazily constructs the combine context on first use and binds it to the given input.
 * NOTE(review): lazy init here is not synchronized — presumably single-threaded use; confirm.
 */
protected SparkCombineContext ctxtForInput(WindowedValue<?> input) {
  if (combineContext == null) {
    SparkSideInputReader sideInputReader = new SparkSideInputReader(sideInputs);
    combineContext = new SparkCombineContext(options.get(), sideInputReader);
  }
  return combineContext.forInput(input);
}
/**
 * Opens the given input split: wires up metrics, builds the reader invoker, creates the
 * reader for the split's bounded source, and starts it.
 */
@Override
public void open(SourceInputSplit<T> sourceInputSplit) throws IOException {
  FlinkMetricContainer metrics = new FlinkMetricContainer(getRuntimeContext());
  readerInvoker = new ReaderInvocationUtil<>(stepName, serializedOptions.get(), metrics);
  BoundedSource<T> boundedSource = (BoundedSource<T>) sourceInputSplit.getSource();
  reader = boundedSource.createReader(options);
  inputAvailable = readerInvoker.invokeStart(reader);
}
/** Prepares metrics and the reader invoker, then creates and starts a reader for the split. */
@Override
public void open(SourceInputSplit<T> sourceInputSplit) throws IOException {
  FlinkMetricContainer metrics = new FlinkMetricContainer(getRuntimeContext());
  readerInvoker = new ReaderInvocationUtil<>(stepName, serializedOptions.get(), metrics);
  BoundedSource<T> boundedSource = (BoundedSource<T>) sourceInputSplit.getSource();
  reader = boundedSource.createReader(options);
  inputAvailable = readerInvoker.invokeStart(reader);
}
/** Sets up metric reporting and the invocation helper, then starts reading the split. */
@Override
public void open(SourceInputSplit<T> sourceInputSplit) throws IOException {
  FlinkMetricContainer metrics = new FlinkMetricContainer(getRuntimeContext());
  readerInvoker = new ReaderInvocationUtil<>(stepName, serializedOptions.get(), metrics);
  BoundedSource<T> boundedSource = (BoundedSource<T>) sourceInputSplit.getSource();
  reader = boundedSource.createReader(options);
  inputAvailable = readerInvoker.invokeStart(reader);
}
/**
 * Combines all values for a key by delegating to a Flink combine runner. A sorting-based
 * runner is chosen when the window coder is the IntervalWindow coder; otherwise a
 * hashing-based runner is used.
 */
@Override
public void reduce(
    Iterable<WindowedValue<KV<K, InputT>>> elements,
    Collector<WindowedValue<KV<K, OutputT>>> out)
    throws Exception {
  PipelineOptions pipelineOptions = serializedOptions.get();
  FlinkSideInputReader sideInputReader =
      new FlinkSideInputReader(sideInputs, getRuntimeContext());
  boolean intervalWindows =
      windowingStrategy.getWindowFn().windowCoder().equals(IntervalWindow.getCoder());
  AbstractFlinkCombineRunner<K, InputT, AccumT, OutputT, W> combineRunner;
  if (intervalWindows) {
    combineRunner = new SortingFlinkCombineRunner<>();
  } else {
    combineRunner = new HashingFlinkCombineRunner<>();
  }
  combineRunner.combine(
      new AbstractFlinkCombineRunner.CompleteFlinkCombiner<K, InputT, AccumT, OutputT>(combineFn),
      windowingStrategy,
      sideInputReader,
      pipelineOptions,
      elements,
      out);
}
/**
 * Verifies that cloning a SerializablePipelineOptions round-trips regular option values
 * while the ignored field reverts to "not overridden" after deserialization.
 */
@Test
public void testSerializationAndDeserialization() throws Exception {
  PipelineOptions options =
      PipelineOptionsFactory.fromArgs("--foo=testValue", "--ignoredField=overridden")
          .as(MyOptions.class);
  SerializablePipelineOptions wrapped = new SerializablePipelineOptions(options);

  // Before serialization both values are visible as set on the command line.
  MyOptions before = wrapped.get().as(MyOptions.class);
  assertEquals("testValue", before.getFoo());
  assertEquals("overridden", before.getIgnoredField());

  // After a serialization round trip, foo survives but the ignored field does not.
  SerializablePipelineOptions copy = SerializableUtils.clone(wrapped);
  MyOptions after = copy.get().as(MyOptions.class);
  assertEquals("testValue", after.getFoo());
  assertEquals("not overridden", after.getIgnoredField());
}