/**
 * Creates a Flink reduce function that applies the given {@link CombineFnBase.GlobalCombineFn}.
 *
 * @param combineFn the combine function to apply
 * @param windowingStrategy the windowing strategy in effect for the input
 * @param sideInputs side input views mapped to their windowing strategies
 * @param pipelineOptions pipeline options, captured in serializable form so they travel with
 *     this function when Flink ships it to the workers
 */
public FlinkReduceFunction(
    CombineFnBase.GlobalCombineFn<?, AccumT, OutputT> combineFn,
    WindowingStrategy<Object, W> windowingStrategy,
    Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
    PipelineOptions pipelineOptions) {
  // Wrap the options eagerly; SerializablePipelineOptions is what actually gets serialized.
  this.serializedOptions = new SerializablePipelineOptions(pipelineOptions);
  this.sideInputs = sideInputs;
  this.windowingStrategy = windowingStrategy;
  this.combineFn = combineFn;
}
/**
 * Flink lifecycle callback run on the worker before processing starts; rehydrates the
 * {@link PipelineOptions} from their serialized snapshot into the transient {@code options}
 * field.
 *
 * @param configuration Flink configuration (unused)
 */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/**
 * Custom Java deserialization hook: restores the default (non-transient) fields, then rebuilds
 * the transient {@code options} from the JSON snapshot taken at construction time.
 *
 * @param is the stream being read from
 * @throws IOException if the stream cannot be read
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private void readObject(ObjectInputStream is) throws IOException, ClassNotFoundException {
  is.defaultReadObject();
  // Rehydrate the options from the JSON captured in the constructor.
  this.options = deserializeFromJson(serializedPipelineOptions);
  // TODO https://issues.apache.org/jira/browse/BEAM-2712: remove this call.
  FileSystems.setDefaultPipelineOptions(options);
}
@Test
public void testSerializationAndDeserialization() throws Exception {
  final PipelineOptions original =
      PipelineOptionsFactory.fromArgs("--foo=testValue", "--ignoredField=overridden")
          .as(MyOptions.class);

  // Before any serialization, both fields reflect the command-line values.
  final SerializablePipelineOptions wrapped = new SerializablePipelineOptions(original);
  assertEquals("testValue", wrapped.get().as(MyOptions.class).getFoo());
  assertEquals("overridden", wrapped.get().as(MyOptions.class).getIgnoredField());

  // After a serialization round trip, regular fields survive while fields excluded from
  // serialization revert to their defaults.
  final SerializablePipelineOptions roundTripped = SerializableUtils.clone(wrapped);
  assertEquals("testValue", roundTripped.get().as(MyOptions.class).getFoo());
  assertEquals("not overridden", roundTripped.get().as(MyOptions.class).getIgnoredField());
}
/**
 * Wraps the given {@link PipelineOptions} in a Java-serializable holder.
 *
 * @param options the options to wrap; a JSON snapshot is taken immediately and is what gets
 *     written out during Java serialization
 */
public SerializablePipelineOptions(PipelineOptions options) {
  this.options = options;
  // Snapshot to JSON now; readObject() reconstructs the options from this string.
  this.serializedPipelineOptions = serializeToJson(options);
  FileSystems.setDefaultPipelineOptions(options);
}
@Test
public void testIndependence() throws Exception {
  SerializablePipelineOptions optsA =
      new SerializablePipelineOptions(
          PipelineOptionsFactory.fromArgs("--foo=first").as(MyOptions.class));
  SerializablePipelineOptions optsACopy = SerializableUtils.clone(optsA);
  SerializablePipelineOptions optsB =
      new SerializablePipelineOptions(
          PipelineOptionsFactory.fromArgs("--foo=second").as(MyOptions.class));
  SerializablePipelineOptions optsBCopy = SerializableUtils.clone(optsB);

  // Each wrapper starts out with its own value.
  assertEquals("first", fooOf(optsA));
  assertEquals("first", fooOf(optsACopy));
  assertEquals("second", fooOf(optsB));
  assertEquals("second", fooOf(optsBCopy));

  // Mutating one wrapper's options must not leak into any of the others.
  optsA.get().as(MyOptions.class).setFoo("new first");
  optsACopy.get().as(MyOptions.class).setFoo("new firstCopy");
  optsB.get().as(MyOptions.class).setFoo("new second");
  optsBCopy.get().as(MyOptions.class).setFoo("new secondCopy");

  assertEquals("new first", fooOf(optsA));
  assertEquals("new firstCopy", fooOf(optsACopy));
  assertEquals("new second", fooOf(optsB));
  assertEquals("new secondCopy", fooOf(optsBCopy));
}

/** Reads {@code foo} through a {@link MyOptions} view of the given wrapper. */
private static String fooOf(SerializablePipelineOptions options) {
  return options.get().as(MyOptions.class).getFoo();
}
}
/**
 * Flink lifecycle callback run on the worker before processing starts; rehydrates the
 * {@link PipelineOptions} from their serialized snapshot into the transient {@code options}
 * field.
 *
 * @param configuration Flink configuration (unused)
 */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/**
 * Constructs the reduce function around a {@link CombineFnBase.GlobalCombineFn}.
 *
 * @param combineFn the combine function to run
 * @param windowingStrategy windowing strategy of the input collection
 * @param sideInputs mapping from side input views to their windowing strategies
 * @param pipelineOptions options, stored via {@link SerializablePipelineOptions} so they can be
 *     serialized with this function
 */
public FlinkReduceFunction(
    CombineFnBase.GlobalCombineFn<?, AccumT, OutputT> combineFn,
    WindowingStrategy<Object, W> windowingStrategy,
    Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
    PipelineOptions pipelineOptions) {
  this.combineFn = combineFn;
  this.sideInputs = sideInputs;
  // Non-serializable PipelineOptions are wrapped at construction time.
  this.serializedOptions = new SerializablePipelineOptions(pipelineOptions);
  this.windowingStrategy = windowingStrategy;
}
/**
 * Flink lifecycle callback run on the worker before processing starts; rehydrates the
 * {@link PipelineOptions} from their serialized snapshot into the transient {@code options}
 * field.
 *
 * @param configuration Flink configuration (unused)
 */
@Override
public void configure(Configuration configuration) {
  options = serializedOptions.get();
}
/**
 * Creates an operator reading from the given unbounded source.
 *
 * @param source the source to read from
 * @param options Apex pipeline options, captured in serializable form
 */
public ApexReadUnboundedInputOperator(
    UnboundedSource<OutputT, CheckpointMarkT> source, ApexPipelineOptions options) {
  this.source = source;
  // This overload is for genuinely unbounded sources only.
  this.isBoundedSource = false;
  this.pipelineOptions = new SerializablePipelineOptions(options);
}
/**
 * Opens a reader for the bounded source carried by the given partition.
 *
 * @param partition the partition whose source is read
 * @return a new reader for the partition's source
 * @throws RuntimeException wrapping the {@link IOException} if the reader cannot be created
 */
private BoundedSource.BoundedReader<T> createReader(SourcePartition<T> partition) {
  BoundedSource<T> boundedSource = (BoundedSource<T>) partition.source;
  try {
    return boundedSource.createReader(options.get());
  } catch (IOException e) {
    throw new RuntimeException("Failed to create reader from a BoundedSource.", e);
  }
}
/**
 * Creates an evaluation context for running the given pipeline on Spark.
 *
 * @param jsc the underlying {@link JavaSparkContext}
 * @param pipeline the pipeline being evaluated
 * @param options pipeline options; also retained in serializable form for worker-side use
 */
public EvaluationContext(JavaSparkContext jsc, Pipeline pipeline, PipelineOptions options) {
  // Keep both the raw options (driver side) and a serializable wrapper (shipped to workers).
  this.serializableOptions = new SerializablePipelineOptions(options);
  this.options = options;
  this.pipeline = pipeline;
  this.jsc = jsc;
}
/**
 * Apex operator lifecycle hook: resolves the debug-tracing flag from the pipeline options and
 * creates and starts the source reader.
 *
 * @param context the operator context provided by the Apex engine
 * @throws RuntimeException wrapping the {@link IOException} if the reader cannot be created or
 *     started
 */
@Override
public void setup(OperatorContext context) {
  this.traceTuples =
      ApexStreamTuple.Logging.isDebugEnabled(
          pipelineOptions.get().as(ApexPipelineOptions.class), this);
  try {
    reader = source.createReader(this.pipelineOptions.get(), null);
    available = reader.start();
  } catch (IOException e) {
    // Wrap with context (matching the descriptive style used by other reader-creation sites)
    // instead of a bare RuntimeException, so failures are attributable from the stack trace.
    throw new RuntimeException("Failed to create and start the unbounded source reader.", e);
  }
}
/**
 * Creates an input format that reads from the given {@link BoundedSource}.
 *
 * @param stepName the name of the transform step
 * @param initialSource the source to read from
 * @param options pipeline options, stored in serializable form
 */
public SourceInputFormat(
    String stepName, BoundedSource<T> initialSource, PipelineOptions options) {
  this.serializedOptions = new SerializablePipelineOptions(options);
  this.initialSource = initialSource;
  this.stepName = stepName;
}
/**
 * Apex operator lifecycle hook: resolves whether per-tuple debug tracing is enabled for this
 * operator from the deserialized pipeline options.
 *
 * @param context the operator context provided by the Apex engine
 */
@Override
public void setup(OperatorContext context) {
  ApexPipelineOptions opts = serializedOptions.get().as(ApexPipelineOptions.class);
  this.traceTuples = ApexStreamTuple.Logging.isDebugEnabled(opts, this);
}
/**
 * Creates a partial-reduce function around the given {@link CombineFnBase.GlobalCombineFn}.
 *
 * @param combineFn the combine function to apply
 * @param windowingStrategy the windowing strategy in effect for the input
 * @param sideInputs side input views mapped to their windowing strategies
 * @param pipelineOptions options, wrapped so they serialize along with this function
 */
public FlinkPartialReduceFunction(
    CombineFnBase.GlobalCombineFn<InputT, AccumT, ?> combineFn,
    WindowingStrategy<Object, W> windowingStrategy,
    Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
    PipelineOptions pipelineOptions) {
  this.serializedOptions = new SerializablePipelineOptions(pipelineOptions);
  this.windowingStrategy = windowingStrategy;
  this.combineFn = combineFn;
  this.sideInputs = sideInputs;
}
/**
 * Keeps this source "alive" after it has finished emitting when
 * {@code isShutdownSourcesOnFinalWatermark} is disabled: the thread parks in a sleep loop until
 * the job is canceled, since Flink needs all operators running for checkpointing to keep
 * working. When the option is enabled, this returns immediately and the source shuts down.
 */
private void finalizeSource() {
  FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class);
  if (!options.isShutdownSourcesOnFinalWatermark()) {
    // do nothing, but still look busy ...
    // we can't return here since Flink requires that all operators stay up,
    // otherwise checkpointing would not work correctly anymore
    //
    // See https://issues.apache.org/jira/browse/FLINK-2491 for progress on this issue

    // wait until this is canceled
    while (isRunning) {
      try {
        // Flink will interrupt us at some point
        Thread.sleep(1000);
      } catch (InterruptedException e) {
        if (!isRunning) {
          // restore the interrupted state, and fall through the loop so the
          // caller still observes the interruption after cancellation
          Thread.currentThread().interrupt();
        }
      }
    }
  }
}
/**
 * Creates a read operator for the given source.
 *
 * @param source the source to read from, exposed through the unbounded-source API
 * @param isBoundedSource whether the wrapped source should be treated as bounded
 * @param options Apex pipeline options, captured in serializable form
 */
public ApexReadUnboundedInputOperator(
    UnboundedSource<OutputT, CheckpointMarkT> source,
    boolean isBoundedSource,
    ApexPipelineOptions options) {
  this.isBoundedSource = isBoundedSource;
  this.source = source;
  this.pipelineOptions = new SerializablePipelineOptions(options);
}
/**
 * Returns the Spark storage level configured through {@link SparkPipelineOptions}.
 */
public String storageLevel() {
  return serializableOptions.get().as(SparkPipelineOptions.class).getStorageLevel();
}
}
/**
 * Creates a merging, non-shuffle reduce function around the given
 * {@link CombineFnBase.GlobalCombineFn}.
 *
 * @param combineFn the combine function to apply
 * @param windowingStrategy the windowing strategy in effect for the input
 * @param sideInputs side input views mapped to their windowing strategies
 * @param pipelineOptions options, wrapped in serializable form for shipping to workers
 */
public FlinkMergingNonShuffleReduceFunction(
    CombineFnBase.GlobalCombineFn<InputT, AccumT, OutputT> combineFn,
    WindowingStrategy<Object, W> windowingStrategy,
    Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
    PipelineOptions pipelineOptions) {
  this.sideInputs = sideInputs;
  this.combineFn = combineFn;
  // Wrap the options up front so this function remains serializable as a whole.
  this.serializedOptions = new SerializablePipelineOptions(pipelineOptions);
  this.windowingStrategy = windowingStrategy;
}