/** Converts the provided Json{@link String} into {@link PipelineOptions}. */ public static PipelineOptions fromJson(String optionsJson) { try { Map<String, Object> probingOptionsMap = MAPPER.readValue(optionsJson, new TypeReference<Map<String, Object>>() {}); if (probingOptionsMap.containsKey("options")) { //Legacy options. return MAPPER.readValue(optionsJson, PipelineOptions.class); } else { // Fn Options with namespace and version. Struct.Builder builder = Struct.newBuilder(); JsonFormat.parser().merge(optionsJson, builder); return fromProto(builder.build()); } } catch (IOException e) { throw new RuntimeException("Failed to read PipelineOptions from JSON", e); } }
/**
 * Returns the {@link FlinkExecutableStageContext} for the given job, lazily creating and caching
 * per-job factory state on first access.
 */
@Override
public FlinkExecutableStageContext get(JobInfo jobInfo) {
  JobFactoryState factoryState =
      jobFactories.computeIfAbsent(
          jobInfo.jobId(),
          unusedJobId -> {
            PortablePipelineOptions options =
                PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())
                    .as(PortablePipelineOptions.class);
            // Fall back to a parallelism of 1 when sdkWorkerParallelism is unset.
            int parallelism =
                MoreObjects.firstNonNull(options.getSdkWorkerParallelism(), 1L).intValue();
            return new JobFactoryState(parallelism);
          });
  return factoryState.getFactory().get(jobInfo);
}
}
// Returns the FlinkExecutableStageContext for the given job. Per-job factory state is created
// lazily and cached in jobFactories, so repeated calls for the same job id reuse it.
@Override
public FlinkExecutableStageContext get(JobInfo jobInfo) {
  JobFactoryState state =
      jobFactories.computeIfAbsent(
          jobInfo.jobId(),
          k -> {
            // Translate the proto-encoded pipeline options to read portable settings.
            PortablePipelineOptions portableOptions =
                PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())
                    .as(PortablePipelineOptions.class);
            // Default to a parallelism of 1 when sdkWorkerParallelism is unset (null).
            return new JobFactoryState(
                MoreObjects.firstNonNull(portableOptions.getSdkWorkerParallelism(), 1L)
                    .intValue());
          });
  return state.getFactory().get(jobInfo);
}
}
PipelineOptionsTranslation.fromProto(options).as(FlinkPipelineOptions.class); // translate the proto-encoded options, viewed as FlinkPipelineOptions — NOTE(review): enclosing method not visible in this chunk
PipelineOptionsTranslation.fromProto(options).as(FlinkPipelineOptions.class); // translate the proto-encoded options, viewed as FlinkPipelineOptions — NOTE(review): enclosing method not visible in this chunk
// Releases the cached context for the given job, either immediately or — when an environment
// cache TTL is configured and deferred cleanup is safe — after the TTL has elapsed.
@SuppressWarnings("FutureReturnValueIgnored")
private void scheduleRelease(JobInfo jobInfo) {
  WrappedContext wrapper = getCache().get(jobInfo.jobId());
  Preconditions.checkState(
      wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId());
  PipelineOptions pipelineOptions =
      PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions());
  int environmentCacheTTLMillis =
      pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis();
  if (environmentCacheTTLMillis > 0) {
    // Do immediate cleanup if this class is not loaded on Flink parent classloader.
    // (Deferred release requires the class to outlive the job's own classloader.)
    if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) {
      LOG.warn(
          "{} is not loaded on parent Flink classloader. "
              + "Falling back to synchronous environment release for job {}.",
          this.getClass(),
          jobInfo.jobId());
      release(wrapper);
    } else {
      // Schedule task to clean the container later.
      // Ensure that this class is loaded in the parent Flink classloader.
      getExecutor()
          .schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS);
    }
  } else {
    // Do not release this asynchronously, as the releasing could fail due to the classloader not
    // being available anymore after the tasks have been removed from the execution engine.
    release(wrapper);
  }
}
@SuppressWarnings("FutureReturnValueIgnored") private void scheduleRelease(JobInfo jobInfo) { WrappedContext wrapper = getCache().get(jobInfo.jobId()); Preconditions.checkState( wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId()); PipelineOptions pipelineOptions = PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions()); int environmentCacheTTLMillis = pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis(); if (environmentCacheTTLMillis > 0) { // Do immediate cleanup if this class is not loaded on Flink parent classloader. if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) { LOG.warn( "{} is not loaded on parent Flink classloader. " + "Falling back to synchronous environment release for job {}.", this.getClass(), jobInfo.jobId()); release(wrapper); } else { // Schedule task to clean the container later. // Ensure that this class is loaded in the parent Flink classloader. getExecutor() .schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS); } } else { // Do not release this asynchronously, as the releasing could fail due to the classloader not // being available anymore after the tasks have been removed from the execution engine. release(wrapper); } }
/** An empty {@link Struct} deserializes to a non-null {@link PipelineOptions} instance. */
@Test
public void emptyStructDeserializes() throws Exception {
  Struct emptyStruct = Struct.getDefaultInstance();
  PipelineOptions options = PipelineOptionsTranslation.fromProto(emptyStruct);
  assertThat(options, notNullValue());
}
private static FlinkDefaultExecutableStageContext create(JobInfo jobInfo) { JobBundleFactory jobBundleFactory = DefaultJobBundleFactory.create( jobInfo, ImmutableMap.of( BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER), new DockerEnvironmentFactory.Provider( PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())), BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS), new ProcessEnvironmentFactory.Provider(), BeamUrns.getUrn(StandardEnvironments.Environments.EXTERNAL), new ExternalEnvironmentFactory.Provider(), Environments.ENVIRONMENT_EMBEDDED, // Non Public urn for testing. new EmbeddedEnvironmentFactory.Provider( PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())))); return new FlinkDefaultExecutableStageContext(jobBundleFactory); }
/** Unserializable option values are dropped in the proto form and come back as {@code null}. */
@Test
public void ignoredSettingsNotSerialized() throws Exception {
  TestUnserializableOptions original = PipelineOptionsFactory.as(TestUnserializableOptions.class);
  original.setUnserializable(new Object());
  Struct struct = PipelineOptionsTranslation.toProto(original);
  PipelineOptions roundTripped = PipelineOptionsTranslation.fromProto(struct);
  assertThat(
      roundTripped.as(TestUnserializableOptions.class).getUnserializable(), is(nullValue()));
}
/** Default option values survive a round trip through the proto representation. */
@Test
public void defaultsRestored() throws Exception {
  Struct struct =
      PipelineOptionsTranslation.toProto(PipelineOptionsFactory.as(TestDefaultOptions.class));
  PipelineOptions roundTripped = PipelineOptionsTranslation.fromProto(struct);
  assertThat(roundTripped.as(TestDefaultOptions.class).getDefault(), equalTo(19));
}
private static FlinkDefaultExecutableStageContext create(JobInfo jobInfo) { JobBundleFactory jobBundleFactory = DefaultJobBundleFactory.create( jobInfo, ImmutableMap.of( BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER), new DockerEnvironmentFactory.Provider( PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())), BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS), new ProcessEnvironmentFactory.Provider(), Environments.ENVIRONMENT_EMBEDDED, // Non Public urn for testing. new EmbeddedEnvironmentFactory.Provider())); return new FlinkDefaultExecutableStageContext(jobBundleFactory); }
/** Serializing, deserializing, and re-serializing options yields an identical field map. */
@Test
public void testToFromProto() throws Exception {
  // Touch the options id before serializing — presumably to materialize it so both
  // serializations carry the same value; confirm against PipelineOptions#getOptionsId.
  options.getOptionsId();
  Struct original = PipelineOptionsTranslation.toProto(options);
  PipelineOptions roundTripped = PipelineOptionsTranslation.fromProto(original);
  Struct reserialized = PipelineOptionsTranslation.toProto(roundTripped);
  assertThat(reserialized.getFieldsMap(), equalTo(original.getFieldsMap()));
}
/** A custom option value set by the caller survives the proto round trip. */
@Test
public void customSettingsRetained() throws Exception {
  TestOptions original = PipelineOptionsFactory.as(TestOptions.class);
  original.setExample(23);
  Struct struct = PipelineOptionsTranslation.toProto(original);
  PipelineOptions roundTripped = PipelineOptionsTranslation.fromProto(struct);
  assertThat(roundTripped.as(TestOptions.class).getExample(), equalTo(23));
}
/** A struct carrying an explicit proto null option value deserializes without error. */
@Test
public void structWithNullOptionsDeserializes() throws Exception {
  Struct struct =
      Struct.newBuilder()
          .putFields(
              "beam:option:option_key:v1",
              Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())
          .build();
  PipelineOptions options = PipelineOptionsTranslation.fromProto(struct);
  assertThat(options, notNullValue());
}
}