/**
 * Returns whether Kryo reference tracking is enabled (default: {@code true}).
 * Set to {@code false} to disable reference tracking.
 */
@JsonIgnore
@Description("Set to false to disable reference tracking")
@Default.Boolean(true)
boolean getKryoReferenceTracking();

void setKryoReferenceTracking(boolean referenceTracking);
/**
 * Returns whether Kryo requires classes to be registered (default: {@code false}).
 * Set to {@code true} to require registration.
 */
@JsonIgnore
@Description("Set to true to require registration")
@Default.Boolean(false)
boolean getKryoRegistrationRequired();

void setKryoRegistrationRequired(boolean registrationRequired);
/** Returns whether Beam metrics are enabled in the Flink Runner (default: {@code true}). */
@Description("Enable/disable Beam metrics in Flink Runner")
@Default.Boolean(true)
Boolean getEnableMetrics();

void setEnableMetrics(Boolean enableMetrics);
/**
 * Returns whether the migration runs in 'dry run' mode (default: {@code false}).
 * In dry-run mode the import to the target agent is skipped and the migrated
 * data is written to the console instead.
 */
@Description(
    // Fixed help text: "should by" -> "should be", "will be send" -> "will be sent",
    // and added the spaces missing at the string-concatenation boundaries
    // ("performed andinstead", "sendto target" in the original).
    "Flag if migration should be executed in 'dry run' mode (import to target agent will not be performed and "
        + "instead will be written to console). By default value is set to 'false', which means migrated data will be sent "
        + "to target agent to perform import.")
@Default.Boolean(false)
boolean isDryRun();

void setDryRun(boolean dryRun);
/**
 * Returns whether to wait until the pipeline completes (default: {@code false}).
 * Useful for test purposes.
 */
@Description(
    "Whether to wait until the pipeline completes. This is useful " + "for test purposes.")
@Default.Boolean(false)
boolean getWait();

void setWait(boolean wait);
/**
 * Returns whether variant analyses run over all references present in the
 * dataset rather than only BRCA1 (default: {@code false}). When enabled,
 * jobs such as PCA and IBS automatically exclude the X and Y chromosomes.
 */
@Description(
    "By default, variants analyses will be run on BRCA1. Pass this flag to run on all "
        + "references present in the dataset. Note that certain jobs such as PCA and IBS "
        + "will automatically exclude X and Y chromosomes when this option is true.")
@Default.Boolean(false)
boolean isAllReferences();

void setAllReferences(boolean allReferences);
/**
 * Returns whether non-restored state is allowed when the savepoint contains
 * state for an operator that is no longer part of the pipeline
 * (default: {@code false}).
 */
@Description(
    "Flag indicating whether non restored state is allowed if the savepoint "
        + "contains state for an operator that is no longer part of the pipeline.")
@Default.Boolean(false)
Boolean getAllowNonRestoredState();
/**
 * Returns whether the DirectRunner enforces that PTransforms do not mutate
 * input elements at any point, or output elements after they are output
 * (default: {@code true}).
 */
@Default.Boolean(true)
@Description(
    "Controls whether the DirectRunner should ensure that all of the elements of every "
        + "PCollection are not mutated. PTransforms are not permitted to mutate input elements "
        + "at any point, or output elements after they are output.")
boolean isEnforceImmutability();
/**
 * Returns whether the Spark runner is initialized with a Spark context
 * supplied via SparkContextOptions (default: {@code false}).
 */
@Description(
    "If the spark runner will be initialized with a provided Spark Context. "
        + "The Spark Context should be provided with SparkContextOptions.")
@Default.Boolean(false)
boolean getUsesProvidedSparkContext();
/**
 * Returns whether the queried dataset contains non-variant segments, e.g.
 * Complete Genomics data or data in Genome VCF (gVCF) format
 * (default: {@code false}). When set, the pipeline accounts for non-variant
 * segment records that overlap variants within the dataset.
 */
@Description(
    "If querying a dataset with non-variant segments (such as Complete Genomics data "
        + "or data in Genome VCF (gVCF) format), specify this flag so that the pipeline correctly "
        + "takes into account non-variant segment records that overlap variants within the dataset.")
@Default.Boolean(false)
boolean getHasNonVariantSegments();

void setHasNonVariantSegments(boolean hasNonVariantSegments);
/**
 * Returns whether to wait until the pipeline completes (default: {@code false}).
 * Useful for test purposes.
 */
@Description(
    "Whether to wait until the pipeline completes. This is useful " + "for test purposes.")
@Default.Boolean(false)
boolean getWait();

void setWait(boolean wait);
/**
 * Returns whether {@code Pipeline#run()} blocks until all PTransforms are
 * complete (default: {@code true}). If {@code false}, the pipeline executes
 * asynchronously; use {@code PipelineResult#waitUntilFinish()} to block until
 * it is complete.
 */
@Default.Boolean(true)
@Description(
    "If the pipeline should block awaiting completion of the pipeline. If set to true, "
        + "a call to Pipeline#run() will block until all PTransforms are complete. Otherwise, "
        + "the Pipeline will execute asynchronously. If set to false, use "
        + "PipelineResult#waitUntilFinish() to block until the Pipeline is complete.")
boolean isBlockOnRun();
/**
 * Returns whether a task fails when it encounters an error in its
 * checkpointing procedure (default: {@code true}). If {@code false}, the task
 * only declines the checkpoint and continues running.
 */
@Description(
    // Fixed help-text typo: "decline a the checkpoint" -> "decline the checkpoint".
    "Sets the expected behaviour for tasks in case that they encounter an error in their "
        + "checkpointing procedure. If this is set to true, the task will fail on checkpointing error. "
        + "If this is set to false, the task will only decline the checkpoint and continue running. ")
@Default.Boolean(true)
Boolean getFailOnCheckpointingErrors();
/**
 * Returns whether externalized checkpoints are enabled (default: {@code false}).
 * Works in conjunction with CheckpointingInterval.
 */
@Description(
    "Enables or disables externalized checkpoints. "
        + "Works in conjunction with CheckpointingInterval")
@Default.Boolean(false)
Boolean isExternalizedCheckpointsEnabled();
/**
 * Returns whether to wait until the pipeline completes (default: {@code false}).
 * Useful for test purposes.
 */
@Description(
    "Whether to wait until the pipeline completes. This is useful " + "for test purposes.")
@Default.Boolean(false)
boolean getWait();

void setWait(boolean wait);
/**
 * Returns whether externalized checkpoints are enabled (default: {@code false}).
 * Works in conjunction with CheckpointingInterval.
 */
@Description(
    "Enables or disables externalized checkpoints. "
        + "Works in conjunction with CheckpointingInterval")
@Default.Boolean(false)
Boolean isExternalizedCheckpointsEnabled();

void setExternalizedCheckpointsEnabled(Boolean externalCheckpoints);
/**
 * Returns whether sources shut down when their watermark reaches {@code +Inf}
 * (default: {@code false}). For production use cases you want this disabled,
 * because Flink currently (versions {@literal <=} 1.5) stops doing checkpoints
 * when any operator (which includes sources) is finished.
 *
 * <p>Please see <a href="https://issues.apache.org/jira/browse/FLINK-2491">FLINK-2491</a> for
 * progress on this issue.
 */
@Description("If set, shutdown sources when their watermark reaches +Inf.")
@Default.Boolean(false)
Boolean isShutdownSourcesOnFinalWatermark();
/**
 * Returns whether sources shut down when their watermark reaches {@code +Inf}
 * (default: {@code false}). For production use cases you want this disabled,
 * because Flink currently (versions {@literal <=} 1.5) stops doing checkpoints
 * when any operator (which includes sources) is finished.
 *
 * <p>Please see <a href="https://issues.apache.org/jira/browse/FLINK-2491">FLINK-2491</a> for
 * progress on this issue.
 */
@Description("If set, shutdown sources when their watermark reaches +Inf.")
@Default.Boolean(false)
Boolean isShutdownSourcesOnFinalWatermark();
/**
 * Returns whether the DirectRunner enforces that all elements of every
 * PCollection can be encoded and decoded by that PCollection's Coder
 * (default: {@code true}).
 */
@Default.Boolean(true)
@Description(
    "Controls whether the DirectRunner should ensure that all of the elements of every "
        + "PCollection can be encoded and decoded by that PCollection's Coder.")
boolean isEnforceEncodability();
/**
 * Returns whether externalized checkpoints are enabled (default: {@code false}).
 * Works in conjunction with CheckpointingInterval.
 */
@Description(
    "Enables or disables externalized checkpoints. "
        + "Works in conjunction with CheckpointingInterval")
@Default.Boolean(false)
Boolean isExternalizedCheckpointsEnabled();