/**
 * Creates a pipeline runtime whose context (namespace, app name, arguments, metrics,
 * plugin context, service discoverer, secure store) is derived entirely from the
 * given Spark execution context.
 *
 * @param sec the Spark execution context supplying all runtime services
 * @param batchTime the batch time used as the logical start time of this run
 */
public SparkPipelineRuntime(JavaSparkExecutionContext sec, long batchTime) {
  super(sec.getNamespace(), sec.getApplicationSpecification().getName(), batchTime,
        new BasicArguments(sec), sec.getMetrics(), sec.getPluginContext(),
        sec.getServiceDiscoverer(), sec.getSecureStore());
}
}
/**
 * Creates a streaming context for a single pipeline stage, backed by the Spark
 * execution context and the Spark streaming context.
 *
 * <p>The {@code PipelineRuntime} is built inline because {@code super(...)} must be the
 * first statement. Note that {@code sec} itself is passed for the final three
 * PipelineRuntime arguments — presumably it implements those context interfaces
 * directly; verify against the PipelineRuntime constructor signature.
 *
 * @param stageSpec specification of the stage this context belongs to
 * @param sec the Spark execution context supplying runtime services
 * @param jsc the Spark streaming context for this run
 */
public DefaultStreamingContext(StageSpec stageSpec, JavaSparkExecutionContext sec,
                               JavaStreamingContext jsc) {
  super(new PipelineRuntime(sec.getNamespace(), sec.getApplicationSpecification().getName(),
                            sec.getLogicalStartTime(), new BasicArguments(sec),
                            sec.getMetrics(), sec.getPluginContext(),
                            sec.getServiceDiscoverer(), sec, sec, sec),
        stageSpec);
  this.sec = sec;
  this.jsc = jsc;
  this.admin = sec.getAdmin();
}
/**
 * Captures everything a plugin function needs from the Spark execution context so it
 * can be used later (e.g. inside Spark closures) without holding the context itself.
 *
 * <p>Field assignment order is preserved deliberately: {@code getPluginContext()} is an
 * instance method and presumably reads fields assigned above it — keep it last.
 * TODO(review): confirm which fields {@code getPluginContext()} depends on.
 *
 * @param stageSpec specification of the stage this function runs in
 * @param sec the Spark execution context supplying runtime services
 * @param arguments runtime arguments (currently unused here; BasicArguments is built
 *                  from {@code sec} instead — verify whether this parameter is intentional)
 * @param logicalStartTime logical start time of the run
 * @param collector collector for stage statistics
 */
public PluginFunctionContext(StageSpec stageSpec, JavaSparkExecutionContext sec,
                             Map<String, String> arguments, long logicalStartTime,
                             StageStatisticsCollector collector) {
  this.namespace = sec.getNamespace();
  this.pipelineName = sec.getApplicationSpecification().getName();
  this.stageSpec = stageSpec;
  this.logicalStartTime = logicalStartTime;
  this.arguments = new BasicArguments(sec);
  this.pluginContext = sec.getPluginContext();
  this.serviceDiscoverer = sec.getServiceDiscoverer();
  this.metrics = sec.getMetrics();
  this.secureStore = sec.getSecureStore();
  this.dataTracer = sec.getDataTracer(stageSpec.getName());
  this.pipelinePluginContext = getPluginContext();
  this.collector = collector;
}
// NOTE(review): incomplete fragment — the body of this if-block continues beyond the
// visible source. When checkpointing is enabled, it starts resolving the checkpoint
// location from the pipeline name and the spec's configured checkpoint directory;
// the remainder of the logic (and the closing brace) is not visible here.
if (!checkpointsDisabled) { String pipelineName = sec.getApplicationSpecification().getName(); String relativeCheckpointDir = pipelineSpec.getCheckpointDirectory();