public SparkPipelineRuntime(SparkClientContext context) {
  // Pure delegation to the parent PipelineRuntime constructor, pulling every value
  // off the Spark client context. The same context object is passed for the last
  // five arguments — presumably because SparkClientContext implements several
  // capability interfaces the parent takes as separate parameters
  // (NOTE(review): confirm against PipelineRuntime's parameter list, which is not
  // visible in this chunk).
  super(context.getNamespace(), context.getApplicationSpecification().getName(), context.getLogicalStartTime(), new BasicArguments(context), context.getMetrics(), context, context, context, context, context);
}
// NOTE(review): this chunk begins mid-statement — the token below is the tail of a
// deserialization call (likely GSON) producing a DataStreamsPipelineSpec; the start
// of that expression is outside this view.
DataStreamsPipelineSpec.class);
// Plugin context used to instantiate/evaluate pipeline plugins. The two boolean
// arguments are hard-coded to true here; presumably they enable stage logging and
// process timing — TODO confirm against the SparkPipelinePluginContext constructor
// (compare the variant elsewhere in this file that reads them from phaseSpec).
PipelinePluginContext pluginContext = new SparkPipelinePluginContext(context, context.getMetrics(), true, true);
// Running count of source stages; initialized here and incremented by code not
// visible in this chunk.
int numSources = 0;
// NOTE(review): interior of a method whose signature is outside this view; the
// locals `context` and `phaseSpec` are defined by that enclosing scope.
//
// Factory that records batch sink outputs for later replay by the Spark program
// (semantics inferred from the name — confirm in SparkBatchSinkFactory).
final SparkBatchSinkFactory sinkFactory = new SparkBatchSinkFactory();
// Stage name -> requested partition count; empty here, presumably populated by
// code later in this method.
final Map<String, Integer> stagePartitions = new HashMap<>();
// Plugin context honoring the phase's own logging/timing settings, unlike the
// hard-coded `true, true` variant used elsewhere in this file.
PluginContext pluginContext = new SparkPipelinePluginContext(context, context.getMetrics(), phaseSpec.isStageLoggingEnabled(), phaseSpec.isProcessTimingEnabled());
// Instantiator that creates the plugin instances for the stages in this phase.
PipelinePluginInstantiator pluginInstantiator = new PipelinePluginInstantiator(pluginContext, context.getMetrics(), phaseSpec, new SingleConnectorFactory());
// Runtime wrapper around the program context, shared by downstream stage setup.
final PipelineRuntime pipelineRuntime = new PipelineRuntime(context);
// Admin handle from the program context — NOTE(review): likely used for dataset
// administration later in the method; usage not visible here.
final Admin admin = context.getAdmin();