@Override
public Object call() throws Exception {
  // Callable requires a return value, so run the stage's one-time initialization and return null.
  compute.initialize(context);
  return null;
}
@Override
public void initialize(SparkExecutionPluginContext context) throws Exception {
  // Pass initialization straight through to the wrapped compute plugin.
  delegate.initialize(context);
}
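For context, a minimal sketch of the SparkCompute contract these wrappers delegate to, assuming the cdap-etl-api-spark module and the io.cdap.cdap package layout of newer CDAP releases (older releases use co.cask.cdap); UpperCaseCompute is a hypothetical plugin name:

import org.apache.spark.api.java.JavaRDD;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;

// Hypothetical example plugin: uppercases every record in the input RDD.
public class UpperCaseCompute extends SparkCompute<String, String> {

  @Override
  public void initialize(SparkExecutionPluginContext context) throws Exception {
    // One-time setup; called before transform is invoked on any data.
  }

  @Override
  public JavaRDD<String> transform(SparkExecutionPluginContext context,
                                   JavaRDD<String> input) throws Exception {
    return input.map(String::toUpperCase);
  }
}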
Transactionals.execute(sec, new TxRunnable() {
  @Override
  public void run(DatasetContext datasetContext) throws Exception {
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
    SparkExecutionPluginContext sparkPluginContext =
        new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
    // Initialize the delegate inside a transaction so its setup can access datasets.
    delegate.initialize(sparkPluginContext);
  }
}, Exception.class);
Transactionals.execute(sec, new TxRunnable() {
  @Override
  public void run(DatasetContext datasetContext) throws Exception {
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
    // No JavaSparkContext is in scope on the streaming path, so recover one from the
    // SparkContext underlying the DStream.
    SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(
        sec, JavaSparkContext.fromSparkContext(stream.context().sparkContext()),
        datasetContext, pipelineRuntime, stageSpec);
    wrappedCompute.initialize(sparkPluginContext);
  }
}, Exception.class);
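The notable detail above is JavaSparkContext.fromSparkContext, a standard Spark API that wraps an existing Scala SparkContext when only the streaming context is at hand. A self-contained illustration, independent of CDAP:

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaSparkContext;

public class FromSparkContextDemo {
  public static void main(String[] args) {
    // Create a Scala SparkContext, then wrap it in a JavaSparkContext, mirroring the
    // JavaSparkContext.fromSparkContext(...) call in the streaming snippet above.
    SparkContext sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"));
    JavaSparkContext jsc = JavaSparkContext.fromSparkContext(sc);
    System.out.println(jsc.parallelize(java.util.Arrays.asList(1, 2, 3)).count());
    jsc.stop();
  }
}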
@Override
public <U> SparkCollection<U> compute(StageSpec stageSpec, SparkCompute<T, U> compute) throws Exception {
  String stageName = stageSpec.getName();
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
  SparkExecutionPluginContext sparkPluginContext =
      new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
  compute.initialize(sparkPluginContext);

  // Count incoming records and cache so the input RDD is not recomputed downstream.
  JavaRDD<T> countedInput =
      rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();

  // Run the plugin's transform, then count (and trace, when preview is enabled) outgoing records.
  return wrap(compute.transform(sparkPluginContext, countedInput)
      .map(new CountingFunction<U>(stageName, sec.getMetrics(), "records.out",
                                   sec.getDataTracer(stageName))));
}
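CountingFunction itself is not shown in this section; the following is a hedged reconstruction of what such a metrics-counting identity function plausibly looks like, built only from public CDAP and Spark APIs (Metrics.count, DataTracer.info). The real cdap-etl class may differ in detail, for example in how it names the metric:

import javax.annotation.Nullable;
import org.apache.spark.api.java.function.Function;
import io.cdap.cdap.api.metrics.Metrics;
import io.cdap.cdap.api.preview.DataTracer;

// Identity map() that counts each record it sees and, when preview tracing is
// enabled, records the value itself. The record is passed through unchanged.
public class CountingFunction<T> implements Function<T, T> {
  private final String stageName;
  private final Metrics metrics;
  private final String metricName;
  private final DataTracer dataTracer;

  public CountingFunction(String stageName, Metrics metrics, String metricName,
                          @Nullable DataTracer dataTracer) {
    this.stageName = stageName;
    this.metrics = metrics;
    this.metricName = metricName;
    this.dataTracer = dataTracer;
  }

  @Override
  public T call(T in) throws Exception {
    if (dataTracer != null && dataTracer.isEnabled()) {
      dataTracer.info(metricName, in);
    }
    // Assumption: the metric is scoped by stage name; the real class may scope it differently.
    metrics.count(stageName + "." + metricName, 1);
    return in;
  }
}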