// Delegates pipeline configuration to the wrapped sink.
@Override
public Void call() {
  sink.configurePipeline(pipelineConfigurer);
  return null;
}
});
// Notifies the wrapped sink that the run finished, passing its success status.
@Override
public Void call() throws Exception {
  sink.onRunFinish(succeeded, context);
  return null;
}
});
// Delegates run preparation to the wrapped sink.
@Override
public Void call() throws Exception {
  sink.prepareRun(context);
  return null;
}
});
// Executes the SparkSink against the metric-counted RDD.
@Override
public void run(DatasetContext context) throws Exception {
  sparkSink.run(sparkExecutionPluginContext, countedRDD);
}
});
// Delegates the actual sink execution to the wrapped sink.
@Override
public Void call() throws Exception {
  sink.run(context, input);
  return null;
}
});
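The Callable<Void> bodies above (configurePipeline, prepareRun, run, onRunFinish) all funnel the wrapped sink's methods through a single caller, so cross-cutting concerns live in one place. A minimal sketch of that delegation pattern, with a simplified Caller interface assumed for illustration rather than the actual CDAP class:

import java.util.concurrent.Callable;

// Simplified stand-in for the wrapper's caller abstraction; an assumption
// for illustration, not the real CDAP type.
interface Caller {
  <T> T call(Callable<T> callable) throws Exception;
}

// Trivial implementation that invokes the callable directly; a real one
// might swap in the plugin classloader or record metrics around the call.
class DirectCaller implements Caller {
  @Override
  public <T> T call(Callable<T> callable) throws Exception {
    return callable.call();
  }
}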
// Builds a per-stage plugin context and reports a successful run.
@Override
public void run(DatasetContext datasetContext) throws Exception {
  SparkPluginContext context =
    new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
  sparkSink.onRunFinish(true, context);
}
});
@Override
public void run() {
  String stageName = stageSpec.getName();
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
  SparkExecutionPluginContext sparkPluginContext =
    new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
  // Count incoming records for metrics before handing the RDD to the sink.
  JavaRDD<T> countedRDD =
    rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
  try {
    sink.run(sparkPluginContext, countedRDD);
  } catch (Exception e) {
    // Wrap and rethrow; Runnable.run() cannot declare checked exceptions.
    throw Throwables.propagate(e);
  }
}
};
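Because Runnable.run() cannot declare checked exceptions, the fragment above routes failures through Guava's Throwables.propagate, which wraps checked exceptions in a RuntimeException and rethrows unchecked ones as-is. A self-contained sketch of the idiom (doWork is a hypothetical helper, not from the source):

import com.google.common.base.Throwables;

public class PropagateDemo {
  // Hypothetical helper that fails with a checked exception.
  static void doWork() throws Exception {
    throw new Exception("checked failure");
  }

  public static void main(String[] args) {
    Runnable task = new Runnable() {
      @Override
      public void run() {
        try {
          doWork();
        } catch (Exception e) {
          // The explicit 'throw' shows the compiler this branch never
          // completes normally, even though propagate() always throws.
          throw Throwables.propagate(e);
        }
      }
    };
    task.run(); // throws RuntimeException wrapping "checked failure"
  }
}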
// Builds a per-stage plugin context and lets the SparkSink prepare the run.
@Override
public void run(DatasetContext datasetContext) throws Exception {
  SparkPluginContext context =
    new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
  sparkSink.prepareRun(context);
}
});
// Builds a per-stage plugin context and reports a failed run.
@Override
public void run(DatasetContext datasetContext) throws Exception {
  SparkPluginContext context =
    new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
  sparkSink.onRunFinish(false, context);
}
});
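Taken together, the fragments trace the sink lifecycle: prepareRun executes before the job, run consumes the counted RDD, and onRunFinish fires with true or false depending on the outcome. A minimal sketch of that ordering, using simplified stand-in types rather than the actual CDAP interfaces:

// Simplified stand-ins for the CDAP sink interfaces; assumed for
// illustration, not the real API.
interface SimpleSink<T> {
  void prepareRun() throws Exception;
  void run(T input) throws Exception;
  void onRunFinish(boolean succeeded);
}

public class SinkLifecycle {
  // Drives a sink through the same ordering as the fragments above:
  // prepareRun before the job, run on the input, then onRunFinish with
  // true on success and false on failure.
  static <T> void execute(SimpleSink<T> sink, T input) throws Exception {
    sink.prepareRun();
    boolean succeeded = false;
    try {
      sink.run(input);
      succeeded = true;
    } finally {
      sink.onRunFinish(succeeded);
    }
  }
}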