// Transactional body: runs the wrapped SparkSink plugin against the metrics-counted RDD.
// NOTE(review): enclosing anonymous-class header (presumably a TxRunnable) is outside this
// view — confirm against the surrounding file. Exceptions propagate to the tx framework.
@Override public void run(DatasetContext context) throws Exception { sparkSink.run(sparkExecutionPluginContext, countedRDD); } });
// Callable adapter: delegates to the sink with the captured context and input, returning
// null because the sink produces no value. NOTE(review): the enclosing anonymous-class
// header (presumably Callable&lt;Void&gt;) starts before this view — confirm in the full file.
@Override public Void call() throws Exception { sink.run(context, input); return null; } });
// Runnable body executing one sink stage: builds the per-stage Spark plugin context,
// wraps the input RDD with a "records.in" metrics counter, and runs the sink.
// NOTE(review): the enclosing anonymous-class header is outside this view.
@Override
public void run() {
  String stageName = stageSpec.getName();
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
  SparkExecutionPluginContext sparkPluginContext =
    new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
  // cache() so the counting map (and its metric emission) is not re-executed if the
  // sink triggers more than one Spark action on the RDD.
  JavaRDD<T> countedRDD =
    rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
  try {
    sink.run(sparkPluginContext, countedRDD);
  } catch (Exception e) {
    // Guava idiom: propagate() rethrows unchecked exceptions as-is and wraps checked
    // ones in RuntimeException. The explicit 'throw' (vs. the original bare statement
    // call) tells the compiler and readers that this branch never falls through.
    throw Throwables.propagate(e);
  }
} };