  @Override
  public Void call() throws Exception {
    sink.run(context, input);
    return null;
  }
});
  @Override
  public Void call() throws Exception {
    sink.prepareRun(context);
    return null;
  }
});
@Override
public void prepareRun(BatchAggregatorContext context) throws Exception {
  if (numPartitions != null) {
    context.setNumPartitions(numPartitions);
  }
}
}
@Override
public void prepareRun(BatchSinkContext context) throws Exception {
  // Create the output Table dataset on first use, then register it as this stage's output.
  if (!context.datasetExists(config.tableName)) {
    context.createDataset(config.tableName, "table", DatasetProperties.EMPTY);
  }
  context.addOutput(Output.ofDataset(config.tableName));
}
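// A minimal sketch (an assumption, not from the source) of the plugin config the
// prepareRun snippet above reads: only tableName is implied by config.tableName.
// Extends co.cask.cdap.api.plugin.PluginConfig; @Description is from
// co.cask.cdap.api.annotation.Description.
public static class Config extends PluginConfig {
  @Description("Name of the Table dataset to create and write to.")
  private String tableName;
}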
@Override
public void initialize(BatchJoinerRuntimeContext context) throws Exception {
  inputSchemas = context.getInputSchemas();
  outputSchema = context.getOutputSchema();
}
@Override
public void initialize(BatchRuntimeContext context) throws Exception {
  super.initialize(context);
  if (config.schema != null) {
    // Should never happen; this check exists only to verify app correctness in unit tests.
    Schema outputSchema = Schema.parseJson(config.schema);
    if (!outputSchema.equals(context.getOutputSchema())) {
      throw new IllegalStateException("Output schema does not match what was set at configure time.");
    }
  }
}
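// For context (an assumed example, not from the source): Schema.parseJson expects an
// Avro-style JSON schema string, e.g.
//   {"type":"record","name":"output","fields":[{"name":"body","type":"string"}]}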
@Override
public void initialize(BatchRuntimeContext context) throws Exception {
  super.initialize(context);
}
  @Override
  public void run(DatasetContext datasetContext) throws Exception {
    SparkBatchSinkContext sinkContext =
      new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
    // true: notify the sink that the run succeeded.
    batchSink.onRunFinish(true, sinkContext);
  }
});
  @Override
  public Object call() throws Exception {
    compute.initialize(context);
    return null;
  }
});
  @Override
  public Void call() throws Exception {
    sink.onRunFinish(succeeded, context);
    return null;
  }
});
  @Override
  public Void call() {
    sink.configurePipeline(pipelineConfigurer);
    return null;
  }
});
  @Override
  public Void call() {
    compute.configurePipeline(pipelineConfigurer);
    return null;
  }
});
  @Override
  public JavaRDD<OUT> call() throws Exception {
    return compute.transform(context, input);
  }
});
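// The Callable fragments above appear to be bodies of anonymous classes handed to
// a wrapper that runs plugin code with the plugin's own classloader/context in
// place. A hedged sketch of such a helper (the Caller name and shape are
// assumptions, not confirmed by the source):
interface Caller {
  <T> T call(java.util.concurrent.Callable<T> callable) throws Exception;
}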
@Override
public void initialize(BatchJoinerRuntimeContext context) throws Exception {
  inputSchemas = context.getInputSchemas();
  init(inputSchemas);
  outputSchema = context.getOutputSchema();
}
  @Override
  public void run(DatasetContext datasetContext) throws Exception {
    SparkBatchSinkContext sinkContext =
      new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
    // false: notify the sink that the run failed.
    batchSink.onRunFinish(false, sinkContext);
  }
});
  @Override
  public void run(DatasetContext context) throws Exception {
    sparkSink.run(sparkExecutionPluginContext, countedRDD);
  }
});
@Override
public void prepareRun(BatchAggregatorContext context) throws Exception {
  // Only override the partition count when one was configured; otherwise keep the engine default.
  if (conf.numPartitions != null) {
    context.setNumPartitions(conf.numPartitions);
  }
}
@Override
public void initialize(SparkExecutionPluginContext context) throws Exception {
  delegate.initialize(context);
}
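// Hedged sketch of the delegating wrapper the fragment above implies; the class
// name and constructor are assumptions, and only initialize() mirrors the source.
// Uses co.cask.cdap.etl.api.batch.SparkCompute / SparkExecutionPluginContext and
// org.apache.spark.api.java.JavaRDD.
public class DelegatingSparkCompute<IN, OUT> extends SparkCompute<IN, OUT> {
  private final SparkCompute<IN, OUT> delegate;

  public DelegatingSparkCompute(SparkCompute<IN, OUT> delegate) {
    this.delegate = delegate;
  }

  @Override
  public void initialize(SparkExecutionPluginContext context) throws Exception {
    delegate.initialize(context);
  }

  @Override
  public JavaRDD<OUT> transform(SparkExecutionPluginContext context,
                                JavaRDD<IN> input) throws Exception {
    return delegate.transform(context, input);
  }
}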