@Override
@TransactionPolicy(TransactionControl.EXPLICIT)
public void destroy() {
  // Tell the finisher whether the pipeline run ended in COMPLETED state.
  if (finisher != null) {
    // NOTE(review): the null guard suggests finisher may be unset when initialization
    // failed before it was assigned — confirm against the initialize() path.
    boolean completedOk = getContext().getState().getStatus() == ProgramStatus.COMPLETED;
    finisher.onFinish(completedOk);
  }
  // Best-effort removal of temporary resources: a failed delete is logged, never thrown.
  for (File leftover : cleanupFiles) {
    if (!leftover.delete()) {
      LOG.warn("Failed to clean up resource {} ", leftover);
    }
  }
}
}
// NOTE(review): truncated extract — five SubmitterPlugin setups (batchSource, batchSink,
// transform, aggregator, batchJoiner) are concatenated on one line and every anonymous
// SubmitterPlugin.PrepareAction body is cut off (opening braces are never closed on this
// line). Restore the full text from the original file before compiling; do not edit as-is.
ContextProvider<MapReduceBatchContext> contextProvider = new MapReduceBatchContextProvider(context, pipelineRuntime, stageSpec, connectorDatasets); submitterPlugin = new SubmitterPlugin<>( stageName, context, batchSource, contextProvider, new SubmitterPlugin.PrepareAction<MapReduceBatchContext>() { ContextProvider<MapReduceBatchContext> contextProvider = new MapReduceBatchContextProvider(context, pipelineRuntime, stageSpec, connectorDatasets); submitterPlugin = new SubmitterPlugin<>( stageName, context, batchSink, contextProvider, new SubmitterPlugin.PrepareAction<MapReduceBatchContext>() { ContextProvider<MapReduceBatchContext> contextProvider = new MapReduceBatchContextProvider(context, pipelineRuntime, stageSpec, connectorDatasets); submitterPlugin = new SubmitterPlugin<>( stageName, context, transform, contextProvider, new SubmitterPlugin.PrepareAction<MapReduceBatchContext>() { new AggregatorContextProvider(pipelineRuntime, stageSpec, context.getAdmin()); submitterPlugin = new SubmitterPlugin<>( stageName, context, aggregator, contextProvider, new SubmitterPlugin.PrepareAction<DefaultAggregatorContext>() { new JoinerContextProvider(pipelineRuntime, stageSpec, context.getAdmin()); submitterPlugin = new SubmitterPlugin<>( stageName, context, batchJoiner, contextProvider, new SubmitterPlugin.PrepareAction<DefaultJoinerContext>() { submitterPlugin.prepareRun();
// NOTE(review): truncated extract (Spark variant) — SubmitterPlugin setups for
// batchSource, transform, batchSink, sparkSink, aggregator and joiner are concatenated on
// one line; the anonymous PrepareAction bodies stop at "@Override" with their braces
// unbalanced, and trailing statements (prepareRun, finishers.add, CompositeFinisher,
// context.localize) are appended without context. Restore from the original file before
// compiling; do not edit in this form.
submitterPlugin = new SubmitterPlugin(stageName, context, batchSource, contextProvider, new SubmitterPlugin.PrepareAction<SparkBatchSourceContext>() { @Override submitterPlugin = new SubmitterPlugin(stageName, context, transform, contextProvider, new SubmitterPlugin.PrepareAction<SparkBatchSourceContext>() { @Override submitterPlugin = new SubmitterPlugin(stageName, context, batchSink, contextProvider, new SubmitterPlugin.PrepareAction<SparkBatchSinkContext>() { @Override submitterPlugin = new SubmitterPlugin(stageName, context, sparkSink, contextProvider); new AggregatorContextProvider(pipelineRuntime, stageSpec, admin); submitterPlugin = new SubmitterPlugin(stageName, context, aggregator, contextProvider, new SubmitterPlugin.PrepareAction<DefaultAggregatorContext>() { @Override new JoinerContextProvider(pipelineRuntime, stageSpec, admin); submitterPlugin = new SubmitterPlugin<>( stageName, context, joiner, contextProvider, new SubmitterPlugin.PrepareAction<DefaultJoinerContext>() { submitterPlugin.prepareRun(); finishers.add(submitterPlugin); finisher = new CompositeFinisher(finishers); context.localize("HydratorSpark.config", configFile.toURI());
@Override @TransactionPolicy(TransactionControl.EXPLICIT) public void destroy() { boolean isSuccessful = getContext().getState().getStatus() == ProgramStatus.COMPLETED; if (finisher != null) { // this can be null if the initialize() method failed. finisher.onFinish(isSuccessful); } LOG.info("Batch Run finished : status = {}", getContext().getState()); }