@Override
protected void configure() {
  setName(NAME);
  setDescription(DESCRIPTION);
  // Add the batch execution program that matches the configured engine.
  switch (engine) {
    case MAPREDUCE:
      addMapReduce(ETLMapReduce.NAME);
      break;
    case SPARK:
      addSpark(ETLSpark.class.getSimpleName());
      break;
  }
  // Serialize the pipeline spec into the program properties so it is
  // available again at runtime.
  Map<String, String> properties = new HashMap<>();
  properties.put("pipeline.spec", GSON.toJson(spec));
  setProperties(properties);
}
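The snippet refers to several fields and constants that are defined elsewhere in the class. A minimal sketch of that surrounding scaffolding is shown below, assuming a Gson instance and a BatchPipelineSpec/Engine pair passed in through the constructor; the constant values are illustrative rather than copied from the CDAP sources, and the CDAP imports are omitted because the package prefix differs between releases (co.cask.cdap vs io.cdap.cdap).

import com.google.gson.Gson;

// Assumed scaffolding around configure(); constant values are illustrative.
public class ETLWorkflow extends AbstractWorkflow {
  public static final String NAME = "ETLWorkflow";
  private static final String DESCRIPTION = "Batch ETL Workflow";
  private static final Gson GSON = new Gson();

  private final BatchPipelineSpec spec;  // pipeline spec generated at deploy time
  private final Engine engine;           // MAPREDUCE or SPARK

  public ETLWorkflow(BatchPipelineSpec spec, Engine engine) {
    this.spec = spec;
    this.engine = engine;
  }

  // configure() shown above and destroy() shown next belong to this class.
}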
@TransactionPolicy(TransactionControl.EXPLICIT)
@Override
public void destroy() {
  WorkflowContext workflowContext = getContext();
  PipelineRuntime pipelineRuntime = new PipelineRuntime(workflowContext, workflowMetrics);
  // Skip post-actions when the data tracer is enabled, i.e. during preview runs.
  if (workflowContext.getDataTracer(PostAction.PLUGIN_TYPE).isEnabled()) {
    return;
  }
  // Run every configured post-action. A failure in one action is logged
  // but does not stop the remaining actions from running.
  for (Map.Entry<String, PostAction> endingActionEntry : postActions.entrySet()) {
    String name = endingActionEntry.getKey();
    PostAction action = endingActionEntry.getValue();
    StageSpec stageSpec = postActionSpecs.get(name);
    BatchActionContext context =
      new WorkflowBackedActionContext(workflowContext, pipelineRuntime, stageSpec);
    try {
      action.run(context);
    } catch (Throwable t) {
      LOG.error("Error while running ending action {}.", name, t);
    }
  }
}
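Each entry in the loop above is a plugin of type PostAction.PLUGIN_TYPE whose run() method is invoked once the workflow finishes. As a rough illustration, here is a minimal sketch of a custom post-action that just logs the outcome of the run. The class name LogOutcomeAction is hypothetical; the sketch assumes the cdap-etl-api contract of an abstract PostAction class with a single run(BatchActionContext) method and a BatchActionContext.isSuccessful() accessor, and CDAP imports are again omitted because the package prefix varies by release.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical post-action plugin; invoked from the workflow's destroy() above.
@Plugin(type = PostAction.PLUGIN_TYPE)
@Name("LogOutcomeAction")
public class LogOutcomeAction extends PostAction {
  private static final Logger LOG = LoggerFactory.getLogger(LogOutcomeAction.class);

  @Override
  public void configurePipeline(PipelineConfigurer pipelineConfigurer) {
    // Nothing to validate or register at deployment time for this example.
  }

  @Override
  public void run(BatchActionContext context) throws Exception {
    // Called once per configured post-action, whether or not the run succeeded.
    LOG.info("Pipeline run finished, succeeded={}", context.isSuccessful());
  }
}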
addWorkflow(new ETLWorkflow(spec, config.getEngine()));
// Schedule the workflow on the cron expression from the pipeline configuration.
schedule(buildSchedule(SCHEDULE_NAME, ProgramType.WORKFLOW, ETLWorkflow.NAME)
           .setDescription("ETL Batch schedule")
           .triggerByTime(config.getSchedule()));