/**
 * Creates a pipeline runtime backed by a {@link SparkClientContext}, delegating to the
 * superclass with the namespace, application name, logical start time, runtime arguments,
 * and metrics drawn from that context. The context itself is passed for each of the
 * remaining capability parameters, since it implements all of those interfaces.
 *
 * @param context the Spark client context for this run
 */
public SparkPipelineRuntime(SparkClientContext context) {
  // super(...) must be the first statement, so all values are derived inline from the context.
  super(context.getNamespace(),
        context.getApplicationSpecification().getName(),
        context.getLogicalStartTime(),
        new BasicArguments(context),
        context.getMetrics(),
        context, context, context, context, context);
}
@Override protected void initialize() throws Exception { SparkClientContext context = getContext(); String stageName = context.getSpecification().getProperty(STAGE_NAME); Class<?> externalProgramClass = context.loadPluginClass(stageName); // If the external program implements Spark, instantiate it and call initialize() to provide full lifecycle support if (Spark.class.isAssignableFrom(externalProgramClass)) { MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(context), context.getLogicalStartTime(), context, context.getNamespace()); delegateSpark = context.newPluginInstance(stageName, macroEvaluator); if (delegateSpark instanceof AbstractSpark) { //noinspection unchecked ((AbstractSpark) delegateSpark).initialize(context); } } }
context.getLogicalStartTime(), context, context.getNamespace()); final SparkBatchSourceFactory sourceFactory = new SparkBatchSourceFactory();