/**
 * Creates a pipeline runtime for a Spark execution, deriving every runtime service
 * (arguments, metrics, plugin context, service discovery, secure store) from the
 * given Spark execution context.
 *
 * @param sec the Spark execution context supplying namespace, app spec, and runtime services
 * @param batchTime logical batch time in milliseconds for this run
 */
public SparkPipelineRuntime(JavaSparkExecutionContext sec, long batchTime) {
  super(sec.getNamespace(),
        sec.getApplicationSpecification().getName(),
        batchTime,
        new BasicArguments(sec),
        sec.getMetrics(),
        sec.getPluginContext(),
        sec.getServiceDiscoverer(),
        sec.getSecureStore());
}
}
/**
 * Captures everything a plugin function needs at execution time from the Spark
 * execution context: identity (namespace, pipeline, stage), runtime services,
 * arguments, tracing, and the statistics collector.
 *
 * <p>Field assignments deliberately happen before {@link #getPluginContext()} is
 * invoked, since that call reads state initialized here.
 *
 * @param stageSpec spec of the stage this function runs in
 * @param sec the Spark execution context supplying runtime services
 * @param arguments runtime arguments (currently superseded by {@code new BasicArguments(sec)})
 * @param logicalStartTime logical start time of the run in milliseconds
 * @param collector collector for per-stage statistics
 */
public PluginFunctionContext(StageSpec stageSpec, JavaSparkExecutionContext sec,
                             Map<String, String> arguments, long logicalStartTime,
                             StageStatisticsCollector collector) {
  this.namespace = sec.getNamespace();
  this.pipelineName = sec.getApplicationSpecification().getName();
  this.stageSpec = stageSpec;
  this.logicalStartTime = logicalStartTime;
  this.arguments = new BasicArguments(sec);
  this.pluginContext = sec.getPluginContext();
  this.serviceDiscoverer = sec.getServiceDiscoverer();
  this.metrics = sec.getMetrics();
  this.secureStore = sec.getSecureStore();
  this.dataTracer = sec.getDataTracer(stageSpec.getName());
  // Must run after the fields above are set; presumably reads them — TODO confirm.
  this.pipelinePluginContext = getPluginContext();
  this.collector = collector;
}
// NOTE(review): fragment — tail of a DefaultMacroEvaluator(...) construction whose opening
// call is outside this view, followed by macro-aware instantiation of the JavaSparkMain plugin.
sec.getLogicalStartTime(), sec.getSecureStore(), sec.getNamespace()); JavaSparkMain javaSparkMain = pluginContext.newPluginInstance(stageName, macroEvaluator);
/**
 * Publishes all alerts from one micro-batch through the stage's {@code AlertPublisher}.
 *
 * <p>Alerts are collected to the driver and fed to the publisher via a
 * {@link TrackedIterator} so that each consumed alert increments the stage's
 * {@code records.in} metric. The publisher is always destroyed after it has been
 * initialized, even if {@code publish} throws, so plugin resources are not leaked.
 *
 * @param data RDD of alerts for this batch
 * @param batchTime batch time used as the logical start time for macro evaluation
 * @return always {@code null} (required by the VoidFunction2-style contract)
 * @throws Exception if plugin instantiation, initialization, or publishing fails
 */
@Override
public Void call(JavaRDD<Alert> data, Time batchTime) throws Exception {
  MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), batchTime.milliseconds(),
                                                       sec.getSecureStore(), sec.getNamespace());
  PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(),
                                                               stageSpec.isStageLoggingEnabled(),
                                                               stageSpec.isProcessTimingEnabled());
  String stageName = stageSpec.getName();
  AlertPublisher alertPublisher = pluginContext.newPluginInstance(stageName, evaluator);
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, batchTime.milliseconds());
  AlertPublisherContext alertPublisherContext =
    new DefaultAlertPublisherContext(pipelineRuntime, stageSpec, sec.getMessagingContext(), sec.getAdmin());
  alertPublisher.initialize(alertPublisherContext);
  try {
    StageMetrics stageMetrics = new DefaultStageMetrics(sec.getMetrics(), stageName);
    // collect() materializes the batch on the driver; alerts are expected to be small.
    TrackedIterator<Alert> trackedAlerts =
      new TrackedIterator<>(data.collect().iterator(), stageMetrics, Constants.Metrics.RECORDS_IN);
    alertPublisher.publish(trackedAlerts);
  } finally {
    // Guarantee plugin cleanup even when publish() fails (the original leaked on failure).
    alertPublisher.destroy();
  }
  return null;
}
}
// NOTE(review): fragment — builds a macro evaluator from runtime arguments, then begins
// constructing a SparkPipelinePluginContext; the argument list continues outside this view.
MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getNamespace()); PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(),
// NOTE(review): fragment — macro evaluator construction identical in shape to the others in
// this chunk; enclosing method is not visible here.
MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getNamespace());