/**
 * Destroys the delegate Spark program, if one was instantiated during initialization.
 * Only {@link AbstractSpark} exposes a destroy() lifecycle hook; other Spark
 * implementations have nothing to tear down here.
 */
@Override
public void destroy() {
  // instanceof is null-safe, so a separate null check on delegateSpark is redundant.
  if (delegateSpark instanceof AbstractSpark) {
    ((AbstractSpark) delegateSpark).destroy();
  }
}
}
/**
 * Framework entry point invoked once at application configuration time.
 * Stores the supplied configurer before delegating to the no-arg {@code configure()},
 * which subclasses override and which presumably reads the stored configurer —
 * so the assignment must happen first.
 *
 * @param configurer the framework-supplied configurer for this Spark program
 */
@Override
public final void configure(SparkConfigurer configurer) {
  this.configurer = configurer;
  configure();
}
/**
 * Sets the job main class name in the specification. The main method of this class
 * will be called to run the Spark job.
 *
 * @param mainClass the class containing the main method
 */
protected final void setMainClass(Class<?> mainClass) {
  // Delegates to the String-based setter with the fully-qualified class name.
  setMainClassName(mainClass.getName());
}
/** * Returns the {@link ExtendedSparkConfigurer} for extended features, only available at configuration time. */ @Override protected ExtendedSparkConfigurer getConfigurer() { SparkConfigurer configurer = super.getConfigurer(); if (!(configurer instanceof ExtendedSparkConfigurer)) { // This shouldn't happen, unless there is bug in app-fabric throw new IllegalStateException( "Expected the configurer is an instance of " + ExtendedSparkConfigurer.class.getName() + ", but get " + configurer.getClass().getName() + " instead."); } return (ExtendedSparkConfigurer) configurer; }
/**
 * Framework entry point invoked once before the Spark job runs.
 * Captures the runtime context before delegating to the no-arg {@code initialize()},
 * which subclasses override and which presumably reads the stored context —
 * so the assignment must happen first.
 *
 * @param context the Spark client context supplied by the framework
 * @throws Exception if subclass initialization fails
 */
@Override
@TransactionPolicy(TransactionControl.IMPLICIT)
public final void initialize(SparkClientContext context) throws Exception {
  this.context = context;
  initialize();
}
/** * Returns the {@link ExtendedSparkConfigurer} for extended features, only available at configuration time. */ @Override protected ExtendedSparkConfigurer getConfigurer() { SparkConfigurer configurer = super.getConfigurer(); if (!(configurer instanceof ExtendedSparkConfigurer)) { // This shouldn't happen, unless there is bug in app-fabric throw new IllegalStateException( "Expected the configurer is an instance of " + ExtendedSparkConfigurer.class.getName() + ", but get " + configurer.getClass().getName() + " instead."); } return (ExtendedSparkConfigurer) configurer; }
@Override protected void initialize() throws Exception { SparkClientContext context = getContext(); String stageName = context.getSpecification().getProperty(STAGE_NAME); Class<?> externalProgramClass = context.loadPluginClass(stageName); // If the external program implements Spark, instantiate it and call initialize() to provide full lifecycle support if (Spark.class.isAssignableFrom(externalProgramClass)) { MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(context), context.getLogicalStartTime(), context, context.getNamespace()); delegateSpark = context.newPluginInstance(stageName, macroEvaluator); if (delegateSpark instanceof AbstractSpark) { //noinspection unchecked ((AbstractSpark) delegateSpark).initialize(context); } } }
/** * Returns the {@link ExtendedSparkConfigurer} for extended features, only available at configuration time. */ @Override protected ExtendedSparkConfigurer getConfigurer() { SparkConfigurer configurer = super.getConfigurer(); if (!(configurer instanceof ExtendedSparkConfigurer)) { // This shouldn't happen, unless there is bug in app-fabric throw new IllegalStateException( "Expected the configurer is an instance of " + ExtendedSparkConfigurer.class.getName() + ", but get " + configurer.getClass().getName() + " instead."); } return (ExtendedSparkConfigurer) configurer; }
/**
 * Runs parent teardown, then logs the pipeline's terminal status.
 * A COMPLETED status is reported as "succeeded"; any other status is reported
 * by its lowercased enum name (e.g. "failed", "killed").
 */
@TransactionPolicy(TransactionControl.EXPLICIT)
@Override
public void destroy() {
  super.destroy();
  ProgramStatus status = getContext().getState().getStatus();
  // Lowercase with a fixed locale: enum constant names are ASCII, and the default
  // locale (e.g. Turkish dotless-i) could otherwise mangle letters such as 'I'.
  WRAPPERLOGGER.info("Pipeline '{}' {}",
    getContext().getApplicationSpecification().getName(),
    status == ProgramStatus.COMPLETED
      ? "succeeded"
      : status.name().toLowerCase(java.util.Locale.ROOT));
}