/**
 * Provides the {@link Configuration} of the {@link Job} associated with this instance.
 *
 * @return the {@link Job}'s {@link Configuration}
 */
public Configuration getConfiguration() {
    return job.getConfiguration();
}
/**
 * Exposes the {@link Configuration} that governs the underlying {@link Job}.
 *
 * @return the {@link Configuration} obtained from the {@link Job}
 */
public Configuration getConfiguration() {
    return job.getConfiguration();
}
/**
 * Creates a new instance.
 *
 * @param javaPlatform the {@link JavaPlatform} to associate with this executor
 * @param job          the {@link Job} to execute; its {@link Configuration} is used to
 *                     set up the {@link FunctionCompiler}
 */
public JavaExecutor(JavaPlatform javaPlatform, Job job) {
    super(job);
    // The two assignments below are independent of each other.
    this.compiler = new FunctionCompiler(job.getConfiguration());
    this.platform = javaPlatform;
}
/**
 * Creates a new instance.
 *
 * @param platform the {@link GraphChiPlatform} to associate with this executor
 * @param job      the {@link Job} being executed; supplies the cross-platform executor
 *                 for the superclass and the {@link Configuration} cached here
 */
public GraphChiExecutor(GraphChiPlatform platform, Job job) {
    super(job.getCrossPlatformExecutor());
    // Cache frequently accessed collaborators; assignment order is immaterial.
    this.configuration = job.getConfiguration();
    this.platform = platform;
    this.job = job;
}
/**
 * Creates a new, plain instance.
 * <p>Delegates to the master constructor with no parent context, no operator container,
 * a fresh {@link ChannelConversionGraph}, and the pruning strategies derived from the
 * {@link Job}'s {@link Configuration}. The semantics of the {@code null}/{@code -1}
 * arguments are defined by the delegated-to constructor (not visible here).</p>
 *
 * @param job the {@link Job} this context belongs to
 */
public OptimizationContext(Job job) {
    this(
            job,
            null,
            null,
            -1,
            new ChannelConversionGraph(job.getConfiguration()),
            initializePruningStrategies(job.getConfiguration())
    );
}
/**
 * Creates a new instance that hosts a single {@link Operator}. Useful for testing.
 * <p>Delegates to the master constructor exactly like the plain constructor, then
 * registers the given {@link Operator} via {@code addOneTimeOperator}.</p>
 *
 * @param job      the {@link Job} this context belongs to
 * @param operator the single {@link Operator} of this instance
 */
public OptimizationContext(Job job, Operator operator) {
    this(
            job,
            null,
            null,
            -1,
            new ChannelConversionGraph(job.getConfiguration()),
            initializePruningStrategies(job.getConfiguration())
    );
    this.addOneTimeOperator(operator);
}
@Override public void warmUp(Configuration configuration) { super.warmUp(configuration); // Run a most simple Spark job. this.logger.info("Running warm-up Spark job..."); long startTime = System.currentTimeMillis(); final RheemContext rheemCtx = new RheemContext(configuration); SparkCollectionSource<Integer> source = new SparkCollectionSource<>( Collections.singleton(0), DataSetType.createDefault(Integer.class) ); SparkLocalCallbackSink<Integer> sink = new SparkLocalCallbackSink<>( dq -> { }, DataSetType.createDefault(Integer.class) ); source.connectTo(0, sink, 0); final Job job = rheemCtx.createJob("Warm up", new RheemPlan(sink)); // Make sure not to have the warm-up jobs bloat the execution logs. job.getConfiguration().setProperty("rheem.core.log.enabled", "false"); job.execute(); long stopTime = System.currentTimeMillis(); this.logger.info("Spark warm-up finished in {}.", Formats.formatDuration(stopTime - startTime, true)); }
final Configuration configuration = job.getConfiguration(); if (this.sparkContextReference != null && !this.sparkContextReference.isDisposed()) { final JavaSparkContext sparkContext = this.sparkContextReference.get();