/**
 * Execute a plan.
 *
 * @param jobName   name of the {@link Job} or {@code null}
 * @param monitor   {@link Monitor} that is handed to the created {@link Job} (passed through to {@code createJob})
 * @param rheemPlan the plan to execute
 * @param udfJars   JARs that declare the code for the UDFs
 * @see ReflectionUtils#getDeclaringJar(Class)
 */
public void execute(String jobName, Monitor monitor, RheemPlan rheemPlan, String... udfJars) {
    // Delegate: build the Job for this plan, then run it immediately.
    this.createJob(jobName, monitor, rheemPlan, udfJars).execute();
}
/**
 * Execute a plan.
 *
 * @param jobName    name of the {@link Job} or {@code null}
 * @param rheemPlan  the plan to execute
 * @param experiment {@link Experiment} for that profiling entries will be created
 * @param udfJars    JARs that declare the code for the UDFs
 * @see ReflectionUtils#getDeclaringJar(Class)
 */
public void execute(String jobName, RheemPlan rheemPlan, Experiment experiment, String... udfJars) {
    // Create the Job for this plan and run it right away.
    final Job job = this.createJob(jobName, rheemPlan, experiment, udfJars);
    job.execute();
}
@Override public void warmUp(Configuration configuration) { super.warmUp(configuration); // Run a most simple Spark job. this.logger.info("Running warm-up Spark job..."); long startTime = System.currentTimeMillis(); final RheemContext rheemCtx = new RheemContext(configuration); SparkCollectionSource<Integer> source = new SparkCollectionSource<>( Collections.singleton(0), DataSetType.createDefault(Integer.class) ); SparkLocalCallbackSink<Integer> sink = new SparkLocalCallbackSink<>( dq -> { }, DataSetType.createDefault(Integer.class) ); source.connectTo(0, sink, 0); final Job job = rheemCtx.createJob("Warm up", new RheemPlan(sink)); // Make sure not to have the warm-up jobs bloat the execution logs. job.getConfiguration().setProperty("rheem.core.log.enabled", "false"); job.execute(); long stopTime = System.currentTimeMillis(); this.logger.info("Spark warm-up finished in {}.", Formats.formatDuration(stopTime - startTime, true)); }
while (!this.execute(executionPlan, executionId)) { this.optimizationRound.start(); if (this.postProcess(executionPlan, executionId)) {