@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=childproc") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), TF); new OutputRedirector(app.getErrorStream(), TF); assertEquals(0, app.waitFor()); }
// pyFiles set earlier in this test should be recorded verbatim on the builder.
assertEquals(Arrays.asList("bar"), launcher.builder.pyFiles);
// A spark-submit --conf added after setConf() wins: the builder ends up with
// the value from addSparkArg, not the earlier setConf value.
launcher.setConf("spark.foo", "foo");
launcher.addSparkArg(opts.CONF, "spark.foo=bar");
assertEquals("bar", launcher.builder.conf.get("spark.foo"));
// The PYSPARK_* launcher constants should land under the Scala-side config
// keys (package$.MODULE$...); only the driver-python key is asserted here.
launcher.setConf(SparkLauncher.PYSPARK_DRIVER_PYTHON, "python3.4");
launcher.setConf(SparkLauncher.PYSPARK_PYTHON, "python3.5");
assertEquals("python3.4", launcher.builder.conf.get(
  package$.MODULE$.PYSPARK_DRIVER_PYTHON().key()));
// pyFiles set earlier in this test should be recorded verbatim on the builder.
assertEquals(Arrays.asList("bar"), launcher.builder.pyFiles);
// A spark-submit --conf added after setConf() wins: the builder ends up with
// the value from addSparkArg, not the earlier setConf value.
launcher.setConf("spark.foo", "foo");
launcher.addSparkArg(opts.CONF, "spark.foo=bar");
assertEquals("bar", launcher.builder.conf.get("spark.foo"));
// The PYSPARK_* launcher constants should land under the Scala-side config
// keys (package$.MODULE$...); only the driver-python key is asserted here.
launcher.setConf(SparkLauncher.PYSPARK_DRIVER_PYTHON, "python3.4");
launcher.setConf(SparkLauncher.PYSPARK_PYTHON, "python3.5");
assertEquals("python3.4", launcher.builder.conf.get(
  package$.MODULE$.PYSPARK_DRIVER_PYTHON().key()));
// pyFiles set earlier in this test should be recorded verbatim on the builder.
assertEquals(Arrays.asList("bar"), launcher.builder.pyFiles);
// A spark-submit --conf added after setConf() wins: the builder ends up with
// the value from addSparkArg, not the earlier setConf value.
launcher.setConf("spark.foo", "foo");
launcher.addSparkArg(opts.CONF, "spark.foo=bar");
assertEquals("bar", launcher.builder.conf.get("spark.foo"));
// The PYSPARK_* launcher constants should land under the Scala-side config
// keys (package$.MODULE$...); only the driver-python key is asserted here.
launcher.setConf(SparkLauncher.PYSPARK_DRIVER_PYTHON, "python3.4");
launcher.setConf(SparkLauncher.PYSPARK_PYTHON, "python3.5");
assertEquals("python3.4", launcher.builder.conf.get(
  package$.MODULE$.PYSPARK_DRIVER_PYTHON().key()));
// Tail of a builder chain whose start is outside this view: give the driver
// 1g of memory and pass the app's command-line arguments straight through.
.setConf(SparkLauncher.DRIVER_MEMORY, "1g")
.addAppArgs(appArgs);
// Tail of a builder chain whose start is outside this view: give the driver
// 1g of memory and pass the app's command-line arguments straight through.
.setConf(SparkLauncher.DRIVER_MEMORY, "1g")
.addAppArgs(appArgs);
/**
 * Redirects all of the child process's output (stdout and stderr) into the
 * logger with the given name.
 *
 * @param loggerName The name of the logger to log stdout and stderr.
 * @return This launcher.
 */
public SparkLauncher redirectToLog(String loggerName) {
  setConf(CHILD_PROCESS_LOGGER_NAME, loggerName);
  this.redirectToLog = true;
  return this;
}
/**
 * Sets all output to be logged and redirected to a logger with the specified name.
 *
 * <p>Note: this only records the logger name under
 * {@code CHILD_PROCESS_LOGGER_NAME}; presumably the actual redirection is
 * wired up when the child process is launched — confirm in the launch path.
 *
 * @param loggerName The name of the logger to log stdout and stderr.
 * @return This launcher.
 */
public SparkLauncher redirectToLog(String loggerName) {
  setConf(CHILD_PROCESS_LOGGER_NAME, loggerName);
  return this;
}
/**
 * Redirects all of the child process's output (stdout and stderr) into the
 * logger with the given name.
 *
 * @param loggerName The name of the logger to log stdout and stderr.
 * @return This launcher.
 */
public SparkLauncher redirectToLog(String loggerName) {
  setConf(CHILD_PROCESS_LOGGER_NAME, loggerName);
  this.redirectToLog = true;
  return this;
}
/**
 * Sets all output to be logged and redirected to a logger with the specified name.
 *
 * <p>Note: this only records the logger name under
 * {@code CHILD_PROCESS_LOGGER_NAME}; presumably the actual redirection is
 * wired up when the child process is launched — confirm in the launch path.
 *
 * @param loggerName The name of the logger to log stdout and stderr.
 * @return This launcher.
 */
public SparkLauncher redirectToLog(String loggerName) {
  setConf(CHILD_PROCESS_LOGGER_NAME, loggerName);
  return this;
}
} else if (validator.CONF.equals(name)) {
  // --conf values arrive as "key=value"; split on the first '=' only so the
  // value itself may contain '='. NOTE(review): a value with no '=' at all
  // would make vals[1] throw ArrayIndexOutOfBoundsException — confirm that
  // upstream validation rules that out.
  String[] vals = value.split("=", 2);
  setConf(vals[0], vals[1]);
} else if (validator.CLASS.equals(name)) {
  // --class maps directly onto the launcher's main-class setting.
  setMainClass(value);
} else if (validator.CONF.equals(name)) {
  // --conf values arrive as "key=value"; split on the first '=' only so the
  // value itself may contain '='. NOTE(review): a value with no '=' at all
  // would make vals[1] throw ArrayIndexOutOfBoundsException — confirm that
  // upstream validation rules that out.
  String[] vals = value.split("=", 2);
  setConf(vals[0], vals[1]);
} else if (validator.CLASS.equals(name)) {
  // --class maps directly onto the launcher's main-class setting.
  setMainClass(value);
// Pin spark.serializer to the JDK serializer. The reason is not visible in
// this view — presumably to avoid Kryo class-registration issues; confirm.
sparkLauncher.setConf("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
// Tail of a builder chain whose start is outside this view.
// NOTE(review): line break after "//TODO" reconstructed — the newline
// stripping in this chunk indicates the TODO ended its source line; confirm
// the startApplication() call was not intentionally commented out, and give
// the bare //TODO an explanation or remove it.
.setConf(SparkLauncher.DRIVER_MEMORY, "2g");//TODO
appHandle = launcher.startApplication();