@Test
public void testChildProcLauncher() throws Exception {
  // This test fails on Windows because executors cannot be launched there
  // due to the path length limitation. See SPARK-18718.
  assumeTrue(!Utils.isWindows());

  SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
  Map<String, String> env = new HashMap<>();
  env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");

  launcher
    .setMaster("local")
    .setAppResource(SparkLauncher.NO_RESOURCE)
    .addSparkArg(opts.CONF,
      String.format("%s=-Dfoo=ShouldBeOverriddenBelow",
        SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
    .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
      "-Dfoo=bar -Dtest.appender=console")
    .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path"))
    .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow")
    .setMainClass(SparkLauncherTestApp.class.getName())
    .redirectError()
    .addAppArgs("proc");
  final Process app = launcher.launch();

  new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF);
  assertEquals(0, app.waitFor());
}
@Test
public void testChildProcLauncher() throws Exception {
  // This test fails on Windows because executors cannot be launched there
  // due to the path length limitation. See SPARK-18718.
  assumeTrue(!Utils.isWindows());

  SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
  Map<String, String> env = new HashMap<>();
  env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");

  launcher
    .setMaster("local")
    .setAppResource(SparkLauncher.NO_RESOURCE)
    .addSparkArg(opts.CONF,
      String.format("%s=-Dfoo=ShouldBeOverriddenBelow",
        SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
    .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
      "-Dfoo=bar -Dtest.appender=childproc")
    .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path"))
    .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow")
    .setMainClass(SparkLauncherTestApp.class.getName())
    .addAppArgs("proc");
  final Process app = launcher.launch();

  new OutputRedirector(app.getInputStream(), TF);
  new OutputRedirector(app.getErrorStream(), TF);
  assertEquals(0, app.waitFor());
}
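// For reference, a minimal standalone sketch of the same child-process
// pattern using only the public SparkLauncher API. The jar path and main
// class (com.example.MyApp) below are placeholders, and the parent drains
// the child's output itself, since OutputRedirector above is internal to
// the org.apache.spark.launcher package.
import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.spark.launcher.SparkLauncher;

public class ChildProcExample {
  public static void main(String[] args) throws Exception {
    Process spark = new SparkLauncher()
      .setMaster("local")
      .setAppResource("/path/to/my-app.jar") // placeholder jar
      .setMainClass("com.example.MyApp")     // placeholder main class
      .redirectError()                       // merge stderr into stdout
      .launch();

    // Drain the child's output so it cannot block on a full pipe buffer.
    try (BufferedReader out = new BufferedReader(
        new InputStreamReader(spark.getInputStream()))) {
      String line;
      while ((line = out.readLine()) != null) {
        System.out.println(line);
      }
    }
    System.exit(spark.waitFor());
  }
}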
.setMaster("local") .setConf(SparkLauncher.DRIVER_MEMORY, "1g") .addAppArgs(appArgs);
.setMaster("local") .setConf(SparkLauncher.DRIVER_MEMORY, "1g") .addAppArgs(appArgs);
SparkSubmitOptionParser validator = new ArgumentValidator(true);
if (validator.MASTER.equals(name)) {
  setMaster(value);
} else if (validator.PROPERTIES_FILE.equals(name)) {
  setPropertiesFile(value);
} // ... remaining option branches elided in the original snippet
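// The snippet above is from inside the launcher library itself:
// SparkSubmitOptionParser and the ArgumentValidator subclass are internal
// to the org.apache.spark.launcher package. Application code reaches the
// same validation indirectly through addSparkArg, which checks the option
// name against the known spark-submit options. A sketch, assuming a
// launcher built as in the earlier examples:
launcher.addSparkArg("--master", "local[2]");
launcher.addSparkArg("--properties-file", "/path/to/spark.properties"); // placeholder path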
sparkLauncher.setMaster("yarn-cluster"); sparkLauncher.setAppName("DataCleaner");
.setMaster(this.deploymentConfig.getSparkHost())
.setConf(SparkLauncher.DRIVER_MEMORY, "2g"); // TODO
appHandle = launcher.startApplication();
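// Unlike launch(), startApplication returns a SparkAppHandle instead of a
// raw Process, so the caller can observe state transitions. A minimal
// sketch with a listener; the jar path and main class are placeholders:
import java.util.concurrent.CountDownLatch;

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class HandleExample {
  public static void main(String[] args) throws Exception {
    CountDownLatch done = new CountDownLatch(1);

    SparkAppHandle handle = new SparkLauncher()
      .setMaster("local")
      .setAppResource("/path/to/my-app.jar") // placeholder jar
      .setMainClass("com.example.MyApp")     // placeholder main class
      .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
      .startApplication(new SparkAppHandle.Listener() {
        @Override
        public void stateChanged(SparkAppHandle h) {
          System.out.println("state: " + h.getState());
          if (h.getState().isFinal()) {
            done.countDown();
          }
        }

        @Override
        public void infoChanged(SparkAppHandle h) {
          // the application ID becomes available here via h.getAppId()
        }
      });

    done.await(); // block until the application reaches a final state
  }
}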