/**
 * Lazily creates the in-process Spark launcher on first use and returns the
 * same instance on every subsequent call.
 */
private AbstractLauncher<InProcessLauncher> getSparkLauncher() {
  if (this.sparkLauncher != null) {
    return this.sparkLauncher;
  }
  this.sparkLauncher = new InProcessLauncher();
  return this.sparkLauncher;
}
/**
 * Reacts to Spark application state transitions.
 *
 * <p>Once the app reaches RUNNING or any final state, the shutdown latch is
 * released and the handle is disconnected. If the state is one of the known
 * failure states, the RPC client is cancelled so waiting callers see an error.
 *
 * @param sparkAppHandle handle reporting the app's current state
 */
@Override
public void stateChanged(SparkAppHandle sparkAppHandle) {
  // Read the state exactly once so the log line, the latch decision and the
  // failure check all observe the same value; the original re-queried the
  // handle four times and the state can change concurrently between calls.
  SparkAppHandle.State state = sparkAppHandle.getState();
  LOG.info("Spark app transitioned to state = " + state);
  if (state.isFinal() || state == SparkAppHandle.State.RUNNING) {
    this.shutdownLatch.countDown();
    sparkAppHandle.disconnect();
    LOG.info("Successfully disconnected from Spark app handle");
  }
  if (FAILED_SPARK_STATES.contains(state)) {
    this.rpcServer.cancelClient(this.clientId,
        "Spark app launcher failed, transitioned to state " + state);
  }
}
/** Points the underlying launcher at a spark-defaults style properties file. */
@Override
protected void addPropertiesFile(String absolutePath) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.setPropertiesFile(absolutePath);
}
// Verifies that launching a real child-process Spark app succeeds, and that
// values set via setConf()/setMainClass() take precedence over the same
// settings passed earlier as raw spark-submit arguments.
@Test
public void testChildProcLauncher() throws Exception {
  // This test is failed on Windows due to the failure of initiating executors
  // by the path length limitation. See SPARK-18718.
  assumeTrue(!Utils.isWindows());
  SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
  // NOTE(review): 'env' is populated but never passed to the launcher in this
  // method — confirm it is still needed or remove it.
  Map<String, String> env = new HashMap<>();
  // Ask spark-submit to print the final launch command, for easier debugging.
  env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");
  launcher
    .setMaster("local")
    .setAppResource(SparkLauncher.NO_RESOURCE)
    // Raw --conf argument that the setConf() call below must override.
    .addSparkArg(opts.CONF,
      String.format("%s=-Dfoo=ShouldBeOverriddenBelow",
        SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
    .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
      "-Dfoo=bar -Dtest.appender=childproc")
    .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path"))
    // Raw --class argument that setMainClass() must override.
    .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow")
    .setMainClass(SparkLauncherTestApp.class.getName())
    .addAppArgs("proc");
  final Process app = launcher.launch();
  // Drain both child streams so the child process cannot block on full pipes.
  new OutputRedirector(app.getInputStream(), TF);
  new OutputRedirector(app.getErrorStream(), TF);
  assertEquals(0, app.waitFor());
}
/**
 * Redirecting to a logger is mutually exclusive with any other error
 * redirection; mixing the two should fail at launch() time.
 */
@Test(expected=IllegalStateException.class)
public void testRedirectToLogWithOthersFails() throws Exception {
  launcher.setAppResource("fake-resource.jar");
  launcher.setMainClass("my.fake.class.Fake");
  launcher.redirectToLog("fakeLog");
  launcher.redirectError(ProcessBuilder.Redirect.PIPE);
  launcher.launch();
}
@Test public void testInProcessLauncherDoesNotKillJvm() throws Exception { SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); List<String[]> wrongArgs = Arrays.asList( new String[] { "--unknown" }, new String[] { opts.DEPLOY_MODE, "invalid" }); for (String[] args : wrongArgs) { InProcessLauncher launcher = new InProcessLauncher() .setAppResource(SparkLauncher.NO_RESOURCE); switch (args.length) { case 2: launcher.addSparkArg(args[0], args[1]); break; case 1: launcher.addSparkArg(args[0]); break; default: fail("FIXME: invalid test."); } SparkAppHandle handle = launcher.startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.FAILED, handle.getState()); } // Run --version, which is useless as a use case, but should succeed and not exit the JVM. // The expected state is "LOST" since "--version" doesn't report state back to the handle. SparkAppHandle handle = new InProcessLauncher().addSparkArg(opts.VERSION).startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.LOST, handle.getState()); }
/** Creates a fresh launcher pointed at the test Spark home before each test. */
@Before
public void configureLauncher() {
  String sparkHome = System.getProperty("spark.test.home");
  launcher = new SparkLauncher().setSparkHome(sparkHome);
}
/**
 * When the same stream is redirected more than once, the most recent redirect
 * must win.
 */
@Test
public void testRedirectLastWins() throws Exception {
  launcher.redirectError(ProcessBuilder.Redirect.PIPE)
    .redirectError(ProcessBuilder.Redirect.INHERIT);
  // assertEquals takes (expected, actual); the original call had them
  // swapped, which yields a misleading message on failure.
  assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.errorStream.type());

  launcher.redirectOutput(ProcessBuilder.Redirect.PIPE)
    .redirectOutput(ProcessBuilder.Redirect.INHERIT);
  assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.outputStream.type());
}
/** redirectToLog() should flag the launcher and set the child-logger config key. */
@Test
public void testRedirectToLog() throws Exception {
  launcher.redirectToLog("fakeLogger");
  assertTrue(launcher.redirectToLog);
  boolean hasLoggerKey = launcher.builder.getEffectiveConfig()
    .containsKey(SparkLauncher.CHILD_PROCESS_LOGGER_NAME);
  assertTrue(hasLoggerKey);
}
/** Forwards the executor count to spark-submit as "--num-executors". */
@Override
protected void addNumExecutors(String numOfExecutors) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.addSparkArg("--num-executors", numOfExecutors);
}
/** A no-argument redirectError() should merge stderr into stdout. */
@Test
public void testRedirectErrorToOutput() throws Exception {
  launcher.redirectError();
  assertTrue(launcher.redirectErrorStream);
}
/**
 * Adds the given jar(s) to the Spark application.
 *
 * <p>The parameter name is plural, matching the comma-separated spark-submit
 * "--jars" format, but {@code addJar} expects exactly one jar per call, so the
 * value is split on commas and each entry is registered individually. A single
 * path without commas behaves exactly as before.
 *
 * @param jars one jar path, or several paths separated by commas
 */
@Override
protected void addJars(String jars) {
  for (String jar : jars.split(",")) {
    if (!jar.isEmpty()) {
      getSparkLauncher().addJar(jar);
    }
  }
}
/** Appends a single argument for the Spark application's main class. */
@Override
protected void addAppArg(String arg) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.addAppArgs(arg);
}
/** Sets the primary application resource (the executable jar) on the launcher. */
@Override
protected void addExecutableJar(String jar) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.setAppResource(jar);
}
/** Sets the application's main class on the launcher. */
@Override
protected void addClass(String name) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.setMainClass(name);
}
/** Registering two error redirects for the same launch must be rejected. */
@Test(expected=IllegalStateException.class)
public void testRedirectTwiceFails() throws Exception {
  launcher.setAppResource("fake-resource.jar");
  launcher.setMainClass("my.fake.class.Fake");
  launcher.redirectError();
  launcher.redirectError(ProcessBuilder.Redirect.PIPE);
  launcher.launch();
}
// NOTE(review): this method appears verbatim twice in this chunk — likely a
// merge/paste duplicate; confirm only one copy exists in the real file.
//
// In-process launches with bad arguments must fail the app handle rather than
// calling System.exit() and killing the JVM that hosts the tests.
@Test
public void testInProcessLauncherDoesNotKillJvm() throws Exception {
  SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
  // One invalid invocation per entry: an unknown flag, and a valid flag with
  // an invalid value.
  List<String[]> wrongArgs = Arrays.asList(
    new String[] { "--unknown" },
    new String[] { opts.DEPLOY_MODE, "invalid" });
  for (String[] args : wrongArgs) {
    InProcessLauncher launcher = new InProcessLauncher()
      .setAppResource(SparkLauncher.NO_RESOURCE);
    // Dispatch on argument arity; anything else means the test data is broken.
    switch (args.length) {
      case 2:
        launcher.addSparkArg(args[0], args[1]);
        break;
      case 1:
        launcher.addSparkArg(args[0]);
        break;
      default:
        fail("FIXME: invalid test.");
    }
    SparkAppHandle handle = launcher.startApplication();
    waitFor(handle);
    assertEquals(SparkAppHandle.State.FAILED, handle.getState());
  }
  // Run --version, which is useless as a use case, but should succeed and not exit the JVM.
  // The expected state is "LOST" since "--version" doesn't report state back to the handle.
  SparkAppHandle handle = new InProcessLauncher().addSparkArg(opts.VERSION).startApplication();
  waitFor(handle);
  assertEquals(SparkAppHandle.State.LOST, handle.getState());
}
/**
 * Basic redirect plumbing: redirecting a stream stores a non-null redirect of
 * the requested type.
 */
@Test
public void testRedirectsSimple() throws Exception {
  launcher.redirectError(ProcessBuilder.Redirect.PIPE);
  assertNotNull(launcher.errorStream);
  // assertEquals takes (expected, actual); the original call had them
  // reversed, which yields a confusing message on failure.
  assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.errorStream.type());

  launcher.redirectOutput(ProcessBuilder.Redirect.PIPE);
  assertNotNull(launcher.outputStream);
  assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.outputStream.type());
}
/** Forwards the per-executor memory setting as "--executor-memory". */
@Override
protected void addExecutorMemory(String executorMemory) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.addSparkArg("--executor-memory", executorMemory);
}
/** Forwards the per-executor core count as "--executor-cores". */
@Override
protected void addExecutorCores(String executorCores) {
  AbstractLauncher<InProcessLauncher> spark = getSparkLauncher();
  spark.addSparkArg("--executor-cores", executorCores);
}