@Test public void testInProcessLauncher() throws Exception { // Because this test runs SparkLauncher in process and in client mode, it pollutes the system // properties, and that can cause test failures down the test pipeline. So restore the original // system properties after this test runs. Map<Object, Object> properties = new HashMap<>(System.getProperties()); try { inProcessLauncherTestImpl(); } finally { Properties p = new Properties(); for (Map.Entry<Object, Object> e : properties.entrySet()) { p.put(e.getKey(), e.getValue()); } System.setProperties(p); // Here DAGScheduler is stopped, while SparkContext.clearActiveContext may not be called yet. // Wait for a reasonable amount of time to avoid creating two active SparkContext in JVM. // See SPARK-23019 and SparkContext.stop() for details. eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertTrue("SparkContext is still alive.", SparkContext$.MODULE$.getActive().isEmpty()); }); } }
// Poll (up to 5s, every 10ms) until the launched app reports some concrete
// state, i.e. anything other than UNKNOWN.
// NOTE(review): _handle is presumably a final alias of `handle` created in the
// enclosing method so the lambda can capture it — confirm against the caller.
eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertNotEquals(SparkAppHandle.State.UNKNOWN, _handle.getState()); });
// Block until the application terminates, then verify it finished cleanly.
waitFor(handle);
assertEquals(SparkAppHandle.State.FINISHED, handle.getState());
@Test public void testInProcessLauncherDoesNotKillJvm() throws Exception { SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); List<String[]> wrongArgs = Arrays.asList( new String[] { "--unknown" }, new String[] { opts.DEPLOY_MODE, "invalid" }); for (String[] args : wrongArgs) { InProcessLauncher launcher = new InProcessLauncher() .setAppResource(SparkLauncher.NO_RESOURCE); switch (args.length) { case 2: launcher.addSparkArg(args[0], args[1]); break; case 1: launcher.addSparkArg(args[0]); break; default: fail("FIXME: invalid test."); } SparkAppHandle handle = launcher.startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.FAILED, handle.getState()); } // Run --version, which is useless as a use case, but should succeed and not exit the JVM. // The expected state is "LOST" since "--version" doesn't report state back to the handle. SparkAppHandle handle = new InProcessLauncher().addSparkArg(opts.VERSION).startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.LOST, handle.getState()); }
// Poll (up to 5s, every 10ms) until the launched app reports some concrete
// state, i.e. anything other than UNKNOWN.
// NOTE(review): _handle is presumably a final alias of `handle` created in the
// enclosing method so the lambda can capture it — confirm against the caller.
eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertNotEquals(SparkAppHandle.State.UNKNOWN, _handle.getState()); });
// Block until the application terminates, then verify it finished cleanly.
waitFor(handle);
assertEquals(SparkAppHandle.State.FINISHED, handle.getState());
@Test public void testInProcessLauncherDoesNotKillJvm() throws Exception { SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); List<String[]> wrongArgs = Arrays.asList( new String[] { "--unknown" }, new String[] { opts.DEPLOY_MODE, "invalid" }); for (String[] args : wrongArgs) { InProcessLauncher launcher = new InProcessLauncher() .setAppResource(SparkLauncher.NO_RESOURCE); switch (args.length) { case 2: launcher.addSparkArg(args[0], args[1]); break; case 1: launcher.addSparkArg(args[0]); break; default: fail("FIXME: invalid test."); } SparkAppHandle handle = launcher.startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.FAILED, handle.getState()); } // Run --version, which is useless as a use case, but should succeed and not exit the JVM. // The expected state is "LOST" since "--version" doesn't report state back to the handle. SparkAppHandle handle = new InProcessLauncher().addSparkArg(opts.VERSION).startApplication(); waitFor(handle); assertEquals(SparkAppHandle.State.LOST, handle.getState()); }
@Test public void testInProcessLauncher() throws Exception { // Because this test runs SparkLauncher in process and in client mode, it pollutes the system // properties, and that can cause test failures down the test pipeline. So restore the original // system properties after this test runs. Map<Object, Object> properties = new HashMap<>(System.getProperties()); try { inProcessLauncherTestImpl(); } finally { Properties p = new Properties(); for (Map.Entry<Object, Object> e : properties.entrySet()) { p.put(e.getKey(), e.getValue()); } System.setProperties(p); // Here DAGScheduler is stopped, while SparkContext.clearActiveContext may not be called yet. // Wait for a reasonable amount of time to avoid creating two active SparkContext in JVM. // See SPARK-23019 and SparkContext.stop() for details. eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertTrue("SparkContext is still alive.", SparkContext$.MODULE$.getActive().isEmpty()); }); } }