@Test public void testInProcessLauncher() throws Exception { // Because this test runs SparkLauncher in process and in client mode, it pollutes the system // properties, and that can cause test failures down the test pipeline. So restore the original // system properties after this test runs. Map<Object, Object> properties = new HashMap<>(System.getProperties()); try { inProcessLauncherTestImpl(); } finally { Properties p = new Properties(); for (Map.Entry<Object, Object> e : properties.entrySet()) { p.put(e.getKey(), e.getValue()); } System.setProperties(p); // Here DAGScheduler is stopped, while SparkContext.clearActiveContext may not be called yet. // Wait for a reasonable amount of time to avoid creating two active SparkContext in JVM. // See SPARK-23019 and SparkContext.stop() for details. eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertTrue("SparkContext is still alive.", SparkContext$.MODULE$.getActive().isEmpty()); }); } }
@Test public void testInProcessLauncher() throws Exception { // Because this test runs SparkLauncher in process and in client mode, it pollutes the system // properties, and that can cause test failures down the test pipeline. So restore the original // system properties after this test runs. Map<Object, Object> properties = new HashMap<>(System.getProperties()); try { inProcessLauncherTestImpl(); } finally { Properties p = new Properties(); for (Map.Entry<Object, Object> e : properties.entrySet()) { p.put(e.getKey(), e.getValue()); } System.setProperties(p); // Here DAGScheduler is stopped, while SparkContext.clearActiveContext may not be called yet. // Wait for a reasonable amount of time to avoid creating two active SparkContext in JVM. // See SPARK-23019 and SparkContext.stop() for details. eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertTrue("SparkContext is still alive.", SparkContext$.MODULE$.getActive().isEmpty()); }); } }
// Poll every 10 ms, for up to 5 seconds, until the launcher handle reports a state
// other than UNKNOWN. NOTE(review): this is a statement fragment — its enclosing
// method is not visible in this chunk, so the surrounding setup of _handle is assumed.
eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertNotEquals(SparkAppHandle.State.UNKNOWN, _handle.getState()); });
// Wait (5 s timeout, 10 ms poll interval) for _handle to leave the UNKNOWN state.
// NOTE(review): fragment of an enclosing method not shown here; kept byte-identical.
eventually(Duration.ofSeconds(5), Duration.ofMillis(10), () -> { assertNotEquals(SparkAppHandle.State.UNKNOWN, _handle.getState()); });