// Releases this session's handle on the shared local client and stops the
// SparkContext, if one exists.
@Override public void close() {
  // Clear the shared client reference under the class-wide lock so concurrent
  // sessions observe the teardown; the (possibly slow) context stop happens
  // outside the lock.
  synchronized (LocalHiveSparkClient.class) {
    client = null;
  }
  if (sc != null) {
    sc.stop();
  }
}
}
// Stops this client: forgets all monitored jobs, then shuts down the
// SparkContext.
// NOTE(review): no null guard on sc — presumably stop() is only reachable
// after a successful startup; confirm with callers.
void stop() {
  monitoredJobs.clear();
  sc.stop();
}
// Closes one session's handle on the shared local client. Only the last
// session to close (activeSessions reaching zero) releases the shared client
// reference and shuts down the SparkContext. The class-wide lock makes the
// refcount decrement and the shutdown decision atomic across sessions.
// NOTE(review): an extra close() call would drive activeSessions negative and
// skip the shutdown forever — confirm callers close at most once per session.
@Override public void close() {
  synchronized (LocalHiveSparkClient.class) {
    if (--activeSessions == 0) {
      client = null;
      if (sc != null) {
        LOG.debug("Shutting down the SparkContext");
        sc.stop();
      }
    }
  }
}
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Guard against setUp having failed before sc was assigned; an NPE here
  // would mask the original test failure.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Null guard: if setUp failed before assigning sc, an unconditional stop()
  // would throw NPE and hide the real failure.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
// Per-test cleanup: shut the SparkContext down and drop the reference.
@After
public void tearDown() {
  if (sc == null) {
    return; // setUp never got far enough to create a context
  }
  sc.stop();
  sc = null;
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Skip cleanup if setUp failed before sc was assigned, so we don't
  // replace the original test failure with an NPE.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
// Per-test cleanup of the SparkContext.
@After
public void tearDown() {
  // Nothing to do when setUp never created a context.
  if (sc == null) {
    return;
  }
  sc.stop();
  sc = null;
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Null guard so a setUp failure isn't masked by an NPE during cleanup.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Only stop a context that was actually created; otherwise a failed setUp
  // would surface here as an NPE instead of its real cause.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
// Stops and clears the SparkContext after each test.
@After
public void tearDown() {
  // Guard against sc being null when setUp failed early.
  if (sc != null) {
    sc.stop();
    sc = null;
  }
}
@After public void tearDown() throws SQLException { try { DriverManager.getConnection("jdbc:derby:target/JavaJdbcRDDSuiteDb;shutdown=true"); } catch(SQLException e) { // Throw if not normal single database shutdown // https://db.apache.org/derby/docs/10.2/ref/rrefexcept71493.html if (e.getSQLState().compareTo("08006") != 0) { throw e; } } sc.stop(); sc = null; }
// Exercises every public JavaSparkContext constructor overload, stopping each
// context immediately so only one is live at a time.
@Test
public void javaSparkContext() {
  String[] jars = new String[] {};
  java.util.Map<String, String> environment = new java.util.HashMap<>();

  JavaSparkContext fromConf =
      new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name"));
  fromConf.stop();

  JavaSparkContext masterNameConf = new JavaSparkContext("local", "name", new SparkConf());
  masterNameConf.stop();

  JavaSparkContext masterName = new JavaSparkContext("local", "name");
  masterName.stop();

  JavaSparkContext withJarFile = new JavaSparkContext("local", "name", "sparkHome", "jarFile");
  withJarFile.stop();

  JavaSparkContext withJars = new JavaSparkContext("local", "name", "sparkHome", jars);
  withJars.stop();

  JavaSparkContext withEnv =
      new JavaSparkContext("local", "name", "sparkHome", jars, environment);
  withEnv.stop();
}
// Smoke-tests all JavaSparkContext constructor overloads; each context is
// stopped right away so overloads don't interfere with one another.
@Test
public void javaSparkContext() {
  String[] jarPaths = new String[] {};
  java.util.Map<String, String> env = new java.util.HashMap<>();
  stopContext(new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")));
  stopContext(new JavaSparkContext("local", "name", new SparkConf()));
  stopContext(new JavaSparkContext("local", "name"));
  stopContext(new JavaSparkContext("local", "name", "sparkHome", "jarFile"));
  stopContext(new JavaSparkContext("local", "name", "sparkHome", jarPaths));
  stopContext(new JavaSparkContext("local", "name", "sparkHome", jarPaths, env));
}

// Stops the freshly constructed context; a constructor that compiles and
// returns is the behavior under test.
private static void stopContext(JavaSparkContext context) {
  context.stop();
}
@Test public void testPluginShutdownWithException() { // Verify an exception in one plugin shutdown does not affect the others String pluginNames = testPluginName + "," + testBadPluginName + "," + testPluginName; SparkConf conf = initializeSparkConf(pluginNames); sc = new JavaSparkContext(conf); assertEquals(3, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(2, numSuccessfulTerminations); }
@Test public void testAddMultiplePlugins() throws InterruptedException { // Load two plugins and verify they both execute. SparkConf conf = initializeSparkConf(testPluginName + "," + testSecondPluginName); sc = new JavaSparkContext(conf); assertEquals(2, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(2, numSuccessfulTerminations); }
@Test public void testAddPlugin() throws InterruptedException { // Load the sample TestExecutorPlugin, which will change the value of numSuccessfulPlugins SparkConf conf = initializeSparkConf(testPluginName); sc = new JavaSparkContext(conf); assertEquals(1, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(1, numSuccessfulTerminations); }
@Test public void testPluginShutdownWithException() { // Verify an exception in one plugin shutdown does not affect the others String pluginNames = testPluginName + "," + testBadPluginName + "," + testPluginName; SparkConf conf = initializeSparkConf(pluginNames); sc = new JavaSparkContext(conf); assertEquals(3, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(2, numSuccessfulTerminations); }
@Test public void testAddPlugin() throws InterruptedException { // Load the sample TestExecutorPlugin, which will change the value of numSuccessfulPlugins SparkConf conf = initializeSparkConf(testPluginName); sc = new JavaSparkContext(conf); assertEquals(1, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(1, numSuccessfulTerminations); }
@Test public void testAddMultiplePlugins() throws InterruptedException { // Load two plugins and verify they both execute. SparkConf conf = initializeSparkConf(testPluginName + "," + testSecondPluginName); sc = new JavaSparkContext(conf); assertEquals(2, numSuccessfulPlugins); sc.stop(); sc = null; assertEquals(2, numSuccessfulTerminations); }