public void closeSparkSession() {
  if (sparkSession != null) {
    try {
      SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
    } catch (Exception ex) {
      LOG.error("Error closing spark session.", ex);
    } finally {
      sparkSession = null;
    }
  }
}
  SparkSessionManagerImpl.getInstance().shutdown();
} catch (Exception ex) {
  LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
SparkSessionManagerImpl.getInstance().setup(hiveConf);
SparkSession sparkSession = null;
try {
  sparkSessionManager = SparkSessionManagerImpl.getInstance();
  sparkSession = SparkUtilities.getSparkSession(
      context.getConf(), sparkSessionManager);
sessionManagerHS2 = SparkSessionManagerImpl.getInstance();
@Test
public void testGetHiveException() throws Exception {
  HiveConf conf = getHiveConf();
  SparkSessionManager ssm = SparkSessionManagerImpl.getInstance();
  SparkSessionImpl ss = (SparkSessionImpl) ssm.getSession(null, conf, true);
try {
  printConfigInfo();
  sparkSessionManager = SparkSessionManagerImpl.getInstance();
  sparkSession = SparkUtilities.getSparkSession(conf, sparkSessionManager);
/**
 * Force a Spark config to be generated and check that a config value has the expected value.
 *
 * @param conf the Hive config to use as a base
 * @param paramName the Spark config name to check
 * @param expectedValue the expected value in the Spark config
 */
private void checkSparkConf(HiveConf conf, String paramName, String expectedValue) throws HiveException {
  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  SparkSessionImpl sparkSessionImpl = (SparkSessionImpl) sessionManager.getSession(null, conf, true);
  assertTrue(sparkSessionImpl.isOpen());

  HiveSparkClient hiveSparkClient = sparkSessionImpl.getHiveSparkClient();
  SparkConf sparkConf = hiveSparkClient.getSparkConf();
  String cloneConfig = sparkConf.get(paramName);

  sessionManager.closeSession(sparkSessionImpl);
  assertEquals(expectedValue, cloneConfig);
  sessionManager.shutdown();
}
/** Tests CLI scenario where we get a single session and use it multiple times. */
@Test
public void testSingleSessionMultipleUse() throws Exception {
  HiveConf conf = getHiveConf();

  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  SparkSession sparkSession1 = sessionManager.getSession(null, conf, true);
  assertTrue(sparkSession1.isOpen());

  SparkSession sparkSession2 = sessionManager.getSession(sparkSession1, conf, true);
  assertTrue(sparkSession1 == sparkSession2); // Same session object is expected.
  assertTrue(sparkSession2.isOpen());

  sessionManager.shutdown();
  sessionManager.closeSession(sparkSession1);
}
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);

  assertTrue("Port should be within configured port range: " + SparkClientFactory.getServerPort(),
      SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);

  // Verify that a new Spark session can be created, to ensure that the new SparkSession
  // is able to connect to the RpcServer on the custom port.
  try {
    testSessionManager.getSession(null, conf, true);
  } catch (HiveException e) {
    Assert.fail("Failed test to connect to the RpcServer with custom port");
  }

  testSessionManager.shutdown();
}

private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
private void runSparkTestSession(HiveConf conf, int threadId) throws Exception {
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT, "10s");
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT_PERIOD, "1s");

  Driver driver = null;
  try {
    driver = new Driver(new QueryState.Builder()
        .withGenerateNewQueryId(true)
        .withHiveConf(conf).build(), null, null);

    SparkSession sparkSession = SparkUtilities.getSparkSession(conf,
        SparkSessionManagerImpl.getInstance());

    Assert.assertEquals(0, driver.run("show tables").getResponseCode());
    barrier.await();

    SparkContext sparkContext = getSparkContext(sparkSession);
    Assert.assertFalse(sparkContext.isStopped());

    if (threadId == 1) {
      barrier.await();
      closeSparkSession(sparkSession);
      Assert.assertTrue(sparkContext.isStopped());
    } else {
      closeSparkSession(sparkSession);
      Assert.assertFalse(sparkContext.isStopped());
      barrier.await();
    }
  } finally {
    if (driver != null) {
      driver.destroy();
    }
  }
}
@Test
public void testGetSessionId() throws HiveException {
  SessionState ss = SessionState.start(SESSION_HIVE_CONF);
  SparkSessionManager ssm = SparkSessionManagerImpl.getInstance();

  ss.setSparkSession(ssm.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(ss.getSessionId() + "_0", ss.getSparkSession().getSessionId());

  ss.setSparkSession(ssm.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(ss.getSessionId() + "_1", ss.getSparkSession().getSessionId());

  ss = SessionState.start(SESSION_HIVE_CONF);
  ss.setSparkSession(ssm.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(ss.getSessionId() + "_0", ss.getSparkSession().getSessionId());
}
public void shutdown() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) == null) {
    cleanUp();
  }

  if (clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
    SessionState.get().getTezSession().destroy();
  }

  setup.tearDown();

  if (sparkSession != null) {
    try {
      SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
    } catch (Exception ex) {
      LOG.error("Error closing spark session.", ex);
    } finally {
      sparkSession = null;
    }
  }

  if (mr != null) {
    mr.shutdown();
    mr = null;
  }

  FileSystem.closeAll();

  if (dfs != null) {
    dfs.shutdown();
    dfs = null;
  }

  Hive.closeCurrent();
}
  SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
} catch (Exception ex) {
  LOG.error("Error closing spark session.", ex);
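Taken together, the snippets above all follow the same SparkSessionManagerImpl lifecycle: get the singleton, set it up, obtain a session, close the session, and shut the manager down. The method below is a minimal illustrative sketch of that pattern, not code from the repository; the method name is made up, and it assumes a HiveConf argument and an SLF4J-style LOG field like the ones used in the snippets above.

// Illustrative sketch only (assumed method name and LOG field, not repository code):
// the common SparkSessionManagerImpl lifecycle shared by the snippets above.
private void sparkSessionLifecycleSketch(HiveConf hiveConf) {
  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  SparkSession sparkSession = null;
  try {
    // Initialize the pool manager once (done at HiveServer2 startup in the server snippet).
    sessionManager.setup(hiveConf);
    // Open a new session, or reuse an existing one when a session is passed instead of null.
    sparkSession = sessionManager.getSession(null, hiveConf, true);
    // ... submit Spark work through sparkSession ...
  } catch (Exception ex) {
    LOG.error("Error using spark session.", ex);
  } finally {
    if (sparkSession != null) {
      try {
        // Return the session to the manager, as the closeSparkSession() snippets do.
        sessionManager.closeSession(sparkSession);
      } catch (Exception ex) {
        LOG.error("Error closing spark session.", ex);
      }
    }
    try {
      // Stop the pool manager (done during HiveServer2 shutdown in the server snippet).
      sessionManager.shutdown();
    } catch (Exception ex) {
      LOG.error("Error shutting down spark session manager.", ex);
    }
  }
}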