  // Best-effort: stop the Spark session pool manager during HiveServer2 shutdown.
  SparkSessionManagerImpl.getInstance().shutdown();
} catch(Exception ex) {
  // Log and continue -- server shutdown must not be aborted by pool-manager failures.
  LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
/**
 * Closes the current Spark session (if any) via the pool manager.
 * The session reference is always cleared, even when closing fails.
 */
public void closeSparkSession() {
  if (sparkSession == null) {
    return;
  }
  try {
    SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
  } catch (Exception ex) {
    LOG.error("Error closing spark session.", ex);
  } finally {
    // Drop the reference regardless of the outcome so we never retry a dead session.
    sparkSession = null;
  }
}
/**
 * Returns the process-wide {@code SparkSessionManagerImpl} singleton,
 * lazily creating it on first use. Synchronized so concurrent first
 * callers observe exactly one instance.
 *
 * @throws HiveException if construction of the manager fails
 */
public static synchronized SparkSessionManagerImpl getInstance() throws HiveException {
  if (instance != null) {
    return instance;
  }
  instance = new SparkSessionManagerImpl();
  return instance;
}
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  // Port spec mixes ranges and a single port; the assigned port must fall
  // somewhere inside [49152, 49333].
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);

  assertTrue("Port should be within configured port range:" + SparkClientFactory.getServerPort(),
      SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);

  // Verify that a new spark session can be created, to ensure that a new
  // SparkSession is able to connect to the RpcServer on the custom port.
  try {
    testSessionManager.getSession(null, conf, true);
  } catch (HiveException e) {
    Assert.fail("Failed test to connect to the RpcServer with custom port");
  }
  testSessionManager.shutdown();
}

private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
// Exercise repeated shutdown: the manager must tolerate shutdown() both
// before setup() has been called and again immediately after it.
sessionManagerHS2 = SparkSessionManagerImpl.getInstance();
sessionManagerHS2.shutdown();
sessionManagerHS2.setup(hiveConf);
sessionManagerHS2.shutdown();
// Configure the singleton session manager with this server's HiveConf.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
SparkSession sparkSession = null;
try {
  sparkSessionManager = SparkSessionManagerImpl.getInstance();
  // NOTE(review): presumably obtains (or lazily opens) a pooled session for
  // this query's configuration -- confirm in SparkUtilities.getSparkSession.
  sparkSession = SparkUtilities.getSparkSession(
      context.getConf(), sparkSessionManager);
@Override
public void run() {
  // Best-effort cleanup: shut down the manager singleton if one was created.
  // NOTE(review): trailing "});" suggests this is an anonymous Thread,
  // presumably registered as a JVM shutdown hook -- confirm at the caller.
  try {
    if (instance != null) {
      instance.shutdown();
    }
  } catch (Exception e) {
    // ignore -- nothing useful can be done with failures at this point
  }
}
});
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } createdSessions.add(sparkSession); return sparkSession; }
@Override
public void run() {
  try {
    SessionState.setCurrentSessionState(ss);
    // Per-thread seed so each worker sleeps a different pseudo-random amount.
    Random random = new Random(Thread.currentThread().getId());
    String threadName = Thread.currentThread().getName();
    System.out.println(threadName + " started.");
    HiveConf conf = getHiveConf();

    SparkSession prevSession = null;
    SparkSession currentSession = null;

    for(int i = 0; i < 5; i++) {
      // Handing the previously returned session back in must yield the very
      // same (still open) instance.
      currentSession = sessionManagerHS2.getSession(prevSession, conf, true);
      assertTrue(prevSession == null || prevSession == currentSession);
      assertTrue(currentSession.isOpen());
      System.out.println(String.format("%s got session (%d): %s",
          threadName, i, currentSession.getSessionId()));
      // Hold the session for 1-3 seconds before returning it to the manager.
      Thread.sleep((random.nextInt(3)+1) * 1000);
      sessionManagerHS2.returnSession(currentSession);
      prevSession = currentSession;
    }
    sessionManagerHS2.closeSession(currentSession);
    System.out.println(threadName + " ended.");
  } catch (Throwable e) {
    // Record the failure in a flag the main test thread checks, then fail
    // this worker explicitly with the full stringified stack trace.
    anyFailedSessionThread = true;
    String msg = String.format("Error executing '%s'", Thread.currentThread().getName());
    LOG.error(msg, e);
    fail(msg + " " + StringUtils.stringifyException(e));
  }
}
}
// Configure the singleton session manager with this server's HiveConf.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
SparkSession sparkSession = null;
try {
  sparkSessionManager = SparkSessionManagerImpl.getInstance();
  // NOTE(review): presumably obtains (or lazily opens) a pooled session for
  // this query's configuration -- confirm in SparkUtilities.getSparkSession.
  sparkSession = SparkUtilities.getSparkSession(
      context.getConf(), sparkSessionManager);
@Override
public void run() {
  // Best-effort cleanup: shut down the manager singleton if one was created.
  // NOTE(review): trailing "});" suggests this is an anonymous Thread,
  // presumably registered as a JVM shutdown hook -- confirm at the caller.
  try {
    if (instance != null) {
      instance.shutdown();
    }
  } catch (Exception e) {
    // ignore -- nothing useful can be done with failures at this point
  }
}
});
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); createdSessions.add(existingSession); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(SessionState.get().getNewSparkSessionId()); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } createdSessions.add(sparkSession); return sparkSession; }
  // Best-effort: stop the Spark session pool manager during HiveServer2 shutdown.
  SparkSessionManagerImpl.getInstance().shutdown();
} catch(Exception ex) {
  // Log and continue -- server shutdown must not be aborted by pool-manager failures.
  LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
// Configure the singleton session manager with this server's HiveConf.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
@Test
public void testGetHiveException() throws Exception {
  HiveConf conf = getHiveConf();
  SparkSessionManager ssm = SparkSessionManagerImpl.getInstance();
  // Open a real session; the cast to the impl type exposes internals the
  // rest of this test (not visible here) needs.
  SparkSessionImpl ss = (SparkSessionImpl) ssm.getSession(
      null, conf, true);
/**
 * Closes the current Spark session (if any) via the pool manager.
 * Clears the session reference even if closing fails.
 */
public void closeSparkSession() {
  if (sparkSession != null) {
    try {
      SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
    } catch (Exception ex) {
      LOG.error("Error closing spark session.", ex);
    } finally {
      // Always drop the reference so a dead session is never reused.
      sparkSession = null;
    }
  }
}
@Override
public void run() {
  // Best-effort cleanup: shut down the manager singleton if one was created.
  // NOTE(review): trailing "});" suggests this is an anonymous Thread,
  // presumably registered as a JVM shutdown hook -- confirm at the caller.
  try {
    if (instance != null) {
      instance.shutdown();
    }
  } catch (Exception e) {
    // ignore -- nothing useful can be done with failures at this point
  }
}
});
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(); createdSessions.add(sparkSession); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } return sparkSession; }