@Override
public void close() {
  isOpen = false;
  if (hiveSparkClient != null) {
    try {
      hiveSparkClient.close();
      cleanScratchDir();
    } catch (IOException e) {
      LOG.error("Failed to close spark session (" + sessionId + ").", e);
    }
  }
  hiveSparkClient = null;
}
/**
 * Create scratch directory for spark session if it does not exist.
 * This method is not thread safe.
 * @return Path to Spark session scratch directory.
 * @throws IOException if the scratch directory cannot be created.
 */
@Override
public Path getHDFSSessionDir() throws IOException {
  if (scratchDir == null) {
    scratchDir = createScratchDir();
  }
  return scratchDir;
}
public SparkSessionImpl() {
  sessionId = makeSessionId();
}
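// makeSessionId() is referenced above but not shown. A minimal sketch of a
// plausible implementation, assuming the session id is simply a random UUID
// (requires java.util.UUID); the real helper may differ:
public static String makeSessionId() {
  return UUID.randomUUID().toString();
}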
/**
 * Check if a session has timed out, and if it has, close the session.
 */
@Override
public boolean triggerTimeout(long sessionTimeout) {
  if (hasTimedOut(activeJobs, lastSparkJobCompletionTime, sessionTimeout)) {
    closeLock.writeLock().lock();
    try {
      // Re-check under the write lock in case another thread raced us here.
      if (hasTimedOut(activeJobs, lastSparkJobCompletionTime, sessionTimeout)) {
        LOG.warn("Closing Spark session " + getSessionId() + " because a Spark job has not "
            + "been run in the past " + sessionTimeout / 1000 + " seconds");
        close();
        return true;
      }
    } finally {
      closeLock.writeLock().unlock();
    }
  }
  return false;
}
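// hasTimedOut() is referenced above but not shown. A minimal sketch of what it
// presumably checks, assuming activeJobs is the set of in-flight job ids and
// lastSparkJobCompletionTime stays 0 until the first job completes; the real
// helper may differ:
private static boolean hasTimedOut(Set<String> activeJobs, long lastSparkJobCompletionTime,
    long sessionTimeout) {
  // Only idle sessions that have completed at least one job are eligible to time out.
  return activeJobs.isEmpty() && lastSparkJobCompletionTime > 0
      && (System.currentTimeMillis() - lastSparkJobCompletionTime) > sessionTimeout;
}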
/**
 * If the <i>existingSession</i> can be reused, return it.
 * Otherwise:
 *  - close it and remove it from the list;
 *  - create a new session and add it to the list.
 */
@Override
public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen)
    throws HiveException {
  setup(conf);
  if (existingSession != null) {
    // Open the session if it is closed.
    if (!existingSession.isOpen() && doOpen) {
      existingSession.open(conf);
    }
    return existingSession;
  }
  SparkSession sparkSession = new SparkSessionImpl();
  if (doOpen) {
    sparkSession.open(conf);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId()));
  }
  createdSessions.add(sparkSession);
  return sparkSession;
}
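// Hypothetical caller sketch showing how getSession() is typically used
// (mirroring the test helper further below); conf is an existing HiveConf:
SparkSessionManager mgr = SparkSessionManagerImpl.getInstance();
SparkSession session = mgr.getSession(null, conf, true);  // creates and opens a new session
session = mgr.getSession(session, conf, true);            // reuses it, reopening if closed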
@Override
public void open(HiveConf conf) throws HiveException {
  closeLock.writeLock().lock();
  try {
    if (!isOpen) {
      LOG.info("Trying to open Hive on Spark session {}", sessionId);
      this.conf = conf;
      try {
        hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf, sessionId,
            SessionState.get().getSessionId());
        isOpen = true;
      } catch (Throwable e) {
        throw getHiveException(e);
      }
      LOG.info("Hive on Spark session {} successfully opened", sessionId);
    } else {
      LOG.info("Hive on Spark session {} is already opened", sessionId);
    }
  } finally {
    closeLock.writeLock().unlock();
  }
}
/**
 * Force a Spark config to be generated and check that a config value has the expected value.
 * @param conf the Hive config to use as a base
 * @param paramName the Spark config name to check
 * @param expectedValue the expected value in the Spark config
 */
private void checkSparkConf(HiveConf conf, String paramName, String expectedValue)
    throws HiveException {
  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  SparkSessionImpl sparkSessionImpl = (SparkSessionImpl) sessionManager.getSession(
      null, conf, true);
  assertTrue(sparkSessionImpl.isOpen());
  HiveSparkClient hiveSparkClient = sparkSessionImpl.getHiveSparkClient();
  SparkConf sparkConf = hiveSparkClient.getSparkConf();
  String cloneConfig = sparkConf.get(paramName);
  sessionManager.closeSession(sparkSessionImpl);
  assertEquals(expectedValue, cloneConfig);
  sessionManager.shutdown();
}
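// Hypothetical usage of checkSparkConf(); the config key and value are
// illustrative only, and getHiveConf() is an assumed test fixture helper:
@Test
public void testSparkConfigIsApplied() throws HiveException {
  HiveConf conf = getHiveConf();
  conf.set("spark.executor.memory", "1g");  // should be forwarded into the generated SparkConf
  checkSparkConf(conf, "spark.executor.memory", "1g");
}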
/**
 * If the <i>existingSession</i> can be reused, return it.
 * Otherwise:
 *  - close it and remove it from the list;
 *  - create a new session and add it to the list.
 */
@Override
public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen)
    throws HiveException {
  setup(conf);
  if (existingSession != null) {
    // Open the session if it is closed.
    if (!existingSession.isOpen() && doOpen) {
      existingSession.open(conf);
      createdSessions.add(existingSession);
    }
    return existingSession;
  }
  SparkSession sparkSession = new SparkSessionImpl(SessionState.get().getNewSparkSessionId());
  if (doOpen) {
    sparkSession.open(conf);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId()));
  }
  createdSessions.add(sparkSession);
  return sparkSession;
}
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg,
    String expectedMatchedStr) {
  HiveException he = ss.getHiveException(e);
  assertEquals(expectedErrMsg, he.getCanonicalErrorMsg());
  if (expectedMatchedStr != null) {
    assertTrue(he.getMessage().contains(expectedMatchedStr));
  }
}
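// Hypothetical usage of checkHiveException(); the ErrorMsg constant and the
// matched substring are illustrative assumptions, since the actual mapping
// depends on what SparkSessionImpl.getHiveException() recognizes:
SparkSessionImpl ss = new SparkSessionImpl();
checkHiveException(ss, new TimeoutException("Client 'abc' timed out waiting for connection"),
    ErrorMsg.SPARK_CREATE_CLIENT_TIMEOUT, "timed out");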
/**
 * If the <i>existingSession</i> can be reused, return it.
 * Otherwise:
 *  - close it and remove it from the list;
 *  - create a new session and add it to the list.
 */
@Override
public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen)
    throws HiveException {
  setup(conf);
  if (existingSession != null) {
    // Open the session if it is closed.
    if (!existingSession.isOpen() && doOpen) {
      existingSession.open(conf);
    }
    return existingSession;
  }
  SparkSession sparkSession = new SparkSessionImpl();
  createdSessions.add(sparkSession);
  if (doOpen) {
    sparkSession.open(conf);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId()));
  }
  return sparkSession;
}
@Override
public Path getHDFSSessionDir() throws IOException {
  if (scratchDir == null) {
    synchronized (dirLock) {
      if (scratchDir == null) {
        scratchDir = createScratchDir();
      }
    }
  }
  return scratchDir;
}
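// For the classic double-checked locking above to be safe under the Java
// memory model, the checked field is typically declared volatile, so that a
// thread taking the unsynchronized fast path never sees a stale reference.
// Assumed field declarations (a sketch; names match the method above):
private volatile Path scratchDir = null;
private final Object dirLock = new Object();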
@Override
public void close() {
  if (isOpen) {
    closeLock.writeLock().lock();
    try {
      if (isOpen) {
        LOG.info("Trying to close Hive on Spark session {}", sessionId);
        if (hiveSparkClient != null) {
          try {
            hiveSparkClient.close();
            LOG.info("Hive on Spark session {} successfully closed", sessionId);
            cleanScratchDir();
          } catch (IOException e) {
            LOG.error("Failed to close Hive on Spark session (" + sessionId + ")", e);
          }
        }
        hiveSparkClient = null;
        lastSparkJobCompletionTime = 0;
        isOpen = false;
      }
    } finally {
      closeLock.writeLock().unlock();
    }
  }
}
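// Assumed declaration for the lock shared by open(), close(), and
// triggerTimeout() above (a sketch; the real field may differ): the write
// lock serializes open/close state transitions, leaving the read lock free
// for concurrent job submission (requires java.util.concurrent.locks):
private final ReadWriteLock closeLock = new ReentrantReadWriteLock();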