@Override
public ObjectPair<Long, Integer> getMemoryAndCores() throws Exception {
  closeLock.readLock().lock();
  try {
    SparkConf sparkConf = hiveSparkClient.getSparkConf();
    int numExecutors = hiveSparkClient.getExecutorCount();
    // At startup the executor count can still be 0; report "unknown" so
    // callers fall back to their own defaults.
    if (numExecutors <= 0) {
      return new ObjectPair<Long, Integer>(-1L, -1);
    }
    // Reconstructed per the upstream Hive implementation (the fragment was
    // truncated here): derive per-task memory from executor memory and the
    // storage fraction.
    int executorMemoryInMB = Utils.memoryStringToMb(
        sparkConf.get("spark.executor.memory", "512m"));
    double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
    long totalMemory = (long) (numExecutors * executorMemoryInMB * memoryFraction * 1024 * 1024);
    int totalCores;
    String masterURL = sparkConf.get("spark.master");
    if (masterURL.startsWith("spark")) {
      // Spark standalone: prefer spark.default.parallelism when set, since
      // the executor count alone understates the available parallelism.
      totalCores = sparkConf.contains("spark.default.parallelism") ?
          sparkConf.getInt("spark.default.parallelism", 1) :
          hiveSparkClient.getDefaultParallelism();
      totalCores = Math.max(totalCores, numExecutors);
    } else {
      // YARN/Mesos: each executor advertises its core count.
      int coresPerExecutor = sparkConf.getInt("spark.executor.cores", 1);
      totalCores = numExecutors * coresPerExecutor;
    }
    totalCores = totalCores / sparkConf.getInt("spark.task.cpus", 1);
    long memoryPerTaskInBytes = totalMemory / totalCores;
    return new ObjectPair<Long, Integer>(Long.valueOf(memoryPerTaskInBytes),
        Integer.valueOf(totalCores));
  } finally {
    closeLock.readLock().unlock();
  }
}
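As a hedged illustration of how the returned pair might be consumed (the variable names, the `session` reference, and the capping logic are assumptions, not from the original):

// Hypothetical caller: derive a reducer count from the reported capacity.
ObjectPair<Long, Integer> memoryAndCores = session.getMemoryAndCores();
long bytesPerTask = memoryAndCores.getFirst();
int totalCores = memoryAndCores.getSecond();
if (totalCores > 0 && bytesPerTask > 0) {
  // Cap parallelism at the cluster's core count; 999 is an arbitrary example ceiling.
  int numReducers = Math.min(999, totalCores);
}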
@Override
public void close() {
  isOpen = false;
  if (hiveSparkClient != null) {
    try {
      hiveSparkClient.close();
      cleanScratchDir();
    } catch (IOException e) {
      LOG.error("Failed to close spark session (" + sessionId + ").", e);
    }
  }
  hiveSparkClient = null;
}
@Override
public SparkJobRef submit(DriverContext driverContext, SparkWork sparkWork) throws Exception {
  // Hold the read lock so a concurrent close() (which takes the write lock)
  // cannot tear down the client while a job is being submitted.
  closeLock.readLock().lock();
  try {
    Preconditions.checkState(isOpen, "Hive on Spark session is not open. Can't submit jobs.");
    return hiveSparkClient.execute(driverContext, sparkWork);
  } finally {
    closeLock.readLock().unlock();
  }
}
/**
 * Forces a Spark config to be generated and checks that a config value has the expected value.
 *
 * @param conf the Hive config to use as a base
 * @param paramName the Spark config name to check
 * @param expectedValue the expected value in the Spark config
 */
private void checkSparkConf(HiveConf conf, String paramName, String expectedValue) throws HiveException {
  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  SparkSessionImpl sparkSessionImpl = (SparkSessionImpl) sessionManager.getSession(null, conf, true);
  assertTrue(sparkSessionImpl.isOpen());

  HiveSparkClient hiveSparkClient = sparkSessionImpl.getHiveSparkClient();
  SparkConf sparkConf = hiveSparkClient.getSparkConf();
  // Read the value before closing the session, since closing tears down the client.
  String cloneConfig = sparkConf.get(paramName);

  sessionManager.closeSession(sparkSessionImpl);
  assertEquals(expectedValue, cloneConfig);
  sessionManager.shutdown();
}
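As a usage sketch for this helper (the test name and the property/value pair are illustrative assumptions, not taken from the original):

@Test
public void testExecutorMemoryIsCloned() throws Exception {
  // Hypothetical test: set a Spark property on the HiveConf and verify it
  // survives into the SparkConf generated for the session.
  HiveConf conf = new HiveConf();
  conf.set("spark.executor.memory", "1g");
  checkSparkConf(conf, "spark.executor.memory", "1g");
}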
@Override
public ObjectPair<Long, Integer> getMemoryAndCores() throws Exception {
  // Pre-locking variant; the body past "else" is reconstructed to mirror the
  // locked variant above, as the fragment was truncated at the same point.
  SparkConf sparkConf = hiveSparkClient.getSparkConf();
  int numExecutors = hiveSparkClient.getExecutorCount();
  if (numExecutors <= 0) {
    return new ObjectPair<Long, Integer>(-1L, -1);
  }
  int executorMemoryInMB = Utils.memoryStringToMb(
      sparkConf.get("spark.executor.memory", "512m"));
  double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
  long totalMemory = (long) (numExecutors * executorMemoryInMB * memoryFraction * 1024 * 1024);
  int totalCores;
  String masterURL = sparkConf.get("spark.master");
  if (masterURL.startsWith("spark")) {
    totalCores = sparkConf.contains("spark.default.parallelism") ?
        sparkConf.getInt("spark.default.parallelism", 1) :
        hiveSparkClient.getDefaultParallelism();
    totalCores = Math.max(totalCores, numExecutors);
  } else {
    int coresPerExecutor = sparkConf.getInt("spark.executor.cores", 1);
    totalCores = numExecutors * coresPerExecutor;
  }
  totalCores = totalCores / sparkConf.getInt("spark.task.cpus", 1);
  long memoryPerTaskInBytes = totalMemory / totalCores;
  return new ObjectPair<Long, Integer>(Long.valueOf(memoryPerTaskInBytes),
      Integer.valueOf(totalCores));
}
@Override
public SparkJobRef submit(DriverContext driverContext, SparkWork sparkWork) throws Exception {
  Preconditions.checkState(isOpen, "Session is not open. Can't submit jobs.");
  return hiveSparkClient.execute(driverContext, sparkWork);
}
@Override
public void close() {
  if (isOpen) {
    closeLock.writeLock().lock();
    try {
      // Double-checked: re-test isOpen under the write lock so only one
      // thread performs the teardown, after in-flight submits (which hold
      // the read lock) have drained.
      if (isOpen) {
        LOG.info("Trying to close Hive on Spark session {}", sessionId);
        if (hiveSparkClient != null) {
          try {
            hiveSparkClient.close();
            LOG.info("Hive on Spark session {} successfully closed", sessionId);
            cleanScratchDir();
          } catch (IOException e) {
            LOG.error("Failed to close Hive on Spark session (" + sessionId + ")", e);
          }
        }
        hiveSparkClient = null;
        lastSparkJobCompletionTime = 0;
        isOpen = false;
      }
    } finally {
      closeLock.writeLock().unlock();
    }
  }
}
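For reference, a minimal sketch of the fields this read/write-lock pattern presumes; these declarations are inferred from the methods above, not shown in the fragments:

// Inferred supporting fields (assumptions): a reentrant read/write lock, and
// volatile fields so the unlocked fast-path check in close() reads fresh values.
private final ReadWriteLock closeLock = new ReentrantReadWriteLock();
private volatile boolean isOpen;
private volatile HiveSparkClient hiveSparkClient;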
@Override
public void close() {
  isOpen = false;
  if (hiveSparkClient != null) {
    try {
      hiveSparkClient.close();
    } catch (IOException e) {
      LOG.error("Failed to close spark session (" + sessionId + ").", e);
    }
  }
  hiveSparkClient = null;
}