@Override
public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException {
  OperationHandle opHandle = sessionManager.getSession(sessionHandle).getCatalogs();
  LOG.debug(sessionHandle + ": getCatalogs()");
  return opHandle;
}
@Override
public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
  sessionManager.closeSession(sessionHandle);
  LOG.debug(sessionHandle + ": closeSession()");
}
@Override
public void updateQueryTag(String queryId, String queryTag) throws HiveSQLException {
  sessionManager.getOperationManager().updateQueryTag(queryId, queryTag);
}
private void closeAndDisallowHiveSessions() {
  LOG.info("Closing all open hive sessions.");
  if (cliService == null) {
    return;
  }
  cliService.getSessionManager().allowSessions(false);
  // No sessions can be opened after the above call. Close the existing ones if any.
  try {
    for (HiveSession session : cliService.getSessionManager().getSessions()) {
      cliService.getSessionManager().closeSession(session.getSessionHandle());
    }
    LOG.info("Closed all open hive sessions");
  } catch (HiveSQLException e) {
    LOG.error("Unable to close all open sessions.", e);
  }
}
@Override
public synchronized void stop() {
  super.stop();
  shutdownTimeoutChecker();
  if (backgroundOperationPool != null) {
    backgroundOperationPool.shutdown();
    long timeout = hiveConf.getTimeVar(
        ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT, TimeUnit.SECONDS);
    try {
      backgroundOperationPool.awaitTermination(timeout, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      LOG.warn("HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT = " + timeout
          + " seconds has been exceeded. RUNNING background operations will be shut down", e);
    }
    backgroundOperationPool = null;
  }
  cleanupLoggingRootDir();
}
@Override
public synchronized void init(HiveConf hiveConf) {
  this.hiveConf = hiveConf;
  // Create operation log root directory, if operation logging is enabled
  if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
    initOperationLogRootDir();
  }
  createBackgroundOperationPool();
  addService(operationManager);
  initSessionImplClassName();
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    registerOpenSesssionMetrics(metrics);
    registerActiveSesssionMetrics(metrics);
  }
  userLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER);
  ipAddressLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS);
  userIpAddressLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS);
  LOG.info("Connections limit are user: {} ipaddress: {} user-ipaddress: {}",
      userLimit, ipAddressLimit, userIpAddressLimit);
  super.init(hiveConf);
}
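As a hedged sketch of how those three limits are supplied, the ConfVars above can be set on the HiveConf before init() runs; the numeric values here are illustrative assumptions, not defaults taken from the source.

// Illustrative configuration sketch (assumed values, not Hive defaults):
HiveConf conf = new HiveConf();
conf.setIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 25);
conf.setIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 25);
conf.setIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 10);
sessionManager.init(conf);  // the init() above then reads the limits via getIntVar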
SessionManager.setUserName(clientUserName);
SessionManager.setProxyUserName(doAsQueryParam);
LOG.debug("Client IP Address: " + clientIpAddress);
SessionManager.setIpAddress(clientIpAddress);
// ... request handling elided in this excerpt; the thread-local values set
// above are cleared once the request completes ...
SessionManager.clearUserName();
SessionManager.clearIpAddress();
SessionManager.clearProxyUserName();
SessionManager.setUserName(clientUserName);
SessionManager.setProxyUserName(doAsQueryParam);
LOG.debug("Client IP Address: " + clientIpAddress);
SessionManager.setIpAddress(clientIpAddress);
if (forwarded_for != null) {  // guard reconstructed from the dangling else in the excerpt
  LOG.debug("{}:{}", X_FORWARDED_FOR, forwarded_for);
  List<String> forwardedAddresses = Arrays.asList(forwarded_for.split(","));
  SessionManager.setForwardedAddresses(forwardedAddresses);
} else {
  SessionManager.setForwardedAddresses(Collections.<String>emptyList());
}
// ... request handling elided; the thread-local values are cleared afterwards ...
SessionManager.clearUserName();
SessionManager.clearIpAddress();
SessionManager.clearProxyUserName();
SessionManager.clearForwardedAddresses();
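For context, a minimal illustration of the header parsing above; the header value is a made-up example.

// X-Forwarded-For carries a comma-separated proxy chain; splitting on "," as
// above yields the originating client first, followed by intermediate proxies.
String forwardedFor = "203.0.113.7,198.51.100.2";  // hypothetical header value
List<String> addresses = Arrays.asList(forwardedFor.split(","));
// addresses.get(0) -> "203.0.113.7" (the client); addresses.get(1) -> the proxy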
@Override
public synchronized void init(HiveConf hiveConf) {
  this.hiveConf = hiveConf;
  // Create operation log root directory, if operation logging is enabled
  if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
    initOperationLogRootDir();
  }
  createBackgroundOperationPool();
  addService(operationManager);
  super.init(hiveConf);
}
public SessionHandle openSession(TProtocolVersion protocol, String username, String password,
    String ipAddress, Map<String, String> sessionConf) throws HiveSQLException {
  return openSession(protocol, username, password, ipAddress, sessionConf, false, null);
}
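A minimal usage sketch for this overload, assuming an initialized SessionManager named sm (as in the tests below); the try/finally is an assumption about cleanup hygiene, not code from the source.

// Open a session without impersonation or delegation token, use it, close it.
SessionHandle handle = sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
    "user", "passw", "127.0.0.1", new HashMap<String, String>());
try {
  HiveSession session = sm.getSession(handle);
  // ... run operations against the session ...
} finally {
  sm.closeSession(handle);  // release server-side state even if an operation fails
}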
SessionHandle handle = sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
    "user", "passw", "127.0.0.1", new HashMap<String, String>());
final HiveSession session = sm.getSession(handle);
OperationManager operationManager = mock(OperationManager.class);
when(operationManager.
@Test
public void testOpenSessionTimeMetrics() throws Exception {
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, "NaN");

  long firstSessionOpen = System.currentTimeMillis();
  SessionHandle handle = sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
      "user", "passw", "127.0.0.1", new HashMap<String, String>());
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_AVG_OPEN_SESSION_TIME,
      (double) (System.currentTimeMillis() - firstSessionOpen), 100d);

  long secondSessionOpen = System.currentTimeMillis();
  sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
      "user", "passw", "127.0.0.1", new HashMap<String, String>());
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_AVG_OPEN_SESSION_TIME,
      (double) (System.currentTimeMillis() - firstSessionOpen
          + System.currentTimeMillis() - secondSessionOpen) / 2d, 100d);

  sm.closeSession(handle);
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_AVG_OPEN_SESSION_TIME,
      (double) (System.currentTimeMillis() - secondSessionOpen), 100d);
}
@Override
public Future<?> submitBackgroundOperation(Runnable work) {
  return getSessionManager().submitBackgroundOperation(
      operationLock == null ? work : new FutureTask<Void>(work, null) {
        @Override
        protected void done() {
          // We assume this always comes from a user operation that took the lock.
          operationLock.release();
        }
      });
}
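The FutureTask wrapper exists so the per-session operationLock is released on the pool thread once the work finishes; done() also fires on failure and cancellation, so the lock cannot leak. A self-contained sketch of the same pattern (class and variable names here are hypothetical, not Hive APIs):

// Demonstrates FutureTask.done() as a completion hook that releases a semaphore.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.Semaphore;

public class DoneHookDemo {
  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newSingleThreadExecutor();
    Semaphore lock = new Semaphore(1);
    lock.acquire();  // simulate the user operation taking the lock
    FutureTask<Void> task = new FutureTask<Void>(
        () -> System.out.println("background work"), null) {
      @Override
      protected void done() {
        lock.release();  // runs whether the task completed, failed, or was cancelled
      }
    };
    pool.submit(task);
    lock.acquire();  // blocks until done() releases the semaphore
    System.out.println("lock released after task finished");
    pool.shutdown();
  }
}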
@Override
public synchronized void init(HiveConf hiveConf) {
  setHiveConf(hiveConf);
  sessionManager = new SessionManager(hiveServer2, allowSessionsInitial);
  defaultFetchRows = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE);
  addService(sessionManager);
  // ... remainder of init elided in this excerpt ...
}
@Override
public synchronized void stop() {
  super.stop();
  shutdown = true;
  if (backgroundOperationPool != null) {
    backgroundOperationPool.shutdown();
    long timeout = hiveConf.getTimeVar(
        ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT, TimeUnit.SECONDS);
    try {
      backgroundOperationPool.awaitTermination(timeout, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      LOG.warn("HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT = " + timeout
          + " seconds has been exceeded. RUNNING background operations will be shut down", e);
    }
    backgroundOperationPool = null;
  }
  cleanupLoggingRootDir();
}
private void closeHiveSessions() {
  LOG.info("Closing all open hive sessions.");
  if (cliService != null && cliService.getSessionManager().getOpenSessionCount() > 0) {
    try {
      for (HiveSession session : cliService.getSessionManager().getSessions()) {
        cliService.getSessionManager().closeSession(session.getSessionHandle());
      }
      LOG.info("Closed all open hive sessions");
    } catch (HiveSQLException e) {
      LOG.error("Unable to close all open sessions.", e);
    }
  }
}
@Test
public void testAbandonedSessionMetrics() throws Exception {
  sm.start();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER,
      MetricsConstant.HS2_ABANDONED_SESSIONS, "");

  sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
      "user", "passw", "127.0.0.1", new HashMap<String, String>());

  // We're going to wait for the session to be abandoned.
  String currentValue;
  int count = 5; // how many times we'll sleep before giving up
  String expectedValue = "1";
  do {
    // HIVE_SERVER2_SESSION_CHECK_INTERVAL is set to 3 seconds, so we have to wait for
    // at least that long to see an abandoned session
    Thread.sleep(3200);
    json = metrics.dumpJson();
    currentValue = MetricsTestUtils
        .getJsonNode(json, MetricsTestUtils.COUNTER, MetricsConstant.HS2_ABANDONED_SESSIONS)
        .asText();
    // loop until the value is correct or we run out of tries
  } while (!expectedValue.equals(currentValue) && --count > 0);
  Assert.assertEquals(expectedValue, currentValue);
}
@Before
public void setup() throws Exception {
  HiveConf conf = new HiveConf();
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS, 2);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_WAIT_QUEUE_SIZE, 10);
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME, "1000000s");
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_IDLE_SESSION_TIMEOUT, "500ms");
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL, "3s");
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER,
      MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
  conf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, false);
  MetricsFactory.init(conf);

  sm = new SessionManager(null, true);
  sm.init(conf);

  metrics = (CodahaleMetrics) MetricsFactory.getInstance();

  Hive doNothingHive = mock(Hive.class);
  Hive.set(doNothingHive);
}
Collection<HiveSession> hiveSessions = sessionManager.getSessions();
int sessionCount = 0;
for (HiveSession hiveSession : hiveSessions) {
  // ... per-session rendering elided in this excerpt ...
}

Collection<QueryInfo> operations = sessionManager.getOperationManager().getLiveQueryInfos();
for (QueryInfo operation : operations) {
  if (!HttpServer.hasAccess(remoteUser, operation.getUserName(), ctx, request)) {
    continue;  // skip live queries the remote user may not view
  }
  // ... render live query row ...
}

operations = sessionManager.getOperationManager().getHistoricalQueryInfos();
for (QueryInfo operation : operations) {
  if (!HttpServer.hasAccess(remoteUser, operation.getUserName(), ctx, request)) {
    continue;  // same access check applied to historical queries
  }
  // ... render historical query row ...
}