conf.setVar(HiveConf.ConfVars.METASTOREURIS, "");
Driver d = new Driver(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(),
    userName);
SessionState localSession = null;
try {
  if (SessionState.get() == null) {
    localSession = new SessionState(conf);
    SessionState.start(localSession);
  }
  CommandProcessorResponse cpr = d.run(sb.toString());
  if (cpr.getResponseCode() != 0) {
    LOG.warn(ci + ": " + sb.toString() + " failed due to: " + cpr);
  }
} finally {
  if (localSession != null) {
    localSession.close();
  }
}
" is not valid. Context: " + query); SessionState.setCurrentSessionState(sessionState); boolean isOk = false; try { QueryState qs = new QueryState.Builder().withHiveConf(conf).nonIsolated().build(); Driver driver = new Driver(qs, user, null, null); driver.setCompactionWriteIds(writeIds, compactorTxnId); try { CommandProcessorResponse cpr = driver.run(query); if (cpr.getResponseCode() != 0) { LOG.error("Failed to run " + query, cpr.getException()); throw new HiveException("Failed to run " + query, cpr.getException()); driver.close(); driver.destroy(); if (!isOk) { try { sessionState.close(); // This also resets SessionState.get. } catch (Throwable th) { LOG.warn("Failed to close a bad session", th); SessionState.detachSession();
private List<String> runStatementOnDriver(String stmt) throws Exception {
  CommandProcessorResponse cpr = d.run(stmt);
  if (cpr.getResponseCode() != 0) {
    throw new RuntimeException(stmt + " failed: " + cpr);
  }
  List<String> rs = new ArrayList<String>();
  d.getResults(rs);
  return rs;
}

@Before
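For illustration, a hedged example of how this helper is typically exercised; the statements and the single-row assertion are assumptions, following the Table.ACIDTBL pattern of the tests further below.

@Test
public void testSimpleReadAfterWrite() throws Exception {
  // Hypothetical statements; Table.ACIDTBL follows the ACID tests below.
  runStatementOnDriver("insert into " + Table.ACIDTBL + " values(1,2)");
  List<String> rows = runStatementOnDriver("select a, b from " + Table.ACIDTBL + " order by a");
  Assert.assertEquals(1, rows.size());
}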
@Override
public Schema getSchema() {
  if (explainReOptimization) {
    return coreDriver.getExplainSchema();
  }
  return coreDriver.getSchema();
}
@Override
public void close() throws IOException {
  try {
    LOG.info("DriverCleanup for LLAP splits: {}", applicationId);
    driver.releaseLocksAndCommitOrRollback(true);
    driver.close();
    driver.destroy();
    txnManager.closeTxnManager();
  } catch (Exception err) {
    LOG.error("Error closing driver resources", err);
    throw new IOException(err);
  }
}
@Override
public void close() {
  lDrvState.stateLock.lock();
  try {
    releaseDriverContext();
    if (lDrvState.driverState == DriverState.COMPILING
        || lDrvState.driverState == DriverState.EXECUTING) {
      lDrvState.abort();
    }
    releasePlan();
    releaseCachedResult();
    releaseFetchTask();
    releaseResStream();
    releaseContext();
    lDrvState.driverState = DriverState.CLOSED;
  } finally {
    lDrvState.stateLock.unlock();
    LockedDriverState.removeLockedDriverState();
  }
  destroy();
}
private Driver createDriver() {
  HiveConf conf = new HiveConf(Driver.class);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);
  return new Driver(conf);
}
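A brief usage sketch for such a factory; the statement is an assumption, and the run-then-check idiom matches the onetimeSetup test further below.

// Hypothetical caller: SessionState is already started inside createDriver().
Driver driver = createDriver();
try {
  int ret = driver.run("show tables").getResponseCode();
  assertEquals(0, ret);
} finally {
  driver.close();
}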
if (isInterrupted()) {
  return handleInterruption("at beginning of compilation."); // indicate if need clean resource
}
closeInProcess(false);
if (isInterrupted()) {
  return handleInterruption("before parsing and analysing the query");
}
ctx.setTryCount(getTryCount());
ctx.setCmd(command);
ctx.setHDFSCleanup(true);
queryHooks = loadQueryHooks();
if (queryHooks != null && !queryHooks.isEmpty()) {
  QueryLifeTimeHookContext qhc = new QueryLifeTimeHookContextImpl();
}
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
List<HiveSemanticAnalyzerHook> saHooks =
    getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, HiveSemanticAnalyzerHook.class);
if (isInterrupted()) {
  return handleInterruption("after analyzing query.");
}
schema = getSchema(sem, conf);
plan = new QueryPlan(queryStr, sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId,
    queryState.getHiveOperation(), schema);
doAuthorization(queryState.getHiveOperation(), sem, command);
"outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' " + "tblproperties ('myprop1'='val1', 'myprop2' = 'val2')"; Driver driver = new Driver(conf); CommandProcessorResponse response = driver.run(cmd); assertEquals(0, response.getResponseCode()); List<Object> result = new ArrayList<Object>(); response = driver.run(cmd); assertEquals(0, response.getResponseCode()); response = driver.run(cmd); assertEquals(0, response.getResponseCode()); driver.setMaxRows(500); response = driver.run(cmd); assertEquals(0, response.getResponseCode()); driver.getResults(result); assertEquals(20, result.size()); driver.close();
hiveConf.set("mapred.local.dir", workDir + File.separator + this.getClass().getSimpleName() + File.separator + "mapred" + File.separator + "local"); hiveConf.set("mapred.system.dir", workDir + File.separator + this.getClass().getSimpleName() + File.separator + "mapred" + File.separator + "system"); hiveConf.set("mapreduce.jobtracker.staging.root.dir", workDir + File.separator + this.getClass().getSimpleName() + File.separator + "mapred" + File.separator + "staging"); hiveConf.set("mapred.temp.dir", workDir + File.separator + this.getClass().getSimpleName() throw new RuntimeException("Could not create " + getWarehouseDir()); SessionState ss = SessionState.start(hiveConf); ss.applyAuthorizationPolicy(); d = new Driver(new QueryState(hiveConf), null); d.setMaxRows(10000);
qp.setTryCount(tryCount);
ret = qp.run(cmd).getResponseCode();
if (ret != 0) {
  qp.close();
  return ret;
}
while (qp.getResults(res)) {
  for (String r : res) {
    out.println(r);
  }
}
console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
    "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
ret = 1;
int cret = qp.close();
if (ret == 0) {
  ret = cret;
}
if (res.getResponseCode() != 0) {
  ss.out.println("Query returned non-zero code: " + res.getResponseCode()
      + ", cause: " + res.getErrorMessage());
}
close();
plan = null;
saveSession(queryState);
ctx.setTryCount(getTryCount());
ctx.setCmd(command);
ctx.setHDFSCleanup(true);
SessionState.get().initTxnMgr(conf);
recordValidTxns();
getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, HiveSemanticAnalyzerHook.class);
schema = getSchema(sem, conf);
try {
  doAuthorization(sem, command);
} catch (AuthorizationException authExp) {
  console.printError("Authorization failed:" + authExp.getMessage()
      + ". Use SHOW GRANT to get more details.");
}
String explainOutput = getExplainOutput(sem, plan, tree.dump());
if (explainOutput != null) {
  LOG.info("EXPLAIN output for queryid " + queryId + " : " + explainOutput);
}
} catch (Exception e) {
  downstreamError = e;
  console.printError(errorMessage,
      "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return error.getErrorCode();
}
checkInterrupted("at beginning of compilation.", null, null); closeInProcess(false); setTriggerContext(queryId); checkInterrupted("before parsing and analysing the query", null, null); setTriggerContext(queryId); if ((queryState.getHiveOperation() != null) && queryState.getHiveOperation().equals(HiveOperation.REPLDUMP)) { setLastReplIdForDump(queryState.getConf()); openTransaction(); generateValidTxnList(); checkInterrupted("after analyzing query.", null, null); schema = getSchema(sem, conf); plan = new QueryPlan(queryStr, sem, queryDisplay.getQueryStartTime(), queryId, queryState.getHiveOperation(), schema); doAuthorization(queryState.getHiveOperation(), sem, command); errorMessage = authExp.getMessage(); SQLState = "42000"; throw createProcessorResponse(403); } finally {
String query = ctx.getTokenRewriteStream().toString(input.getTokenStartIndex(),
    input.getTokenStopIndex());
LOG.info("Explain analyze (running phase) for query " + query);
conf.unset(ValidTxnList.VALID_TXNS_KEY);
conf.unset(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY);
Context runCtx = null;
try {
  Driver driver = new Driver(conf, runCtx, queryState.getLineageState());
  CommandProcessorResponse ret = driver.run(query);
  if (ret.getResponseCode() == 0) {
    // getResults must still be called to drain simple-fetch queries; the rows are discarded.
    while (driver.getResults(new ArrayList<String>())) {
    }
  } else {
    throw new SemanticException(ret.getErrorMessage(), ret.getException());
  }

&& (HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER)
    && HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")
@Test
public void testMoreBucketsThanReducers() throws Exception {
  // see bucket_num_reducers.q bucket_num_reducers2.q
  // todo: try using set VerifyNumReducersHook.num.reducers=10;
  d.destroy();
  HiveConf hc = new HiveConf(hiveConf);
  hc.setIntVar(HiveConf.ConfVars.MAXREDUCERS, 1);
  // this is used in multiple places, SemanticAnalyzer.getBucketingSortingDest() among others
  hc.setIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS, 1);
  hc.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, false);
  d = new Driver(hc);
  d.setMaxRows(10000);
  runStatementOnDriver("insert into " + Table.ACIDTBL + " values(1,1)"); // txn X writes to bucket1
  runStatementOnDriver("insert into " + Table.ACIDTBL + " values(0,0),(3,3)"); // txn X + 1 writes to bucket0 + bucket1
  runStatementOnDriver("update " + Table.ACIDTBL + " set b = -1");
  List<String> r = runStatementOnDriver("select * from " + Table.ACIDTBL + " order by a, b");
  int[][] expected = {{0, -1}, {1, -1}, {3, -1}};
  Assert.assertEquals(stringifyValues(expected), r);
}

@Ignore("Moved to Tez")
@BeforeClass
public static void onetimeSetup() throws Exception {
  HiveConf conf = new HiveConf(TestHooks.class);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  Driver driver = createDriver(conf);
  int ret = driver.run("create table t1(i int)").getResponseCode();
  assertEquals("Checking command success", 0, ret);
}
public void runWithinSession(Consumer<Driver> driverConsumer) {
  final HiveConf hiveConf = new HiveConf(SessionState.class);
  driverConf.forEach(hiveConf::set);
  SessionState ss = new SessionState(hiveConf);
  try (Closeable ssClose = ss::close) {
    SessionState.start(ss);
    driverConsumer.accept(new Driver(hiveConf));
  } catch (IOException e) {
    throw new RuntimeException("Exception was thrown while closing SessionState", e);
  }
}
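A hedged usage example for runWithinSession; the statement and the error handling are assumptions. The Consumer receives a Driver bound to the freshly started session, and the broad catch keeps the lambda compiling across Hive versions where run() declares checked exceptions.

runWithinSession(driver -> {
  CommandProcessorResponse cpr;
  try {
    cpr = driver.run("show databases");
  } catch (Exception e) { // run() declares checked exceptions in some Hive versions
    throw new RuntimeException(e);
  } finally {
    driver.close();
  }
  if (cpr.getResponseCode() != 0) {
    throw new RuntimeException("Query failed: " + cpr.getErrorMessage());
  }
});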
close();
plan = null;
List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
schema = getSchema(sem, conf);
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
  try {
    doAuthorization(sem);
  } catch (AuthorizationException authExp) {
    console.printError("Authorization failed:" + authExp.getMessage()
        + ". Use SHOW GRANT to get more details.");
  }
}
} catch (SemanticException e) {
  SQLState = ErrorMsg.findSQLState(e.getMessage());
  console.printError(errorMessage,
      "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return (10);
} catch (ParseException e) {
  SQLState = ErrorMsg.findSQLState(e.getMessage());
  console.printError(errorMessage
      + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return (12);
}
@Test
public void testInsertOverwriteToAcidWithUnionRemove() throws Exception {
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE, true);
  hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHTASKCONVERSION, "none");
  d.close();
  d = new Driver(hiveConf);
  int[][] values = {{1, 2}, {3, 4}, {5, 6}, {7, 8}, {9, 10}};
  runStatementOnDriver("drop table if exists T");
  runStatementOnDriver("create table T (a int, b int) stored as ORC TBLPROPERTIES ('transactional'='true')");
  CommandProcessorResponse cpr = runStatementOnDriverNegative(
      "insert overwrite table T select a, b from " + TxnCommandsBaseForTests.Table.ACIDTBL
          + " where a between 1 and 3 group by a, b union all select a, b from "
          + TxnCommandsBaseForTests.Table.ACIDTBL + " where a between 5 and 7 union all select a, b from "
          + TxnCommandsBaseForTests.Table.ACIDTBL + " where a >= 9");
  Assert.assertTrue("", cpr.getErrorMessage().contains("not supported due to OVERWRITE and UNION ALL"));
}

/**
driver = new Driver(sqlOperationConf, getParentSession().getUserName());
driver.setOperationId(guid64);
driver.setTryCount(Integer.MAX_VALUE);
response = driver.compileAndRespond(subStatement);
if (0 != response.getResponseCode()) {
  throw toSQLException("Error while compiling statement", response);
}
mResultSchema = driver.getSchema();
if (driver.getPlan().getFetchTask() != null) {
  for (Task<? extends Serializable> task : driver.getPlan().getRootTasks()) {
    if (task.getClass() == ExplainTask.class) {
      resultSchema = new TableSchema(mResultSchema);
    }
  }
}
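Condensed from the fragment above, a sketch of the compile-first flow, assuming the same compileAndRespond/getSchema signatures: compiling separately lets the caller surface the result schema before any execution happens.

// Hypothetical excerpt: compile, validate, then inspect the schema of the pending query.
CommandProcessorResponse response = driver.compileAndRespond(statement);
if (response.getResponseCode() != 0) {
  throw new RuntimeException("Compilation failed: " + response.getErrorMessage());
}
Schema schema = driver.getSchema(); // column names and types, available before run()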