Driver d = new Driver(new QueryState.Builder()
    .withGenerateNewQueryId(true)
    .withHiveConf(conf)
    .build(), userName);
SessionState localSession = null;
try {
d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
d.setMaxRows(10000);
dropTables();
protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType) {
  this.parentSession = parentSession;
  this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
  opTerminateMonitorLatch = new CountDownLatch(1);
  beginTime = System.currentTimeMillis();
  lastAccessTime = beginTime;
  operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(),
      HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
  scheduledExecutorService = Executors.newScheduledThreadPool(1);
  currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX,
      MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
  queryState = new QueryState.Builder()
      .withConfOverlay(confOverlay)
      .withGenerateNewQueryId(true)
      .withHiveConf(parentSession.getHiveConf())
      .build();
}
protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType) {
  this.parentSession = parentSession;
  this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
  beginTime = System.currentTimeMillis();
  lastAccessTime = beginTime;
  operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(),
      HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
  scheduledExecutorService = Executors.newScheduledThreadPool(1);
  currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX,
      MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
  queryState = new QueryState.Builder()
      .withConfOverlay(confOverlay)
      .withGenerateNewQueryId(true)
      .withHiveConf(parentSession.getHiveConf())
      .build();
}
queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build();
conf = queryState.getConf();
this.hadoopVer = getHadoopMainVersion(hadoopVer);
public void resetParser() throws SemanticException {
  pd = new ParseDriver();
  queryState = new QueryState.Builder().withHiveConf(conf).build();
  sem = new SemanticAnalyzer(queryState);
}
Driver driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build(),
    null, null, txnManager);
DriverCleanup driverCleanup = new DriverCleanup(driver, txnManager, splitsAppId.toString());
boolean needsCleanup = true;
runStatementOnDriver("insert into T values(1,4)");//makes delta_2_2 in T2 Driver driver2 = new Driver(new QueryState.Builder().withHiveConf(hiveConf).build(), null); driver2.setMaxRows(10000);
try {
  final QueryState qs = new QueryState.Builder().withHiveConf(conf).build();
  final CalcitePlanner planner = new CalcitePlanner(qs);
  final Context ctx = new Context(conf);
final QueryState queryState = new QueryState.Builder().withHiveConf(conf).build();
if (rewritten) {
QueryState queryState = new QueryState.Builder().build();
HiveConf conf = queryState.getConf();
conf.addResource("hive-site.xml");
boolean isOk = false;
try {
  QueryState qs = new QueryState.Builder().withHiveConf(conf).nonIsolated().build();
  Driver driver = new Driver(qs, user, null, null);
  driver.setCompactionWriteIds(writeIds, compactorTxnId);
@Before
public void setup() throws Exception {
  queryState = new QueryState.Builder().build();
  HiveConf conf = queryState.getConf();
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
      TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  partition = new Partition(table);
  SessionState.start(conf);
  context = new Context(conf);
  parseDriver = new ParseDriver();
  analyzer = new DDLSemanticAnalyzer(queryState, db);
  Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
  Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false)).thenReturn(partition);
  HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
  auth.setConf(conf);
  currentUser = auth.getUserName();
  DummyHiveAuthorizationTaskFactoryImpl.reset();
}
private void runSparkTestSession(HiveConf conf, int threadId) throws Exception {
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT, "10s");
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT_PERIOD, "1s");
  Driver driver = null;
  try {
    driver = new Driver(new QueryState.Builder()
        .withGenerateNewQueryId(true)
        .withHiveConf(conf).build(), null, null);
    SparkSession sparkSession = SparkUtilities.getSparkSession(conf, SparkSessionManagerImpl.getInstance());
    Assert.assertEquals(0, driver.run("show tables").getResponseCode());
    barrier.await();
    SparkContext sparkContext = getSparkContext(sparkSession);
    Assert.assertFalse(sparkContext.isStopped());
    if (threadId == 1) {
      // Thread 1 closes its session last and expects the SparkContext to be stopped afterwards.
      barrier.await();
      closeSparkSession(sparkSession);
      Assert.assertTrue(sparkContext.isStopped());
    } else {
      // The other thread closes first and expects the SparkContext to still be running.
      closeSparkSession(sparkSession);
      Assert.assertFalse(sparkContext.isStopped());
      barrier.await();
    }
  } finally {
    if (driver != null) {
      driver.destroy();
    }
  }
}
@Test
public void mrTaskSumbitViaChildWithImpersonation() throws IOException, LoginException {
  Utils.getUGI().setAuthenticationMethod(PROXY);
  Context ctx = Mockito.mock(Context.class);
  when(ctx.getLocalTmpPath()).thenReturn(new Path(System.getProperty("java.io.tmpdir")));
  DriverContext dctx = new DriverContext(ctx);
  QueryState queryState = new QueryState.Builder().build();
  HiveConf conf = queryState.getConf();
  conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
  MapredWork mrWork = new MapredWork();
  mrWork.setMapWork(Mockito.mock(MapWork.class));
  MapRedTask mrTask = Mockito.spy(new MapRedTask());
  mrTask.setWork(mrWork);
  mrTask.initialize(queryState, null, dctx, null);
  mrTask.jobExecHelper = Mockito.mock(HadoopJobExecHelper.class);
  when(mrTask.jobExecHelper.progressLocal(Mockito.any(Process.class), Mockito.anyString())).thenReturn(0);
  mrTask.execute(dctx);
  ArgumentCaptor<String[]> captor = ArgumentCaptor.forClass(String[].class);
  verify(mrTask).spawn(Mockito.anyString(), Mockito.anyString(), captor.capture());
  // The environment passed to the spawned child JVM must carry the proxy user.
  String expected = "HADOOP_PROXY_USER=" + Utils.getUGI().getUserName();
  Assert.assertTrue(Arrays.asList(captor.getValue()).contains(expected));
}
@Override
public void run() {
  SessionState ss = SessionState.start(hiveConf);
  try {
    ss.applyAuthorizationPolicy();
  } catch (HiveException e) {
    throw new RuntimeException(e);
  }
  QueryState qs = new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build();
  Driver d = new Driver(qs, null);
  try {
    LOG.info("Ready to run the query: " + query);
    syncThreadStart(cdlIn, cdlOut);
    try {
      CommandProcessorResponse cpr = d.run(query);
      if (cpr.getResponseCode() != 0) {
        throw new RuntimeException(query + " failed: " + cpr);
      }
      d.getResults(new ArrayList<String>());
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  } finally {
    d.close();
  }
}
}
@BeforeClass
public static void init() throws Exception {
  queryState = new QueryState.Builder().build();
  conf = queryState.getConf();
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);

  // Create a table so we can work against it
  Hive h = Hive.get(conf);
  List<String> cols = new ArrayList<String>();
  cols.add("a");
  List<String> partCols = new ArrayList<String>();
  partCols.add("ds");
  h.createTable("foo", cols, partCols, OrcInputFormat.class, OrcOutputFormat.class);
  Table t = h.getTable("foo");
  Map<String, String> partSpec = new HashMap<String, String>();
  partSpec.put("ds", "today");
  h.createPartition(t, partSpec);
}
@Before
public void setup() throws Exception {
  conf = new HiveConf();
  conf.set(HiveConf.ConfVars.LLAP_DAEMON_QUEUE_NAME.varname, "llap_queue");
  conf.set(MRJobConfig.QUEUE_NAME, "mr_queue");
  conf.set(TezConfiguration.TEZ_QUEUE_NAME, "tez_queue");
  tmpFolder = folder.newFolder().getAbsolutePath();
  conf.setVar(HiveConf.ConfVars.HIVE_PROTO_EVENTS_BASE_PATH, tmpFolder);
  QueryState state = new QueryState.Builder().withHiveConf(conf).build();
  @SuppressWarnings("serial")
  QueryPlan queryPlan = new QueryPlan(HiveOperation.QUERY) {};
  queryPlan.setQueryId("test_queryId");
  queryPlan.setQueryStartTime(1234L);
  queryPlan.setRootTasks(new ArrayList<>());
  queryPlan.setInputs(new HashSet<>());
  queryPlan.setOutputs(new HashSet<>());
  PerfLogger perf = PerfLogger.getPerfLogger(conf, true);
  context = new HookContext(queryPlan, state, null, "test_user", "192.168.10.10",
      "hive_addr", "test_op_id", "test_session_id", "test_thread_id", true, perf, null);
}
@Before
public void setup() throws Exception {
  queryState = new QueryState.Builder().build();
  // set authorization mode to V2
  HiveConf conf = queryState.getConf();
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName());
  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  SessionState.start(conf);
  Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
  Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false)).thenReturn(partition);
}
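Taken together, the excerpts above share one construction pattern: build a QueryState from a HiveConf (optionally with a generated query id, a conf overlay, or the nonIsolated flag) and then hand it to a Driver or an analyzer. The following is a minimal sketch that consolidates that pattern using only the builder and Driver calls that appear in the excerpts; the bare HiveConf instance and the row limit value are illustrative, not taken from any one snippet.

// Minimal sketch of the QueryState/Driver pattern shared by the excerpts above.
// Assumes a usable HiveConf; values here are placeholders.
HiveConf conf = new HiveConf();

QueryState queryState = new QueryState.Builder()
    .withGenerateNewQueryId(true)   // assign a fresh query id, as in the Operation constructor
    .withHiveConf(conf)             // seed the query-level configuration from the given HiveConf
    .build();

Driver driver = new Driver(queryState, null);   // null user name, as in several test excerpts
driver.setMaxRows(10000);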