/**
 * Makes {@code startSs} the current session for the calling thread.
 * A thread that runs multiple sessions must call this method with the
 * new session object each time it switches from one session to another.
 *
 * @param startSs the session to activate for this thread
 * @return the same {@code startSs} instance, now current
 */
public static SessionState start(SessionState startSs) {
  // Delegate to the full overload with default arguments.
  start(startSs, false, null);
  return startSs;
}
public void run() {
  // Attach the shared session to this worker thread, register the jar
  // with the session so it lands on the session classpath, then capture
  // the context class loader that results (inspected by the test).
  SessionState.start(ss);
  SessionState.registerJars(Arrays.asList(jar));
  loader = Thread.currentThread().getContextClassLoader();
}
}
/**
 * Starts a session for the shared {@code conf} and returns a new
 * {@link Driver} bound to it.
 *
 * @return a Driver configured with the shared test conf
 */
private Driver createDriver() throws IllegalAccessException, ClassNotFoundException, InstantiationException {
  SessionState.start(conf);
  return new Driver(conf);
}
/**
 * Returns a new {@link Driver} for {@code conf} after disabling
 * concurrency support (no lock manager needed in these tests) and
 * starting a session.
 *
 * @param conf the configuration to drive with; mutated in place
 * @return a Driver bound to {@code conf}
 */
private static Driver createDriver(HiveConf conf) {
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(conf);
  return new Driver(conf);
}
/** One-time setup: creates the shared HiveConf and starts a session with it. */
@BeforeClass
public static void setUp() {
  hiveConf = new HiveConf(TestHiveCopyFiles.class);
  SessionState.start(hiveConf);
}
/** One-time setup: builds the shared conf and starts a session for it. */
@BeforeClass
public static void initialize() {
  conf = new HiveConf(SemanticAnalyzer.class);
  SessionState.start(conf);
}
/** Per-test setup: creates a fresh MapRedTask and (re)starts the session. */
@Before
public void setUp() {
  dummyMRTask = new MapRedTask();
  SessionState.start(hiveConf);
}
/** One-time setup: builds the shared conf and starts a session for it. */
@BeforeClass
public static void initialize() {
  conf = new HiveConf(SemanticAnalyzer.class);
  SessionState.start(conf);
}
/**
 * Verifies that monitoring a remote Spark job which reports CANCELLED
 * (while still marked active) makes the monitor return the cancelled
 * return code (3).
 */
@Test
public void testRemoteSparkCancel() {
  // Simulate a remote job stuck in CANCELLED state but still "active".
  RemoteSparkJobStatus jobSts = mock(RemoteSparkJobStatus.class);
  when(jobSts.getRemoteJobState()).thenReturn(State.CANCELLED);
  when(jobSts.isRemoteActive()).thenReturn(true);
  HiveConf hiveConf = new HiveConf();
  SessionState.start(hiveConf);
  RemoteSparkJobMonitor remoteSparkJobMonitor = new RemoteSparkJobMonitor(hiveConf, jobSts);
  // JUnit's assertEquals signature is (expected, actual); the original had
  // the arguments reversed, which yields a misleading failure message.
  Assert.assertEquals(3, remoteSparkJobMonitor.startMonitor());
}
/**
 * Returns a new {@link Driver} over a fresh conf configured with the
 * SQL-standard authorizer, after starting a session.
 */
private Driver createDriver() {
  HiveConf conf = new HiveConf(Driver.class);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);
  return new Driver(conf);
}
/**
 * Verifies that HCat authorization is reported as enabled when
 * authorization is turned on with the (v1) StorageBasedAuthorizationProvider.
 */
@Test
public void authEnabledV1Auth() throws Exception {
  HiveConf hcatConf = new HiveConf(this.getClass());
  hcatConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  hcatConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
      StorageBasedAuthorizationProvider.class.getName());
  SessionState.start(hcatConf);
  assertTrue("hcat auth should be enabled", HCatAuthUtil.isAuthorizationEnabled(hcatConf));
}
public TestDbTxnManager() throws Exception {
  // Configure the SQL-standard authorizer and the transactional metastore
  // settings, then start a session and build the Context used by the tests.
  conf
  .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  TxnDbUtil.setConfValues(conf);
  SessionState.start(conf);
  ctx = new Context(conf);
  // NOTE(review): this invokes an overridable method from a constructor —
  // presumably to reset txn DB state before each instance runs; confirm
  // that no subclass overrides tearDown() in a way that relies on fields
  // initialized after construction.
  tearDown();
}
/**
 * Returns an {@link IDriver} over the shared test conf with vectorization
 * disabled and the SQL-standard authorizer configured.
 */
private static IDriver createDriver() {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);
  return DriverFactory.newDriver(conf);
}
}
/**
 * Returns an {@link IDriver} over the shared test conf with the
 * SQL-standard authorizer configured and concurrency support disabled.
 */
private static IDriver createDriver() {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(conf);
  return DriverFactory.newDriver(conf);
}
/**
 * Lazily initializes the driver, metastore client and CLI session on the
 * first invocation; subsequent calls reuse the already-created driver.
 */
@Before
public void setUp() throws Exception {
  if (driver != null) {
    return; // already initialized by an earlier test
  }
  setUpHiveConf();
  driver = DriverFactory.newDriver(hiveConf);
  client = new HiveMetaStoreClient(hiveConf);
  SessionState.start(new CliSessionState(hiveConf));
}
/**
 * Returns a new {@link Driver} over a fresh conf configured with the
 * SQL-standard authorizer, concurrency disabled, and scanned-column
 * statistics collection enabled.
 */
private static Driver createDriver() {
  HiveConf conf = new HiveConf(Driver.class);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS, true);
  SessionState.start(conf);
  return new Driver(conf);
}
/**
 * Resolves the char/varchar primitive type infos used by the tests and
 * starts a session with a default conf.
 */
@Override
protected void setUp() {
  maxVarchar = TypeInfoFactory.getPrimitiveTypeInfo(
      "varchar(" + HiveVarchar.MAX_VARCHAR_LENGTH + ")");
  varchar10 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(10)");
  varchar5 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(5)");
  char10 = TypeInfoFactory.getPrimitiveTypeInfo("char(10)");
  char5 = TypeInfoFactory.getPrimitiveTypeInfo("char(5)");
  SessionState.start(new HiveConf());
}
/**
 * Per-test setup: builds a fresh QueryState, starts a session on its conf,
 * and creates the Context and MacroSemanticAnalyzer under test.
 */
@Before
public void setup() throws Exception {
  queryState = new QueryState.Builder().build();
  conf = queryState.getConf();
  SessionState.start(conf);
  context = new Context(conf);
  analyzer = new MacroSemanticAnalyzer(queryState);
}
/**
 * One-time setup: configures the msck repair batch size and SQL-standard
 * authorizer, starts a session, and initializes the shared metastore
 * client and Msck instance.
 */
@BeforeClass
public static void setupClass() throws Exception {
  hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
  // Small batch size so batching behavior is exercised by the tests.
  hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(hiveConf);
  db = new HiveMetaStoreClient(hiveConf);
  msck = new Msck(false, false);
  msck.init(hiveConf);
}
/**
 * With embedded HS2 the session user name can be null; verify that asking
 * the authenticator for group names then returns null instead of throwing
 * an NPE.
 *
 * @throws Exception on session setup failure
 */
@Test
public void testSessionNullUser() throws Exception {
  SessionState ss = new SessionState(getAuthV2HiveConf(), null);
  setupDataNucleusFreeHive(ss.getConf());
  SessionState.start(ss);
  assertNull("getGroupNames when userName == null",
      ss.getAuthenticator().getGroupNames());
}