/**
 * Lazily replaces {@code conf} with a private copy so later mutations do not
 * leak into the shared configuration. Subsequent calls are no-ops.
 */
protected void cloneConf() {
  if (clonedConf) {
    return;
  }
  clonedConf = true;
  conf = new HiveConf(conf);
}
/**
 * Builds a metastore client configuration with thrift SASL enabled for the
 * given URI, or returns {@code null} when token authentication is disabled.
 */
private HiveConf createHiveConf(String metaStoreURI, boolean tokenAuthEnabled) {
  if (!tokenAuthEnabled) {
    return null;
  }
  final HiveConf metastoreConf = new HiveConf();
  metastoreConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  metastoreConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  return metastoreConf;
}
/**
 * Creates a HiveConf for {@code clazz}, optionally pointing it at the given
 * metastore URI, and applies the common setting overrides before returning.
 */
public static HiveConf newInstance(Class<?> clazz, String metaStoreUri) {
  final HiveConf result = new HiveConf(clazz);
  if (metaStoreUri != null) {
    setHiveConf(result, HiveConf.ConfVars.METASTOREURIS, metaStoreUri);
  }
  overrideSettings(result);
  return result;
}
@Override
protected void setUp() throws Exception {
  super.setUp();
  // Set the restricted-list system property BEFORE constructing the conf so
  // the new HiveConf picks the restriction up during initialization.
  System.setProperty(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname,
      ConfVars.HIVETESTMODEPREFIX.varname);
  conf = new HiveConf();
}
public LlapStatusServiceDriver() { SessionState ss = SessionState.get(); conf = (ss != null) ? ss.getConf() : new HiveConf(SessionState.class); setupConf(); }
/** @return a fresh HiveConf with authorization checking switched on. */
private HiveConf newAuthEnabledConf() {
  final HiveConf authConf = new HiveConf();
  authConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  return authConf;
}
@Override public void beforeClass(HiveTestEnvContext ctx) throws Exception { File confFolder = new File(ctx.tmpFolder, "conf"); FileUtils.copyDirectory(new File(DATA_DIR + "/conf/"), confFolder); FileUtils.copyDirectory(new File(DATA_DIR + "/conf/tez"), confFolder); HiveConf.setHiveSiteLocation(new File(confFolder, "hive-site.xml").toURI().toURL()); HiveConf.setHivemetastoreSiteUrl(new File(confFolder, "hivemetastore-site.xml").toURI().toURL()); // FIXME: hiveServer2SiteUrl is not settable? ctx.hiveConf = new HiveConf(IDriver.class); ctx.hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST_IDE, true); }
/**
 * Collects the configuration rows reported by {@code getConfInternal} into a
 * new HiveConf instance. A null row set yields an unmodified default conf.
 */
public HiveConf getHiveConfHelper(boolean call) {
  HiveConf result = new HiveConf();
  BufferedRows confRows = getConfInternal(call);
  if (confRows != null) {
    // confRows is never reassigned, so the null check need not be repeated
    // on every iteration.
    while (confRows.hasNext()) {
      addConf((Rows.Row) confRows.next(), result);
    }
  }
  return result;
}
/**
 * Builds a secure (Kerberos/SASL) metastore client configuration.
 *
 * @param metaStoreURI thrift URI of the metastore to connect to
 * @param hiveMetaStorePrincipal Kerberos principal of the metastore service
 */
public HiveConf createHiveConf(String metaStoreURI, String hiveMetaStorePrincipal) throws IOException {
  final HiveConf secureConf = new HiveConf();
  secureConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  secureConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  secureConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  secureConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  secureConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, hiveMetaStorePrincipal);
  return secureConf;
}
@Before
public void setup() throws AuthenticationException {
  // Minimal LDAP-auth configuration for the tests below.
  conf = new HiveConf();
  conf.set("hive.root.logger", "DEBUG,console");
  conf.set("hive.server2.authentication.ldap.url", "localhost");
  // Every factory lookup hands back the mocked search object.
  when(factory.getInstance(any(HiveConf.class), anyString(), anyString()))
      .thenReturn(search);
}
public static HiveConf getHiveConf() { HiveConf conf = new HiveConf(); // String metastoreDBLocation = "jdbc:derby:databaseName=/tmp/metastore_db;create=true"; // conf.set("javax.jdo.option.ConnectionDriverName","org.apache.derby.jdbc.EmbeddedDriver"); // conf.set("javax.jdo.option.ConnectionURL",metastoreDBLocation); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); return conf; }
public AbstractTestGenericUDFOPNumeric() {
  // Arithmetic operations read the active SessionState's conf, so a session
  // must be installed before any UDF under test runs.
  SessionState sessionState = new SessionState(new HiveConf());
  sessionState.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
  SessionState.setCurrentSessionState(sessionState);
}
public TestVectorArithmetic() {
  // Arithmetic operations read the active SessionState's conf, so a session
  // must be installed before any vectorized test runs.
  SessionState sessionState = new SessionState(new HiveConf());
  sessionState.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "default");
  SessionState.setCurrentSessionState(sessionState);
}
@Test
public void testCreateCandidatePrincipalsLdapDomain() {
  HiveConf ldapConf = new HiveConf();
  ldapConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN, "mycompany.com");
  // A bare user id should be qualified with the configured LDAP domain.
  List<String> actual = LdapUtils.createCandidatePrincipals(ldapConf, "user1");
  assertEquals(Arrays.asList("user1@mycompany.com"), actual);
}
/**
 * Test with auth enabled and StorageBasedAuthorizationProvider.
 */
@Test
public void authEnabledV1Auth() throws Exception {
  HiveConf sessionConf = new HiveConf(this.getClass());
  sessionConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
      StorageBasedAuthorizationProvider.class.getName());
  SessionState.start(sessionConf);
  assertTrue("hcat auth should be enabled", HCatAuthUtil.isAuthorizationEnabled(sessionConf));
}
@Before
public void before() throws Exception {
  // Reset any metrics system left behind by a previous test.
  MetricsFactory.close();
  HiveConf metricsConf = new HiveConf();
  metricsConf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, LegacyMetrics.class.getCanonicalName());
  MetricsFactory.init(metricsConf);
  metrics = (LegacyMetrics) MetricsFactory.getInstance();
}
@Test
public void testEncodingDecoding() throws UnsupportedEncodingException {
  HiveConf queryConf = new HiveConf();
  String rawQuery = "select blah, '\u0001' from random_table";
  queryConf.setQueryString(rawQuery);
  // The stored form is URL-encoded; the accessor must decode it back verbatim.
  Assert.assertEquals(URLEncoder.encode(rawQuery, "UTF-8"),
      queryConf.get(ConfVars.HIVEQUERYSTRING.varname));
  Assert.assertEquals(rawQuery, queryConf.getQueryString());
}
}
@Before public void before() throws Exception { HiveConf conf = new HiveConf(); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); // disable json file writing conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "60000m"); MetricsFactory.init(conf); metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); }
/**
 * Verify that no exception is thrown if authorization is enabled from hive cli,
 * when sql std auth is used.
 */
@Test
public void testAuthEnable() throws Exception {
  HiveConf processedConf = new HiveConf();
  processedConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  HiveAuthorizerFactory authorizerFactory = new SQLStdHiveAuthorizerFactory();
  // The test's contract is that authorizer creation completes without
  // throwing; the returned instance itself is not used, so the previously
  // unused local has been dropped.
  authorizerFactory.createHiveAuthorizer(null, processedConf,
      new HadoopDefaultAuthenticator(), getCLISessionCtx());
}