/**
 * Create a Database. Raise an error if a database with the same name
 * already exists.
 *
 * @param db database definition to create
 * @throws AlreadyExistsException if a database with the same name already exists
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // ifNotExist=false: a duplicate name surfaces as AlreadyExistsException
  createDatabase(db, false);
}
/**
 * Create a Database. Raise an error if a database with the same name
 * already exists.
 *
 * @param db database definition to create
 * @throws AlreadyExistsException if a database with the same name already exists
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // Delegates with ifNotExist=false so duplicates raise rather than no-op.
  createDatabase(db, false);
}
// Create the test database; the second argument (true) requests
// create-if-not-exists semantics, so an existing database is tolerated.
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db, true);
/**
 * Verifies that dropping a table/database also removes stray directories
 * that were never registered in the metastore.
 */
@Test
public void testDataDeletion() throws HiveException, IOException, TException {
  // Fresh database plus a partitioned table inside it.
  Database database = new Database();
  database.setName(dbName);
  hive.createDatabase(database);

  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);
  tbl = hive.getTable(dbName, tableName);

  // Plant a stray sibling directory next to the table's location...
  Path strayTableDir = tbl.getPath().getParent().suffix(Path.SEPARATOR + "faketable");
  fs = strayTableDir.getFileSystem(hive.getConf());
  fs.mkdirs(strayTableDir);
  fs.deleteOnExit(strayTableDir);

  // ...and a stray partition directory under the table itself.
  Path strayPartDir = new Path(tbl.getDataLocation().toString(), "fakepartition=fakevalue");
  fs.mkdirs(strayPartDir);
  fs.deleteOnExit(strayPartDir);

  // Dropping the table must purge the unregistered partition directory,
  // and dropping the database must purge the unregistered table directory.
  hive.dropTable(dbName, tableName, true, true);
  assertFalse(fs.exists(strayPartDir));
  hive.dropDatabase(dbName);
  assertFalse(fs.exists(strayTableDir));
}
/** * Create a Database * @param db * @param crtDb * @return Always returns 0 * @throws HiveException */ private int createDatabase(Hive db, CreateDatabaseDesc crtDb) throws HiveException { Database database = new Database(); database.setName(crtDb.getName()); database.setDescription(crtDb.getComment()); database.setLocationUri(crtDb.getLocationUri()); database.setParameters(crtDb.getDatabaseProperties()); database.setOwnerName(SessionState.getUserFromAuthenticator()); database.setOwnerType(PrincipalType.USER); try { makeLocationQualified(database); db.createDatabase(database, crtDb.getIfNotExists()); } catch (AlreadyExistsException ex) { //it would be better if AlreadyExistsException had an errorCode field.... throw new HiveException(ex, ErrorMsg.DATABASE_ALREADY_EXISTS, crtDb.getName()); } return 0; }
/** * Create a Database * @param db * @param crtDb * @return Always returns 0 * @throws HiveException */ private int createDatabase(Hive db, CreateDatabaseDesc crtDb) throws HiveException { Database database = new Database(); database.setName(crtDb.getName()); database.setDescription(crtDb.getComment()); database.setLocationUri(crtDb.getLocationUri()); database.setParameters(crtDb.getDatabaseProperties()); database.setOwnerName(SessionState.getUserFromAuthenticator()); database.setOwnerType(PrincipalType.USER); try { if (!Utilities.isDefaultNameNode(conf)) { makeLocationQualified(database); } db.createDatabase(database, crtDb.getIfNotExists()); } catch (AlreadyExistsException ex) { //it would be better if AlreadyExistsException had an errorCode field.... throw new HiveException(ex, ErrorMsg.DATABSAE_ALREADY_EXISTS, crtDb.getName()); } return 0; }
/**
 * Builds the test database, a partitioned table, and its partitions,
 * returning the table with its metastore-assigned id cleared.
 */
private Table createTestTable() throws HiveException, AlreadyExistsException {
  // Make sure the test database exists (ignore if already present).
  Database testDb = new Database();
  testDb.setName(dbName);
  hive.createDatabase(testDb, true);

  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);

  // Re-fetch so the metastore-assigned id is visible, then clear it.
  tbl = hive.getTable(dbName, tableName);
  Assert.assertTrue(tbl.getTTable().isSetId());
  tbl.getTTable().unsetId();

  for (Map<String, String> spec : parts) {
    hive.createPartition(tbl, spec);
  }
  return tbl;
}
/**
 * Create a Database. Raise an error if a database with the same name
 * already exists.
 *
 * @param db database definition to create
 * @throws AlreadyExistsException if a database with the same name already exists
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // ifNotExist=false: a duplicate name surfaces as AlreadyExistsException
  createDatabase(db, false);
}
/**
 * Create a Database. Raise an error if a database with the same name
 * already exists.
 *
 * @param db database definition to create
 * @throws AlreadyExistsException if a database with the same name already exists
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // Delegates with ifNotExist=false so duplicates raise rather than no-op.
  createDatabase(db, false);
}
/**
 * Create a database in Hive metastore.
 *
 * @param sessionid session handle; used to acquire the session's Hive configuration
 * @param database  database name
 * @param ignore    ignore if database already exists
 * @throws LensException wrapping any {@link AlreadyExistsException} or
 *         {@link HiveException} raised by the metastore
 */
@Override
public void createDatabase(LensSessionHandle sessionid, String database, boolean ignore)
  throws LensException {
  try (SessionContext ignored = new SessionContext(sessionid)) {
    Database db = new Database();
    db.setName(database);
    Hive.get(getSession(sessionid).getHiveConf()).createDatabase(db, ignore);
  } catch (AlreadyExistsException | HiveException e) {
    throw new LensException(e);
  }
  // SLF4J parameterized logging: avoids eager string concatenation and
  // matches the logging idiom used elsewhere in the project.
  log.info("Database created {}", database);
}
/**
 * Setup: start a Hive session, create a database named after this test
 * class, and make it the session's current database.
 *
 * @throws AlreadyExistsException the already exists exception
 * @throws HiveException the hive exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@BeforeTest
public void setup() throws AlreadyExistsException, HiveException, IOException {
  SessionState.start(conf);
  Hive metastore = Hive.get(conf);
  Database testDb = new Database();
  testDb.setName(TestDBStorage.class.getSimpleName());
  metastore.createDatabase(testDb);
  SessionState.get().setCurrentDatabase(TestDBStorage.class.getSimpleName());
}
/**
 * Configure hive tables: drop any leftover statistics tables/database,
 * recreate the statistics database, and create the event table.
 *
 * @return the configuration used to talk to Hive
 */
private HiveConf configureHiveTables() {
  assertNotNull(System.getProperty("hadoop.bin.path"));
  HiveConf conf = LensServerConf.getHiveConf();
  try {
    Hive hive = getHiveClient(conf);
    Database database = new Database();
    database.setName(LensConfConstants.DEFAULT_STATISTICS_DATABASE);
    // Clean slate: drop tables first, then the database (all best-effort).
    hive.dropTable(LensConfConstants.DEFAULT_STATISTICS_DATABASE, EVENT_NAME, true, true);
    hive.dropTable(LensConfConstants.DEFAULT_STATISTICS_DATABASE,
      QueryExecutionStatistics.class.getSimpleName(), true, true);
    hive.dropDatabase(LensConfConstants.DEFAULT_STATISTICS_DATABASE, true, true);
    hive.createDatabase(database);
    Table t = getHiveTable();
    hive.createTable(t);
  } catch (Exception e) {
    // Surface the cause instead of a bare fail() so the reason for the
    // setup failure is visible in the test report.
    Assert.fail("Failed to configure hive tables: " + e.getMessage());
  }
  return conf;
}
/**
 * Before test.
 *
 * Boots a Hive session for "testuser", creates the test database and
 * switches to it, then runs a USE statement through the driver with
 * insert-overwrite rewriting temporarily disabled.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void beforeTest() throws Exception {
  // Check if hadoop property set
  System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
  assertNotNull(System.getProperty("hadoop.bin.path"));
  createDriver();
  ss = new SessionState(hiveConf, "testuser");
  SessionState.start(ss);
  Hive client = Hive.get(hiveConf);
  Database database = new Database();
  database.setName(dataBase);
  // ifNotExist=true tolerates a database left over from a previous run.
  client.createDatabase(database, true);
  SessionState.get().setCurrentDatabase(dataBase);
  sessionid = SessionState.get().getSessionId();
  // Run USE with insert-overwrite rewriting off, then restore the flags
  // the remaining tests expect. (Constant name OVEWRITE is as declared
  // in LensConfConstants.)
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
  QueryContext context = createContext("USE " + dataBase, this.queryConf);
  driver.execute(context);
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
  driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
}
/**
 * Create a Database from the given descriptor.
 *
 * @param db    Hive metastore client
 * @param crtDb create-database descriptor (name, comment, location, properties)
 * @return Always returns 0
 * @throws HiveException          on metastore failure
 * @throws AlreadyExistsException if the database exists and IF NOT EXISTS
 *                                was not specified
 */
private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
    throws HiveException, AlreadyExistsException {
  Database target = new Database();
  target.setName(crtDb.getName());
  target.setDescription(crtDb.getComment());
  target.setLocationUri(crtDb.getLocationUri());
  target.setParameters(crtDb.getDatabaseProperties());
  db.createDatabase(target, crtDb.getIfNotExists());
  return 0;
}
/**
 * Class-level setup: starts a session, wires the mock authorizer, creates
 * the test database, and defines the cubes/dimensions used by the tests.
 * Order matters: SessionState.start must precede Hive.get, and the client
 * must exist before its conf flags are toggled.
 */
@BeforeClass
public static void setup() throws HiveException, AlreadyExistsException, LensException {
  SessionState.start(conf);
  // Install the mock authorizer before any metastore operations run.
  conf.set(LensConfConstants.AUTHORIZER_CLASS, "org.apache.lens.cube.parse.MockAuthorizer");
  LensAuthorizer.get().init(conf);
  Database database = new Database();
  database.setName(TestCubeMetastoreClient.class.getSimpleName());
  Hive.get(conf).createDatabase(database);
  SessionState.get().setCurrentDatabase(TestCubeMetastoreClient.class.getSimpleName());
  client = CubeMetastoreClient.getInstance(conf);
  // Enable schema authorization checks and group-based authorization for
  // the tests; the session advertises the test user group.
  client.getConf().setBoolean(LensConfConstants.ENABLE_METASTORE_SCHEMA_AUTHORIZATION_CHECK, true);
  client.getConf().setBoolean(LensConfConstants.USER_GROUPS_BASED_AUTHORIZATION, true);
  SessionState.getSessionConf().set(LensConfConstants.SESSION_USER_GROUPS, "lens-auth-test1");
  defineCube(CUBE_NAME, CUBE_NAME_WITH_PROPS, DERIVED_CUBE_NAME, DERIVED_CUBE_NAME_WITH_PROPS);
  defineUberDims();
}
/**
 * Per-test setup: ensures the test database exists, then builds a Lens ML
 * client pointed at the locally started server.
 */
@BeforeTest
public void setUp() throws Exception {
  super.setUp();
  // Create the test database if it is not already present.
  Hive metastore = Hive.get(new HiveConf());
  Database testDb = new Database();
  testDb.setName(TEST_DB);
  metastore.createDatabase(testDb, true);

  LensClientConfig clientConfig = new LensClientConfig();
  clientConfig.setLensDatabase(TEST_DB);
  clientConfig.set(LensConfConstants.SERVER_BASE_URL,
    "http://localhost:" + getTestPort() + "/lensapi");
  LensClient client = new LensClient(clientConfig);
  mlClient = new LensMLClient(client);
}
/**
 * Per-test setup: provisions the test database and connects an ML client
 * to the embedded Lens server.
 */
@BeforeTest
public void setUp() throws Exception {
  super.setUp();
  // ifNotExist=true: reuse the database when it already exists.
  Database testDatabase = new Database();
  testDatabase.setName(TEST_DB);
  Hive.get(new HiveConf()).createDatabase(testDatabase, true);

  LensClientConfig cfg = new LensClientConfig();
  cfg.setLensDatabase(TEST_DB);
  cfg.set(LensConfConstants.SERVER_BASE_URL,
    "http://localhost:" + getTestPort() + "/lensapi");
  mlClient = new LensMLClient(new LensClient(cfg));
}
/**
 * (Re)creates the named test database and populates it with cube/fact
 * definitions and partitions loaded from XML. Any existing database of the
 * same name is dropped first (cascade).
 *
 * @param conf   Hive configuration to use
 * @param dbName database to recreate and populate
 * @throws Exception re-thrown after logging if any creation step fails
 */
public void createSources(HiveConf conf, String dbName) throws Exception {
  try {
    Database database = new Database();
    database.setName(dbName);
    // Drop then recreate so every run starts from a clean database.
    Hive.get(conf).dropDatabase(dbName, true, true, true);
    Hive.get(conf).createDatabase(database);
    SessionState.get().setCurrentDatabase(dbName);
    CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
    createFromXML(client);
    assertTestFactTimelineClass(client);
    createCubeCheapFactPartitions(client);
    // commenting this as the week date format throws IllegalPatternException
    // createCubeFactWeekly(client);
    createTestFact2Partitions(client);
    createTestFact2RawPartitions(client);
    createBaseCubeFactPartitions(client);
    createSummaryPartitions(client);
    // dump(client);
  } catch (Exception exc) {
    log.error("Exception while creating sources.", exc);
    throw exc;
  }
}

private static final StrSubstitutor GREGORIAN_SUBSTITUTOR = new StrSubstitutor(new StrLookup<String>() {