/**
 * Fetches a table by fully-qualified (catalog, db, table) name while honoring the
 * supplied valid write-id list. Column statistics are not requested on this path.
 */
@Override
public Table getTable(String catName, String dbName, String tableName, String validWriteIdList)
    throws TException {
  // Delegate to the five-argument overload with getColStats == false.
  return getTable(catName, dbName, tableName, validWriteIdList, false);
}
/**
 * Verifies that table filtering hides every table in DBNAME1 from the client.
 *
 * @param filterAtServer true when the filter hook runs on the HMS server side
 */
protected void testFilterForTables(boolean filterAtServer) throws Exception {
  // Skip this call when testing filter hook at HMS server because HMS server calls authorization
  // API for getTable(), and does not call filter hook
  if (!filterAtServer) {
    try {
      client.getTable(DBNAME1, TAB1);
      fail("getTable() should fail with blocking mode");
    } catch (NoSuchObjectException e) {
      // Expected: blocking-mode filtering surfaces the hidden table as "not found".
    }
  }

  // All listing variants must come back empty once filtering is in effect.
  assertEquals(0, client.getTables(DBNAME1, "*").size());
  assertEquals(0, client.getAllTables(DBNAME1).size());
  assertEquals(0, client.getTables(DBNAME1, TAB2).size());
}
/**
 * Catalog-scoped table lookup; delegates with column statistics disabled.
 */
@Override
public Table getTable(String catName, String dbName, String tableName) throws TException {
  return getTable(catName, dbName, tableName, false);
}
/**
 * Disable filtering at HMS client.
 * By default, the HMS server side filtering is disabled, so we can see HMS client
 * filtering behavior.
 * @throws Exception
 */
@Test
public void testHMSClientWithoutFilter() throws Exception {
  MetastoreConf.setBoolVar(conf, ConfVars.METASTORE_CLIENT_FILTER_ENABLED, false);
  DBNAME1 = "db_testHMSClientWithoutFilter_1";
  DBNAME2 = "db_testHMSClientWithoutFilter_2";
  // Shared setup helper (name typo "creatEnv" is in the helper's declaration elsewhere).
  creatEnv(conf);

  // With client-side filtering off, everything set up by creatEnv is visible.
  assertNotNull(client.getTable(DBNAME1, TAB1));
  assertEquals(2, client.getTables(DBNAME1, "*").size());
  assertEquals(2, client.getAllTables(DBNAME1).size());
  assertEquals(1, client.getTables(DBNAME1, TAB2).size());
  assertEquals(0, client.getAllTables(DBNAME2).size());

  assertNotNull(client.getDatabase(DBNAME1));
  assertEquals(2, client.getDatabases("*testHMSClientWithoutFilter*").size());
  assertEquals(1, client.getDatabases(DBNAME1).size());

  assertNotNull(client.getPartition(DBNAME1, TAB2, "name=value1"));
  assertEquals(1, client.getPartitionsByNames(DBNAME1, TAB2, Lists.newArrayList("name=value1")).size());
}
/**
 * Resolves a table, routing default-catalog lookups through the local
 * (dbName, tableName) overload and everything else to the parent client.
 */
@Override
public org.apache.hadoop.hive.metastore.api.Table getTable(String catName, String dbName,
    String tableName, boolean getColStats) throws TException {
  // Non-default catalogs go straight to the underlying client.
  if (!DEFAULT_CATALOG_NAME.equals(catName)) {
    return super.getTable(catName, dbName, tableName, getColStats);
  }
  return getTable(dbName, tableName, getColStats);
}
/**
 * Looks up a table in the configured default catalog.
 */
@Override
public Table getTable(String dbname, String name) throws TException {
  return getTable(getDefaultCatalog(conf), dbname, name);
}
/**
 * Looks up a table in the configured default catalog, optionally with column statistics.
 */
@Override
public Table getTable(String dbname, String name, boolean getColumnStats) throws TException {
  return getTable(getDefaultCatalog(conf), dbname, name, getColumnStats);
}
@Override public org.apache.hadoop.hive.metastore.api.Table getTable(String dbname, String name) throws MetaException, TException, NoSuchObjectException { // First check temp tables org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbname, name); if (table != null) { return deepCopy(table); // Original method used deepCopy(), do the same here. } // Try underlying client return super.getTable(dbname, name); }
@Override public org.apache.hadoop.hive.metastore.api.Table getTable(String dbname, String name, boolean getColStats) throws MetaException, TException, NoSuchObjectException { // First check temp tables org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbname, name); if (table != null) { return deepCopy(table); // Original method used deepCopy(), do the same here. } // Try underlying client return super.getTable(MetaStoreUtils.getDefaultCatalog(conf), dbname, name, getColStats); }
/**
 * Returns the row count for the given Hive table, read from the table's
 * basic statistics rather than by scanning data.
 */
@Override
public long getHiveTableRows(String database, String tableName) throws Exception {
  Table metastoreTable = getMetaStoreClient().getTable(database, tableName);
  org.apache.hadoop.hive.ql.metadata.Table qlTable =
      new org.apache.hadoop.hive.ql.metadata.Table(metastoreTable);
  return getBasicStatForTable(qlTable, StatsSetupConst.ROW_COUNT);
}
/**
 * Drops any leftover test tables and recreates each one with its configured permissions.
 * (Method name typo "initalize" is kept intentionally — callers reference it.)
 */
private static void initalizeTables() throws Exception {
  // Best-effort cleanup: a missing table is not an error.
  for (String name : tableNames) {
    try {
      if (hmsc.getTable(DATABASE, name) != null) {
        hmsc.dropTable(DATABASE, name);
      }
    } catch (NoSuchObjectException ignored) {
      // Table never existed; nothing to drop.
    }
  }
  // Recreate every table, pairing each name with its permission entry by index.
  for (int i = 0; i < tableNames.length; i++) {
    createTable(tableNames[i], tablePerms[i]);
  }
}
/**
 * Verifies that "alter table ... set fileformat" with explicit RCFile classes leaves the
 * table's input/output formats as RCFile both before and after the alter.
 */
@Test
public void testAlterTableSetFF() throws Exception {
  hcatDriver.run("drop table junit_sem_analysis");
  hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");

  Table created = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  assertEquals(RCFileInputFormat.class.getName(), created.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), created.getSd().getOutputFormat());

  hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
      "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
  hcatDriver.run("desc extended junit_sem_analysis");

  // Formats must be unchanged after the alter.
  Table altered = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  assertEquals(RCFileInputFormat.class.getName(), altered.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), altered.getSd().getOutputFormat());

  hcatDriver.run("drop table junit_sem_analysis");
}
@Test public void testAlterTableRename() throws Exception { hcatDriver.run("drop table oldname"); hcatDriver.run("drop table newname"); hcatDriver.run("create table oldname (a int)"); Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, "oldname"); assertTrue("The old table location is: " + tbl.getSd().getLocation(), tbl.getSd().getLocation().contains("oldname")); hcatDriver.run("alter table oldname rename to newNAME"); tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, "newname"); // since the oldname table is not under its database (See HIVE-15059), the renamed oldname table will keep // its location after HIVE-14909. I changed to check the existence of the newname table and its name instead // of verifying its location // assertTrue(tbl.getSd().getLocation().contains("newname")); assertTrue(tbl != null); assertTrue(tbl.getTableName().equalsIgnoreCase("newname")); hcatDriver.run("drop table newname"); }
/**
 * Alters a single partition's file format and checks that the table keeps its TEXTFILE
 * formats while the altered partition switches to RCFile.
 */
@Test
public void testAlterTblFFpart() throws Exception {
  driver.run("drop table junit_sem_analysis");
  driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
  driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
  hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");

  // The table-level storage descriptor is untouched by a partition-level alter.
  Table table = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  assertEquals(TextInputFormat.class.getName(), table.getSd().getInputFormat());
  assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), table.getSd().getOutputFormat());

  // The altered partition, however, now uses RCFile.
  List<String> partitionValues = new ArrayList<String>(1);
  partitionValues.add("2010-10-10");
  Partition partition = client.getPartition(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME, partitionValues);
  assertEquals(RCFileInputFormat.class.getName(), partition.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), partition.getSd().getOutputFormat());

  hcatDriver.run("drop table junit_sem_analysis");
}
/**
 * Creates a table with explicit input/output formats plus HCat driver info and verifies
 * the stored descriptor reflects the requested RCFile formats.
 */
@Test
public void testAddDriverInfo() throws Exception {
  hcatDriver.run("drop table junit_sem_analysis");
  // NOTE: 'query' has no local declaration here, so it resolves to a field in scope.
  query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as " +
      "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
      "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
  assertEquals(0, hcatDriver.run(query).getResponseCode());

  // The created table must carry the RCFile input/output formats named in the DDL.
  Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());

  hcatDriver.run("drop table junit_sem_analysis");
}
/**
 * Verifies that an upper-case partition key name ("B") in the DDL is stored
 * lower-cased ("b") in the metastore.
 */
@Test
public void testCreateTblWithLowerCasePartNames() throws Exception {
  driver.run("drop table junit_sem_analysis");
  CommandProcessorResponse resp =
      driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
  // FIX: JUnit's assertEquals takes (expected, actual); the original had the arguments
  // reversed, which produces misleading failure messages.
  assertEquals(0, resp.getResponseCode());
  // FIX: assertNull is the idiomatic form of assertEquals(null, ...).
  assertNull(resp.getErrorMessage());
  Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  assertEquals("Partition key name case problem", "b", tbl.getPartitionKeys().get(0).getName());
  driver.run("drop table junit_sem_analysis");
}
/**
 * Checks that "create table if not exists" is a no-op for an existing table:
 * the single-column schema and RCFile formats from the first create are preserved.
 */
@Test
public void testCreateTableIfNotExists() throws Exception {
  hcatDriver.run("drop table " + TBL_NAME);
  hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");

  Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  List<FieldSchema> cols = tbl.getSd().getCols();
  assertEquals(1, cols.size());
  // FIX: assertEquals instead of assertTrue(x.equals(y)) — reports both values on failure.
  assertEquals(new FieldSchema("a", "int", null), cols.get(0));
  assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());

  // NOTE(review): this literal assumes TBL_NAME == "junit_sem_analysis" — confirm.
  CommandProcessorResponse resp =
      hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
  assertEquals(0, resp.getResponseCode());
  assertNull(resp.getErrorMessage());

  // Re-read: nothing should have changed.
  tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  cols = tbl.getSd().getCols();
  assertEquals(1, cols.size());
  assertEquals(new FieldSchema("a", "int", null), cols.get(0));
  assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
  assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());

  hcatDriver.run("drop table junit_sem_analysis");
}
/**
 * Creates the test tables and caches the table plus its full partition list for the tests.
 * @throws TException on metastore communication failure
 */
@Before
public void setup() throws TException {
  // This is default case with setugi off for both client and server
  client = createClient();
  createTestTables();
  // Cache the complete partition list ((short) -1 == no limit) and the table itself.
  origPartitions = client.listPartitions(dbName, tblName, (short) -1);
  tbl = client.getTable(dbName, tblName);
  // set directSQL to true explicitly
  client.setMetaConf(ConfVars.TRY_DIRECT_SQL.getVarname(), "true");
  client.setMetaConf(ConfVars.TRY_DIRECT_SQL_DDL.getVarname(), "true");
}
/**
 * Creates two tables in a fresh database and verifies each one is assigned a
 * populated, distinct table id.
 */
@Test
public void testCreateAndGetTableWithDriver() throws Exception {
  String dbName = "createDb";
  String tblName = "createTbl";
  // Start from a clean slate.
  client.dropTable(dbName, tblName);
  silentDropDatabase(dbName);
  new DatabaseBuilder()
      .setName(dbName)
      .create(client, conf);

  createTable(dbName, tblName);
  Table first = client.getTable(dbName, tblName);
  Assert.assertTrue(first.isSetId());
  long firstTableId = first.getId();

  createTable(dbName, tblName + "_2");
  Table second = client.getTable(dbName, tblName + "_2");
  Assert.assertTrue(second.isSetId());
  // Ids must be unique across tables.
  Assert.assertNotEquals(firstTableId, second.getId());
}
/**
 * Exercises "replace columns" followed by "add columns" and verifies the resulting
 * schema is exactly the replaced column followed by the added one.
 */
@Test
public void testAddReplaceCols() throws Exception {
  hcatDriver.run("drop table junit_sem_analysis");
  hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");

  // Replace the original (a, c) schema with a single column a1.
  CommandProcessorResponse response =
      hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
  assertEquals(0, response.getResponseCode());

  // Append column d.
  response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
  assertEquals(0, response.getResponseCode());
  assertNull(response.getErrorMessage());

  response = hcatDriver.run("describe extended junit_sem_analysis");
  assertEquals(0, response.getResponseCode());

  // Final schema: exactly [a1 tinyint, d tinyint].
  Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
  List<FieldSchema> cols = tbl.getSd().getCols();
  assertEquals(2, cols.size());
  assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
  assertTrue(cols.get(1).equals(new FieldSchema("d", "tinyint", null)));

  hcatDriver.run("drop table junit_sem_analysis");
}