/**
 * Create a new table in the metastore.
 *
 * <p>Delegates to the two-argument {@code createTable} overload, passing {@code null}
 * as the second argument (presumably an environment context — the overload itself is
 * not visible in this chunk).
 *
 * @param tbl the table definition to create
 * @throws AlreadyExistsException if a table with this name already exists
 * @throws InvalidObjectException if the table definition is invalid
 * @throws MetaException on a metastore-side failure
 * @throws NoSuchObjectException if a referenced object (e.g. the database) does not exist
 * @throws TException on Thrift transport/protocol errors
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
 */
@Override
public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectException,
    MetaException, NoSuchObjectException, TException {
  createTable(tbl, null);
}
/**
 * Test helper: creates a partitioned table (partition columns {@code dt}, {@code blurb})
 * backed by RCFile input/output formats and the LazySimpleSerDe, then asserts the table
 * is visible through the client. The table/database names come from enclosing-class
 * fields {@code tableName}/{@code dbName}.
 *
 * @param hmsc metastore client used to create and verify the table
 * @param enablePartitionGrouping value written to the
 *        {@code hive.hcatalog.partition.spec.grouping.enabled} table parameter
 * @throws Exception on any metastore failure
 */
private static void createTable(HiveMetaStoreClient hmsc, boolean enablePartitionGrouping)
    throws Exception {
  List<FieldSchema> dataColumns = new ArrayList<>();
  dataColumns.add(new FieldSchema("foo", "string", ""));
  dataColumns.add(new FieldSchema("bar", "string", ""));

  List<FieldSchema> partitionColumns = new ArrayList<>();
  partitionColumns.add(new FieldSchema("dt", "string", ""));
  partitionColumns.add(new FieldSchema("blurb", "string", ""));

  SerDeInfo serde = new SerDeInfo("LBCSerDe",
      "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", new HashMap<>());
  StorageDescriptor sd = new StorageDescriptor(dataColumns, null,
      "org.apache.hadoop.hive.ql.io.RCFileInputFormat",
      "org.apache.hadoop.hive.ql.io.RCFileOutputFormat",
      false, 0, serde, null, null, null);

  Map<String, String> tableParams = new HashMap<>();
  // String.valueOf(boolean) yields exactly "true"/"false".
  tableParams.put("hive.hcatalog.partition.spec.grouping.enabled",
      String.valueOf(enablePartitionGrouping));

  Table table = new Table(tableName, dbName, "", 0, 0, 0, sd, partitionColumns,
      tableParams, "", "", "");
  hmsc.createTable(table);

  Assert.assertTrue("Table " + dbName + "." + tableName + " does not exist",
      hmsc.tableExists(dbName, tableName));
}
/**
 * Test helper: creates a partitioned ColumnarSerDe/RCFile table named {@code tableName}
 * in database {@code DATABASE}, with columns taken from {@code ColumnHolder.colMapping}
 * and partition keys from {@code ColumnHolder.partitionCols}, then applies the given
 * POSIX-style permission string to the table's warehouse directory.
 *
 * @param tableName name of the table to create
 * @param tablePerm permission string applied to the table directory (e.g. "755")
 * @throws Exception on metastore or filesystem failure
 */
private static void createTable(String tableName, String tablePerm) throws Exception {
  SerDeInfo serde = new SerDeInfo();
  serde.setName(tableName);
  Map<String, String> serdeParams = new HashMap<String, String>();
  serdeParams.put(serdeConstants.SERIALIZATION_FORMAT, "1");
  serde.setParameters(serdeParams);
  serde.setSerializationLib(
      org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe.class.getName());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(ColumnHolder.colMapping.get(tableName));
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat.class.getName());
  sd.setOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat.class.getName());
  sd.setSerdeInfo(serde);

  Table tbl = new Table();
  tbl.setDbName(DATABASE);
  tbl.setTableName(tableName);
  tbl.setSd(sd);
  tbl.setPartitionKeys(ColumnHolder.partitionCols);

  hmsc.createTable(tbl);

  // Apply the requested permissions to the freshly created table directory.
  Path tableDir = new Path(warehousedir, tableName);
  FileSystem fs = tableDir.getFileSystem(hiveConf);
  fs.setPermission(tableDir, new FsPermission(tablePerm));
}
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
client.createTable(tbl);
// Fetch the table back to learn the location the metastore assigned to it.
Path tblPath = new Path(client.getTable(dbName, tblName).getSd().getLocation());
// Create a partition-style subdirectory under the table location and assert it succeeded.
assertTrue(tblPath.getFileSystem(hiveConf).mkdirs(new Path(tblPath, "colname=p1")));
// NOTE(review): fragment — the enclosing test method is not visible in this chunk.
// Verify the preceding database event reported success before continuing.
assert dbEvent.getStatus();
msc.createTable(table, envContext);
// Each metastore operation is expected to append one entry to the notification list.
listSize++;
assertEquals(notifyList.size(), listSize);
private void createTable(String dbName, String tableName) throws Exception { String databaseName = (dbName == null) ? Warehouse.DEFAULT_DATABASE_NAME : dbName; try { msc.dropTable(databaseName, tableName); } catch (Exception e) { } // can fail with NoSuchObjectException Table tbl = new Table(); tbl.setDbName(databaseName); tbl.setTableName(tableName); tbl.setTableType("MANAGED_TABLE"); StorageDescriptor sd = new StorageDescriptor(); sd.setCols(getTableColumns()); tbl.setPartitionKeys(getPartitionKeys()); tbl.setSd(sd); sd.setBucketCols(new ArrayList<String>(2)); sd.setSerdeInfo(new SerDeInfo()); sd.getSerdeInfo().setName(tbl.getTableName()); sd.getSerdeInfo().setParameters(new HashMap<String, String>()); sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.getSerdeInfo().setSerializationLib(ColumnarSerDe.class.getName()); sd.setInputFormat(RCFileInputFormat.class.getName()); sd.setOutputFormat(RCFileOutputFormat.class.getName()); Map<String, String> tableParams = new HashMap<String, String>(); tbl.setParameters(tableParams); msc.createTable(tbl); }
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
client.createTable(tbl);
// add_partition returns the partition object as stored by the metastore.
retp = client.add_partition(part);
// Assumes partPath was derived earlier from the partition location — not visible here.
assertTrue(fs.exists(partPath));
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
msc.createTable(tbl);
// Resolve the default warehouse path the table should have been created under.
Database db = Hive.get(hcatConf).getDatabase(dbName);
Path dfsPath = clientWH.getDefaultTablePath(db, tblName);
/**
 * Verifies that creating a table with a client-assigned id is rejected:
 * the metastore must raise {@link InvalidObjectException} whose message
 * mentions both the id restriction and the table name.
 */
@Test
public void testCreateTableSettingId() throws Exception {
  final String db = "createDb";
  final String table = "createTbl";

  // Start from a clean slate, then create the target database.
  client.dropTable(db, table);
  silentDropDatabase(db);
  new DatabaseBuilder().setName(db).create(client, conf);

  Table tbl = new TableBuilder()
      .setDbName(db)
      .setTableName(table)
      .addCol("foo", "string")
      .addCol("bar", "string")
      .build(conf);
  // Ids are assigned by the metastore; setting one client-side must fail.
  tbl.setId(1);

  try {
    client.createTable(tbl);
    Assert.fail("An error should happen when setting the id to create a table");
  } catch (InvalidObjectException e) {
    Assert.assertTrue(e.getMessage().contains("Id shouldn't be set"));
    Assert.assertTrue(e.getMessage().contains(table));
  }
}
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
// Attach the (previously built) parameter map before creating the table.
tbl.setParameters(tableParams);
client.createTable(tbl);
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
// Give the view's serde an empty (non-null) parameter map before creation.
viewSd.getSerdeInfo().setParameters(new HashMap<>());
client.createTable(view);
// NOTE(review): fragment — excerpted mid-try/catch; the enclosing try blocks and
// method are not visible here, so the braces below are intentionally unbalanced.
// The pattern records whether createTable raised InvalidObjectException.
client.createTable(tbl); } catch (InvalidObjectException ex) { failed = true; boolean failChecker = false; try { client.createTable(tbl); } catch (InvalidObjectException ex) { failChecker = true; client.createTable(tbl);
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
client.createTable(tbl2);
if (isThriftClient) {
  // Re-fetch so tbl2 reflects server-side defaults filled in during creation.
  tbl2 = client.getTable(tbl2.getDbName(), tbl2.getTableName());
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
// Point the copied storage descriptor at the target table's location by
// substituting the table name inside the source location string.
targetTableSd.setLocation(
    targetTableSd.getLocation().replace(
        tableName, targetTableName));
hmsc.createTable(targetTable);
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
client.createTable(tbl);
// Re-fetch so tbl reflects what the metastore actually stored.
tbl = client.getTable(dbName, tblName);
// NOTE(review): fragment — the trailing "};" closes an anonymous class
// (presumably a Callable<Void>) whose declaration is not visible here.
@Override
public Void call() throws TException {
  client.createTable(tbl);
  return null;
} };
/**
 * Create a new table in the metastore.
 *
 * <p>Delegates to the two-argument {@code createTable} overload, passing {@code null}
 * as the second argument (presumably an environment context — the overload itself is
 * not visible in this chunk).
 *
 * @param tbl the table definition to create
 * @throws AlreadyExistsException if a table with this name already exists
 * @throws InvalidObjectException if the table definition is invalid
 * @throws MetaException on a metastore-side failure
 * @throws NoSuchObjectException if a referenced object (e.g. the database) does not exist
 * @throws TException on Thrift transport/protocol errors
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
 */
@Override
public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectException,
    MetaException, NoSuchObjectException, TException {
  createTable(tbl, null);
}
/**
 * Create a new table in the metastore.
 *
 * <p>Delegates to the two-argument {@code createTable} overload, passing {@code null}
 * as the second argument (presumably an environment context — the overload itself is
 * not visible in this chunk).
 *
 * @param tbl the table definition to create
 * @throws AlreadyExistsException if a table with this name already exists
 * @throws InvalidObjectException if the table definition is invalid
 * @throws MetaException on a metastore-side failure
 * @throws NoSuchObjectException if a referenced object (e.g. the database) does not exist
 * @throws TException on Thrift transport/protocol errors
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
 */
@Override
public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectException,
    MetaException, NoSuchObjectException, TException {
  createTable(tbl, null);
}
/**
 * Create a new table in the metastore.
 *
 * <p>Delegates to the two-argument {@code createTable} overload, passing {@code null}
 * as the second argument (presumably an environment context — the overload itself is
 * not visible in this chunk). NOTE(review): unlike its siblings, this variant carries
 * no {@code @Override} — confirm whether it implements an interface method.
 *
 * @param tbl the table definition to create
 * @throws AlreadyExistsException if a table with this name already exists
 * @throws InvalidObjectException if the table definition is invalid
 * @throws MetaException on a metastore-side failure
 * @throws NoSuchObjectException if a referenced object (e.g. the database) does not exist
 * @throws TException on Thrift transport/protocol errors
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
 */
public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectException,
    MetaException, NoSuchObjectException, TException {
  createTable(tbl, null);
}
// NOTE(review): fragment — the trailing "};" closes an anonymous class
// (presumably a Callable<Void>) whose declaration is not visible here.
@Override
public Void call() throws TException {
  client.createTable(tbl);
  return null;
} };