/**
 * Creates the table with the given object.
 *
 * @param tbl
 *          a table object
 * @throws HiveException
 */
public void createTable(Table tbl) throws HiveException {
  createTable(tbl, false);
}
/**
 * Creates the table with the given object.
 *
 * @param tbl
 *          a table object
 * @param ifNotExists
 *          if true, ignore the error if the table already exists
 * @throws HiveException
 */
public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
  createTable(tbl, ifNotExists, null, null, null, null, null, null);
}
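// Usage sketch (illustrative, not from the Hive sources): create a table
// from a Table object, ignoring the request if it already exists. The
// database name, table name, and formats below are assumptions chosen for
// the example.
//
//   Hive db = Hive.get(conf);
//   Table tbl = new Table("default", "example_tbl");
//   tbl.setInputFormatClass(TextInputFormat.class);
//   tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
//   db.createTable(tbl, true);   // ifNotExists = true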
/**
 * Creates the table metadata and the directory for the table data.
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          Class of the input format of the table data file
 * @param fileOutputFormat
 *          Class of the output format of the table data file
 * @throws HiveException
 *           thrown if the args are invalid or if the metadata or the data
 *           directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
    List<String> partCols, Class<? extends InputFormat> fileInputFormat,
    Class<?> fileOutputFormat) throws HiveException {
  this.createTable(tableName, columns, partCols, fileInputFormat,
      fileOutputFormat, -1, null);
}
/**
 * Creates the table metadata and the directory for the table data.
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          Class of the input format of the table data file
 * @param fileOutputFormat
 *          Class of the output format of the table data file
 * @param bucketCount
 *          number of buckets that each partition (or the table itself) should
 *          be divided into
 * @param bucketCols
 *          columns on which the table (or each partition) is bucketed
 * @throws HiveException
 *           thrown if the args are invalid or if the metadata or the data
 *           directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
    List<String> partCols, Class<? extends InputFormat> fileInputFormat,
    Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols)
    throws HiveException {
  createTable(tableName, columns, partCols, fileInputFormat, fileOutputFormat,
      bucketCount, bucketCols, null);
}
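// Usage sketch (illustrative, not from the Hive sources): create a
// partitioned, bucketed table through the column-list overload above. The
// table name, columns, and bucket count are assumptions chosen for the
// example.
//
//   Hive db = Hive.get(conf);
//   db.createTable("example_tbl",
//       Arrays.asList("key", "value"),   // data columns
//       Arrays.asList("ds"),             // partition keys
//       TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
//       4, Arrays.asList("key"));        // 4 buckets on "key"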
table.setInputFormatClass(format.getInputFormat());
table.setOutputFormatClass(format.getOutputFormat());
db.createTable(table, true);
private Table createPartitionedTable(String dbName, String tableName) throws Exception {
  try {
    hm.dropTable(dbName, tableName);
    hm.createTable(tableName,
        Arrays.asList("key", "value"),  // Data columns.
        Arrays.asList("ds", "hr"),      // Partition columns.
        TextInputFormat.class,
        HiveIgnoreKeyTextOutputFormat.class);
    return hm.getTable(dbName, tableName);
  } catch (Exception exception) {
    fail("Unable to drop and create table "
        + StatsUtils.getFullyQualifiedTableName(dbName, tableName)
        + " because " + StringUtils.stringifyException(exception));
    throw exception;
  }
}
tbl.setParameters(parameters);
createTable(tbl);
@BeforeClass
public static void init() throws Exception {
  queryState = new QueryState.Builder().build();
  conf = queryState.getConf();
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);

  // Create a table so we can work against it
  Hive h = Hive.get(conf);
  List<String> cols = new ArrayList<String>();
  cols.add("a");
  List<String> partCols = new ArrayList<String>();
  partCols.add("ds");
  h.createTable("foo", cols, partCols, OrcInputFormat.class, OrcOutputFormat.class);
  Table t = h.getTable("foo");
  Map<String, String> partSpec = new HashMap<String, String>();
  partSpec.put("ds", "today");
  h.createPartition(t, partSpec);
}
part_cols.add("hr"); try { hm.createTable(tableName, cols, part_cols, TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class); } catch (HiveException e) {
db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params); db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params); Table u = db.getTable("U");
ts.add("table2"); Table tbl1 = createTestTable(dbName, ts.get(0)); hm.createTable(tbl1); hm.createTable(tbl2);
  hm.createTable(tbl);
} catch (HiveException e) {
  System.err.println(StringUtils.stringifyException(e));
@Test
public void testDataDeletion() throws HiveException, IOException, TException {
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db);

  Table table = new Table(dbName, tableName);
  table.setDbName(dbName);
  table.setInputFormatClass(TextInputFormat.class);
  table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  table.setPartCols(partCols);
  hive.createTable(table);
  table = hive.getTable(dbName, tableName);

  Path fakeTable = table.getPath().getParent().suffix(
      Path.SEPARATOR + "faketable");
  fs = fakeTable.getFileSystem(hive.getConf());
  fs.mkdirs(fakeTable);
  fs.deleteOnExit(fakeTable);

  Path fakePart = new Path(table.getDataLocation().toString(),
      "fakepartition=fakevalue");
  fs.mkdirs(fakePart);
  fs.deleteOnExit(fakePart);

  hive.dropTable(dbName, tableName, true, true);
  assertFalse(fs.exists(fakePart));
  hive.dropDatabase(dbName);
  assertFalse(fs.exists(fakeTable));
}
hive.createTable(table);
result = new CheckResult();
checker.checkMetastore(catName, dbName, null, null, result);
private Table createTestTable() throws HiveException, AlreadyExistsException {
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db, true);

  Table table = new Table(dbName, tableName);
  table.setDbName(dbName);
  table.setInputFormatClass(TextInputFormat.class);
  table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  table.setPartCols(partCols);
  hive.createTable(table);

  table = hive.getTable(dbName, tableName);
  Assert.assertTrue(table.getTTable().isSetId());
  table.getTTable().unsetId();

  for (Map<String, String> partSpec : parts) {
    hive.createPartition(table, partSpec);
  }
  return table;
}