/**
 * Builds an in-memory test {@link Table} in the "default" database with a
 * generated, unique name ("table" + counter). When {@code isPartitioned} is
 * set, a single "version" partition column of type "String" is attached.
 *
 * @param isPartitioned whether to attach the "version" partition column
 * @return the newly constructed table object (not persisted anywhere)
 */
private Table newTable(boolean isPartitioned) {
  Table table = new Table("default", "table" + nextInput++);
  if (isPartitioned) {
    FieldSchema versionCol = new FieldSchema();
    versionCol.setName("version");
    versionCol.setType("String");
    List<FieldSchema> partitionColumns = new ArrayList<FieldSchema>(1);
    partitionColumns.add(versionCol);
    table.setPartCols(partitionColumns);
  }
  return table;
} }
/**
 * Builds an in-memory test {@link Table} in the "default" database with a
 * generated, unique name, optionally partitioned by a single "version"
 * column, and always flagged as transactional (ACID) via the
 * TABLE_IS_TRANSACTIONAL parameter.
 *
 * @param isPartitioned whether to attach the "version" partition column
 * @return the newly constructed table object (not persisted anywhere)
 */
private Table newTable(boolean isPartitioned) {
  Table table = new Table("default", "table" + nextInput++);
  if (isPartitioned) {
    FieldSchema versionCol = new FieldSchema();
    versionCol.setName("version");
    versionCol.setType("String");
    List<FieldSchema> partitionColumns = new ArrayList<FieldSchema>(1);
    partitionColumns.add(versionCol);
    table.setPartCols(partitionColumns);
  }
  // Mark the table transactional; reuse the existing parameter map when the
  // table already carries one, otherwise start a fresh map.
  Map<String, String> params = table.getParameters();
  if (params == null) {
    params = new HashMap<>();
  }
  params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
  table.setParameters(params);
  return table;
}
tempTableObj.setPartCols(new ArrayList<>());
partKeys.add(new FieldSchema(partName, serdeConstants.STRING_TYPE_NAME, "")); table.setPartCols(partKeys);
/**
 * Verifies cleanup of unregistered data: dropping a table must delete stray
 * directories under the table's location, and dropping the database must
 * delete stray directories under the database's directory.
 */
@Test
public void testDataDeletion() throws HiveException, IOException, TException {
  // Create a fresh database and a partitioned table inside it.
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db);

  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);
  tbl = hive.getTable(dbName, tableName);

  // Plant a directory next to the table that is not a registered table...
  Path strayTableDir = tbl.getPath().getParent().suffix(Path.SEPARATOR + "faketable");
  fs = strayTableDir.getFileSystem(hive.getConf());
  fs.mkdirs(strayTableDir);
  fs.deleteOnExit(strayTableDir);

  // ...and a directory inside the table that is not a registered partition.
  Path strayPartitionDir = new Path(tbl.getDataLocation().toString(), "fakepartition=fakevalue");
  fs.mkdirs(strayPartitionDir);
  fs.deleteOnExit(strayPartitionDir);

  // Dropping the table must purge the unregistered partition directory.
  hive.dropTable(dbName, tableName, true, true);
  assertFalse(fs.exists(strayPartitionDir));

  // Dropping the database must purge the unregistered table directory.
  hive.dropDatabase(dbName);
  assertFalse(fs.exists(strayTableDir));
}
tbl.setPartCols(oldtbl.getPartCols());
tbl.setPartCols(oldtbl.getPartCols());
tbl.setPartCols(getPartCols());
tbl.setPartCols(getPartCols());
tbl.setPartCols(getPartCols());
serdeConstants.STRING_TYPE_NAME, "partition column, date but in string format as date type is not yet supported in QL")); tbl.setPartCols(partCols);
/**
 * Creates the test database (idempotently) and a partitioned test table in
 * it, then registers one partition per spec in {@code parts}.
 *
 * @return the table as re-fetched from the metastore, with its
 *         server-assigned id cleared
 * @throws HiveException on any metastore failure
 * @throws AlreadyExistsException if the table already exists
 */
private Table createTestTable() throws HiveException, AlreadyExistsException {
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db, true);

  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);
  tbl = hive.getTable(dbName, tableName);

  // The metastore assigns an id on creation; assert it is present, then
  // clear it so subsequent operations in the test are id-agnostic.
  Assert.assertTrue(tbl.getTTable().isSetId());
  tbl.getTTable().unsetId();

  for (Map<String, String> spec : parts) {
    hive.createPartition(tbl, spec);
  }
  return tbl;
}
oldview.getTTable().getParameters().putAll(crtView.getTblProps()); oldview.setPartCols(crtView.getPartCols()); if (crtView.getInputFormat() != null) { oldview.setInputFormatClass(crtView.getInputFormat());
oldview.getTTable().getParameters().putAll(crtView.getTblProps()); oldview.setPartCols(crtView.getPartCols()); if (crtView.getInputFormat() != null) { oldview.setInputFormatClass(crtView.getInputFormat()); tbl.setPartCols(crtView.getPartCols());
/**
 * Describes the Hive table backing this event: it lives in the default
 * statistics database, is named after the event, and is partitioned by a
 * single string "dt" column.
 *
 * @return the table descriptor (not persisted here)
 */
private Table getHiveTable() {
  Table statsTable = new Table(LensConfConstants.DEFAULT_STATISTICS_DATABASE, EVENT_NAME);
  LinkedList<FieldSchema> partitionCols = new LinkedList<FieldSchema>();
  partitionCols.add(new FieldSchema("dt", "string", "partCol"));
  statsTable.setPartCols(partitionCols);
  return statsTable;
}
/**
 * Builds the Hive table definition used to persist query execution
 * statistics: one JSON-serialized row per query, text input format,
 * partitioned by a single string "dt" column. The database name comes from
 * configuration, falling back to the default statistics database.
 *
 * @param conf configuration supplying the statistics database name
 * @return the table descriptor (not persisted here)
 */
@Override
public Table getHiveTable(HiveConf conf) {
  String database =
    conf.get(LensConfConstants.STATISTICS_DATABASE_KEY, LensConfConstants.DEFAULT_STATISTICS_DATABASE);
  Table statsTable = new Table(database, this.getClass().getSimpleName());

  // One column per recorded attribute of a finished query.
  LinkedList<FieldSchema> columns = new LinkedList<FieldSchema>();
  columns.add(new FieldSchema("handle", "string", "Query Handle"));
  columns.add(new FieldSchema("userQuery", "string", "User Query before rewrite"));
  columns.add(new FieldSchema("submitter", "string", "submitter"));
  columns.add(new FieldSchema("clusterUser", "string", "Cluster User which will do all operations on hdfs"));
  columns.add(new FieldSchema("sessionId", "string", "Lens Session which ran the query"));
  columns.add(new FieldSchema("submissionTime", "bigint", "Time which query was submitted"));
  columns.add(new FieldSchema("startTime", "bigint", "Timestamp which query was Started"));
  columns.add(new FieldSchema("endTime", "bigint", "Timestamp which query was finished"));
  columns.add(new FieldSchema("result", "string", "path to result of query"));
  columns.add(new FieldSchema("cause", "string", "failure/eror cause if any"));
  columns.add(new FieldSchema("status", "map<string,string>", "status object of the query"));
  columns.add(new FieldSchema("driverStats", "map<string,string>", "driver statistics of the query"));
  statsTable.setFields(columns);

  LinkedList<FieldSchema> partitionCols = new LinkedList<FieldSchema>();
  partitionCols.add(new FieldSchema("dt", "string", "partCol"));
  statsTable.setPartCols(partitionCols);

  statsTable.setSerializationLib(JSonSerde.class.getName());
  try {
    statsTable.setInputFormatClass(TextInputFormat.class.getName());
  } catch (HiveException e) {
    // Best effort: log and fall through with the default input format.
    log.error("Encountered hive exception.", e);
  }
  return statsTable;
}
/**
 * Assembles an EXTERNAL Hive table descriptor for an Avro-backed topic:
 * data located under the topics directory, Avro serde/input/output formats,
 * columns converted from the Connect schema, partition columns supplied by
 * the partitioner, and the Avro schema literal stored as a table parameter.
 *
 * @param database    target Hive database
 * @param tableName   target table name
 * @param schema      Connect schema to convert into Hive columns
 * @param partitioner supplies the partition columns
 * @return the assembled table descriptor (not persisted here)
 * @throws HiveMetaStoreException if the Avro input/output format classes
 *         cannot be resolved
 */
private Table constructAvroTable(String database, String tableName, Schema schema,
                                 Partitioner partitioner) throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");

  // Data lives under <url>/<topicsDir>/<tableName>.
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));

  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }

  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
} }
/**
 * Creates the hive table.
 *
 * Builds and persists a managed table named {@code tableName} with one
 * string column ("col1"), one string partition column ("pcol1"), a default
 * test property, and any caller-supplied parameters (which may override the
 * default).
 *
 * @param tableName  the table name
 * @param parameters extra table properties; may be null or empty
 * @throws HiveException the hive exception
 */
public static void createHiveTable(String tableName, Map<String, String> parameters) throws HiveException {
  List<FieldSchema> columns = new ArrayList<FieldSchema>();
  columns.add(new FieldSchema("col1", "string", ""));

  List<FieldSchema> partitionCols = new ArrayList<FieldSchema>();
  partitionCols.add(new FieldSchema("pcol1", "string", ""));

  // Default property first so caller-supplied parameters can override it.
  Map<String, String> tableParams = new HashMap<String, String>();
  tableParams.put("test.hive.table.prop", "tvalue");
  if (null != parameters && !parameters.isEmpty()) {
    tableParams.putAll(parameters);
  }

  Table tbl = Hive.get().newTable(tableName);
  tbl.setTableType(TableType.MANAGED_TABLE);
  tbl.getTTable().getSd().setCols(columns);
  tbl.setPartCols(partitionCols);
  tbl.getTTable().getParameters().putAll(tableParams);
  Hive.get().createTable(tbl);
}
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException { Table table = new Table(database, tableName); table.setTableType(TableType.EXTERNAL_TABLE); table.getParameters().put("EXTERNAL", "TRUE"); String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName); table.setDataLocation(new Path(tablePath)); table.setSerializationLib(getHiveParquetSerde()); try { table.setInputFormatClass(getHiveParquetInputFormat()); table.setOutputFormatClass(getHiveParquetOutputFormat()); } catch (HiveException e) { throw new HiveMetaStoreException("Cannot find input/output format:", e); } // convert copycat schema schema to Hive columns List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema); table.setFields(columns); table.setPartCols(partitioner.partitionFields()); return table; }
tbl.setPartCols(crtView.getPartCols());