/**
 * Builds an in-memory Hive {@link Table} definition for tests: a
 * SequenceFile-backed table whose rows are Thrift-serialized {@code Complex}
 * records read through {@code ThriftDeserializer}.
 *
 * @param dbName    database the table belongs to
 * @param tableName name of the table
 * @return the populated table descriptor (not yet created in the metastore)
 * @throws HiveException if an input/output format class cannot be resolved
 */
private static Table createTestTable(String dbName, String tableName) throws HiveException {
  Table table = new Table(dbName, tableName);
  // Storage: SequenceFile container format for both reads and writes.
  table.setInputFormatClass(SequenceFileInputFormat.class.getName());
  table.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
  // Row format: Thrift-encoded Complex objects in TBinaryProtocol.
  table.setSerializationLib(ThriftDeserializer.class.getName());
  table.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
  table.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
  return table;
}
format.processStorageFormat("TextFile"); Table table = db.newTable(tableName); table.setSerializationLib(format.getSerde()); List<FieldSchema> fields = new ArrayList<FieldSchema>(); fields.add(new FieldSchema("val", "int", null));
tbl.setSerializationLib(LazySimpleSerDe.class.getName()); tbl.setNumBuckets(bucketCount); tbl.setBucketCols(bucketCols);
format.processStorageFormat("TextFile"); Table table = db.newTable(tableName); table.setSerializationLib(format.getSerde()); List<FieldSchema> fields = new ArrayList<FieldSchema>(); fields.add(new FieldSchema("val", "int", null));
tbl.setSerializationLib(LazySimpleSerDe.class.getName()); tbl.setNumBuckets(bucketCount); tbl.setBucketCols(bucketCols);
try { tempTableObj.setInputFormatClass(inputFormatClassName); tempTableObj.setSerializationLib(serDeClassName); } catch (HiveException e) { throw new SemanticException("Load Data: Failed to set inputFormat or SerDe");
tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { tbl.setSerializationLib(crtTbl.getDefaultSerName()); if (crtTbl.getDefaultSerName() == null) { LOG.info("Default to LazySimpleSerDe for like table {}", targetTableName); tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { tbl.setSerializationLib(crtTbl.getDefaultSerName());
tbl.setInputFormatClass(SequenceFileInputFormat.class.getName()); tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName()); tbl.setSerializationLib(ThriftDeserializer.class.getName()); tbl.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName()); tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class
tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { tbl.setSerializationLib(crtTbl.getDefaultSerName()); if (crtTbl.getDefaultSerName() == null) { LOG.info("Default to LazySimpleSerDe for like table " + crtTbl.getTableName()); tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { tbl.setSerializationLib(crtTbl.getDefaultSerName());
format.processStorageFormat("TextFile"); Table table = db.newTable(tableName); table.setSerializationLib(format.getSerde()); table.setFields(fields); table.setDataLocation(tablePath);
tbl.setTableType(TableType.VIRTUAL_VIEW); tbl.setSerializationLib(null); tbl.clearSerDeInfo(); tbl.setFields(getSchema()); DDLTask.validateSerDe(serDeClassName, conf); tbl.setSerializationLib(serDeClassName);
if (storageHandler == null) { LOG.info("Default to LazySimpleSerDe for table " + tableName); tbl.setSerializationLib(LazySimpleSerDe.class.getName()); } else { String serDeClassName = storageHandler.getSerDeClass().getName(); LOG.info("Use StorageHandler-supplied " + serDeClassName + " for table " + tableName); tbl.setSerializationLib(serDeClassName); tbl.setSerializationLib(getSerName());
DDLTask.validateSerDe(serDeClassName, conf); tbl.setSerializationLib(serDeClassName);
tbl.setSerializationLib(LazySimpleSerDe.class.getName()); tbl.setStoredAsSubDirectories(false);
tbl.setTableType(TableType.VIRTUAL_VIEW); tbl.setSerializationLib(null); tbl.clearSerDeInfo(); tbl.setFields(crtView.getSchema());
/**
 * Describes the Hive table that stores per-query execution statistics.
 *
 * <p>The table lives in the database named by
 * {@code LensConfConstants.STATISTICS_DATABASE_KEY} (falling back to the
 * default statistics database), is named after this class, is partitioned
 * by the string column {@code dt}, and uses {@code JSonSerde} with
 * {@code TextInputFormat}.
 *
 * <p>Fix: corrected the typo in the {@code cause} column description
 * ("eror" -> "error"), which is user-visible metadata in the metastore.
 *
 * @param conf Hive configuration; supplies the statistics database name
 * @return the populated table descriptor (not yet created in the metastore)
 */
@Override
public Table getHiveTable(HiveConf conf) {
  Table table = new Table(
      conf.get(LensConfConstants.STATISTICS_DATABASE_KEY, LensConfConstants.DEFAULT_STATISTICS_DATABASE),
      this.getClass().getSimpleName());
  LinkedList<FieldSchema> colList = new LinkedList<FieldSchema>();
  colList.add(new FieldSchema("handle", "string", "Query Handle"));
  colList.add(new FieldSchema("userQuery", "string", "User Query before rewrite"));
  colList.add(new FieldSchema("submitter", "string", "submitter"));
  colList.add(new FieldSchema("clusterUser", "string", "Cluster User which will do all operations on hdfs"));
  colList.add(new FieldSchema("sessionId", "string", "Lens Session which ran the query"));
  colList.add(new FieldSchema("submissionTime", "bigint", "Time which query was submitted"));
  colList.add(new FieldSchema("startTime", "bigint", "Timestamp which query was Started"));
  colList.add(new FieldSchema("endTime", "bigint", "Timestamp which query was finished"));
  colList.add(new FieldSchema("result", "string", "path to result of query"));
  // Typo fix: was "failure/eror cause if any".
  colList.add(new FieldSchema("cause", "string", "failure/error cause if any"));
  colList.add(new FieldSchema("status", "map<string,string>", "status object of the query"));
  colList.add(new FieldSchema("driverStats", "map<string,string>", "driver statistics of the query"));
  table.setFields(colList);
  LinkedList<FieldSchema> partCols = new LinkedList<FieldSchema>();
  partCols.add(new FieldSchema("dt", "string", "partCol"));
  table.setPartCols(partCols);
  table.setSerializationLib(JSonSerde.class.getName());
  try {
    table.setInputFormatClass(TextInputFormat.class.getName());
  } catch (HiveException e) {
    // Best-effort: keep the descriptor usable even if the format class
    // cannot be resolved; callers see the default input format.
    log.error("Encountered hive exception.", e);
  }
  return table;
}
/**
 * Builds the metastore descriptor for an external, Avro-backed Hive table.
 *
 * <p>The table points at the topic directory under {@code url}/{@code topicsDir},
 * uses the configured Avro SerDe and input/output formats, derives its columns
 * from the given schema, and records the Avro schema literal as a table
 * parameter so Hive can deserialize rows.
 *
 * @param database    database the table belongs to
 * @param tableName   table (topic) name
 * @param schema      source schema converted to Hive columns
 * @param partitioner supplies the partition columns
 * @return the populated external-table descriptor
 * @throws HiveMetaStoreException if the Avro input/output format classes
 *         cannot be resolved
 */
private Table constructAvroTable(String database, String tableName, Schema schema,
                                 Partitioner partitioner) throws HiveMetaStoreException {
  Table avroTable = new Table(database, tableName);
  avroTable.setTableType(TableType.EXTERNAL_TABLE);
  avroTable.getParameters().put("EXTERNAL", "TRUE");
  String location = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  avroTable.setDataLocation(new Path(location));
  avroTable.setSerializationLib(avroSerde);
  try {
    avroTable.setInputFormatClass(avroInputFormat);
    avroTable.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  avroTable.setFields(columns);
  avroTable.setPartCols(partitioner.partitionFields());
  // Store the Avro schema literal so the SerDe can decode rows at read time.
  avroTable.getParameters().put(AVRO_SCHEMA_LITERAL,
      avroData.fromConnectSchema(schema).toString());
  return avroTable;
}
}
Table tbl1 = new Table(db, table); if (setCustomSerde) { tbl1.setSerializationLib("DatabaseJarSerde");
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException { Table table = new Table(database, tableName); table.setTableType(TableType.EXTERNAL_TABLE); table.getParameters().put("EXTERNAL", "TRUE"); String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName); table.setDataLocation(new Path(tablePath)); table.setSerializationLib(getHiveParquetSerde()); try { table.setInputFormatClass(getHiveParquetInputFormat()); table.setOutputFormatClass(getHiveParquetOutputFormat()); } catch (HiveException e) { throw new HiveMetaStoreException("Cannot find input/output format:", e); } // convert copycat schema schema to Hive columns List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema); table.setFields(columns); table.setPartCols(partitioner.partitionFields()); return table; }
Table tbl = db.newTable(crtView.getViewName()); tbl.setTableType(TableType.VIRTUAL_VIEW); tbl.setSerializationLib(null); tbl.clearSerDeInfo(); tbl.setViewOriginalText(crtView.getViewOriginalText());