/**
 * Checks whether the given table is stored with the Parquet serde.
 *
 * @param table the table to inspect; may be {@code null}
 * @return {@code true} if the table is non-null and its serialization library is
 *         {@code ParquetHiveSerDe}; {@code false} otherwise (including for a null table)
 */
public static boolean isParquetTable(Table table) {
  // Simplified from "table == null ? false : ..." — a null table is never a Parquet table.
  return table != null
      && ParquetHiveSerDe.class.getName().equals(table.getSerializationLib());
}
private List<FieldSchema> getColsInternal(boolean forMs) { String serializationLib = getSerializationLib(); try { // Do the lightweight check for general case. if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tTable.getSd().getCols(); } else if (forMs && !shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, tTable.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer()); } else { return HiveMetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer()); } } catch (Exception e) { LOG.error("Unable to get field from serde: " + serializationLib, e); } return new ArrayList<FieldSchema>(); }
private List<FieldSchema> getColsInternal(boolean forMs) { String serializationLib = getSerializationLib(); try { // Do the lightweight check for general case. if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tTable.getSd().getCols(); } else if (forMs && !shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, tTable.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer()); } else { return MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer()); } } catch (Exception e) { LOG.error("Unable to get field from serde: " + serializationLib, e); } return new ArrayList<FieldSchema>(); }
/**
 * Updates the in-memory thrift partition object: optionally copies the table's
 * storage specs (I/O formats, serde, bucketing, sorting) onto it, then sets the
 * new partition location. Nothing is persisted to the metastore here.
 *
 * @param tbl table the partition belongs to
 * @param partSpec partition spec, used only for logging
 * @param tpart thrift partition object mutated in place
 * @param inheritTableSpecs whether to copy the table's storage specs onto the partition
 * @param partPath new partition location; must be non-null and non-blank
 * @throws HiveException if partPath is null or blank
 */
private void alterPartitionSpecInMemory(Table tbl,
    Map<String, String> partSpec,
    org.apache.hadoop.hive.metastore.api.Partition tpart,
    boolean inheritTableSpecs,
    String partPath) throws HiveException, InvalidOperationException {
  LOG.debug("altering partition for table " + tbl.getTableName()
      + " with partition spec : " + partSpec);
  // Validate up front so a bad path does not leave tpart partially modified
  // (previously the check ran only after the storage specs were copied).
  if (partPath == null || partPath.trim().isEmpty()) {
    throw new HiveException("new partition path should not be null or empty.");
  }
  if (inheritTableSpecs) {
    tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
    tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
    tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
    tpart.getSd().getSerdeInfo().setParameters(
        tbl.getTTable().getSd().getSerdeInfo().getParameters());
    tpart.getSd().setBucketCols(tbl.getBucketCols());
    tpart.getSd().setNumBuckets(tbl.getNumBuckets());
    tpart.getSd().setSortCols(tbl.getSortCols());
  }
  tpart.getSd().setLocation(partPath);
}
/**
 * Updates the in-memory thrift partition object: optionally copies the table's
 * storage specs (I/O formats, serde, bucketing, sorting) onto it, then sets the
 * new partition location. Nothing is persisted to the metastore here.
 *
 * @param tbl table the partition belongs to
 * @param partSpec partition spec, used only for logging
 * @param tpart thrift partition object mutated in place
 * @param inheritTableSpecs whether to copy the table's storage specs onto the partition
 * @param partPath new partition location; must be non-null and non-blank
 * @throws HiveException if partPath is null or blank
 */
private void alterPartitionSpecInMemory(Table tbl,
    Map<String, String> partSpec,
    org.apache.hadoop.hive.metastore.api.Partition tpart,
    boolean inheritTableSpecs,
    String partPath) throws HiveException, InvalidOperationException {
  LOG.debug("altering partition for table " + tbl.getTableName()
      + " with partition spec : " + partSpec);
  // Fail fast on an invalid path so tpart is not left partially modified
  // (previously the check ran only after the storage specs were copied).
  if (partPath == null || partPath.trim().isEmpty()) {
    throw new HiveException("new partition path should not be null or empty.");
  }
  if (inheritTableSpecs) {
    tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
    tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
    tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
    tpart.getSd().getSerdeInfo().setParameters(
        tbl.getTTable().getSd().getSerdeInfo().getParameters());
    tpart.getSd().setBucketCols(tbl.getBucketCols());
    tpart.getSd().setNumBuckets(tbl.getNumBuckets());
    tpart.getSd().setSortCols(tbl.getSortCols());
  }
  tpart.getSd().setLocation(partPath);
}
.getMsg(" Table inputformat/outputformats do not match")); String existingSerde = table.getSerializationLib(); String importedSerde = tableDesc.getSerName(); if (!existingSerde.equals(importedSerde)) {
.getMsg(" Table inputformat/outputformats do not match")); String existingSerde = table.getSerializationLib(); String importedSerde = tableDesc.getSerName(); if (!existingSerde.equals(importedSerde)) {
+ "; " + tbl.getTTable() + ")", ft.getTTable().equals(tbl.getTTable())); assertEquals("SerializationLib is not set correctly", tbl .getSerializationLib(), ft.getSerializationLib()); assertEquals("Serde is not set correctly", tbl.getDeserializer() .getClass().getName(), ft.getDeserializer().getClass().getName());
/**
 * Returns the table's columns: the storage-descriptor columns when the serde's
 * schema is kept in the metastore, otherwise the fields reported by the
 * deserializer. Falls back to an empty list when the deserializer lookup fails.
 *
 * @return the table's columns, never null
 */
public List<FieldSchema> getCols() {
  if (!SerDeUtils.shouldGetColsFromSerDe(getSerializationLib())) {
    // The metastore-stored schema is authoritative for this serde.
    return tTable.getSd().getCols();
  }
  try {
    return Hive.getFieldsFromDeserializer(getTableName(), getDeserializer());
  } catch (HiveException e) {
    LOG.error("Unable to get field from serde: " + getSerializationLib(), e);
    return new ArrayList<FieldSchema>();
  }
}
/**
 * Returns the table's columns: the storage-descriptor columns when the serde's
 * schema is metastore-based, otherwise the fields reported by the deserializer.
 * Falls back to an empty list when the lookup fails.
 *
 * @return the table's columns, never null
 */
public List<FieldSchema> getCols() {
  String serDe = getSerializationLib();
  try {
    return hasMetastoreBasedSchema(SessionState.getSessionConf(), serDe)
        ? tTable.getSd().getCols()
        : MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
  } catch (Exception e) {
    LOG.error("Unable to get field from serde: " + serDe, e);
    return new ArrayList<FieldSchema>();
  }
}
tblDesc.setMapKeyDelimiter(tbl.getSerdeParam(serdeConstants.MAPKEY_DELIM)); tblDesc.setEscapeChar(tbl.getSerdeParam(serdeConstants.ESCAPE_CHAR)); tblDesc.setSerdeClassName(tbl.getSerializationLib()); tblDesc.setStorageHandlerName(tbl.getStorageHandler() != null ? tbl.getStorageHandler().getClass().getCanonicalName() : "");
tblDesc.setMapKeyDelimiter(tbl.getSerdeParam(serdeConstants.MAPKEY_DELIM)); tblDesc.setEscapeChar(tbl.getSerdeParam(serdeConstants.ESCAPE_CHAR)); tblDesc.setSerdeClassName(tbl.getSerializationLib()); tblDesc.setStorageHandlerName(tbl.getStorageHandler() != null ? tbl.getStorageHandler().getClass().getCanonicalName() : "");
tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib()); if (partPath == null || partPath.trim().equals("")) { throw new HiveException("new partition path should not be null or empty.");
List<FieldSchema> newCols = alterTbl.getNewCols(); List<FieldSchema> oldCols = tbl.getCols(); if (tbl.getSerializationLib().equals( "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { console } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.REPLACECOLS) { if (tbl.getSerializationLib().equals( "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { console .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe"); tbl.setSerializationLib(LazySimpleSerDe.class.getName()); } else if (!tbl.getSerializationLib().equals( MetadataTypedColumnsetSerDe.class.getName()) && !tbl.getSerializationLib().equals(LazySimpleSerDe.class.getName()) && !tbl.getSerializationLib().equals(ColumnarSerDe.class.getName()) && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())) { console.printError("Replace columns is not supported for this table. " + "SerDe may be incompatible.");
.getMsg(" Table inputformat/outputformats do not match")); String existingSerde = table.getSerializationLib(); String importedSerde = tableDesc.getSerName(); if (!existingSerde.equals(importedSerde)) {
/**
 * Updates the thrift partition object (optionally inheriting the table's
 * storage specs), marks its stats as generated via the stats task, and
 * persists the change through {@code alterPartition}.
 *
 * @param tbl table the partition belongs to
 * @param partSpec partition spec, used only for logging
 * @param tpart thrift partition object mutated in place and then persisted
 * @param inheritTableSpecs whether to copy the table's I/O formats, serde,
 *        bucketing and sort specs onto the partition
 * @param partPath new partition location; must be non-null and non-blank
 * @throws HiveException if partPath is null or blank, or the alter fails
 */
private void alterPartitionSpec(Table tbl,
    Map<String, String> partSpec,
    org.apache.hadoop.hive.metastore.api.Partition tpart,
    boolean inheritTableSpecs,
    String partPath) throws HiveException, InvalidOperationException {
  LOG.debug("altering partition for table " + tbl.getTableName()
      + " with partition spec : " + partSpec);
  // Validate up front so a bad path does not leave tpart partially modified
  // (previously the check ran only after the storage specs were copied).
  if (partPath == null || partPath.trim().isEmpty()) {
    throw new HiveException("new partition path should not be null or empty.");
  }
  if (inheritTableSpecs) {
    tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
    tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
    tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
    tpart.getSd().getSerdeInfo().setParameters(
        tbl.getTTable().getSd().getSerdeInfo().getParameters());
    tpart.getSd().setBucketCols(tbl.getBucketCols());
    tpart.getSd().setNumBuckets(tbl.getNumBuckets());
    tpart.getSd().setSortCols(tbl.getSortCols());
  }
  tpart.getSd().setLocation(partPath);
  tpart.getParameters().put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, "true");
  // Plain null/empty check replaces the shaded-internal commons-lang
  // StringUtils.isEmpty, which is semantically identical.
  String dbName = tbl.getDbName();
  String fullName = (dbName == null || dbName.isEmpty())
      ? tbl.getTableName()
      : dbName + "." + tbl.getTableName();
  alterPartition(fullName, new Partition(tbl, tpart));
}