/**
 * Updates the existing table metadata with the new metadata, without cascading
 * the change to partitions (delegates to the four-argument overload with
 * {@code cascade=false}).
 *
 * @param tblName
 *          name of the existing table
 * @param newTbl
 *          new table definition to persist; its name may be the same as the old name
 * @param environmentContext
 *          optional metastore context properties; may be null
 * @throws InvalidOperationException
 *           if the changes in metadata is not acceptable
 * @throws HiveException
 *           on any other failure while altering the table
 */
public void alterTable(String tblName, Table newTbl, EnvironmentContext environmentContext)
    throws InvalidOperationException, HiveException {
  alterTable(tblName, newTbl, false, environmentContext);
}
/**
 * Updates the existing table metadata with the new metadata.
 *
 * @param fullyQlfdTblName
 *          fully-qualified "db.table" name of the existing table
 * @param newTbl
 *          new table definition to persist
 * @param cascade
 *          whether the change should cascade to the table's partitions
 * @param environmentContext
 *          optional metastore context properties; may be null
 * @param transactional
 *          whether to generate and save a table snapshot into the metastore
 * @throws HiveException
 *           if the name cannot be parsed or the alter operation fails
 */
public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
    EnvironmentContext environmentContext, boolean transactional) throws HiveException {
  String[] dbAndTable = Utilities.getDbTableName(fullyQlfdTblName);
  String dbName = dbAndTable[0];
  String tableName = dbAndTable[1];
  // Catalog resolution is deferred to the full overload (null => default catalog).
  alterTable(null, dbName, tableName, newTbl, cascade, environmentContext, transactional);
}
/**
 * Updates the existing table metadata with the new metadata, without cascading
 * the change to partitions ({@code cascade=false}).
 *
 * @param fullyQlfdTblName
 *          fully-qualified "db.table" name of the existing table
 * @param newTbl
 *          new table definition to persist; its name may be the same as the old name
 * @param environmentContext
 *          optional metastore context properties; may be null
 * @param transactional
 *          Need to generate and save a table snapshot into the metastore?
 * @throws HiveException
 *           if the name cannot be parsed or the changes in metadata are not acceptable
 */
public void alterTable(String fullyQlfdTblName, Table newTbl,
    EnvironmentContext environmentContext, boolean transactional) throws HiveException {
  String[] names = Utilities.getDbTableName(fullyQlfdTblName);
  alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional);
}
/**
 * Updates the existing table metadata with the new metadata, resolving the
 * catalog, database and table name from the supplied table object itself.
 *
 * @param newTbl
 *          new table definition to persist; also names the target table
 * @param cascade
 *          whether the change should cascade to the table's partitions
 * @param environmentContext
 *          optional metastore context properties; may be null
 * @param transactional
 *          whether to generate and save a table snapshot into the metastore
 * @throws HiveException
 *           if the alter operation fails
 */
public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environmentContext,
    boolean transactional) throws HiveException {
  // The table object carries its own full identity; forward it to the full overload.
  String catalogName = newTbl.getCatName();
  String databaseName = newTbl.getDbName();
  String tableName = newTbl.getTableName();
  alterTable(catalogName, databaseName, tableName, newTbl, cascade, environmentContext,
      transactional);
}
// Persist the recomputed column/table stats carried in the first result; the
// trailing 'true' requests a transactional snapshot alongside the metadata update.
// NOTE(review): fragment of a larger method — the surrounding if/else is not visible here.
db.alterTable(tableFullName, (Table) values.get(0).result, environmentContext, true);
LOG.debug("Updated stats for {}.", tableFullName);
} else {
/** * Actually makes the table transactional */ private static void alterTable(Table t, Hive db, boolean isMM) throws HiveException, InvalidOperationException { org.apache.hadoop.hive.ql.metadata.Table metaTable = //clone to make sure new prop doesn't leak new org.apache.hadoop.hive.ql.metadata.Table(t.deepCopy()); metaTable.getParameters().put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true"); if(isMM) { metaTable.getParameters() .put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES, "insert_only"); } EnvironmentContext ec = new EnvironmentContext(); /*we are not modifying any data so stats should be exactly the same*/ ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); db.alterTable(Warehouse.getQualifiedName(t), metaTable, false, ec, false); }
// Stamp the index table with the data file's modification time so staleness
// can be detected later. NOTE(review): fragment — enclosing method not visible.
FileStatus fstat = fs.getFileStatus(url);
tbl.getParameters().put(HiveIndex.INDEX_TABLE_CREATETIME,
    Long.toString(fstat.getModificationTime()));
// Persist via the "db.table" overload with no environment context.
db.alterTable(tbl.getDbName() + "." + tbl.getTableName(), tbl, null);
/**
 * Collects the data locations of the given table, or of a subset of its partitions.
 *
 * As a side effect, when a table's or partition's basic-stats state needs refreshing
 * (per {@code needToUpdateStats}), the updated metadata is persisted back to the
 * metastore.
 *
 * @param db       Hive client used to enumerate partitions and persist stats updates
 * @param table    table whose data locations are requested
 * @param partSpec partial partition spec selecting a subset of partitions, or null
 *                 for the whole table (all partitions if partitioned)
 * @return list of data paths for the selected table/partitions
 * @throws HiveException              on metastore access failure
 * @throws InvalidOperationException  if a stats-refreshing alter is rejected
 */
private List<Path> getLocations(Hive db, Table table, Map<String, String> partSpec)
    throws HiveException, InvalidOperationException {
  List<Path> locations = new ArrayList<>();
  if (partSpec == null) {
    if (table.isPartitioned()) {
      collectPartitionLocations(db, table, db.getPartitions(table), locations);
    } else {
      locations.add(table.getPath());
      EnvironmentContext environmentContext = new EnvironmentContext();
      if (needToUpdateStats(table.getParameters(), environmentContext)) {
        db.alterTable(table.getDbName() + "." + table.getTableName(), table,
            environmentContext);
      }
    }
  } else {
    collectPartitionLocations(db, table, db.getPartitionsByNames(table, partSpec), locations);
  }
  return locations;
}

/**
 * Adds each partition's data location to {@code locations}, persisting a stats
 * refresh for any partition whose stats state is out of date.
 */
private void collectPartitionLocations(Hive db, Table table, List<Partition> partitions,
    List<Path> locations) throws HiveException, InvalidOperationException {
  for (Partition partition : partitions) {
    locations.add(partition.getDataLocation());
    EnvironmentContext environmentContext = new EnvironmentContext();
    if (needToUpdateStats(partition.getParameters(), environmentContext)) {
      db.alterPartition(table.getDbName(), table.getTableName(), partition,
          environmentContext);
    }
  }
}
// Persist the table: no cascade, no environment context, transactional snapshot requested.
db.alterTable(tbl, false, null, true);
// Touch the table's last-access time and persist it (no environment context).
// NOTE(review): fragment of a switch statement — the two 'break's below belong
// to different cases whose labels are not visible here.
Table t = db.getTable(re.getTable().getTableName());
t.setLastAccessTime(lastAccessTime);
db.alterTable(t.getDbName() + "." + t.getTableName(), t, null);
break;
// Partition case: also bump the owning table's last-access time.
db.alterPartition(t.getTableName(), p, null);
t.setLastAccessTime(lastAccessTime);
db.alterTable(t.getDbName() + "." + t.getTableName(), t, null);
break;
// Touch the table's last-access time and persist it (cascade=false, no context,
// non-transactional). NOTE(review): fragment of a switch statement — the two
// 'break's below belong to different cases whose labels are not visible here.
Table t = db.getTable(dbName, tblName);
t.setLastAccessTime(lastAccessTime);
db.alterTable(dbName + "." + tblName, t, false, null, false);
break;
// Partition case: null catalog resolves to the default catalog downstream.
db.alterPartition(null, dbName, tblName, p, null, false);
t.setLastAccessTime(lastAccessTime);
db.alterTable(dbName + "." + tblName, t, false, null, false);
break;
// Persist the altered table (no cascade, transactional snapshot), then register
// the table in the DDL work's read/write entity sets so lineage and locking see it.
db.alterTable(tbl, false, environmentContext, true);
work.getInputs().add(new ReadEntity(tbl));
// DDL_NO_LOCK: metadata-only change, no data lock needed.
addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
/**
 * Full alterTable implementation targeted by the wrapper overloads: validates the
 * new table definition, applies cascade/stats hints via the environment context,
 * and (for transactional tables) computes the write-id snapshot to record.
 * NOTE(review): this block is truncated in the visible source — the remainder of
 * the try block and the method body continue beyond this snippet.
 *
 * @param catName            catalog name; null resolves to the default catalog
 * @param dbName             database of the table being altered
 * @param tblName            name of the table being altered
 * @param newTbl             new table definition to persist
 * @param cascade            whether the change should cascade to partitions
 * @param environmentContext optional metastore context; created here if null
 * @param transactional      whether to generate and save a table snapshot
 * @param replWriteId        replication write id; &gt; 0 on the replication path
 * @throws HiveException on validation or metastore failure
 */
public void alterTable(String catName, String dbName, String tblName, Table newTbl,
    boolean cascade, EnvironmentContext environmentContext, boolean transactional,
    long replWriteId) throws HiveException {
  if (catName == null) {
    catName = getDefaultCatalog(conf);
  }
  try {
    // Remove the DDL_TIME so it gets refreshed
    if (newTbl.getParameters() != null) {
      newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
    }
    newTbl.checkValidity(conf);
    if (environmentContext == null) {
      environmentContext = new EnvironmentContext();
    }
    if (cascade) {
      environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
    }
    // Take a table snapshot and set it to newTbl.
    AcidUtils.TableSnapshot tableSnapshot = null;
    if (transactional) {
      if (replWriteId > 0) {
        // Replication path: reuse the replicated write id rather than allocating one.
        ValidWriteIdList writeIds =
            AcidUtils.getTableValidWriteIdListWithTxnList(conf, dbName, tblName);
        tableSnapshot = new TableSnapshot(replWriteId, writeIds.writeToString());
      } else {
        // Make sure we pass in the names, so we can get the correct snapshot for rename table.
        tableSnapshot = AcidUtils.getTableSnapshot(conf, newTbl, dbName, tblName, true);
      }
db.alterTable(touchDesc.getTableName(), tbl, environmentContext);
} catch (InvalidOperationException e) {
  // Fix "Uable" typo and chain the cause so the underlying metastore failure
  // is not lost from the stack trace.
  throw new HiveException("Unable to update table", e);
// Full-identity alter with an explicit writeId — presumably the replication/ACID
// path where the write id is supplied by the caller (TODO confirm against caller).
db.alterTable(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(), tbl, false,
    environmentContext, true, writeId);
} else {
// Persist via the fully-qualified-name overload: no cascade, no environment
// context, transactional snapshot requested.
hm.alterTable(tableName, table, false, null, true);
// Alter under the table's pre-rename name, cascading to partitions only when the
// ALTER TABLE statement requested it. NOTE(review): fragment — else branch elided.
db.alterTable(alterTbl.getOldName(), tbl, alterTbl.getIsCascade(), environmentContext, true);
} else {
// Persist via the "db.table" overload with no environment context; the catch
// correctly chains the cause while surfacing its message to the user.
db.alterTable(tbl.getDbName()+"."+tbl.getTableName(),tbl,null);
} catch (InvalidOperationException e) {
  throw new HiveException("Unable to alter table. " + e.getMessage(), e);
// Persist the table object 'mv' (no cascade, transactional snapshot requested).
db.alterTable(mv, false, environmentContext, true);
// CREATE OR REPLACE VIEW path: overwrite the existing view's metadata, then record
// a metadata-only write entity (DDL_NO_LOCK — no data lock needed for views).
db.alterTable(crtView.getViewName(), oldview, false, null, true);
addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK));
} else {