/**
 * Returns the value of the requested Thrift field.
 *
 * @param field the field selector; MESSAGE is the only field defined for this struct
 * @return the current value of the field
 * @throws IllegalStateException if {@code field} is not a known field of this struct
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case MESSAGE:
      return getMessage();
  }
  // Include the offending field so the failure is diagnosable from the message alone
  // (the original bare IllegalStateException carried no context).
  throw new IllegalStateException("Unknown field: " + field);
}
/**
 * Dispatches {@code event} to every registered pre-event listener, in registration order.
 *
 * <p>Listener failures of type {@code NoSuchObjectException} or
 * {@code InvalidOperationException} are rewrapped as {@code MetaException} so this method
 * exposes a single checked exception type to callers.
 *
 * @param event the pre-event to deliver to all listeners
 * @throws MetaException if any listener rejects the event
 */
private void firePreEvent(PreEventContext event) throws MetaException {
  for (MetaStorePreEventListener listener : preListeners) {
    try {
      listener.onEvent(event);
    } catch (NoSuchObjectException | InvalidOperationException e) {
      // Multi-catch collapses two previously identical handlers. MetaException has no
      // cause-taking constructor, so only the message can be propagated.
      throw new MetaException(e.getMessage());
    }
  }
}
/**
 * Drops a table, preferring the session-local temp-table registry over the remote
 * metastore: if {@code getTempTable} finds a match, the temp table (and any cached
 * column statistics for it) is removed locally and the remote client is never called.
 *
 * @param catName     catalog name, currently ignored for temp tables (see TODO below)
 * @param dbname      database name
 * @param name        table name
 * @param deleteData  whether the table's data should be deleted along with metadata
 * @param envContext  environment context forwarded to the drop operation
 * @throws NoSuchObjectException if the underlying client cannot find the table
 */
@Override
protected void drop_table_with_environment_context(String catName, String dbname,
    String name, boolean deleteData, EnvironmentContext envContext)
    throws MetaException, TException, NoSuchObjectException, UnsupportedOperationException {
  // First try temp table
  // TODO CAT - I think the right thing here is to always put temp tables in the current
  // catalog. But we don't yet have a notion of current catalog, so we'll have to hold on
  // until we do.
  org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbname, name);
  if (table != null) {
    try {
      // Clear any cached column statistics before removing the table itself.
      deleteTempTableColumnStatsForTable(dbname, name);
    } catch (NoSuchObjectException err) {
      // No stats to delete, forgivable error.
      LOG.info(err.getMessage());
    }
    dropTempTable(table, deleteData, envContext);
    return;
  }
  // Not a temp table: try underlying client.
  super.drop_table_with_environment_context(catName, dbname, name, deleteData, envContext);
}
public Table getTable(String location, String hcatServerUri, String hcatServerPrincipal, Job job) throws IOException { Pair<String, String> loc_server = new Pair<String, String>(location, hcatServerUri); Table hcatTable = hcatTableCache.get(loc_server); if (hcatTable != null) { return hcatTable; } Pair<String, String> dbTablePair = PigHCatUtil.getDBTableNames(location); String dbName = dbTablePair.first; String tableName = dbTablePair.second; Table table = null; IMetaStoreClient client = null; try { client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class, job); table = HCatUtil.getTable(client, dbName, tableName); } catch (NoSuchObjectException nsoe) { throw new PigException("Table not found : " + nsoe.getMessage(), PIG_EXCEPTION_CODE); // prettier error messages to frontend } catch (Exception e) { throw new IOException(e); } finally { HCatUtil.closeHiveClientQuietly(client); } hcatTableCache.put(loc_server, table); return table; }
void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot, Hive hiveDb) throws Exception { try { Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME); Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName); Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName); List<SQLPrimaryKey> pks = hiveDb.getPrimaryKeyList(dbName, tblName); List<SQLForeignKey> fks = hiveDb.getForeignKeyList(dbName, tblName); List<SQLUniqueConstraint> uks = hiveDb.getUniqueConstraintList(dbName, tblName); List<SQLNotNullConstraint> nns = hiveDb.getNotNullConstraintList(dbName, tblName); if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) { try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) { ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf); serializer.writeTo(jsonWriter, null); } } if (fks != null && !fks.isEmpty()) { try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) { ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf); serializer.writeTo(jsonWriter, null); } } } catch (NoSuchObjectException e) { // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it. // Just log a debug message and skip it. LOG.debug(e.getMessage()); } }
/**
 * Thrift endpoint: lists the (optionally distinct/filtered/ordered) partition values
 * for a table, after authorizing partition-metadata access on that table.
 *
 * @param request carries catalog (optional), db/table names, partition keys and the
 *                listing options (distinct, filter, ordering, max results)
 * @return the partition values from the underlying RawStore
 * @throws MetaException if the table/partitions cannot be found (NoSuchObjectException
 *                       is logged and rewrapped; only its message survives)
 */
@Override
public PartitionValuesResponse get_partition_values(PartitionValuesRequest request)
    throws MetaException {
  // Fall back to the default catalog when the request does not name one.
  String catName = request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf);
  String dbName = request.getDbName();
  String tblName = request.getTblName();
  try {
    authorizeTableForPartitionMetadata(catName, dbName, tblName);

    // This is serious black magic, as the following 2 lines do nothing AFAICT but without them
    // the subsequent call to listPartitionValues fails.
    // NOTE(review): deliberately left untouched — do not "clean up" without verifying
    // listPartitionValues still works afterwards.
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(request.getPartitionKeys().get(0));
    return getMS().listPartitionValues(catName, dbName, tblName, request.getPartitionKeys(),
        request.isApplyDistinct(), request.getFilter(), request.isAscending(),
        request.getPartitionOrder(), request.getMaxParts());
  } catch (NoSuchObjectException e) {
    LOG.error(String.format("Unable to get partition for %s.%s.%s", catName, dbName, tblName), e);
    throw new MetaException(e.getMessage());
  }
}
} catch (NoSuchObjectException err){ LOG.info(err.getMessage());
} catch (NoSuchObjectException e) { if (throwException) { LOG.error("Table " + tableName + " not found: " + e.getMessage()); throw new InvalidTableException(tableName);
/**
 * Thrift endpoint: creates a table via {@code create_table_core}, bracketing the call
 * with start/end function bookkeeping and mapping internal exceptions onto the
 * endpoint's declared exception types.
 *
 * @param tbl        the table definition to create
 * @param envContext environment context forwarded to the core create
 * @throws InvalidObjectException if a referenced object is missing
 *                                (NoSuchObjectException is logged and rewrapped)
 * @throws MetaException          for declared metastore failures, and as the wrapper
 *                                for any unexpected exception (via newMetaException)
 */
@Override
public void create_table_with_environment_context(final Table tbl,
    final EnvironmentContext envContext)
    throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException {
  startFunction("create_table", ": " + tbl.toString());
  boolean success = false;
  // Remember the failure (if any) so endFunction can record it in the finally block.
  Exception ex = null;
  try {
    create_table_core(getMS(), tbl, envContext);
    success = true;
  } catch (NoSuchObjectException e) {
    // Logged with stack trace here because the rewrap below keeps only the message.
    LOG.warn("create_table_with_environment_context got ", e);
    ex = e;
    throw new InvalidObjectException(e.getMessage());
  } catch (MetaException | InvalidObjectException | AlreadyExistsException
      | InvalidInputException e) {
    // Declared exception types pass through unchanged.
    ex = e;
    throw e;
  } catch (Exception e) {
    // Anything unexpected is wrapped as MetaException.
    ex = e;
    throw newMetaException(e);
  } finally {
    endFunction("create_table", success, ex, tbl.getTableName());
  }
}
/**
 * Builds the partition name ("k1=v1/k2=v2/...") for the partition values carried by
 * {@code hiveObject}, or returns {@code null} when the reference carries no partition
 * values.
 *
 * @param hiveObject object reference providing db/table names, optional catalog name,
 *                   and the partition values
 * @return the partition name, or {@code null} if there are no partition values
 * @throws MetaException if the referenced table does not exist or name building fails
 */
private String getPartName(HiveObjectRef hiveObject) throws MetaException {
  String partName = null;
  List<String> partValue = hiveObject.getPartValues();
  // isEmpty() instead of size() > 0: identical semantics, clearer intent.
  if (partValue != null && !partValue.isEmpty()) {
    try {
      // Fall back to the default catalog when the reference does not name one.
      String catName = hiveObject.isSetCatName() ? hiveObject.getCatName()
          : getDefaultCatalog(conf);
      Table table = get_table_core(catName, hiveObject.getDbName(), hiveObject.getObjectName());
      partName = Warehouse.makePartName(table.getPartitionKeys(), partValue);
    } catch (NoSuchObjectException e) {
      // MetaException has no cause-taking constructor; only the message survives.
      throw new MetaException(e.getMessage());
    }
  }
  return partName;
}
tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage());
LOG.error("Table " + dbName + "." + tableName + " not found: " + e.getMessage()); throw new InvalidTableException(tableName);
throw new InvalidOperationException(e.getMessage()); } catch (MetaException | InvalidOperationException e) { ex = e;
/**
 * Thrift endpoint: creates a table together with its constraint definitions
 * (primary/foreign keys, unique, not-null, default and check constraints), bracketing
 * the call with start/end function bookkeeping and mapping internal exceptions onto
 * the endpoint's declared exception types.
 *
 * @param tbl                the table definition to create
 * @param primaryKeys        primary-key constraints, may be null/empty
 * @param foreignKeys        foreign-key constraints, may be null/empty
 * @param uniqueConstraints  unique constraints, may be null/empty
 * @param notNullConstraints not-null constraints, may be null/empty
 * @param defaultConstraints default constraints, may be null/empty
 * @param checkConstraints   check constraints, may be null/empty
 * @throws InvalidObjectException if a referenced object is missing
 * @throws MetaException          for declared metastore failures, and as the wrapper
 *                                for any unexpected exception (via newMetaException)
 */
@Override
public void create_table_with_constraints(final Table tbl,
    final List<SQLPrimaryKey> primaryKeys, final List<SQLForeignKey> foreignKeys,
    List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints,
    List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints)
    throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException {
  startFunction("create_table", ": " + tbl.toString());
  boolean success = false;
  Exception ex = null;
  try {
    create_table_core(getMS(), tbl, null, primaryKeys, foreignKeys, uniqueConstraints,
        notNullConstraints, defaultConstraints, checkConstraints);
    success = true;
  } catch (NoSuchObjectException e) {
    // Log before rewrapping, consistent with create_table_with_environment_context;
    // InvalidObjectException keeps only the message, so the stack trace would
    // otherwise be lost entirely.
    LOG.warn("create_table_with_constraints got ", e);
    ex = e;
    throw new InvalidObjectException(e.getMessage());
  } catch (MetaException | InvalidObjectException | AlreadyExistsException
      | InvalidInputException e) {
    // Declared exception types pass through unchanged.
    ex = e;
    throw e;
  } catch (Exception e) {
    // Anything unexpected is wrapped as MetaException.
    ex = e;
    throw newMetaException(e);
  } finally {
    endFunction("create_table", success, ex, tbl.getTableName());
  }
}
LOG.debug("Object not found in metastore ", e); throw new InvalidOperationException( "Unable to change partition or table. Object " + e.getMessage() + " does not exist." + " Check metastore logs for detailed stack."); } finally {
tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage());
} catch (NoSuchObjectException e) { throw new InvalidOperationException("alter is not possible: " + e.getMessage()); } finally { if(!success) { throw new InvalidOperationException( "Unable to change partition or table. Database " + dbname + " does not exist" + " Check metastore logs for detailed stack." + e.getMessage());
} catch (NoSuchObjectException e) { ex = e; throw new InvalidObjectException(e.getMessage()); } catch (MetaException e) { ex = e;
/**
 * Returns the value of the requested Thrift field.
 *
 * @param field the field selector; MESSAGE is the only field defined for this struct
 * @return the current value of the field
 * @throws IllegalStateException if {@code field} is not a known field of this struct
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case MESSAGE:
      return getMessage();
  }
  // Include the offending field so the failure is diagnosable from the message alone
  // (the original bare IllegalStateException carried no context).
  throw new IllegalStateException("Unknown field: " + field);
}
/**
 * Dispatches {@code event} to every registered pre-event listener, in registration order.
 *
 * <p>Listener failures of type {@code NoSuchObjectException} or
 * {@code InvalidOperationException} are rewrapped as {@code MetaException} so this method
 * exposes a single checked exception type to callers.
 *
 * @param event the pre-event to deliver to all listeners
 * @throws MetaException if any listener rejects the event
 */
private void firePreEvent(PreEventContext event) throws MetaException {
  for (MetaStorePreEventListener listener : preListeners) {
    try {
      listener.onEvent(event);
    } catch (NoSuchObjectException | InvalidOperationException e) {
      // Multi-catch collapses two previously identical handlers. MetaException has no
      // cause-taking constructor, so only the message can be propagated.
      throw new MetaException(e.getMessage());
    }
  }
}