private ThriftTableMetadata getRequiredTableMetadata(SchemaTableName schemaTableName)
{
    Optional<ThriftTableMetadata> table = tableCache.getUnchecked(schemaTableName);
    if (!table.isPresent()) {
        throw new TableNotFoundException(schemaTableName);
    }
    else {
        return table.get();
    }
}

@Override
public void dropView(ConnectorSession session, SchemaTableName viewName)
{
    ConnectorViewDefinition view = getViews(session, viewName.toSchemaTablePrefix()).get(viewName);
    if (view == null) {
        throw new ViewNotFoundException(viewName);
    }

    try {
        metastore.dropTable(session, viewName.getSchemaName(), viewName.getTableName());
    }
    catch (TableNotFoundException e) {
        throw new ViewNotFoundException(e.getTableName());
    }
}

private Table getTableOrElseThrow(String databaseName, String tableName)
{
    return getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
}

private Table getRequiredTable(String databaseName, String tableName)
{
    return getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
}

private void alterTable(String databaseName, String tableName, Function<TableMetadata, TableMetadata> alterFunction)
{
    requireNonNull(databaseName, "databaseName is null");
    requireNonNull(tableName, "tableName is null");

    Path tableMetadataDirectory = getTableMetadataDirectory(databaseName, tableName);

    TableMetadata oldTableSchema = readSchemaFile("table", tableMetadataDirectory, tableCodec)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));

    TableMetadata newTableSchema = alterFunction.apply(oldTableSchema);
    if (oldTableSchema == newTableSchema) {
        return;
    }

    writeSchemaFile("table", tableMetadataDirectory, tableCodec, newTableSchema, true);
}

private static AbstractTableMetadata getTableMetadata(KeyspaceMetadata keyspace, String caseInsensitiveTableName)
{
    List<AbstractTableMetadata> tables = Stream.concat(
            keyspace.getTables().stream(),
            keyspace.getMaterializedViews().stream())
            .filter(table -> table.getName().equalsIgnoreCase(caseInsensitiveTableName))
            .collect(toImmutableList());
    if (tables.size() == 0) {
        throw new TableNotFoundException(new SchemaTableName(keyspace.getName(), caseInsensitiveTableName));
    }
    else if (tables.size() == 1) {
        return tables.get(0);
    }
    String tableNames = tables.stream()
            .map(AbstractTableMetadata::getName)
            .sorted()
            .collect(joining(", "));
    throw new PrestoException(
            NOT_SUPPORTED,
            format("More than one table has been found for the case insensitive table name: %s -> (%s)",
                    caseInsensitiveTableName, tableNames));
}

@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    SchemaTableName tableName = schemaTableName(tableHandle);
    Optional<Table> table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName());
    if (!table.isPresent()) {
        throw new TableNotFoundException(tableName);
    }
    ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
    for (HiveColumnHandle columnHandle : hiveColumnHandles(table.get())) {
        columnHandles.put(columnHandle.getName(), columnHandle);
    }
    return columnHandles.build();
}

private Table getTable(SemiTransactionalHiveMetastore metastore, SchemaTableName tableName)
{
    Optional<Table> target = metastore.getTable(tableName.getSchemaName(), tableName.getTableName());
    if (!target.isPresent()) {
        throw new TableNotFoundException(tableName);
    }
    Table table = target.get();
    verifyOnline(tableName, Optional.empty(), getProtectMode(table), table.getParameters());
    return table;
}

default Optional<List<FieldSchema>> getFields(String databaseName, String tableName)
{
    Optional<Table> table = getTable(databaseName, tableName);
    if (!table.isPresent()) {
        throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
    }

    if (table.get().getSd() == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor");
    }

    return Optional.of(table.get().getSd().getCols());
}

public static void verifyCanDropColumn(ExtendedHiveMetastore metastore, String databaseName, String tableName, String columnName)
{
    Table table = metastore.getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));

    if (table.getPartitionColumns().stream().anyMatch(column -> column.getName().equals(columnName))) {
        throw new PrestoException(NOT_SUPPORTED, "Cannot drop partition columns");
    }
    if (table.getDataColumns().size() <= 1) {
        throw new PrestoException(NOT_SUPPORTED, "Cannot drop the only non-partition column in a table");
    }
}

private String getPartitionName(String databaseName, String tableName, List<String> partitionValues)
{
    Table table = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    return getPartitionName(table, partitionValues);
}

@Override
public List<JdbcColumnHandle> getColumns(ConnectorSession session, JdbcTableHandle tableHandle)
{
    try (Connection connection = connectionFactory.openConnection()) {
        try (ResultSet resultSet = getColumns(tableHandle, connection.getMetaData())) {
            List<JdbcColumnHandle> columns = new ArrayList<>();
            while (resultSet.next()) {
                JdbcTypeHandle typeHandle = new JdbcTypeHandle(
                        resultSet.getInt("DATA_TYPE"),
                        resultSet.getString("TYPE_NAME"),
                        resultSet.getInt("COLUMN_SIZE"),
                        resultSet.getInt("DECIMAL_DIGITS"));
                Optional<ReadMapping> columnMapping = toPrestoType(session, typeHandle);
                // skip unsupported column types
                if (columnMapping.isPresent()) {
                    String columnName = resultSet.getString("COLUMN_NAME");
                    columns.add(new JdbcColumnHandle(connectorId, columnName, typeHandle, columnMapping.get().getType()));
                }
            }
            if (columns.isEmpty()) {
                // In rare cases (e.g. PostgreSQL) a table might have no columns.
                throw new TableNotFoundException(tableHandle.getSchemaTableName());
            }
            return ImmutableList.copyOf(columns);
        }
    }
    catch (SQLException e) {
        throw new PrestoException(JDBC_ERROR, e);
    }
}

private void failIfAvroSchemaIsSet(HiveTableHandle handle)
{
    String tableName = handle.getTableName();
    String schemaName = handle.getSchemaName();
    Optional<Table> table = metastore.getTable(schemaName, tableName);

    if (!table.isPresent()) {
        throw new TableNotFoundException(new SchemaTableName(schemaName, tableName));
    }

    if (table.get().getParameters().get(AVRO_SCHEMA_URL_KEY) != null) {
        throw new PrestoException(NOT_SUPPORTED, "ALTER TABLE not supported when Avro schema url is set");
    }
}

@Override
public void renameTable(String databaseName, String tableName, String newDatabaseName, String newTableName)
{
    Optional<org.apache.hadoop.hive.metastore.api.Table> source = delegate.getTable(databaseName, tableName);
    if (!source.isPresent()) {
        throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
    }
    org.apache.hadoop.hive.metastore.api.Table table = source.get();
    table.setDbName(newDatabaseName);
    table.setTableName(newTableName);
    alterTable(databaseName, tableName, table);
}

@Override
public PartitionStatistics getTableStatistics(String databaseName, String tableName)
{
    Table table = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    return new PartitionStatistics(getHiveBasicStatistics(table.getParameters()), ImmutableMap.of());
}

@Override
public ConnectorTableHandle beginStatisticsCollection(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    verifyJvmTimeZone();
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = handle.getSchemaTableName();
    metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(tableName));
    return handle;
}

@Override
public Optional<ConnectorNewTableLayout> getInsertLayout(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(tableName));

    Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table);
    if (!hiveBucketHandle.isPresent()) {
        return Optional.empty();
    }

    HiveBucketProperty bucketProperty = table.getStorage().getBucketProperty()
            .orElseThrow(() -> new NoSuchElementException("Bucket property should be set"));
    if (!bucketProperty.getSortedBy().isEmpty() && !isSortedWritingEnabled(session)) {
        throw new PrestoException(NOT_SUPPORTED, "Writing to bucketed sorted Hive tables is disabled");
    }

    HivePartitioningHandle partitioningHandle = new HivePartitioningHandle(
            hiveBucketHandle.get().getTableBucketCount(),
            hiveBucketHandle.get().getColumns().stream()
                    .map(HiveColumnHandle::getHiveType)
                    .collect(Collectors.toList()),
            OptionalInt.of(hiveBucketHandle.get().getTableBucketCount()));
    List<String> partitionColumns = hiveBucketHandle.get().getColumns().stream()
            .map(HiveColumnHandle::getName)
            .collect(Collectors.toList());
    return Optional.of(new ConnectorNewTableLayout(partitioningHandle, partitionColumns));
}

@SuppressWarnings("ValueOfIncrementOrDecrementUsed") private ConnectorTableMetadata getTableMetadata(SchemaTableName schemaTableName) { RedisTableDescription table = getDefinedTables().get(schemaTableName); if (table == null) { throw new TableNotFoundException(schemaTableName); } ImmutableList.Builder<ColumnMetadata> builder = ImmutableList.builder(); appendFields(builder, table.getKey()); appendFields(builder, table.getValue()); for (RedisInternalFieldDescription fieldDescription : RedisInternalFieldDescription.values()) { builder.add(fieldDescription.getColumnMetadata(hideInternalColumns)); } return new ConnectorTableMetadata(schemaTableName, builder.build()); }
@Override
public void dropColumn(String databaseName, String tableName, String columnName)
{
    verifyCanDropColumn(this, databaseName, tableName, columnName);
    org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    table.getSd().getCols().removeIf(fieldSchema -> fieldSchema.getName().equals(columnName));
    alterTable(databaseName, tableName, table);
}