final TableInfo result = TableInfo.builder().name(name).fields(fields.build()).build();
setTableInfoDetails(connection, result);
log.debug("Finished getting table metadata for qualified name {} for request {}", name, context);
private TableInfo getTableInfo(
    @Nonnull @NonNull final QualifiedName name,
    @Nonnull @NonNull final TableMetadata tableMetadata
) {
    final ImmutableList.Builder<FieldInfo> fieldInfoBuilder = ImmutableList.builder();
    // TODO: Ignores clustering, primary key, index, etc. columns. We need to rework TableInfo to support them.
    for (final ColumnMetadata column : tableMetadata.getColumns()) {
        final String dataType = column.getType().toString();
        fieldInfoBuilder.add(
            FieldInfo.builder()
                .name(column.getName())
                .sourceType(dataType)
                .type(this.typeConverter.toMetacatType(dataType))
                .build()
        );
    }
    return TableInfo.builder()
        .name(QualifiedName.ofTable(name.getCatalogName(), name.getDatabaseName(), tableMetadata.getName()))
        .fields(fieldInfoBuilder.build())
        .build();
}
/**
 * Converts a Druid data source to table info.
 *
 * @param dataSource dataSource object
 * @return table info object
 */
public TableInfo getTableInfoFromDatasource(final DataSource dataSource) {
    final List<Segment> segmentList = dataSource.getSegmentList();
    final Segment latestSegment = segmentList.get(segmentList.size() - 1);
    final List<FieldInfo> fieldInfos = new ArrayList<>();
    for (final String dim : latestSegment.getDimensions().split(",")) {
        fieldInfos.add(FieldInfo.builder()
            .comment(DruidConfigConstants.DIMENSIONS)
            .name(dim)
            .type(BaseType.STRING)
            .build());
    }
    for (final String metric : latestSegment.getMetric().split(",")) {
        fieldInfos.add(FieldInfo.builder()
            .comment(DruidConfigConstants.METRICS)
            .name(metric)
            .type(BaseType.DOUBLE)
            .build());
    }
    return TableInfo.builder().fields(fieldInfos)
        .name(QualifiedName.ofTable(catalogName, DruidConfigConstants.DRUID_DB, dataSource.getName()))
        .build();
}
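// Hedged usage sketch (not from the source): `druidConverter` and `dataSource` are
// assumed to be a converter instance and a data source fetched via the Druid
// connector's client. It illustrates that every comma-separated dimension surfaces
// as a STRING field and every metric as a DOUBLE field on the resulting table.
final TableInfo druidTable = druidConverter.getTableInfoFromDatasource(dataSource);
druidTable.getFields().forEach(field ->
    log.info("Druid column {}: type={}, role={}", field.getName(), field.getType(), field.getComment()));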
/**
 * Converts an Iceberg table to TableInfo.
 *
 * @param name      qualified name
 * @param table     iceberg table object
 * @param tableLoc  iceberg table metadata location
 * @param auditInfo audit information
 * @return Metacat table info
 */
public TableInfo fromIcebergTableToTableInfo(final QualifiedName name,
                                             final com.netflix.iceberg.Table table,
                                             final String tableLoc,
                                             final AuditInfo auditInfo) {
    final List<FieldInfo> allFields =
        this.hiveTypeConverter.icebergeSchemaTofieldDtos(table.schema(), table.spec().fields());
    final Map<String, String> tableParameters = new HashMap<>();
    tableParameters.put(DirectSqlTable.PARAM_TABLE_TYPE, DirectSqlTable.ICEBERG_TABLE_TYPE);
    tableParameters.put(DirectSqlTable.PARAM_METADATA_LOCATION, tableLoc);
    // add the Iceberg table properties
    tableParameters.putAll(table.properties());
    return TableInfo.builder().fields(allFields)
        .metadata(tableParameters)
        .serde(StorageInfo.builder().uri(table.location()).build())
        .name(name).auditInfo(auditInfo)
        .build();
}
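// Hedged usage sketch (illustrative names, not from the source): `icebergConverter`,
// `icebergTable`, `metadataLocation`, and `auditInfo` are assumed inputs; the Iceberg
// table would be loaded from the metadata location before conversion. The resulting
// metadata map carries both the Iceberg table-type marker and the metadata location.
final QualifiedName icebergName = QualifiedName.ofTable("prodhive", "mydb", "events");
final TableInfo icebergInfo =
    icebergConverter.fromIcebergTableToTableInfo(icebergName, icebergTable, metadataLocation, auditInfo);
log.debug("Iceberg table {} has metadata at {}", icebergInfo.getName(),
    icebergInfo.getMetadata().get(DirectSqlTable.PARAM_METADATA_LOCATION));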
@Override
public TableInfo toTableInfo(final QualifiedName tableName, final Table table) {
    return TableInfo.builder().name(tableName).fields(toFields(table)).auditInfo(toAuditInfo(table))
        .serde(toStorageInfo(table)).build();
}
final AuditInfo auditInfo = AuditInfo.builder().createdDate(creationDate).build();
if (null != table.getTableType() && table.getTableType().equals(TableType.VIRTUAL_VIEW.name())) {
    return TableInfo.builder()
        .serde(toStorageInfo(table.getSd(), table.getOwner())).fields(allFields)
        .metadata(table.getParameters()).name(name).auditInfo(auditInfo)
        // views additionally carry their original and expanded SQL text
        .view(ViewInfo.builder()
            .viewOriginalText(table.getViewOriginalText())
            .viewExpandedText(table.getViewExpandedText())
            .build()
        ).build();
} else {
    return TableInfo.builder()
        .serde(toStorageInfo(table.getSd(), table.getOwner())).fields(allFields)
        .metadata(table.getParameters()).name(name).auditInfo(auditInfo)
        .build();
}
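// Hedged sketch of the branch above: TableType is the Hive metastore enum, so only a
// table whose type string equals VIRTUAL_VIEW gets view details attached; managed and
// external tables fall into the else branch. `hiveView` is a hypothetical metastore object.
final Table hiveView = new Table();
hiveView.setTableType(TableType.VIRTUAL_VIEW.name());
hiveView.setViewOriginalText("SELECT * FROM src");
// converting hiveView would yield a TableInfo whose view info carries the SQL text above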