@Override
public Iterable<String> getSubPartitions(String schema, String table,
    List<String> partitionColumns, List<String> partitionValues)
    throws PartitionNotFoundException {
  // Schemas are registered under lower-cased names, so normalize before the lookup.
  // NOTE(review): assumes the sub-schema exists; getSubSchema returning null would NPE here.
  AbstractSchema drillSchema =
      rootSchema.getSubSchema(schema.toLowerCase()).unwrap(AbstractSchema.class);
  return drillSchema.getSubPartitions(table, partitionColumns, partitionValues);
}
}
@Override
public ElasticsearchTable create(SchemaPlus schema, String name, Map<String, Object> operand,
    RelDataType rowType) {
  final ElasticsearchSchema esSchema = schema.unwrap(ElasticsearchSchema.class);
  // The Elasticsearch type comes from the "tableName" operand, not from the factory's
  // name argument; an absent operand yields the empty string.
  final String tableName = operand.getOrDefault("tableName", "").toString();
  final ElasticsearchTable result = new ElasticsearchTranslatableTable(
      esSchema.getClient(), new ObjectMapper(), esSchema.getIndex(), tableName, operand);
  // Register the table on the schema before handing it back.
  esSchema.addTable(tableName, result);
  return result;
}
}
protected boolean shouldVisitFiles(String schemaName, SchemaPlus schemaPlus) {
  // Without a pushed-down filter every workspace schema is eligible.
  if (filter == null) {
    return true;
  }
  AbstractSchema unwrapped;
  try {
    unwrapped = schemaPlus.unwrap(AbstractSchema.class);
  } catch (ClassCastException e) {
    return false; // not a Drill schema
  }
  // Only workspace schemas expose files.
  if (!(unwrapped instanceof WorkspaceSchemaFactory.WorkspaceSchema)) {
    return false;
  }
  WorkspaceSchemaFactory.WorkspaceSchema workspace =
      (WorkspaceSchemaFactory.WorkspaceSchema) unwrapped;
  Map<String, String> filterRecord = new HashMap<>();
  filterRecord.put(FILES_COL_SCHEMA_NAME, schemaName);
  filterRecord.put(FILES_COL_ROOT_SCHEMA_NAME, workspace.getSchemaPath().get(0));
  filterRecord.put(FILES_COL_WORKSPACE_NAME, workspace.getName());
  // TRUE and INCONCLUSIVE both keep the schema; only a definite FALSE prunes it.
  return filter.evaluate(filterRecord) != Result.FALSE;
}
private static void addSchemasToCloseList(final SchemaPlus tree, final List<AutoCloseable> toClose) { for(String subSchemaName : tree.getSubSchemaNames()) { addSchemasToCloseList(tree.getSubSchema(subSchemaName), toClose); } try { AbstractSchema drillSchemaImpl = tree.unwrap(AbstractSchema.class); toClose.add(drillSchemaImpl); } catch (ClassCastException e) { // Ignore as the SchemaPlus is not an implementation of Drill schema. } } }
@Override
public boolean visitSchema(String schemaName, SchemaPlus schema) {
  AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
  // Emit one INFORMATION_SCHEMA.SCHEMATA record; owner is not tracked, hence the placeholder.
  records.add(new Records.Schema(IS_CATALOG_NAME, schemaName, "<owner>",
      drillSchema.getTypeName(), drillSchema.isMutable()));
  return false;
}
}
protected boolean shouldVisitSchema(String schemaName, SchemaPlus schema) { try { // if the schema path is null or empty (try for root schema) if (schemaName == null || schemaName.isEmpty()) { return false; } AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class); if (!drillSchema.showInInformationSchema()) { return false; } if (filter == null) { return true; } final Map<String, String> recordValues = ImmutableMap.of( CATS_COL_CATALOG_NAME, IS_CATALOG_NAME, SHRD_COL_TABLE_SCHEMA, schemaName, SCHS_COL_SCHEMA_NAME, schemaName); // If the filter evaluates to false then we don't need to visit the schema. // For other two results (TRUE, INCONCLUSIVE) continue to visit the schema. return filter.evaluate(recordValues) != Result.FALSE; } catch(ClassCastException e) { // ignore and return true as this is not a Drill schema } return true; }
@Override public void visitFiles(String schemaName, SchemaPlus schemaPlus) { try { AbstractSchema schema = schemaPlus.unwrap(AbstractSchema.class); if (schema instanceof WorkspaceSchemaFactory.WorkspaceSchema) { WorkspaceSchemaFactory.WorkspaceSchema wsSchema = (WorkspaceSchemaFactory.WorkspaceSchema) schema; String defaultLocation = wsSchema.getDefaultLocation(); FileSystem fs = wsSchema.getFS(); boolean recursive = optionManager.getBoolean(ExecConstants.LIST_FILES_RECURSIVELY); FileSystemUtil.listAllSafe(fs, new Path(defaultLocation), recursive).forEach( fileStatus -> records.add(new Records.File(schemaName, wsSchema, fileStatus)) ); } } catch (ClassCastException | UnsupportedOperationException e) { // ignore the exception since either this is not a Drill schema or schema does not support files listing } } }
@Override public void visitTables(String schemaPath, SchemaPlus schema) { final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class); final List<Pair<String, TableType>> tableNamesAndTypes = drillSchema .getTableNamesAndTypes(optionManager.getOption(ExecConstants.ENABLE_BULK_LOAD_TABLE_LIST), (int)optionManager.getOption(ExecConstants.BULK_LOAD_TABLE_LIST_BULK_SIZE)); for (Pair<String, TableType> tableNameAndType : tableNamesAndTypes) { final String tableName = tableNameAndType.getKey(); final TableType tableType = tableNameAndType.getValue(); // Visit the table, and if requested ... if (shouldVisitTable(schemaPath, tableName, tableType)) { visitTableWithType(schemaPath, tableName, tableType); } } }
public Table create(SchemaPlus schema, String name, Map operand, RelDataType rowType) { final DruidSchema druidSchema = schema.unwrap(DruidSchema.class);
public Table create(SchemaPlus schema, String name, Map operand, RelDataType rowType) { final DruidSchema druidSchema = schema.unwrap(DruidSchema.class);
@Override
public Expression getExpression(SchemaPlus schema, String tableName, Class clazz) {
  // Resolve the target object backing the reflective schema, then reference this field on it.
  Expression target = schema.unwrap(ReflectiveSchema.class)
      .getTargetExpression(schema.getParentSchema(), schema.getName());
  return Expressions.field(target, field);
}
}
@Override
public Expression getExpression(SchemaPlus schema, String tableName, Class clazz) {
  // The enclosing reflective schema knows how to express its own target object;
  // this field is then read off that expression.
  ReflectiveSchema reflectiveSchema = schema.unwrap(ReflectiveSchema.class);
  return Expressions.field(
      reflectiveSchema.getTargetExpression(schema.getParentSchema(), schema.getName()),
      field);
}
}
/**
 * Unwraps the given {@link SchemaPlus} instance as a Drill schema ({@code AbstractSchema})
 * and returns that schema's default schema. If the given schema is not a Drill schema,
 * a {@link UserException} is thrown.
 */
public static AbstractSchema unwrapAsDrillSchemaInstance(SchemaPlus schemaPlus) {
  try {
    AbstractSchema drillSchema = schemaPlus.unwrap(AbstractSchema.class);
    return (AbstractSchema) drillSchema.getDefaultSchema();
  } catch (ClassCastException e) {
    throw UserException.validationError(e)
        .message("Schema [%s] is not a Drill schema.", getSchemaPath(schemaPlus))
        .build(logger);
  }
}
AbstractSchema drillSchema; try { drillSchema = schema.unwrap(AbstractSchema.class); } catch (ClassCastException e) { throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
org.apache.drill.exec.store.AbstractSchema drillSchema; try { drillSchema = schema.unwrap(org.apache.drill.exec.store.AbstractSchema.class); } catch (ClassCastException e) { throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
/** * Visit the tables in the given schema. The * @param schemaPath the path to the given schema * @param schema the given schema */ public void visitTables(String schemaPath, SchemaPlus schema) { final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class); final List<String> tableNames = Lists.newArrayList(schema.getTableNames()); for(Pair<String, ? extends Table> tableNameToTable : drillSchema.getTablesByNames(tableNames)) { final String tableName = tableNameToTable.getKey(); final Table table = tableNameToTable.getValue(); final TableType tableType = table.getJdbcTableType(); // Visit the table, and if requested ... if(shouldVisitTable(schemaPath, tableName, tableType) && visitTable(schemaPath, tableName, table)) { // ... do for each of the table's fields. final RelDataType tableRow = table.getRowType(new JavaTypeFactoryImpl(DRILL_REL_DATATYPE_SYSTEM)); for (RelDataTypeField field: tableRow.getFieldList()) { if (shouldVisitColumn(schemaPath, tableName, field.getName())) { visitField(schemaPath, tableName, field); } } } } }
wsSchema = (WorkspaceSchema) drillSchema.unwrap(AbstractSchema.class).getDefaultSchema(); } catch (ClassCastException e) { throw UserException.validationError()
/** Tests some of the properties of the {@link Lattice} data structure. */
@Test public void testLattice() throws Exception {
  final String sql = "select 1 from \"foodmart\".\"sales_fact_1997\" as s\n"
      + "join \"foodmart\".\"product\" as p using (\"product_id\")\n"
      + "join \"foodmart\".\"time_by_day\" as t on t.\"time_id\" = s.\"time_id\"";
  modelWithLattice("star", sql)
      .doWithConnection(connection -> {
        final SchemaPlus rootSchema = connection.getRootSchema();
        final SchemaPlus adhoc = rootSchema.getSubSchema("adhoc");
        assertThat(adhoc.getTableNames().contains("EMPLOYEES"), is(true));
        final CalciteSchema.LatticeEntry latticeEntry =
            adhoc.unwrap(CalciteSchema.class).getLatticeMap().firstEntry().getValue();
        final Lattice lattice = latticeEntry.getLattice();
        assertThat(lattice.firstColumn("S"), is(10));
        assertThat(lattice.firstColumn("P"), is(18));
        assertThat(lattice.firstColumn("T"), is(0));
        assertThat(lattice.firstColumn("PC"), is(-1));
        assertThat(lattice.defaultMeasures.size(), is(1));
        assertThat(lattice.rootNode.descendants.size(), is(3));
      });
}
/** Tests that it's OK for a lattice to have the same name as a table in the
 * schema. */
@Test public void testLatticeSql() throws Exception {
  modelWithLattice("EMPLOYEES", "select * from \"foodmart\".\"days\"")
      .doWithConnection(connection -> {
        final SchemaPlus rootSchema = connection.getRootSchema();
        final SchemaPlus adhoc = rootSchema.getSubSchema("adhoc");
        assertThat(adhoc.getTableNames().contains("EMPLOYEES"), is(true));
        final Lattice lattice = adhoc.unwrap(CalciteSchema.class)
            .getLatticeMap().firstEntry().getValue().getLattice();
        // Grouped form when only one column is requested.
        final String expectedGrouped = "SELECT \"days\".\"day\"\n"
            + "FROM \"foodmart\".\"days\" AS \"days\"\n"
            + "GROUP BY \"days\".\"day\"";
        assertThat(lattice.sql(ImmutableBitSet.of(0), ImmutableList.of()),
            is(expectedGrouped));
        // Ungrouped form when grouping is explicitly disabled.
        final String expectedUngrouped = "SELECT"
            + " \"days\".\"day\", \"days\".\"week_day\"\n"
            + "FROM \"foodmart\".\"days\" AS \"days\"";
        assertThat(lattice.sql(ImmutableBitSet.of(0, 1), false, ImmutableList.of()),
            is(expectedUngrouped));
      });
}
/** Tests that it's OK for a lattice to have the same name as a table in the
 * schema. */
@Test public void testLatticeSql() throws Exception {
  modelWithLattice("EMPLOYEES", "select * from \"foodmart\".\"days\"")
      .doWithConnection(c -> {
        final SchemaPlus root = c.getRootSchema();
        final SchemaPlus adhocSchema = root.getSubSchema("adhoc");
        assertThat(adhocSchema.getTableNames().contains("EMPLOYEES"), is(true));
        final Map.Entry<String, CalciteSchema.LatticeEntry> firstLattice =
            adhocSchema.unwrap(CalciteSchema.class).getLatticeMap().firstEntry();
        final Lattice lattice = firstLattice.getValue().getLattice();
        // One-column projection produces a grouped query.
        assertThat(lattice.sql(ImmutableBitSet.of(0), ImmutableList.of()),
            is("SELECT \"days\".\"day\"\n"
                + "FROM \"foodmart\".\"days\" AS \"days\"\n"
                + "GROUP BY \"days\".\"day\""));
        // Two-column projection with grouping disabled stays ungrouped.
        assertThat(lattice.sql(ImmutableBitSet.of(0, 1), false, ImmutableList.of()),
            is("SELECT"
                + " \"days\".\"day\", \"days\".\"week_day\"\n"
                + "FROM \"foodmart\".\"days\" AS \"days\""));
      });
}