private Table findTable(CalciteSchema schema, String tableName, boolean caseSensitive) { CalciteSchema.TableEntry entry = schema.getTable(tableName, caseSensitive); if (entry != null) { return entry.getTable(); } // Check sub schemas for (CalciteSchema subSchema : schema.getSubSchemaMap().values()) { Table table = findTable(subSchema, tableName, caseSensitive); if (table != null) { return table; } } return null; }
/**
 * Builds the Calcite root schema exposing the Druid datasource schema, the
 * INFORMATION_SCHEMA, and the system tables schema.
 *
 * @param druidSchema      schema of queryable Druid datasources
 * @param systemSchema     schema of Druid system tables
 * @param authorizerMapper used by the information schema for authorization checks
 * @return the populated root schema
 */
public static SchemaPlus createRootSchema(
    final DruidSchema druidSchema,
    final SystemSchema systemSchema,
    final AuthorizerMapper authorizerMapper
)
{
  // Root schema with caching and the metadata schema both disabled.
  final SchemaPlus root = CalciteSchema.createRootSchema(false, false).plus();
  root.add(DruidSchema.NAME, druidSchema);
  root.add(InformationSchema.NAME, new InformationSchema(root, authorizerMapper));
  root.add(SystemSchema.NAME, systemSchema);
  return root;
}
/**
 * Processor initialization: registers the Calcite JDBC driver and declares
 * the supported property descriptors and relationships.
 */
@Override
protected void init(final ProcessorInitializationContext context) {
    // Calcite's JDBC driver must be registered before any connection is created.
    try {
        DriverManager.registerDriver(new org.apache.calcite.jdbc.Driver());
    } catch (final SQLException e) {
        throw new ProcessException("Failed to load Calcite JDBC Driver", e);
    }

    final List<PropertyDescriptor> descriptors = new ArrayList<>();
    descriptors.add(RECORD_READER_FACTORY);
    descriptors.add(RECORD_WRITER_FACTORY);
    descriptors.add(INCLUDE_ZERO_RECORD_FLOWFILES);
    descriptors.add(CACHE_SCHEMA);
    this.properties = Collections.unmodifiableList(descriptors);

    relationships.add(REL_FAILURE);
    relationships.add(REL_ORIGINAL);
}
// Java type factory bound to the surrounding TYPE_SYSTEM constant so that
// SQL-to-Java type mappings use consistent precision/scale rules.
final JavaTypeFactoryImpl typeFactory = new JavaTypeFactoryImpl(TYPE_SYSTEM);
/**
 * Test fixture setup: builds a HEP planner running only the
 * HivePointLookupOptimizerRule.FilterCondition rule (min-OR threshold 2),
 * and wires a RelBuilder over mocked table/schema objects whose row type is
 * derived from {@code MyRecord}.
 */
@Before
public void before() {
  final HepProgramBuilder hepBuilder = new HepProgramBuilder();
  // The rule under test, configured with a minimum OR-to-IN threshold of 2.
  hepBuilder.addRuleInstance(new HivePointLookupOptimizerRule.FilterCondition(2));
  planner = new HepPlanner(hepBuilder.build());

  final JavaTypeFactoryImpl factory = new JavaTypeFactoryImpl();
  final RexBuilder rexBuilder = new RexBuilder(factory);
  final RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);

  // The mocked table reports MyRecord's fields as its row type.
  final RelDataType mockRowType = factory.createStructType(MyRecord.class);
  Mockito.doReturn(mockRowType).when(tableMock).getRowType();
  Mockito.doReturn(tableMock).when(schemaMock).getTableForMember(Matchers.any());
  Mockito.doReturn(hiveTableMDMock).when(tableMock).getHiveTableMD();

  builder = HiveRelFactories.HIVE_BUILDER.create(cluster, schemaMock);
}
/**
 * Creates the Calcite {@link FrameworkConfig} for this planner. When UDFs
 * are registered, a catalog reader over the schema is chained behind the
 * standard operator table so schema-defined functions resolve during
 * validation; otherwise only the default schema is configured.
 */
public FrameworkConfig buildFrameWorkConfig() {
  if (!hasUdf) {
    return Frameworks.newConfigBuilder().defaultSchema(schema).build();
  }
  final List<SqlOperatorTable> operatorTables = new ArrayList<>();
  operatorTables.add(SqlStdOperatorTable.instance());
  // The catalog reader exposes functions registered directly on the schema.
  operatorTables.add(new CalciteCatalogReader(
      CalciteSchema.from(schema),
      Collections.emptyList(),
      typeFactory,
      new CalciteConnectionConfigImpl(new Properties())));
  return Frameworks.newConfigBuilder()
      .defaultSchema(schema)
      .operatorTable(new ChainedSqlOperatorTable(operatorTables))
      .build();
}
/**
 * Builds a prepared statement over a fresh Calcite connection whose root
 * schema exposes the given FlowFile's records as the FLOWFILE table.
 *
 * @param sql                 query to prepare
 * @param connectionSupplier  supplies a new Calcite connection; ownership is
 *                            transferred to the returned CachedStatement
 * @param session             process session used to read the FlowFile
 * @param flowFile            FlowFile backing the FLOWFILE table
 * @param recordReaderFactory factory for the record reader over the FlowFile
 * @return a CachedStatement wrapping the prepared statement, table, and connection
 * @throws SQLException if the statement cannot be prepared
 */
private CachedStatement buildCachedStatement(final String sql, final Supplier<CalciteConnection> connectionSupplier,
    final ProcessSession session, final FlowFile flowFile, final RecordReaderFactory recordReaderFactory)
    throws SQLException {

    final CalciteConnection connection = connectionSupplier.get();
    try {
        final SchemaPlus rootSchema = connection.getRootSchema();
        final FlowFileTable<?, ?> flowFileTable = new FlowFileTable<>(session, flowFile, recordReaderFactory, getLogger());
        rootSchema.add("FLOWFILE", flowFileTable);
        // Table contents differ per FlowFile, so Calcite must not cache the schema.
        rootSchema.setCacheEnabled(false);
        final PreparedStatement stmt = connection.prepareStatement(sql);
        return new CachedStatement(stmt, flowFileTable, connection);
    } catch (final SQLException e) {
        // Fix: previously the connection leaked when prepareStatement failed.
        // Close it here and preserve any close failure as a suppressed exception.
        try {
            connection.close();
        } catch (final SQLException closeFailure) {
            e.addSuppressed(closeFailure);
        }
        throw e;
    }
}
/**
 * Converts a Calcite {@link JavaType} to its corresponding SQL type,
 * preserving nullability; Java classes with no SQL mapping fall back to ANY.
 * All other types are delegated to the superclass.
 */
@Override
public RelDataType toSql(RelDataType type) {
  if (!(type instanceof JavaType)) {
    return super.toSql(type);
  }
  final JavaType javaType = (JavaType) type;
  SqlTypeName mapped = JavaToSqlTypeConversionRules.instance().lookup(javaType.getJavaClass());
  if (mapped == null) {
    // No direct SQL mapping for this Java class; ANY is the catch-all.
    mapped = SqlTypeName.ANY;
  }
  return createTypeWithNullability(createSqlType(mapped), type.isNullable());
}
}
/** * Given a table alias, find the corresponding {@link Table} associated with it * */ private Table findTable(String alias) { List<String> names = null; if (tableScope == null) { // no tables to find return null; } for (ScopeChild child : tableScope.children) { if (catalogReader.nameMatcher().matches(child.name, alias)) { names = ((SqlIdentifier) child.namespace.getNode()).names; break; } } if (names == null || names.size() == 0) { return null; } else if (names.size() == 1) { return findTable(catalogReader.getRootSchema(), names.get(0), catalogReader.nameMatcher().isCaseSensitive()); } CalciteSchema.TableEntry entry = SqlValidatorUtil.getTableEntry(catalogReader, names); return entry == null ? null : entry.getTable(); }
/** * Translate a field access, possibly through a projection, to an underlying Druid dataSource. * * @param rowSignature row signature of underlying Druid dataSource * @param project projection, or null * @param fieldNumber number of the field to access * * @return row expression */ public static RexNode fromFieldAccess( final RowSignature rowSignature, final Project project, final int fieldNumber ) { if (project == null) { // I don't think the factory impl matters here. return RexInputRef.of(fieldNumber, rowSignature.getRelDataType(new JavaTypeFactoryImpl())); } else { return project.getChildExps().get(fieldNumber); } }
/**
 * Configures a Calcite planner over the given schema: standard plus
 * schema-defined operator tables, convention/collation trait definitions,
 * and the Storm streams rule sets and relational type system.
 *
 * @param schema root schema the planner resolves tables and functions against
 */
public QueryPlanner(SchemaPlus schema) {
  final List<RelTraitDef> traitDefs = new ArrayList<RelTraitDef>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelCollationTraitDef.INSTANCE);

  // Standard operators first, then functions registered on the schema.
  final List<SqlOperatorTable> operatorTables = new ArrayList<>();
  operatorTables.add(SqlStdOperatorTable.instance());
  operatorTables.add(new CalciteCatalogReader(
      CalciteSchema.from(schema),
      Collections.emptyList(),
      typeFactory,
      new CalciteConnectionConfigImpl(new Properties())));

  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(schema)
      .operatorTable(new ChainedSqlOperatorTable(operatorTables))
      .traitDefs(traitDefs)
      .context(Contexts.EMPTY_CONTEXT)
      .ruleSets(StreamsStormRuleSets.getRuleSets())
      .costFactory(null)
      .typeSystem(StormRelDataTypeSystem.STORM_REL_DATATYPE_SYSTEM)
      .build();
  this.planner = Frameworks.getPlanner(config);
}
/**
 * Verifies the row type reported for the "foo2" dataSource: three fields
 * with the expected names and SQL types, in positional order.
 */
@Test
public void testGetTableMapFoo2() {
  final DruidTable fooTable = (DruidTable) schema.getTableMap().get("foo2");
  final RelDataType rowType = fooTable.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> fields = rowType.getFieldList();

  final String[] expectedNames = {"__time", "dim2", "m1"};
  final SqlTypeName[] expectedTypes = {
      SqlTypeName.TIMESTAMP, SqlTypeName.VARCHAR, SqlTypeName.BIGINT
  };

  Assert.assertEquals(expectedNames.length, fields.size());
  for (int i = 0; i < expectedNames.length; i++) {
    Assert.assertEquals(expectedNames[i], fields.get(i).getName());
    Assert.assertEquals(expectedTypes[i], fields.get(i).getType().getSqlTypeName());
  }
}
}
/**
 * Verifies the system schema's table set and spot-checks the row types of
 * the segments, tasks, and servers tables (column counts and the first
 * column of tasks/servers).
 */
@Test
public void testGetTableMap() {
  // The schema exposes exactly these four system tables.
  Assert.assertEquals(ImmutableSet.of("segments", "servers", "server_segments", "tasks"),
      schema.getTableNames());
  final Map<String, Table> tables = schema.getTableMap();
  Assert.assertEquals(ImmutableSet.of("segments", "servers", "server_segments", "tasks"),
      tables.keySet());

  // segments: 13 columns.
  final SystemSchema.SegmentsTable segments =
      (SystemSchema.SegmentsTable) schema.getTableMap().get("segments");
  final RelDataType segmentsRowType = segments.getRowType(new JavaTypeFactoryImpl());
  Assert.assertEquals(13, segmentsRowType.getFieldList().size());

  // tasks: 13 columns; the first is task_id of type VARCHAR.
  final SystemSchema.TasksTable tasks =
      (SystemSchema.TasksTable) schema.getTableMap().get("tasks");
  final RelDataType tasksRowType = tasks.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> taskFields = tasksRowType.getFieldList();
  Assert.assertEquals(13, taskFields.size());
  Assert.assertEquals("task_id", taskFields.get(0).getName());
  Assert.assertEquals(SqlTypeName.VARCHAR, taskFields.get(0).getType().getSqlTypeName());

  // servers: 8 columns; the first is server of type VARCHAR.
  final SystemSchema.ServersTable servers =
      (SystemSchema.ServersTable) schema.getTableMap().get("servers");
  final RelDataType serversRowType = servers.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> serverFields = serversRowType.getFieldList();
  Assert.assertEquals(8, serverFields.size());
  Assert.assertEquals("server", serverFields.get(0).getName());
  Assert.assertEquals(SqlTypeName.VARCHAR, serverFields.get(0).getType().getSqlTypeName());
}
// Accumulates the generated linq4j expression statements for this translation.
final BlockBuilder builder = new BlockBuilder();
// Java type factory sharing the same type system as the surrounding RexBuilder,
// so generated Java types agree with the relational types being translated.
final JavaTypeFactoryImpl javaTypeFactory = new JavaTypeFactoryImpl(rexBuilder.getTypeFactory().getTypeSystem());
/**
 * Verifies the row type reported for the "foo" dataSource: six fields with
 * the expected names and SQL types, in positional order.
 */
@Test
public void testGetTableMapFoo() {
  final DruidTable fooTable = (DruidTable) schema.getTableMap().get("foo");
  final RelDataType rowType = fooTable.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> fields = rowType.getFieldList();

  final String[] expectedNames = {"__time", "cnt", "dim1", "dim2", "m1", "unique_dim1"};
  final SqlTypeName[] expectedTypes = {
      SqlTypeName.TIMESTAMP,
      SqlTypeName.BIGINT,
      SqlTypeName.VARCHAR,
      SqlTypeName.VARCHAR,
      SqlTypeName.BIGINT,
      SqlTypeName.OTHER
  };

  Assert.assertEquals(expectedNames.length, fields.size());
  for (int i = 0; i < expectedNames.length; i++) {
    Assert.assertEquals(expectedNames[i], fields.get(i).getName());
    Assert.assertEquals(expectedTypes[i], fields.get(i).getType().getSqlTypeName());
  }
}
/**
 * Plans the given SQL over a dummy streaming table registered as both FOO
 * and BAR (monotonic primary key ID plus NAME/ADDR string columns), with a
 * scalar UDF MYPLUS available, and returns the schema and plan tree.
 *
 * @param sql query to plan
 * @return the Calcite state (schema + relational tree) for the query
 */
public static CalciteState sqlOverDummyTable(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  final SchemaPlus schema = Frameworks.createRootSchema(true);
  final JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

  final StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("ID", SqlTypeName.INTEGER,
          new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO))
      .field("NAME", typeFactory.createType(String.class))
      .field("ADDR", typeFactory.createType(String.class))
      .build();
  final Table table = streamableTable.stream();
  // The same table is visible under two names for join/self-join tests.
  schema.add("FOO", table);
  schema.add("BAR", table);
  schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));

  final QueryPlanner queryPlanner = new QueryPlanner(schema);
  final StreamsRel tree = queryPlanner.getPlan(sql);
  System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
/**
 * Plans the given SQL over two streaming tables suitable for equi-join
 * tests: EMP (EMPID primary key, EMPNAME, DEPTID) and DEPT (DEPTID primary
 * key, DEPTNAME), and returns the schema and plan tree.
 *
 * @param sql query to plan
 * @return the Calcite state (schema + relational tree) for the query
 */
public static CalciteState sqlOverSimpleEquiJoinTables(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  final SchemaPlus schema = Frameworks.createRootSchema(true);
  final JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

  final StreamableTable empStreamable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("EMPID", SqlTypeName.INTEGER,
          new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO))
      .field("EMPNAME", SqlTypeName.VARCHAR)
      .field("DEPTID", SqlTypeName.INTEGER)
      .build();
  final StreamableTable deptStreamable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("DEPTID", SqlTypeName.INTEGER,
          new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO))
      .field("DEPTNAME", SqlTypeName.VARCHAR)
      .build();

  schema.add("EMP", empStreamable.stream());
  schema.add("DEPT", deptStreamable.stream());

  final QueryPlanner queryPlanner = new QueryPlanner(schema);
  final StreamsRel tree = queryPlanner.getPlan(sql);
  System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
/**
 * Plans the given SQL over a dummy streaming table (registered as FOO and
 * BAR) sized for GROUP BY tests, with aggregate UDFs MYSTATICSUM and MYSUM
 * available, and returns the schema and plan tree.
 *
 * @param sql query to plan
 * @return the Calcite state (schema + relational tree) for the query
 */
public static CalciteState sqlOverDummyGroupByTable(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  final SchemaPlus schema = Frameworks.createRootSchema(true);
  final JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

  final StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("ID", SqlTypeName.INTEGER,
          new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO))
      .field("GRPID", SqlTypeName.INTEGER)
      .field("NAME", typeFactory.createType(String.class))
      .field("ADDR", typeFactory.createType(String.class))
      .field("AGE", SqlTypeName.INTEGER)
      .field("SCORE", SqlTypeName.INTEGER)
      .build();
  final Table table = streamableTable.stream();
  // The same table is visible under two names for join/self-join tests.
  schema.add("FOO", table);
  schema.add("BAR", table);
  schema.add("MYSTATICSUM", AggregateFunctionImpl.create(MyStaticSumFunction.class));
  schema.add("MYSUM", AggregateFunctionImpl.create(MySumFunction.class));

  final QueryPlanner queryPlanner = new QueryPlanner(schema);
  final StreamsRel tree = queryPlanner.getPlan(sql);
  System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
@Test public void testCBOMaxNumToCNF1() { // OR(=($0, 1), AND(=($0, 0), =($1, 8))) // transformation creates 7 nodes AND(OR(=($0, 1), =($0, 0)), OR(=($0, 1), =($1, 8))) // thus, it is triggered final RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl(); final RexBuilder rexBuilder = new RexBuilder(typeFactory); final RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.OR, rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0), rexBuilder.makeLiteral(1, typeFactory.createSqlType(SqlTypeName.INTEGER), false)), rexBuilder.makeCall(SqlStdOperatorTable.AND, rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0), rexBuilder.makeLiteral(0, typeFactory.createSqlType(SqlTypeName.INTEGER), false)), rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 1), rexBuilder.makeLiteral(8, typeFactory.createSqlType(SqlTypeName.INTEGER), false)))); final RexNode newCond = RexUtil.toCnf(rexBuilder, maxNumNodesCNF, cond); assertEquals(newCond.toString(), "AND(OR(=($0, 1), =($0, 0)), OR(=($0, 1), =($1, 8)))"); }
@Test public void testCBOMaxNumToCNF2() { // OR(=($0, 1), =($0, 2), AND(=($0, 0), =($1, 8))) // transformation creates 9 nodes AND(OR(=($0, 1), =($0, 2), =($0, 0)), OR(=($0, 1), =($0, 2), =($1, 8))) // thus, it is NOT triggered final RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl(); final RexBuilder rexBuilder = new RexBuilder(typeFactory); final RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.OR, rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0), rexBuilder.makeLiteral(1, typeFactory.createSqlType(SqlTypeName.INTEGER), false)), rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0), rexBuilder.makeLiteral(2, typeFactory.createSqlType(SqlTypeName.INTEGER), false)), rexBuilder.makeCall(SqlStdOperatorTable.AND, rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0), rexBuilder.makeLiteral(0, typeFactory.createSqlType(SqlTypeName.INTEGER), false)), rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 1), rexBuilder.makeLiteral(8, typeFactory.createSqlType(SqlTypeName.INTEGER), false)))); final RexNode newCond = RexUtil.toCnf(rexBuilder, maxNumNodesCNF, cond); assertEquals(newCond.toString(), "OR(=($0, 1), =($0, 2), AND(=($0, 0), =($1, 8)))"); }