/**
 * Creates a schema adapter that wraps a {@link JdbcSchema} so that table
 * lookups can later be retried with case-folded names (see {@code getTable}).
 *
 * @param parentSchemaPath path of the parent schema this schema is mounted under
 * @param name             name of this schema
 * @param dataSource       JDBC data source used by the wrapped schema
 * @param dialect          SQL dialect of the backing database
 * @param convention       calling convention used for relational expressions
 * @param catalog          JDBC catalog name, may be null
 * @param schema           JDBC schema name, may be null
 */
public CapitalizingJdbcSchema(List<String> parentSchemaPath, String name, DataSource dataSource,
    SqlDialect dialect, JdbcConvention convention, String catalog, String schema) {
  super(parentSchemaPath, name);
  // All metadata queries are delegated to this wrapped Calcite JDBC schema.
  inner = new JdbcSchema(dataSource, dialect, convention, catalog, schema);
}
/** Delegates table-name enumeration to the wrapped JDBC schema. */
@Override
public Set<String> getTableNames() {
  return inner.getTableNames();
}
/** Delegates function-name enumeration to the wrapped JDBC schema. */
@Override
public Set<String> getFunctionNames() {
  return inner.getFunctionNames();
}
// NOTE(review): fragment — the enclosing method signature and the catch/finally
// belonging to this try block are outside the visible chunk; only comments added here.
try {
  // Translate the optimized Calcite (Optiq) plan back into a SQL string using
  // the target database's dialect.
  final JdbcImplementor jdbcImplementor = new JdbcImplementor(dialect,
      (JavaTypeFactory) optimizedOptiqPlan.getCluster()
          .getTypeFactory());
  final JdbcImplementor.Result result = jdbcImplementor.visitChild(0, optimizedOptiqPlan);
  String sql = result.asStatement().toSqlString(dialect).getSql();
  // Calcite renders an unbounded VARCHAR as VARCHAR(2147483647); rewrite it to
  // STRING for the target dialect. 2147483647 == Integer.MAX_VALUE.
  return sql.replaceAll("VARCHAR\\(2147483647\\)", "STRING");
/**
 * Converts a {@link LogicalProject} into a {@link JdbcRules.JdbcProject} carrying
 * this rule's output convention; the input is converted to the same convention.
 */
public RelNode convert(RelNode rel) {
  final LogicalProject project = (LogicalProject) rel;
  final RelNode projectInput = project.getInput();
  // Convert the child to the JDBC convention before re-parenting it.
  final RelNode convertedInput =
      convert(projectInput, projectInput.getTraitSet().replace(this.out).simplify());
  return new JdbcRules.JdbcProject(
      rel.getCluster(),
      rel.getTraitSet().replace(this.out),
      convertedInput,
      project.getProjects(),
      project.getRowType());
}
/**
 * Pushes a {@link HiveFilter} sitting on top of a {@link HiveJdbcConverter}
 * below the converter, so the filter is evaluated by the external JDBC source.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCFilterPushDown has been called");

  final HiveFilter filter = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);

  // Re-root the filter directly on the converter's input, then ask the Calcite
  // JdbcFilterRule to express it in the JDBC convention.
  final Filter newHiveFilter =
      filter.copy(filter.getTraitSet(), converter.getInput(), filter.getCondition());
  final JdbcFilter newJdbcFilter =
      (JdbcFilter) new JdbcFilterRule(converter.getJdbcConvention()).convert(newHiveFilter);
  if (newJdbcFilter == null) {
    // Filter could not be expressed in the JDBC convention; leave the plan unchanged.
    return;
  }
  final RelNode converterRes =
      converter.copy(converter.getTraitSet(), Arrays.asList(newJdbcFilter));
  call.transformTo(converterRes);
}
/**
 * Pushes a {@link HiveProject} sitting on top of a {@link HiveJdbcConverter}
 * below the converter, so the projection is computed by the external JDBC source.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCProjectPushDownRule has been called");

  final HiveProject project = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);

  // Re-root the project directly on the converter's input, then ask the Calcite
  // JdbcProjectRule to express it in the JDBC convention.
  final Project newHiveProject = project.copy(
      project.getTraitSet(), converter.getInput(), project.getProjects(), project.getRowType());
  final JdbcProject newJdbcProject =
      (JdbcProject) new JdbcProjectRule(converter.getJdbcConvention()).convert(newHiveProject);
  if (newJdbcProject == null) {
    // Projection could not be expressed in the JDBC convention; leave the plan unchanged.
    return;
  }
  final RelNode converterRes =
      converter.copy(converter.getTraitSet(), Arrays.asList(newJdbcProject));
  call.transformTo(converterRes);
}
/**
 * Pushes a {@link HiveSortLimit} sitting on top of a {@link HiveJdbcConverter}
 * below the converter, so ORDER BY / LIMIT are handled by the external JDBC source.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCSortPushDownRule has been called");

  final HiveSortLimit sort = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);
  final RelNode input = call.rel(2);

  // Re-root the sort on the converter's input, then ask the Calcite JdbcSortRule
  // to express it in the JDBC convention.
  final Sort newHiveSort = sort.copy(
      sort.getTraitSet(), input, sort.getCollation(), sort.getOffsetExpr(), sort.getFetchExpr());
  final JdbcSort newJdbcSort =
      (JdbcSort) new JdbcSortRule(converter.getJdbcConvention()).convert(newHiveSort, false);
  if (newJdbcSort == null) {
    // Sort could not be expressed in the JDBC convention; leave the plan unchanged.
    return;
  }
  final RelNode converterRes = converter.copy(converter.getTraitSet(), Arrays.asList(newJdbcSort));
  call.transformTo(converterRes);
}
/**
 * Pushes a {@link HiveAggregate} sitting on top of a {@link HiveJdbcConverter}
 * below the converter, so the aggregation is computed by the external JDBC source.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  // Fixed debug message: it previously said "MyAggregationPushDownRule.onMatch has been
  // called", a development leftover inconsistent with the sibling push-down rules.
  LOG.debug("JDBCAggregationPushDownRule has been called");

  final HiveAggregate agg = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);

  // Re-root the aggregate on the converter's input, then ask the Calcite
  // JdbcAggregateRule to express it in the JDBC convention.
  final Aggregate newHiveAggregate = agg.copy(agg.getTraitSet(), converter.getInput(),
      agg.getIndicatorCount() != 0, agg.getGroupSet(), agg.getGroupSets(), agg.getAggCallList());
  final JdbcAggregate newJdbcAggregate =
      (JdbcAggregate) new JdbcAggregateRule(converter.getJdbcConvention())
          .convert(newHiveAggregate);
  if (newJdbcAggregate == null) {
    // Aggregate could not be expressed in the JDBC convention; leave the plan unchanged.
    return;
  }
  final RelNode converterRes =
      converter.copy(converter.getTraitSet(), Arrays.asList(newJdbcAggregate));
  call.transformTo(converterRes);
}
public DrillJdbcConvention(SqlDialect dialect, String name) { super(dialect, ConstantUntypedNull.INSTANCE, name); // build rules for this convention. ImmutableSet.Builder<RelOptRule> builder = ImmutableSet.builder(); builder.add(JDBC_PRULE_INSTANCE); builder.add(new JdbcDrelConverterRule(this)); builder.add(new DrillJdbcProjectRule(this)); builder.add(new DrillJdbcFilterRule(this)); outside: for (RelOptRule rule : JdbcRules.rules(this)) { final String description = rule.toString(); // we want to black list some rules but the parent Calcite package is all or none. // Therefore, we remove rules with names we don't like. for(String black : RULES_TO_AVOID){ if(description.equals(black)){ continue outside; } } builder.add(rule); } builder.add(RuleInstance.FILTER_SET_OP_TRANSPOSE_RULE); builder.add(RuleInstance.PROJECT_REMOVE_RULE); rules = builder.build(); }
/** Delegates function lookup by name to the wrapped JDBC schema. */
@Override
public Collection<Function> getFunctions(String name) {
  return inner.getFunctions(name);
}
/**
 * Creates the JDBC storage plugin: configures a pooled {@link BasicDataSource}
 * from the storage config, derives the SQL dialect from it, and registers a
 * Drill-specific JDBC convention for planning.
 *
 * @param config  storage configuration (driver, URL, optional credentials)
 * @param context Drillbit context this plugin belongs to
 * @param name    plugin name, also used to name the convention
 */
public JdbcStoragePlugin(JdbcStorageConfig config, DrillbitContext context, String name) {
  super(context, name);
  this.config = config;

  final BasicDataSource dataSource = new BasicDataSource();
  dataSource.setDriverClassName(config.getDriver());
  dataSource.setUrl(config.getUrl());
  // Credentials are optional; only set them when present so the driver's
  // defaults are not clobbered with nulls.
  if (config.getUsername() != null) {
    dataSource.setUsername(config.getUsername());
  }
  if (config.getPassword() != null) {
    dataSource.setPassword(config.getPassword());
  }

  this.source = dataSource;
  this.dialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, dataSource);
  this.convention = new DrillJdbcConvention(dialect, name);
}
@Override public Table getTable(String name) { Table table = inner.getTable(name); if (table != null) { return table; } if (!areTableNamesCaseSensitive()) { // Oracle and H2 changes unquoted identifiers to uppercase. table = inner.getTable(name.toUpperCase()); if (table != null) { return table; } // Postgres changes unquoted identifiers to lowercase. return inner.getTable(name.toLowerCase()); } // no table was found. return null; }
/**
 * Pushes a binary {@link HiveUnion} over two {@link HiveJdbcConverter}s below a
 * single converter, so the UNION is executed by the external JDBC source.
 * Assumes {@code matches} already verified both converters target the same endpoint.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCUnionPushDown has been called");

  final HiveUnion union = call.rel(0);
  final HiveJdbcConverter converter1 = call.rel(1);
  final HiveJdbcConverter converter2 = call.rel(2);

  // Build the JDBC-side union directly over both converter inputs.
  final List<RelNode> unionInput = Arrays.asList(converter1.getInput(), converter2.getInput());
  final JdbcUnion jdbcUnion = new JdbcUnion(
      union.getCluster(),
      union.getTraitSet().replace(converter1.getJdbcConvention()),
      unionInput,
      union.all);

  // A single converter now sits on top of the pushed-down union.
  call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcUnion));
}
/**
 * Converts a {@link LogicalFilter} into a {@link JdbcRules.JdbcFilter} carrying
 * this rule's output convention; the input is converted to the same convention.
 */
public RelNode convert(RelNode rel) {
  final LogicalFilter filter = (LogicalFilter) rel;
  final RelNode filterInput = filter.getInput();
  // Convert the child to the JDBC convention before re-parenting it.
  final RelNode convertedInput =
      convert(filterInput, filterInput.getTraitSet().replace(this.out).simplify());
  return new JdbcRules.JdbcFilter(
      rel.getCluster(),
      rel.getTraitSet().replace(this.out),
      convertedInput,
      filter.getCondition());
}
/**
 * Pushes a {@link HiveJoin} over two {@link HiveJdbcConverter}s below a single
 * converter, so the join is executed by the external JDBC source. Assumes
 * {@code matches} already verified both converters target the same endpoint.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCJoinPushDownRule has been called");

  final HiveJoin join = call.rel(0);
  final HiveJdbcConverter converter1 = call.rel(1);
  final RelNode input1 = converter1.getInput();
  final HiveJdbcConverter converter2 = call.rel(2);
  final RelNode input2 = converter2.getInput();

  JdbcJoin jdbcJoin;
  try {
    jdbcJoin = new JdbcJoin(
        join.getCluster(),
        join.getTraitSet().replace(converter1.getJdbcConvention()),
        input1,
        input2,
        join.getCondition(),
        join.getVariablesSet(),
        join.getJoinType());
  } catch (InvalidRelException e) {
    // Join cannot be expressed in the JDBC convention; keep the original plan.
    // Log the exception itself (not just its string form) so the stack trace
    // and cause chain are preserved.
    LOG.warn("Failed to push join down to JDBC", e);
    return;
  }
  call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcJoin));
}
@Override public boolean matches(RelOptRuleCall call) { final HiveUnion union = call.rel(0); final HiveJdbcConverter converter1 = call.rel(1); final HiveJdbcConverter converter2 = call.rel(2); // First we compare the convention if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) { return false; } // Second, we compare the connection string if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) { return false; } // Third, we compare the connection user if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) { return false; } return union.getInputs().size() == 2; }
/**
 * Builds a physical JDBC node from the intermediate prel: captures the input's
 * estimated row count and convention, then generates the SQL string that will be
 * sent to the external database.
 */
public JdbcPrel(RelOptCluster cluster, RelTraitSet traitSet, JdbcIntermediatePrel prel) {
  super(cluster, traitSet);
  final RelNode input = prel.getInput();
  // Cache the planner's row-count estimate for costing.
  rows = input.estimateRowCount(cluster.getMetadataQuery());
  // The input's convention carries the plugin (and therefore the dialect) to use.
  convention = (DrillJdbcConvention) input.getTraitSet().getTrait(ConventionTraitDef.INSTANCE);

  // generate sql for tree.
  final SqlDialect dialect = convention.getPlugin().getDialect();
  final JdbcImplementor jdbcImplementor = new JdbcImplementor(
      dialect,
      (JavaTypeFactory) getCluster().getTypeFactory());
  // SubsetRemover strips planner RelSubset wrappers so the implementor sees a
  // concrete tree.
  final JdbcImplementor.Result result =
      jdbcImplementor.visitChild(0, input.accept(new SubsetRemover()));
  sql = result.asStatement().toSqlString(dialect).getSql();
  rowType = input.getRowType();
}
public String generateSql() { SqlDialect dialect = getJdbcDialect(); final HiveJdbcImplementor jdbcImplementor = new HiveJdbcImplementor(dialect, (JavaTypeFactory) getCluster().getTypeFactory()); Project topProject; if (getInput() instanceof Project) { topProject = (Project) getInput(); } else { // If it is not a project operator, we add it on top of the input // to force generating the column names instead of * while // translating to SQL RelNode nodeToTranslate = getInput(); RexBuilder builder = getCluster().getRexBuilder(); List<RexNode> projects = new ArrayList<>( nodeToTranslate.getRowType().getFieldList().size()); for (int i = 0; i < nodeToTranslate.getRowType().getFieldCount(); i++) { projects.add(builder.makeInputRef(nodeToTranslate, i)); } topProject = new JdbcProject(nodeToTranslate.getCluster(), nodeToTranslate.getTraitSet(), nodeToTranslate, projects, nodeToTranslate.getRowType()); } final HiveJdbcImplementor.Result result = jdbcImplementor.translate(topProject); return result.asStatement().toSqlString(dialect).getSql(); }
@Override public boolean matches(RelOptRuleCall call) { final HiveJoin join = call.rel(0); final RexNode cond = join.getCondition(); final HiveJdbcConverter converter1 = call.rel(1); final HiveJdbcConverter converter2 = call.rel(2); // First we compare the convention if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) { return false; } // Second, we compare the connection string if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) { return false; } // Third, we compare the connection user if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) { return false; } //We do not push cross join if (cond.isAlwaysTrue()) { return false; } return JDBCRexCallValidator.isValidJdbcOperation(cond, converter1.getJdbcDialect()); }