@Override
public boolean matches(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final RelNode filterChild = call.rel(1);
  // If the filter is already on top of a TableScan, we can bail out
  if (filterChild instanceof TableScan) {
    return false;
  }
  HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
  // If this operator has been visited already by the rule,
  // we do not need to apply the optimization
  if (registry != null && registry.getVisited(this).contains(filter)) {
    return false;
  }
  return true;
}
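Several snippets here unwrap a HiveRulesRegistry from the planner context and consult a per-rule visited set. A minimal sketch of such a registry, assuming a rule-keyed multimap; the real Hive class may differ:

import java.util.Set;

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.rel.RelNode;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;

// Hypothetical minimal registry, modeled on how the snippets above use it.
public class HiveRulesRegistry {
  private final SetMultimap<RelOptRule, RelNode> visited = HashMultimap.create();

  // Record that the given rule has already processed this operator.
  public void registerVisited(RelOptRule rule, RelNode operator) {
    visited.put(rule, operator);
  }

  // Operators already processed by the rule; never null, so callers can
  // invoke contains(...) directly as in matches() above.
  public Set<RelNode> getVisited(RelOptRule rule) {
    return visited.get(rule);
  }
}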
protected void apply(RelOptRuleCall call, Project project, Filter filter, TableScan scan) {
  RelOptPlanner planner = call.getPlanner();
  List<RelOptMaterialization> materializations = (planner instanceof VolcanoPlanner)
      ? ((VolcanoPlanner) planner).getMaterializations()
      : ImmutableList.<RelOptMaterialization>of();
  if (!materializations.isEmpty()) {
    RelNode root = project.copy(project.getTraitSet(), Collections.singletonList(
        filter.copy(filter.getTraitSet(), Collections.singletonList((RelNode) scan))));
    // Costing is done in transformTo(), so we call it repeatedly with all applicable
    // materialized views and the cheapest one will be picked
    List<RelOptMaterialization> applicableMaterializations =
        VolcanoPlanner.getApplicableMaterializations(root, materializations);
    for (RelOptMaterialization materialization : applicableMaterializations) {
      List<RelNode> subs = new MaterializedViewSubstitutionVisitor(
          materialization.queryRel, root, relBuilderFactory).go(materialization.tableRel);
      for (RelNode s : subs) {
        call.transformTo(s);
      }
    }
  }
}
@Override
public boolean matches(RelOptRuleCall call) {
  final RelNode node = call.rel(0);
  numberMatches++;
  HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
  // If this operator has been visited already by the rule,
  // we do not need to apply the optimization
  if (registry != null && registry.getVisited(this).contains(node)) {
    return false;
  }
  return true;
}
// Mark the replaced aggregate with zero importance so the planner
// stops exploring it.
call.getPlanner().setImportance(aggregate, 0.0);
// Notify the planner that newOp was derived from the matched node.
call.getPlanner().onCopy(call.rel(0), newOp);
// Prune the superseded operator from the planner's search.
call.getPlanner().setImportance(second, 0.0);
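In context, onCopy() and setImportance() usually appear together at the tail of an onMatch(): the planner is told where the replacement came from, then the superseded node is pruned. A sketch of the idiom; buildReplacement() is a hypothetical stand-in for the rule-specific rewrite:

@Override
public void onMatch(RelOptRuleCall call) {
  final RelNode oldNode = call.rel(0);
  // Hypothetical helper: builds whatever this rule replaces the node with.
  final RelNode newNode = buildReplacement(oldNode);
  // Record lineage so planner listeners and registries treat newNode
  // as a copy of oldNode.
  call.getPlanner().onCopy(oldNode, newNode);
  call.transformTo(newNode);
  // Zero importance asks the planner (effective under Volcano) not to
  // spend further effort on the superseded expression.
  call.getPlanner().setImportance(oldNode, 0.0);
}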
RelNode rChild = join.getRight();
HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
assert registry != null;

RelNode curr = lChild;
lChild = filterFactory.createFilter(lChild, newLeftPredicate);
call.getPlanner().onCopy(curr, lChild);

// Capture the original right child before wrapping it, so onCopy records
// the correct lineage (mirrors the left side).
curr = rChild;
rChild = filterFactory.createFilter(rChild, newRightPredicate);
call.getPlanner().onCopy(curr, rChild);

call.getPlanner().onCopy(join, newJoin);
@Override
public void onMatch(RelOptRuleCall call) {
  final Join join = call.rel(0);
  final HepRelVertex root = (HepRelVertex) call.getPlanner().getRoot();
  if (root.getCurrentRel() != join) {
    // Bail out: the join is not the plan root
    return;
  }
  // The join is the root, but we should always end up with a Project operator
  // on top. We will add it.
  RelBuilder relBuilder = call.builder();
  relBuilder.push(join);
  List<RexNode> identityFields = relBuilder.fields(
      ImmutableBitSet.range(0, join.getRowType().getFieldCount()).asList());
  // force = true keeps the identity Project instead of letting RelBuilder
  // optimize it away.
  relBuilder.project(identityFields, ImmutableList.<String>of(), true);
  call.transformTo(relBuilder.build());
}
HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
assert registry != null;
RexBuilder rB = join.getCluster().getRexBuilder();

RelNode curr = lChild;
lChild = filterFactory.createFilter(
    lChild, newLeftPredicate.accept(new RexReplacer(lChild)));
call.getPlanner().onCopy(curr, lChild);

// Re-point curr at the original right child before it is replaced.
curr = rChild;
rChild = filterFactory.createFilter(
    rChild, newRightPredicate.accept(new RexReplacer(rChild)));
call.getPlanner().onCopy(curr, rChild);

call.getPlanner().onCopy(join, newRel);
// Prune the replaced project from the planner's search.
call.getPlanner().setImportance(project, 0.0);
try {
  final HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan();
  final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
  final String partitionColumnLabel = settings.getFsPartitionColumnLabel();
  call.getPlanner().setImportance(hiveScanRel, 0.0);
} catch (final Exception e) {
  logger.warn("Failed to convert HiveScan to HiveDrillNativeParquetScan", e);
}
HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
assert registry != null;
RexBuilder rB = join.getCluster().getRexBuilder();

RelNode curr = lChild;
lChild = filterFactory.createFilter(lChild, newLeftPredicate);
call.getPlanner().onCopy(curr, lChild);

// Reuse curr for the right side; redeclaring it would be a compile error.
curr = rChild;
rChild = filterFactory.createFilter(rChild, newRightPredicate);
call.getPlanner().onCopy(curr, rChild);

call.getPlanner().onCopy(join, newRel);
// Prune the replaced filter from the planner's search.
call.getPlanner().setImportance(filter, 0.0);
@Override
public void onMatch(RelOptRuleCall call) {
  final ScanPrel scan = call.rel(1);
  final FilterPrel filter = call.rel(0);
  final RexNode condition = filter.getCondition();

  LogicalExpression conditionExp = DrillOptiq.toDrill(
      new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition);
  KafkaGroupScan groupScan = (KafkaGroupScan) scan.getGroupScan();
  logger.info("Partitions ScanSpec before pushdown: " + groupScan.getPartitionScanSpecList());

  KafkaPartitionScanSpecBuilder builder =
      new KafkaPartitionScanSpecBuilder(groupScan, conditionExp);
  List<KafkaPartitionScanSpec> newScanSpec = builder.parseTree();
  builder.close(); // Close the consumer

  // No pushdown
  if (newScanSpec == null) {
    return;
  }
  logger.info("Partitions ScanSpec after pushdown: " + newScanSpec);

  GroupScan newGroupScan = groupScan.cloneWithNewSpec(newScanSpec);
  final ScanPrel newScanPrel = new ScanPrel(scan.getCluster(), filter.getTraitSet(),
      newGroupScan, scan.getRowType(), scan.getTable());
  call.transformTo(filter.copy(filter.getTraitSet(), ImmutableList.of(newScanPrel)));
}
@Override
public void onMatch(RelOptRuleCall call) {
  final RelNode node = call.rel(0);
  numberOnMatch++;
  // If we have fired it already once, we return and the test will fail
  if (numberOnMatch > 1) {
    return;
  }
  // Register that we have visited this operator in this rule
  HiveRulesRegistry registry = call.getPlanner().getContext().unwrap(HiveRulesRegistry.class);
  if (registry != null) {
    registry.registerVisited(this, node);
  }
  // We create a new op if it is the first time we fire the rule
  final RelNode newNode = new DummyNode(node.getCluster(), node.getTraitSet());
  // We register it so we do not fire the rule on it again
  if (registry != null) {
    registry.registerVisited(this, newNode);
  }
  call.transformTo(newNode);
}
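A sketch of how a test could drive such a rule through a HepPlanner, assuming the registry is handed in via the planner context (rule, registry, and root are illustrative names):

HiveRulesRegistry registry = new HiveRulesRegistry();
HepProgram program = new HepProgramBuilder().addRuleInstance(rule).build();
HepPlanner planner = new HepPlanner(program, Contexts.of(registry));
planner.setRoot(root);
RelNode optimized = planner.findBestExp();
// Thanks to the visited-set guard in matches()/onMatch(), the rule fires
// exactly once even though HepPlanner reapplies rules until fixpoint.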
protected void doPushFilterToScan(final RelOptRuleCall call, final FilterPrel filter,
    final ProjectPrel project, final ScanPrel scan, final HBaseGroupScan groupScan,
    final RexNode condition) {
  final LogicalExpression conditionExp = DrillOptiq.toDrill(
      new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition);
  final HBaseFilterBuilder hbaseFilterBuilder = new HBaseFilterBuilder(groupScan, conditionExp);
  final HBaseScanSpec newScanSpec = hbaseFilterBuilder.parseTree();
  if (newScanSpec == null) {
    return; // No filter pushdown ==> no transformation.
  }

  final HBaseGroupScan newGroupsScan = new HBaseGroupScan(groupScan.getUserName(),
      groupScan.getStoragePlugin(), newScanSpec, groupScan.getColumns());
  newGroupsScan.setFilterPushedDown(true);

  final ScanPrel newScanPrel = new ScanPrel(scan.getCluster(), filter.getTraitSet(),
      newGroupsScan, scan.getRowType(), scan.getTable());

  // Depending on whether there is a project in the middle, assign either the
  // scan or a copy of the project to childRel.
  final RelNode childRel = project == null
      ? newScanPrel
      : project.copy(project.getTraitSet(), ImmutableList.of(newScanPrel));

  if (hbaseFilterBuilder.isAllExpressionsConverted()) {
    /*
     * Since we could convert the entire filter condition expression into an HBase filter,
     * we can eliminate the filter operator altogether.
     */
    call.transformTo(childRel);
  } else {
    call.transformTo(filter.copy(filter.getTraitSet(), ImmutableList.of(childRel)));
  }
}
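A sketch of the scan-only caller for this helper: match FilterPrel directly over ScanPrel, guard against re-entry with isFilterPushedDown(), and pass null for the missing project. The operand shape and guard are assumptions inferred from the helper's signature:

@Override
public void onMatch(RelOptRuleCall call) {
  final FilterPrel filter = call.rel(0);
  final ScanPrel scan = call.rel(1);
  final HBaseGroupScan groupScan = (HBaseGroupScan) scan.getGroupScan();
  // Do not fire twice on a scan whose filter was already pushed down.
  if (groupScan.isFilterPushedDown()) {
    return;
  }
  // No intermediate project in this operand shape.
  doPushFilterToScan(call, filter, null, scan, groupScan, filter.getCondition());
}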
try {
  conditionExp = DrillOptiq.toDrill(
      new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition);
} catch (ClassCastException e) {
  return;
}
RelTraitSet newTraits = call.getPlanner().emptyTraitSet();
final LogicalExpression conditionExp = DrillOptiq.toDrill(
    new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition);
final MapRDBFilterBuilder maprdbFilterBuilder = new MapRDBFilterBuilder(groupScan, conditionExp);
final HBaseScanSpec newScanSpec = maprdbFilterBuilder.parseTree();