/**
 * Prepares index statistics for the given filter condition over a database scan.
 * Returns {@code true} only when fresh stats were computed and cached for this
 * condition; returns {@code false} for non-DB scans or already-cached conditions.
 */
public boolean initialize(RexNode condition, DrillScanRelBase scanRel, IndexCallContext context) {
  GroupScan scan = IndexPlanUtils.getGroupScan(scanRel);
  PlannerSettings settings = PrelUtil.getPlannerSettings(scanRel.getCluster().getPlanner());
  rowKeyJoinBackIOFactor = settings.getIndexRowKeyJoinCostFactor();

  // Index planning only applies to database group scans.
  if (!(scan instanceof DbGroupScan)) {
    return false;
  }

  String conditionKey = convertRexToString(condition, scanRel.getRowType());
  if (statsCache.get(conditionKey) != null) {
    // Stats for this exact condition were already computed; nothing to refresh.
    return false;
  }

  IndexCollection indexes = ((DbGroupScan) scan).getSecondaryIndexCollection(scanRel);
  populateStats(condition, indexes, scanRel, context);
  logger.info("index_plan_info: initialize: scanRel #{} and groupScan {} got fulltable {}, statsCache: {}, fiStatsCache: {}",
      scanRel.getId(), System.identityHashCode(scan), fullTableScanPayload, statsCache, fIStatsCache);
  return true;
}
@Override public boolean matches(RelOptRuleCall call) { final DrillScanRel scan = (DrillScanRel) call.rel(1); GroupScan groupScan = scan.getGroupScan(); // this rule is applicable only for Hive based partition pruning if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) { return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown() && !scan.partitionFilterPushdown(); } else { return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown(); } }
@Override public boolean matches(RelOptRuleCall call) { final DrillScanRel scan = (DrillScanRel) call.rel(2); GroupScan groupScan = scan.getGroupScan(); // this rule is applicable only for Hive based partition pruning if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) { return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown() && !scan.partitionFilterPushdown(); } else { return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown(); } }
@Override public void onMatch(RelOptRuleCall call) { final ScanPrel scan = call.rel(1); final FilterPrel filter = call.rel(0); final RexNode condition = filter.getCondition(); LogicalExpression conditionExp = DrillOptiq.toDrill(new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition); KafkaGroupScan groupScan = (KafkaGroupScan) scan.getGroupScan(); logger.info("Partitions ScanSpec before pushdown: " + groupScan.getPartitionScanSpecList()); KafkaPartitionScanSpecBuilder builder = new KafkaPartitionScanSpecBuilder(groupScan, conditionExp); List<KafkaPartitionScanSpec> newScanSpec = null; newScanSpec = builder.parseTree(); builder.close(); //Close consumer //No pushdown if(newScanSpec == null) { return; } logger.info("Partitions ScanSpec after pushdown: " + newScanSpec); GroupScan newGroupScan = groupScan.cloneWithNewSpec(newScanSpec); final ScanPrel newScanPrel = new ScanPrel(scan.getCluster(), filter.getTraitSet(), newGroupScan, scan.getRowType(), scan.getTable()); call.transformTo(filter.copy(filter.getTraitSet(), ImmutableList.of(newScanPrel))); }
final HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan(); final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner()); final String partitionColumnLabel = settings.getFsPartitionColumnLabel();
protected void doPushFilterToScan(final RelOptRuleCall call, final FilterPrel filter, final ProjectPrel project, final ScanPrel scan, final HBaseGroupScan groupScan, final RexNode condition) { final LogicalExpression conditionExp = DrillOptiq.toDrill(new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition); final HBaseFilterBuilder hbaseFilterBuilder = new HBaseFilterBuilder(groupScan, conditionExp); final HBaseScanSpec newScanSpec = hbaseFilterBuilder.parseTree(); if (newScanSpec == null) { return; //no filter pushdown ==> No transformation. } final HBaseGroupScan newGroupsScan = new HBaseGroupScan(groupScan.getUserName(), groupScan.getStoragePlugin(), newScanSpec, groupScan.getColumns()); newGroupsScan.setFilterPushedDown(true); final ScanPrel newScanPrel = new ScanPrel(scan.getCluster(), filter.getTraitSet(), newGroupsScan, scan.getRowType(), scan.getTable()); // Depending on whether is a project in the middle, assign either scan or copy of project to childRel. final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of(newScanPrel)); if (hbaseFilterBuilder.isAllExpressionsConverted()) { /* * Since we could convert the entire filter condition expression into an HBase filter, * we can eliminate the filter operator altogether. */ call.transformTo(childRel); } else { call.transformTo(filter.copy(filter.getTraitSet(), ImmutableList.of(childRel))); } }
conditionExp = DrillOptiq.toDrill(new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition); } catch (ClassCastException e) {
final LogicalExpression conditionExp = DrillOptiq.toDrill(new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, condition); final MapRDBFilterBuilder maprdbFilterBuilder = new MapRDBFilterBuilder(groupScan, conditionExp); final HBaseScanSpec newScanSpec = maprdbFilterBuilder.parseTree();
/**
 * Creates a comparator bound to the given planner (and its settings) and the
 * index-condition-info builder used during comparisons.
 */
public IndexComparator(RelOptPlanner planner, IndexConditionInfo.Builder builder) {
  this.builder = builder;
  this.planner = planner;
  // Cache the settings once rather than re-deriving them per comparison.
  this.settings = PrelUtil.getPlannerSettings(planner);
}
/** Creates a DrillScanRel for a particular GroupScan. */
public DrillScanRel(final RelOptCluster cluster, final RelTraitSet traits, final RelOptTable table,
    final GroupScan groupScan, final RelDataType rowType, final List<SchemaPath> columns,
    boolean partitionFilterPushdown) {
  super(cluster, traits, groupScan, table);
  // Cache planner settings up front; used by cost/row-count estimation.
  this.settings = PrelUtil.getPlannerSettings(cluster.getPlanner());
  this.rowType = rowType;
  this.columns = columns;
  this.partitionFilterPushdown = partitionFilterPushdown;
}
/**
 * Creates a marker over the given input rel; a parse context is derived from
 * the rel's cluster so expressions can be converted during marking.
 */
public IndexableExprMarker(RelNode inputRel) {
  super(true);
  this.inputRel = inputRel;
  this.parserContext = new DrillParseContext(PrelUtil.getPlannerSettings(inputRel.getCluster()));
}
/**
 * Replaces the distribution trait with {@link DrillDistributionTrait#ANY} when
 * the planner is running in single (non-distributed) mode; otherwise returns
 * the trait set unchanged.
 *
 * @param planner the planner whose settings determine single-mode execution
 *                (renamed from the misleading {@code cluster} — it is a
 *                {@code RelOptPlanner}, not a cluster; binary-compatible change)
 * @param set     the trait set to adjust
 * @return the possibly adjusted trait set
 */
public static RelTraitSet fixTraits(RelOptPlanner planner, RelTraitSet set) {
  return getPlannerSettings(planner).isSingleMode() ? set.replace(DrillDistributionTrait.ANY) : set;
}
/**
 * Reports whether the planner behind this rule call is running in single
 * (non-distributed) mode.
 */
public static boolean isSingleMode(RelOptRuleCall call) {
  final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
  return settings.isSingleMode();
}
}
public DrillJoinRelBase(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right, RexNode condition, JoinRelType joinType) { super(cluster, traits, left, right, condition, CorrelationId.setOf(Collections.<String> emptySet()), joinType); this.joinRowFactor = PrelUtil.getPlannerSettings(cluster.getPlanner()).getRowCountEstimateFactor(); }
/**
 * Fires when either memory estimation or hash aggregation is enabled in the
 * planner settings.
 */
@Override
public boolean matches(RelOptRuleCall call) {
  final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
  if (settings.isMemoryEstimationEnabled()) {
    return true;
  }
  return settings.isHashAggEnabled();
}
/** Creates a DrillScan. */
public DrillScanRel(final RelOptCluster cluster, final RelTraitSet traits, final RelOptTable table,
    boolean partitionFilterPushdown) {
  // By default, scan does not support project pushdown.
  // Decision whether push projects into scan will be made solely in DrillPushProjIntoScanRule.
  this(cluster, traits, table, table.getRowType(), getProjectedColumns(table, true), partitionFilterPushdown);
  // NOTE(review): the delegated constructor may already assign this.settings from the
  // same planner — if so, this re-assignment is redundant; verify against the delegate
  // before removing.
  this.settings = PrelUtil.getPlannerSettings(cluster.getPlanner());
}
/**
 * Estimates the row count by asking the group scan for its scan statistics
 * under the current planner settings.
 */
@Override
public double estimateRowCount(RelMetadataQuery mq) {
  final PlannerSettings settings = PrelUtil.getPlannerSettings(getCluster());
  final GroupScan groupScan = this.getGroupScan();
  final double estimatedRows = groupScan.getScanStats(settings).getRecordCount();
  logger.debug("#{}.estimateRowCount get rowCount {} from groupscan {}",
      this.getId(), estimatedRows, System.identityHashCode(groupScan));
  return estimatedRows;
}
/**
 * Delegates the row-count estimate to the underlying group scan's statistics,
 * computed with the planner settings of this rel's cluster.
 */
@Override
public double estimateRowCount(RelMetadataQuery mq) {
  final PlannerSettings plannerSettings = PrelUtil.getPlannerSettings(getCluster());
  final double rows = this.getGroupScan().getScanStats(plannerSettings).getRecordCount();
  logger.debug("#{}.estimateRowCount get rowCount {} from groupscan {}",
      this.getId(), rows, System.identityHashCode(this.getGroupScan()));
  return rows;
}
/**
 * Computes this operator's self cost: CPU scales with the estimated row count
 * times the left input's row size (field count * configured average field
 * width); memory is charged only when correlate columns are copied.
 */
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
  final DrillCostBase.DrillCostFactory costFactory =
      (DrillCostBase.DrillCostFactory) planner.getCostFactory();
  final double rows = estimateRowCount(mq);
  final long avgFieldWidth = PrelUtil.getPlannerSettings(planner).getOptions()
      .getLong(ExecConstants.AVERAGE_FIELD_WIDTH_KEY);
  final double rowSize = left.getRowType().getFieldList().size() * avgFieldWidth;
  final double cpu = rows * rowSize * DrillCostBase.BASE_CPU_COST;
  // Copying correlate columns costs memory only when they are not excluded.
  final double mem = excludeCorrelateColumn ? 0.0 : CORRELATE_MEM_COPY_COST;
  return costFactory.makeCost(rows, cpu, 0, 0, mem);
}
@Override public boolean matches(RelOptRuleCall call) { final DrillScanRel scan = call.rel(2); GroupScan groupScan = scan.getGroupScan(); // this rule is applicable only for parquet based partition pruning if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) { return groupScan instanceof AbstractParquetGroupScan && groupScan.supportsPartitionFilterPushdown() && !scan.partitionFilterPushdown(); } else { return groupScan instanceof AbstractParquetGroupScan && groupScan.supportsPartitionFilterPushdown(); } }