DrillRelOptUtil.getFieldsInformation(scan.getRowType(), project.getProjects()); if (columnInfo == null || Utilities.isStarQuery(columnInfo.getFields()) || !groupScan.canPushdownProjects(columnInfo.getFields())) {
@Override public boolean matches(RelOptRuleCall call) { LimitPrel limitPrel = call.rel(0); ProjectPrel projectPrel = call.rel(1); // pushdown only apply limit but not offset, // so if getFetch() return null no need to run this rule. // Do not push across Project containing CONVERT_FROMJSON for limit 0 queries. For limit 0 queries, this would // mess up the schema since Convert_FromJson() is different from other regular functions in that it only knows // the output schema after evaluation is performed. When input has 0 row, Drill essentially does not have a way // to know the output type. if (!limitPrel.isPushDown() && (limitPrel.getFetch() != null) && (!DrillRelOptUtil.isLimit0(limitPrel.getFetch()) || !DrillRelOptUtil.isProjectOutputSchemaUnknown(projectPrel))) { return true; } return false; } };
@Override public void onMatch(RelOptRuleCall call) { final ProjectPrel project = call.rel(1); final LimitPrel limit = call.rel(0); RelNode child = project.getInput(); final RelNode limitUnderProject = new LimitPrel(child.getCluster(), child.getTraitSet(), child, limit.getOffset(), limit.getFetch()); final RelNode newProject = new ProjectPrel(project.getCluster(), project.getTraitSet(), limitUnderProject, project.getProjects(), project.getRowType()); if (DrillRelOptUtil.isProjectFlatten(project)) { //Preserve limit above the project since Flatten can produce more rows. Also mark it so we do not fire the rule again. child = newProject; final RelNode limitAboveProject = new LimitPrel(child.getCluster(), child.getTraitSet(), child, limit.getOffset(), limit.getFetch(), true); call.transformTo(limitAboveProject); } else { call.transformTo(newProject); } }
@Override public boolean matches(RelOptRuleCall call) { DrillLimitRel limitRel = call.rel(0); DrillProjectRel projectRel = call.rel(1); // pushdown only apply limit but not offset, // so if getFetch() return null no need to run this rule. // Do not push across Project containing CONVERT_FROMJSON for limit 0 queries. For limit 0 queries, this would // mess up the schema since Convert_FromJson() is different from other regular functions in that it only knows // the output schema after evaluation is performed. When input has 0 row, Drill essentially does not have a way // to know the output type. // Cannot pushdown limit and offset in to flatten as long as we don't know data distribution in flattened field if (!limitRel.isPushDown() && (limitRel.getFetch() != null) && (!DrillRelOptUtil.isLimit0(limitRel.getFetch()) || !DrillRelOptUtil.isProjectOutputSchemaUnknown(projectRel)) && !DrillRelOptUtil.isProjectOutputRowcountUnknown(projectRel)) { return true; } return false; }
@Override
public RelNode visit(LogicalSort sort) {
  // A sort carrying LIMIT 0 is what we are scanning for: flag it and stop
  // descending; otherwise continue the traversal.
  if (!DrillRelOptUtil.isLimit0(sort.fetch)) {
    return super.visit(sort);
  }
  contains = true;
  return sort;
}
corr.getRequiredColumns(), corr.getJoinType()); if (!DrillRelOptUtil.isTrivialProject(origProj, true)) { Map<Integer, Integer> mapWithoutCorr = buildMapWithoutCorrColumn(corr, correlationIndex); List<RexNode> outputExprs = DrillRelOptUtil.transformExprs(origProj.getCluster().getRexBuilder(), origProj.getChildExps(), mapWithoutCorr);
protected DrillRel addRenamedProject(DrillRel rel, RelDataType validatedRowType) { RelDataType t = rel.getRowType(); RexBuilder b = rel.getCluster().getRexBuilder(); List<RexNode> projections = Lists.newArrayList(); int projectCount = t.getFieldList().size(); for (int i =0; i < projectCount; i++) { projections.add(b.makeInputRef(rel, i)); } final List<String> fieldNames2 = SqlValidatorUtil.uniquify( validatedRowType.getFieldNames(), SqlValidatorUtil.EXPR_SUGGESTER, rel.getCluster().getTypeFactory().getTypeSystem().isSchemaCaseSensitive()); RelDataType newRowType = RexUtil.createStructType(rel.getCluster().getTypeFactory(), projections, fieldNames2, null); DrillProjectRel topProj = DrillProjectRel.create(rel.getCluster(), rel.getTraitSet(), rel, projections, newRowType); // Add a final non-trivial Project to get the validatedRowType, if child is not project. if (rel instanceof Project && DrillRelOptUtil.isTrivialProject(topProj, true)) { return rel; } else{ return topProj; } }
if (DrillRelOptUtil.findOperators(pred, Collections.emptyList(), BANNED_OPERATORS) == null) { LogicalExpression drillPredicate = DrillOptiq.toDrill( new DrillParseContext(PrelUtil.getPlannerSettings(call.getPlanner())), scan, pred);
public boolean isCompatible(boolean compareNames, boolean allowSubstring) {
  // Every input's row type must be compatible with the union's own row type.
  final RelDataType unionRowType = getRowType();
  for (RelNode input : getInputs()) {
    final boolean compatible = DrillRelOptUtil.areRowTypesCompatible(
        input.getRowType(), unionRowType, compareNames, allowSubstring);
    if (!compatible) {
      return false;
    }
  }
  return true;
}
public static boolean isTrivialProject(Project project, boolean useNamesInIdentityProjCalc) {
  // Either use the name-aware identity check, or fall back to Calcite's
  // standard triviality test from ProjectRemoveRule.
  return useNamesInIdentityProjCalc
      ? containIdentity(project.getProjects(), project.getRowType(), project.getInput().getRowType())
      : ProjectRemoveRule.isTrivial(project);
}
return DrillRelOptUtil.createRename(queryRelNode, tableFieldNames);
@Override
public RelNode visit(RelNode other) {
  // Stop descending at joins, aggregates and unions: a limit 0 below them
  // does not make the whole query a limit 0 query.
  if (other instanceof DrillJoinRelBase
      || other instanceof DrillAggregateRelBase
      || other instanceof DrillUnionRelBase) {
    return other;
  }
  // A limit rel whose fetch is 0 is what we are scanning for.
  if (other instanceof DrillLimitRel
      && DrillRelOptUtil.isLimit0(((DrillLimitRel) other).getFetch())) {
    contains = true;
    return other;
  }
  return super.visit(other);
}
true); //outputProj = true : NONE -> OK_NEW_SCHEMA, also handle expression with NULL type. if (prel instanceof Project && DrillRelOptUtil.isTrivialProject(topProject, true)) { return new ProjectPrel(prel.getCluster(), prel.getTraitSet(),
if (DrillRelOptUtil.findOperators(pred, projRel.getProjects(), BANNED_OPERATORS) == null) { qualifiedPredList.add(pred); } else {
@Override public boolean matches(RelOptRuleCall call) { LimitPrel limitPrel = call.rel(0); ProjectPrel projectPrel = call.rel(1); // pushdown only apply limit but not offset, // so if getFetch() return null no need to run this rule. // Do not push across Project containing CONVERT_FROMJSON for limit 0 queries. For limit 0 queries, this would // mess up the schema since Convert_FromJson() is different from other regular functions in that it only knows // the output schema after evaluation is performed. When input has 0 row, Drill essentially does not have a way // to know the output type. if (!limitPrel.isPushDown() && (limitPrel.getFetch() != null) && (!DrillRelOptUtil.isLimit0(limitPrel.getFetch()) || !DrillRelOptUtil.isProjectOutputSchemaUnknown(projectPrel))) { return true; } return false; } };
ProjectPushInfo projectPushInfo = DrillRelOptUtil.getFieldsInformation(scan.getRowType(), project.getProjects()); if (!canPushProjectIntoScan(scan.getTable(), projectPushInfo)) { return;
@Override public void onMatch(RelOptRuleCall call) { final ProjectPrel project = call.rel(1); final LimitPrel limit = call.rel(0); RelNode child = project.getInput(); final RelNode limitUnderProject = new LimitPrel(child.getCluster(), child.getTraitSet(), child, limit.getOffset(), limit.getFetch()); final RelNode newProject = new ProjectPrel(project.getCluster(), project.getTraitSet(), limitUnderProject, project.getProjects(), project.getRowType()); if (DrillRelOptUtil.isProjectFlatten(project)) { //Preserve limit above the project since Flatten can produce more rows. Also mark it so we do not fire the rule again. child = newProject; final RelNode limitAboveProject = new LimitPrel(child.getCluster(), child.getTraitSet(), child, limit.getOffset(), limit.getFetch(), true); call.transformTo(limitAboveProject); } else { call.transformTo(newProject); } }
DrillRelOptUtil.getFieldsInformation(scan.getRowType(), project.getProjects()); if (columnInfo == null || Utilities.isStarQuery(columnInfo.getFields()) || !groupScan.canPushdownProjects(columnInfo.getFields())) {