ExprWalkerInfo ewi = owi.getPrunedPreds(op); .getPrunedPreds((Operator<? extends OperatorDesc>) (op .getChildren().get(0))), owi); owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { owi.addCandidateFilterOp((FilterOperator)op); Map<String, List<ExprNodeDesc>> residual = ewi.getResidualPredicates(true); createFilter(op, residual, owi); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { if (hasUnpushedPredicates) {
/**
 * Merges child predicates into this operator and, when duplicate-filter
 * removal is enabled, materializes any predicates that could not be pushed
 * further as a new Filter operator at this point in the plan.
 *
 * @param nd the operator being visited
 * @param stack walk stack (unused here)
 * @param procCtx the {@link OpWalkerInfo} carrying pushdown state
 * @param nodeOutputs outputs of child processors (unused here)
 * @return always null
 * @throws SemanticException on predicate-merge or filter-creation failure
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  LOG.info("Processing for " + nd.getName() + "("
      + ((Operator) nd).getIdentifier() + ")");
  OpWalkerInfo owi = (OpWalkerInfo) procCtx;
  // Aliases whose predicates may be pushed through this operator;
  // null appears to mean "no alias restriction" -- TODO confirm against
  // getQualifiedAliases.
  Set<String> includes = getQualifiedAliases((Operator<?>) nd, owi);
  boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, includes);
  if (hasUnpushedPredicates && HiveConf.getBoolVar(owi.getParseContext().getConf(),
      HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    if (includes != null || nd instanceof ReduceSinkOperator) {
      // Cannot safely drop the original filters here, so forget the
      // candidates accumulated for removal.
      owi.getCandidateFilterOps().clear();
    } else {
      // Turn the residual (unpushable) predicates into a Filter right
      // above this operator, then clear them so they are not re-emitted.
      ExprWalkerInfo pruned = owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
      Map<String, List<ExprNodeDesc>> residual = pruned.getResidualPredicates(true);
      if (residual != null && !residual.isEmpty()) {
        createFilter((Operator) nd, residual, owi);
        pruned.getNonFinalCandidates().clear();
      }
    }
  }
  return null;
}
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; // The lateral view forward operator has 2 children, a SELECT(*) and // a SELECT(cols) (for the UDTF operator) The child at index 0 is the // SELECT(*) because that's the way that the DAG was constructed. We // only want to get the predicates from the SELECT(*). ExprWalkerInfo childPreds = owi .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren() .get(0)); owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, childPreds); return null; }
/**
 * Finalizes predicate pushdown at the table scan: merges the child's
 * predicates, optionally removes the now-redundant original filters, and
 * creates the pushed-down Filter directly above the scan.
 *
 * @return the result of {@code createFilter} (the new filter's output,
 *         presumably -- confirm against createFilter's contract)
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  LOG.info("Processing for " + nd.getName() + "("
      + ((Operator) nd).getIdentifier() + ")");
  OpWalkerInfo owi = (OpWalkerInfo) procCtx;
  TableScanOperator tsOp = (TableScanOperator) nd;
  // Pull everything the child could push up onto the table scan.
  mergeWithChildrenPred(tsOp, owi, null, null);
  if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
      HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    // remove all the candidate filter operators
    // when we get to the TS
    removeAllCandidates(owi);
  }
  ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp);
  // nonFinalCandidates predicates should be empty
  assert pushDownPreds == null || !pushDownPreds.hasNonFinalCandidates();
  return createFilter(tsOp, pushDownPreds, owi);
}
HiveConf hiveConf = owi.getParseContext().getConf(); pushFilterToStorage = hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { for (FilterOperator fop : owi.getCandidateFilterOps()) { List<Operator<? extends OperatorDesc>> children = fop.getChildOperators(); List<Operator<? extends OperatorDesc>> parents = fop.getParentOperators(); owi.getCandidateFilterOps().clear(); ExprWalkerInfo walkerInfo = owi.getPrunedPreds(op); if (walkerInfo != null) { walkerInfo.getNonFinalCandidates().clear(); owi.putPrunedPreds(output, walkerInfo);
/**
 * Emits the residual (unpushable) predicates as a Filter above {@code nd},
 * but only when duplicate-filter removal is enabled.
 *
 * @return whatever {@code createFilter} returns, or null when the feature
 *         is disabled
 */
protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds,
    OpWalkerInfo owi) throws SemanticException {
  HiveConf conf = owi.getParseContext().getConf();
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    return null;
  }
  return createFilter((Operator) nd, prunePreds.getResidualPredicates(true), owi);
}
}
HiveConf hiveConf = owi.getParseContext().getConf(); pushFilterToStorage = hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { ExprWalkerInfo walkerInfo = owi.getPrunedPreds(op); if (walkerInfo != null) { walkerInfo.getNonFinalCandidates().clear(); owi.putPrunedPreds(output, walkerInfo);
/**
 * Returns the pushdown state of {@code current}'s single child, or null
 * when there is no child or more than one.
 *
 * For multi-child operators (multi-insert), pushdown stops here: each
 * child's pending predicates are materialized as filters instead.
 *
 * @param current the operator whose child state is requested
 * @param owi walk-wide pushdown context
 * @throws SemanticException if filter creation for a child fails
 */
private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current,
    OpWalkerInfo owi) throws SemanticException {
  if (current.getNumChild() == 0) {
    return null;
  }
  if (current.getNumChild() > 1) {
    // ppd for multi-insert query is not yet implemented
    // we assume that nothing can be pushed beyond this operator
    // (copy the child list first: createFilter may mutate the DAG)
    List<Operator<? extends OperatorDesc>> children =
        Lists.newArrayList(current.getChildOperators());
    for (Operator<?> child : children) {
      ExprWalkerInfo childInfo = owi.getPrunedPreds(child);
      createFilter(child, childInfo, owi);
    }
    return null;
  }
  return owi.getPrunedPreds(current.getChildOperators().get(0));
}
OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext);
RowResolver inputRR = owi.getRowResolver(op); HiveConf hiveConf = owi.getParseContext().getConf(); pushFilterToStorage = hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE); owi.put(output, ctx); return output;
owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi); return hasUnpushedPredicates;
/**
 * Detaches every remembered candidate FilterOperator from the plan and
 * empties the candidate list.
 */
private static void removeAllCandidates(OpWalkerInfo owi) {
  List<FilterOperator> candidates = owi.getCandidateFilterOps();
  for (int i = 0; i < candidates.size(); i++) {
    removeOperator(candidates.get(i));
  }
  candidates.clear();
}
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; Set<String> aliases = owi.getRowResolver(nd).getTableNames(); boolean ignoreAliases = false; if (aliases.size() == 1 && aliases.contains("")) { // Reduce sink of group by operator ignoreAliases = true; } mergeWithChildrenPred(nd, owi, null, aliases, ignoreAliases); return null; }
/**
 * Forwards the pruned predicates of the SELECT(*) child to the lateral
 * view forward operator so they can continue moving down the plan.
 *
 * @return always null
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  LOG.info("Processing for " + nd.getName() + "("
      + ((Operator) nd).getIdentifier() + ")");
  OpWalkerInfo owi = (OpWalkerInfo) procCtx;
  // The lateral view forward operator has 2 children, a SELECT(*) and
  // a SELECT(cols) (for the UDTF operator) The child at index 0 is the
  // SELECT(*) because that's the way that the DAG was constructed. We
  // only want to get the predicates from the SELECT(*).
  ExprWalkerInfo childPreds = owi
      .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
      .get(0));
  owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, childPreds);
  return null;
}
/**
 * When duplicate-filter removal is enabled, creates a Filter above
 * {@code nd} from the residual (unpushable) predicates; otherwise no-op.
 *
 * @param nd operator the residual filter should sit above
 * @param prunePreds pushdown state holding the residual predicates
 * @param owi walk-wide pushdown context
 * @return the result of {@code createFilter}, or null when disabled
 */
protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds,
    OpWalkerInfo owi) throws SemanticException {
  if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
      HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    // getResidualPredicates(true) -- presumably clears/consumes the
    // residuals as it returns them; confirm against ExprWalkerInfo.
    return createFilter((Operator)nd, prunePreds.getResidualPredicates(true), owi);
  }
  return null;
}
}
HiveConf hiveConf = owi.getParseContext().getConf(); pushFilterToStorage = hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { ExprWalkerInfo walkerInfo = owi.getPrunedPreds(op); if (walkerInfo != null) { walkerInfo.getNonFinalCandidates().clear(); owi.putPrunedPreds(output, walkerInfo);
/**
 * Runs the default processing, then materializes any final candidate
 * predicates as a Filter above this operator and clears them so they are
 * not pushed further.
 *
 * @return always null
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  super.process(nd, stack, procCtx, nodeOutputs);
  OpWalkerInfo owi = (OpWalkerInfo) procCtx;
  ExprWalkerInfo prunedPred = owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
  if (prunedPred == null || !prunedPred.hasAnyCandidates()) {
    return null;
  }
  // NOTE: candidates is the live map inside prunedPred; it must be
  // cleared only AFTER createFilter has consumed it.
  Map<String, List<ExprNodeDesc>> candidates = prunedPred.getFinalCandidates();
  createFilter((Operator)nd, prunedPred, owi);
  candidates.clear();
  return null;
}
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; TableScanOperator tsOp = (TableScanOperator) nd; mergeWithChildrenPred(tsOp, owi, null, null); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { // remove all the candidate filter operators // when we get to the TS removeAllCandidates(owi); } ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp); // nonFinalCandidates predicates should be empty assert pushDownPreds == null || !pushDownPreds.hasNonFinalCandidates(); return createFilter(tsOp, pushDownPreds, owi); }
OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext);
owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi); return hasUnpushedPredicates;