Refine search
/**
 * Checks whether {@code op} is the root of a direct DPP branch, i.e. a
 * SelectOperator whose only child is a GroupByOperator whose only child is a
 * SparkPartitionPruningSinkOperator.
 *
 * @param op the candidate root operator
 * @return true iff the exact SEL -> GBY -> DPP-sink chain is present
 */
public static boolean isDirectDPPBranch(Operator<?> op) {
  // Guard-clause style: bail out as soon as the chain shape is violated.
  if (!(op instanceof SelectOperator)
      || op.getChildOperators() == null || op.getChildOperators().size() != 1) {
    return false;
  }
  Operator<?> gby = op.getChildOperators().get(0);
  if (!(gby instanceof GroupByOperator)
      || gby.getChildOperators() == null || gby.getChildOperators().size() != 1) {
    return false;
  }
  return gby.getChildOperators().get(0) instanceof SparkPartitionPruningSinkOperator;
}
public boolean isWithMapjoin() { Operator<?> branchingOp = this.getBranchingOp(); // Check if this is a MapJoin. If so, do not split. for (Operator<?> childOp : branchingOp.getChildOperators()) { if (childOp instanceof ReduceSinkOperator && childOp.getChildOperators().get(0) instanceof MapJoinOperator) { return true; } } return false; }
/** * Return true if contain branch otherwise return false */ public static boolean isInBranch(SparkPartitionPruningSinkOperator op) { Operator<?> curr = op; while (curr.getChildOperators().size() <= 1) { if (curr.getParentOperators() == null || curr.getParentOperators().isEmpty()) { return false; } curr = curr.getParentOperators().get(0); } return true; }
/**
 * Severs the bidirectional link between this operator and {@code parent}.
 * By convention a list that would become empty is replaced with null.
 *
 * @param parent a current parent of this operator (asserted to be present)
 */
public void removeParent(Operator<? extends OperatorDesc> parent) {
  // Unlink 'parent' from our parent list.
  int idxInParents = parentOperators.indexOf(parent);
  assert idxInParents != -1;
  if (parentOperators.size() > 1) {
    parentOperators.remove(idxInParents);
  } else {
    setParentOperators(null);
  }
  // Symmetrically unlink ourselves from the parent's child list.
  int idxInChildren = parent.getChildOperators().indexOf(this);
  assert idxInChildren != -1;
  if (parent.getChildOperators().size() > 1) {
    parent.getChildOperators().remove(idxInChildren);
  } else {
    parent.setChildOperators(null);
  }
}
private static void gatherReduceSinkOpsByInput(Multimap<Operator<?>, ReduceSinkOperator> parentToRsOps, Set<Operator<?>> visited, Set<Operator<?>> ops) { for (Operator<?> op : ops) { // If the RS has other RS siblings, we will add it to be considered in next iteration if (op instanceof ReduceSinkOperator && !visited.contains(op)) { Operator<?> parent = op.getParentOperators().get(0); Set<ReduceSinkOperator> s = new LinkedHashSet<>(); for (Operator<?> c : parent.getChildOperators()) { if (c instanceof ReduceSinkOperator) { s.add((ReduceSinkOperator) c); visited.add(c); } } if (s.size() > 1) { parentToRsOps.putAll(parent, s); } } } }
/**
 * Remove a child and add all of the child's children to the location of the child.
 *
 * @param child the child to splice out; if this operator is not the only parent
 *              of the child there can be unpredictable results
 * @throws SemanticException if the parent/child links are inconsistent
 */
public void removeChildAndAdoptItsChildren(
    Operator<? extends OperatorDesc> child) throws SemanticException {
  int childIndex = childOperators.indexOf(child);
  if (childIndex == -1) {
    throw new SemanticException(
        "Exception when trying to remove partition predicates: fail to find child from parent");
  }

  childOperators.remove(childIndex);
  List<Operator<? extends OperatorDesc>> grandChildren = child.getChildOperators();
  // Fix: the original null-checked before addAll but then iterated
  // child.getChildOperators() unconditionally, NPE-ing when the removed child
  // is a leaf whose child list is null. A null list means nothing to adopt.
  if (grandChildren == null) {
    return;
  }
  if (!grandChildren.isEmpty()) {
    // Splice the grandchildren in at the removed child's former position.
    childOperators.addAll(childIndex, grandChildren);
  }

  for (Operator<? extends OperatorDesc> gc : grandChildren) {
    List<Operator<? extends OperatorDesc>> parents = gc.getParentOperators();
    int index = parents.indexOf(child);
    if (index == -1) {
      throw new SemanticException(
          "Exception when trying to remove partition predicates: fail to find parent from child");
    }
    // Re-point the grandchild's parent slot from the removed child to us.
    parents.set(index, this);
  }
}
private static void removeOperator(Operator<? extends OperatorDesc> operator) { // since removeParent/removeChild updates the childOperators and parentOperators list in place // we need to make a copy of list to iterator over them List<Operator<? extends OperatorDesc>> children = new ArrayList<>(operator.getChildOperators()); List<Operator<? extends OperatorDesc>> parents = new ArrayList<>(operator.getParentOperators()); for (Operator<? extends OperatorDesc> parent : parents) { parent.getChildOperators().addAll(children); parent.removeChild(operator); } for (Operator<? extends OperatorDesc> child : children) { child.getParentOperators().addAll(parents); child.removeParent(operator); } }
/**
 * Replaces every occurrence of {@code child} (identity comparison) in
 * {@code childMultipleParent}'s child list with {@code vectorChild}.
 *
 * @param childMultipleParent the parent whose child list is patched in place
 * @param child the original (non-vectorized) child to replace
 * @param vectorChild the replacement operator
 */
private void fixupOtherParent(
    Operator<? extends OperatorDesc> childMultipleParent,
    Operator<? extends OperatorDesc> child,
    Operator<? extends OperatorDesc> vectorChild) {
  ListIterator<Operator<? extends OperatorDesc>> it =
      childMultipleParent.getChildOperators().listIterator();
  while (it.hasNext()) {
    // Reference equality on purpose: we are swapping a specific node instance.
    if (it.next() == child) {
      it.set(vectorChild);
    }
  }
}
/**
 * Removes the strictly linear chain of {@code depth} operators below
 * {@code currOp} and replaces it with a generated SelectOperator that matches
 * the schema, re-attaching the chain's former children to that Select.
 *
 * NOTE(review): relies on the side effects of genOutputSelectForGroupBy (not
 * visible here) to wire selOp under inputOp — confirm it sets selOp's parent
 * and inputOp's child links.
 *
 * @param currOp the top of the chain to collapse (kept; its subtree is replaced)
 * @param depth  how many single-child levels to walk down
 * @return false if any level forks (> 1 child) or dead-ends; true on success
 */
public boolean removeChildren(Operator<? extends OperatorDesc> currOp, int depth) {
  Operator<? extends OperatorDesc> inputOp = currOp;
  for (int i = 0; i < depth; i++) {
    // If there are more than 1 children at any level, don't do anything
    if ((currOp.getChildOperators() == null)
        || (currOp.getChildOperators().isEmpty())
        || (currOp.getChildOperators().size() > 1)) {
      return false;
    }
    // Descend one level; after the loop currOp is the bottom of the chain.
    currOp = currOp.getChildOperators().get(0);
  }
  // add selectOp to match the schema
  // after that, inputOp is the parent of selOp.
  // Detach the chain: orphan inputOp's direct children both ways.
  for (Operator<? extends OperatorDesc> op : inputOp.getChildOperators()) {
    op.getParentOperators().clear();
  }
  inputOp.getChildOperators().clear();
  Operator<? extends OperatorDesc> selOp = genOutputSelectForGroupBy(inputOp, currOp);
  // update the childOp of selectOp
  // selOp adopts whatever hung below the bottom of the removed chain.
  selOp.setChildOperators(currOp.getChildOperators());
  // update the parentOp
  for (Operator<? extends OperatorDesc> op : currOp.getChildOperators()) {
    op.replaceParent(currOp, selOp);
  }
  return true;
}
/** * Clones using the powers of XML. Do not use unless necessary. * @param roots The roots. * @return The clone. */ public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots) { if (roots.isEmpty()) { return new ArrayList<>(); } ByteArrayOutputStream baos = new ByteArrayOutputStream(4096); CompilationOpContext ctx = roots.get(0).getCompilationOpContext(); serializePlan(roots, baos, true); @SuppressWarnings("unchecked") List<Operator<?>> result = deserializePlan(new ByteArrayInputStream(baos.toByteArray()), roots.getClass(), true); // Restore the context. LinkedList<Operator<?>> newOps = new LinkedList<>(result); while (!newOps.isEmpty()) { Operator<?> newOp = newOps.poll(); newOp.setCompilationOpContext(ctx); List<Operator<?>> children = newOp.getChildOperators(); if (children != null) { newOps.addAll(children); } } return result; }
/**
 * Detaches this operator from {@code parent} on both sides of the link.
 * A list that would become empty is set to null rather than left empty.
 *
 * @param parent the parent to detach from (asserted to be linked)
 */
public void removeParent(Operator<? extends OperatorDesc> parent) {
  // Drop 'parent' from our parent list; last entry collapses the list to null.
  int myIndex = parentOperators.indexOf(parent);
  assert myIndex != -1;
  if (parentOperators.size() != 1) {
    parentOperators.remove(myIndex);
  } else {
    setParentOperators(null);
  }
  // And drop ourselves from the parent's child list under the same convention.
  List<Operator<? extends OperatorDesc>> siblings = parent.getChildOperators();
  int ourIndex = siblings.indexOf(this);
  assert ourIndex != -1;
  if (siblings.size() != 1) {
    siblings.remove(ourIndex);
  } else {
    parent.setChildOperators(null);
  }
}
/**
 * Splices a pass-through operator out of the plan by delegating to the
 * four-argument overload with its single child and single parent.
 *
 * @param target operator with exactly one parent and one child (asserted)
 * @param context the parse context to update
 */
public static void removeOperator(Operator<?> target, ParseContext context) {
  assert target.getNumParent() == 1 && target.getNumChild() == 1;
  Operator<?> soleChild = target.getChildOperators().get(0);
  Operator<?> soleParent = target.getParentOperators().get(0);
  removeOperator(target, soleChild, soleParent, context);
}
/**
 * Remove a child and add all of the child's children to the location of the child.
 *
 * @param child the child to splice out; if this operator is not the only parent
 *              of the child there can be unpredictable results
 * @throws SemanticException if the parent/child links are inconsistent
 */
public void removeChildAndAdoptItsChildren(
    Operator<? extends OperatorDesc> child) throws SemanticException {
  int childIndex = childOperators.indexOf(child);
  if (childIndex == -1) {
    throw new SemanticException(
        "Exception when trying to remove partition predicates: fail to find child from parent");
  }

  childOperators.remove(childIndex);
  List<Operator<? extends OperatorDesc>> grandChildren = child.getChildOperators();
  // Fix: the original guarded addAll against a null list but then iterated the
  // same (possibly null) list, NPE-ing for a leaf child. Null means no
  // grandchildren to adopt, so we are done.
  if (grandChildren == null) {
    return;
  }
  if (!grandChildren.isEmpty()) {
    // Insert the grandchildren where the removed child used to be.
    childOperators.addAll(childIndex, grandChildren);
  }

  for (Operator<? extends OperatorDesc> gc : grandChildren) {
    List<Operator<? extends OperatorDesc>> parents = gc.getParentOperators();
    int index = parents.indexOf(child);
    if (index == -1) {
      throw new SemanticException(
          "Exception when trying to remove partition predicates: fail to find parent from child");
    }
    // Replace the removed child with this operator in the grandchild's parents.
    parents.set(index, this);
  }
}
/**
 * Splices {@code operator} out of the DAG: each parent adopts the operator's
 * children, then the operator is unlinked from both sides.
 *
 * @param operator the operator to remove
 */
private static void removeOperator(Operator<? extends OperatorDesc> operator) {
  // Fix: the original iterated the LIVE parent/child lists while
  // removeChild/removeParent mutate those same lists, which throws
  // ConcurrentModificationException. Iterate over snapshots instead
  // (same approach as the other removeOperator variant in this file).
  List<Operator<? extends OperatorDesc>> children =
      new ArrayList<>(operator.getChildOperators());
  List<Operator<? extends OperatorDesc>> parents =
      new ArrayList<>(operator.getParentOperators());

  for (Operator<? extends OperatorDesc> parent : parents) {
    parent.getChildOperators().addAll(children);
    parent.removeChild(operator);
  }
  for (Operator<? extends OperatorDesc> child : children) {
    child.getParentOperators().addAll(parents);
    child.removeParent(operator);
  }
}
private AppMasterEventOperator findDynamicPartitionBroadcast(Operator<?> parent) { for (Operator<?> op : parent.getChildOperators()) { while (op != null) { if (op instanceof AppMasterEventOperator && op.getConf() instanceof DynamicPruningEventDesc) { // found dynamic partition pruning operator return (AppMasterEventOperator)op; } if (op instanceof ReduceSinkOperator || op instanceof FileSinkOperator) { // crossing reduce sink or file sink means the pruning isn't for this parent. break; } if (op.getChildOperators().size() != 1) { // dynamic partition pruning pipeline doesn't have multiple children break; } op = op.getChildOperators().get(0); } } return null; }
/**
 * Remove an operator branch. When we see a fork, we know it's time to do the removal.
 *
 * @param event the leaf node of which branch to be removed
 * @return the topmost operator of the removed branch (the fork's detached child)
 */
public static Operator<?> removeBranch(Operator<?> event) {
  Operator<?> branchTop = event;
  Operator<?> cursor = event;
  // Climb until the first operator with more than one child — the fork point.
  while (cursor.getChildOperators().size() <= 1) {
    branchTop = cursor;
    cursor = cursor.getParentOperators().get(0);
  }
  // Detach the whole branch at the fork.
  cursor.removeChild(branchTop);
  return branchTop;
}
private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current, OpWalkerInfo owi) throws SemanticException { if (current.getNumChild() == 0) { return null; } if (current.getNumChild() > 1) { // ppd for multi-insert query is not yet implemented // we assume that nothing can is pushed beyond this operator List<Operator<? extends OperatorDesc>> children = Lists.newArrayList(current.getChildOperators()); for (Operator<?> child : children) { ExprWalkerInfo childInfo = owi.getPrunedPreds(child); createFilter(child, childInfo, owi); } return null; } return owi.getPrunedPreds(current.getChildOperators().get(0)); }
/** * Returns an operator given the conf and a list of parent operators. */ public static <T extends OperatorDesc> Operator<T> getAndMakeChild(CompilationOpContext cContext, T conf, List<Operator<? extends OperatorDesc>> oplist) { Operator<T> ret = get(cContext, (Class<T>) conf.getClass()); ret.setConf(conf); if (oplist.size() == 0) { return ret; } // Add the new operator as child of each of the passed in operators for (Operator op : oplist) { List<Operator> children = op.getChildOperators(); children.add(ret); } // add parents for the newly created operator List<Operator<? extends OperatorDesc>> parent = new ArrayList<Operator<? extends OperatorDesc>>(); for (Operator op : oplist) { parent.add(op); } ret.setParentOperators(parent); return ret; }
/**
 * Removes a pass-through operator from the plan: asserts it has exactly one
 * parent and one child, then delegates to the four-argument overload.
 *
 * @param target the operator to splice out
 * @param context the parse context to update
 */
public static void removeOperator(Operator<?> target, ParseContext context) {
  assert target.getNumParent() == 1 && target.getNumChild() == 1;
  // Resolve both neighbors up front, then hand off the actual unlinking.
  Operator<?> onlyChild = target.getChildOperators().get(0);
  Operator<?> onlyParent = target.getParentOperators().get(0);
  removeOperator(target, onlyChild, onlyParent, context);
}
/**
 * Creates the list of internal column names (represented by field nodes,
 * these names are used in the RowResolver and are different from the
 * external column names) that are needed in the subtree. These columns
 * eventually have to be selected from the table scan.
 *
 * @param curOp The root of the operator subtree.
 * @param child The consumer.
 * @return a list of field nodes representing the internal column names.
 */
public List<FieldNode> genColLists(Operator<? extends OperatorDesc> curOp,
    Operator<? extends OperatorDesc> child) throws SemanticException {
  if (curOp.getChildOperators() == null) {
    return null;
  }
  if (!(child instanceof CommonJoinOperator)) {
    return prunedColLists.get(child);
  }
  // For a join consumer, the tag is curOp's position among the join's parents.
  byte tag = (byte) child.getParentOperators().indexOf(curOp);
  return joinPrunedColLists.get(child).get(tag);
}