public void closeOperator() throws HiveException {
  if (operator != null) {
    operator.close(false);
    operator = null;
  }
}
protected static void removeOperator(Operator<?> target, Operator<?> child,
    Operator<?> parent, ParseContext context) {
  for (Operator<?> aparent : target.getParentOperators()) {
    aparent.replaceChild(target, child);
  }
  for (Operator<?> achild : target.getChildOperators()) {
    achild.replaceParent(target, parent);
  }
  target.setChildOperators(null);
  target.setParentOperators(null);
}
private void setOperatorNotSupported(Operator<? extends OperatorDesc> op) {
  OperatorDesc desc = op.getConf();
  Annotation note = AnnotationUtils.getAnnotation(desc.getClass(), Explain.class);
  if (note != null) {
    Explain explainNote = (Explain) note;
    setNodeIssue(explainNote.displayName() + " (" + op.getType() + ") not supported");
  } else {
    setNodeIssue("Operator " + op.getType() + " not supported");
  }
}
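A hypothetical illustration, not taken from the Hive source: setOperatorNotSupported reads the display name from the @Explain annotation on the operator's descriptor class, so a descriptor annotated as sketched below would be reported as "My Custom Op (<operator type>) not supported".

import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc;
import org.apache.hadoop.hive.ql.plan.Explain;

// Hypothetical descriptor, for illustration only: displayName is what the check
// above prints in front of "(<operator type>) not supported".
@Explain(displayName = "My Custom Op")
class MyCustomDesc extends AbstractOperatorDesc {
}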
@Override public String toString() { return getName() + '[' + getIdentifier() + ']'; }
public void removeParents() {
  for (Operator<?> parent : new ArrayList<Operator<?>>(getParentOperators())) {
    removeParent(parent);
  }
}
private Operator<FilterDesc> createFilter(Operator<?> target, Operator<?> parent,
    RowSchema parentRS, ExprNodeDesc filterExpr) {
  Operator<FilterDesc> filter = OperatorFactory.get(parent.getCompilationOpContext(),
      new FilterDesc(filterExpr, false), new RowSchema(parentRS.getSignature()));
  filter.getParentOperators().add(parent);
  filter.getChildOperators().add(target);
  parent.replaceChild(target, filter);
  target.replaceParent(parent, filter);
  return filter;
}
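A hypothetical sanity check, not part of the Hive source, that spells out the wiring createFilter leaves behind: the new FilterOperator ends up between parent and target on both sides of the parent/child links. It is assumed to sit in the same class as createFilter above and uses only methods already shown.

// Hypothetical helper for illustration only.
static void verifyFilterInserted(Operator<?> parent, Operator<?> target,
    Operator<FilterDesc> filter) {
  assert parent.getChildOperators().contains(filter);   // parent now points at the filter
  assert filter.getParentOperators().contains(parent);  // filter reads from the old parent
  assert filter.getChildOperators().contains(target);   // filter feeds the old target
  assert target.getParentOperators().contains(filter);  // target now reads from the filter
}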
// close: defer until every initialized parent has closed, run the operator-specific
// closeOp, publish runtime stats, then cascade the close to the children.
if (!allInitializedParentsAreClosed()) {
  LOG.debug("Not all parent operators are closed. Not closing.");
  return;
}
closeOp(abort);
publishRunTimeStats();
statsMap.put(Counter.RECORDS_OUT_OPERATOR.name() + "_" + getOperatorId(), runTimeRowsWritable);
statsMap.put(getCounterName(Counter.RECORDS_OUT_INTERMEDIATE, hconf), recordCounter);
this.runTimeNumRows = 0;
logStats();
if (childOperators == null) {
  return;
}
for (Operator<? extends OperatorDesc> op : childOperators) {
  op.close(abort);
}
private void startForward(boolean inputFileChangeSenstive, String bigTableBucket) throws Exception {
  for (Operator<?> source : work.getAliasToWork().values()) {
    source.reset();
    // pull rows from the fetch operator and push them into the operator tree
    while (!forwardOp.getDone()) {
      InspectableObject row = fetchOp.getNextRow();
      if (row == null) {
        break;
      }
      forwardOp.process(row.o, 0);
    }
    forwardOp.flush();
    source.close(false);
  }
}
public static void connectOperators(
    Operator<? extends OperatorDesc> operator,
    Operator<? extends OperatorDesc> childOperator) throws HiveException {
  List<Operator<? extends OperatorDesc>> newParentOperators = newOperatorList();
  newParentOperators.addAll(childOperator.getParentOperators());
  newParentOperators.add(operator);
  childOperator.setParentOperators(newParentOperators);

  List<Operator<? extends OperatorDesc>> newChildOperators = newOperatorList();
  newChildOperators.addAll(operator.getChildOperators());
  newChildOperators.add(childOperator);
  operator.setChildOperators(newChildOperators);
}
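A hypothetical usage sketch, not from the Hive source, assumed to sit next to connectOperators above: chaining three operators keeps both sides of every parent/child link consistent, because each call updates the child's parent list and the parent's child list together.

// Hypothetical helper for illustration only.
static void chain(Operator<? extends OperatorDesc> parent,
    Operator<? extends OperatorDesc> middle,
    Operator<? extends OperatorDesc> child) throws HiveException {
  connectOperators(parent, middle); // parent gains middle as child, middle gains parent
  connectOperators(middle, child);  // middle gains child, child gains middle as parent
}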
public static void removeOperator(Operator<?> target, ParseContext context) {
  assert target.getNumParent() == 1 && target.getNumChild() == 1;
  removeOperator(target,
      target.getChildOperators().get(0), target.getParentOperators().get(0), context);
}
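A hypothetical guard around the one-argument removeOperator above, not from the Hive source: the assert only fires when assertions are enabled, so a caller that cannot guarantee the single-parent/single-child shape might check it explicitly first. The wrapper is assumed to live in the same class as removeOperator(target, context).

// Hypothetical wrapper for illustration only.
static boolean spliceIfLinear(Operator<?> target, ParseContext context) {
  if (target.getNumParent() != 1 || target.getNumChild() != 1) {
    return false; // precondition of removeOperator(target, context) not met
  }
  removeOperator(target, context); // rewires parent <-> child around target and unlinks it
  return true;
}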
public static ListSinkOperator replaceFSwithLS(Operator<?> fileSink, String nullFormat) {
  ListSinkDesc desc = new ListSinkDesc(nullFormat);
  ListSinkOperator sink = (ListSinkOperator) OperatorFactory.get(
      fileSink.getCompilationOpContext(), desc);

  sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
  Operator<? extends OperatorDesc> parent = fileSink.getParentOperators().get(0);
  sink.getParentOperators().add(parent);
  parent.replaceChild(fileSink, sink);
  fileSink.setParentOperators(null);
  return sink;
}
operator.setOperatorId(op.getOperatorId());
operator.setOperatorType(op.getType());
task.addToOperatorList(operator);
// record the plan-graph edges from this operator to its children and queue
// unvisited children for traversal
if (op.getChildOperators() != null) {
  org.apache.hadoop.hive.ql.plan.api.Adjacency entry =
      new org.apache.hadoop.hive.ql.plan.api.Adjacency();
  entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE);
  entry.setNode(op.getOperatorId());
  for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) {
    entry.addToChildren(childOp.getOperatorId());
    if (!opsVisited.contains(childOp)) {
      opsToVisit.add(childOp);
    }
  }
}
LOG.trace("End Group"); reducer.endGroup(); reducer.close(abort); ReportStats rps = new ReportStats(rp, jc); reducer.preorderMap(rps);
protected static boolean checkSingleBranchOnly(ReduceSinkOperator cRS, ReduceSinkOperator pRS) {
  Operator<? extends OperatorDesc> parent = cRS.getParentOperators().get(0);
  while (parent != pRS) {
    assert parent.getNumParent() == 1;
    if (parent.getChildOperators().size() > 1) {
      return false;
    }
    parent = parent.getParentOperators().get(0);
  }
  return true;
}
public static void setChildrenCollector(List<Operator<? extends OperatorDesc>> childOperators,
    OutputCollector out) {
  if (childOperators == null) {
    return;
  }
  for (Operator<? extends OperatorDesc> op : childOperators) {
    if (op.getName().equals(ReduceSinkOperator.getOperatorName())) {
      op.setOutputCollector(out);
    } else {
      setChildrenCollector(op.getChildOperators(), out);
    }
  }
}
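A hypothetical usage sketch, not from the Hive source, assumed to sit beside setChildrenCollector above: handing a Hadoop OutputCollector to every ReduceSinkOperator below a map-side root. Other operators are only traversed, not modified.

// Hypothetical helper for illustration only.
static void wireCollector(Operator<? extends OperatorDesc> root, OutputCollector out) {
  // every ReduceSinkOperator below 'root' now emits to 'out'
  setChildrenCollector(root.getChildOperators(), out);
}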
/**
 * Calls initialize on each of the children with outputObjectInspector as the
 * output row format.
 */
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  if (isLogDebugEnabled) {
    LOG.debug("Operator " + id + " " + getName() + " initialized");
  }
  if (childOperators == null || childOperators.isEmpty()) {
    return;
  }
  if (isLogDebugEnabled) {
    LOG.debug("Initializing children of " + id + " " + getName());
  }
  for (int i = 0; i < childOperatorsArray.length; i++) {
    childOperatorsArray[i].initialize(hconf, outputObjInspector, childOperatorsTag[i]);
    if (reporter != null) {
      childOperatorsArray[i].setReporter(reporter);
    }
  }
}
@Override protected void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); }
public RowSchema getSchema() {
  return inpOp.getSchema();
}
reducer.setNextVectorBatchGroupStatus(/* isLastGroupBatch */ true);
reducer.endGroup();
reducer.close(abort);
dummyOp.close(abort);
reducer.preorderMap(rps);
/**
 * Calls initialize on each of the children with outputObjectInspector as the
 * output row format.
 */
protected void initializeChildren(Configuration hconf) throws HiveException {
  state = State.INIT;
  LOG.debug("Operator Initialized: {}", this);
  if (CollectionUtils.isEmpty(childOperators)) {
    return;
  }
  LOG.debug("Initializing Children: {}", this);
  for (int i = 0; i < childOperatorsArray.length; i++) {
    childOperatorsArray[i].initialize(hconf, outputObjInspector, childOperatorsTag[i]);
    if (reporter != null) {
      childOperatorsArray[i].setReporter(reporter);
    }
  }
}
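A hypothetical end-to-end driver sketch, not from the Hive source, tying the lifecycle pieces in this section together: initialize the root once (which, per initializeChildren above, also initializes the children), push rows with process, then close, which cascades once all initialized parents are closed. Only public Operator methods already used in this section appear here.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

// Hypothetical driver, for illustration only.
public final class OperatorLifecycleSketch {

  static void drive(Operator<? extends OperatorDesc> root, Configuration hconf,
      ObjectInspector rootOI, Iterable<Object> rows) throws HiveException {
    root.initialize(hconf, new ObjectInspector[] { rootOI }); // also initializes children
    for (Object row : rows) {
      root.process(row, 0); // tag 0: single input
    }
    root.flush();
    root.close(false); // cascades to children once all initialized parents are closed
  }

  private OperatorLifecycleSketch() {
  }
}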