// Fragment of the sink's getAlternativePlans(CostEstimator): gathers the
// predecessor's alternative plans, wraps each in a SinkPlanNode candidate,
// costs it, and prunes the candidates.
// NOTE(review): `c` and `node` are not defined in this excerpt — presumably the
// input channel and the candidate plan node from the surrounding loop; confirm
// against the full method body.
List<? extends PlanNode> subPlans = getPredecessorNode().getAlternativePlans(estimator);
List<PlanNode> outputPlans = new ArrayList<PlanNode>();
// sink parallelism vs. the parallelism of its input
final int parallelism = getParallelism();
final int inDop = getPredecessorNode().getParallelism();
outputPlans.add(new SinkPlanNode(this, "DataSink ("+this.getOperator().getName()+")" ,c));
estimator.costOperator(node);
// keep only the non-dominated plan alternatives
prunePlanAlternatives(outputPlans);
@Override public void computeUnclosedBranchStack() { if (this.openBranches != null) { return; } // we need to track open branches even in the sinks, because they get "closed" when // we build a single "root" for the data flow plan addClosedBranches(getPredecessorNode().closedBranchingNodes); this.openBranches = getPredecessorNode().getBranchesForParent(this.input); }
@Override public void setInput(Map<Operator<?>, OptimizerNode> contractToNode, ExecutionMode defaultExchangeMode) { Operator<?> children = getOperator().getInput(); final OptimizerNode pred; final DagConnection conn; pred = contractToNode.get(children); conn = new DagConnection(pred, this, defaultExchangeMode); // create the connection and add it this.input = conn; pred.addOutgoingConnection(conn); }
/**
 * Computes the estimated outputs for the data sink. Since the sink does not
 * modify anything, it simply copies the output estimates from its direct
 * predecessor.
 *
 * @param statistics The data statistics (not consulted by the sink).
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
	final OptimizerNode predecessor = getPredecessorNode();
	this.estimatedNumRecords = predecessor.getEstimatedNumRecords();
	this.estimatedOutputSize = predecessor.getEstimatedOutputSize();
}
// Test fragment: walks the optimizer DAG upwards from the sinks and asserts
// that none of the sink-side connections breaks the pipeline, and that the
// direct sink shares the map node as its predecessor.
// NOTE(review): `sinks`, `sinkAfterFilter`, and `sinkAfterJoin` are declared
// outside this excerpt — confirm in the enclosing test method.
DataSinkNode sinkDirect = sinks.get(2);
SingleInputNode filterNode = (SingleInputNode) sinkAfterFilter.getPredecessorNode();
SingleInputNode mapNode = (SingleInputNode) filterNode.getPredecessorNode();
TwoInputNode joinNode = (TwoInputNode) sinkAfterJoin.getPredecessorNode();
SingleInputNode joinInput = (SingleInputNode) joinNode.getSecondPredecessorNode();
assertFalse(sinkAfterFilter.getInputConnection().isBreakingPipeline());
assertFalse(sinkAfterJoin.getInputConnection().isBreakingPipeline());
assertFalse(sinkDirect.getInputConnection().isBreakingPipeline());
assertEquals(mapNode, sinkDirect.getPredecessorNode());
// Translate a data sink operator into its optimizer node and register it
// in the list of sinks.
// NOTE(review): `c` (the operator being translated) and `n` (the resulting
// node handed back to the caller) are declared outside this excerpt.
DataSinkNode dsn = new DataSinkNode((GenericDataSinkBase<?>) c);
this.sinks.add(dsn);
n = dsn;
/**
 * Declares the properties this sink is interested in receiving from its input:
 * any global partitioning, plus the sink's requested local order, if one is set.
 *
 * @param estimator The cost estimator (not consulted here).
 */
@Override
public void computeInterestingPropertiesForInputs(CostEstimator estimator) {
	final InterestingProperties interesting = new InterestingProperties();

	// any global partitioning is interesting to the sink
	interesting.addGlobalProperties(new RequestedGlobalProperties());

	// request the sink's local order, when one is defined
	final RequestedLocalProperties orderProps = new RequestedLocalProperties();
	final Ordering localOrder = getOperator().getLocalOrder();
	if (localOrder != null) {
		orderProps.setOrdering(localOrder);
	}
	interesting.addLocalProperties(orderProps);

	this.input.setInterestingProperties(interesting);
}
/**
 * Accepts a visitor in pre-order: visits this sink first, then descends into
 * its predecessor, then post-visits this sink.
 *
 * @param visitor The visitor to apply to this node and its inputs.
 * @throws CompilerException If the sink has no input set.
 */
@Override
public void accept(Visitor<OptimizerNode> visitor) {
	if (visitor.preVisit(this)) {
		if (getPredecessorNode() != null) {
			getPredecessorNode().accept(visitor);
		} else {
			// fix: the original threw a message-less exception, giving the
			// caller no hint which node was broken or why
			throw new CompilerException("The data sink has no input.");
		}
		visitor.postVisit(this);
	}
}
}
// Test fragment: resolves the optimizer nodes upstream of three sinks
// (reduce, flatMap-after-join, coGroup) and asserts that the sink input
// connections do not break the pipeline.
// NOTE(review): `sinks`, `sinkAfterReduce`, and `sinkAfterFlatMap` are
// declared outside this excerpt — confirm in the enclosing test method.
DataSinkNode sinkAfterCoGroup = sinks.get(2);
SingleInputNode reduceNode = (SingleInputNode) sinkAfterReduce.getPredecessorNode();
SingleInputNode mapNode = (SingleInputNode) reduceNode.getPredecessorNode();
SingleInputNode flatMapNode = (SingleInputNode) sinkAfterFlatMap.getPredecessorNode();
TwoInputNode joinNode = (TwoInputNode) flatMapNode.getPredecessorNode();
SingleInputNode filterNode = (SingleInputNode) joinNode.getSecondPredecessorNode();
TwoInputNode coGroupNode = (TwoInputNode) sinkAfterCoGroup.getPredecessorNode();
SingleInputNode otherReduceNode = (SingleInputNode) coGroupNode.getSecondPredecessorNode();
assertFalse(sinkAfterReduce.getInputConnection().isBreakingPipeline());
assertFalse(sinkAfterFlatMap.getInputConnection().isBreakingPipeline());
assertFalse(sinkAfterCoGroup.getInputConnection().isBreakingPipeline());
// Translate a data sink operator into its optimizer node and register it
// in the list of sinks.
// NOTE(review): `c` (the operator being translated) and `n` (the resulting
// node handed back to the caller) are declared outside this excerpt.
DataSinkNode dsn = new DataSinkNode((GenericDataSinkBase<?>) c);
this.sinks.add(dsn);
n = dsn;
// Fragment of the sink's getAlternativePlans(CostEstimator): gathers the
// predecessor's alternative plans, wraps each in a SinkPlanNode candidate,
// costs it, and prunes the candidates.
// NOTE(review): `c` and `node` are not defined in this excerpt — presumably the
// input channel and the candidate plan node from the surrounding loop; confirm
// against the full method body.
List<? extends PlanNode> subPlans = getPredecessorNode().getAlternativePlans(estimator);
List<PlanNode> outputPlans = new ArrayList<PlanNode>();
// sink parallelism vs. the parallelism of its input
final int parallelism = getParallelism();
final int inDop = getPredecessorNode().getParallelism();
outputPlans.add(new SinkPlanNode(this, "DataSink ("+this.getOperator().getName()+")" ,c));
estimator.costOperator(node);
// keep only the non-dominated plan alternatives
prunePlanAlternatives(outputPlans);
@Override public void computeUnclosedBranchStack() { if (this.openBranches != null) { return; } // we need to track open branches even in the sinks, because they get "closed" when // we build a single "root" for the data flow plan addClosedBranches(getPredecessorNode().closedBranchingNodes); this.openBranches = getPredecessorNode().getBranchesForParent(this.input); }
// Test fragment: extracts the output file path from the sink's
// TextOutputFormat and checks it matches one of the expected sink paths
// (removing it so each path may only be matched once).
// NOTE(review): `n` (the plan node under inspection) and `allSinks` (the set
// of expected paths) are declared outside this excerpt.
String path = ((TextOutputFormat<String>)n.getSinkNode().getOperator()
	.getFormatWrapper().getUserCodeObject()).getOutputFilePath().toString();
Assert.assertTrue("Invalid data sink.", allSinks.remove(path));
/**
 * Computes the estimated outputs for the data sink. Since the sink does not
 * modify anything, it simply copies the output estimates from its direct
 * predecessor.
 *
 * @param statistics The data statistics (not consulted by the sink).
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
	final OptimizerNode predecessor = getPredecessorNode();
	this.estimatedNumRecords = predecessor.getEstimatedNumRecords();
	this.estimatedOutputSize = predecessor.getEstimatedOutputSize();
}
// Test fragment: resolves the nodes upstream of a sink and asserts none of
// the connections breaks the pipeline.
// NOTE(review): `sinkNode` and `filterNode` are not declared in this excerpt;
// in particular `mapNode` is derived from `filterNode`, which never appears
// above — verify this is defined earlier in the test and not a copy-paste slip
// for `keyExtractorNode`.
SingleInputNode reduceNode = (SingleInputNode) sinkNode.getPredecessorNode();
SingleInputNode keyExtractorNode = (SingleInputNode) reduceNode.getPredecessorNode();
SingleInputNode mapNode = (SingleInputNode) filterNode.getPredecessorNode();
assertFalse(sinkNode.getInputConnection().isBreakingPipeline());
assertFalse(reduceNode.getIncomingConnection().isBreakingPipeline());
assertFalse(keyExtractorNode.getIncomingConnection().isBreakingPipeline());
// Translate a data sink operator into its optimizer node and register it
// in the list of sinks.
// NOTE(review): `c` (the operator being translated) and `n` (the resulting
// node handed back to the caller) are declared outside this excerpt.
DataSinkNode dsn = new DataSinkNode((GenericDataSinkBase<?>) c);
this.sinks.add(dsn);
n = dsn;
// Fragment of the sink's getAlternativePlans(CostEstimator): gathers the
// predecessor's alternative plans, wraps each in a SinkPlanNode candidate,
// costs it, and prunes the candidates.
// NOTE(review): `c` and `node` are not defined in this excerpt — presumably the
// input channel and the candidate plan node from the surrounding loop; confirm
// against the full method body.
List<? extends PlanNode> subPlans = getPredecessorNode().getAlternativePlans(estimator);
List<PlanNode> outputPlans = new ArrayList<PlanNode>();
// sink parallelism vs. the parallelism of its input
final int parallelism = getParallelism();
final int inDop = getPredecessorNode().getParallelism();
outputPlans.add(new SinkPlanNode(this, "DataSink ("+this.getOperator().getName()+")" ,c));
estimator.costOperator(node);
// keep only the non-dominated plan alternatives
prunePlanAlternatives(outputPlans);
@Override public void computeUnclosedBranchStack() { if (this.openBranches != null) { return; } // we need to track open branches even in the sinks, because they get "closed" when // we build a single "root" for the data flow plan addClosedBranches(getPredecessorNode().closedBranchingNodes); this.openBranches = getPredecessorNode().getBranchesForParent(this.input); }
@Override public void setInput(Map<Operator<?>, OptimizerNode> contractToNode, ExecutionMode defaultExchangeMode) { Operator<?> children = getOperator().getInput(); final OptimizerNode pred; final DagConnection conn; pred = contractToNode.get(children); conn = new DagConnection(pred, this, defaultExchangeMode); // create the connection and add it this.input = conn; pred.addOutgoingConnection(conn); }
/**
 * Computes the estimated outputs for the data sink. Since the sink does not
 * modify anything, it simply copies the output estimates from its direct
 * predecessor.
 *
 * @param statistics The data statistics (not consulted by the sink).
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
	final OptimizerNode predecessor = getPredecessorNode();
	this.estimatedNumRecords = predecessor.getEstimatedNumRecords();
	this.estimatedOutputSize = predecessor.getEstimatedOutputSize();
}