protected Fields getOutgoingFields()
  {
  return ( (ScopedElement) unwind( next ).getFlowElement() ).resolveIncomingOperationPassThroughFields( outgoingScopes.get( 0 ) );
  }
protected void setScopes()
  {
  Collection<Duct> ducts = getAllDucts();

  for( Duct duct : ducts )
    {
    if( !( duct instanceof ElementDuct ) )
      continue;

    ElementDuct elementDuct = (ElementDuct) duct;

    // get the actual incoming/outgoing scopes for the full node as we need the total number of branches
    elementDuct.getIncomingScopes().addAll( node.getPreviousScopes( elementDuct.getFlowElement() ) );
    elementDuct.getOutgoingScopes().addAll( node.getNextScopes( elementDuct.getFlowElement() ) );
    }
  }
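// Note (assumption, for illustration): getOutgoingFields() above reads
// outgoingScopes.get( 0 ), so it relies on setScopes() having already copied the
// node-level scopes into each ElementDuct; both scope lists start out empty.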
protected void setTraps()
  {
  Collection<Duct> ducts = getAllDucts();

  for( Duct duct : ducts )
    {
    if( !( duct instanceof ElementDuct ) )
      continue;

    ElementDuct elementDuct = (ElementDuct) duct;
    FlowElement flowElement = elementDuct.getFlowElement();

    Set<String> branchNames = new TreeSet<String>();

    if( flowElement instanceof Pipe )
      branchNames.add( ( (Pipe) flowElement ).getName() );
    else if( flowElement instanceof Tap )
      branchNames.addAll( getTapBranchNamesFor( duct ) );
    else
      throw new IllegalStateException( "unexpected duct type: " + duct.getClass().getCanonicalName() );

    elementDuct.setBranchNames( branchNames );

    for( String branchName : branchNames )
      {
      Tap trap = node.getTrap( branchName );

      if( trap != null )
        {
        FlowProcess elementFlowProcess = new ElementFlowProcess( flowProcess, trap.getConfigDef() );
        elementDuct.setTrapHandler( new TrapHandler( elementFlowProcess, flowElement, trap, branchName ) );
        break;
        }
      }

    if( !elementDuct.hasTrapHandler() )
      elementDuct.setTrapHandler( new TrapHandler( flowProcess ) );
    }
  }
/**
 * Returns a Set as a given tap may be bound to multiple branches.
 *
 * @param duct
 * @return
 */
private Set<String> getTapBranchNamesFor( Duct duct )
  {
  if( ( (Tap) ( (ElementDuct) duct ).getFlowElement() ).isTemporary() )
    return Collections.emptySet();

  if( duct instanceof SourceStage )
    return node.getSourceTapNames( (Tap) ( (SourceStage) duct ).getFlowElement() );
  else if( duct instanceof SinkStage )
    return node.getSinkTapNames( (Tap) ( (SinkStage) duct ).getFlowElement() );
  else
    throw new IllegalStateException( "duct does not wrap a Tap: " + duct.getClass().getCanonicalName() );
  }
LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement()); LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
@Override
public void open(Configuration config) {

	try {
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		if(sources.size() != 1) {
			throw new RuntimeException("FlowNode for EachMapper may only have a single source");
		}
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof Boundary)) {
			throw new RuntimeException("Source of EachMapper must be a single Boundary");
		}
		Boundary source = (Boundary)sourceElement;

		streamGraph = new EachStreamGraph( currentProcess, flowNode, source );
		sourceStage = this.streamGraph.getSourceStage();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if( throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException( "internal error during EachMapper configuration", throwable );
	}
}
@Override
public void open(Configuration config) {

	this.calledPrepare = false;

	try {
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		if(sources.size() != 1) {
			throw new RuntimeException("FlowNode for GroupByReducer may only have a single source");
		}
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof GroupBy)) {
			throw new RuntimeException("Source of GroupByReducer must be a GroupBy");
		}
		GroupBy source = (GroupBy)sourceElement;

		streamGraph = new GroupByStreamGraph( currentProcess, flowNode, source );
		groupSource = this.streamGraph.getGroupSource();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if( throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException( "internal error during GroupByReducer configuration", throwable );
	}
}
@Override
public void open(Configuration config) {

	this.calledPrepare = false;

	try {
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		if(sources.size() != 1) {
			throw new RuntimeException("FlowNode for CoGroupBufferReducer may only have a single CoGroup source");
		}
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof CoGroup)) {
			throw new RuntimeException("Source of CoGroupBufferReducer must be a CoGroup");
		}
		CoGroup source = (CoGroup)sourceElement;

		streamGraph = new CoGroupBufferReduceStreamGraph( currentProcess, flowNode, source );
		groupSource = this.streamGraph.getGroupSource();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if (throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException("internal error during CoGroupBufferReducer configuration", throwable);
	}
}
@Override
public void open(Configuration config) {

	try {
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		// pick one (arbitrary) source
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof Boundary)) {
			throw new RuntimeException("Source of HashJoinMapper must be a boundary");
		}
		Boundary source = (Boundary)sourceElement;

		streamGraph = new HashJoinMapperStreamGraph( currentProcess, flowNode, source );
		sourceStage = this.streamGraph.getSourceStage();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if( throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException( "internal error during HashJoinMapper configuration", throwable );
	}
}
@Override
public void open(Configuration config) {

	this.calledPrepare = false;

	try {
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		if(sources.size() != 1) {
			throw new RuntimeException("FlowNode for CoGroupReducer may only have a single source");
		}
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof CoGroup)) {
			throw new RuntimeException("Source of CoGroupReducer must be a CoGroup");
		}
		CoGroup source = (CoGroup)sourceElement;

		streamGraph = new CoGroupReduceStreamGraph( currentProcess, flowNode, source );
		groupSource = this.streamGraph.getGroupSource();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if (throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException("internal error during CoGroupReducer configuration", throwable);
	}
}
@Override
public void open(Configuration config) {

	try {
		joinedTuples = new Tuple[numJoinInputs];
		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		// pick one (arbitrary) source
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof Boundary)) {
			throw new RuntimeException("Source of NaryHashJoinJoiner must be a boundary");
		}
		Boundary source = (Boundary)sourceElement;

		streamGraph = new HashJoinStreamGraph( currentProcess, flowNode, source );
		sourceStage = this.streamGraph.getSourceStage();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if( throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException( "internal error during NaryHashJoinJoiner configuration", throwable );
	}

	this.prepareCalled = false;
}
LOG.info( "sourcing from: " + ( (ElementDuct) head ).getFlowElement() ); LOG.info( "sinking to: " + ( (ElementDuct) tail ).getFlowElement() );
LOG.info( "sourcing from: {} streamed: {}, id: {}", ( (ElementDuct) head ).getFlowElement(), head == streamedHead, FlowElements.id( ( (ElementDuct) head ).getFlowElement() ) ); LOG.info( "sinking to: {}, id: {}", ( (ElementDuct) tail ).getFlowElement(), FlowElements.id( ( (ElementDuct) tail ).getFlowElement() ) ); logMemory( LOG, "mem after accumulating source: " + ( (ElementDuct) next ).getFlowElement() + ", " );
LOG.info( "sourcing from: {} streamed: {}, id: {}", ( (ElementDuct) head ).getFlowElement(), head == streamedHead, FlowElements.id( ( (ElementDuct) head ).getFlowElement() ) ); LOG.info( "sinking to: {}, id: {}", ( (ElementDuct) tail ).getFlowElement(), FlowElements.id( ( (ElementDuct) tail ).getFlowElement() ) ); logMemory( LOG, "mem after accumulating source: " + ( (ElementDuct) next ).getFlowElement() + ", " );
LOG.info( "sourcing from: " + ( (ElementDuct) head ).getFlowElement() ); LOG.info( "sinking to: " + ( (ElementDuct) tail ).getFlowElement() );
@Override
public void open(Configuration config) {

	try {
		this.joinedTuples = new Tuple[2];
		this.joinInput = new Tuple2<>(new Tuple(), this.joinedTuples);

		currentProcess = new FlinkFlowProcess(FlinkConfigConverter.toHadoopConfig(config), getRuntimeContext(), flowNode.getID());

		Set<FlowElement> sources = flowNode.getSourceElements();
		// pick one (arbitrary) source
		FlowElement sourceElement = sources.iterator().next();
		if(!(sourceElement instanceof Boundary)) {
			throw new RuntimeException("Source of BinaryHashJoinJoiner must be a boundary");
		}
		Boundary source = (Boundary)sourceElement;

		streamGraph = new HashJoinStreamGraph( currentProcess, flowNode, source );
		sourceStage = this.streamGraph.getSourceStage();

		for( Duct head : streamGraph.getHeads() ) {
			LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement());
		}
		for( Duct tail : streamGraph.getTails() ) {
			LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());
		}
	}
	catch( Throwable throwable ) {
		if( throwable instanceof CascadingException) {
			throw (CascadingException) throwable;
		}
		throw new FlowException( "internal error during BinaryHashJoinJoiner configuration", throwable );
	}

	this.prepareCalled = false;
}
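// The open() methods above all repeat the same validation: exactly one source
// element of a known type. A minimal sketch of a helper that could factor this
// out; the method name and placement are hypothetical, not part of the source.
private static <T extends FlowElement> T getSingleSourceElement(FlowNode flowNode, Class<T> expectedType, String operatorName) {
	Set<FlowElement> sources = flowNode.getSourceElements();
	if(sources.size() != 1) {
		throw new RuntimeException("FlowNode for " + operatorName + " may only have a single source");
	}
	FlowElement sourceElement = sources.iterator().next();
	if(!expectedType.isInstance(sourceElement)) {
		throw new RuntimeException("Source of " + operatorName + " must be a " + expectedType.getSimpleName());
	}
	return expectedType.cast(sourceElement);
}

// Usage sketch, e.g. in GroupByReducer.open():
//   GroupBy source = getSingleSourceElement(flowNode, GroupBy.class, "GroupByReducer");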
LOG.info( "sourcing from: " + ( (ElementDuct) head ).getFlowElement() ); LOG.info( "sinking to: " + ( (ElementDuct) tail ).getFlowElement() );
LOG.info( "sourcing from: " + ( (ElementDuct) head ).getFlowElement() ); LOG.info( "sinking to: " + ( (ElementDuct) tail ).getFlowElement() );
LOG.info("sourcing from: " + ((ElementDuct) head).getFlowElement()); LOG.info("sinking to: " + ((ElementDuct) tail).getFlowElement());