/** Returns this step's identifier, which is shared with its underlying flow node. */
@Override
public String getID()
  {
  String nodeID = flowNode.getID();

  return nodeID;
  }
/**
 * Blocks until every {@link LogicalInput} in the given map is ready, then logs
 * how long the wait took.
 *
 * @param inputMap logical inputs keyed by name; only the values are used here
 * @throws InterruptedException if the current thread is interrupted while waiting
 */
protected void waitForInputsReady( Map<String, LogicalInput> inputMap ) throws InterruptedException
  {
  long startMillis = System.currentTimeMillis();

  HashSet<Input> pendingInputs = new HashSet<Input>( inputMap.values() );

  getContext().waitForAllInputsReady( pendingInputs );

  long elapsedMillis = System.currentTimeMillis() - startMillis;

  LOG.info( "flow node id: {}, all {} inputs ready in: {}", flowNode.getID(), pendingInputs.size(), Util.formatDurationHMSms( elapsedMillis ) );
  }
/**
 * Waits for all logical inputs of this node to become ready and records the
 * elapsed wait time in the log.
 *
 * @param inputMap logical inputs keyed by name; keys are not consulted
 * @throws InterruptedException if interrupted while waiting for readiness
 */
protected void waitForInputsReady( Map<String, LogicalInput> inputMap ) throws InterruptedException
  {
  long waitStart = System.currentTimeMillis();

  HashSet<Input> readySet = new HashSet<Input>( inputMap.values() );

  getContext().waitForAllInputsReady( readySet );

  LOG.info( "flow node id: {}, all {} inputs ready in: {}", flowNode.getID(), readySet.size(), Util.formatDurationHMSms( System.currentTimeMillis() - waitStart ) );
  }
/**
 * Builds a display name of the form {@code [flowID/stepID/nodeID] flowName/name},
 * where each ID is truncated to at most {@code idLength} characters.
 * <p>
 * {@code idLength} is capped at {@link Util#ID_LENGTH}. Each ID is additionally
 * clamped to its own length so that a short ID cannot trigger a
 * {@link StringIndexOutOfBoundsException} (the original called
 * {@code substring( 0, idLength )} unguarded).
 *
 * @param flowNode the node whose ID appears last in the bracketed prefix
 * @param idLength requested number of ID characters to display
 * @return the formatted display name
 */
protected String getNodeDisplayName( FlowNode flowNode, int idLength )
  {
  if( idLength > Util.ID_LENGTH )
    idLength = Util.ID_LENGTH;

  String flowID = truncateID( getFlowID(), idLength );
  String stepID = truncateID( getID(), idLength );
  String nodeID = truncateID( flowNode.getID(), idLength );

  return String.format( "[%s/%s/%s] %s/%s", flowID, stepID, nodeID, getFlowName(), getName() );
  }

/** Returns at most the first {@code length} characters of {@code id}. */
private static String truncateID( String id, int length )
  {
  return id.substring( 0, Math.min( length, id.length() ) );
  }
/**
 * Emits final memory and counter diagnostics for this node before shutdown.
 *
 * @throws Exception if closing fails
 */
@Override
public void close() throws Exception
  {
  String prefix = "flow node id: " + flowNode.getID();

  logMemory( LOG, prefix + ", mem on close" );
  logCounters( LOG, prefix + ", counter:", currentProcess );
  }
}
private Vertex newVertex( FlowNode flowNode, Configuration conf, int parallelism ) { conf.set( FlowNode.CASCADING_FLOW_NODE, pack( flowNode, conf ) ); // todo: pack into payload directly ProcessorDescriptor descriptor = ProcessorDescriptor.create( FlowProcessor.class.getName() ); descriptor.setUserPayload( getPayload( conf ) ); Vertex vertex = Vertex.create( flowNode.getID(), descriptor, parallelism ); if( environment != null ) vertex.setTaskEnvironment( environment ); return vertex; }
private Vertex newVertex( FlowNode flowNode, Configuration conf, int parallelism ) { conf.set( FlowNode.CASCADING_FLOW_NODE, pack( flowNode, conf ) ); // todo: pack into payload directly ProcessorDescriptor descriptor = ProcessorDescriptor.create( FlowProcessor.class.getName() ); descriptor.setUserPayload( getPayload( conf ) ); Vertex vertex = Vertex.create( flowNode.getID(), descriptor, parallelism ); if( environment != null ) vertex.setTaskEnvironment( environment ); return vertex; }
/**
 * Assembles the map-side stream graph for the given node, rooted at the
 * supplied source tap, then binds it. Graph diagnostics are printed both
 * before and after binding.
 */
public HadoopMapStreamGraph( HadoopFlowProcess flowProcess, FlowNode node, Tap source )
  {
  super( flowProcess, node, source );

  this.source = source;

  buildGraph();

  setTraps();
  setScopes();

  printGraph( node.getID(), "map", flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), "map", flowProcess.getCurrentSliceNum() );
  }
/**
 * Assembles the reduce-side stream graph for the given node starting from the
 * source element, then binds it, printing diagnostics before and after.
 */
public HadoopReduceStreamGraph( HadoopFlowProcess flowProcess, FlowNode node, FlowElement sourceElement )
  {
  super( flowProcess, node, sourceElement );

  buildGraph();

  setTraps();
  setScopes();

  printGraph( node.getID(), "reduce", flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), "reduce", flowProcess.getCurrentSliceNum() );
  }
/**
 * Constructs and binds the map-side stream graph rooted at {@code source},
 * emitting a diagnostic print of the graph before and after binding.
 */
public HadoopMapStreamGraph( HadoopFlowProcess flowProcess, FlowNode node, Tap source )
  {
  super( flowProcess, node, source );

  this.source = source;

  // Build first, then attach traps and scopes before binding the graph.
  buildGraph();
  setTraps();
  setScopes();

  printGraph( node.getID(), "map", flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), "map", flowProcess.getCurrentSliceNum() );
  }
/**
 * Constructs and binds the reduce-side stream graph for {@code node},
 * emitting a diagnostic print of the graph before and after binding.
 */
public HadoopReduceStreamGraph( HadoopFlowProcess flowProcess, FlowNode node, FlowElement sourceElement )
  {
  super( flowProcess, node, sourceElement );

  // Build first, then attach traps and scopes before binding the graph.
  buildGraph();
  setTraps();
  setScopes();

  printGraph( node.getID(), "reduce", flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), "reduce", flowProcess.getCurrentSliceNum() );
  }
/**
 * Assembles the stream graph for a Tez node from its logical inputs and
 * outputs, then binds it, printing diagnostics before and after.
 * <p>
 * NOTE(review): the body reads {@code node} and {@code flowProcess}, which are
 * presumably protected fields assigned by the super constructor from
 * {@code flowNode} and {@code currentProcess} — verify against the base class.
 */
public Hadoop2TezStreamGraph( Hadoop2TezFlowProcess currentProcess, FlowNode flowNode, Map<String, LogicalInput> inputMap, Map<String, LogicalOutput> outputMap )
  {
  super( currentProcess, flowNode );

  this.inputMap = inputMap;
  this.outputMap = outputMap;

  buildGraph();

  setTraps();
  setScopes();

  printGraph( node.getID(), node.getName(), flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), node.getName(), flowProcess.getCurrentSliceNum() );
  }
/**
 * Builds and binds the Tez node stream graph, keeping references to the
 * logical input and output maps for later wiring.
 * <p>
 * NOTE(review): {@code node} and {@code flowProcess} are not parameters here;
 * they appear to be inherited fields set by {@code super(...)} — confirm in
 * the base class.
 */
public Hadoop2TezStreamGraph( Hadoop2TezFlowProcess currentProcess, FlowNode flowNode, Map<String, LogicalInput> inputMap, Map<String, LogicalOutput> outputMap )
  {
  super( currentProcess, flowNode );

  this.inputMap = inputMap;
  this.outputMap = outputMap;

  // Build first, then attach traps and scopes before binding the graph.
  buildGraph();
  setTraps();
  setScopes();

  printGraph( node.getID(), node.getName(), flowProcess.getCurrentSliceNum() );

  bind();

  printBoundGraph( node.getID(), node.getName(), flowProcess.getCurrentSliceNum() );
  }
/**
 * Builds the stream graph for a hash join rooted at the given boundary,
 * prints it, and binds it.
 */
public HashJoinStreamGraph(FlinkFlowProcess flowProcess, FlowNode node, Boundary source) {
    super(flowProcess, node);

    // The boundary is the head of this graph; keep the stage it produces.
    sourceStage = handleHead(source);

    setTraps();
    setScopes();

    printGraph( node.getID(), "hashjoin", flowProcess.getCurrentSliceNum() );

    bind();
}
/**
 * Builds the mapper-side stream graph for a hash join rooted at the given
 * boundary, prints it, and binds it.
 */
public HashJoinMapperStreamGraph(FlinkFlowProcess flowProcess, FlowNode node, Boundary source) {
    super(flowProcess, node);

    // The boundary heads this graph; retain the resulting source stage.
    sourceStage = handleHead(source);

    setTraps();
    setScopes();

    printGraph( node.getID(), "hashjoin", flowProcess.getCurrentSliceNum() );

    bind();
}
/**
 * Builds the reduce-side stream graph for a CoGroup with a buffer, prints it,
 * and binds it.
 */
public CoGroupBufferReduceStreamGraph(FlinkFlowProcess flowProcess, FlowNode node, CoGroup coGroup) {
    super(flowProcess, node);

    buildGraph(coGroup, flowProcess);

    setTraps();
    setScopes();

    printGraph( node.getID(), "cogroup", flowProcess.getCurrentSliceNum() );

    bind();
}
/**
 * Builds the stream graph for a GroupBy node, prints it, and binds it.
 */
public GroupByStreamGraph(FlinkFlowProcess flowProcess, FlowNode node, GroupBy groupBy) {
    super(flowProcess, node);

    buildGraph(groupBy, flowProcess);

    setTraps();
    setScopes();

    printGraph( node.getID(), "groupby", flowProcess.getCurrentSliceNum() );

    bind();
}
/**
 * Builds the sink-side stream graph rooted at the given boundary, prints it,
 * and binds it.
 */
public SinkStreamGraph(FlinkFlowProcess flowProcess, FlowNode node, Boundary source) {
    super(flowProcess, node);

    // The boundary heads this graph; retain the resulting source stage.
    sourceStage = handleHead(source);

    setTraps();
    setScopes();

    printGraph( node.getID(), "sink", flowProcess.getCurrentSliceNum() );

    bind();
}
/**
 * Translates an Each-pipe node into a Flink {@code mapPartition} operator.
 * <p>
 * The resulting operator inherits the parallelism of its input, is typed by
 * the node's outgoing value fields, and is named after the node's ID.
 *
 * @param input the upstream data set feeding this node
 * @param node  the flow node to execute inside the mapper
 * @return the translated data set
 */
private DataSet<Tuple> translateMap(DataSet<Tuple> input, FlowNode node) {

    Fields outFields = getOutScope(node).getOutValuesFields();
    registerKryoTypes(outFields);

    // Preserve the upstream operator's degree of parallelism.
    int parallelism = ((Operator)input).getParallelism();

    return input
            .mapPartition(new EachMapper(node))
            .returns(new TupleTypeInfo(outFields))
            .withParameters(this.getFlinkNodeConfig(node))
            .setParallelism(parallelism)
            .name("map-" + node.getID());
}