/**
 * Constructor TestFlowProcess creates a test process bound to a fresh {@link FlowSession}.
 *
 * @param numTasks the total number of tasks in the job
 * @param taskNum  the ordinal of this task within the job
 */
public TestFlowProcess( int numTasks, int taskNum )
  {
  super( new FlowSession() );

  // the two assignments are independent of one another
  this.taskNum = taskNum;
  this.numTasks = numTasks;
  }
// Maintains a bidirectional link: this process stores the session, and the
// session is pointed back at this process via setCurrentProcess( this ).
// NOTE(review): passing null throws NPE on the back-reference call AFTER the
// field has already been assigned -- confirm callers never pass null.
/** * Method setCurrentSession sets the currentSession of this FlowProcess object. * * @param currentSession the currentSession of this FlowProcess object. */ public void setCurrentSession( FlowSession currentSession ) { this.currentSession = currentSession; currentSession.setCurrentProcess( this ); }
/**
 * Creates a {@link ClientState} for the given FlowProcess using the
 * CascadingServices registered on its current session, or returns the
 * no-op {@code ClientState.NULL} when no services are available.
 *
 * @param flowProcess the process whose session supplies the services
 * @return a ClientState instance, never null
 */
protected ClientState createClientState( FlowProcess flowProcess )
  {
  CascadingServices services = flowProcess.getCurrentSession().getCascadingServices();

  return services == null ? ClientState.NULL : services.createClientState( getID() );
  }
/**
 * Method getClientState returns a ClientState for this process' ID.
 * <p>
 * Returns {@code ClientState.NULL} when no CascadingServices are available,
 * mirroring the null guard used by {@code createClientState( FlowProcess )}
 * instead of failing with a NullPointerException.
 *
 * @return a ClientState instance, never null
 */
protected ClientState getClientState()
  {
  CascadingServices services = getFlowSession().getCascadingServices();

  // consistency fix: the sibling factory guards against missing services,
  // this accessor previously dereferenced the null and threw an NPE
  if( services == null )
    return ClientState.NULL;

  return services.createClientState( getID() );
  }
// NOTE(review): constructs a brand new FlowSession on EVERY call rather than
// returning a cached/current session -- callers holding two results get two
// distinct instances. Confirm this is intentional and not a missing lazy-init.
public FlowSession getFlowSession() { return new FlowSession( getCascadingServices() ); }
/**
 * Initializes the flow statistics for this process.
 * <p>
 * When the wrapped process exposes counters, a {@code ProcessFlowStats} is
 * created around a fresh ClientState, prepared, and marked pending; otherwise
 * the default prepared stats instance is used. Any {@code ProcessException}
 * is rethrown as a {@code FlowException}.
 */
private void initStats()
  {
  try
    {
    // no counters -> fall back to the default prepared stats and bail out
    if( !processWrapper.hasCounters() )
      {
      flowStats = createPrepareFlowStats();
      return;
      }

    ClientState clientState = getFlowSession().getCascadingServices().createClientState( getID() );

    flowStats = new ProcessFlowStats( this, clientState, processWrapper );
    flowStats.prepare();
    flowStats.markPending();
    }
  catch( ProcessException exception )
    {
    throw new FlowException( exception );
    }
  }
// log the configured child JVM options to aid debugging of task-launch issues,
// then bind the runtime process to a fresh session and this task's JobConf.
// NOTE(review): the trailing boolean presumably selects the mapper side -- confirm
// against the HadoopFlowProcess constructor.
LOG.info( "child jvm opts: {}", jobConf.get( "mapred.child.java.opts", "" ) ); currentProcess = new HadoopFlowProcess( new FlowSession(), jobConf, true );
// log the configured child JVM options to aid debugging of task-launch issues,
// then bind the runtime process to a fresh session and this task's JobConf.
// NOTE(review): the trailing boolean presumably selects the mapper side -- confirm
// against the HadoopFlowProcess constructor.
LOG.info( "child jvm opts: {}", jobConf.get( "mapred.child.java.opts", "" ) ); currentProcess = new HadoopFlowProcess( new FlowSession(), jobConf, true );
// log the configured child JVM options to aid debugging of task-launch issues,
// then bind the runtime process to a fresh session and this task's JobConf.
// NOTE(review): the trailing false presumably selects the reducer side (the
// mapper-side twin of this fragment passes true) -- confirm against the
// HadoopFlowProcess constructor.
LOG.info( "child jvm opts: {}", jobConf.get( "mapred.child.java.opts", "" ) ); currentProcess = new HadoopFlowProcess( new FlowSession(), jobConf, false );
// log the configured child JVM options to aid debugging of task-launch issues,
// then bind the runtime process to a fresh session and this task's JobConf.
// NOTE(review): the trailing false presumably selects the reducer side (the
// mapper-side twin of this fragment passes true) -- confirm against the
// HadoopFlowProcess constructor.
LOG.info( "child jvm opts: {}", jobConf.get( "mapred.child.java.opts", "" ) ); currentProcess = new HadoopFlowProcess( new FlowSession(), jobConf, false );
// Tez processor bring-up: rebuild the TezConfiguration from the serialized user
// payload, apply MR-compatibility properties, initialize log4j, bind the
// Hadoop2TezFlowProcess to a fresh session and this processor's context, then
// deserialize the FlowNode shipped base64-encoded inside the configuration.
// Failures are rethrown as CascadingException when already one, otherwise
// wrapped in FlowException so the framework reports a configuration error.
// NOTE(review): the catch( Throwable ) intentionally traps Errors too -- confirm
// that is desired at this boundary.
@Override public void initialize() throws Exception { configuration = new TezConfiguration( TezUtils.createConfFromUserPayload( getContext().getUserPayload() ) ); TezUtil.setMRProperties( getContext(), configuration, true ); try { HadoopUtil.initLog4j( configuration ); LOG.info( "cascading version: {}", configuration.get( "cascading.version", "" ) ); currentProcess = new Hadoop2TezFlowProcess( new FlowSession(), getContext(), configuration ); flowNode = deserializeBase64( configuration.getRaw( FlowNode.CASCADING_FLOW_NODE ), configuration, BaseFlowNode.class ); LOG.info( "flow node id: {}, ordinal: {}", flowNode.getID(), flowNode.getOrdinal() ); logMemory( LOG, "flow node id: " + flowNode.getID() + ", mem on start" ); } catch( Throwable throwable ) { if( throwable instanceof CascadingException ) throw (CascadingException) throwable; throw new FlowException( "internal error during processor configuration", throwable ); } }
// Tez processor bring-up: rebuild the TezConfiguration from the serialized user
// payload, apply MR-compatibility properties, initialize log4j, bind the
// Hadoop2TezFlowProcess to a fresh session and this processor's context, then
// deserialize the FlowNode shipped base64-encoded inside the configuration.
// Failures are rethrown as CascadingException when already one, otherwise
// wrapped in FlowException so the framework reports a configuration error.
// NOTE(review): the catch( Throwable ) intentionally traps Errors too -- confirm
// that is desired at this boundary.
@Override public void initialize() throws Exception { configuration = new TezConfiguration( TezUtils.createConfFromUserPayload( getContext().getUserPayload() ) ); TezUtil.setMRProperties( getContext(), configuration, true ); try { HadoopUtil.initLog4j( configuration ); LOG.info( "cascading version: {}", configuration.get( "cascading.version", "" ) ); currentProcess = new Hadoop2TezFlowProcess( new FlowSession(), getContext(), configuration ); flowNode = deserializeBase64( configuration.getRaw( FlowNode.CASCADING_FLOW_NODE ), configuration, BaseFlowNode.class ); LOG.info( "flow node id: {}, ordinal: {}", flowNode.getID(), flowNode.getOrdinal() ); logMemory( LOG, "flow node id: " + flowNode.getID() + ", mem on start" ); } catch( Throwable throwable ) { if( throwable instanceof CascadingException ) throw (CascadingException) throwable; throw new FlowException( "internal error during processor configuration", throwable ); } }