/**
 * Looks up the Ambrose DAG node for the given flow step and refreshes the
 * node's job with the step's current Hadoop statistics.
 *
 * @param step step with which to update ambrose node state.
 * @return node associated with step.
 */
private DAGNode<CascadingJob> updateNode(FlowStep step) {
  DAGNode<CascadingJob> dagNode = getNode(step);
  CascadingJob cascadingJob = dagNode.getJob();
  HadoopStepStats stepStats = (HadoopStepStats) step.getFlowStepStats();
  // Propagate the Hadoop job id and raw stats onto the Ambrose job model.
  cascadingJob.setId(stepStats.getJobID());
  cascadingJob.setJobStats(stepStats);
  mapReduceHelper.addMapReduceJobState(cascadingJob, stepStats.getJobClient());
  return dagNode;
}
/**
 * Looks up the Ambrose DAG node for the given flow step and refreshes the
 * node's job with the step's current Hadoop statistics.
 *
 * @param step step with which to update ambrose node state.
 * @return node associated with step.
 */
private DAGNode<CascadingJob> updateNode(FlowStep step) {
  DAGNode<CascadingJob> dagNode = getNode(step);
  CascadingJob cascadingJob = dagNode.getJob();
  HadoopStepStats stepStats = (HadoopStepStats) step.getFlowStepStats();
  // Use the process step id as the Ambrose job id, and attach the raw stats.
  cascadingJob.setId(stepStats.getProcessStepID());
  cascadingJob.setJobStats(stepStats);
  mapReduceHelper.addMapReduceJobState(cascadingJob, stepStats.getJobClient());
  return dagNode;
}
/** Records final step statistics when a flow step finishes. */
@Override
public void onStepCompleted(FlowStep flowStep) {
  LOG.info("Step completed");
  // Cast is safe for Hadoop-backed flows; record the step's stats snapshot.
  recordStepData((HadoopStepStats) flowStep.getFlowStepStats());
}
/** Records step statistics as a flow step is being stopped. */
@Override
public void onStepStopping(FlowStep flowStep) {
  LOG.info("Step stopping");
  // Capture whatever stats exist at stop time, same as on completion.
  recordStepData((HadoopStepStats) flowStep.getFlowStepStats());
}
/**
 * Records task-level error details when a flow step raises a throwable.
 *
 * @return false — presumably so Cascading's own error handling still runs;
 *         this listener only records, it does not consume the failure.
 */
@Override
public boolean onStepThrowable(FlowStep flowStep, Throwable throwable) {
  HadoopStepStats stepStats = (HadoopStepStats) flowStep.getFlowStepStats();
  // No aggregated counters are available here, so pass an empty map.
  TwoNestedMap<String, String, Long> emptyCounters = new TwoNestedMap<String, String, Long>();
  recordTaskErrors(stepStats, stepStats.getJobID(), true, emptyCounters);
  return false;
} }
@Override public void onStepStarting(FlowStep flowStep) { LOG.info("Step starting"); try { HadoopStepStats hdStepStats = (HadoopStepStats)flowStep.getFlowStepStats(); RunningJob job = hdStepStats.getRunningJob(); persister.onRunning(new LaunchedJob(job.getID().toString(), job.getJobName(), job.getTrackingURL()) ); } catch (NullPointerException | IOException e) { // no op } }
/**
 * Fetches the Ambrose node for the step and synchronizes its job state with
 * the step's Hadoop statistics before returning it.
 *
 * @param step step with which to update ambrose node state.
 * @return node associated with step.
 */
private DAGNode<CascadingJob> updateNode(FlowStep step) {
  final DAGNode<CascadingJob> result = getNode(step);
  final CascadingJob ambroseJob = result.getJob();
  final HadoopStepStats hadoopStats = (HadoopStepStats) step.getFlowStepStats();
  // Sync id and stats, then let the helper pull in map/reduce job state.
  ambroseJob.setId(hadoopStats.getJobID());
  ambroseJob.setJobStats(hadoopStats);
  mapReduceHelper.addMapReduceJobState(ambroseJob, hadoopStats.getJobClient());
  return result;
}
/**
 * Fetches the Ambrose node for the step and synchronizes its job state with
 * the step's Hadoop statistics before returning it.
 *
 * @param step step with which to update ambrose node state.
 * @return node associated with step.
 */
private DAGNode<CascadingJob> updateNode(FlowStep step) {
  final DAGNode<CascadingJob> result = getNode(step);
  final CascadingJob ambroseJob = result.getJob();
  final HadoopStepStats hadoopStats = (HadoopStepStats) step.getFlowStepStats();
  // The process step id serves as the job id; attach stats and MR job state.
  ambroseJob.setId(hadoopStats.getProcessStepID());
  ambroseJob.setJobStats(hadoopStats);
  mapReduceHelper.addMapReduceJobState(ambroseJob, hadoopStats.getJobClient());
  return result;
}
// Reads the number of tuples this flow step wrote from Cascading's step
// counters, then exposes that single value as a one-element enumerator.
// NOTE(review): the enclosing method signature is not visible here; presumably
// this implements a Linq4j/Calcite enumerable over the step's row count —
// confirm the counter is final at the point this runs.
long rowCount = flowStep.getFlowStepStats().getCounterValue( StepCounters.Tuples_Written ); return new Linq4j().singletonEnumerable( rowCount ).enumerator();