/**
 * Looks up the Ambrose DAG node registered under the given flow step's name.
 *
 * @param step step for which node should be retrieved.
 * @return node associated with step.
 * @throws IllegalStateException if no node is registered under the step's name.
 */
private DAGNode<CascadingJob> getNode(FlowStep step) {
  String stepName = step.getName();
  DAGNode<CascadingJob> dagNode = nodesByName.get(stepName);
  if (dagNode != null) {
    return dagNode;
  }
  throw new IllegalStateException(String.format("Node with name '%s' not found", stepName));
}
/**
 * Looks up the Ambrose DAG node registered under the given flow step's name.
 *
 * @param step step for which node should be retrieved.
 * @return node associated with step.
 * @throws IllegalStateException if no node is registered under the step's name.
 */
private DAGNode<CascadingJob> getNode(FlowStep step) {
  String stepName = step.getName();
  DAGNode<CascadingJob> dagNode = nodesByName.get(stepName);
  if (dagNode != null) {
    return dagNode;
  }
  throw new IllegalStateException(String.format("Node with name '%s' not found", stepName));
}
/** * onStepProgressing event is fired whenever a job makes progress. * * @param step the step in the flow that represents the MapReduce job. */ @Override public void onStepRunning(FlowStep step) { // first we report the scripts progress int progress = (int) ((((double) runningJobs) / totalNumberOfJobs) * 100); AmbroseUtils.pushWorkflowProgressEvent(statsWriteService, currentFlowId, progress); // only push job progress events for a completed step once if (completedStepNames.contains(step.getName())) { return; } try { // update node DAGNode<CascadingJob> node = updateNode(step); if (node.getJob().getMapReduceJobState() != null) { AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobProgressEvent(node)); if (node.getJob().getMapReduceJobState().isComplete()) { completedStepNames.add(step.getName()); } } } catch (Exception e) { LOG.error("Failed to handle onStepRunning event", e); } }
/** * onStepProgressing event is fired whenever a job makes progress. * * @param step the step in the flow that represents the MapReduce job. */ @Override public void onStepRunning(FlowStep step) { // first we report the scripts progress int progress = (int) ((((double) runningJobs) / totalNumberOfJobs) * 100); AmbroseUtils.pushWorkflowProgressEvent(statsWriteService, currentFlowId, progress); // only push job progress events for a completed step once if (completedStepNames.contains(step.getName())) { return; } try { // update node DAGNode<CascadingJob> node = updateNode(step); if (node.getJob().getMapReduceJobState() != null) { AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobProgressEvent(node)); if (node.getJob().getMapReduceJobState().isComplete()) { completedStepNames.add(step.getName()); } } } catch (Exception e) { LOG.error("Failed to handle onStepRunning event", e); } }
/**
 * Looks up the Ambrose DAG node registered under the given flow step's name.
 *
 * @param step step for which node should be retrieved.
 * @return node associated with step.
 * @throws IllegalStateException if no node is registered under the step's name.
 */
private DAGNode<CascadingJob> getNode(FlowStep step) {
  String stepName = step.getName();
  DAGNode<CascadingJob> dagNode = nodesByName.get(stepName);
  if (dagNode != null) {
    return dagNode;
  }
  throw new IllegalStateException(String.format("Node with name '%s' not found", stepName));
}
/**
 * Looks up the Ambrose DAG node registered under the given flow step's name.
 *
 * @param step step for which node should be retrieved.
 * @return node associated with step.
 * @throws IllegalStateException if no node is registered under the step's name.
 */
private DAGNode<CascadingJob> getNode(FlowStep step) {
  String stepName = step.getName();
  DAGNode<CascadingJob> dagNode = nodesByName.get(stepName);
  if (dagNode != null) {
    return dagNode;
  }
  throw new IllegalStateException(String.format("Node with name '%s' not found", stepName));
}
/**
 * Creates stats for the given flow step and registers this instance back on the step.
 *
 * @param flowStep step whose statistics this instance tracks.
 * @param clientState client state used for status reporting.
 */
protected FlowStepStats( FlowStep flowStep, ClientState clientState )
  {
  super( flowStep.getName(), clientState );
  this.flowStep = flowStep;
  // NOTE(review): 'this' escapes before the constructor completes — safe only if
  // the step does not use the stats object during construction; confirm with callers.
  flowStep.setFlowStepStats( this );
  }
/** * onStepProgressing event is fired whenever a job makes progress. * * @param step the step in the flow that represents the MapReduce job. */ @Override public void onStepRunning(FlowStep step) { // first we report the scripts progress int progress = (int) ((((double) runningJobs) / totalNumberOfJobs) * 100); AmbroseUtils.pushWorkflowProgressEvent(statsWriteService, currentFlowId, progress); // only push job progress events for a completed step once if (completedStepNames.contains(step.getName())) { return; } try { // update node DAGNode<CascadingJob> node = updateNode(step); if (node.getJob().getMapReduceJobState() != null) { AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobProgressEvent(node)); if (node.getJob().getMapReduceJobState().isComplete()) { completedStepNames.add(step.getName()); } } } catch (Exception e) { LOG.error("Failed to handle onStepRunning event", e); } }
/** * onStepProgressing event is fired whenever a job makes progress. * * @param step the step in the flow that represents the MapReduce job. */ @Override public void onStepRunning(FlowStep step) { // first we report the scripts progress int progress = (int) ((((double) runningJobs) / totalNumberOfJobs) * 100); AmbroseUtils.pushWorkflowProgressEvent(statsWriteService, currentFlowId, progress); // only push job progress events for a completed step once if (completedStepNames.contains(step.getName())) { return; } try { // update node DAGNode<CascadingJob> node = updateNode(step); if (node.getJob().getMapReduceJobState() != null) { AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobProgressEvent(node)); if (node.getJob().getMapReduceJobState().isComplete()) { completedStepNames.add(step.getName()); } } } catch (Exception e) { LOG.error("Failed to handle onStepRunning event", e); } }