@Override
public void jobEntityCreated(JobBaseAPIEntity entity) throws Exception {
    // Routes each newly created entity into the shared job info snapshot:
    // tasks and task attempts are indexed by their ID tags, and a job-level
    // entity overwrites the summary fields of the whole job.
    if (entity instanceof TaskExecutionAPIEntity) {
        // Index tasks by the TASK_ID tag.
        info.getTasksMap().put(entity.getTags().get(TASK_ID.toString()), (TaskExecutionAPIEntity) entity);
    } else if (entity instanceof TaskAttemptExecutionAPIEntity) {
        // Index completed attempts by the TASK_ATTEMPT_ID tag.
        info.getCompletedTaskAttemptsMap().put(entity.getTags().get(TASK_ATTEMPT_ID.toString()), (TaskAttemptExecutionAPIEntity) entity);
    } else if (entity instanceof JobExecutionAPIEntity) {
        JobExecutionAPIEntity jobExecutionAPIEntity = (JobExecutionAPIEntity) entity;
        info.setCurrentState(jobExecutionAPIEntity.getCurrentState());
        info.setStartTime(jobExecutionAPIEntity.getStartTime());
        info.setEndTime(jobExecutionAPIEntity.getEndTime());
        info.setDurationTime(jobExecutionAPIEntity.getDurationTime());
        info.setUserId(jobExecutionAPIEntity.getTags().get(MRJobTagName.USER.toString()));
        info.setJobId(jobExecutionAPIEntity.getTags().get(MRJobTagName.JOB_ID.toString()));
        // NOTE: JOD_DEF_ID is the existing (misspelled) enum constant; keep as-is.
        info.setJobDefId(jobExecutionAPIEntity.getTags().get(MRJobTagName.JOD_DEF_ID.toString()));
        info.setSiteId(jobExecutionAPIEntity.getTags().get(MRJobTagName.SITE.toString()));
        info.setJobName(jobExecutionAPIEntity.getTags().get(MRJobTagName.JOB_NAME.toString()));
        info.setJobQueueName(jobExecutionAPIEntity.getTags().get(MRJobTagName.JOB_QUEUE.toString()));
        info.setJobType(jobExecutionAPIEntity.getTags().get(MRJobTagName.JOB_TYPE.toString()));
        info.setFinishedMaps(jobExecutionAPIEntity.getNumFinishedMaps());
        info.setFinishedReduces(jobExecutionAPIEntity.getNumFinishedReduces());
        info.setFailedReduces(jobExecutionAPIEntity.getNumFailedReduces());
        info.setFailedMaps(jobExecutionAPIEntity.getNumFailedMaps());
        info.setTotalMaps(jobExecutionAPIEntity.getNumTotalMaps());
        info.setTotalReduces(jobExecutionAPIEntity.getNumTotalReduces());
        // Receiving the job-level entity implies the job is done; mark 100%.
        info.setProgress(100);
        // Fix: reuse the already-cast local instead of re-casting the parameter.
        info.setTrackingUrl(jobExecutionAPIEntity.getTrackingUrl());
    }
}
List<GenericMetricEntity> metrics = new ArrayList<>(); if (entity != null) { Long timeStamp = entity.getTimestamp(); Map<String, String> tags = entity.getTags(); metrics.add(metricWrapper(timeStamp, Constants.JOB_EXECUTION_TIME, new double[]{entity.getDurationTime()}, tags)); timeStamp, Constants.MAP_COUNT_RATIO, new double[]{entity.getNumTotalMaps(), 1.0 * entity.getNumFailedMaps() / entity.getNumTotalMaps()}, tags)); timeStamp, Constants.REDUCE_COUNT_RATIO, new double[]{entity.getNumTotalReduces(), 1.0 * entity.getNumFailedReduces() / entity.getNumTotalReduces()}, tags)); org.apache.eagle.jpm.util.jobcounter.JobCounters jobCounters = entity.getJobCounters(); if (jobCounters != null && jobCounters.getCounters() != null) { for (Map<String, Long> metricGroup : jobCounters.getCounters().values()) { baseTags.put(MRJobTagName.JOB_STATUS.toString(), entity.getCurrentState()); metrics.add(metricWrapper(timeStamp / 3600000 * 3600000, Constants.JOB_COUNT_PER_HOUR,
@Override
public void flush(JobExecutionAPIEntity entity) {
    // Start from a copy of all job tags, then overlay the execution-level
    // attributes before emitting the record downstream.
    Map<String, Object> document = new HashMap<>(entity.getTags());
    document.put("submissionTime", entity.getSubmissionTime());
    document.put("startTime", entity.getStartTime());
    document.put("endTime", entity.getEndTime());
    document.put("currentState", entity.getCurrentState());
    document.put("trackingUrl", entity.getTrackingUrl());
    // The job id (pulled back out of the map) keys the emitted tuple.
    Object jobId = document.get(MRJobTagName.JOB_ID.toString());
    collector.collect(stormStreamId, new ValuesArray(jobId, document));
}
}
/**
 * Computes the overall time window covered by the given jobs: the earliest
 * start time and the latest end time, formatted as human-readable strings.
 *
 * <p>Note: for an empty input list the window is [now, epoch-0], mirroring
 * the initial accumulator values.
 */
public List<String> getSearchTimeDuration(List<JobExecutionAPIEntity> jobEntities) {
    long earliestStart = System.currentTimeMillis();
    long latestEnd = 0L;
    for (JobExecutionAPIEntity job : jobEntities) {
        earliestStart = Math.min(earliestStart, job.getStartTime());
        latestEnd = Math.max(latestEnd, job.getEndTime());
    }
    List<String> pair = new ArrayList<>();
    pair.add(DateTimeUtil.millisecondsToHumanDateWithSeconds(earliestStart));
    pair.add(DateTimeUtil.millisecondsToHumanDateWithSeconds(latestEnd));
    return pair;
}
}
jobSubmitEventEntity.getTags().put(MRJobTagName.JOB_NAME.toString(), jobName); jobSubmitEventEntity.getTags().put(MRJobTagName.JOD_DEF_ID.toString(), this.jobDefId); jobExecutionEntity.getTags().put(MRJobTagName.JOB_TYPE.toString(), this.jobType); entityCreated(jobSubmitEventEntity); } else if (values.get(Keys.LAUNCH_TIME) != null) { // job launched jobExecutionEntity.getTags().put(MRJobTagName.USER.toString(), user); jobExecutionEntity.getTags().put(MRJobTagName.JOB_ID.toString(), jobId); jobExecutionEntity.getTags().put(MRJobTagName.JOB_NAME.toString(), jobName); jobExecutionEntity.getTags().put(MRJobTagName.JOD_DEF_ID.toString(), jobDefId); jobExecutionEntity.getTags().put(MRJobTagName.JOB_QUEUE.toString(), queueName); jobExecutionEntity.getTags().put(MRJobTagName.JOB_TYPE.toString(), this.jobType); jobExecutionEntity.setTrackingUrl(buildJobTrackingUrl(jobId)); jobExecutionEntity.setCurrentState(values.get(Keys.JOB_STATUS)); jobExecutionEntity.setStartTime(jobLaunchEventEntity.getTimestamp()); jobExecutionEntity.setEndTime(jobFinishEventEntity.getTimestamp()); jobExecutionEntity.setDurationTime(jobExecutionEntity.getEndTime() - jobExecutionEntity.getStartTime()); jobExecutionEntity.setTimestamp(jobLaunchEventEntity.getTimestamp()); jobExecutionEntity.setSubmissionTime(jobSubmitEventEntity.getTimestamp()); if (values.get(Keys.FAILED_MAPS) != null) { jobExecutionEntity.setNumFailedMaps(Integer.valueOf(values.get(Keys.FAILED_MAPS))); jobExecutionEntity.setNumFailedReduces(Integer.valueOf(values.get(Keys.FAILED_REDUCES))); jobExecutionEntity.setNumFinishedMaps(Integer.valueOf(values.get(Keys.FINISHED_MAPS))); jobExecutionEntity.setNumFinishedReduces(Integer.valueOf(values.get(Keys.FINISHED_REDUCES))); jobExecutionEntity.setNumTotalMaps(numTotalMaps);
if (taskType.equals(Constants.TaskType.MAP.toString()) && duration > jobExecutionEntity.getLastMapDuration()) { jobExecutionEntity.setLastMapDuration(duration); if (taskType.equals(Constants.TaskType.REDUCE.toString()) && duration > jobExecutionEntity.getLastReduceDuration()) { jobExecutionEntity.setLastReduceDuration(duration); if (taskType.equals(Constants.TaskType.MAP.toString()) && entity.getDuration() > jobExecutionEntity.getMaxMapTaskDuration()) { jobExecutionEntity.setMaxMapTaskDuration(entity.getDuration()); if (taskType.equals(Constants.TaskType.REDUCE.toString()) && entity.getDuration() > jobExecutionEntity.getMaxReduceTaskDuration()) { jobExecutionEntity.setMaxReduceTaskDuration(entity.getDuration()); if (entity.getTaskStatus().equals(EagleTaskStatus.FAILED.name()) || entity.getTaskStatus().equals(EagleTaskStatus.KILLED.name())) { jobExecutionEntity.setNumFailedMaps(1 + jobExecutionEntity.getNumFailedMaps()); if (entity.getTaskStatus().equals(EagleTaskStatus.FAILED.name()) || entity.getTaskStatus().equals(EagleTaskStatus.KILLED.name())) { jobExecutionEntity.setNumFailedReduces(1 + jobExecutionEntity.getNumFailedReduces()); jobExecutionEntity.setTotalMapAttempts(1 + jobExecutionEntity.getTotalMapAttempts()); if (entity.getTaskStatus().equals(EagleTaskStatus.FAILED.name()) || entity.getTaskStatus().equals(EagleTaskStatus.KILLED.name())) { jobExecutionEntity.setFailedMapAttempts(1 + jobExecutionEntity.getFailedMapAttempts()); jobExecutionEntity.setTotalReduceAttempts(1 + jobExecutionEntity.getTotalReduceAttempts()); if (entity.getTaskStatus().equals(EagleTaskStatus.FAILED.name()) || entity.getTaskStatus().equals(EagleTaskStatus.KILLED.name())) { jobExecutionEntity.setFailedReduceAttempts(1 + jobExecutionEntity.getFailedReduceAttempts());
/**
 * Builds per-interval job counts over [startTimeInSecs, endTimeInSecs] from
 * finished (history) jobs plus still-running jobs, de-duplicating running
 * jobs that already appear in history by JOB_ID.
 */
public JobCountResponse getRunningJobCount(List<JobExecutionAPIEntity> historyJobs,
                                           List<org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity> runningJobs,
                                           long startTimeInSecs,
                                           long endTimeInSecs,
                                           long intervalInSecs) {
    List<UnitJobCount> buckets = new ArrayList<>();
    Set<String> seenTypes = new HashSet<>();
    Set<String> historyJobIds = new HashSet<>();
    initJobCountList(buckets, startTimeInSecs, endTimeInSecs, intervalInSecs);

    long windowStartMs = startTimeInSecs * DateTimeUtil.ONESECOND;
    // History jobs: always remember their ids (for de-duplication below),
    // but only count those that ended inside the query window.
    for (JobExecutionAPIEntity finished : historyJobs) {
        historyJobIds.add(finished.getTags().get(MRJobTagName.JOB_ID.toString()));
        if (finished.getEndTime() >= windowStartMs) {
            String type = finished.getTags().get(MRJobTagName.JOB_TYPE.toString());
            seenTypes.add(type);
            countJob(buckets,
                finished.getStartTime() / DateTimeUtil.ONESECOND,
                finished.getEndTime() / DateTimeUtil.ONESECOND,
                intervalInSecs,
                type);
        }
    }

    // Running jobs: skip any that history already reported, and count the
    // rest as spanning from their start up to the end of the window.
    for (org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity running : runningJobs) {
        if (!ResourceUtils.isDuplicate(historyJobIds, running.getTags().get(MRJobTagName.JOB_ID.toString()))) {
            String type = running.getTags().get(MRJobTagName.JOB_TYPE.toString());
            seenTypes.add(type);
            countJob(buckets,
                running.getStartTime() / DateTimeUtil.ONESECOND,
                endTimeInSecs,
                intervalInSecs,
                type);
        }
    }

    JobCountResponse response = new JobCountResponse();
    response.jobCounts = buckets;
    response.jobTypes = seenTypes;
    return response;
}
/**
 * Buckets history jobs by duration: {@code timeList} is parsed into a list
 * of duration thresholds, one counting bucket is created per threshold, and
 * each job is counted into the bucket matching its duration (in seconds).
 */
public JobCountResponse getHistoryJobCountGroupByDuration(List<JobExecutionAPIEntity> jobDurations, String timeList) {
    List<Long> thresholds = ResourceUtils.parseDistributionList(timeList);
    List<UnitJobCount> buckets = new ArrayList<>(thresholds.size());
    for (Long threshold : thresholds) {
        buckets.add(new UnitJobCount(threshold));
    }

    Set<String> seenTypes = new HashSet<>();
    for (JobExecutionAPIEntity job : jobDurations) {
        long durationSecs = job.getDurationTime() / DateTimeUtil.ONESECOND;
        UnitJobCount bucket = buckets.get(ResourceUtils.getDistributionPosition(thresholds, durationSecs));
        String type = job.getTags().get(MRJobTagName.JOB_TYPE.toString());
        seenTypes.add(type);
        countJob(bucket, type);
    }

    JobCountResponse response = new JobCountResponse();
    response.jobCounts = buckets;
    response.jobTypes = seenTypes;
    return response;
}
jobFinishEventEntity.setTags(new HashMap<>(baseTags)); jobExecutionEntity = new JobExecutionAPIEntity(); jobExecutionEntity.setTags(new HashMap<>(baseTags)); jobExecutionEntity.setNumFailedMaps(0); jobExecutionEntity.setNumFailedReduces(0);
long maxFinishedTime = DateTimeUtil.humanDateToSeconds(endTime) * DateTimeUtil.ONESECOND; for (JobExecutionAPIEntity o : res.getObj()) { if (o.getEndTime() <= maxFinishedTime) { finishedJobs.add(o); jobIds.add(o.getTags().get(MRJobTagName.JOB_ID.toString()));
JobCounters jobCounters = jobExecutionAPIEntity.getJobCounters(); jobExecutionAPIEntity.setJobCounters(jobCounters); jobFinished = true; } catch (Exception e) { LOG.error("Failed to update job execution entity: " + this.jobExecutionAPIEntity.toString() + ", due to " + e.getMessage(), e);
List<JobExecutionAPIEntity> jobs = new ArrayList<>(); for (JobExecutionAPIEntity o : historyRes.getObj()) { if (o.getTags().get(MRJobTagName.JOB_TYPE.toString()).equalsIgnoreCase(jobType)) { jobs.add(o);
@Override public void close() throws IOException { // check if this job history file is complete if (jobExecutionEntity.getEndTime() == 0L) { throw new IOException(new JHFWriteNotCompletedException(jobId)); } try { flush(); this.jobCounterMetricsGenerator.flush(); } catch (Exception ex) { throw new IOException(ex); } }
JobHistoryZKStateManager.instance().updateProcessedJob(timeStamp2Date(entity.getTimestamp()), entity.getTags().get(MRJobTagName.JOB_ID.toString()), ((JobExecutionAPIEntity) entity).getCurrentState());
List<org.apache.eagle.jpm.mr.historyentity.JobExecutionAPIEntity> jobs = getJobs(site, currentTime, queryTimeRange.f0(), queryTimeRange.f1()); Set<String> jobIds = new HashSet<>(); jobs.forEach(job -> jobIds.add(job.getTags().get(JOB_ID.toString())));