/**
 * Looks up the task type tag (e.g. MAP/REDUCE) on a task attempt entity.
 *
 * @param taskAttemptInfo entity whose tag map is consulted
 * @return the TASK_TYPE tag value, or null when the tag is absent
 */
private String getTaskType(TaskAttemptExecutionAPIEntity taskAttemptInfo) {
    Map<String, String> tags = taskAttemptInfo.getTags();
    return tags.get(MRJobTagName.TASK_TYPE.toString());
}
/**
 * Builds a parser for one running MR application.
 *
 * @param endpointConfig     endpoint settings (supplies the site tag)
 * @param eagleServiceConfig Eagle service settings for the entity creation handler
 * @param app                the running application being parsed
 * @param mrJobMap           previously collected job entities, reused when non-null so
 *                           job state survives across parser instances
 * @param runningJobManager  manager used to persist/recover running-job state
 * @param configKeys         job-config keys of interest
 * @param config             application configuration
 */
public MRJobParser(MRRunningJobConfig.EndpointConfig endpointConfig,
                   MRRunningJobConfig.EagleServiceConfig eagleServiceConfig,
                   AppInfo app,
                   Map<String, JobExecutionAPIEntity> mrJobMap,
                   MRRunningJobManager runningJobManager,
                   List<String> configKeys,
                   Config config) {
    this.app = app;
    // Fall back to a fresh map only when the caller supplied none.
    this.mrJobEntityMap = (mrJobMap != null) ? mrJobMap : new HashMap<>();
    this.mrJobConfigs = new HashMap<>();
    this.mrJobEntityCreationHandler = new MRJobEntityCreationHandler(eagleServiceConfig);
    // Tags stamped on every entity emitted for this application.
    this.commonTags.put(MRJobTagName.SITE.toString(), endpointConfig.site);
    this.commonTags.put(MRJobTagName.USER.toString(), app.getUser());
    this.commonTags.put(MRJobTagName.JOB_QUEUE.toString(), app.getQueue());
    this.runningJobManager = runningJobManager;
    this.parserStatus = ParserStatus.FINISHED;
    this.finishedTaskIds = new HashSet<>();
    this.configKeys = configKeys;
    this.config = config;
}
/**
 * Buckets each task into its type's long/short group relative to a duration threshold.
 *
 * @param result    response whose per-type groups are populated in place
 * @param keepShort when true, tasks at or under the threshold are kept in shortTasks;
 *                  when false they are dropped entirely
 * @param tasks     tasks to classify
 * @param value     duration threshold (same unit as {@code getDuration()})
 * @return the same {@code result} instance, for chaining
 */
public MRTaskExecutionResponse.TaskGroupResponse groupTasksByValue(MRTaskExecutionResponse.TaskGroupResponse result,
                                                                   boolean keepShort,
                                                                   List<TaskExecutionAPIEntity> tasks,
                                                                   long value) {
    for (TaskExecutionAPIEntity entity : tasks) {
        String taskType = entity.getTags().get(MRJobTagName.TASK_TYPE.toString());
        MRTaskExecutionResponse.TaskGroup taskGroup = result.tasksGroupByType.get(taskType.toUpperCase());
        long duration = entity.getDuration();
        // The two duration ranges are disjoint, so an else-if mirrors the original pair of ifs.
        if (duration > value) {
            taskGroup.longTasks.add(entity);
        } else if (keepShort) {
            taskGroup.shortTasks.add(entity);
        }
    }
    return result;
}
/**
 * Flattens a task attempt entity into a field map (tags plus lifecycle fields) and
 * emits it on the configured storm stream, keyed by the task attempt id tag.
 *
 * @param entity finished/updated task attempt to publish
 */
@Override
public void flush(TaskAttemptExecutionAPIEntity entity) {
    Map<String, Object> fields = new HashMap<>(entity.getTags());
    fields.put("startTime", entity.getStartTime());
    fields.put("endTime", entity.getEndTime());
    fields.put("taskStatus", entity.getTaskStatus());
    boolean hasErrorCategory = fields.containsKey(MRJobTagName.ERROR_CATEGORY.toString());
    if (!hasErrorCategory) {
        // NOTE(review): presence is checked via the ERROR_CATEGORY tag name but the default
        // is written under the literal "errorCategory" — assumed to be the same string; verify.
        fields.put("errorCategory", "");
    }
    Object messageKey = fields.get(MRJobTagName.TASK_ATTEMPT_ID.toString());
    collector.collect(stormStreamId, new ValuesArray(messageKey, fields));
}
}
/**
 * Folds a task attempt entity into the per-minute failure/kill counters.
 * Entities of any other type are ignored.
 *
 * @param entity candidate entity; only TaskAttemptExecutionAPIEntity is counted
 * @throws Exception declared by the callback interface
 */
@Override
public void jobEntityCreated(JobBaseAPIEntity entity) throws Exception {
    if (!(entity instanceof TaskAttemptExecutionAPIEntity)) {
        return;
    }
    TaskAttemptExecutionAPIEntity e = (TaskAttemptExecutionAPIEntity) entity;
    // Copy the dimension tags the counter key is built from.
    Map<String, String> tags = new HashMap<>();
    MRJobTagName[] copiedTags = {
        MRJobTagName.SITE, MRJobTagName.JOD_DEF_ID, MRJobTagName.RACK,
        MRJobTagName.HOSTNAME, MRJobTagName.JOB_ID, MRJobTagName.TASK_TYPE
    };
    for (MRJobTagName tag : copiedTags) {
        String tagName = tag.toString();
        tags.put(tagName, e.getTags().get(tagName));
    }
    CounterKey key = new CounterKey();
    key.tags = tags;
    // Bucket counters by the minute the attempt ended.
    key.timestamp = roundToMinute(e.getEndTime());
    CounterValue value = counters.get(key);
    if (value == null) {
        value = new CounterValue();
        counters.put(key, value);
    }
    String status = e.getTaskStatus();
    if (status.equals(EagleTaskStatus.FAILED.name())) {
        value.failedCount++;
    } else if (status.equals(EagleTaskStatus.KILLED.name())) {
        value.killedCount++;
    }
    value.totalCount++;
}
private static JobSuggestionAPIEntity createJobSuggestionEntity(Result.ProcessorResult processorResult, MapReduceAnalyzerEntity entity) { Map<String, String> tags = new HashMap<>(); tags.put(JOB_ID.toString(), entity.getJobId()); tags.put(JOD_DEF_ID.toString(), entity.getJobDefId()); tags.put(SITE.toString(), entity.getSiteId()); tags.put(USER.toString(), entity.getUserId()); tags.put(RULE_TYPE.toString(), processorResult.getRuleType().toString()); tags.put(JOB_QUEUE.toString(), entity.getJobQueueName()); tags.put(JOB_TYPE.toString(), entity.getJobType()); JobSuggestionAPIEntity jobSuggestionAPIEntity = new JobSuggestionAPIEntity(); jobSuggestionAPIEntity.setTags(tags); jobSuggestionAPIEntity.setTimestamp(entity.getStartTime()); // startTime as the job timestamp jobSuggestionAPIEntity.setOptimizerSuggestion(processorResult.getMessage()); jobSuggestionAPIEntity.setOptimizerSettings(processorResult.getSettings()); return jobSuggestionAPIEntity; }
// Fragment (enclosing method not visible): builds the tag map for a failure-task
// entity by copying the identifying tags from the source attempt entity `e`,
// then records the classified error category on both the failure task and the
// original entity.
Map<String, String> tags = new HashMap<>();
// Attach the map before populating it; later puts are visible through failureTask.
failureTask.setTags(tags);
tags.put(MRJobTagName.SITE.toString(), e.getTags().get(MRJobTagName.SITE.toString()));
tags.put(MRJobTagName.JOD_DEF_ID.toString(), e.getTags().get(MRJobTagName.JOD_DEF_ID.toString()));
tags.put(MRJobTagName.RACK.toString(), e.getTags().get(MRJobTagName.RACK.toString()));
tags.put(MRJobTagName.HOSTNAME.toString(), e.getTags().get(MRJobTagName.HOSTNAME.toString()));
tags.put(MRJobTagName.JOB_ID.toString(), e.getTags().get(MRJobTagName.JOB_ID.toString()));
tags.put(MRJobTagName.TASK_ATTEMPT_ID.toString(), e.getTags().get(MRJobTagName.TASK_ATTEMPT_ID.toString()));
tags.put(MRJobTagName.TASK_TYPE.toString(), e.getTags().get(MRJobTagName.TASK_TYPE.toString()));
// Error category is written to the new failure task AND back onto the source entity,
// so downstream consumers of either see the classification.
tags.put(MRJobTagName.ERROR_CATEGORY.toString(), errCategory);
entity.getTags().put(MRJobTagName.ERROR_CATEGORY.toString(), errCategory);
/**
 * Flattens a job execution entity into a field map (tags plus lifecycle fields) and
 * emits it on the configured storm stream, keyed by the job id tag.
 *
 * @param entity job execution snapshot to publish
 */
@Override
public void flush(JobExecutionAPIEntity entity) {
    Map<String, Object> payload = new HashMap<>(entity.getTags());
    payload.put("submissionTime", entity.getSubmissionTime());
    payload.put("startTime", entity.getStartTime());
    payload.put("endTime", entity.getEndTime());
    payload.put("currentState", entity.getCurrentState());
    payload.put("trackingUrl", entity.getTrackingUrl());
    Object messageKey = payload.get(MRJobTagName.JOB_ID.toString());
    collector.collect(stormStreamId, new ValuesArray(messageKey, payload));
}
}
// Fragment (body continues past the visible span): aggregates counters from a
// finished task; bails out early when the task carries no counter payload.
public void taskExecutionEntityCreated(TaskExecutionAPIEntity taskExecutionAPIEntity) {
    JobCounters jobCounters = taskExecutionAPIEntity.getJobCounters();
    if (jobCounters == null || jobCounters.getCounters() == null) {
        // Missing counters is expected for some tasks (e.g. failed before setup);
        // warn with the task id and skip rather than throw.
        LOG.warn("found null job counters, task {}", taskExecutionAPIEntity.getTags().get(MRJobTagName.TASK_ID.toString()));
        return;
/**
 * Counts finished jobs per duration bucket, as defined by a comma-style distribution list.
 *
 * @param jobDurations finished jobs to classify (duration in milliseconds)
 * @param timeList     distribution thresholds in seconds, parsed by ResourceUtils
 * @return response holding one UnitJobCount per bucket and the set of observed job types
 */
public JobCountResponse getHistoryJobCountGroupByDuration(List<JobExecutionAPIEntity> jobDurations, String timeList) {
    JobCountResponse response = new JobCountResponse();
    List<Long> times = ResourceUtils.parseDistributionList(timeList);
    // Idiom fix: for-each over the thresholds instead of an indexed loop; presized list.
    List<UnitJobCount> jobCounts = new ArrayList<>(times.size());
    for (Long threshold : times) {
        jobCounts.add(new UnitJobCount(threshold));
    }
    Set<String> jobTypes = new HashSet<>();
    for (JobExecutionAPIEntity job : jobDurations) {
        // Durations are stored in ms; bucket thresholds are in seconds.
        int jobIndex = ResourceUtils.getDistributionPosition(times, job.getDurationTime() / DateTimeUtil.ONESECOND);
        UnitJobCount counter = jobCounts.get(jobIndex);
        String jobType = job.getTags().get(MRJobTagName.JOB_TYPE.toString());
        jobTypes.add(jobType);
        countJob(counter, jobType);
    }
    response.jobCounts = jobCounts;
    response.jobTypes = jobTypes;
    return response;
}
/**
 * Counts how many tasks were running during each minute of [startTimeInMin, endTimeInMin].
 *
 * <p>Fix: a task whose start minute precedes the window, or whose end minute exceeds it,
 * previously produced a negative / out-of-range index into {@code taskCounts} and threw
 * IndexOutOfBoundsException. The per-task range is now clamped to the window.
 *
 * @param tasks          tasks overlapping the query window
 * @param startTimeInMin window start, in minutes since epoch (inclusive)
 * @param endTimeInMin   window end, in minutes since epoch (inclusive)
 * @return one UnitTaskCount per minute of the window
 */
public MRJobTaskCountResponse.HistoryTaskCountResponse countHistoryTask(
        List<org.apache.eagle.jpm.mr.historyentity.TaskExecutionAPIEntity> tasks,
        long startTimeInMin, long endTimeInMin) {
    List<MRJobTaskCountResponse.UnitTaskCount> taskCounts = new ArrayList<>();
    for (long i = startTimeInMin; i <= endTimeInMin; i++) {
        taskCounts.add(new MRJobTaskCountResponse.UnitTaskCount(i * DateTimeUtil.ONEMINUTE, null));
    }
    for (org.apache.eagle.jpm.mr.historyentity.TaskExecutionAPIEntity task : tasks) {
        String taskType = task.getTags().get(MRJobTagName.TASK_TYPE.toString());
        long taskStartTimeMin = task.getStartTime() / DateTimeUtil.ONEMINUTE;
        long taskEndTimeMin = task.getEndTime() / DateTimeUtil.ONEMINUTE;
        // Clamp the task's active minutes to the requested window before indexing.
        int firstSlot = (int) Math.max(taskStartTimeMin - startTimeInMin, 0L);
        int lastSlot = (int) Math.min(taskEndTimeMin - startTimeInMin, endTimeInMin - startTimeInMin);
        for (int i = firstSlot; i <= lastSlot; i++) {
            countTask(taskCounts.get(i), taskType);
        }
    }
    MRJobTaskCountResponse.HistoryTaskCountResponse response = new MRJobTaskCountResponse.HistoryTaskCountResponse();
    response.taskCount = taskCounts;
    return response;
}
public void parseConfiguration() throws Exception { Map<String, String> prop = new TreeMap<>(); if (filter.acceptJobConfFile()) { Iterator<Map.Entry<String, String>> iter = configuration.iterator(); while (iter.hasNext()) { String key = iter.next().getKey(); if (included(key) && !excluded(key)) { prop.put(key, configuration.get(key)); } } } // check must-have keys are within prop if (matchMustHaveKeyPatterns(prop)) { JobConfigurationAPIEntity jobConfigurationEntity = new JobConfigurationAPIEntity(); jobConfigurationEntity.setTags(new HashMap<>(baseTags)); jobConfigurationEntity.getTags().put(MRJobTagName.USER.toString(), user); jobConfigurationEntity.getTags().put(MRJobTagName.JOB_ID.toString(), jobId); jobConfigurationEntity.getTags().put(MRJobTagName.JOB_NAME.toString(), jobName); jobConfigurationEntity.getTags().put(MRJobTagName.JOD_DEF_ID.toString(), jobDefId); jobConfigurationEntity.getTags().put(MRJobTagName.JOB_TYPE.toString(), jobType); jobConfigurationEntity.setTimestamp(jobLaunchEventEntity.getTimestamp()); JobConfig jobConfig = new JobConfig(); jobConfig.setConfig(prop); jobConfigurationEntity.setJobConfig(jobConfig); jobConfigurationEntity.setConfigJobName(jobDefId); entityCreated(jobConfigurationEntity); } }
/**
 * Counts jobs per interval over [startTimeInSecs, endTimeInSecs], merging history jobs
 * with currently running jobs while de-duplicating by job id.
 *
 * @param historyJobs    finished jobs returned by the history store
 * @param runningJobs    jobs currently running (no end time yet)
 * @param startTimeInSecs window start, seconds since epoch
 * @param endTimeInSecs   window end, seconds since epoch
 * @param intervalInSecs  bucket width in seconds
 * @return per-interval counts plus the set of observed job types
 */
public JobCountResponse getRunningJobCount(List<JobExecutionAPIEntity> historyJobs,
                                           List<org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity> runningJobs,
                                           long startTimeInSecs, long endTimeInSecs, long intervalInSecs) {
    List<UnitJobCount> jobCounts = new ArrayList<>();
    Set<String> jobTypes = new HashSet<>();
    Set<String> seenJobIds = new HashSet<>();
    initJobCountList(jobCounts, startTimeInSecs, endTimeInSecs, intervalInSecs);
    long windowStartMs = startTimeInSecs * DateTimeUtil.ONESECOND;
    String jobIdTag = MRJobTagName.JOB_ID.toString();
    String jobTypeTag = MRJobTagName.JOB_TYPE.toString();
    for (JobExecutionAPIEntity job : historyJobs) {
        // Every history job id is recorded so a still-listed running copy is not double counted.
        seenJobIds.add(job.getTags().get(jobIdTag));
        if (job.getEndTime() >= windowStartMs) {
            String jobType = job.getTags().get(jobTypeTag);
            jobTypes.add(jobType);
            countJob(jobCounts, job.getStartTime() / DateTimeUtil.ONESECOND,
                    job.getEndTime() / DateTimeUtil.ONESECOND, intervalInSecs, jobType);
        }
    }
    for (org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity job : runningJobs) {
        // Skip running jobs already reported by the history result set.
        if (ResourceUtils.isDuplicate(seenJobIds, job.getTags().get(jobIdTag))) {
            continue;
        }
        String jobType = job.getTags().get(jobTypeTag);
        jobTypes.add(jobType);
        // A running job has no end time; count it through the end of the window.
        countJob(jobCounts, job.getStartTime() / DateTimeUtil.ONESECOND, endTimeInSecs, intervalInSecs, jobType);
    }
    JobCountResponse response = new JobCountResponse();
    response.jobCounts = jobCounts;
    response.jobTypes = jobTypes;
    return response;
}
// Fragment (loop not closed in the visible span): filters history query results down
// to the requested job type via a case-insensitive match on the JOB_TYPE tag.
// NOTE(review): a job missing the JOB_TYPE tag would NPE on the equalsIgnoreCase
// receiver — assumed the tag is always present on history entities; verify upstream.
List<JobExecutionAPIEntity> jobs = new ArrayList<>();
for (JobExecutionAPIEntity o : historyRes.getObj()) {
    if (o.getTags().get(MRJobTagName.JOB_TYPE.toString()).equalsIgnoreCase(jobType)) {
        jobs.add(o);
/**
 * Flattens an RPC-analysis entity into a field map (tags plus metric fields) and
 * emits it on the configured storm stream, keyed by the job id tag.
 *
 * @param entity RPC analysis result to publish
 */
@Override
public void flush(JobRpcAnalysisAPIEntity entity) {
    Map<String, Object> payload = new HashMap<>(entity.getTags());
    payload.put("trackingUrl", entity.getTrackingUrl());
    payload.put("totalOpsPerSecond", entity.getTotalOpsPerSecond());
    payload.put("mapOpsPerSecond", entity.getMapOpsPerSecond());
    payload.put("reduceOpsPerSecond", entity.getReduceOpsPerSecond());
    payload.put("avgOpsPerTask", entity.getAvgOpsPerTask());
    payload.put("avgOpsPerMap", entity.getAvgOpsPerMap());
    payload.put("avgOpsPerReduce", entity.getAvgOpsPerReduce());
    payload.put("currentState", entity.getCurrentState());
    payload.put("numTotalMaps", entity.getNumTotalMaps());
    payload.put("numTotalReduces", entity.getNumTotalReduces());
    payload.put("duration", entity.getDuration());
    payload.put("avgMapTime", entity.getAvgMapTime());
    payload.put("avgReduceTime", entity.getAvgReduceTime());
    Object messageKey = payload.get(MRJobTagName.JOB_ID.toString());
    collector.collect(stormStreamId, new ValuesArray(messageKey, payload));
}
/**
 * Converts a running-job execution entity into the analyzer's internal model.
 *
 * <p>Fix: {@code new HashMap<>(jobExecutionAPIEntity.getJobConfig())} threw an NPE when
 * the job config had not been populated yet; a null config now maps to an empty map.
 *
 * @param jobExecutionAPIEntity source entity with tags, timing and config
 * @return a populated AnalyzerEntity carrying a defensive copy of the job config
 */
private AnalyzerEntity convertToAnalysisEntity(JobExecutionAPIEntity jobExecutionAPIEntity) {
    AnalyzerEntity mrJobAnalysisEntity = new AnalyzerEntity();
    Map<String, String> tags = jobExecutionAPIEntity.getTags();
    mrJobAnalysisEntity.setJobDefId(tags.get(MRJobTagName.JOD_DEF_ID.toString()));
    mrJobAnalysisEntity.setJobId(tags.get(MRJobTagName.JOB_ID.toString()));
    mrJobAnalysisEntity.setSiteId(tags.get(MRJobTagName.SITE.toString()));
    mrJobAnalysisEntity.setUserId(tags.get(MRJobTagName.USER.toString()));
    mrJobAnalysisEntity.setStartTime(jobExecutionAPIEntity.getStartTime());
    mrJobAnalysisEntity.setEndTime(jobExecutionAPIEntity.getEndTime());
    mrJobAnalysisEntity.setDurationTime(jobExecutionAPIEntity.getDurationTime());
    mrJobAnalysisEntity.setCurrentState(jobExecutionAPIEntity.getInternalState());
    // Defensive copy; guard against a not-yet-fetched (null) job config.
    mrJobAnalysisEntity.setJobConfig(jobExecutionAPIEntity.getJobConfig() == null
            ? new HashMap<>()
            : new HashMap<>(jobExecutionAPIEntity.getJobConfig()));
    mrJobAnalysisEntity.setProgress(this.app.getProgress());
    return mrJobAnalysisEntity;
}
}
// Fragment (statements from the middle of a larger method): fetch jobs within the
// resolved query window and collect their ids.
List<org.apache.eagle.jpm.mr.historyentity.JobExecutionAPIEntity> jobs = getJobs(site, currentTime, queryTimeRange.f0(), queryTimeRange.f1());
Set<String> jobIds = new HashSet<>();
jobs.forEach(job -> jobIds.add(job.getTags().get(JOB_ID.toString())));
// Pull identifying tags for a single job. NOTE(review): this `job` is not the lambda
// parameter above — it must be bound in the enclosing (not visible) scope; verify.
String jobId = job.getTags().get(JOB_ID.toString());
String jobQueue = job.getTags().get(JOB_QUEUE.toString());
String user = job.getTags().get(USER.toString());
@Override public void onInputStream(InputStream jobFileInputStream, org.apache.hadoop.conf.Configuration conf) throws Exception { @SuppressWarnings("serial") Map<String, String> baseTags = new HashMap<String, String>() { { put(MRJobTagName.SITE.toString(), appConfig.getJobHistoryEndpointConfig().site); } }; if (!filter.acceptJobFile()) { // close immediately if we don't need job file jobFileInputStream.close(); } else { //get parser and parse, do not need to emit data now JHFParserBase parser = JHFParserFactory.getParser(baseTags, conf, filter, appConfig); parser.parse(jobFileInputStream); jobFileInputStream.close(); } } }
/**
 * Accumulates a task attempt's duration and counters into the per-job MAP/REDUCE
 * aggregates. Attempts with an unknown task type or missing counters are logged and skipped.
 *
 * <p>Fix: type comparison now uses {@code equalsIgnoreCase} instead of the
 * locale-sensitive {@code taskType.toUpperCase()} followed by {@code equals}.
 *
 * @param entity finished task attempt whose counters are folded into the aggregates
 */
private void taskAttemptEntityCreated(TaskAttemptExecutionAPIEntity entity) {
    JobCounters jobCounters = entity.getJobCounters();
    String taskType = entity.getTags().get(TASK_TYPE.toString());
    if (taskType != null && jobCounters != null && jobCounters.getCounters() != null) {
        if (Constants.TaskType.MAP.toString().equalsIgnoreCase(taskType)) {
            mapAttemptDuration += entity.getDuration();
            this.mapTaskAttemptCounterAgg.accumulate(jobCounters.getCounters().get(Constants.TASK_COUNTER));
            this.mapFileSystemCounterAgg.accumulate(jobCounters.getCounters().get(Constants.FILE_SYSTEM_COUNTER));
            return;
        } else if (Constants.TaskType.REDUCE.toString().equalsIgnoreCase(taskType)) {
            reduceAttemptDuration += entity.getDuration();
            this.reduceTaskAttemptCounterAgg.accumulate(jobCounters.getCounters().get(Constants.TASK_COUNTER));
            this.reduceFileSystemTaskCounterAgg.accumulate(jobCounters.getCounters().get(Constants.FILE_SYSTEM_COUNTER));
            return;
        }
    }
    // Reached only for null/unknown task types or missing counters: serialize the entity
    // for diagnostics. The mapper is built lazily since this path is exceptional.
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        LOG.warn("Unknown task type of task attempt execution entity: " + objectMapper.writeValueAsString(entity));
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
    }
}
// Fragment (loop not closed in the visible span): queue for flushing only those job
// entities whose type has been resolved; entities still missing the JOB_TYPE tag are
// skipped — presumably picked up on a later pass once the tag is set (TODO confirm).
for (String jobId : mrJobEntityMap.keySet()) {
    JobExecutionAPIEntity entity = mrJobEntityMap.get(jobId);
    if (entity.getTags().containsKey(MRJobTagName.JOB_TYPE.toString())) {
        mrJobEntityCreationHandler.add(entity);