/**
 * Accepts a job only when its hash partition matches the partition owned by
 * this instance — used to shard job processing across workers.
 *
 * @param jobId the job identifier to partition on
 * @return true iff the computed partition equals this instance's partitionId
 */
@Override
public boolean accept(String jobId) {
    int part = partitioner.partition(numTotalPartitions, jobId);
    // Direct boolean return replaces the redundant if/return-true/return-false.
    return part == partitionId;
}
}
/**
 * Reads {@code field} from {@code obj} as a long, falling back to {@code 0L}
 * when the field is absent. Delegates to the three-argument overload.
 */
public static long getLong(JSONObject obj, String field) {
    final long defaultValue = 0L;
    return getLong(obj, field, defaultValue);
}
/**
 * Builds a {@link JobNameNormalization} for the supplied config.
 * Note: despite the {@code getInstance} name, this constructs a fresh
 * instance on every call rather than caching a singleton.
 */
public static JobNameNormalization getInstance(Config config) {
    return new JobNameNormalization(config);
}
/**
 * Resolves the effective job name: an explicitly assigned name wins;
 * otherwise the raw job name is run through the normalization rules.
 */
private String getNormalizedName(String jobName, String assignedName) {
    return assignedName != null
            ? assignedName
            : JobNameNormalization.getInstance(this.config.getConfig()).normalize(jobName);
}
/** Selects the MAP-type task group from the grouped task response. */
@Override
protected MRTaskExecutionResponse.TaskGroup getTasks(MRTaskExecutionResponse.TaskGroupResponse tasks) {
    String mapKey = Constants.TaskType.MAP.toString();
    return tasks.tasksGroupByType.get(mapKey);
}
}
/**
 * Handles a Spark JobEnd event: stamps the completion time on the tracked
 * SparkJob and sets its final status from the event's "Job Result".
 *
 * @param event the raw JobEnd event JSON
 */
private void handleJobEnd(JSONObject event) {
    int jobId = JSONUtils.getInt(event, "Job ID");
    SparkJob job = jobs.get(jobId);
    long completionTime = JSONUtils.getLong(event, "Completion Time", lastEventTime);
    job.setCompletionTime(completionTime);
    this.lastEventTime = completionTime;
    JSONObject jobResult = JSONUtils.getJSONObject(event, "Job Result");
    String result = JSONUtils.getString(jobResult, "Result");
    // Literal-first comparison avoids an NPE when the "Result" field is
    // missing; a missing/unknown result is treated as a failure.
    if ("JobSucceeded".equalsIgnoreCase(result)) {
        job.setStatus(SparkEntityConstant.SparkJobStatus.SUCCEEDED.toString());
    } else {
        job.setStatus(SparkEntityConstant.SparkJobStatus.FAILED.toString());
    }
}
/**
 * Handles a BlockManagerAdded event: registers (or looks up) the executor
 * named in the event and records its max memory and host:port endpoint.
 */
private void handleBlockManagerAdd(JSONObject event) {
    long maximumMemory = JSONUtils.getLong(event, "Maximum Memory");
    long eventTime = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    this.lastEventTime = eventTime;
    JSONObject blockManagerId = JSONUtils.getJSONObject(event, "Block Manager ID");
    String execId = JSONUtils.getString(blockManagerId, "Executor ID");
    String endpoint = JSONUtils.getString(blockManagerId, "Host")
            + ":"
            + JSONUtils.getLong(blockManagerId, "Port");
    SparkExecutor sparkExecutor = this.initiateExecutor(execId, eventTime);
    sparkExecutor.setMaxMemory(maximumMemory);
    sparkExecutor.setHostPort(endpoint);
}
// Recreates the HDFS handle: closes the current one, and — via the finally
// block — always reacquires a fresh FileSystem even when close() throws,
// so `hdfs` is never left pointing at a closed filesystem. Any close()
// exception still propagates to the caller after the reassignment.
@Override public void freshFileSystem() throws Exception { try { hdfs.close(); } finally { hdfs = HDFSUtil.getFileSystem(conf); } }
private void addRule(String rule) { for (NormalizationOp op : NormalizationOp.values()) { // split the rule to be source and target string String[] elements = rule.split(op.toString()); if (elements == null || elements.length != 2) { return; } JobNameNormalizationRule r = new JobNameNormalizationRule(); r.pattern = Pattern.compile(elements[0].trim()); r.op = op; r.target = elements[1].trim(); rules.add(r); break; //once one Op is matched, exit } }
/**
 * Handles an ExecutorRemoved event: stamps the removal time as the end time
 * of the tracked executor.
 */
private void handleExecutorRemoved(JSONObject event) {
    String execId = JSONUtils.getString(event, "Executor ID");
    long removalTime = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    SparkExecutor sparkExecutor = executors.get(execId);
    sparkExecutor.setEndTime(removalTime);
    this.lastEventTime = removalTime;
}
/**
 * Handles an ExecutorAdded event: advances the event clock and registers the
 * executor with its add time.
 *
 * @param event the raw ExecutorAdded event JSON
 */
private void handleExecutorAdd(JSONObject event) {
    // Use the shared JSONUtils accessor for consistency with the other event
    // handlers (previously a raw (String) cast on event.get()).
    String executorID = JSONUtils.getString(event, "Executor ID");
    long executorAddTime = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    this.lastEventTime = executorAddTime;
    this.initiateExecutor(executorID, executorAddTime);
    // NOTE(review): the original also fetched "Executor Info" into an unused
    // local; that dead read has been removed.
}
/**
 * Finalizes bookkeeping for a Spark application attempt: marks its YARN
 * state/status, drops its job config, and flips the parser to APP_FINISHED
 * once no configs remain.
 *
 * @param sparkAppId the application id being finished
 */
private void finishSparkApp(String sparkAppId) {
    SparkAppEntity attemptEntity = sparkAppEntityMap.get(sparkAppId);
    attemptEntity.setYarnState(Constants.AppState.FINISHED.toString());
    // NOTE(review): status is unconditionally FAILED while state is FINISHED —
    // looks intentional for this code path, but worth confirming upstream.
    attemptEntity.setYarnStatus(Constants.AppStatus.FAILED.toString());
    sparkJobConfigs.remove(sparkAppId);
    // isEmpty() is the idiomatic (and on some collections cheaper) emptiness check.
    if (sparkJobConfigs.isEmpty()) {
        this.parserStatus = ParserStatus.APP_FINISHED;
    }
    stagesTime.clear();
    LOG.info("spark application {} has been finished", sparkAppId);
}
/**
 * Performs the HDFSUtil login for the given configuration and then returns a
 * FileSystem bound to it.
 *
 * @param conf the Hadoop configuration to log in with and connect through
 * @return the FileSystem for {@code conf}
 * @throws IOException if the filesystem cannot be obtained
 */
public static FileSystem getFileSystem(Configuration conf) throws IOException {
    HDFSUtil.login(conf);
    return FileSystem.get(conf);
}
/**
 * Builds a map-spill tuning suggestion by comparing the average spilled
 * bytes of the short-running tasks against the long-running ones.
 */
@Override
public JobSuggestionResponse apply(TaskGroupResponse data) {
    MRTaskExecutionResponse.TaskGroup taskGroup = getTasks(data);
    double shortTaskSpill = getAverageSpillBytes(taskGroup.shortTasks);
    double longTaskSpill = getAverageSpillBytes(taskGroup.longTasks);
    JobSuggestionResponse suggestion = new JobSuggestionResponse();
    suggestion.suggestionType = Constants.SuggestionType.MapSpill.toString();
    suggestion.suggestionResults = getSpillSuggest(shortTaskSpill, longTaskSpill);
    return suggestion;
}
/** Selects the REDUCE-type task group from the grouped task response. */
@Override
protected MRTaskExecutionResponse.TaskGroup getTasks(MRTaskExecutionResponse.TaskGroupResponse tasks) {
    String reduceKey = Constants.TaskType.REDUCE.toString();
    return tasks.tasksGroupByType.get(reduceKey);
}
}
/** Handles an ApplicationEnd event: stamps the app's end time. */
private void handleAppEnd(JSONObject event) {
    long appEndTime = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    app.setEndTime(appEndTime);
    this.lastEventTime = appEndTime;
}
/** Selects the REDUCE-type task group from the grouped task response. */
@Override
protected MRTaskExecutionResponse.TaskGroup getTasks(MRTaskExecutionResponse.TaskGroupResponse tasks) {
    String reduceTypeKey = Constants.TaskType.REDUCE.toString();
    return tasks.tasksGroupByType.get(reduceTypeKey);
}
}
/**
 * Increments the overall task counter, plus the map or reduce counter
 * matching {@code taskType}; unrecognized types only bump the total.
 */
public void countTask(MRJobTaskCountResponse.UnitTaskCount counter, String taskType) {
    counter.taskCount++;
    String mapType = Constants.TaskType.MAP.toString();
    String reduceType = Constants.TaskType.REDUCE.toString();
    if (taskType.equalsIgnoreCase(mapType)) {
        counter.mapTaskCount++;
    } else if (taskType.equalsIgnoreCase(reduceType)) {
        counter.reduceTaskCount++;
    }
}
/** Selects the MAP-type task group from the grouped task response. */
@Override
protected MRTaskExecutionResponse.TaskGroup getTasks(MRTaskExecutionResponse.TaskGroupResponse tasks) {
    String mapTypeKey = Constants.TaskType.MAP.toString();
    return tasks.tasksGroupByType.get(mapTypeKey);
}
/** Looks up the MAP-type task group within the grouped response. */
private MRTaskExecutionResponse.TaskGroup getTasks(TaskGroupResponse data) {
    String key = Constants.TaskType.MAP.toString();
    return data.tasksGroupByType.get(key);
}