public static String getBuildInstance(Output output) {
    final String str = output.getExtra().get(BUILD_INSTANCE);
    if (str != null) {
        return str;
    }
    return "unknown";
}
public static long getExtraInfoAsLong(Output output, String key, long defaultValue) {
    final String str = output.getExtra().get(key);
    if (str != null) {
        return Long.parseLong(str);
    } else {
        return defaultValue;
    }
}
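// Usage sketch (hypothetical call site; variable names are illustrative, not from
// the source): read a numeric value out of the extra-info map with a fallback for
// steps that never recorded the key. MAP_REDUCE_WAIT_TIME is the same constant
// used by parseToJobStep further down.
long waitTimeMillis = getExtraInfoAsLong(output, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L);
long waitTimeSeconds = waitTimeMillis / 1000;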
protected final Map<String, String> getExtraInfo() {
    return getOutput().getExtra();
}
private String findExtraInfo(String key, String dft, boolean backward) {
    ArrayList<AbstractExecutable> tasks = new ArrayList<AbstractExecutable>(getTasks());
    if (backward) {
        Collections.reverse(tasks);
    }
    for (AbstractExecutable child : tasks) {
        Output output = getManager().getOutput(child.getId());
        String value = output.getExtra().get(key);
        if (value != null)
            return value;
    }
    return dft;
}
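// Usage sketch (illustrative only): passing backward = true makes the most
// recently executed child task win, which matters when several steps write the
// same key. YARN_APP_URL stands in for any extra-info key here; fall back to
// "N/A" when no child task recorded it.
String appUrl = findExtraInfo(ExecutableConstants.YARN_APP_URL, "N/A", true);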
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    final Output output = getOutput();
    if (output.getExtra().containsKey(START_TIME)) {
        final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID);
        if (sparkJobID == null) {
            getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            return;
        }
        try {
            String status = getAppState(sparkJobID);
            if (status == null || status.equals("FAILED") || status.equals("KILLED")) {
                // remove previous spark job info and start fresh
                super.onExecuteStart(executableContext);
            } else {
                getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            }
        } catch (IOException e) {
            logger.warn("error getting hadoop application state", e);
            super.onExecuteStart(executableContext);
        }
    } else {
        super.onExecuteStart(executableContext);
    }
}
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    final Output output = getOutput();
    if (output.getExtra().containsKey(START_TIME)) {
        final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID);
        if (mrJobId == null) {
            getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            // ... (fragment truncated in source; the complete MR-status variant appears below)
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    ExecutableManager mgr = getManager();
    Map<String, String> extra = mgr.getOutput(getId()).getExtra();
    String sparkJobId = extra.get(ExecutableConstants.SPARK_JOB_ID);
    if (!StringUtils.isEmpty(sparkJobId)) {
        // re-read the latest persisted output before acting on the job id
        extra = mgr.getOutput(getId()).getExtra();
        if (extra != null && extra.get(ExecutableConstants.SPARK_JOB_ID) != null) {
            killAppRetry(extra.get(ExecutableConstants.SPARK_JOB_ID));
            // blank out the stale job id so a resubmission starts clean
            extra = mgr.getOutput(getId()).getExtra();
            extra.put(ExecutableConstants.SPARK_JOB_ID, "");
            getManager().addJobInfo(getId(), extra);
            // ...
final String mrJobId = errorOutput.getExtra().get(ExecutableConstants.MR_JOB_ID);
dataMap.put("mr_job_id", StringUtil.noBlank(mrJobId, "Not initialized"));
} else {
Configuration conf = new Configuration(HadoopUtil.getCurrentConfiguration());
String[] jobArgs = overwriteJobConf(conf, context.getConfig(), getMapReduceParams().trim().split("\\s+"));
final Map<String, String> extra = mgr.getOutput(getId()).getExtra();
if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
    // a job id was persisted earlier, so re-attach to the existing MR job
    job = new Cluster(conf).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
    Preconditions.checkNotNull(stepOutput);
    JobInstance.JobStep result = new JobInstance.JobStep();
    result.setId(task.getId());
    result.setName(task.getName());
    result.setSequenceID(i);
    result.setStatus(parseToJobStepStatus(stepOutput.getState()));
    // copy all extra-info entries onto the step, skipping null keys and values
    for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
        if (entry.getKey() != null && entry.getValue() != null) {
            result.putInfo(entry.getKey(), entry.getValue());
        }
    }
    result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
    result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
    if (task instanceof ShellExecutable) {
        result.setExecCmd(((ShellExecutable) task).getCmd());
    }
    if (task instanceof MapReduceExecutable) {
        result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
        // wait time is stored in milliseconds; report it in seconds
        result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
    }
    if (task instanceof HadoopShellExecutable) {
        result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
    }
    return result;
}
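// Hypothetical driver loop (variable names and the getTasks() accessor on
// chainedJob are assumptions, not from the source): convert each task of a
// chained job into a JobStep by pairing it with its persisted Output.
List<JobInstance.JobStep> steps = new ArrayList<JobInstance.JobStep>();
int sequenceId = 0;
for (AbstractExecutable task : chainedJob.getTasks()) {
    Output stepOutput = getManager().getOutput(task.getId());
    steps.add(parseToJobStep(task, sequenceId++, stepOutput));
}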
protected long getExtraInfoAsLong(String key, long defaultValue) {
    final String str = executableManager.getOutput(getId()).getExtra().get(key);
    if (str != null) {
        return Long.parseLong(str);
    } else {
        return defaultValue;
    }
}
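// Caveat: Long.parseLong throws NumberFormatException on a malformed value, so a
// corrupted extra-info entry fails the caller instead of falling back. A
// defensive variant (a sketch, not the project's actual code) tolerates bad values:
protected long getExtraInfoAsLongSafe(String key, long defaultValue) {
    final String str = executableManager.getOutput(getId()).getExtra().get(key);
    if (str == null) {
        return defaultValue;
    }
    try {
        return Long.parseLong(str.trim());
    } catch (NumberFormatException e) {
        return defaultValue;
    }
}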
String sourceRecordsCount = baseCuboidOutput.getExtra().get(ExecutableConstants.SOURCE_RECORDS_COUNT);
Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsCount), "Can't get cube source record count.");
long sourceCount = Long.parseLong(sourceRecordsCount);

String sourceRecordsSize = baseCuboidOutput.getExtra().get(ExecutableConstants.SOURCE_RECORDS_SIZE);
Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsSize), "Can't get cube source record size.");
long sourceSize = Long.parseLong(sourceRecordsSize);

boolean segmentReady = true;
if (!StringUtils.isBlank(getConvertToHfileStepId())) {
    String cubeSizeString = executableManager.getOutput(getConvertToHfileStepId()).getExtra().get(ExecutableConstants.HDFS_BYTES_WRITTEN);
    Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size.");
    // HDFS_BYTES_WRITTEN is recorded in bytes; convert to KB
    size = Long.parseLong(cubeSizeString) / 1024;
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    final Output output = executableManager.getOutput(getId());
    if (output.getExtra().containsKey(START_TIME)) {
        final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID);
        if (mrJobId == null) {
            executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            return;
        }
        try {
            Job job = new Cluster(new Configuration()).getJob(JobID.forName(mrJobId));
            if (job.getJobState() == JobStatus.State.FAILED) {
                // remove previous mr job info and start fresh
                super.onExecuteStart(executableContext);
            } else {
                executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            }
        } catch (IOException e) {
            logger.warn("error getting hadoop job status", e);
            super.onExecuteStart(executableContext);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            logger.warn("error getting hadoop job status", e);
            super.onExecuteStart(executableContext);
        }
    } else {
        super.onExecuteStart(executableContext);
    }
}
try {
    Job job;
    final Map<String, String> extra = executableManager.getOutput(getId()).getExtra();
    if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
        // resume tracking the previously submitted MR job instead of launching a new one
        job = new Cluster(new Configuration()).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
    return new ExecuteResult(ExecuteResult.State.FAILED, "there is no segment with id: " + getSegmentId());

String cubeSizeString = executableManager.getOutput(getConvertToHfileStepId()).getExtra().get(ExecutableConstants.HDFS_BYTES_WRITTEN);
Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size.");
long cubeSize = Long.parseLong(cubeSizeString) / 1024; // bytes written -> KB
public static String getExtraInfo(Output output, String key) {
    return output.getExtra().get(key);
}
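// Usage sketch (illustrative call site): the lookup returns null when the key
// was never written, so callers are expected to null-check before use.
String sparkJobId = getExtraInfo(output, ExecutableConstants.SPARK_JOB_ID);
if (!StringUtils.isEmpty(sparkJobId)) {
    // a previous attempt already submitted the Spark job
}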