/**
 * Serializes this identifier to the wire format.
 *
 * Field order is the contract and must match the corresponding
 * deserialization logic exactly: cluster timestamp, application id,
 * delegation token, creation time, vertex parallelism, then the two
 * optional byte arrays, each length-prefixed (0 when absent).
 *
 * @param out the stream to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeLong(fakeAppId.getClusterTimestamp());
  out.writeInt(fakeAppId.getId());
  token.write(out);
  out.writeLong(creationTime);
  out.writeInt(vertexParallelism);
  // Optional payloads are length-prefixed; a null array is written as
  // length 0, so on the wire it is indistinguishable from an empty array.
  if (vertexSpec != null) {
    out.writeInt(vertexSpec.length);
    out.write(vertexSpec);
  } else {
    out.writeInt(0);
  }
  if (vertexSpecSignature != null) {
    out.writeInt(vertexSpecSignature.length);
    out.write(vertexSpecSignature);
  } else {
    out.writeInt(0);
  }
}
private void setupMRLegacyConfigs(ProcessorContext processorContext) { // Hive "insert overwrite local directory" uses task id as dir name // Setting the id in jobconf helps to have the similar dir name as MR StringBuilder taskAttemptIdBuilder = new StringBuilder("attempt_"); taskAttemptIdBuilder.append(processorContext.getApplicationId().getClusterTimestamp()) .append("_") .append(jobIdFormat.format(processorContext.getApplicationId().getId())) .append("_"); if (isMap) { taskAttemptIdBuilder.append("m_"); } else { taskAttemptIdBuilder.append("r_"); } taskAttemptIdBuilder.append(taskIdFormat.format(processorContext.getTaskIndex())) .append("_") .append(processorContext.getTaskAttemptNumber()); // In MR, mapreduce.task.attempt.id is same as mapred.task.id. Go figure. String taskAttemptIdStr = taskAttemptIdBuilder.toString(); this.jobConf.set("mapred.task.id", taskAttemptIdStr); this.jobConf.set("mapreduce.task.attempt.id", taskAttemptIdStr); this.jobConf.setInt("mapred.task.partition", processorContext.getTaskIndex()); }
/**
 * Populates legacy MapReduce task-id properties in the job conf so that
 * downstream consumers see MR-compatible values.
 *
 * The synthesized id follows the MR attempt format:
 * attempt_[clusterTimestamp]_[jobId]_[m|r]_[taskId]_[attemptNumber].
 */
private void setupMRLegacyConfigs(ProcessorContext processorContext) {
  // Hive "insert overwrite local directory" uses task id as dir name
  // Setting the id in jobconf helps to have the similar dir name as MR
  StringBuilder taskAttemptIdBuilder = new StringBuilder("attempt_");
  taskAttemptIdBuilder.append(processorContext.getApplicationId().getClusterTimestamp())
      .append("_")
      .append(jobIdFormat.format(processorContext.getApplicationId().getId()))
      .append("_");
  // "m"/"r" marker mirrors MR's map/reduce task-type encoding.
  if (isMap) {
    taskAttemptIdBuilder.append("m_");
  } else {
    taskAttemptIdBuilder.append("r_");
  }
  taskAttemptIdBuilder.append(taskIdFormat.format(processorContext.getTaskIndex()))
      .append("_")
      .append(processorContext.getTaskAttemptNumber());
  // In MR, mapreduce.task.attempt.id is same as mapred.task.id. Go figure.
  String taskAttemptIdStr = taskAttemptIdBuilder.toString();
  this.jobConf.set("mapred.task.id", taskAttemptIdStr);
  this.jobConf.set("mapreduce.task.attempt.id", taskAttemptIdStr);
  this.jobConf.setInt("mapred.task.partition", processorContext.getTaskIndex());
}
+ jstormMasterContext.appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + jstormMasterContext.appAttemptID.getAttemptId());
+ applicationAttemptID.getApplicationId().getId() + ", clustertimestamp=" + applicationAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + applicationAttemptID.getAttemptId());
+ appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId());
/**
 * Orders ApplicationIds by cluster timestamp first, then by id.
 *
 * Uses {@link Long#compare} / {@link Integer#compare} instead of
 * subtraction: {@code this.getId() - other.getId()} can overflow for
 * extreme values and report the wrong sign, and the original else-branch
 * ternary contained an unreachable {@code 0} arm. Only the sign of the
 * result is part of the Comparable contract, so callers are unaffected.
 *
 * @param other the ApplicationId to compare against
 * @return a negative integer, zero, or a positive integer per Comparable
 */
@Override
public int compareTo(ApplicationId other) {
  int byTimestamp =
      Long.compare(this.getClusterTimestamp(), other.getClusterTimestamp());
  if (byTimestamp != 0) {
    return byTimestamp;
  }
  return Integer.compare(this.getId(), other.getId());
}
/**
 * Orders ApplicationIds by cluster timestamp first, then by id.
 *
 * Uses {@link Long#compare} / {@link Integer#compare} instead of
 * subtraction: {@code this.getId() - other.getId()} can overflow for
 * extreme values and report the wrong sign, and the original else-branch
 * ternary contained an unreachable {@code 0} arm. Only the sign of the
 * result is part of the Comparable contract, so callers are unaffected.
 *
 * @param other the ApplicationId to compare against
 * @return a negative integer, zero, or a positive integer per Comparable
 */
@Override
public int compareTo(ApplicationId other) {
  int byTimestamp =
      Long.compare(this.getClusterTimestamp(), other.getClusterTimestamp());
  if (byTimestamp != 0) {
    return byTimestamp;
  }
  return Integer.compare(this.getId(), other.getId());
}
@Override public int hashCode() { // Generated by eclipse. final int prime = 371237; int result = 6521; long clusterTimestamp = getClusterTimestamp(); result = prime * result + (int) (clusterTimestamp ^ (clusterTimestamp >>> 32)); result = prime * result + getId(); return result; }
/**
 * Returns the app name as the flow name, unless it is null or the
 * default application name, in which case a synthetic
 * "flow_[clusterTimestamp]_[appId]" name is generated.
 */
public static String generateDefaultFlowName(String appName, ApplicationId appId) {
  boolean hasUsableName =
      appName != null && !appName.equals(YarnConfiguration.DEFAULT_APPLICATION_NAME);
  if (hasUsableName) {
    return appName;
  }
  return "flow_" + appId.getClusterTimestamp() + "_" + appId.getId();
}
/**
 * Renders this id as JOB + SEPARATOR + cluster timestamp + SEPARATOR +
 * the formatted numeric id.
 */
@Override
public String toString() {
  return new StringBuilder(JOB)
      .append(SEPARATOR)
      .append(getAppId().getClusterTimestamp())
      .append(SEPARATOR)
      .append(jobIdFormat.get().format(getId()))
      .toString();
}
/**
 * Builds the canonical string form: the application id prefix, the
 * cluster timestamp, '_', then the numeric id padded to
 * APP_ID_MIN_DIGITS via FastNumberFormat.
 */
@Override
public String toString() {
  // Pre-sized to avoid StringBuilder growth for typical id lengths.
  StringBuilder sb = new StringBuilder(64);
  sb.append(APPLICATION_ID_PREFIX);
  sb.append(getClusterTimestamp());
  sb.append('_');
  // FastNumberFormat writes the zero-padded id directly into sb.
  FastNumberFormat.format(sb, getId(), APP_ID_MIN_DIGITS);
  return sb.toString();
}
/**
 * Appends this id's unique string suffix to the given builder:
 * SEPARATOR, the application's cluster timestamp, SEPARATOR, the
 * application id rendered through tezAppIdFormat, SEPARATOR, and
 * finally this object's own id.
 *
 * @param builder the builder to append to
 * @return the builder that was passed in
 */
protected StringBuilder appendTo(StringBuilder builder) {
  builder.append(SEPARATOR);
  builder.append(applicationId.getClusterTimestamp());
  builder.append(SEPARATOR);
  // tezAppIdFormat.get() suggests a per-thread number format that writes
  // the padded application id directly into the builder — TODO confirm.
  tezAppIdFormat.get().format(applicationId.getId(), builder);
  return builder.append(SEPARATOR).append(id);
}
/**
 * Canonical string form:
 * prefix + "_" + clusterTimestamp + "_" + appId + "_" + groupId.
 */
@Override
public String toString() {
  ApplicationId appId = getApplicationId();
  StringBuilder out = new StringBuilder();
  out.append(TIMELINE_ENTITY_GROUPID_STR_PREFIX)
      .append("_")
      .append(appId.getClusterTimestamp())
      .append("_")
      .append(appId.getId())
      .append("_")
      .append(getTimelineEntityGroupId());
  return out.toString();
}
/**
 * Canonical string form of this group id:
 * prefix + "_" + clusterTimestamp + "_" + appId + "_" + groupId.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append(TIMELINE_ENTITY_GROUPID_STR_PREFIX + "_");
  ApplicationId appId = getApplicationId();
  sb.append(appId.getClusterTimestamp()).append("_");
  sb.append(appId.getId()).append("_");
  sb.append(getTimelineEntityGroupId());
  return sb.toString();
}
/**
 * Renders this task id in the MR style:
 * TASK + "_" + clusterTimestamp + "_" + formatted job id + "_" +
 * ("m" for map, "r" for reduce) + "_" + formatted task id.
 */
@Override
public String toString() {
  JobId jobId = getJobId();
  String typeMarker = (getTaskType() == TaskType.MAP) ? "m" : "r";
  StringBuilder out = new StringBuilder(TASK);
  out.append("_")
      .append(jobId.getAppId().getClusterTimestamp())
      .append("_")
      .append(JobId.jobIdFormat.get().format(jobId.getAppId().getId()))
      .append("_")
      .append(typeMarker)
      .append("_")
      .append(taskIdFormat.get().format(getId()));
  return out.toString();
}
/**
 * Canonical string form: the attempt-id prefix, cluster timestamp,
 * '_', the app id padded to APP_ID_MIN_DIGITS, '_', and the attempt
 * number padded to ATTEMPT_ID_MIN_DIGITS.
 */
@Override
public String toString() {
  ApplicationId appId = getApplicationId();
  // Pre-sized to avoid StringBuilder growth for typical id lengths.
  StringBuilder out = new StringBuilder(64);
  out.append(APP_ATTEMPT_ID_PREFIX)
      .append(appId.getClusterTimestamp())
      .append('_');
  FastNumberFormat.format(out, appId.getId(), APP_ID_MIN_DIGITS);
  out.append('_');
  FastNumberFormat.format(out, getAttemptId(), ATTEMPT_ID_MIN_DIGITS);
  return out.toString();
}
@Override public void stopApplication(ApplicationTerminationContext context) { ApplicationId appId = context.getApplicationId(); JobID jobId = new JobID(Long.toString(appId.getClusterTimestamp()), appId.getId()); try { removeJobShuffleInfo(jobId); } catch (IOException e) { LOG.error("Error during stopApp", e); // TODO add API to AuxiliaryServices to report failures } }
/**
 * Builds an MR-style TaskAttemptID mirroring a Tez task attempt id, for
 * code paths that still expect MapReduce identifiers.
 *
 * NOTE(review): the jtIdentifier is formed by concatenating the
 * application's cluster timestamp with the vertex id as strings —
 * presumably to keep mock job ids unique across vertices within one
 * application; confirm downstream consumers accept this shape.
 *
 * @param tezTaId the Tez task attempt id to mirror
 * @param isMap whether to mark the mock id as a map (vs reduce) task
 * @return a mock MR TaskAttemptID carrying the Tez attempt's numbering
 */
public static org.apache.hadoop.mapred.TaskAttemptID createMockTaskAttemptIDFromTezTaskAttemptId(TezTaskAttemptID tezTaId, boolean isMap) {
  TezVertexID vId = tezTaId.getTaskID().getVertexID();
  ApplicationId appId = vId.getDAGId().getApplicationId();
  return new org.apache.hadoop.mapred.TaskAttemptID(
      new org.apache.hadoop.mapred.TaskID(
          String.valueOf(appId.getClusterTimestamp()) + String.valueOf(vId.getId()),
          appId.getId(),
          isMap ? TaskType.MAP : TaskType.REDUCE,
          tezTaId.getTaskID().getId()),
      tezTaId.getId());
}
private void verifyTokenWithTamperedID(final Configuration conf, final CustomAM am, Token<ClientToAMTokenIdentifier> token) throws IOException { // Malicious user, messes with appId UserGroupInformation ugi = UserGroupInformation.createRemoteUser("me"); ClientToAMTokenIdentifier maliciousID = new ClientToAMTokenIdentifier(BuilderUtils.newApplicationAttemptId( BuilderUtils.newApplicationId(am.appAttemptId.getApplicationId() .getClusterTimestamp(), 42), 43), UserGroupInformation .getCurrentUser().getShortUserName()); verifyTamperedToken(conf, am, token, ugi, maliciousID); }