@Override public void finalizeGlobal(int parallelism) throws IOException { try { JobContext jobContext = new JobContextImpl(this.jobConf, new JobID()); OutputCommitter outputCommitter = this.jobConf.getOutputCommitter(); // finalize HDFS output format outputCommitter.commitJob(jobContext); } catch (Exception e) { throw new RuntimeException(e); } }
/** * create the temporary output file for hadoop RecordWriter. * @param taskNumber The number of the parallel instance. * @param numTasks The number of parallel tasks. * @throws java.io.IOException */ @Override public void open(int taskNumber, int numTasks) throws IOException { // enforce sequential open() calls synchronized (OPEN_MUTEX) { if (Integer.toString(taskNumber + 1).length() > 6) { throw new IOException("Task id too large."); } TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_" + String.format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0") + Integer.toString(taskNumber + 1) + "_0"); this.jobConf.set("mapred.task.id", taskAttemptID.toString()); this.jobConf.setInt("mapred.task.partition", taskNumber + 1); // for hadoop 2.2 this.jobConf.set("mapreduce.task.attempt.id", taskAttemptID.toString()); this.jobConf.setInt("mapreduce.task.partition", taskNumber + 1); this.context = new TaskAttemptContextImpl(this.jobConf, taskAttemptID); this.outputCommitter = this.jobConf.getOutputCommitter(); JobContext jobContext = new JobContextImpl(this.jobConf, new JobID()); this.outputCommitter.setupJob(jobContext); this.recordWriter = this.mapredOutputFormat.getRecordWriter(null, this.jobConf, Integer.toString(taskNumber + 1), new HadoopDummyProgressable()); } }
hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());
jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobCtx = new JobContextImpl(jobConf, new JobID(jobId.globalId().toString(), jobId.localId()));
/** * test JobID * @throws IOException */ @SuppressWarnings("deprecation") @Test (timeout=5000) public void testJobID() throws IOException{ JobID jid = new JobID("001",2); ByteArrayOutputStream out = new ByteArrayOutputStream(); jid.write(new DataOutputStream(out)); assertEquals(jid,JobID.read(new DataInputStream(new ByteArrayInputStream(out.toByteArray())))); assertEquals("job_001_0001",JobID.getJobIDsPattern("001",1)); } /**
/** * test without TASK_LOG_DIR * * @throws IOException */ @Test (timeout=50000) public void testTaskLogWithoutTaskLogDir() throws IOException { // TaskLog tasklog= new TaskLog(); System.clearProperty(YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR); // test TaskLog assertEquals(TaskLog.getMRv2LogDir(), null); TaskAttemptID taid = mock(TaskAttemptID.class); JobID jid = new JobID("job", 1); when(taid.getJobID()).thenReturn(jid); when(taid.toString()).thenReturn("JobId"); File f = TaskLog.getTaskLogFile(taid, true, LogName.STDOUT); assertTrue(f.getAbsolutePath().endsWith("stdout")); }
/**
 * Construct an empty {@link JobProfile} backed by a default (empty) job id.
 */
public JobProfile() {
    this.jobid = new JobID();
}
/**
 * Constructs a TaskInProgressId object from given parts.
 *
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param type the {@link TaskType}
 * @param id the tip number
 */
public TaskID(String jtIdentifier, int jobId, TaskType type, int id) {
  // Convenience overload: assemble the JobID, then delegate to the
  // JobID-based constructor.
  this(new JobID(jtIdentifier, jobId), type, id);
}
/**
 * Construct an empty {@link JobProfile}; the job id is initialized to a
 * default (empty) {@link JobID}.
 */
public JobProfile() {
    this.jobid = new JobID();
}
/**
 * Constructs a TaskInProgressId object from given parts.
 *
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param type the {@link TaskType}
 * @param id the tip number
 */
public TaskID(String jtIdentifier, int jobId, TaskType type, int id) {
  // Convenience overload: build the JobID from its parts and delegate.
  this(new JobID(jtIdentifier, jobId), type, id);
}
assertEquals(TaskLog.getMRv2LogDir(), "testString"); TaskAttemptID taid = mock(TaskAttemptID.class); JobID jid = new JobID("job", 1);
/**
 * test deprecated methods of TaskID
 *
 * NOTE(review): the method name misspells "Deprecated"; renaming it would
 * change the test's public name, so it is left as-is here.
 *
 * @throws IOException
 */
@SuppressWarnings("deprecation")
@Test (timeout=5000)
public void testDepricatedMethods() throws IOException {
  JobID jid = new JobID();
  // The boolean-flag constructors map true -> MAP and false -> REDUCE.
  TaskID test = new TaskID(jid, true, 1);
  assertEquals(test.getTaskType(), TaskType.MAP);
  test = new TaskID(jid, false, 1);
  assertEquals(test.getTaskType(), TaskType.REDUCE);
  test = new TaskID("001", 1, false, 1);
  assertEquals(test.getTaskType(), TaskType.REDUCE);
  test = new TaskID("001", 1, true, 1);
  assertEquals(test.getTaskType(), TaskType.MAP);
  // Round-trip through Writable serialization; the ids must render equal.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  test.write(new DataOutputStream(out));
  TaskID ti = TaskID.read(new DataInputStream(new ByteArrayInputStream(out
      .toByteArray())));
  assertEquals(ti.toString(), test.toString());
  // Deprecated pattern helpers zero-pad the job number (4 digits) and the
  // tip number (6 digits).
  assertEquals("task_001_0001_m_000002", TaskID.getTaskIDsPattern("001", 1,
      true, 2));
  assertEquals("task_003_0001_m_000004", TaskID.getTaskIDsPattern("003", 1,
      TaskType.MAP, 4));
  assertEquals("003_0001_m_000004", TaskID.getTaskIDsPatternWOPrefix("003",
      1, TaskType.MAP, 4).toString());
}
/**
 * Deserializes a {@link JobID} from the given stream.
 *
 * @param in the stream to read the id fields from
 * @return the deserialized JobID
 * @throws IOException if reading from the stream fails
 */
@Deprecated
public static JobID read(DataInput in) throws IOException {
    final JobID result = new JobID();
    result.readFields(in);
    return result;
}
/**
 * Allocates a new JobId string. Synchronized so concurrent callers never
 * observe the same job number.
 */
public synchronized JobID getNewJobId() throws IOException {
    final String trackerId = getTrackerIdentifier();
    return new JobID(trackerId, nextJobId++);
}
/** * Allocates a new JobId string. */ public JobID getNewJobId() throws IOException { JobID id = new JobID(getTrackerIdentifier(), nextJobId.getAndIncrement()); // get the user group info UserGroupInformation ugi = UserGroupInformation.getCurrentUGI(); // mark the user for this id jobToUserMap.put(id, ugi.getUserName()); LOG.info("Job id " + id + " assigned to user " + ugi.getUserName()); return id; }
/**
 * Prepares Hadoop output: looks up the configured OutputCommitter and runs
 * its job-level setup under a fresh (empty) JobID.
 */
@Override
public void init(@Nonnull Context context) {
    outputCommitter = jobConf.getOutputCommitter();
    jobContext = new JobContextImpl(jobConf, new JobID());
    // uncheckRun presumably rethrows setupJob's checked IOException as an
    // unchecked exception — confirm against the helper's definition.
    uncheckRun(() -> outputCommitter.setupJob(jobContext));
}
/**
 * Downgrade a new JobID to an old one
 *
 * @param old a new or old JobID
 * @return either old or a new JobID build to match old
 */
public static JobID downgrade(org.apache.hadoop.mapreduce.JobID old) {
    // Reuse the instance when it already is the old type; otherwise rebuild
    // an equivalent id from its parts.
    if (old instanceof JobID) {
        return (JobID) old;
    }
    return new JobID(old.getJtIdentifier(), old.getId());
}
@Override public void stopApplication(ApplicationTerminationContext context) { ApplicationId appId = context.getApplicationId(); JobID jobId = new JobID(Long.toString(appId.getClusterTimestamp()), appId.getId()); try { removeJobShuffleInfo(jobId); } catch (IOException e) { LOG.error("Error during stopApp", e); // TODO add API to AuxiliaryServices to report failures } }
/**
 * getReduceTaskReports must return an empty array (not null, not throw)
 * when the cluster does not know the requested job.
 */
@Test
public void testReduceTaskReportsWithNullJob() throws Exception {
    TestJobClient client = new TestJobClient(new JobConf());
    Cluster cluster = mock(Cluster.class);
    client.setCluster(cluster);

    JobID jobId = new JobID("test", 0);
    when(cluster.getJob(jobId)).thenReturn(null);

    TaskReport[] reports = client.getReduceTaskReports(jobId);
    assertEquals(0, reports.length);
    verify(cluster).getJob(jobId);
}