@Override
public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException {
    configuration.setInt("mapreduce.input.fileinputformat.split.minsize", minNumSplits);

    JobContext jobContext = new JobContextImpl(configuration, new JobID());

    List<InputSplit> splits;
    try {
        splits = this.hCatInputFormat.getSplits(jobContext);
    } catch (InterruptedException e) {
        throw new IOException("Could not get Splits.", e);
    }

    HadoopInputSplit[] hadoopInputSplits = new HadoopInputSplit[splits.size()];
    for (int i = 0; i < hadoopInputSplits.length; i++) {
        hadoopInputSplits[i] = new HadoopInputSplit(i, splits.get(i), jobContext);
    }
    return hadoopInputSplits;
}
/** {@inheritDoc} */
@Override
public JobID getNewJobID() throws IOException, InterruptedException {
    try {
        conf.setLong(HadoopCommonUtils.REQ_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis());

        HadoopJobId jobID = execute(HadoopProtocolNextTaskIdTask.class);

        conf.setLong(HadoopCommonUtils.RESPONSE_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis());

        return new JobID(jobID.globalId().toString(), jobID.localId());
    } catch (GridClientException e) {
        throw new IOException("Failed to get new job ID.", e);
    }
}
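The Hadoop JobID assembled on the return line uses the Ignite-side global UUID as the jtIdentifier. A minimal sketch of the resulting string form, with a made-up UUID and counter:

import org.apache.hadoop.mapreduce.JobID;

public class JobIdFormatSketch {
    public static void main(String[] args) {
        // Hypothetical values: JobID.toString() zero-pads the local counter
        // to at least four digits.
        JobID id = new JobID("6ea9a3f8-1c2b-4a30-9f06-1f2ea853f001", 7);
        System.out.println(id); // job_6ea9a3f8-1c2b-4a30-9f06-1f2ea853f001_0007
    }
}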
@Override
public JobContext newJobContext(Configuration jobConf) throws IOException {
    // Implementing:
    // return new JobContext(jobConf, new JobID());
    JobID jobId = new JobID();

    Constructor<JobContext> c;
    try {
        c = JobContext.class.getConstructor(Configuration.class, JobID.class);
        return c.newInstance(jobConf, jobId);
    } catch (Exception e) {
        throw new IllegalStateException(
            "Failed to instantiate new JobContext(jobConf, new JobID())", e);
    }
}
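The reflective lookup above targets Hadoop 1.x, where JobContext is a concrete class with a (Configuration, JobID) constructor. A minimal sketch of the equivalent on Hadoop 2.x and later, where JobContext is an interface and JobContextImpl is the concrete type:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class JobContextSketch {
    // Direct construction replaces the reflective call on Hadoop 2.x+.
    public static JobContext newJobContext(Configuration jobConf) {
        return new JobContextImpl(jobConf, new JobID());
    }
}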
@Override
public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException {
    configuration.setInt("mapreduce.input.fileinputformat.split.minsize", minNumSplits);

    JobContext jobContext = new JobContextImpl(configuration, new JobID());

    jobContext.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobContext.getCredentials().addAll(currentUserCreds);
    }

    List<org.apache.hadoop.mapreduce.InputSplit> splits;
    try {
        splits = this.mapreduceInputFormat.getSplits(jobContext);
    } catch (InterruptedException e) {
        throw new IOException("Could not get Splits.", e);
    }

    HadoopInputSplit[] hadoopInputSplits = new HadoopInputSplit[splits.size()];
    for (int i = 0; i < hadoopInputSplits.length; i++) {
        hadoopInputSplits[i] = new HadoopInputSplit(i, splits.get(i), jobContext);
    }
    return hadoopInputSplits;
}
@Override
public void finalizeGlobal(int parallelism) throws IOException {
    JobContext jobContext;
    TaskAttemptContext taskContext;
    try {
        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
            + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0")
            + Integer.toString(1)
            + "_0");

        jobContext = new JobContextImpl(this.configuration, new JobID());
        taskContext = new TaskAttemptContextImpl(this.configuration, taskAttemptID);

        this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    jobContext.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobContext.getCredentials().addAll(currentUserCreds);
    }

    // finalize HDFS output format
    if (this.outputCommitter != null) {
        this.outputCommitter.commitJob(jobContext);
    }
}
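The zero-padded attempt ID string above can also be produced with TaskAttemptID's typed constructor; a sketch that yields the same attempt__0000_r_000001_0 (empty jtIdentifier, job 0, reduce task 1, attempt 0):

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class AttemptIdSketch {
    public static void main(String[] args) {
        // Same ID as TaskAttemptID.forName("attempt__0000_r_000001_0"),
        // without the manual zero-padding.
        TaskAttemptID taskAttemptID = new TaskAttemptID("", 0, TaskType.REDUCE, 1, 0);
        System.out.println(taskAttemptID); // attempt__0000_r_000001_0
    }
}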
try {
    this.context = new TaskAttemptContextImpl(this.configuration, taskAttemptID);
    this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(this.context);
    this.outputCommitter.setupJob(new JobContextImpl(this.configuration, new JobID()));
} catch (Exception e) {
    throw new RuntimeException(e);
}
/**
 * Tests job counters retrieval for unknown job id.
 *
 * @throws Exception If failed.
 */
private void tstUnknownJobCounters() throws Exception {
    IgniteHadoopClientProtocolProvider provider = provider();

    ClientProtocol proto = provider.create(config(HadoopAbstractSelfTest.REST_PORT));

    try {
        proto.getJobCounters(new JobID(UUID.randomUUID().toString(), -1));

        fail("exception must be thrown");
    } catch (Exception e) {
        assert e instanceof IOException : "wrong error has been thrown";
    }
}
JobID jobId = new JobID(status.jobId().globalId().toString(), status.jobId().localId());
@Before
public void setUp() throws IOException {
    conf = new Configuration();
    clientProtocol = mock(ClientProtocol.class);
    Cluster cluster = mock(Cluster.class);
    when(cluster.getConf()).thenReturn(conf);
    when(cluster.getClient()).thenReturn(clientProtocol);

    // JobStatus(jobId, setupProgress, mapProgress, reduceProgress, cleanupProgress,
    //           runState, priority, user, jobName, jobFile, trackingUrl)
    JobStatus jobStatus = new JobStatus(new JobID("job_000", 1), 0f, 0f, 0f, 0f,
        State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
        "tmp-jobfile", "tmp-url");

    job = Job.getInstance(cluster, jobStatus, conf);
    job = spy(job);
}
@Before
public void setUp() throws Exception {
    conf = new Configuration();
    jobId = new JobID("test", 1);
    jobContext = new JobContextImpl(conf, jobId);
}
@Test
public void testSharedCacheEnabledWithJobJarInSharedCache() throws Exception {
    JobConf jobConf = createJobConf();
    jobConf.set(MRJobConfig.SHARED_CACHE_MODE, "enabled");
    Job job = new Job(jobConf);
    job.setJobID(new JobID("567789", 1));

    // Shared cache is enabled for every file type: the number of times
    // SharedCacheClient.use is called should equal the total number of
    // files/libjars/archives/jobjar.
    uploadFilesToRemoteFS(job, jobConf, 8, 3, 2, true);
}
@Test
public void testSharedCacheEnabled() throws Exception {
    JobConf jobConf = createJobConf();
    jobConf.set(MRJobConfig.SHARED_CACHE_MODE, "enabled");
    Job job = new Job(jobConf);
    job.setJobID(new JobID("567789", 1));

    // Shared cache is enabled for every file type: the number of times
    // SharedCacheClient.use is called should equal the total number of
    // files/libjars/archives/jobjar.
    uploadFilesToRemoteFS(job, jobConf, 8, 3, 2, false);
}
@Test
public void testSharedCacheArchivesAndLibjarsEnabled() throws Exception {
    JobConf jobConf = createJobConf();
    jobConf.set(MRJobConfig.SHARED_CACHE_MODE, "archives,libjars");
    Job job = new Job(jobConf);
    job.setJobID(new JobID("567789", 1));

    // Shared cache is enabled only for archives and libjars: the number of
    // times SharedCacheClient.use is called should equal the total number of
    // libjars and archives.
    uploadFilesToRemoteFS(job, jobConf, 5, 1, 2, true);
}
@Test
public void testSharedCacheDisabled() throws Exception {
    JobConf jobConf = createJobConf();
    Job job = new Job(jobConf);
    job.setJobID(new JobID("567789", 1));

    // Shared cache is disabled by default.
    uploadFilesToRemoteFS(job, jobConf, 0, 0, 0, false);
}
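All four tests above toggle the same knob, MRJobConfig.SHARED_CACHE_MODE ("mapreduce.job.sharedcache.mode"), which accepts "disabled", "enabled", or a comma-separated list of resource types; createJobConf and uploadFilesToRemoteFS are helpers local to the test class. A minimal sketch of setting the knob outside the test harness:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class SharedCacheModeSketch {
    public static void main(String[] args) throws Exception {
        JobConf jobConf = new JobConf();
        // Enable the shared cache for files and libjars only.
        jobConf.set(MRJobConfig.SHARED_CACHE_MODE, "files,libjars");
        Job job = Job.getInstance(jobConf);
        job.setJobID(new JobID("567789", 1));
    }
}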
private void validateSplitMetaInfo() throws IOException {
    JobSplit.TaskSplitMetaInfo[] splitInfo =
        SplitMetaInfoReader.readSplitMetaInfo(new JobID(), fs, conf, submitDir);

    assertEquals("Number of splits", 1, splitInfo.length);
    assertEquals("Number of block locations", 14, splitInfo[0].getLocations().length);
}
/**
 * Test of toString method, of class TaskID.
 */
@Test
public void testToString() {
    JobID jobId = new JobID("1234", 1);

    for (TaskType type : TaskType.values()) {
        TaskID taskId = new TaskID(jobId, type, 0);
        String str = String.format("task_1234_0001_%c_000000",
            TaskID.getRepresentingCharacter(type));

        assertEquals("The toString() method returned the wrong value",
            str, taskId.toString());
    }
}
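The canonical string form asserted above also parses back through TaskID.forName; a small round-trip sketch:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskIdRoundTripSketch {
    public static void main(String[] args) {
        TaskID parsed = TaskID.forName("task_1234_0001_m_000000");
        // The parsed ID carries the task type, the enclosing JobID, and the index.
        assert parsed.getTaskType() == TaskType.MAP;
        assert parsed.getJobID().equals(new JobID("1234", 1));
        assert parsed.getId() == 0;
    }
}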
/**
 * Test of hashCode method, of class TaskID.
 */
@Test
public void testHashCode() {
    TaskType[] types = TaskType.values();

    for (int i = 0; i < types.length; i++) {
        JobID jobId = new JobID("1234" + i, i);
        TaskID taskId1 = new TaskID(jobId, types[i], i);
        TaskID taskId2 = new TaskID(jobId, types[i], i);

        assertTrue("The hashCode() method gave unequal hash codes for two equal "
            + "task IDs", taskId1.hashCode() == taskId2.hashCode());
    }
}
/**
 * Test of getJobID method, of class TaskID.
 */
@Test
public void testGetJobID() {
    JobID jobId = new JobID("1234", 0);
    TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);

    assertSame("TaskID did not store the JobID correctly",
        jobId, taskId.getJobID());

    taskId = new TaskID();
    assertEquals("Job ID was set unexpectedly in default constructor",
        "", taskId.getJobID().getJtIdentifier());
}
/**
 * Test of getTaskType method, of class TaskID.
 */
@Test
public void testGetTaskType0args() {
    JobID jobId = new JobID("1234", 0);

    for (TaskType type : TaskType.values()) {
        TaskID taskId = new TaskID(jobId, type, 0);
        assertEquals("TaskID incorrectly reported its type",
            type, taskId.getTaskType());
    }

    // The no-arg constructor defaults to a REDUCE task.
    TaskID taskId = new TaskID();
    assertEquals("TaskID of default type incorrectly reported its type",
        TaskType.REDUCE, taskId.getTaskType());
}