/** Supplies a placeholder map-type {@link TaskID} with an empty job identifier and zero indices. */
@Override
public TaskID createTaskID() {
  TaskID placeholder = new TaskID("", 0, TaskType.MAP, 0);
  return placeholder;
}
/**
 * Stubs the shared {@code fakeTaskAttemptContext} mock so it reports a reduce-task attempt
 * whose fake job-tracker identifier embeds the given index type, and hands back this job's
 * configuration.
 *
 * @param indexType suffix appended to the fake job-tracker identifier
 */
private void mockTaskAttemptContext(String indexType) {
  TaskID reduceTask = new TaskID("foo_task_" + indexType, 123, TaskType.REDUCE, 2);
  TaskAttemptID attemptId = new TaskAttemptID(reduceTask, 2);
  when(fakeTaskAttemptContext.getTaskAttemptID()).thenReturn(attemptId);
  when(fakeTaskAttemptContext.getConfiguration()).thenReturn(job.getConfiguration());
}
/** Test of hashCode method, of class TaskID: two equal task IDs must produce equal hashes. */
@Test
public void testHashCode() {
  TaskType[] allTypes = TaskType.values();
  for (int idx = 0; idx < allTypes.length; idx++) {
    JobID job = new JobID("1234" + idx, idx);
    TaskID first = new TaskID(job, allTypes[idx], idx);
    TaskID second = new TaskID(job, allTypes[idx], idx);
    assertTrue("The hashcode() method gave unequal hash codes for two equal "
        + "task IDs", first.hashCode() == second.hashCode());
  }
}
/** Test of getJobID method, of class TaskID: stored JobID and default-constructor JobID. */
@Test
public void testGetJobID() {
  JobID expectedJob = new JobID("1234", 0);
  TaskID withJob = new TaskID(expectedJob, TaskType.MAP, 0);
  assertSame("TaskID did not store the JobID correctly", expectedJob, withJob.getJobID());
  // Assertion message's "contsructor" typo is kept verbatim: it is runtime text.
  TaskID defaultTask = new TaskID();
  assertEquals("Job ID was set unexpectedly in default contsructor", "",
      defaultTask.getJobID().getJtIdentifier());
}
/** Test of readFields method, of class TaskID: parses a hand-built serialized task ID. */
@Test
public void testReadFields() throws Exception {
  ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteSink);
  // Serialized layout: task number, job number, jtIdentifier vint-length + bytes, task type.
  dataOut.writeInt(0);
  dataOut.writeInt(1);
  WritableUtils.writeVInt(dataOut, 4);
  byte[] identifier = {0x31, 0x32, 0x33, 0x34}; // ASCII "1234"
  dataOut.write(identifier);
  WritableUtils.writeEnum(dataOut, TaskType.REDUCE);
  DataInputByteBuffer dataIn = new DataInputByteBuffer();
  dataIn.reset(ByteBuffer.wrap(byteSink.toByteArray()));
  TaskID parsed = new TaskID();
  parsed.readFields(dataIn);
  assertEquals("The readFields() method did not produce the expected task ID",
      "task_1234_0001_r_000000", parsed.toString());
}
/** * Test of appendTo method, of class TaskID. */ @Test public void testAppendTo() { JobID jobId = new JobID("1234", 1); StringBuilder builder = new StringBuilder(); for (TaskType type : TaskType.values()) { builder.setLength(0); TaskID taskId = new TaskID(jobId, type, 0); String str = String.format("_1234_0001_%c_000000", TaskID.getRepresentingCharacter(type)); assertEquals("The appendTo() method appended the wrong value", str, taskId.appendTo(builder).toString()); } try { new TaskID().appendTo(null); fail("The appendTo() method allowed a null builder"); } catch (NullPointerException ex) { // Expected } }
/**
 * Constructs a {@code TaskAttemptID} object from given parts by delegating to the
 * {@code (TaskID, int)} constructor.
 *
 * <p>No restyle is possible here: {@code this(...)} must be the first statement.
 *
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param type the TaskType of the task this attempt belongs to
 * @param taskId taskId number within the job
 * @param id the task attempt number
 */
public TaskAttemptID(String jtIdentifier, int jobId, TaskType type, int taskId, int id) {
  this(new TaskID(jtIdentifier, jobId, type, taskId), id);
}
/** Test of getTaskType method (zero-arg), of class TaskID. */
@Test
public void testGetTaskType0args() {
  JobID job = new JobID("1234", 0);
  for (TaskType expectedType : TaskType.values()) {
    TaskID typed = new TaskID(job, expectedType, 0);
    assertEquals("TaskID incorrectly reported its type", expectedType, typed.getTaskType());
  }
  // A default-constructed TaskID is expected to report the REDUCE type.
  TaskID defaultTask = new TaskID();
  assertEquals("TaskID of default type incorrectly reported its type",
      TaskType.REDUCE, defaultTask.getTaskType());
}
/**
 * Constructs a {@code TaskAttemptID} object from given parts by delegating to the
 * {@code (TaskID, int)} constructor.
 *
 * <p>NOTE(review): this appears to duplicate an identical constructor earlier in this
 * corpus — confirm whether both copies should exist.
 *
 * @param jtIdentifier jobTracker identifier
 * @param jobId job number
 * @param type the TaskType of the task this attempt belongs to
 * @param taskId taskId number within the job
 * @param id the task attempt number
 */
public TaskAttemptID(String jtIdentifier, int jobId, TaskType type, int taskId, int id) {
  this(new TaskID(jtIdentifier, jobId, type, taskId), id);
}
/** Test of toString method, of class TaskID: full string form for every task type. */
@Test
public void testToString() {
  JobID job = new JobID("1234", 1);
  for (TaskType taskType : TaskType.values()) {
    TaskID id = new TaskID(job, taskType, 0);
    String expected = String.format("task_1234_0001_%c_000000",
        TaskID.getRepresentingCharacter(taskType));
    assertEquals("The toString() method returned the wrong value", expected, id.toString());
  }
}
// NOTE(review): fragment of an equality/comparison test — the enclosing method, the
// `types` array, and the loop indices `i`/`j` are outside this view; the assertions
// between these reassignments appear to have been elided during extraction.
JobID jobId1 = new JobID("1234", 1);
JobID jobId2 = new JobID("2345", 2);
TaskID taskId1 = new TaskID(jobId1, TaskType.MAP, 0);
TaskID taskId2 = new TaskID(jobId1, TaskType.MAP, 0); // identical parts — presumably the "equal" case
taskId2 = new TaskID(jobId2, TaskType.MAP, 0); // differs by job
taskId2 = new TaskID(jobId1, TaskType.MAP, 1); // differs by task number
taskId1 = new TaskID(jobId1, types[i], 0); // differs by type — i/j defined outside this view
taskId2 = new TaskID(jobId1, types[j], 0);
JobID jobId = new JobID("a", 0); TaskAttemptID reduceId1 = new TaskAttemptID( new TaskID(jobId, TaskType.REDUCE, 0), 0); TaskAttemptID mapId1 = new TaskAttemptID( new TaskID(jobId, TaskType.MAP, 1), 0); TaskAttemptID mapId2 = new TaskAttemptID( new TaskID(jobId, TaskType.MAP, 2), 0); new TaskID(jobId, TaskType.REDUCE, 3), 0); TaskAttemptID mapId3 = new TaskAttemptID( new TaskID(jobId, TaskType.MAP, 4), 0); TaskAttemptID mapId4 = new TaskAttemptID( new TaskID(jobId, TaskType.MAP, 5), 0);
// NOTE(review): fragment — the method's closing brace, the assertions between these
// reassignments, and the `types`/`i`/`j` definitions lie outside this view.
public void testCompareTo() { JobID jobId = new JobID("1234", 1); TaskID taskId1 = new TaskID(jobId, TaskType.REDUCE, 0); TaskID taskId2 = new TaskID(jobId, TaskType.REDUCE, 0); /* identical parts — presumably compares equal */ taskId2 = new TaskID(jobId, TaskType.MAP, 1); /* differing type and number */ taskId1 = new TaskID(jobId, types[i], 0); /* i/j presumably loop indices — defined outside this view */ taskId2 = new TaskID(jobId, types[j], 0);
/** Verifies that cloning a wrapped map context through ContextFactory completes without error. */
@Test
public void testCloneMapContext() throws Exception {
  TaskID mapTask = new TaskID(jobId, TaskType.MAP, 0);
  TaskAttemptID attemptId = new TaskAttemptID(mapTask, 0);
  MapContext<IntWritable, IntWritable, IntWritable, IntWritable> mapContext =
      new MapContextImpl<IntWritable, IntWritable, IntWritable, IntWritable>(
          conf, attemptId, null, null, null, null, null);
  Mapper<IntWritable, IntWritable, IntWritable, IntWritable>.Context mapperContext =
      new WrappedMapper<IntWritable, IntWritable, IntWritable, IntWritable>()
          .getMapContext(mapContext);
  ContextFactory.cloneMapContext(mapperContext, conf, null, null);
}
/** Test of isMap method, of class TaskID: only the MAP type may identify as a map task. */
@Test
public void testIsMap() {
  JobID job = new JobID("1234", 0);
  for (TaskType taskType : TaskType.values()) {
    TaskID id = new TaskID(job, taskType, 0);
    if (taskType == TaskType.MAP) {
      assertTrue("TaskID for map task did not correctly identify itself "
          + "as a map task", id.isMap());
    } else {
      assertFalse("TaskID for " + taskType + " task incorrectly identified "
          + "itself as a map task", id.isMap());
    }
  }
  TaskID defaultTask = new TaskID();
  assertFalse("TaskID of default type incorrectly identified itself as a "
      + "map task", defaultTask.isMap());
}
/**
 * Test of write method, of class TaskID: verifies the serialized layout is task number,
 * job number, jtIdentifier vint-length + bytes, then task type.
 */
@Test
public void testWrite() throws Exception {
  JobID jobId = new JobID("1234", 1);
  TaskID taskId = new TaskID(jobId, TaskType.JOB_SETUP, 0);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  taskId.write(out);
  DataInputByteBuffer in = new DataInputByteBuffer();
  byte[] buffer = new byte[4];
  in.reset(ByteBuffer.wrap(baos.toByteArray()));
  assertEquals("The write() method did not write the expected task ID",
      0, in.readInt());
  assertEquals("The write() method did not write the expected job ID",
      1, in.readInt());
  assertEquals("The write() method did not write the expected job "
      + "identifier length", 4, WritableUtils.readVInt(in));
  in.readFully(buffer, 0, 4);
  // FIX: this message was a copy-paste of the length assertion above even though
  // the check is on the identifier's contents, not its length.
  assertEquals("The write() method did not write the expected job identifier",
      "1234", new String(buffer));
  assertEquals("The write() method did not write the expected task type",
      TaskType.JOB_SETUP, WritableUtils.readEnum(in, TaskType.class));
}
/**
 * Creates new setup {@link TaskAttemptContext} from hadoop {@link Configuration} and {@link
 * JobID}. The context carries attempt 0 of the job's JOB_SETUP task 0.
 *
 * @param conf hadoop {@link Configuration}
 * @param jobID jobId of the created {@link TaskAttemptContext}
 * @return new setup {@link TaskAttemptContext}
 */
static TaskAttemptContext createSetupTaskContext(Configuration conf, JobID jobID) {
  final TaskAttemptID setupAttempt =
      new TaskAttemptID(new TaskID(jobID, TaskType.JOB_SETUP, 0), 0);
  return createTaskAttemptContext(conf, setupAttempt);
}
/**
 * Verifies that a {@link TaskID} downgraded to the mapred API round-trips through
 * {@code TaskReport} with the expected string form from both getTaskId() and getTaskID().
 */
@Test(timeout = 5000)
public void testTaskID() throws IOException, InterruptedException {
  JobID jobid = new JobID("1014873536921", 6);
  TaskID tid = new TaskID(jobid, TaskType.MAP, 0);
  org.apache.hadoop.mapred.TaskID tid1 = org.apache.hadoop.mapred.TaskID.downgrade(tid);
  org.apache.hadoop.mapred.TaskReport treport =
      new org.apache.hadoop.mapred.TaskReport(tid1, 0.0f, State.FAILED.toString(), null,
          TIPStatus.FAILED, 100, 100, new org.apache.hadoop.mapred.Counters());
  // FIX: JUnit's assertEquals takes (expected, actual); the original calls had the
  // arguments reversed, which produces a misleading message on failure.
  Assert.assertEquals("task_1014873536921_0006_m_000000", treport.getTaskId());
  Assert.assertEquals("task_1014873536921_0006_m_000000", treport.getTaskID().toString());
}
}
/**
 * Builds a CSV record writer backed by {@link CarbonTableOutputFormat}, using a randomly
 * generated job/task/attempt identity for this writer instance.
 *
 * @param loadModel carbon load model stored into the hadoop configuration
 * @param hadoopConf hadoop configuration used to build the task attempt context
 * @throws IOException if the underlying record writer cannot be created
 */
CSVCarbonWriter(CarbonLoadModel loadModel, Configuration hadoopConf) throws IOException {
  CarbonTableOutputFormat.setLoadModel(hadoopConf, loadModel);
  CarbonTableOutputFormat outputFormat = new CarbonTableOutputFormat();
  JobID randomJob = new JobID(UUID.randomUUID().toString(), 0);
  Random random = new Random();
  // Inner nextInt() (task id) is evaluated before the outer one (attempt id),
  // matching the original call order.
  TaskAttemptID attemptId = new TaskAttemptID(
      new TaskID(randomJob, TaskType.MAP, random.nextInt()), random.nextInt());
  TaskAttemptContextImpl attemptContext = new TaskAttemptContextImpl(hadoopConf, attemptId);
  this.recordWriter = outputFormat.getRecordWriter(attemptContext);
  this.context = attemptContext;
  this.writable = new ObjectArrayWritable();
}