/**
 * Create a new {@link TaskAttemptID} instance.
 *
 * @return a new {@link TaskAttemptID} instance
 */
public static TaskAttemptID newTaskAttemptID() {
  // Synthesize an attempt id of the shape attempt_<timestamp>_0_m_0_0;
  // string concatenation stringifies the timestamp, zeros and the 'm' type tag.
  final String syntheticId = ATTEMPT + SEPARATOR + System.currentTimeMillis()
      + SEPARATOR + 0 + SEPARATOR + 'm' + SEPARATOR + 0 + SEPARATOR + 0;
  return TaskAttemptID.forName(syntheticId);
}
}
@Override public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) { TaskAttemptID taskAttemptId = TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID)); if (taskAttemptId == null) { // If the caller is not within a mapper/reducer (if reading from the table via CliDriver), // then TaskAttemptID.forname() may return NULL. Fall back to using default constructor. taskAttemptId = new TaskAttemptID(); } return new TaskAttemptContextImpl(conf, taskAttemptId) { @Override public void progress() { progressable.progress(); } }; }
/**
 * Wraps the real Parquet record writer: builds a task attempt context,
 * initializes the serde from the table properties, then creates the
 * underlying writer for the target path.
 *
 * @param realOutputFormat the underlying Parquet output format
 * @param jobConf job configuration
 * @param name target path the file is written to
 * @param progress progress reporter (unused; progress flows via the context)
 * @param tableProperties table-level properties used to initialize the serde
 * @throws IOException if the real writer cannot be created, or if creation is
 *         interrupted (the interrupt status is restored before wrapping)
 */
public ParquetRecordWriterWrapper(
    final OutputFormat<Void, ParquetHiveRecord> realOutputFormat,
    final JobConf jobConf,
    final String name,
    final Progressable progress,
    Properties tableProperties) throws IOException {
  try {
    // Create a TaskInputOutputContext; "mapred.task.id" may be unset outside a
    // real task, in which case forName(null) returns null and a default id is used.
    TaskAttemptID taskAttemptID = TaskAttemptID.forName(jobConf.get("mapred.task.id"));
    if (taskAttemptID == null) {
      taskAttemptID = new TaskAttemptID();
    }
    taskContext = ContextUtil.newTaskAttemptContext(jobConf, taskAttemptID);

    LOG.info("initialize serde with table properties.");
    initializeSerProperties(taskContext, tableProperties);

    LOG.info("creating real writer to write at " + name);
    realWriter =
        ((ParquetOutputFormat) realOutputFormat).getRecordWriter(taskContext, new Path(name));
    LOG.info("real writer: " + realWriter);
  } catch (final InterruptedException e) {
    // Restore the interrupt status so callers up the stack can still observe it.
    Thread.currentThread().interrupt();
    throw new IOException(e);
  }
}
/**
 * Wraps the real Parquet record writer: builds a task attempt context,
 * initializes the serde from the table properties, then creates the
 * underlying writer for the target path.
 *
 * @param realOutputFormat the underlying Parquet output format
 * @param jobConf job configuration
 * @param name target path the file is written to
 * @param progress progress reporter (unused; progress flows via the context)
 * @param tableProperties table-level properties used to initialize the serde
 * @throws IOException if the real writer cannot be created, or if creation is
 *         interrupted (the interrupt status is restored before wrapping)
 */
public ParquetRecordWriterWrapper(
    final OutputFormat<Void, ParquetHiveRecord> realOutputFormat,
    final JobConf jobConf,
    final String name,
    final Progressable progress,
    Properties tableProperties) throws IOException {
  try {
    // Create a TaskInputOutputContext; "mapred.task.id" may be unset outside a
    // real task, in which case forName(null) returns null and a default id is used.
    TaskAttemptID taskAttemptID = TaskAttemptID.forName(jobConf.get("mapred.task.id"));
    if (taskAttemptID == null) {
      taskAttemptID = new TaskAttemptID();
    }
    taskContext = ContextUtil.newTaskAttemptContext(jobConf, taskAttemptID);

    LOG.info("initialize serde with table properties.");
    initializeSerProperties(taskContext, tableProperties);

    LOG.info("creating real writer to write at " + name);
    realWriter =
        ((ParquetOutputFormat) realOutputFormat).getRecordWriter(taskContext, new Path(name));
    LOG.info("real writer: " + realWriter);
  } catch (final InterruptedException e) {
    // Restore the interrupt status so callers up the stack can still observe it.
    Thread.currentThread().interrupt();
    throw new IOException(e);
  }
}
/**
 * Returns a TaskAttemptContext instance created from the given parameters.
 *
 * @param job an instance of o.a.h.mapreduce.Job
 * @param taskId an identifier for the task attempt id. Should be parsable by
 *        {@link TaskAttemptID#forName(String)}
 * @return a concrete TaskAttemptContext instance of o.a.h.mapreduce.TaskAttemptContext
 */
@Override
@SuppressWarnings("unchecked")
public <T, J> T createTestTaskAttemptContext(J job, String taskId) {
  final Job mrJob = (Job) job;
  final TaskAttemptID attemptId = TaskAttemptID.forName(taskId);
  return (T) new TaskAttemptContextImpl(mrJob.getConfiguration(), attemptId);
}
}
// Build a synthetic reduce attempt id: "attempt__0000_r_" followed by the
// 1-based task number left-padded with '0' to 6 digits. The format/replace
// dance pads with zeros because "%0Ns" is not valid for string conversions.
// NOTE(review): width is 6 - digits(taskNumber + 1); for a 7-digit task number
// this goes non-positive and String.format would throw — confirm task counts
// stay below 1,000,000. Fragment appears truncated; the trailing attempt
// counter is presumably appended on a following line.
TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
    + String.format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0")
    + Integer.toString(taskNumber + 1)
@Override public void finalizeGlobal(int parallelism) throws IOException { JobContext jobContext; TaskAttemptContext taskContext; try { TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_" + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0") + Integer.toString(1) + "_0"); jobContext = new JobContextImpl(this.configuration, new JobID()); taskContext = new TaskAttemptContextImpl(this.configuration, taskAttemptID); this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext); } catch (Exception e) { throw new RuntimeException(e); } jobContext.getCredentials().addAll(this.credentials); Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser()); if (currentUserCreds != null) { jobContext.getCredentials().addAll(currentUserCreds); } // finalize HDFS output format if (this.outputCommitter != null) { this.outputCommitter.commitJob(jobContext); } }
// Recreate the Parquet split from the old-API split, then resolve the task
// attempt id from the job conf; outside a real task the property may be unset
// and forName(null) returns null, so fall back to a default attempt id.
final ParquetInputSplit split = getSplit(oldSplit, jobConf);
TaskAttemptID taskAttemptID = TaskAttemptID.forName(jobConf.get(IOConstants.MAPRED_TASK_ID));
if (taskAttemptID == null) {
  taskAttemptID = new TaskAttemptID();
// Recreate the Parquet split from the old-API split, then resolve the task
// attempt id from the job conf; outside a real task the property may be unset
// and forName(null) returns null, so fall back to a default attempt id.
final ParquetInputSplit split = getSplit(oldSplit, jobConf);
TaskAttemptID taskAttemptID = TaskAttemptID.forName(jobConf.get(IOConstants.MAPRED_TASK_ID));
if (taskAttemptID == null) {
  taskAttemptID = new TaskAttemptID();
// EasyMock expectations: the mocked context hands back the job configuration,
// a fixed well-formed map attempt id, and reports a single reduce task.
.andReturn(job.getConfiguration()).anyTimes();
expect(context.getTaskAttemptID())
    .andReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"))
    .anyTimes();
expect(context.getNumReduceTasks()).andReturn(1);
// Derive the attempt number from the configured task attempt id; when the id
// is absent (forName returns null for a null string) the prior value of
// attemptNum is left untouched.
TaskAttemptID taskAttemptId = TaskAttemptID.forName(job.get(MRJobConfig.TASK_ATTEMPT_ID));
if (taskAttemptId != null) {
  attemptNum = taskAttemptId.getId();
@Before @SuppressWarnings("unchecked") // mocked generics public void setup() { LOG.info(">>>> " + name.getMethodName()); job = new JobConf(); job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false); jobWithRetry = new JobConf(); jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true); id = TaskAttemptID.forName("attempt_0_1_r_1_1"); ss = mock(ShuffleSchedulerImpl.class); mm = mock(MergeManagerImpl.class); r = mock(Reporter.class); metrics = mock(ShuffleClientMetrics.class); except = mock(ExceptionReporter.class); key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0}); connection = mock(HttpURLConnection.class); allErrs = mock(Counters.Counter.class); when(r.getCounter(anyString(), anyString())).thenReturn(allErrs); ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1); maps.add(map1ID); maps.add(map2ID); when(ss.getMapsForHost(host)).thenReturn(maps); }
/**
 * Reserves {@code size} bytes for a synthetic map attempt and asserts that
 * the merge manager chose the expected shuffle destination, then releases
 * the reservation.
 */
private void verifyReservedMapOutputType(MergeManagerImpl<Text, Text> mgr,
    long size, String expectedShuffleMode) throws IOException {
  final TaskAttemptID mapId = TaskAttemptID.forName("attempt_0_1_m_1_1");
  final MapOutput<Text, Text> reserved = mgr.reserve(mapId, size, 1);
  assertEquals("Shuffled bytes: " + size, expectedShuffleMode, reserved.getDescription());
  mgr.unreserve(size);
}
/**
 * Construct a TaskAttemptID object from given string
 * @return constructed TaskAttemptID object or null if the given String is null
 * @throws IllegalArgumentException if the given string is malformed
 */
public static TaskAttemptID forName(String str) throws IllegalArgumentException {
  // Delegate parsing to the new-API class and narrow to the old-API subtype.
  final org.apache.hadoop.mapreduce.TaskAttemptID parsed =
      org.apache.hadoop.mapreduce.TaskAttemptID.forName(str);
  return (TaskAttemptID) parsed;
}
/** Get the task attempt id */
public TaskAttemptID getAttemptId() {
  // The Avro field is a CharSequence; stringify before parsing.
  final String rawId = datum.attemptId.toString();
  return TaskAttemptID.forName(rawId);
}

/** Get the task type */
/** Get the attempt id */
public TaskAttemptID getTaskAttemptId() {
  // The Avro field is a CharSequence; stringify before parsing.
  final String rawId = datum.getAttemptId().toString();
  return TaskAttemptID.forName(rawId);
}

/** Get the event type */
/**
 * Populate this event from the Avro datum, stringifying the Avro
 * CharSequence fields and decoding the counters.
 */
public void setDatum(Object oDatum) {
  final TaskAttemptFinished finished = (TaskAttemptFinished) oDatum;
  this.datum = finished;
  this.attemptId = TaskAttemptID.forName(finished.attemptId.toString());
  this.taskType = TaskType.valueOf(finished.taskType.toString());
  this.taskStatus = finished.taskStatus.toString();
  this.finishTime = finished.finishTime;
  this.rackName = finished.rackname.toString();
  this.hostname = finished.hostname.toString();
  this.state = finished.state.toString();
  this.counters = EventReader.fromAvro(finished.counters);
}
/**
 * Populate this event from the Avro datum, stringifying the Avro
 * CharSequence fields and decoding the counters.
 */
public void setDatum(Object oDatum) {
  final TaskAttemptFinished finished = (TaskAttemptFinished) oDatum;
  this.datum = finished;
  this.attemptId = TaskAttemptID.forName(finished.attemptId.toString());
  this.taskType = TaskType.valueOf(finished.taskType.toString());
  this.taskStatus = finished.taskStatus.toString();
  this.finishTime = finished.finishTime;
  this.rackName = finished.rackname.toString();
  this.hostname = finished.hostname.toString();
  this.state = finished.state.toString();
  this.counters = EventReader.fromAvro(finished.counters);
}
/**
 * Populate this event from the Avro datum. The successful attempt id is
 * optional in the schema and is left null here when absent.
 */
public void setDatum(Object oDatum) {
  final TaskFinished finished = (TaskFinished) oDatum;
  this.datum = finished;
  this.taskid = TaskID.forName(finished.taskid.toString());
  if (finished.successfulAttemptId != null) {
    this.successfulAttemptId =
        TaskAttemptID.forName(finished.successfulAttemptId.toString());
  }
  this.finishTime = finished.finishTime;
  this.taskType = TaskType.valueOf(finished.taskType.toString());
  this.status = finished.status.toString();
  this.counters = EventReader.fromAvro(finished.counters);
}
/**
 * Populate this event from the Avro datum via its generated getters. The
 * successful attempt id is optional in the schema and left null when absent.
 */
public void setDatum(Object oDatum) {
  final TaskFinished finished = (TaskFinished) oDatum;
  this.datum = finished;
  this.taskid = TaskID.forName(finished.getTaskid().toString());
  if (finished.getSuccessfulAttemptId() != null) {
    this.successfulAttemptId =
        TaskAttemptID.forName(finished.getSuccessfulAttemptId().toString());
  }
  this.finishTime = finished.getFinishTime();
  this.taskType = TaskType.valueOf(finished.getTaskType().toString());
  this.status = finished.getStatus().toString();
  this.counters = EventReader.fromAvro(finished.getCounters());
}