testing.injectedFailureCount = context.getTaskAttemptID().getId();
Exception ex = null; try { zkw = new ZKWatcher(conf, context.getTaskAttemptID().toString(), null); clusterIds = Collections.singletonList(ZKClusterId.getUUIDForCluster(zkw)); } catch (ZooKeeperConnectionException e) {
/**
 * Blocks until the asynchronous cube-build {@code future} completes.
 *
 * @param context task context, used only to identify the mapper in the error message
 * @throws IOException if the future completes exceptionally or the wait is interrupted
 */
private void futureGet(Context context) throws IOException {
    try {
        future.get();
    } catch (Exception e) {
        int mapperId = context.getTaskAttemptID().getTaskID().getId();
        throw new IOException("Failed to build cube in mapper " + mapperId, e);
    }
}
}
public void map(LongWritable key, Text value, Context context ) throws IOException, InterruptedException { String id = context.getTaskAttemptID().toString(); // Mapper 0 does not output anything if (!id.endsWith("0_0")) { context.write(key, value); } } }
/**
 * Opens a side-effect output stream in the task's work directory, using the
 * task id as the file name, and remembers it in {@code sideEffectStream}.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Path attemptDir = FileOutputFormat.getWorkOutputPath(context);
    String taskName = context.getTaskAttemptID().getTaskID().toString();
    Path sideEffectFile = new Path(attemptDir, taskName);
    FileSystem fs = FileSystem.get(context.getConfiguration());
    sideEffectStream = fs.create(sideEffectFile);
    log.info("Opened file = {}", sideEffectFile);
}
/**
 * Emits the accumulated {@code mYtY} matrix as a dense vector, keyed by this
 * task's numeric id.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    int taskId = context.getTaskAttemptID().getTaskID().getId();
    VectorWritable payload = new VectorWritable(new DenseVector(mYtY.getData()));
    context.write(new IntWritable(taskId), payload);
}
}
/**
 * Reflectively invokes the 8-argument {@code Context} constructor (present in
 * Hadoop 1) to build a child mapper context that delegates output to a
 * {@link SubMapRecordWriter} while reusing the outer context's configuration,
 * attempt id, committer and split.
 *
 * @param context the current context, supplying the output committer
 * @return a new {@code Context} wired to this sub-map's reader and writers
 */
@SuppressWarnings("unchecked")
private Context createSubContextForHadoop1(Context context)
        throws NoSuchMethodException, InstantiationException, IllegalAccessException,
        InvocationTargetException {
    Constructor<?> ctor = Context.class.getDeclaredConstructor(
            Mapper.class, Configuration.class, TaskAttemptID.class, RecordReader.class,
            RecordWriter.class, OutputCommitter.class, StatusReporter.class, InputSplit.class);
    // The constructor is not public; it must be opened up before instantiation.
    ctor.setAccessible(true);
    Object subContext = ctor.newInstance(
            mapper, outer.getConfiguration(), outer.getTaskAttemptID(), reader,
            new SubMapRecordWriter(), context.getOutputCommitter(),
            new SubMapStatusReporter(), outer.getInputSplit());
    return (Context) subContext;
}
/**
 * Waits for the cube-build {@code future}; any failure is rethrown as an
 * {@link IOException} tagged with this mapper's task id.
 *
 * @param context task context used to identify the mapper in the error message
 * @throws IOException wrapping whatever the future threw
 */
private void futureGet(Context context) throws IOException {
    try {
        future.get();
    } catch (Exception e) {
        throw new IOException(
                "Failed to build cube in mapper " + context.getTaskAttemptID().getTaskID().getId(),
                e);
    }
}
}
private Random createRandom(Context context) { long taskId = 0; if (context.getTaskAttemptID() != null) { // MRUnit returns null LOGGER.debug("context.getTaskAttemptID().getId(): {}", context.getTaskAttemptID().getId()); LOGGER.debug("context.getTaskAttemptID().getTaskID().getId(): {}", context.getTaskAttemptID().getTaskID().getId()); taskId = context.getTaskAttemptID().getTaskID().getId(); // taskId = 0, 1, ..., N } // create a good random seed, yet ensure deterministic PRNG sequence for easy reproducability return new Random(421439783L * (taskId + 1)); }
/**
 * Returns the temp-file path (".distcp.tmp.&lt;attempt-id&gt;") for a copy target,
 * placed under the configured target work root.
 *
 * @param target  the file being copied
 * @param context task context supplying the configuration and the attempt id
 * @return the temporary-file path for this task attempt
 */
private Path getTmpFile(Path target, Mapper.Context context) {
    Path targetWorkPath = new Path(context.getConfiguration()
            .get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
    // When the target IS the work root itself, write the temp file beside it (in the parent).
    Path root = target.equals(targetWorkPath) ? targetWorkPath.getParent() : targetWorkPath;
    // Build the path once instead of duplicating the concatenation for the log and the return.
    Path tmpFile = new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString());
    LOG.info("Creating temp file: " + tmpFile);
    return tmpFile;
}
/**
 * Writes out the {@code mYtY} accumulator as a {@link VectorWritable},
 * using this task's numeric id as the key.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    IntWritable outKey =
            new IntWritable(context.getTaskAttemptID().getTaskID().getId());
    context.write(outKey, new VectorWritable(new DenseVector(mYtY.getData())));
}
}
public void map(LongWritable key, Text value, Context context ) throws IOException, InterruptedException { String id = context.getTaskAttemptID().toString(); // Mapper 0 does not output anything if (!id.endsWith("0_0")) { context.write(key, value); } } }
GobblinMultiTaskAttempt.runWorkUnits(this.jobState.getJobId(), context.getTaskAttemptID().toString(), this.jobState, this.workUnits, this.taskStateTracker, this.taskExecutor, this.taskStateStore, multiTaskAttemptCommitPolicy, jobBroker); GobblinOutputCommitter gobblinOutputCommitter = (GobblinOutputCommitter) context.getOutputCommitter(); gobblinOutputCommitter.getAttemptIdToMultiTaskAttempt() .put(context.getTaskAttemptID().toString(), gobblinMultiTaskAttempt);
this.jobMetrics.get() .startMetricReportingWithFileSuffix(HadoopUtils.getStateFromConf(configuration), context.getTaskAttemptID().toString());
@Override protected void setup(Context context) throws IOException, InterruptedException { id = Bytes.toBytes("Job: "+context.getJobID() + " Task: " + context.getTaskAttemptID()); this.connection = ConnectionFactory.createConnection(context.getConfiguration()); instantiateHTable();
outputFS, finalSegmentTemplate, context.getTaskAttemptID(), config.DATA_SEGMENT_PUSHER ),
byte valBytes[] = new byte[valLength]; int taskId = context.getTaskAttemptID().getTaskID().getId(); assert taskId < Byte.MAX_VALUE : "Unit tests dont support > 127 tasks!";
byte valBytes[] = new byte[valLength]; int taskId = context.getTaskAttemptID().getTaskID().getId(); assert taskId < Byte.MAX_VALUE : "Unit tests dont support > 127 tasks!"; Random random = new Random();
/**
 * Announces the task id, blocks on the shared "mapAwaitLatch", then records
 * one completed map execution and announces completion.
 */
@Override
public void run(Context ctx) throws IOException, InterruptedException {
    int taskId = ctx.getTaskAttemptID().getTaskID().getId();
    System.out.println("Running task: " + taskId);
    latch.get("mapAwaitLatch").await();
    mapExecCnt.incrementAndGet();
    System.out.println("Completed task: " + taskId);
}
}
/** {@inheritDoc} */
@Override
protected void setup(Context ctx) throws IOException, InterruptedException {
    X.println("___ Mapper: " + ctx.getTaskAttemptID());
    String attemptId = ctx.getTaskAttemptID().toString();
    LocalFileSystem localFs = FileSystem.getLocal(ctx.getConfiguration());
    String workDir = localFs.getWorkingDirectory().toString();
    // Each attempt must see a unique working directory; a previous mapping means a clash.
    assertNull(taskWorkDirs.put(workDir, attemptId));
}