// NOTE(review): out-of-context excerpt of a WebHCat (Templeton) launcher mapper run path.
// Visible steps: kill leftover child jobs for this job id, wire watcher threads onto the
// launched process's stdout/stderr (written under statusdir), start a counter keep-alive,
// record the process exit value as the job's final state, then collect logs via LogRetriever.
// The second statement is truncated mid-argument-list — this line is not compilable as-is;
// consult the full source file before editing.
killLauncherChildJobs(conf, context.getJobID().toString()); context.getJobID().toString(), conf.get("user.name"), conf.get(OVERRIDE_CLASSPATH)); executeWatcher(pool, conf, context.getJobID(), proc.getInputStream(), statusdir, STDOUT_FNAME); executeWatcher(pool, conf, context.getJobID(), proc.getErrorStream(), statusdir, STDERR_FNAME); KeepAlive keepAlive = startCounterKeepAlive(pool, context); updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(), proc.exitValue()); LOG.info("templeton: collecting logs for " + context.getJobID().toString() + " to " + statusdir + "/logs"); LogRetriever logRetriever = new LogRetriever(statusdir, jobType, conf);
// NOTE(review): out-of-context excerpt of WebHCat child-job reconnection logic.
// Visible steps: obtain a JobTracker shim for the launcher's UGI, query child jobs spawned
// after startTime; when none are found the job is marked done with exitCode 1, otherwise
// percent-complete / child-job-id progress is written back to the job state.
// The try block and surrounding loop are truncated — this line is not compilable as-is;
// consult the full source file before editing.
WebHCatJTShim tracker = ShimLoader.getHadoopShims().getWebHCatShim(conf, ugi); try { Set<String> childJobs = tracker.getJobs(context.getJobID().toString(), startTime); if (childJobs.size() == 0) { LOG.info("No child jobs found to reconnect with"); updateJobStatePercentAndChildId(conf, context.getJobID().toString(), null, childJobIdString); exitCode = 1; updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(), exitCode); break; updateJobStatePercentAndChildId(conf, context.getJobID().toString(), percent, null);
// NOTE(review): truncated start of a mapper setup(): builds a task-identifying byte id from
// the job and task-attempt ids, opens an HBase Connection from the task configuration, and
// instantiates the table wrapper. The method body continues past this excerpt (no closing
// brace visible) — consult the full source file before editing.
@Override protected void setup(Context context) throws IOException, InterruptedException { id = Bytes.toBytes("Job: "+context.getJobID() + " Task: " + context.getTaskAttemptID()); this.connection = ConnectionFactory.createConnection(context.getConfiguration()); instantiateHTable();
// NOTE(review): out-of-context excerpt of WebHCat child-job reconnection logic (duplicate of
// an earlier excerpt in this dump). Obtains a JobTracker shim, queries child jobs started
// after startTime; no children => mark job done with exitCode 1, otherwise update progress.
// Truncated try/loop — not compilable as-is; consult the full source file before editing.
WebHCatJTShim tracker = ShimLoader.getHadoopShims().getWebHCatShim(conf, ugi); try { Set<String> childJobs = tracker.getJobs(context.getJobID().toString(), startTime); if (childJobs.size() == 0) { LOG.info("No child jobs found to reconnect with"); updateJobStatePercentAndChildId(conf, context.getJobID().toString(), null, childJobIdString); exitCode = 1; updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(), exitCode); break; updateJobStatePercentAndChildId(conf, context.getJobID().toString(), percent, null);
// NOTE(review): out-of-context excerpt of the Templeton launcher run path (duplicate of an
// earlier excerpt in this dump): kill stale child jobs, watch process stdout/stderr into
// statusdir, keep counters alive, persist the exit value, then gather logs.
// Second statement is truncated mid-argument-list — not compilable as-is; consult the full
// source file before editing.
killLauncherChildJobs(conf, context.getJobID().toString()); context.getJobID().toString(), conf.get("user.name"), conf.get(OVERRIDE_CLASSPATH)); executeWatcher(pool, conf, context.getJobID(), proc.getInputStream(), statusdir, STDOUT_FNAME); executeWatcher(pool, conf, context.getJobID(), proc.getErrorStream(), statusdir, STDERR_FNAME); KeepAlive keepAlive = startCounterKeepAlive(pool, context); updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(), proc.exitValue()); LOG.info("templeton: collecting logs for " + context.getJobID().toString() + " to " + statusdir + "/logs"); LogRetriever logRetriever = new LogRetriever(statusdir, jobType, conf);
/**
 * Captures this task's numeric id so each emitted tuple can be tagged with it,
 * locates the {@code POCounter} operator in the plan, and resets its local counter.
 * Also registers the per-task rank counter (initialized to 0) with the status reporter.
 */
@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    final int numericTaskId = context.getTaskAttemptID().getTaskID().getId();
    taskID = String.valueOf(numericTaskId);
    // Walk upward from the leaf through predecessors until the POCounter is found.
    pOperator = mp.getLeaves().get(0);
    while (!(pOperator instanceof POCounter)) {
        pOperator = mp.getPredecessors(pOperator).get(0);
    }
    ((POCounter) pOperator).setTaskId(numericTaskId);
    ((POCounter) pOperator).resetLocalCounter();
    PigStatusReporter statusReporter = PigStatusReporter.getInstance();
    if (statusReporter != null) {
        statusReporter.incrCounter(
            JobControlCompiler.PIG_MAP_RANK_NAME + context.getJobID().toString(), taskID, 0);
    }
}
/**
 * Looks up {@code key} and expands the supported placeholders in the result.
 *
 * Supported substitutions:
 *   %JOB_ID%  => current job id
 *   %TASK_ID% => current task attempt id
 *   %USER%    => owning user name (falls back to "unknown_user")
 *
 * @param key          name of the key to look up
 * @param defaultValue value used when no mapping exists; placeholders in it
 *                     are expanded as well
 * @param context      mapper context supplying job and task ids
 * @return the expanded value, or {@code null} when neither a mapping nor a
 *         default is available
 */
public String getStringVars(String key, String defaultValue, Mapper.Context context) {
    String raw = get(key);
    if (raw == null) {
        raw = defaultValue;
    }
    if (raw == null) {
        return null;
    }
    return raw
        .replace("%JOB_ID%", context.getJobID().toString())
        .replace("%TASK_ID%", context.getTaskAttemptID().toString())
        .replace("%USER%", get("user.name", "unknown_user"));
}
/**
 * Prepares this generator task: builds a human-readable task id, opens the Kudu
 * client/session against the data and heads tables, and sizes the linked-list
 * generation parameters (width, wrap length, node count) from the configuration.
 */
@Override
protected void setup(Context context) throws KuduException {
    id = "Job: " + context.getJobID() + " Task: " + context.getTaskAttemptID();
    Configuration conf = context.getConfiguration();
    CommandLineParser parser = new CommandLineParser(conf);
    client = parser.getClient();
    table = client.openTable(getTableName(conf));
    headsTable = client.openTable(getHeadsTable(conf));
    // Manual flush with duplicate-row errors ignored: rows are buffered and
    // flushed explicitly by the caller.
    session = client.newSession();
    session.setFlushMode(SessionConfiguration.FlushMode.MANUAL_FLUSH);
    session.setMutationBufferSpace(WIDTH_DEFAULT);
    session.setIgnoreAllDuplicateRows(true);
    this.width = conf.getInt(GENERATOR_WIDTH_KEY, WIDTH_DEFAULT);
    current = new byte[this.width][];
    int wrapFactor = conf.getInt(GENERATOR_WRAP_KEY, WRAP_DEFAULT);
    this.wrap = (long) wrapFactor * width;
    this.numNodes = conf.getLong(
        GENERATOR_NUM_ROWS_PER_MAP_KEY, (long) WIDTH_DEFAULT * WRAP_DEFAULT);
    // A list can never wrap around more nodes than this task will generate.
    this.wrap = Math.min(this.wrap, this.numNodes);
}
/** * Creates simple mapreduce context that says it has a single record but won't actually * return any records as tests are not expected to read the records. * @return * @throws java.io.IOException * @throws InterruptedException */ private Mapper.Context getContext() throws java.io.IOException, InterruptedException { Mapper.Context context = mock(Mapper.Context.class); Configuration conf = new Configuration(); conf.set("mapreduce.task.id", UUID.randomUUID().toString()); when(context.getConfiguration()).thenReturn(conf); TaskAttemptID taskAttemptID = new TaskAttemptID(); when(context.getTaskAttemptID()).thenReturn(taskAttemptID); JobID jobID = new JobID("job001", 1); when(context.getJobID()).thenReturn(jobID); // Simulate a single record by answering 'true' once when(context.nextKeyValue()).thenAnswer(new Answer<Object>() { boolean answer = true; @Override public Object answer(InvocationOnMock invocation) { if (answer == true) { answer = false; return true; } return false; } }); return context; }
/**
 * Prepares this generator task: builds a task-identifying byte id, opens an HBase
 * connection and table, sizes the linked-list generation parameters (width, wrap
 * length, node count) from the configuration, and initializes the concurrent
 * walker settings.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    id = Bytes.toBytes("Job: " + context.getJobID() + " Task: " + context.getTaskAttemptID());
    this.connection = ConnectionFactory.createConnection(context.getConfiguration());
    instantiateHTable();
    this.width = context.getConfiguration().getInt(GENERATOR_WIDTH_KEY, WIDTH_DEFAULT);
    current = new byte[this.width][];
    int wrapFactor = context.getConfiguration().getInt(GENERATOR_WRAP_KEY, WRAP_DEFAULT);
    this.wrap = (long) wrapFactor * width;
    this.numNodes = context.getConfiguration().getLong(
        GENERATOR_NUM_ROWS_PER_MAP_KEY, (long) WIDTH_DEFAULT * WRAP_DEFAULT);
    // A list can never wrap around more nodes than this task will generate.
    this.wrap = Math.min(this.wrap, this.numNodes);
    this.multipleUnevenColumnFamilies = isMultiUnevenColumnFamilies(context.getConfiguration());
    this.numWalkers =
        context.getConfiguration().getInt(CONCURRENT_WALKER_KEY, CONCURRENT_WALKER_DEFAULT);
    this.walkersStop = false;
    this.conf = context.getConfiguration();
}
/**
 * Writes the tuple to the output and increments this task's per-job rank
 * counter by one, so every collected tuple is counted.
 */
@Override
public void collect(Context context, Tuple tuple) throws InterruptedException, IOException {
    context.write(null, tuple);
    PigStatusReporter statusReporter = PigStatusReporter.getInstance();
    if (statusReporter != null) {
        String counterGroup = JobControlCompiler.PIG_MAP_RANK_NAME + context.getJobID().toString();
        statusReporter.incrCounter(counterGroup, taskID, 1);
    }
}
}
/**
 * Sets up the worker-context logic for this application, handing it an API
 * wrapper around this context plus a block output handle keyed by the job id.
 */
@Override
public void preApplication() throws InstantiationException, IllegalAccessException {
    BlockOutputHandle outputHandle =
        new BlockOutputHandle(getContext().getJobID().toString(), getConf(), getContext());
    workerLogic = new BlockWorkerContextLogic();
    workerLogic.preApplication(new BlockWorkerContextApiWrapper<>(this), outputHandle);
}
/**
 * Initializes the block master logic with the current configuration and a
 * master API wrapper backed by a block output handle keyed by the job id.
 */
@Override
public void initialize() throws InstantiationException, IllegalAccessException {
    BlockOutputHandle outputHandle =
        new BlockOutputHandle(getContext().getJobID().toString(), getConf(), getContext());
    blockMasterLogic.initialize(getConf(), new BlockMasterApiWrapper(this, outputHandle));
}