@Override public void cancel(String jobId) throws IOException { JobID id = JobID.forName(jobId); Cluster cluster = new Cluster(this.getConf()); try { Job job = cluster.getJob(id); if (job == null) { LOG.error("No job found for " + id); // should we throw exception return; } if (job.isComplete() || job.isRetired()) { return; } job.killJob(); LOG.debug("Killed copy job " + id); } catch (InterruptedException e) { throw new IOException(e); } }
// Fragment (enclosing method not visible): rebuild the Hadoop configuration and
// re-apply this step's MapReduce parameters before looking up the previously
// submitted MR job by its saved id.
Configuration conf = new Configuration(HadoopUtil.getCurrentConfiguration());
// Split on any whitespace run; presumably getMapReduceParams() is a space-separated
// flag list — TODO confirm against its producer.
overwriteJobConf(conf, executableContext.getConfig(), getMapReduceParams().trim().split("\\s+"));
Job job = new Cluster(conf).getJob(JobID.forName(mrJobId));
// A vanished job (null) and a FAILED job are treated the same; the branch body
// lies outside this snippet.
if (job == null || job.getJobState() == JobStatus.State.FAILED) {
// Fragment (enclosing method not visible): if a previous attempt recorded an MR
// job id in the executable's output metadata, re-attach to that job instead of
// submitting a new one.
final Map<String, String> extra = mgr.getOutput(getId()).getExtra();
if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
    job = new Cluster(conf).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
    logger.info("mr_job_id:" + extra.get(ExecutableConstants.MR_JOB_ID) + " resumed");
} else {
/**
 * Registers a Mockito {@code Job} on the given mock cluster for {@code jobId},
 * whose {@code getStatus()} reports the supplied {@code jobState}.
 *
 * @return the mocked Job, for further stubbing by the caller
 */
private Job mockJob(Cluster mockCluster, String jobId, State jobState)
        throws IOException, InterruptedException {
    Job job = mock(Job.class);
    when(mockCluster.getJob(JobID.forName(jobId))).thenReturn(job);
    JobStatus jobStatus =
            new JobStatus(null, 0, 0, 0, 0, jobState, JobPriority.HIGH, null, null, null, null);
    when(job.getStatus()).thenReturn(jobStatus);
    return job;
}
/**
 * With one client-side retry configured, getJob must tolerate a first null
 * lookup and return the job found on the second attempt.
 */
@Test
public void testGetJobWithRetry() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(MRJobConfig.MR_CLIENT_JOB_MAX_RETRIES, 1);
    final Cluster mockCluster = mock(Cluster.class);
    final Job mockJob = Job.getInstance(conf);
    // First lookup misses (null), the retry succeeds.
    when(mockCluster.getJob(any(JobID.class))).thenReturn(null).thenReturn(mockJob);
    CLI cli = new CLI(conf);
    cli.cluster = mockCluster;
    Job job = cli.getJob(JobID.forName("job_1234654654_001"));
    // assertNotNull is the idiomatic form of assertTrue(x != null).
    Assert.assertNotNull("job is null", job);
}
/**
 * With retries disabled, getJob must return null immediately when the cluster
 * lookup misses, rather than retrying.
 */
@Test
public void testGetJobWithoutRetry() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(MRJobConfig.MR_CLIENT_JOB_MAX_RETRIES, 0);
    final Cluster mockCluster = mock(Cluster.class);
    when(mockCluster.getJob(any(JobID.class))).thenReturn(null);
    CLI cli = new CLI(conf);
    cli.cluster = mockCluster;
    Job job = cli.getJob(JobID.forName("job_1234654654_001"));
    // assertNull is the idiomatic form of assertTrue(x == null).
    Assert.assertNull("job is not null", job);
}
@Override protected void onExecuteStart(ExecutableContext executableContext) { final Output output = executableManager.getOutput(getId()); if (output.getExtra().containsKey(START_TIME)) { final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID); if (mrJobId == null) { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); return; } try { Job job = new Cluster(new Configuration()).getJob(JobID.forName(mrJobId)); if (job.getJobState() == JobStatus.State.FAILED) { //remove previous mr job info super.onExecuteStart(executableContext); } else { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); } } catch (IOException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } catch (InterruptedException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } } else { super.onExecuteStart(executableContext); } }
// Fragment (enclosing method not visible): re-attach to a previously submitted
// MR job recorded in the executable's output metadata.
final Map<String, String> extra = executableManager.getOutput(getId()).getExtra();
if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
    job = new Cluster(new Configuration()).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
    // Fixed misplaced parenthesis: the original concatenated " resumed" INTO the
    // map key — extra.get(MR_JOB_ID + " resumed") — which always logged null.
    logger.info("mr_job_id:" + extra.get(ExecutableConstants.MR_JOB_ID) + " resumed");
} else {
/**
 * Parses the string form of a job id (e.g. {@code job_1317928501754_0001})
 * into a JobID.
 *
 * @param str the string to parse; may be {@code null}
 * @return the parsed JobID, or {@code null} when {@code str} is {@code null}
 * @throws IllegalArgumentException if {@code str} is malformed
 */
public static JobID forName(String str) throws IllegalArgumentException {
    org.apache.hadoop.mapreduce.JobID parsed = org.apache.hadoop.mapreduce.JobID.forName(str);
    return (JobID) parsed;
}
/**
 * Parses the string form of a job id (e.g. {@code job_1317928501754_0001})
 * into a JobID.
 *
 * @param str the string to parse; may be {@code null}
 * @return the parsed JobID, or {@code null} when {@code str} is {@code null}
 * @throws IllegalArgumentException if {@code str} is malformed
 */
public static JobID forName(String str) throws IllegalArgumentException {
    org.apache.hadoop.mapreduce.JobID parsed = org.apache.hadoop.mapreduce.JobID.forName(str);
    return (JobID) parsed;
}
/**
 * Parses the string form of a job id (e.g. {@code job_1317928501754_0001})
 * into a JobID.
 *
 * @param str the string to parse; may be {@code null}
 * @return the parsed JobID, or {@code null} when {@code str} is {@code null}
 * @throws IllegalArgumentException if {@code str} is malformed
 */
public static JobID forName(String str) throws IllegalArgumentException {
    org.apache.hadoop.mapreduce.JobID parsed = org.apache.hadoop.mapreduce.JobID.forName(str);
    return (JobID) parsed;
}
/**
 * Parses the string form of a job id (e.g. {@code job_1317928501754_0001})
 * into a JobID.
 *
 * @param str the string to parse; may be {@code null}
 * @return the parsed JobID, or {@code null} when {@code str} is {@code null}
 * @throws IllegalArgumentException if {@code str} is malformed
 */
public static JobID forName(String str) throws IllegalArgumentException {
    org.apache.hadoop.mapreduce.JobID parsed = org.apache.hadoop.mapreduce.JobID.forName(str);
    return (JobID) parsed;
}
/**
 * -list-attempt-ids must reject unsupported task types / task states and
 * unknown job ids, signalling each failure with exit code -1.
 */
@Test
public void testListAttemptIdsWithInvalidInputs() throws Exception {
    JobID jobId = JobID.forName(jobIdStr);
    Cluster cluster = mock(Cluster.class);
    Job mockJob = mock(Job.class);
    CLI cli = spy(new CLI(new Configuration()));
    doReturn(cluster).when(cli).createCluster();
    when(cluster.getJob(jobId)).thenReturn(mockJob);

    String unknownJobIdStr = "job_1015298225799_0016";
    int setupExit =
        cli.run(new String[] { "-list-attempt-ids", jobIdStr, "JOB_SETUP", "running" });
    int cleanupExit =
        cli.run(new String[] { "-list-attempt-ids", jobIdStr, "JOB_CLEANUP", "running" });
    int badStateExit =
        cli.run(new String[] { "-list-attempt-ids", jobIdStr, "REDUCE", "complete" });
    int unknownJobExit =
        cli.run(new String[] { "-list-attempt-ids", unknownJobIdStr, "MAP", "running" });

    assertEquals("JOB_SETUP is an invalid input,exit code should be -1", -1, setupExit);
    assertEquals("JOB_CLEANUP is an invalid input,exit code should be -1", -1, cleanupExit);
    assertEquals("complete is an invalid input,exit code should be -1", -1, badStateExit);
    assertEquals("Non existing job id should be skippted with -1", -1, unknownJobExit);
}
// Fragment: setup for the valid-input -list-attempt-ids test; the remainder of
// the method body lies beyond this snippet.
@Test
public void testListAttemptIdsWithValidInput() throws Exception {
    JobID jobId = JobID.forName(jobIdStr);
    Cluster mockCluster = mock(Cluster.class);
    Job job = mock(Job.class);
/**
 * Builds a synthetic {@code JobHistoryParser.JobInfo} describing a succeeded
 * single-map, zero-reduce job, complete with setup/cleanup task entries and
 * deprecated-format counters.
 */
private static JobHistoryParser.JobInfo createJobInfo2() {
    JobHistoryParser.JobInfo info = new JobHistoryParser.JobInfo();

    // Timing: launched 1s and finished 15s after submission.
    info.submitTime = 1317928501754L;
    info.finishTime = info.submitTime + 15000;
    info.launchTime = info.submitTime + 1000;

    // Identity and placement.
    info.jobid = JobID.forName("job_1317928501754_0001");
    info.username = "test";
    info.jobname = "Dupe counter output";
    info.jobQueueName = "root.test";
    info.jobConfPath = "/tmp/job.xml";

    // One successful map, no reduces, no failures.
    info.totalMaps = 1;
    info.totalReduces = 0;
    info.failedMaps = 0;
    info.failedReduces = 0;
    info.succeededMaps = 1;
    info.succeededReduces = 0;
    info.jobStatus = JobStatus.State.SUCCEEDED.name();

    info.totalCounters = createDeprecatedCounters();
    info.mapCounters = createDeprecatedCounters();
    info.reduceCounters = createDeprecatedCounters();

    info.tasksMap = new HashMap<>();
    addTaskInfo(info, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED);
    addTaskInfo(info, TaskType.MAP, 2, TaskStatus.State.SUCCEEDED);
    addTaskInfo(info, TaskType.JOB_CLEANUP, 3, TaskStatus.State.SUCCEEDED);
    return info;
}
/** @return the job id parsed from this event's Avro datum. */
public JobID getJobId() {
    String idText = datum.getJobid().toString();
    return JobID.forName(idText);
}
/** Get the job finish time */
/** @return the job id parsed from this event's Avro datum. */
public JobID getJobId() {
    String idText = datum.getJobid().toString();
    return JobID.forName(idText);
}
/** Get the job priority */
// Fragment: populating a job-history fixture (enclosing method not visible).
job.submitTime = 1317928501754L;
job.finishTime = job.submitTime + 15000; // finishes 15s after submission
job.jobid = JobID.forName("job_1317928501754_0001");
job.username = "rkanter";
job.jobname = "my job";
// Populates this event's fields from an Avro JobFinished record.
// NOTE(review): reads the generated record's public fields directly
// (datum.jobid, datum.finishTime, ...) — presumably an older Avro codegen
// style; do not convert to getters without confirming the generated class.
public void setDatum(Object oDatum) {
    this.datum = (JobFinished) oDatum;
    this.jobId = JobID.forName(datum.jobid.toString());
    this.finishTime = datum.finishTime;
    this.finishedMaps = datum.finishedMaps;
    this.finishedReduces = datum.finishedReduces;
    this.failedMaps = datum.failedMaps;
    this.failedReduces = datum.failedReduces;
    // Counter groups are converted out of their Avro representation.
    this.mapCounters = EventReader.fromAvro(datum.mapCounters);
    this.reduceCounters = EventReader.fromAvro(datum.reduceCounters);
    this.totalCounters = EventReader.fromAvro(datum.totalCounters);
}
// Populates this event's fields from an Avro JobFinished record, using the
// generated record's accessor methods.
public void setDatum(Object oDatum) {
    this.datum = (JobFinished) oDatum;
    this.jobId = JobID.forName(datum.getJobid().toString());
    this.finishTime = datum.getFinishTime();
    this.finishedMaps = datum.getFinishedMaps();
    this.finishedReduces = datum.getFinishedReduces();
    this.failedMaps = datum.getFailedMaps();
    this.failedReduces = datum.getFailedReduces();
    // Counter groups are converted out of their Avro representation.
    this.mapCounters = EventReader.fromAvro(datum.getMapCounters());
    this.reduceCounters = EventReader.fromAvro(datum.getReduceCounters());
    this.totalCounters = EventReader.fromAvro(datum.getTotalCounters());
}