// HCatalog writer-container fragment: recreate the task attempt context and
// point its work directory at the committer's work path.
currTaskContext = HCatMapRedUtil.createTaskAttemptContext(
    currJobContext.getJobConf(),
    currTaskContext.getTaskAttemptID(),
    currTaskContext.getProgressible());
currTaskContext.getConfiguration().set("mapred.work.output.dir",
    new FileOutputCommitter(new Path(localJobInfo.getLocation()), currTaskContext)
        .getWorkPath().toString());
@Override
public void abortJob(JobContext context, int state) throws IOException {
  JobConf conf = context.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  FileSystem fs = outputPath.getFileSystem(conf);
  // Leave a marker file recording whether the job failed or was killed.
  String fileName = (state == JobStatus.FAILED)
      ? TestJobCleanup.ABORT_FAILED_FILE_NAME
      : TestJobCleanup.ABORT_KILLED_FILE_NAME;
  fs.create(new Path(outputPath, fileName)).close();
}
}
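The assertion side of this marker-file handshake is not shown in the snippet; a minimal sketch, with the helper name assumed:

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical verification helper: after the job ends, the test checks
// which marker the committer above dropped in the output directory.
static void assertMarkerPresent(FileSystem fs, Path outputPath, String markerName)
    throws IOException {
  assertTrue(markerName + " marker missing", fs.exists(new Path(outputPath, markerName)));
}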
super.commitJob(context);
JobConf job = context.getJobConf();
FileSystem inFs = inPath.getFileSystem(job);
if (!inFs.getFileStatus(inPath).isDir())
  return;
// Skip if a global index already exists.
Path gindex_path = new Path(inPath, "_master.heap");
if (inFs.exists(gindex_path))
  return;
PrintStream gout = new PrintStream(inFs.create(gindex_path, false));
FileSystem outFs = outPath.getFileSystem(job);
FileStatus[] results = outFs.listStatus(outPath);
for (FileStatus fileStatus : results) {
JobConf conf = new JobConf(jobContext.getConfiguration());
if (!keepTaskFiles(conf)) {
  // Remove the job's temporary directory unless task files are being kept.
  String jobTempDir = conf.get(MRJobConfig.MAPREDUCE_JOB_DIR);
  Path jobTempDirPath = new Path(jobTempDir);
  FileSystem fs = jobTempDirPath.getFileSystem(conf);
  fs.delete(jobTempDirPath, true);
public void cleanupJob(JobContext context) throws IOException {
  JobConf conf = context.getJobConf();
  // do the clean up of temporary directory
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath != null) {
    Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
    FileSystem fileSys = tmpDir.getFileSystem(conf);
    context.getProgressible().progress();
    if (fileSys.exists(tmpDir)) {
      fileSys.delete(tmpDir, true);
    }
  }
}
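None of these hooks fire unless the job is told to use the committer; a minimal wiring sketch for the old mapred API, where MyOutputCommitter stands in for any of the subclasses shown here:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCommitter;

public class CommitterWiring {
  // MyOutputCommitter is hypothetical: any OutputCommitter subclass with a
  // no-arg constructor, such as the cleanupJob override above.
  public static JobConf configure(Class<? extends OutputCommitter> committer) {
    JobConf conf = new JobConf();
    conf.setOutputCommitter(committer); // old-API hook; the new API gets it from the OutputFormat
    return conf;
  }
}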
super.commitJob(context);
JobConf job = context.getJobConf();
Path outPath = BlockOutputFormat.getOutputPath(job);
FileSystem outFs = outPath.getFileSystem(job);
FileStatus[] reducesOut = outFs.listStatus(outPath);
// Walk the output tree, queueing subdirectories and collecting part files.
Queue<FileStatus> dirsToJoin = new ArrayDeque<FileStatus>(Arrays.asList(reducesOut));
while (!dirsToJoin.isEmpty()) {
  FileStatus dirToConcat = dirsToJoin.poll();
  FileStatus[] dirContents = outFs.listStatus(dirToConcat.getPath());
  Vector<Path> filesToConcat = new Vector<Path>();
  for (FileStatus content : dirContents) {
    if (content.isDir()) {
      dirsToJoin.add(content);
    } else if (content.getPath().getName().startsWith("part-")) {
      filesToConcat.add(content.getPath());
    }
  }
  // Promote the part file to replace its parent directory.
  Path tmpPath = dirToConcat.getPath().suffix("_tmp");
  outFs.rename(filesToConcat.get(0), tmpPath);
  outFs.delete(dirToConcat.getPath(), true);
  outFs.rename(tmpPath, dirToConcat.getPath());
}
@Override
public void commitJob(JobContext jobContext) throws IOException {
  baseOutputCommitter.commitJob(jobContext);
  RevisionManager rm = null;
  try {
    Configuration conf = jobContext.getConfiguration();
    Path srcPath = FileOutputFormat.getOutputPath(jobContext.getJobConf());
    if (!FileSystem.get(conf).exists(srcPath)) {
      throw new IOException("Failed to bulk import hfiles. "
          + "Intermediate data directory is cleaned up or missing. "
          + "Please look at the bulk import job if it exists for failure reason");
    }
    Path destPath = new Path(srcPath.getParent(), srcPath.getName() + "_hfiles");
    boolean success = ImportSequenceFile.runJob(jobContext,
        conf.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY), srcPath, destPath);
    if (!success) {
      cleanIntermediate(jobContext);
      throw new IOException("Failed to bulk import hfiles."
          + " Please look at the bulk import job for failure reason");
    }
    rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
    rm.commitWriteTransaction(HBaseRevisionManagerUtil.getWriteTransaction(conf));
    cleanIntermediate(jobContext);
  } finally {
    if (rm != null)
      rm.close();
  }
}
fs = FileSystem.getLocal(job);
Path rootDir = new Path(TEST_ROOT_DIR);
createInputFile(rootDir);
ReflectionUtils.newInstance(jContext.getInputFormatClass(), job);
JobSplitWriter.createSplitFiles(new Path(TEST_ROOT_DIR), job,
    new Path(TEST_ROOT_DIR).getFileSystem(job), splits);
// The search snippet elides the call that populates splitMetaInfo.
TaskSplitMetaInfo[] splitMetaInfo;
fs.delete(rootDir, true);
@Override
public void commitJob(JobContext context) throws IOException {
  Configuration conf = context.getConfiguration();
  Path share = new Path(conf.get("share"));
  FileSystem fs = FileSystem.get(conf);
  // Block the commit until the shared signal file appears.
  while (true) {
    if (fs.exists(share)) {
      break;
    }
    UtilsForTests.waitFor(100);
  }
  super.commitJob(context);
}
}
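The committer above blocks until some other party creates the file named by the "share" key; a minimal sketch of that producing side, assuming the same configuration key:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ShareSignal {
  // Hypothetical counterpart: creating the empty marker file is the signal
  // that lets the polling commitJob above proceed.
  public static void signal(Configuration conf) throws IOException {
    Path share = new Path(conf.get("share"));
    FileSystem.get(conf).create(share).close();
  }
}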
Path submitJobDir = new Path(getSystemDir(), jobId.toString());
Path sharedFilesDir = new Path(getSystemDir(), jobSubmitClient.CAR);
Path submitSplitFile = new Path(submitJobDir, "job.split");
Path submitJobFile = new Path(submitJobDir, "job.xml");
int reduces = job.getNumReduceTasks();
JobContext context = new JobContext(job, jobId);

// Check the output specification of the job before submitting it.
if (job.getUseNewMapper()) {
  org.apache.hadoop.mapreduce.OutputFormat<?, ?> output =
      ReflectionUtils.newInstance(context.getOutputFormatClass(), job);
  output.checkOutputSpecs(context);
} else {
  job.getOutputFormat().checkOutputSpecs(fs, job);
}

// Create the splits for the job.
LOG.debug("Creating splits at " + fs.makeQualified(submitSplitFile));
List<RawSplit> maps;
JobConf conf = null;
if (job.getUseNewMapper()) {
  conf = context.getJobConf();
} else {
  conf = job;
}

// Write out the job configuration file with restricted permissions.
FileSystem.create(fs, submitJobFile, new FsPermission(JOB_FILE_PERMISSION));
try {
private void cleanIntermediate(JobContext jobContext) throws IOException {
  FileSystem fs = FileSystem.get(jobContext.getConfiguration());
  fs.delete(FileOutputFormat.getOutputPath(jobContext.getJobConf()), true);
}
}
@Override
public void commitJob(JobContext context) throws IOException {
  // Hold the commit until the cleanup signal file shows up.
  waitForSignalFile(FileSystem.get(context.getJobConf()), cleanupSignalFile);
  super.commitJob(context);
}
}
@Override
public JobConf getJobConf(org.apache.hadoop.mapred.JobContext context) {
  return context.getJobConf();
}
public static int getKafkaMaxPullMinutesPerTask(JobContext job) {
  return job.getConfiguration().getInt(KAFKA_MAX_PULL_MINUTES_PER_TASK, -1);
}
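The -1 default reads naturally as "no limit"; a sketch of how a pull loop might honor the budget, with everything here treated as illustration rather than the snippet's source:

// Illustrative only: one way a record reader could consult the per-task
// pull budget returned above before fetching the next message.
private static boolean withinPullBudget(long startMillis, int maxMinutes) {
  if (maxMinutes <= 0) {
    return true; // -1 (the default) means no time limit
  }
  return System.currentTimeMillis() - startMillis < maxMinutes * 60L * 1000L;
}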
List<RawSplit> computeNewSplits(JobContext job)
    throws IOException, InterruptedException, ClassNotFoundException {
  JobConf conf = job.getJobConf();
  org.apache.hadoop.mapreduce.InputFormat<?, ?> input =
      ReflectionUtils.newInstance(job.getInputFormatClass(), job.getJobConf());
private org.apache.hadoop.mapreduce.TaskAttemptContext toTaskAttemptContext(JobContext jobContext) {
  assert jobContext != null;
  // NOTE: JobContext can be an instance of TaskAttemptContext.
  if (TaskAttemptContext.class.isInstance(jobContext)) {
    return TaskAttemptContext.class.cast(jobContext);
  }
  final Progressable progressable = jobContext.getProgressible();
  if (progressable == null) {
    LOG.warn(MessageFormat.format(
        "JobContext has no progressable object: {0}",
        jobContext.getClass().getName()));
  } else if (LOG.isDebugEnabled()) {
    LOG.debug(MessageFormat.format(
        "Progressable object is found (jobId={0}, object={1})", //$NON-NLS-1$
        jobContext.getJobID(), progressable));
  }
  TaskAttemptID id = new TaskAttemptID(new TaskID(jobContext.getJobID(), TaskType.MAP, 0), 0);
  return new TaskAttemptContextImpl(jobContext.getConfiguration(), id) {
    @Override
    public void progress() {
      // Forward progress to the wrapped object when one exists.
      if (progressable != null) {
        progressable.progress();
      }
      super.progress();
    }
  };
}
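A hedged usage sketch: a committer method that only receives a JobContext can use the adapter above to keep reporting progress through a long commit (the surrounding committer and its helpers are hypothetical):

// Hypothetical caller of toTaskAttemptContext(...) above.
@Override
public void commitJob(JobContext jobContext) throws IOException {
  org.apache.hadoop.mapreduce.TaskAttemptContext attempt =
      toTaskAttemptContext(jobContext);
  for (Path output : listCommittedOutputs()) { // listCommittedOutputs() is illustrative
    finalizeOutput(output);                    // ... as is finalizeOutput()
    attempt.progress();                        // pings the wrapped Progressable
  }
}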
private static Counter createCounter(JobContext context) {
  assert context != null;
  // Prefer a counter that can report progress through the context.
  if (context instanceof Progressable) {
    return new ProgressableCounter((Progressable) context);
  } else if (context instanceof org.apache.hadoop.mapred.JobContext) {
    return new ProgressableCounter(((org.apache.hadoop.mapred.JobContext) context).getProgressible());
  } else {
    return new Counter();
  }
}
@Override
public void cleanupJob(JobContext context) throws IOException {
  System.err.println("---- HERE ----");
  JobConf conf = context.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  FileSystem fs = outputPath.getFileSystem(conf);
  // Drop a marker so the test can tell this custom cleanup ran.
  fs.create(new Path(outputPath, CUSTOM_CLEANUP_FILE_NAME)).close();
}
}
@Override
@Deprecated
public void cleanupJob(JobContext context) throws IOException {
  JobConf conf = context.getJobConf();
  // do the clean up of temporary directory
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath != null) {
    Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
    FileSystem fileSys = tmpDir.getFileSystem(conf);
    context.getProgressible().progress();
    if (fileSys.exists(tmpDir)) {
      fileSys.delete(tmpDir, true);
    }
  } else {
    LOG.warn("Output Path is Null in cleanup");
  }
}
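Several snippets here override the deprecated cleanupJob rather than commitJob; that still works because the old mapred OutputCommitter of this era routes the new hook through the old one. A sketch of that default bridging:

// Default in org.apache.hadoop.mapred.OutputCommitter of that era:
// commitJob delegates to the deprecated cleanupJob so older subclasses,
// like the one above, keep functioning.
public void commitJob(JobContext jobContext) throws IOException {
  cleanupJob(jobContext);
}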