@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
  boolean result = false;
  for (OutputCommitter committer : committers) {
    result |= committer.needsTaskCommit(taskContext);
  }
  return result;
}
@Override
public boolean needsTaskCommit(org.apache.hadoop.mapred.TaskAttemptContext taskContext) throws IOException {
  boolean result = false;
  for (OutputCommitter committer : committers) {
    result |= committer.needsTaskCommit(taskContext);
  }
  return result;
}
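Note the `|=` in the two aggregating overrides above: unlike the short-circuiting `||` used in some of the container variants further down, it consults every delegate committer even after one has already answered true, so delegates that do bookkeeping inside needsTaskCommit() are never skipped. A minimal, self-contained sketch (the StubCommitter class and two-committer setup are hypothetical, for illustration only) makes the difference visible:

import java.io.IOException;
import java.util.List;

class StubCommitter {
  private final boolean needsCommit;
  StubCommitter(boolean needsCommit) { this.needsCommit = needsCommit; }
  boolean needsTaskCommit() throws IOException {
    System.out.println("consulted, answering " + needsCommit);
    return needsCommit;
  }
}

public class AggregationDemo {
  public static void main(String[] args) throws IOException {
    List<StubCommitter> committers =
        List.of(new StubCommitter(true), new StubCommitter(false));
    boolean result = false;
    for (StubCommitter c : committers) {
      result |= c.needsTaskCommit(); // both stubs print, even after result is true
    }
    System.out.println("needs commit: " + result); // true
  }
}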
/**
 * Commits the task by moving the output file out of the temporary directory.
 *
 * @throws java.io.IOException
 */
@Override
public void close() throws IOException {
  // enforce sequential close() calls
  synchronized (CLOSE_MUTEX) {
    try {
      this.recordWriter.close(this.context);
    } catch (InterruptedException e) {
      throw new IOException("Could not close RecordWriter.", e);
    }

    if (this.outputCommitter.needsTaskCommit(this.context)) {
      this.outputCommitter.commitTask(this.context);
    }

    Path outputPath = new Path(this.configuration.get("mapred.output.dir"));

    // rename tmp-file to final name
    FileSystem fs = FileSystem.get(outputPath.toUri(), this.configuration);

    String taskNumberStr = Integer.toString(this.taskNumber);
    String tmpFileTemplate = "tmp-r-00000";
    String tmpFile = tmpFileTemplate.substring(0, 11 - taskNumberStr.length()) + taskNumberStr;

    if (fs.exists(new Path(outputPath.toString() + "/" + tmpFile))) {
      fs.rename(new Path(outputPath.toString() + "/" + tmpFile),
          new Path(outputPath.toString() + "/" + taskNumberStr));
    }
  }
}
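The substring arithmetic above left-pads the task number with zeros by truncating the 11-character template "tmp-r-00000" from the right. A small standalone sketch (hypothetical class, same logic) shows what it produces:

public class TmpFileNameDemo {
  static String tmpFileName(int taskNumber) {
    String taskNumberStr = Integer.toString(taskNumber);
    String tmpFileTemplate = "tmp-r-00000";
    // keep the first (11 - digits) template chars, then append the task number
    return tmpFileTemplate.substring(0, 11 - taskNumberStr.length()) + taskNumberStr;
  }

  public static void main(String[] args) {
    System.out.println(tmpFileName(7));     // tmp-r-00007
    System.out.println(tmpFileName(42));    // tmp-r-00042
    System.out.println(tmpFileName(12345)); // tmp-r-12345
  }
}

For task numbers up to 99999 this is equivalent to String.format("tmp-r-%05d", taskNumber).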
@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
  boolean needTaskCommit = false;
  for (String alias : outputCommitters.keySet()) {
    BaseOutputCommitterContainer outputContainer = outputCommitters.get(alias);
    needTaskCommit = needTaskCommit
        || outputContainer.getBaseCommitter().needsTaskCommit(outputContainer.getContext());
  }
  return needTaskCommit;
}
@Override
public void commitTask(TaskAttemptContext taskContext) throws IOException {
  for (String alias : outputCommitters.keySet()) {
    BaseOutputCommitterContainer outputContainer = outputCommitters.get(alias);
    OutputCommitter baseCommitter = outputContainer.getBaseCommitter();
    TaskAttemptContext committerContext = outputContainer.getContext();
    if (baseCommitter.needsTaskCommit(committerContext)) {
      LOGGER.info("Calling commitTask for alias: " + alias);
      baseCommitter.commitTask(committerContext);
    }
  }
}
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Committing task-attempt for " + outputJobInfo.getLocation());
    TaskAttemptContext dynContext = dynamicContexts.get(dynKey);
    OutputCommitter dynCommitter = baseDynamicCommitters.get(dynKey);
    if (dynCommitter.needsTaskCommit(dynContext)) {
      dynCommitter.commitTask(dynContext);
    } else {
      LOG.info("Skipping commitTask() for " + outputJobInfo.getLocation());
    }
  }
}
@Override
public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
  return getBaseOutputCommitter().needsTaskCommit(HCatMapRedUtil.createTaskAttemptContext(context));
}
private OutputCommitter setupOutputCommitter(boolean needsTaskCommit) throws IOException {
  OutputCommitter outputCommitter = Mockito.mock(OutputCommitter.class);
  when(outputCommitter.needsTaskCommit(nullable(TaskAttemptContext.class))).thenReturn(needsTaskCommit);
  doNothing().when(outputCommitter).commitTask(any(TaskAttemptContext.class));
  return outputCommitter;
}
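A sketch of how such a mock might be exercised (hypothetical test method; assumes JUnit plus the Mockito static imports already used above):

@Test
public void commitsTaskWhenCommitterRequestsIt() throws IOException {
  TaskAttemptContext context = Mockito.mock(TaskAttemptContext.class);
  OutputCommitter committer = setupOutputCommitter(true);

  // code under test would consult the committer and commit only if needed
  if (committer.needsTaskCommit(context)) {
    committer.commitTask(context);
  }

  Mockito.verify(committer).commitTask(context);
}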
/**
 * Commit task.
 *
 * @param outputFormat Output format.
 * @throws IgniteCheckedException In case of Grid exception.
 * @throws IOException In case of IO exception.
 * @throws InterruptedException In case of interrupt.
 */
protected void commit(@Nullable OutputFormat outputFormat)
    throws IgniteCheckedException, IOException, InterruptedException {
  if (hadoopCtx.writer() != null) {
    assert outputFormat != null;

    OutputCommitter outputCommitter = outputFormat.getOutputCommitter(hadoopCtx);

    if (outputCommitter.needsTaskCommit(hadoopCtx))
      outputCommitter.commitTask(hadoopCtx);
  }
}
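For context, every override in this collection plugs into the same task-side commit protocol of the org.apache.hadoop.mapreduce API. Roughly (a sketch, not any one project's driver code; outputFormat and taskContext are assumed to be in scope):

OutputCommitter committer = outputFormat.getOutputCommitter(taskContext);
committer.setupTask(taskContext);
try {
  // ... open the RecordWriter and write the task attempt's records ...
  if (committer.needsTaskCommit(taskContext)) {
    committer.commitTask(taskContext); // promote the attempt's output
  }
} catch (IOException e) {
  committer.abortTask(taskContext); // discard the attempt's output
  throw e;
}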
@Override
public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
  if (!dynamicPartitioningUsed) {
    FileOutputFormatContainer.setWorkOutputPath(context);
    return getBaseOutputCommitter().needsTaskCommit(HCatMapRedUtil.createTaskAttemptContext(context));
  } else {
    // when dynamic partitioning is used, the base committers are invoked
    // explicitly through FileRecordWriterContainer.close(), so always report
    // that a commit is needed
    return true;
  }
}
if (committer.needsTaskCommit(cntxt)) {
  committer.commitTask(cntxt);
}
@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
  return baseCommitter.needsTaskCommit(taskContext);
}
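The pass-through above is the core of a common wrapper pattern: a committer that forwards the whole lifecycle to a wrapped base committer so subclasses can override just the hooks they care about. A minimal, self-contained version (hypothetical class name, not any of the projects quoted here) would be:

import java.io.IOException;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class DelegatingOutputCommitter extends OutputCommitter {
  private final OutputCommitter baseCommitter;

  public DelegatingOutputCommitter(OutputCommitter baseCommitter) {
    this.baseCommitter = baseCommitter;
  }

  @Override public void setupJob(JobContext jobContext) throws IOException {
    baseCommitter.setupJob(jobContext);
  }

  @Override public void setupTask(TaskAttemptContext taskContext) throws IOException {
    baseCommitter.setupTask(taskContext);
  }

  @Override public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
    return baseCommitter.needsTaskCommit(taskContext);
  }

  @Override public void commitTask(TaskAttemptContext taskContext) throws IOException {
    baseCommitter.commitTask(taskContext);
  }

  @Override public void abortTask(TaskAttemptContext taskContext) throws IOException {
    baseCommitter.abortTask(taskContext);
  }
}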
@Override
public void commitTask(TaskAttemptContext taskContext) throws IOException {
  if (rootOutputcommitter.needsTaskCommit(taskContext)) {
    rootOutputcommitter.commitTask(taskContext);
  }
  for (Map.Entry<String, OutputCommitter> committer : committers.entrySet()) {
    TaskAttemptContext namedTaskContext =
        MultipleOutputs.getNamedTaskContext(taskContext, committer.getKey());
    if (committer.getValue().needsTaskCommit(namedTaskContext)) {
      committer.getValue().commitTask(namedTaskContext);
    }
  }
}
@Override
public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
  return outputCommitter.needsTaskCommit(HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
  if (!dynamicPartitioningUsed) {
    return getBaseOutputCommitter().needsTaskCommit(HCatMapRedUtil.createTaskAttemptContext(context));
  } else {
    // when dynamic partitioning is used, the base committers are invoked
    // explicitly through FileRecordWriterContainer.close(), so always report
    // that a commit is needed
    return true;
  }
}