/**
 * Constructor. Roots the committer at the job's configured output path and
 * records the delegate committer for later use.
 *
 * @param ctx task attempt context; its configured output path becomes this
 *     committer's output directory.
 * @param delegate underlying {@code FileOutputCommitter} stored in the
 *     {@code delegate} field (declared elsewhere in this class).
 * @throws IOException if the superclass fails to initialize with the output path.
 */
private TestOutputCommitter(TaskAttemptContext ctx, FileOutputCommitter delegate) throws IOException {
    super(FileOutputFormat.getOutputPath(ctx), ctx);
    this.delegate = delegate;
}
/**
 * Returns the job's output committer, creating it on first use.
 * Synchronized so concurrent callers observe a single cached instance.
 *
 * @param context task attempt context supplying the job output path.
 * @return the cached {@code AvroKeyCompactorOutputCommitter}.
 * @throws IOException if the committer cannot be constructed.
 */
@Override
public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    if (this.committer != null) {
        return this.committer;
    }
    Path outputPath = FileOutputFormat.getOutputPath(context);
    this.committer = new AvroKeyCompactorOutputCommitter(outputPath, context);
    return this.committer;
}
/**
 * Constructor. Resolves the Avro schema, filters Trevni metadata from the job
 * configuration, creates the column writer, and makes the per-task output
 * directory ("part-..." unique name under the job output path).
 *
 * @param context the TaskAttemptContext to supply the writer with information
 *     from the job configuration.
 * @throws IOException if the filesystem or output directory cannot be set up.
 */
public AvroTrevniRecordWriterBase(TaskAttemptContext context) throws IOException {
    schema = initSchema(context);
    meta = filterMetadata(context.getConfiguration());
    writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());

    Path outputPath = FileOutputFormat.getOutputPath(context);
    String dir = FileOutputFormat.getUniqueFile(context, "part", "");
    // FIX: use the Path(parent, child) constructor instead of string
    // concatenation with "/" — it normalizes separators and cannot produce a
    // malformed path string.
    dirPath = new Path(outputPath, dir);
    fs = dirPath.getFileSystem(context.getConfiguration());
    fs.mkdirs(dirPath);

    // NOTE(review): the no-argument getDefaultBlockSize() is deprecated in
    // favor of getDefaultBlockSize(Path); kept as-is to preserve behavior.
    blockSize = fs.getDefaultBlockSize();
}
job.getConfiguration(), progressable); final Path outputdir = FileOutputFormat.getOutputPath(tac); final Path taskAttemptOutputdir = new FileOutputCommitter(outputdir, tac).getWorkPath(); final org.apache.hadoop.mapreduce.RecordWriter<
this.fs = FileSystem.get(context.getConfiguration()); this.taskStateStore = new FsStateStore<>(this.fs, FileOutputFormat.getOutputPath(context).toUri().getPath(), TaskState.class);
/**
 * Returns the job's output committer, lazily constructing it on the first call.
 * Synchronized so all callers share one committer instance.
 *
 * @param context task attempt context supplying the job output path.
 * @return the cached {@code CamusSweeperOutputCommitter}.
 * @throws IOException if the committer cannot be constructed.
 */
@Override
public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    // NOTE(review): field name "commiter" is misspelled, but it is declared
    // elsewhere in this class, so it is kept unchanged here.
    if (this.commiter != null) {
        return this.commiter;
    }
    Path outputPath = FileOutputFormat.getOutputPath(context);
    this.commiter = new CamusSweeperOutputCommitter(outputPath, context);
    return this.commiter;
}
/**
 * Persists the keys of records that were missed in this run to an
 * "{@code OFFSET_PREFIX}-previous" sequence file under the job output directory.
 *
 * @param missedKeys keys to record for the next run.
 * @param context job context providing the configuration and output path.
 * @throws IOException if the directory or file cannot be created or written.
 */
private void writePrevious(Collection<EtlKey> missedKeys, JobContext context) throws IOException {
    FileSystem fs = FileSystem.get(context.getConfiguration());
    Path output = FileOutputFormat.getOutputPath(context);

    // BUG FIX: the original condition was inverted — mkdirs() was only called
    // when the directory already existed. Create it when it is missing.
    if (!fs.exists(output)) {
        fs.mkdirs(output);
    }

    output = new Path(output, EtlMultiOutputFormat.OFFSET_PREFIX + "-previous");
    SequenceFile.Writer writer =
        SequenceFile.createWriter(fs, context.getConfiguration(), output, EtlKey.class, NullWritable.class);
    try {
        for (EtlKey key : missedKeys) {
            writer.append(key, NullWritable.get());
        }
    } finally {
        // Close in finally so a failed append does not leak the writer.
        writer.close();
    }
}
protected void writeRequests(List<CamusRequest> requests, JobContext context) throws IOException { FileSystem fs = FileSystem.get(context.getConfiguration()); Path output = FileOutputFormat.getOutputPath(context); if (fs.exists(output)) { fs.mkdirs(output); } output = new Path(output, EtlMultiOutputFormat.REQUESTS_FILE); SequenceFile.Writer writer = SequenceFile.createWriter(fs, context.getConfiguration(), output, EtlRequest.class, NullWritable.class); for (CamusRequest r : requests) { //TODO: factor out kafka specific request functionality writer.append(r, NullWritable.get()); } writer.close(); }
throws IOException { final Path outputPath = FileOutputFormat.getOutputPath(context); final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath(); final Configuration conf = context.getConfiguration();
/**
 * Get the output directory the job is using.
 *
 * @param context job context.
 * @return Path of the job's configured output directory.
 */
private static Path getOutputDir(JobContext context) {
    final Path outputDir = FileOutputFormat.getOutputPath(context);
    return outputDir;
}
/**
 * Creates a standard {@code FileOutputCommitter} rooted at the job's output
 * path. A fresh committer instance is returned on every call.
 *
 * @param context task attempt context supplying the job output path.
 * @return a new {@code FileOutputCommitter} for this attempt.
 * @throws IOException if the committer cannot be constructed.
 */
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    return new FileOutputCommitter(outputPath, context);
}
/**
 * Creates a standard {@code FileOutputCommitter} rooted at the job's output
 * path. A fresh committer instance is returned on every call.
 *
 * @param context task attempt context supplying the job output path.
 * @return a new {@code FileOutputCommitter} for this attempt.
 * @throws IOException if the committer cannot be constructed.
 */
@Override // CONSISTENCY FIX: sibling overrides of this method carry @Override.
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context);
}
/**
 * Lazily creates and caches a {@code FileOutputCommitter} for this format.
 * Synchronized so concurrent callers observe a single committer instance.
 *
 * @param context task attempt context supplying the job output path.
 * @return the cached committer.
 * @throws IOException if the committer cannot be constructed.
 */
public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    if (committer != null) {
        return committer;
    }
    final Path output = getOutputPath(context);
    committer = new FileOutputCommitter(output, context);
    return committer;
}
}
/**
 * Creates a standard {@code FileOutputCommitter} rooted at the job's output
 * path. A fresh committer instance is returned on every call.
 *
 * @param context task attempt context supplying the job output path.
 * @return a new {@code FileOutputCommitter} for this attempt.
 * @throws IOException if the committer cannot be constructed.
 */
@Override // CONSISTENCY FIX: sibling overrides of this method carry @Override.
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context);
}
/**
 * Returns the committer for this format, building it the first time it is
 * requested. The method is synchronized so only one instance is ever created.
 *
 * @param context task attempt context supplying the job output path.
 * @return the cached {@code FileOutputCommitter}.
 * @throws IOException if the committer cannot be constructed.
 */
public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    if (committer == null) {
        committer = new FileOutputCommitter(getOutputPath(context), context);
    }
    return committer;
}
}
/**
 * Creates a standard {@code FileOutputCommitter} rooted at the job's output
 * path. A fresh committer instance is returned on every call.
 *
 * @param context task attempt context supplying the job output path.
 * @return a new {@code FileOutputCommitter} for this attempt.
 * @throws IOException if the committer cannot be constructed.
 */
@Override // CONSISTENCY FIX: sibling overrides of this method carry @Override.
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context);
}
/**
 * Builds a {@code FileOutputCommitter} for the job's configured output path.
 * No caching — each call yields a new committer.
 *
 * @param context task attempt context supplying the job output path.
 * @return a new {@code FileOutputCommitter} for this attempt.
 * @throws IOException if the committer cannot be constructed.
 */
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    final Path jobOutput = FileOutputFormat.getOutputPath(context);
    return new FileOutputCommitter(jobOutput, context);
}
/**
 * Returns the job's output committer, constructing and caching it on first
 * use. Synchronized to guarantee a single shared instance.
 *
 * @param context task attempt context supplying the job output path.
 * @return the cached {@code AvroKeyCompactorOutputCommitter}.
 * @throws IOException if the committer cannot be constructed.
 */
@Override
public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    if (this.committer == null) {
        final Path jobOutput = FileOutputFormat.getOutputPath(context);
        this.committer = new AvroKeyCompactorOutputCommitter(jobOutput, context);
    }
    return this.committer;
}
/**
 * Cleanup hook that drops an empty marker file ({@code CUSTOM_CLEANUP_FILE_NAME})
 * into the job output directory so callers can verify cleanup ran.
 *
 * @param context job context providing the configuration and output path.
 * @throws IOException if the marker file cannot be created.
 */
@Override
public void cleanupJob(JobContext context) throws IOException {
    // FIX: removed leftover debug output (System.err.println("---- HERE ----"))
    // that polluted stderr on every cleanup.
    Path outputPath = FileOutputFormat.getOutputPath(context);
    FileSystem fs = outputPath.getFileSystem(context.getConfiguration());
    // Create-and-close immediately: only the file's existence matters.
    fs.create(new Path(outputPath, CUSTOM_CLEANUP_FILE_NAME)).close();
}
}
/**
 * Abort hook that drops an empty marker file into the job output directory:
 * {@code ABORT_FAILED_FILE_NAME} when the job failed, otherwise
 * {@code ABORT_KILLED_FILE_NAME}.
 *
 * @param context job context providing the configuration and output path.
 * @param state terminal job state (FAILED or KILLED).
 * @throws IOException if the marker file cannot be created.
 */
@Override
public void abortJob(JobContext context, JobStatus.State state) throws IOException {
    Path outputPath = FileOutputFormat.getOutputPath(context);
    FileSystem fs = outputPath.getFileSystem(context.getConfiguration());
    // IDIOM FIX: enum constants are singletons — compare with == instead of
    // equals(); identical result here, and == is also null-safe.
    String fileName =
        (state == JobStatus.State.FAILED) ? ABORT_FAILED_FILE_NAME : ABORT_KILLED_FILE_NAME;
    fs.create(new Path(outputPath, fileName)).close();
}
}