/** Marks the task as stopped: pings liveness, then records the terminal state. */
@Override
public void stop() {
  context.progress();
  context.setStatus("STOPPED");
}
/** Marks the task as started: pings liveness, then records the state. */
@Override
public void start() {
  context.progress();
  context.setStatus("STARTED");
}
/**
 * Creates a reader for an RCFile split.
 *
 * @param split the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link RCFileMapReduceRecordReader}
 */
@Override
public RecordReader<LongWritable, BytesRefArrayWritable> createRecordReader(
    InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
  context.setStatus(split.toString());
  return new RCFileMapReduceRecordReader<LongWritable, BytesRefArrayWritable>();
}
/** Reports that the named section has finished. */
@Override
public void stopSection(String section) {
  context.progress();
  context.setStatus(StringUtils.format("STOPPED [%s]", section));
}
};
/** Reports that the named section has begun. */
@Override
public void startSection(String section) {
  context.progress();
  context.setStatus(StringUtils.format("STARTED [%s]", section));
}
taskContext.setStatus("Committed");
/** {@inheritDoc} */
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  final FileSplit split = (FileSplit) inputSplit;
  context.setStatus(split.toString());
  final AvroColumnReader.Params readerParams =
      new AvroColumnReader.Params(
          new HadoopInput(split.getPath(), context.getConfiguration()));
  readerParams.setModel(ReflectData.get());
  // Prefer an explicitly configured input key schema when one is present;
  // otherwise the reader falls back to the file's own schema.
  if (AvroJob.getInputKeySchema(context.getConfiguration()) != null) {
    readerParams.setSchema(AvroJob.getInputKeySchema(context.getConfiguration()));
  }
  reader = new AvroColumnReader<>(readerParams);
  rows = reader.getRowCount();
}
/**
 * Creates a reader for FASTA records.
 *
 * @param genericSplit the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link FastaRecordReader}
 */
public RecordReader<LongWritable, Text> createRecordReader(
    InputSplit genericSplit, TaskAttemptContext context) throws IOException {
  context.setStatus(genericSplit.toString());
  return new FastaRecordReader();
}
log.info("Kafka pull time limit reached"); statusMsg += " max read " + maxMsg; context.setStatus(statusMsg); log.info(key.getTopic() + " max read " + maxMsg); mapperContext.getCounter("total", "request-time(ms)").increment(reader.getFetchTime()); statusMsg += statusMsg.length() > 0 ? "; " : ""; statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":" + request.getPartition(); context.setStatus(statusMsg); DateTime time = new DateTime(curTimeStamp); statusMsg += " begin read at " + time.toString(); context.setStatus(statusMsg); log.info(key.getTopic() + " begin read at " + time.toString()); endTimeStamp = (time.plusHours(this.maxPullHours)).getMillis(); this.numRecordsReadForCurrentPartition))); statusMsg += " max read " + maxMsg; context.setStatus(statusMsg); log.info(key.getTopic() + " max read " + maxMsg); mapperContext.getCounter("total", "request-time(ms)").increment(reader.getFetchTime());
/** Forwards the status string to the wrapped task-attempt context. */
@Override
public void setStatus(String status) {
  context.setStatus(status);
}
}
/** Delegates the status update to the underlying context. */
@Override
public void setStatus(String status) {
  context.setStatus(status);
}
}
/**
 * Creates a record reader that decodes Avro data using the job's configured input schema.
 *
 * @param split the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a new {@link AvroRecordReader} bound to the configured schema
 * @throws NullPointerException if {@code AvroJob.INPUT_SCHEMA} is absent from the configuration
 */
@Override
public RecordReader<AvroWrapper<T>, NullWritable> createRecordReader(
    InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
  context.setStatus(split.toString());
  String jsonSchema = context.getConfiguration().get(AvroJob.INPUT_SCHEMA);
  // Fail fast with a clear message rather than an opaque NPE inside Schema.Parser.
  if (jsonSchema == null) {
    throw new NullPointerException(
        "Avro input schema not configured: missing " + AvroJob.INPUT_SCHEMA);
  }
  Schema schema = new Schema.Parser().parse(jsonSchema);
  return new AvroRecordReader<T>(schema);
}
/**
 * Creates a key/value line reader configured from the job configuration.
 *
 * @param genericSplit the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link KeyValueLineRecordReader}
 */
public RecordReader<Text, Text> createRecordReader(InputSplit genericSplit,
    TaskAttemptContext context) throws IOException {
  context.setStatus(genericSplit.toString());
  return new KeyValueLineRecordReader(context.getConfiguration());
}
/**
 * Creates a record reader that decodes Avro data using the job's configured input schema.
 *
 * @param split the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a new {@link AvroRecordReader} bound to the configured schema
 * @throws NullPointerException if {@code AvroJob.INPUT_SCHEMA} is absent from the configuration
 */
@Override
public RecordReader<AvroWrapper<T>, NullWritable> createRecordReader(
    InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
  context.setStatus(split.toString());
  String jsonSchema = context.getConfiguration().get(AvroJob.INPUT_SCHEMA);
  // Fail fast with a clear message rather than an opaque NPE inside Schema.Parser.
  if (jsonSchema == null) {
    throw new NullPointerException(
        "Avro input schema not configured: missing " + AvroJob.INPUT_SCHEMA);
  }
  Schema schema = new Schema.Parser().parse(jsonSchema);
  return new AvroRecordReader<T>(schema);
}
}
/**
 * Best-effort status reporting: forwards {@code msg} to the task reporter when one
 * is attached. If setting the status text fails, falls back to a bare progress ping
 * so the task is still marked alive; the failure is deliberately not propagated,
 * since status text is non-essential.
 *
 * @param msg the status message to report; silently ignored when no reporter is attached
 */
@Override
public void progress(String msg) {
  if (rep == null) {
    return; // no reporter attached; nothing to do
  }
  try {
    rep.setStatus(msg);
  } catch (Exception ignored) {
    // Swallow deliberately: status is cosmetic, but keep signalling liveness.
    rep.progress();
  }
}
public RecordReader<Text, SequencedFragment> createRecordReader( InputSplit genericSplit, TaskAttemptContext context) throws IOException, InterruptedException { context.setStatus(genericSplit.toString()); return new FastqRecordReader(context.getConfiguration(), (FileSplit)genericSplit); // cast as per example in TextInputFormat } }
public RecordReader<Text, ReferenceFragment> createRecordReader( InputSplit genericSplit, TaskAttemptContext context) throws IOException, InterruptedException { context.setStatus(genericSplit.toString()); return new FastaRecordReader(context.getConfiguration(), (FileSplit)genericSplit); // cast as per example in TextInputFormat } }
/**
 * Creates a filtering record reader for the given split.
 *
 * @param split file split
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link FilterRecordReader}
 */
public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
    throws IOException {
  context.setStatus(split.toString());
  return new FilterRecordReader<K, V>(context.getConfiguration());
}
/**
 * Creates a batch reader that renders sequence-file records as JSON.
 *
 * @param split the input split to read
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link SequenceFileAsJSONRecordBatchReader}
 */
public RecordReader<Text, Text> createRecordReader(InputSplit split,
    TaskAttemptContext context) throws IOException {
  context.setStatus(split.toString());
  return new SequenceFileAsJSONRecordBatchReader();
}
}
/**
 * Builds the filtering record reader used for this split.
 *
 * @param split file split
 * @param context the task-attempt context; its status is set to the split description
 * @return a fresh {@link FilterRecordReader} configured from the job configuration
 */
public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
    throws IOException {
  context.setStatus(split.toString());
  return new FilterRecordReader<K, V>(context.getConfiguration());
}