@Override public RecordReader<LongWritable, Text> getRecordReader( InputSplit split, JobConf job, Reporter reporter) throws IOException { InputSplit targetSplit = ((SymlinkTextInputSplit)split).getTargetSplit(); // The target data is in TextInputFormat. TextInputFormat inputFormat = new TextInputFormat(); inputFormat.configure(job); RecordReader innerReader = null; try { innerReader = inputFormat.getRecordReader(targetSplit, job, reporter); } catch (Exception e) { innerReader = HiveIOExceptionHandlerUtil .handleRecordReaderCreationException(e, job); } HiveRecordReader rr = new HiveRecordReader(innerReader, job); rr.initIOContext((FileSplit)targetSplit, job, TextInputFormat.class, innerReader); return rr; }
@Override public RecordReader<LongWritable, Text> getRecordReader( InputSplit split, JobConf job, Reporter reporter) throws IOException { InputSplit targetSplit = ((SymlinkTextInputSplit)split).getTargetSplit(); // The target data is in TextInputFormat. TextInputFormat inputFormat = new TextInputFormat(); inputFormat.configure(job); RecordReader innerReader = null; try { innerReader = inputFormat.getRecordReader(targetSplit, job, reporter); } catch (Exception e) { innerReader = HiveIOExceptionHandlerUtil .handleRecordReaderCreationException(e, job); } HiveRecordReader rr = new HiveRecordReader(innerReader, job); rr.initIOContext((FileSplit)targetSplit, job, TextInputFormat.class, innerReader); return rr; }
@Override public void reset() { // TODO Auto-generated method stub try { this.hasMore = true; this.reader = input_format.getRecordReader(this.split, this.jobConf, voidReporter); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
/**
 * Chooses a record reader by sniffing the first three bytes of the file:
 * the "SEQ" magic header selects the SequenceFile reader, anything else
 * (including a file shorter than three bytes) is treated as plain text.
 *
 * Fix: the original created a text reader inside the EOFException catch
 * and then unconditionally created a second reader in the if/else below,
 * leaking the first one. The header check is now folded into the
 * try/catch so exactly one reader is ever created.
 */
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter)
    throws IOException {
  FileSplit fileSplit = (FileSplit) split;
  FileSystem fs = FileSystem.get(fileSplit.getPath().toUri(), job);

  byte[] header = new byte[3];
  boolean isSequenceFile = false;
  FSDataInputStream is = fs.open(fileSplit.getPath());
  try {
    is.readFully(header);
    isSequenceFile = header[0] == 'S' && header[1] == 'E' && header[2] == 'Q';
  } catch (EOFException eof) {
    // File shorter than the magic header: fall through as plain text.
  } finally {
    is.close();
  }

  return isSequenceFile
      ? seqFileInputFormat.getRecordReader(split, job, reporter)
      : textInputFormat.getRecordReader(split, job, reporter);
}
@Override public void setFile(String file, long offset, long length) { JobConf defaultConf = new JobConf(); this.split = new FileSplit( new Path( file ), offset, length, defaultConf); this.hasMore = true; this.jobConf = defaultConf; //this.split = split; this.input_format = new TextInputFormat(); try { this.reader = input_format.getRecordReader(this.split, this.jobConf, voidReporter); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } this.key = reader.createKey(); }
/**
 * Selects the record reader by inspecting the file's first three bytes:
 * a "SEQ" magic header means Hadoop SequenceFile; otherwise (including
 * files shorter than three bytes) the split is read as plain text.
 *
 * Fix: previously the EOFException catch created a text reader that was
 * then thrown away when the unconditional if/else below created a second
 * reader — a resource leak. Deciding the format inside the try/catch and
 * creating the reader once afterwards removes the leak.
 */
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter)
    throws IOException {
  FileSplit fileSplit = (FileSplit) split;
  FileSystem fs = FileSystem.get(fileSplit.getPath().toUri(), job);

  byte[] magic = new byte[3];
  boolean sequenceFile = false;
  FSDataInputStream in = fs.open(fileSplit.getPath());
  try {
    in.readFully(magic);
    sequenceFile = magic[0] == 'S' && magic[1] == 'E' && magic[2] == 'Q';
  } catch (EOFException eof) {
    // Too short to be a SequenceFile; treat as text.
  } finally {
    in.close();
  }

  if (sequenceFile) {
    return seqFileInputFormat.getRecordReader(split, job, reporter);
  }
  return textInputFormat.getRecordReader(split, job, reporter);
}
@Override public RecordReader<LongWritable, Text> getRecordReader( InputSplit split, JobConf job, Reporter reporter) throws IOException { InputSplit targetSplit = ((SymlinkTextInputSplit)split).getTargetSplit(); // The target data is in TextInputFormat. TextInputFormat inputFormat = new TextInputFormat(); inputFormat.configure(job); return inputFormat.getRecordReader(targetSplit, job, reporter); }
// NOTE(review): fragment — the try opened here is closed outside this view.
// Opens a reader over splits[j] with a live reporter; 'count' presumably
// tallies records consumed in the body that follows — confirm downstream.
LOG.debug("split["+j+"]= " + splits[j]); RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter); try { int count = 0;
/** Reads every value in {@code split} and returns them in encounter order. */
private static List<Text> readSplit(TextInputFormat format, InputSplit split, JobConf job)
    throws IOException {
  List<Text> values = new ArrayList<Text>();
  RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
  LongWritable key = reader.createKey();
  // A fresh Text is allocated per record because the list keeps a
  // reference to each value (next() would otherwise overwrite it).
  for (Text value = reader.createValue(); reader.next(key, value); value = reader.createValue()) {
    values.add(value);
  }
  reader.close();
  return values;
}
// NOTE(review): fragment — the try opened here is closed outside this view.
// Opens a reader over splits[j] using 'conf' and a live reporter; 'counter'
// presumably tracks records read in the body that follows — confirm downstream.
LOG.debug("split["+j+"]= " + splits[j]); RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], conf, reporter); try { int counter = 0;
/**
 * Collects all values of {@code split} into a list, in read order.
 * Each record gets its own Text instance since the reader reuses the
 * value object passed to next().
 */
private static List<Text> readSplit(TextInputFormat format, InputSplit split, JobConf jobConf)
    throws IOException {
  RecordReader<LongWritable, Text> reader = format.getRecordReader(split, jobConf, voidReporter);
  LongWritable recordKey = reader.createKey();
  Text recordValue = reader.createValue();

  List<Text> collected = new ArrayList<Text>();
  while (reader.next(recordKey, recordValue)) {
    collected.add(recordValue);
    recordValue = reader.createValue();
  }
  reader.close();
  return collected;
}
@Override public RecordReader<LongWritable, Text> getRecordReader( InputSplit split, JobConf job, Reporter reporter) throws IOException { InputSplit targetSplit = ((SymlinkTextInputSplit)split).getTargetSplit(); // The target data is in TextInputFormat. TextInputFormat inputFormat = new TextInputFormat(); inputFormat.configure(job); RecordReader innerReader = null; try { innerReader = inputFormat.getRecordReader(targetSplit, job, reporter); } catch (Exception e) { innerReader = HiveIOExceptionHandlerUtil .handleRecordReaderCreationException(e, job); } HiveRecordReader rr = new HiveRecordReader(innerReader, job); rr.initIOContext((FileSplit)targetSplit, job, TextInputFormat.class, innerReader); return rr; }
// NOTE(review): fragment — the try opened here is closed outside this view.
// Opens a reader over splits[j] with the null reporter (no progress updates);
// 'counter' presumably tallies records in the body that follows — confirm downstream.
LOG.debug("split["+j+"]= " + splits[j]); RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], conf, Reporter.NULL); try { int counter = 0;
/** Drains {@code split} and returns every value read, in order. */
private static List<Text> readSplit(TextInputFormat format, InputSplit split, JobConf job)
    throws IOException {
  RecordReader<LongWritable, Text> recordReader =
      format.getRecordReader(split, job, voidReporter);
  LongWritable ignoredKey = recordReader.createKey();

  List<Text> lines = new ArrayList<Text>();
  Text current = recordReader.createValue();
  // Allocate a new Text per record; the reader mutates the instance it is
  // handed, so stored values must not be reused.
  while (recordReader.next(ignoredKey, current)) {
    lines.add(current);
    current = recordReader.createValue();
  }
  recordReader.close();
  return lines;
}
// NOTE(review): fragment — the try opened here is closed outside this view.
// Opens a reader over splits[j] with the null reporter; 'counter' presumably
// counts records consumed in the body that follows — confirm downstream.
LOG.debug("split[" + j + "]= " + splits[j]); RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], conf, Reporter.NULL); try { int counter = 0;
// NOTE(review): fragment — the try opened here is closed outside this view.
// Opens a reader over splits[j] with a live reporter; 'count' presumably
// tallies records read in the body that follows — confirm downstream.
LOG.debug("split["+j+"]= " + splits[j]); RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter); try { int count = 0;
// NOTE(review): fragment — the try opened here is closed outside this view.
// Registers the input format class on the job, then opens a reader with the
// null reporter and allocates reusable key/value holders for the read loop.
job.setInputFormat(inputFormat.getClass()); try { reader = inputFormat.getRecordReader(split, job, Reporter.NULL); key = reader.createKey(); value = reader.createValue();
// Initializes the HTTPD-log record reader: builds the parser over the
// plugin-configured log/timestamp formats and opens a Hadoop LineRecordReader
// over this fragment's byte range of the file.
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
  try {
    /**
     * Extract the list of field names for the parser to use if it is NOT a star query. If it is a star query just
     * pass through null, because the parser is going to have to build all possibilities.
     */
    final Map<String, String> fieldMapping = !isStarQuery() ? makeParserFields() : null;
    writer = new VectorContainerWriter(output);
    // Parser writes into the root map of the vector container, using the
    // plugin's configured log format and timestamp format.
    parser = new HttpdParser(writer.rootAsMap(), context.getManagedBuffer(),
        HttpdLogFormatPlugin.this.getConfig().getLogFormat(),
        HttpdLogFormatPlugin.this.getConfig().getTimestampFormat(),
        fieldMapping);
    // Split covers only this work unit's [start, start+length) slice of the file.
    final Path path = fs.makeQualified(new Path(work.getPath()));
    FileSplit split = new FileSplit(path, work.getStart(), work.getLength(), new String[]{""});
    TextInputFormat inputFormat = new TextInputFormat();
    JobConf job = new JobConf(fs.getConf());
    // Buffer size comes from Drill's text-line-reader config option.
    job.setInt("io.file.buffer.size", fragmentContext.getConfig().getInt(ExecConstants.TEXT_LINE_READER_BUFFER_SIZE));
    job.setInputFormat(inputFormat.getClass());
    lineReader = (LineRecordReader) inputFormat.getRecordReader(split, job, Reporter.NULL);
    lineNumber = lineReader.createKey();
  } catch (NoSuchMethodException | MissingDissectorsException | InvalidDissectorException e) {
    // Parser construction failures (bad log-format spec / dissector setup).
    throw handleAndGenerate("Failure creating HttpdParser", e);
  } catch (IOException e) {
    // File-system / reader-open failures.
    throw handleAndGenerate("Failure creating HttpdRecordReader", e);
  }
}