@Override
public List<InputSplit> getSplits(JobContext jobContext)
    throws IOException, InterruptedException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits started");
  }
  Configuration conf = ShimLoader.getHadoopShims().getConfiguration(jobContext);
  // Delegate split computation to the shared ORC split generator, then wrap
  // each OrcSplit so it implements the mapreduce (new API) InputSplit.
  List<OrcSplit> splits =
      OrcInputFormat.generateSplitsInfo(conf, createContext(conf, -1));
  List<InputSplit> result = new ArrayList<InputSplit>(splits.size());
  for (OrcSplit split : splits) {
    result.add(new OrcNewSplit(split));
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits finished");
  }
  return result;
}
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Write to the task's default work file; writer options (stripe size,
  // compression, and so on) are derived from the job configuration.
  Path file = getDefaultWorkFile(context, "");
  return new OrcRecordWriter(file,
      OrcFile.writerOptions(ShimLoader.getHadoopShims().getConfiguration(context)));
}
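For comparison, a minimal sketch of building the same writer options explicitly rather than relying on the conf-driven defaults. The setters shown (compress, stripeSize, bufferSize, inspector) exist on OrcFile.WriterOptions, but the concrete values and the output path are illustrative assumptions:

Configuration conf = new Configuration();
OrcFile.WriterOptions opts = OrcFile.writerOptions(conf)
    .compress(CompressionKind.ZLIB)  // illustrative codec choice
    .stripeSize(64L * 1024 * 1024)   // illustrative: 64 MB stripes
    .bufferSize(256 * 1024);         // illustrative: 256 KB compression buffers
// Note: a row ObjectInspector must also be supplied via opts.inspector(...)
// before rows can be written; omitted here for brevity.
Writer writer = OrcFile.createWriter(new Path("/tmp/example.orc"), opts); // hypothetical path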
@Override
public RecordReader<NullWritable, OrcStruct> createRecordReader(
    InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  FileSplit fileSplit = (FileSplit) inputSplit;
  Path path = fileSplit.getPath();
  Configuration conf = ShimLoader.getHadoopShims().getConfiguration(context);
  // Open the ORC file and restrict reading to this split's byte range;
  // reuse conf rather than fetching it from the shims a second time.
  return new OrcRecordReader(
      OrcFile.createReader(path, OrcFile.readerOptions(conf)),
      conf, fileSplit.getStart(), fileSplit.getLength());
}
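Putting the read path together, here is a minimal, hypothetical usage sketch: a row-counting job that plugs OrcNewInputFormat into the mapreduce API. The mapper class, job name, and paths are invented for illustration; imports from org.apache.hadoop.mapreduce and its lib.input/lib.output packages are omitted to match the snippets above.

// Hypothetical mapper: only the NullWritable/OrcStruct key-value types
// come from the input format above.
public class OrcRowCountMapper
    extends Mapper<NullWritable, OrcStruct, Text, LongWritable> {
  private static final LongWritable ONE = new LongWritable(1);

  @Override
  protected void map(NullWritable key, OrcStruct value, Context context)
      throws IOException, InterruptedException {
    context.write(new Text("rows"), ONE);
  }
}

// Hypothetical driver wiring (Hadoop 2 Job API assumed).
Job job = Job.getInstance(new Configuration(), "orc-row-count");
job.setJarByClass(OrcRowCountMapper.class);
job.setMapperClass(OrcRowCountMapper.class);
job.setInputFormatClass(OrcNewInputFormat.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
FileInputFormat.addInputPath(job, new Path("/data/orc"));      // hypothetical input
FileOutputFormat.setOutputPath(job, new Path("/data/counts")); // hypothetical output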
// Variant of getSplits that brackets split generation with PerfLogger
// timing instead of debug logging; here generateSplitsInfo takes the
// configuration alone.
@Override
public List<InputSplit> getSplits(JobContext jobContext)
    throws IOException, InterruptedException {
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  List<OrcSplit> splits = OrcInputFormat.generateSplitsInfo(
      ShimLoader.getHadoopShims().getConfiguration(jobContext));
  List<InputSplit> result = new ArrayList<InputSplit>(splits.size());
  for (OrcSplit split : splits) {
    result.add(new OrcNewSplit(split));
  }
  perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  return result;
}
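On the write side, the output format above can be wired into the same kind of driver. Treat this as an assumption-laden sketch: in Hive itself the Writable rows normally come from OrcSerde, so whether a standalone job can feed OrcStruct values directly is version-dependent.

// Sketch: route job output through OrcNewOutputFormat. The value class is
// an assumption; Hive produces the Writable rows via OrcSerde.
job.setOutputFormatClass(OrcNewOutputFormat.class);
job.setOutputKeyClass(NullWritable.class);
job.setOutputValueClass(OrcStruct.class); // assumed row type
FileOutputFormat.setOutputPath(job, new Path("/data/orc-out")); // hypothetical path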