/**
 * Builds a record reader backed by {@link VectorizedOrcInputFormat} for the given split.
 *
 * <p>The vectorized reader is parameterized differently from this class's declared
 * reader type, so the result is cast through the raw {@code RecordReader} type; the
 * {@code @SuppressWarnings("unchecked")} covers that deliberate raw cast.
 *
 * @param split    the input split to read
 * @param conf     job configuration passed through to the vectorized format
 * @param reporter progress reporter passed through to the vectorized format
 * @return the vectorized reader, viewed as a {@code RecordReader<NullWritable, OrcStruct>}
 * @throws IOException if the underlying format fails to open the split
 */
@SuppressWarnings("unchecked")
private org.apache.hadoop.mapred.RecordReader<NullWritable, OrcStruct>
    createVectorizedReader(InputSplit split, JobConf conf, Reporter reporter)
    throws IOException {
  VectorizedOrcInputFormat vectorizedFormat = new VectorizedOrcInputFormat();
  return (org.apache.hadoop.mapred.RecordReader)
      vectorizedFormat.getRecordReader(split, conf, reporter);
}
/**
 * Delegates reader creation to {@link VectorizedOrcInputFormat} for this split.
 *
 * <p>A raw-type cast bridges the vectorized format's reader type to the
 * {@code <NullWritable, OrcStruct>} signature declared here, hence the
 * {@code unchecked} suppression on the method.
 *
 * @param split    split to open
 * @param conf     job configuration forwarded unchanged
 * @param reporter reporter forwarded unchanged
 * @return a vectorized record reader cast to this class's reader type
 * @throws IOException propagated from {@code getRecordReader}
 */
@SuppressWarnings("unchecked")
private org.apache.hadoop.mapred.RecordReader<NullWritable, OrcStruct>
    createVectorizedReader(InputSplit split, JobConf conf, Reporter reporter)
    throws IOException {
  org.apache.hadoop.mapred.RecordReader vectorizedReader =
      new VectorizedOrcInputFormat().getRecordReader(split, conf, reporter);
  return vectorizedReader;
}
@Override public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files ) throws IOException { if (Utilities.getIsVectorized(conf)) { return new VectorizedOrcInputFormat().validateInput(fs, conf, files); } if (files.size() <= 0) { return false; } for (FileStatus file : files) { if (!HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE).equals("mr")) { // 0 length files cannot be ORC files, not valid for MR. if (file.getLen() == 0) { return false; } } try { OrcFile.createReader(file.getPath(), OrcFile.readerOptions(conf).filesystem(fs).maxLength(file.getLen())); } catch (IOException e) { return false; } } return true; }
@Override public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files ) throws IOException { if (Utilities.getUseVectorizedInputFileFormat(conf)) { return new VectorizedOrcInputFormat().validateInput(fs, conf, files); } if (files.size() <= 0) { return false; } for (FileStatus file : files) { // 0 length files cannot be ORC files if (file.getLen() == 0) { return false; } try { OrcFile.createReader(file.getPath(), OrcFile.readerOptions(conf).filesystem(fs).maxLength(file.getLen())); } catch (IOException e) { return false; } } return true; }
/**
 * Obtains a vectorized ORC reader for {@code split} by delegating to a fresh
 * {@link VectorizedOrcInputFormat}.
 *
 * <p>The raw {@code RecordReader} cast (covered by the {@code unchecked}
 * suppression) adapts the vectorized reader to this method's declared
 * {@code <NullWritable, OrcStruct>} return type.
 *
 * @param split    split to read from
 * @param conf     job configuration
 * @param reporter MapReduce progress reporter
 * @return the vectorized record reader
 * @throws IOException if the vectorized format cannot create the reader
 */
@SuppressWarnings("unchecked")
private org.apache.hadoop.mapred.RecordReader<NullWritable, OrcStruct>
    createVectorizedReader(InputSplit split, JobConf conf, Reporter reporter)
    throws IOException {
  VectorizedOrcInputFormat delegate = new VectorizedOrcInputFormat();
  return (org.apache.hadoop.mapred.RecordReader)
      delegate.getRecordReader(split, conf, reporter);
}
/**
 * Validates that every file in the list opens as an ORC file.
 *
 * <p>In vector mode, validation is handed off entirely to
 * {@link VectorizedOrcInputFormat}. Otherwise the list must be non-empty and each
 * file must be openable via {@link OrcFile#createReader}; any open failure marks the
 * whole input invalid. Note this variant applies no explicit zero-length or
 * {@code maxLength} handling — an unreadable file simply fails the open attempt.
 *
 * @param fs    filesystem containing the files
 * @param conf  Hive configuration
 * @param files files to check (ArrayList required by this overload's signature)
 * @return true when all files validate, false otherwise
 * @throws IOException only from the vectorized delegate
 */
@Override
public boolean validateInput(FileSystem fs, HiveConf conf, ArrayList<FileStatus> files)
    throws IOException {
  if (Utilities.isVectorMode(conf)) {
    return new VectorizedOrcInputFormat().validateInput(fs, conf, files);
  }
  if (files.size() <= 0) {
    return false;
  }
  for (FileStatus candidate : files) {
    try {
      OrcFile.createReader(candidate.getPath(), OrcFile.readerOptions(conf).filesystem(fs));
    } catch (IOException notOrc) {
      // Could not be opened as ORC -> reject the whole set.
      return false;
    }
  }
  return true;
}