@Override
public void close()
{
    closed = true;
    try {
        recordReader.close();
    }
    catch (IOException e) {
        throw new PrestoException(RAPTOR_ERROR, e);
    }
}
@Override
public void close()
{
    // some hive input formats are broken and bad things can happen if you close them multiple times
    if (closed) {
        return;
    }
    closed = true;
    try {
        stats.addMaxCombinedBytesPerRow(recordReader.getMaxCombinedBytesPerRow());
        recordReader.close();
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
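A minimal, self-contained sketch of the guarded-close pattern used above, assuming nothing beyond the JDK. The GuardedCloser class and its delegate field are hypothetical, not taken from the Presto code: the boolean flag makes close() idempotent, and the checked IOException is wrapped in UncheckedIOException, matching the Hive variant above.

import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;

// Hypothetical wrapper that only illustrates the pattern; not part of the Presto code.
public class GuardedCloser
        implements Closeable
{
    private final Closeable delegate;
    private boolean closed;

    public GuardedCloser(Closeable delegate)
    {
        this.delegate = delegate;
    }

    @Override
    public void close()
    {
        // make close() idempotent: some underlying readers misbehave if closed more than once
        if (closed) {
            return;
        }
        closed = true;
        try {
            delegate.close();
        }
        catch (IOException e) {
            // wrap the checked exception, as the Hive close() above does
            throw new UncheckedIOException(e);
        }
    }
}

The idempotence guard matters because callers may invoke close() more than once; the first close() shown above takes a different route for failures, translating the IOException into a PrestoException with an engine-specific error code instead of an unchecked wrapper.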
private RowBlock read(TempFile tempFile, Type readerType)
        throws IOException
{
    DataSize dataSize = new DataSize(1, MEGABYTE);
    OrcDataSource orcDataSource = new FileOrcDataSource(tempFile.getFile(), dataSize, dataSize, dataSize, true);
    OrcReader orcReader = new OrcReader(orcDataSource, ORC, dataSize, dataSize, dataSize, dataSize);
    Map<Integer, Type> includedColumns = new HashMap<>();
    includedColumns.put(0, readerType);
    OrcRecordReader recordReader = orcReader.createRecordReader(includedColumns, OrcPredicate.TRUE, UTC, newSimpleAggregatedMemoryContext(), OrcReader.INITIAL_BATCH_SIZE);
    recordReader.nextBatch();
    RowBlock block = (RowBlock) recordReader.readBlock(readerType, 0);
    recordReader.close();
    return block;
}
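The helper above closes the record reader only on the success path; if nextBatch() or readBlock() throws, the reader is never closed. A minimal try/finally variation, reusing only the calls and imports already shown in the helper, could look like this:

private RowBlock read(TempFile tempFile, Type readerType)
        throws IOException
{
    // same setup as the helper above; assumes the same static imports (MEGABYTE, ORC, UTC, ...)
    DataSize dataSize = new DataSize(1, MEGABYTE);
    OrcDataSource orcDataSource = new FileOrcDataSource(tempFile.getFile(), dataSize, dataSize, dataSize, true);
    OrcReader orcReader = new OrcReader(orcDataSource, ORC, dataSize, dataSize, dataSize, dataSize);
    Map<Integer, Type> includedColumns = new HashMap<>();
    includedColumns.put(0, readerType);
    OrcRecordReader recordReader = orcReader.createRecordReader(includedColumns, OrcPredicate.TRUE, UTC, newSimpleAggregatedMemoryContext(), OrcReader.INITIAL_BATCH_SIZE);
    try {
        recordReader.nextBatch();
        return (RowBlock) recordReader.readBlock(readerType, 0);
    }
    finally {
        // close even when nextBatch() or readBlock() throws, so the underlying file handle is released
        recordReader.close();
    }
}

Try-with-resources would be tidier still, but only if OrcRecordReader implements AutoCloseable, which the snippet above does not establish; the sketch therefore sticks to plain try/finally.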
reader.close();