.readByteBuffer(filePath, measureColumnChunkOffsets.get(blockletColumnIndex), measureColumnChunkLength.get(blockletColumnIndex));
/**
 * The method calculates the B-Tree metadata size.
 *
 * @param tableBlockInfo
 * @return
 */
public static long calculateMetaSize(TableBlockInfo tableBlockInfo) throws IOException {
  FileReader fileReader = null;
  try {
    long completeBlockLength = tableBlockInfo.getBlockLength();
    long footerPointer = completeBlockLength - 8;
    String filePath = tableBlockInfo.getFilePath();
    fileReader = FileFactory.getFileHolder(FileFactory.getFileType(filePath));
    long actualFooterOffset = fileReader.readLong(filePath, footerPointer);
    return footerPointer - actualFooterOffset;
  } finally {
    if (null != fileReader) {
      try {
        fileReader.finish();
      } catch (IOException e) {
        // ignore the exception as nothing we can do about it
        fileReader = null;
      }
    }
  }
}
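/*
 * For illustration only: a minimal, self-contained sketch of the layout the method above relies
 * on, assuming the last 8 bytes of a carbondata file hold the offset at which the footer starts.
 * It uses plain java.io instead of CarbonData's FileReader, so the class name, helper name and
 * the local-file assumption are hypothetical and not part of the original code.
 */
import java.io.IOException;
import java.io.RandomAccessFile;

public final class FooterSizeSketch {
  static long footerSize(String localFilePath) throws IOException {
    try (RandomAccessFile file = new RandomAccessFile(localFilePath, "r")) {
      long footerPointer = file.length() - 8;     // position of the trailing offset field
      file.seek(footerPointer);
      long footerStartOffset = file.readLong();   // where the footer (metadata) begins
      return footerPointer - footerStartOffset;   // bytes occupied by the footer itself
    }
  }
}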
@Override
public MeasureRawColumnChunk readRawMeasureChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  int dataLength = 0;
  if (measureColumnChunkOffsets.size() - 1 == columnIndex) {
    // for the last column there is no next offset to diff against, so read the metadata
    // chunk first to find out how long the data page is
    DataChunk2 metadataChunk = null;
    synchronized (fileReader) {
      metadataChunk = CarbonUtil.readDataChunk(ByteBuffer.wrap(fileReader
              .readByteArray(filePath, measureColumnChunkOffsets.get(columnIndex),
                  measureColumnChunkLength.get(columnIndex))), 0,
          measureColumnChunkLength.get(columnIndex));
    }
    dataLength = measureColumnChunkLength.get(columnIndex) + metadataChunk.data_page_length;
  } else {
    long currentMeasureOffset = measureColumnChunkOffsets.get(columnIndex);
    dataLength = (int) (measureColumnChunkOffsets.get(columnIndex + 1) - currentMeasureOffset);
  }
  ByteBuffer buffer = null;
  synchronized (fileReader) {
    buffer = fileReader
        .readByteBuffer(filePath, measureColumnChunkOffsets.get(columnIndex), dataLength);
  }
  MeasureRawColumnChunk rawColumnChunk =
      new MeasureRawColumnChunk(columnIndex, buffer, 0, dataLength, this);
  rawColumnChunk.setFileReader(fileReader);
  rawColumnChunk.setPagesCount(1);
  rawColumnChunk.setRowCount(new int[] { numberOfRows });
  return rawColumnChunk;
}
/**
 * This method returns the version details as a formatted string by reading the carbondata file
 *
 * @param dataFilePath
 * @return
 * @throws IOException
 */
public static String getVersionDetails(String dataFilePath) throws IOException {
  long fileSize =
      FileFactory.getCarbonFile(dataFilePath, FileFactory.getFileType(dataFilePath)).getSize();
  FileReader fileReader = FileFactory.getFileHolder(FileFactory.getFileType(dataFilePath));
  ByteBuffer buffer =
      fileReader.readByteBuffer(FileFactory.getUpdatedFilePath(dataFilePath), fileSize - 8, 8);
  fileReader.finish();
  CarbonFooterReaderV3 footerReader = new CarbonFooterReaderV3(dataFilePath, buffer.getLong());
  FileFooter3 footer = footerReader.readFooterVersion3();
  if (null != footer.getExtra_info()) {
    return footer.getExtra_info().get(CarbonCommonConstants.CARBON_WRITTEN_BY_FOOTER_INFO)
        + " in version: " + footer.getExtra_info()
        .get(CarbonCommonConstants.CARBON_WRITTEN_VERSION);
  } else {
    return "Version Details are not found in carbondata file";
  }
}
}
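/*
 * Self-contained illustration of the trailer decoding the method above depends on: the 8 bytes
 * read from the end of the file are interpreted with ByteBuffer.getLong() (big-endian) as the
 * footer start offset, which is then handed to the footer reader. The byte values used here are
 * invented purely for the example.
 */
import java.nio.ByteBuffer;

public final class FooterOffsetSketch {
  public static void main(String[] args) {
    ByteBuffer trailer = ByteBuffer.allocate(8);
    trailer.putLong(123_456L);              // pretend the writer recorded the footer at byte 123456
    trailer.flip();
    long footerOffset = trailer.getLong();  // same decoding step the reader path above performs
    System.out.println("footer starts at byte " + footerOffset);
  }
}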
@Override
public void close() {
  if (null != dataBlockIterator) {
    dataBlockIterator.close();
  }
  try {
    fileReader.finish();
  } catch (IOException e) {
    LOGGER.error(e);
  }
}
private MeasureColumnChunkReader getMeasureColumnChunkReader(FileReader fileReader) {
  ColumnarFormatVersion version =
      ColumnarFormatVersion.valueOf(blockInfos.get(index).getDetailInfo().getVersionNumber());
  if (fileReader.isReadPageByPage()) {
    return CarbonDataReaderFactory.getInstance().getMeasureColumnChunkReader(version,
        blockInfos.get(index).getDetailInfo().getBlockletInfo(),
        blockInfos.get(index).getFilePath(), true);
  } else {
    return CarbonDataReaderFactory.getInstance().getMeasureColumnChunkReader(version,
        blockInfos.get(index).getDetailInfo().getBlockletInfo(),
        blockInfos.get(index).getFilePath(), false);
  }
}
AbstractDetailQueryResultIterator(List<BlockExecutionInfo> infos, QueryModel queryModel,
    ExecutorService execService) {
  String batchSizeString =
      CarbonProperties.getInstance().getProperty(CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE);
  if (null != batchSizeString) {
    try {
      batchSize = Integer.parseInt(batchSizeString);
    } catch (NumberFormatException ne) {
      LOGGER.error("Invalid inmemory records size. Using default value");
      batchSize = CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE_DEFAULT;
    }
  } else {
    batchSize = CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE_DEFAULT;
  }
  this.recorder = queryModel.getStatisticsRecorder();
  this.blockExecutionInfos = infos;
  this.fileReader = FileFactory.getFileHolder(
      FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
  this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
  this.execService = execService;
  intialiseInfos();
  initQueryStatiticsModel();
}
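/*
 * Hedged configuration sketch for the batch size consumed in the constructor above. It assumes
 * CarbonProperties.addProperty(key, value) is the supported way to override the property keyed
 * by CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE; the value "200" is only an example. As the
 * constructor shows, a value that does not parse as an integer falls back to
 * DETAIL_QUERY_BATCH_SIZE_DEFAULT.
 */
CarbonProperties.getInstance()
    .addProperty(CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE, "200");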
synchronized (fileReader) {
  columnIndexData = fileReader
      .readByteArray(filePath, dataChunk.getRowIdPageOffset(), dataChunk.getRowIdPageLength());
}
synchronized (fileReader) {
  key = fileReader
      .readByteArray(filePath, dataChunk.getRlePageOffset(), dataChunk.getRlePageLength());
}
private DataBlockIterator getDataBlockIterator() {
  if (blockExecutionInfos.size() > 0) {
    try {
      // close any streams the reader still holds from the previous block before it is
      // reused for the next block's iterator
      fileReader.finish();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    BlockExecutionInfo executionInfo = blockExecutionInfos.get(0);
    blockExecutionInfos.remove(executionInfo);
    return new DataBlockIterator(executionInfo, fileReader, batchSize, queryStatisticsModel,
        execService);
  }
  return null;
}
private DimensionColumnChunkReader getDimensionColumnChunkReader(FileReader fileReader) {
  ColumnarFormatVersion version =
      ColumnarFormatVersion.valueOf(blockInfos.get(index).getDetailInfo().getVersionNumber());
  if (fileReader.isReadPageByPage()) {
    return CarbonDataReaderFactory.getInstance().getDimensionColumnChunkReader(version,
        blockInfos.get(index).getDetailInfo().getBlockletInfo(), dimensionLens,
        blockInfos.get(index).getFilePath(), true);
  } else {
    return CarbonDataReaderFactory.getInstance().getDimensionColumnChunkReader(version,
        blockInfos.get(index).getDetailInfo().getBlockletInfo(), dimensionLens,
        blockInfos.get(index).getFilePath(), false);
  }
}
.readByteBuffer(filePath, measureColumnChunkOffsets.get(columnIndex), dataLength);
@Override
public List<ColumnSchema> getSchema(TableBlockInfo tableBlockInfo) throws IOException {
  FileReader fileReader = null;
  List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
  try {
    long completeBlockLength = tableBlockInfo.getBlockLength();
    long footerPointer = completeBlockLength - 8;
    fileReader =
        FileFactory.getFileHolder(FileFactory.getFileType(tableBlockInfo.getFilePath()));
    long actualFooterOffset = fileReader.readLong(tableBlockInfo.getFilePath(), footerPointer);
    CarbonFooterReader reader =
        new CarbonFooterReader(tableBlockInfo.getFilePath(), actualFooterOffset);
    FileFooter footer = reader.readFooter();
    List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
    for (int i = 0; i < table_columns.size(); i++) {
      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
    }
  } finally {
    if (null != fileReader) {
      fileReader.finish();
    }
  }
  return columnSchemaList;
}
}
public void scan() throws Exception {
  BlockletScannedResult blockletScannedResult = null;
  while (blockletIterator.hasNext()) {
    DataRefNode dataBlock = blockletIterator.next();
    // scan the blocklet only if no max-value statistics exist or the scanner's
    // min/max check says it may contain matching rows
    if (dataBlock.getColumnsMaxValue() == null || blockletScanner.isScanRequired(dataBlock)) {
      RawBlockletColumnChunks rawBlockletColumnChunks = RawBlockletColumnChunks.newInstance(
          blockExecutionInfo.getTotalNumberDimensionToRead(),
          blockExecutionInfo.getTotalNumberOfMeasureToRead(), fileReader, dataBlock);
      blockletScanner.readBlocklet(rawBlockletColumnChunks);
      blockletScannedResult = blockletScanner.scanBlocklet(rawBlockletColumnChunks);
      if (blockletScannedResult != null && blockletScannedResult.hasNext()) {
        scannedResults.add(blockletScannedResult);
      }
    }
  }
  fileReader.finish();
}
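/*
 * Illustrative, plain-Java sketch of the pruning decision taken in the loop above: a block is
 * scanned when it carries no max-value statistics, or when the filter value can fall inside its
 * [min, max] range. This is not the CarbonData API; Long values stand in for the byte-array
 * comparisons the real scanner performs, and all values are invented for the example.
 */
public final class ScanPruningSketch {
  static boolean isScanRequired(Long min, Long max, long filterValue) {
    if (min == null || max == null) {
      return true;                                      // no statistics recorded: must scan
    }
    return filterValue >= min && filterValue <= max;    // skip blocks the filter cannot match
  }

  public static void main(String[] args) {
    System.out.println(isScanRequired(10L, 20L, 15L));   // true: value may be present
    System.out.println(isScanRequired(10L, 20L, 42L));   // false: block can be skipped
    System.out.println(isScanRequired(null, null, 42L)); // true: no stats, cannot prune
  }
}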
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, (int) (dimensionChunksOffset.get(endBlockletColumnIndex + 1) - currentDimensionOffset));
long footerPointer = completeBlockLength - 8;
fileReader = FileFactory.getFileHolder(FileFactory.getFileType(tableBlockInfo.getFilePath()));
long actualFooterOffset = fileReader.readLong(tableBlockInfo.getFilePath(), footerPointer);
CarbonFooterReader reader =
    new CarbonFooterReader(tableBlockInfo.getFilePath(), actualFooterOffset);
} finally {
  if (null != fileReader) {
    fileReader.finish();
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, dimensionChunksLength.get(blockletColumnIndex));
buffer = fileReader.readByteBuffer(filePath, currentMeasureOffset, (int) (measureColumnChunkOffsets.get(endColumnIndex + 1) - currentMeasureOffset));
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, length);
/**
 * Below method will be used to read the chunk based on the block index
 *
 * @param fileReader file reader to read the blocks from file
 * @param columnIndex column to be read
 * @return dimension column chunk
 */
public DimensionRawColumnChunk readRawDimensionChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  int length = 0;
  if (dimensionChunksOffset.size() - 1 == columnIndex) {
    // In case of the last block, read only the data chunk length here; the remaining bytes
    // are read while converting the chunk.
    length = dimensionChunksLength.get(columnIndex);
  } else {
    long currentDimensionOffset = dimensionChunksOffset.get(columnIndex);
    length = (int) (dimensionChunksOffset.get(columnIndex + 1) - currentDimensionOffset);
  }
  ByteBuffer buffer = null;
  synchronized (fileReader) {
    buffer = fileReader.readByteBuffer(filePath, dimensionChunksOffset.get(columnIndex), length);
  }
  DimensionRawColumnChunk rawColumnChunk =
      new DimensionRawColumnChunk(columnIndex, buffer, 0, length, this);
  rawColumnChunk.setFileReader(fileReader);
  rawColumnChunk.setPagesCount(1);
  rawColumnChunk.setRowCount(new int[] { numberOfRows });
  return rawColumnChunk;
}
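/*
 * Minimal, self-contained sketch of the length computation used above: for every column except
 * the last, the chunk length is simply the distance to the next column's offset. The offsets
 * below are made-up example values, not taken from any real file.
 */
import java.util.Arrays;
import java.util.List;

public final class ChunkLengthSketch {
  public static void main(String[] args) {
    List<Long> chunkOffsets = Arrays.asList(0L, 1024L, 4096L, 5000L);
    int columnIndex = 1;
    if (columnIndex < chunkOffsets.size() - 1) {
      long length = chunkOffsets.get(columnIndex + 1) - chunkOffsets.get(columnIndex);
      System.out.println("column " + columnIndex + " occupies " + length + " bytes"); // 3072
    } else {
      System.out.println("last column: length must come from the chunk's own metadata");
    }
  }
}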
ByteBuffer buffer = null;
synchronized (fileReader) {
  buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset,
      (int) (dimensionChunksOffset.get(endColumnBlockletIndex + 1) - currentDimensionOffset));
}