// NOTE(review): this line is truncated extraction residue. The method body is cut off
// mid-statement — the final .put(...READ_BLOCKlET_TIME, readTime) call has no closing
// brace for the method, and queryStatisticValidScanBlocklet, totalNumberOfPages,
// validPages, scannedPages, scanTime and readTime are used with no visible declarations
// (presumably `QueryStatistic x = new QueryStatistic();` lines were lost). The method
// name also carries a typo ("Statitics"); callers elsewhere depend on it, so it must not
// be renamed here. Left byte-identical — recover the full method from the original
// source before editing.
private void initQueryStatiticsModel() { this.queryStatisticsModel = new QueryStatisticsModel(); this.queryStatisticsModel.setRecorder(recorder); QueryStatistic queryStatisticTotalBlocklet = new QueryStatistic(); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, queryStatisticTotalBlocklet); queryStatisticsModel.getRecorder().recordStatistics(queryStatisticTotalBlocklet); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM, queryStatisticValidScanBlocklet); queryStatisticsModel.getRecorder().recordStatistics(queryStatisticValidScanBlocklet); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.TOTAL_PAGE_SCANNED, totalNumberOfPages); queryStatisticsModel.getRecorder().recordStatistics(totalNumberOfPages); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages); queryStatisticsModel.getRecorder().recordStatistics(validPages); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.PAGE_SCANNED, scannedPages); queryStatisticsModel.getRecorder().recordStatistics(scannedPages); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime); queryStatisticsModel.getRecorder().recordStatistics(scanTime); queryStatisticsModel.getStatisticsTypeAndObjMap() .put(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime);
private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) { long startTime = System.currentTimeMillis(); // if list is not empty after filling the dimension data then only fill the measure data if (!listBasedResult.isEmpty()) { fillMeasureDataBatch(listBasedResult, 1, scannedResult); } QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.MEASURE_FILLING_TIME); measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME, measureFillingTime.getCount() + (System.currentTimeMillis() - startTime)); }
/**
 * Decodes this page and fills the associated vector, lazily triggering the
 * blocklet load first when the raw column chunk has not been read yet. When
 * statistics collection is enabled, the decode time is accumulated under
 * {@code QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME}.
 */
public void loadPage() {
  // pull in the raw blocklet data on first touch
  if (null == lazyChunkWrapper.getRawColumnChunk()) {
    try {
      lazyBlockletLoader.load();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  final long decodeStart = System.currentTimeMillis();
  if (!isMeasure) {
    ((DimensionRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
        .convertToDimColDataChunkAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
  } else {
    ((MeasureRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
        .convertToColumnPageAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
  }
  if (queryStatisticsModel.isEnabled()) {
    QueryStatistic uncompressStat = queryStatisticsModel.getStatisticsTypeAndObjMap()
        .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
    uncompressStat.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
        uncompressStat.getCount() + (System.currentTimeMillis() - decodeStart));
  }
}
private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) { long startTime = System.currentTimeMillis(); // if list is not empty after filling the dimension data then only fill the measure data if (!listBasedResult.isEmpty()) { fillMeasureDataBatch(listBasedResult, 1, scannedResult); } QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.MEASURE_FILLING_TIME); measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME, measureFillingTime.getCount() + (System.currentTimeMillis() - startTime)); }
private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) { long startTime = System.currentTimeMillis(); // if list is not empty after filling the dimension data then only fill the measure data if (!listBasedResult.isEmpty()) { fillMeasureDataBatch(listBasedResult, 1, scannedResult); } QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.MEASURE_FILLING_TIME); measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME, measureFillingTime.getCount() + (System.currentTimeMillis() - startTime)); } }
@Override public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException { long startTime = System.currentTimeMillis(); this.filterExecuter.readColumnChunks(rawBlockletColumnChunks); // adding statistics for carbon read time QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.READ_BLOCKlET_TIME); readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime.getCount() + (System.currentTimeMillis() - startTime)); }
/** * This method will add a record both key and value to list object * it will keep track of how many record is processed, to handle limit scenario */ @Override public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) { long startTime = System.currentTimeMillis(); List<Object[]> listBasedResult = new ArrayList<>(batchSize); ProjectionMeasure[] queryMeasures = executionInfo.getProjectionMeasures(); // scan the record and add to list scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures); QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.RESULT_PREP_TIME); resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME, resultPrepTime.getCount() + (System.currentTimeMillis() - startTime)); return listBasedResult; }
/** * This method will add a record both key and value to list object * it will keep track of how many record is processed, to handle limit scenario */ @Override public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) { long startTime = System.currentTimeMillis(); List<Object[]> listBasedResult = new ArrayList<>(batchSize); ProjectionMeasure[] queryMeasures = executionInfo.getProjectionMeasures(); // scan the record and add to list scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures); QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.RESULT_PREP_TIME); resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME, resultPrepTime.getCount() + (System.currentTimeMillis() - startTime)); return listBasedResult; }
/**
 * Decompresses all dimension and measure pages for the current page counter.
 * Used only in the compaction flow, which does not go through the filter path.
 * Decode time is accumulated under
 * {@code QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME}.
 */
public void fillDataChunks() {
  freeDataChunkMemory();
  // nothing left to decode once the page counter runs past the row-count table
  if (pageCounter >= pageFilteredRowCount.length) {
    return;
  }
  final long begin = System.currentTimeMillis();
  for (int dim = 0; dim < dimensionColumnPages.length; dim++) {
    boolean pending = dimensionColumnPages[dim][pageCounter] == null;
    if (pending && dimRawColumnChunks[dim] != null) {
      dimensionColumnPages[dim][pageCounter] =
          dimRawColumnChunks[dim].convertToDimColDataChunkWithOutCache(pageCounter, null);
    }
  }
  for (int msr = 0; msr < measureColumnPages.length; msr++) {
    boolean pending = measureColumnPages[msr][pageCounter] == null;
    if (pending && msrRawColumnChunks[msr] != null) {
      measureColumnPages[msr][pageCounter] =
          msrRawColumnChunks[msr].convertToColumnPageWithOutCache(pageCounter, null);
    }
  }
  QueryStatistic uncompressStat = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
  uncompressStat.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
      uncompressStat.getCount() + (System.currentTimeMillis() - begin));
}
private void fillDimensionData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures, int batchSize) { long startTime = System.currentTimeMillis(); List<byte[]> dictionaryKeyArrayBatch = scannedResult.getDictionaryKeyArrayBatch(batchSize); List<byte[][]> noDictionaryKeyArrayBatch = scannedResult.getNoDictionaryKeyArrayBatch(batchSize); List<byte[][]> complexTypeKeyArrayBatch = scannedResult.getComplexTypeKeyArrayBatch(batchSize); // it will same for one blocklet so can be computed only once byte[] implicitColumnByteArray = scannedResult.getBlockletId() .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)); // Note: size check in for loop is for dictionaryKeyArrayBatch as this size can be lesser than // batch size in case of IUD scenarios for (int i = 0; i < dictionaryKeyArrayBatch.size(); i++) { // 1 for ByteArrayWrapper object which will contain dictionary and no dictionary data Object[] row = new Object[1 + queryMeasures.length]; ByteArrayWrapper wrapper = new ByteArrayWrapper(); wrapper.setDictionaryKey(dictionaryKeyArrayBatch.get(i)); wrapper.setNoDictionaryKeys(noDictionaryKeyArrayBatch.get(i)); wrapper.setComplexTypesKeys(complexTypeKeyArrayBatch.get(i)); wrapper.setImplicitColumnByteArray(implicitColumnByteArray); row[0] = wrapper; listBasedResult.add(row); } QueryStatistic keyColumnFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME); keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME, keyColumnFillingTime.getCount() + (System.currentTimeMillis() - startTime)); }
/** * This method will add a record both key and value to list object * it will keep track of how many record is processed, to handle limit scenario */ @Override public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) { long startTime = System.currentTimeMillis(); List<Object[]> listBasedResult = new ArrayList<>(batchSize); ProjectionMeasure[] queryMeasures = executionInfo.getActualQueryMeasures(); // scan the record and add to list scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures); // re-fill dictionary and no dictionary key arrays for the newly added columns if (dimensionInfo.isDictionaryColumnAdded()) { fillDictionaryKeyArrayBatchWithLatestSchema(listBasedResult); } if (dimensionInfo.isNoDictionaryColumnAdded()) { fillNoDictionaryKeyArrayBatchWithLatestSchema(listBasedResult); } QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.RESULT_PREP_TIME); resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME, resultPrepTime.getCount() + (System.currentTimeMillis() - startTime)); return listBasedResult; }
// NOTE(review): truncated fragment — the addCountStatistic(...) call is cut off
// mid-argument-list and the surrounding method (presumably a key-column filling loop,
// compare the complete fillDimensionData elsewhere in this file) is missing. Left
// byte-identical; restore from the original source before editing.
listBasedResult.add(row); QueryStatistic keyColumnFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME); keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME,
// NOTE(review): truncated fragment (duplicate of the previous one) — the
// addCountStatistic(...) call ends mid-argument-list and the enclosing method is not
// visible. Left byte-identical; recover the complete method from the original source.
listBasedResult.add(row); QueryStatistic keyColumnFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME); keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME,
@Override public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException { long startTime = System.currentTimeMillis(); DimensionRawColumnChunk[] dimensionRawColumnChunks = rawBlockletColumnChunks.getDataBlock() .readDimensionChunks(rawBlockletColumnChunks.getFileReader(), blockExecutionInfo.getAllSelectedDimensionColumnIndexRange()); rawBlockletColumnChunks.setDimensionRawColumnChunks(dimensionRawColumnChunks); MeasureRawColumnChunk[] measureRawColumnChunks = rawBlockletColumnChunks.getDataBlock() .readMeasureChunks(rawBlockletColumnChunks.getFileReader(), blockExecutionInfo.getAllSelectedMeasureIndexRange()); rawBlockletColumnChunks.setMeasureRawColumnChunks(measureRawColumnChunks); // adding statistics for carbon read time QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.READ_BLOCKlET_TIME); readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime.getCount() + (System.currentTimeMillis() - startTime)); }
// NOTE(review): garbled extraction residue from what appears to be a filter-scan method
// (signature head is missing; the fragment starts at its throws clause). Several
// addCountStatistic(...) calls are cut off mid-argument-list, the bare statements
// "scannedPages" / "validScannedBlockletStatistic" are incomplete, and the locals
// scannedPages and scanTime are each declared twice — this would not compile as-is.
// Left byte-identical; reconstruct from the original source before editing.
throws FilterUnsupportedException, IOException { long startTime = System.currentTimeMillis(); QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM); totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME); scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime.getCount() + (System.currentTimeMillis() - startTime)); QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.PAGE_SCANNED); scannedPages QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM); validScannedBlockletStatistic QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_PAGE_SCANNED); validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages.getCount() + pages.cardinality()); QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.PAGE_SCANNED); scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
// NOTE(review): garbled extraction residue, near-duplicate of the previous fragment
// but using bitSetGroup.getValidPages() for the valid-page count. The method signature
// head is missing, multiple addCountStatistic(...) calls are truncated mid-argument,
// "validScannedBlockletStatistic" stands alone as an incomplete statement, and
// scannedPages / scanTime are declared twice. Not compilable as-is; left byte-identical.
throws FilterUnsupportedException, IOException { long startTime = System.currentTimeMillis(); QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM); totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME); scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime.getCount() + (System.currentTimeMillis() - startTime)); QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.PAGE_SCANNED); scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM); validScannedBlockletStatistic QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_PAGE_SCANNED); validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages.getCount() + bitSetGroup.getValidPages()); QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.PAGE_SCANNED); scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
// NOTE(review): garbled extraction residue from what appears to be a non-filter scan
// method (it constructs a NonFilterQueryScannedResult). The enclosing method signature
// is missing, "validScannedBlockletStatistic" appears as an incomplete statement with a
// stray trailing argument "validScannedBlockletStatistic.getCount() + 1);", and the
// totalPagesScanned / scanTime addCountStatistic(...) calls are cut off mid-argument.
// Not compilable as-is; left byte-identical — restore from the original source.
BlockletScannedResult scannedResult = new NonFilterQueryScannedResult(blockExecutionInfo, queryStatisticsModel); QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM); totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, totalBlockletStatistic.getCount() + 1); QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM); validScannedBlockletStatistic validScannedBlockletStatistic.getCount() + 1); QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.VALID_PAGE_SCANNED); validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages.getCount() + rawBlockletColumnChunks.getDataBlock().numberOfPages()); QueryStatistic totalPagesScanned = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.TOTAL_PAGE_SCANNED); totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED, QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME); scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
// NOTE(review): truncated fragment — the addCountStatistic(...) call for
// READ_BLOCKlET_TIME is cut off mid-argument-list and the enclosing method is not
// visible. Left byte-identical; recover the complete method from the original source.
QueryStatistic time = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.READ_BLOCKlET_TIME); time.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
// NOTE(review): truncated fragment — isScanRequired(DataRefNode) is cut off
// mid-statement (the TOTAL_PAGE_SCANNED addCountStatistic call has no closing
// arguments, and no return statement or closing brace is visible). Left
// byte-identical; restore the full method from the original source before editing.
@Override public boolean isScanRequired(DataRefNode dataBlock) { QueryStatistic totalPagesScanned = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.TOTAL_PAGE_SCANNED); totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,