/**
 * Creates the store holding the per-datamap summary information (information that is
 * kept only once per datamap): datamap-level min/max of each column and the partition
 * information of the datamap.
 *
 * @param blockletDataMapModel model deciding whether the store lives in unsafe memory
 * @throws MemoryException if the memory store cannot be created
 */
protected void createSummaryDMStore(BlockletDataMapModel blockletDataMapModel)
    throws MemoryException {
  boolean addToUnsafe = blockletDataMapModel.isAddToUnsafe();
  taskSummaryDMStore = getMemoryDMStore(addToUnsafe);
}
/**
 * Returns the total number of blocks (carbondata files) indexed by this datamap.
 */
@Override
public int getTotalBlocks() {
  // NOTE(review): the legacy branch delegates to super.getTotalBlocklets(); presumably
  // in the legacy store each row represents one block, making the counts equal —
  // confirm against the legacy store writer.
  return isLegacyStore ? super.getTotalBlocklets() : blockNum;
}
@Override protected short getBlockletNumOfEntry(int index) { if (isLegacyStore) { return super.getBlockletNumOfEntry(index); } else { //in blocklet datamap, each entry contains info of one blocklet return 1; } }
/**
 * Reads the raw measure chunk for one column and refreshes its min/max values
 * before handing it back.
 *
 * @param fileReader reader used to access the carbondata file
 * @param columnIndex index of the measure column to read
 * @return the raw measure column chunk with updated min/max values
 * @throws IOException if the chunk cannot be read
 */
@Override
public MeasureRawColumnChunk readMeasureChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  MeasureRawColumnChunk rawChunk =
      getMeasureColumnChunkReader(fileReader).readRawMeasureChunk(fileReader, columnIndex);
  updateMeasureRawColumnChunkMinMaxValues(rawChunk);
  return rawChunk;
}
/**
 * Returns the segment properties by delegating to the first datamap of the segment.
 *
 * @param segment segment whose properties are required
 * @return segment properties taken from the first (Block) datamap
 * @throws IOException if the datamaps cannot be loaded
 */
@Override
public SegmentProperties getSegmentProperties(Segment segment) throws IOException {
  List<CoarseGrainDataMap> dataMaps = getDataMaps(segment);
  // Every segment is expected to carry at least one BlockDataMap.
  assert (!dataMaps.isEmpty());
  CoarseGrainDataMap firstDataMap = dataMaps.get(0);
  assert (firstDataMap instanceof BlockDataMap);
  return ((BlockDataMap) firstDataMap).getSegmentProperties();
}
/**
 * Decides whether min/max values should be used for executor-side pruning
 * for the given filter.
 *
 * @param filterResolverIntf resolved filter to evaluate
 * @return true when min/max based pruning can be applied on the executor
 */
protected boolean useMinMaxForExecutorPruning(FilterResolverIntf filterResolverIntf) {
  return isLegacyStore
      ? super.useMinMaxForExecutorPruning(filterResolverIntf)
      : BlockletDataMapUtil.useMinMaxForBlockletPruning(filterResolverIntf,
          getMinMaxCacheColumns());
}
/** * create dataMap based on cache level * * @param carbonTable * @return */ public static DataMap createDataMap(CarbonTable carbonTable) { boolean cacheLevelBlock = BlockletDataMapUtil.isCacheLevelBlock(carbonTable); if (cacheLevelBlock) { // case1: when CACHE_LEVEL = BLOCK return new BlockDataMap(); } else { // case2: when CACHE_LEVEL = BLOCKLET return new BlockletDataMap(); } }
protected void setMinMaxFlagForTaskSummary(DataMapRow summaryRow, CarbonRowSchema[] taskSummarySchema, SegmentProperties segmentProperties, boolean[] minMaxFlag) { // add min max flag for all the dimension columns boolean[] minMaxFlagValuesForColumnsToBeCached = BlockletDataMapUtil .getMinMaxFlagValuesForColumnsToBeCached(segmentProperties, getMinMaxCacheColumns(), minMaxFlag); addMinMaxFlagValues(summaryRow, taskSummarySchema[TASK_MIN_MAX_FLAG], minMaxFlagValuesForColumnsToBeCached, TASK_MIN_MAX_FLAG); }
/**
 * Reads the raw dimension chunk for one column.
 *
 * @param fileReader reader used to access the carbondata file
 * @param columnIndex index of the dimension column to read
 * @return the raw dimension column chunk
 * @throws IOException if the chunk cannot be read
 */
@Override
public DimensionRawColumnChunk readDimensionChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  return getDimensionColumnChunkReader(fileReader).readRawDimensionChunk(fileReader, columnIndex);
}
/**
 * Returns the data-ref node for the next block info, or null when this node is
 * already the last one.
 */
@Override
public DataRefNode getNextDataRefNode() {
  int nextIndex = index + 1;
  if (nextIndex >= blockInfos.size()) {
    // No further blocks: this is the tail of the chain.
    return null;
  }
  return new BlockletDataRefNode(blockInfos, nextIndex, dimensionLens);
}
public BlockletDataMapIndexWrapper(String segmentId,List<BlockDataMap> dataMaps) { this.dataMaps = dataMaps; this.wrapperSize = 0L; this.segmentId = segmentId; // add the size of each and every datamap in this wrapper for (BlockDataMap dataMap : dataMaps) { this.wrapperSize += dataMap.getMemorySize(); } }
/**
 * Initializes this datamap from the given model; all loading work is delegated
 * to the parent implementation.
 *
 * @param dataMapModel model describing the index data to load
 * @throws IOException if reading the index data fails
 * @throws MemoryException if the memory store cannot be created
 */
@Override public void init(DataMapModel dataMapModel) throws IOException, MemoryException { super.init(dataMapModel); }
/**
 * Returns the total blocklet count by summing the per-block blocklet counts,
 * each stored as one short in the serialized row-count array.
 */
protected int getTotalBlocklets() {
  ByteBuffer rowCounts = ByteBuffer.wrap(getBlockletRowCountForEachBlock());
  int total = 0;
  // Each short entry is the blocklet count of one block.
  while (rowCounts.hasRemaining()) {
    total += rowCounts.getShort();
  }
  return total;
}
/**
 * Extracts the blocklet id carried by the given datamap row.
 */
@Override
protected short getBlockletId(DataMapRow dataMapRow) {
  return isLegacyStore
      ? super.getBlockletId(dataMapRow)
      : dataMapRow.getShort(BLOCKLET_ID_INDEX);
}
/**
 * Finalizes both memory stores: seals the row store, then appends the task summary
 * row (if a summary store exists) and seals that store as well.
 *
 * @param taskSummarySchema schema of the task summary row
 * @param filePath serialized path of the index file
 * @param fileName serialized name of the index file
 * @param segmentId serialized segment id
 * @param summaryRow summary row to persist before sealing
 * @throws MemoryException if writing to the memory store fails
 */
private void finishWriting(CarbonRowSchema[] taskSummarySchema, byte[] filePath, byte[] fileName,
    byte[] segmentId, DataMapRowImpl summaryRow) throws MemoryException {
  if (null != memoryDMStore) {
    memoryDMStore.finishWriting();
  }
  if (null != taskSummaryDMStore) {
    // The summary row must be added before the store is sealed.
    addTaskSummaryRowToUnsafeMemoryStore(taskSummarySchema, summaryRow, filePath, fileName,
        segmentId);
    taskSummaryDMStore.finishWriting();
  }
}
/**
 * Creates the main row store for this datamap's index entries.
 *
 * @param blockletDataMapModel model deciding whether the store lives in unsafe memory
 * @throws MemoryException if the memory store cannot be created
 */
protected void createMemorySchema(BlockletDataMapModel blockletDataMapModel)
    throws MemoryException {
  boolean addToUnsafe = blockletDataMapModel.isAddToUnsafe();
  memoryDMStore = getMemoryDMStore(addToUnsafe);
}
/**
 * Reads the raw measure chunks for the given column ranges and refreshes their
 * min/max values before handing them back.
 *
 * @param fileReader reader used to access the carbondata file
 * @param columnIndexRange ranges of measure column indexes to read
 * @return the raw measure column chunks with updated min/max values
 * @throws IOException if the chunks cannot be read
 */
@Override
public MeasureRawColumnChunk[] readMeasureChunks(FileReader fileReader, int[][] columnIndexRange)
    throws IOException {
  MeasureRawColumnChunk[] rawChunks =
      getMeasureColumnChunkReader(fileReader).readRawMeasureChunks(fileReader, columnIndexRange);
  updateMeasureRawColumnChunkMinMaxValues(rawChunks);
  return rawChunks;
}
/**
 * Returns the total number of blocklets indexed by this datamap.
 */
@Override
protected int getTotalBlocklets() {
  // In the non-legacy layout the store holds exactly one row per blocklet.
  return isLegacyStore ? super.getTotalBlocklets() : memoryDMStore.getRowCount();
}
/**
 * Reads the raw dimension chunks for the given block index ranges.
 *
 * @param fileReader reader used to access the carbondata file
 * @param blockIndexes ranges of dimension column indexes to read
 * @return the raw dimension column chunks
 * @throws IOException if the chunks cannot be read
 */
@Override
public DimensionRawColumnChunk[] readDimensionChunks(FileReader fileReader, int[][] blockIndexes)
    throws IOException {
  return getDimensionColumnChunkReader(fileReader).readRawDimensionChunks(fileReader, blockIndexes);
}
/**
 * Wraps a list of block infos together with the segment properties and builds
 * the root data-ref node starting at the first block.
 *
 * @param blockInfos block infos of the segment
 * @param segmentProperties properties of the segment
 */
public IndexWrapper(List<TableBlockInfo> blockInfos, SegmentProperties segmentProperties) {
  this.blockInfos = blockInfos;
  this.segmentProperties = segmentProperties;
  // The chain of data-ref nodes begins at index 0.
  this.dataRefNode = new BlockletDataRefNode(blockInfos, 0,
      segmentProperties.getDimensionColumnsValueSize());
}