/**
 * Decodes a measure column page written with the legacy (pre-V3) encoding.
 * The first encoder-meta buffer holds the serialized {@link ValueEncoderMeta};
 * the snappy compressor name is passed because legacy pages were written with
 * snappy — NOTE(review): confirm no legacy writer ever used another compressor.
 *
 * @param measureRawColumnChunk raw chunk whose backing array holds the page bytes
 * @param measureColumnChunk    page metadata (encoder meta and data page length)
 * @param copyPoint             offset of this page inside the raw data array
 * @param reusableDataBuffer    scratch buffer (currently unused by this path)
 * @return the decoded column page
 */
protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
    DataChunk2 measureColumnChunk, int copyPoint, ReusableDataBuffer reusableDataBuffer)
    throws MemoryException, IOException {
  List<ByteBuffer> metaBuffers = measureColumnChunk.getEncoder_meta();
  assert (metaBuffers.size() > 0);
  // Legacy format stores exactly one serialized ValueEncoderMeta as the first entry.
  ValueEncoderMeta legacyMeta = CarbonUtil.deserializeEncoderMetaV2(metaBuffers.get(0).array());
  ColumnPageDecoder pageDecoder = encodingFactory.createDecoderLegacy(legacyMeta,
      CompressorFactory.NativeSupportedCompressor.SNAPPY.getName());
  return pageDecoder.decode(measureRawColumnChunk.getRawData().array(), copyPoint,
      measureColumnChunk.data_page_length);
}
}
/**
 * Decodes a dimension column page using the decoder described by the page metadata.
 * When {@code vectorInfo} is supplied, the decoded values are pushed straight into
 * the column vector and {@code null} is returned; otherwise the decoded
 * {@link ColumnPage} is returned to the caller.
 *
 * @param pageMetadata           page header carrying encodings, encoder meta and lengths
 * @param pageData               buffer whose backing array holds the encoded page bytes
 * @param offset                 start of this page inside the backing array
 * @param isLocalDictEncodedPage whether the page is local-dictionary encoded
 * @param vectorInfo             target vector, or null to get a ColumnPage back
 * @param nullBitSet             positions of null values in the page
 * @param reusableDataBuffer     scratch buffer reused across pages during vector fill
 * @return decoded page, or null when the values were filled into the vector
 */
private ColumnPage decodeDimensionByMeta(DataChunk2 pageMetadata, ByteBuffer pageData,
    int offset, boolean isLocalDictEncodedPage, ColumnVectorInfo vectorInfo,
    BitSet nullBitSet, ReusableDataBuffer reusableDataBuffer)
    throws IOException, MemoryException {
  boolean fillVector = vectorInfo != null;
  String compressorName =
      CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta());
  ColumnPageDecoder pageDecoder = encodingFactory.createDecoder(pageMetadata.getEncoders(),
      pageMetadata.getEncoder_meta(), compressorName, fillVector);
  if (!fillVector) {
    return pageDecoder.decode(pageData.array(), offset, pageMetadata.data_page_length,
        isLocalDictEncodedPage);
  }
  pageDecoder.decodeAndFillVector(pageData.array(), offset, pageMetadata.data_page_length,
      vectorInfo, nullBitSet, isLocalDictEncodedPage, pageMetadata.numberOfRowsInpage,
      reusableDataBuffer);
  return null;
}
/**
 * Decodes a measure column page with page header and raw data starting from offset.
 * When {@code vectorInfo} is supplied, the decoded values are pushed straight into
 * the column vector and {@code null} is returned; otherwise the decoded
 * {@link ColumnPage} is returned.
 *
 * @param pageMetadata       page header carrying encodings, encoder meta and lengths
 * @param pageData           buffer whose backing array holds the encoded page bytes
 * @param offset             start of this page inside the backing array
 * @param vectorInfo         target vector, or null to get a ColumnPage back
 * @param nullBitSet         positions of null values in the page
 * @param reusableDataBuffer scratch buffer reused across pages during vector fill
 * @return decoded page, or null when the values were filled into the vector
 */
protected ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset,
    ColumnVectorInfo vectorInfo, BitSet nullBitSet, ReusableDataBuffer reusableDataBuffer)
    throws MemoryException, IOException {
  List<Encoding> pageEncodings = pageMetadata.getEncoders();
  // Reject pages written with encodings this reader does not understand.
  org.apache.carbondata.core.metadata.encoder.Encoding.validateEncodingTypes(pageEncodings);
  String compressorName =
      CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta());
  ColumnPageDecoder pageDecoder = encodingFactory.createDecoder(pageEncodings,
      pageMetadata.getEncoder_meta(), compressorName, vectorInfo != null);
  if (vectorInfo == null) {
    return pageDecoder.decode(pageData.array(), offset, pageMetadata.data_page_length);
  }
  // Local-dictionary flag is hard-coded false on the measure path — presumably measure
  // pages are never local-dictionary encoded; verify against the writer side.
  pageDecoder.decodeAndFillVector(pageData.array(), offset, pageMetadata.data_page_length,
      vectorInfo, nullBitSet, false, pageMetadata.numberOfRowsInpage, reusableDataBuffer);
  return null;
}
}
/**
 * Returns the value of the given field as an {@code Object}, dispatching to the
 * matching getter (Thrift-style field accessor).
 *
 * @param field the field to read; must be one of the {@code _Fields} constants
 * @return the field's current value (may be null for unset optional fields)
 * @throws IllegalStateException if the field is not handled by this switch —
 *         should be unreachable while the switch covers every enum constant
 * @throws NullPointerException if {@code field} is null
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case CHUNK_META:
      return getChunk_meta();
    case ROW_MAJOR:
      return isRowMajor();
    case DATA_PAGE_LENGTH:
      return getData_page_length();
    case ROWID_PAGE_LENGTH:
      return getRowid_page_length();
    case RLE_PAGE_LENGTH:
      return getRle_page_length();
    case PRESENCE:
      return getPresence();
    case SORT_STATE:
      return getSort_state();
    case ENCODERS:
      return getEncoders();
    case ENCODER_META:
      return getEncoder_meta();
    case MIN_MAX:
      return getMin_max();
    case NUMBER_OF_ROWS_INPAGE:
      return getNumberOfRowsInpage();
  }
  // Include the offending field so the failure is diagnosable instead of a bare ISE.
  throw new IllegalStateException("Unhandled field: " + field);
}