/**
 * Returns the value of the requested Thrift field as a boxed {@link java.lang.Object}.
 *
 * <p>Generated-style accessor: each {@code _Fields} constant dispatches to the
 * corresponding typed getter; primitive results are autoboxed by the return type.
 * A {@code null} argument deliberately surfaces as a {@link java.lang.NullPointerException}
 * from the switch, matching standard Thrift-generated behavior.
 *
 * @param field selector for the field whose value should be returned
 * @return the current value of that field (may be {@code null} for unset object fields)
 * @throws java.lang.IllegalStateException if {@code field} is not a recognized constant
 */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case NUM_VALUES:
      return getNum_values();
    case NUM_NULLS:
      return getNum_nulls();
    case NUM_ROWS:
      return getNum_rows();
    case ENCODING:
      return getEncoding();
    case DEFINITION_LEVELS_BYTE_LENGTH:
      return getDefinition_levels_byte_length();
    case REPETITION_LEVELS_BYTE_LENGTH:
      return getRepetition_levels_byte_length();
    case IS_COMPRESSED:
      return isIs_compressed();
    case STATISTICS:
      return getStatistics();
  }
  throw new java.lang.IllegalStateException();
}
/**
 * Generic read access to a single Thrift field, keyed by its {@code _Fields} constant.
 *
 * <p>Delegates to the strongly typed getter for each known field; primitives come back
 * boxed. Falls through to an {@link IllegalStateException} only for an unrecognized
 * constant (a {@code null} argument throws {@link NullPointerException} from the switch,
 * as in all Thrift-generated structs).
 *
 * @param field selector for the field whose value should be returned
 * @return the current value of that field (may be {@code null} for unset object fields)
 * @throws IllegalStateException if {@code field} is not a recognized constant
 */
public Object getFieldValue(_Fields field) {
    switch (field) {
        case NUM_VALUES:
            return getNum_values();
        case NUM_NULLS:
            return getNum_nulls();
        case NUM_ROWS:
            return getNum_rows();
        case ENCODING:
            return getEncoding();
        case DEFINITION_LEVELS_BYTE_LENGTH:
            return getDefinition_levels_byte_length();
        case REPETITION_LEVELS_BYTE_LENGTH:
            return getRepetition_levels_byte_length();
        case IS_COMPRESSED:
            return isIs_compressed();
        case STATISTICS:
            return getStatistics();
    }
    throw new IllegalStateException();
}
repetitionLevels, definitionLevels, parquetMetadataConverter.getEncoding(dataHeaderV2.getEncoding()), data, uncompressedPageSize,
/**
 * Reads a DATA_PAGE_V2 page described by {@code pageHeader} and appends the decoded
 * {@code DataPageV2} to {@code pages}.
 *
 * <p>The value-section length is computed as {@code compressedPageSize} minus both level
 * byte lengths, since in the V2 page layout the repetition- and definition-level runs
 * precede the value bytes (the Parquet format stores the level runs uncompressed — see the
 * Parquet format specification for DataPageHeaderV2).
 *
 * <p>NOTE(review): {@code getSlice(...)} appears to consume sequential bytes from the
 * underlying page stream, so the three {@code getSlice} calls below must stay in layout
 * order — repetition levels, definition levels, values. Confirm against getSlice's
 * implementation before reordering.
 *
 * @param pageHeader Thrift page header carrying the {@code DataPageHeaderV2}
 * @param uncompressedPageSize total uncompressed size of the page body, in bytes
 * @param compressedPageSize on-disk (possibly compressed) size of the page body, in bytes
 * @param pages output list the decoded page is appended to
 * @return the number of values stored in this page
 */
private long readDataPageV2(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages)
{
    DataPageHeaderV2 header = pageHeader.getData_page_header_v2();
    int repetitionLevelsLength = header.getRepetition_levels_byte_length();
    int definitionLevelsLength = header.getDefinition_levels_byte_length();
    int valuesLength = compressedPageSize - repetitionLevelsLength - definitionLevelsLength;
    pages.add(new DataPageV2(
            header.getNum_rows(),
            header.getNum_nulls(),
            header.getNum_values(),
            getSlice(repetitionLevelsLength),
            getSlice(definitionLevelsLength),
            getParquetEncoding(Encoding.valueOf(header.getEncoding().name())),
            getSlice(valuesLength),
            uncompressedPageSize,
            MetadataReader.readStats(header.getStatistics(), descriptor.getColumnDescriptor().getType()),
            header.isIs_compressed()));
    return header.getNum_values();
}
}
/**
 * Decodes one DATA_PAGE_V2 page from {@code pageHeader} into a {@code DataPageV2}
 * and adds it to {@code pages}.
 *
 * <p>The level runs sit in front of the value bytes in a V2 page, so the size of the
 * value section is what remains of {@code compressedPageSize} after subtracting the
 * repetition- and definition-level byte lengths (the V2 layout keeps the level runs
 * uncompressed — see the Parquet format specification for DataPageHeaderV2).
 *
 * <p>NOTE(review): the {@code getSlice} calls look like they advance an underlying
 * stream position, so their order (repetition levels, then definition levels, then
 * values) mirrors the on-disk layout and must not change — verify against getSlice.
 *
 * @param pageHeader Thrift page header carrying the {@code DataPageHeaderV2}
 * @param uncompressedPageSize total uncompressed size of the page body, in bytes
 * @param compressedPageSize on-disk (possibly compressed) size of the page body, in bytes
 * @param pages output list that receives the decoded page
 * @return the number of values stored in this page
 */
private long readDataPageV2(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages)
{
    DataPageHeaderV2 dataHeader = pageHeader.getData_page_header_v2();
    int repetitionBytes = dataHeader.getRepetition_levels_byte_length();
    int definitionBytes = dataHeader.getDefinition_levels_byte_length();
    int valueBytes = compressedPageSize - repetitionBytes - definitionBytes;
    pages.add(new DataPageV2(
            dataHeader.getNum_rows(),
            dataHeader.getNum_nulls(),
            dataHeader.getNum_values(),
            getSlice(repetitionBytes),
            getSlice(definitionBytes),
            getParquetEncoding(Encoding.valueOf(dataHeader.getEncoding().name())),
            getSlice(valueBytes),
            uncompressedPageSize,
            MetadataReader.readStats(dataHeader.getStatistics(), descriptor.getColumnDescriptor().getType()),
            dataHeader.isIs_compressed()));
    return dataHeader.getNum_values();
}
}
this.readAsBytesInput(dataHeaderV2.getRepetition_levels_byte_length()), this.readAsBytesInput(dataHeaderV2.getDefinition_levels_byte_length()), converter.getEncoding(dataHeaderV2.getEncoding()), this.readAsBytesInput(dataSize), uncompressedPageSize,
this.readAsBytesInput(dataHeaderV2.getRepetition_levels_byte_length()), this.readAsBytesInput(dataHeaderV2.getDefinition_levels_byte_length()), converter.getEncoding(dataHeaderV2.getEncoding()), this.readAsBytesInput(dataSize), uncompressedPageSize,
dataHeaderV2.getRepetition_levels_byte_length(), dataHeaderV2.getDefinition_levels_byte_length()), parquetMetadataConverter.getEncoding(dataHeaderV2.getEncoding()), BytesInput.from(destBuffer, dataHeaderV2.getRepetition_levels_byte_length() + dataHeaderV2.getDefinition_levels_byte_length(),