/**
 * Returns the current value of the requested Thrift field as a boxed Object.
 *
 * @param field identifier of the field to read
 * @return the field's value via its typed getter (may be null for unset object fields)
 * @throws java.lang.IllegalStateException if the field identifier is not one of this struct's fields
 */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case NUM_VALUES:
      return getNum_values();
    case ENCODING:
      return getEncoding();
    case DEFINITION_LEVEL_ENCODING:
      return getDefinition_level_encoding();
    case REPETITION_LEVEL_ENCODING:
      return getRepetition_level_encoding();
    case STATISTICS:
      return getStatistics();
  }
  // Unreachable for the known enum constants above; guards against future additions.
  throw new java.lang.IllegalStateException();
}
/** Creates a deep copy of this struct by delegating to the copy constructor. */
public DataPageHeader deepCopy() {
  DataPageHeader copy = new DataPageHeader(this);
  return copy;
}
// NOTE(review): this fragment appears truncated/corrupted — it reads like the body of a
// generated Thrift setFieldValue(_Fields, Object) switch, but the intervening
// "break; case ENCODING: if (value == null) {" style separators between each
// unset/set pair are missing, leaving unbalanced "} else {" tokens. Recover the full
// generated source rather than editing this line in place.
case NUM_VALUES: if (value == null) { unsetNum_values(); } else { setNum_values((Integer)value); unsetEncoding(); } else { setEncoding((Encoding)value); unsetDefinition_level_encoding(); } else { setDefinition_level_encoding((Encoding)value); unsetRepetition_level_encoding(); } else { setRepetition_level_encoding((Encoding)value); unsetStatistics(); } else { setStatistics((Statistics)value);
// Fragment of a page-reading loop: adds the page's value count (dictionary page count in the
// first branch, data page count in the else branch) into valuesRead, then folds that into the
// parent's running total. NOTE(review): the opening "if (...)" header of this branch is not
// visible here — do not edit without the enclosing method.
valuesRead += pageHeader.getDictionary_page_header().getNum_values(); } else { valuesRead += pageHeader.getData_page_header().getNum_values(); parent.totalPageValuesRead += valuesRead;
// Deserializes the nested DataPageHeader struct from the Thrift input protocol and marks the
// field as set. (Statement run from a generated read() switch case; the surrounding case label
// and else/skip branch are not visible in this fragment.)
struct.data_page_header = new DataPageHeader(); struct.data_page_header.read(iprot); struct.setData_page_headerIsSet(true);
private PageHeader newDataPageHeader( int uncompressedSize, int compressedSize, int valueCount, org.apache.parquet.column.statistics.Statistics statistics, org.apache.parquet.column.Encoding rlEncoding, org.apache.parquet.column.Encoding dlEncoding, org.apache.parquet.column.Encoding valuesEncoding) { PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); // TODO: pageHeader.crc = ...; pageHeader.setData_page_header(new DataPageHeader( valueCount, getEncoding(valuesEncoding), getEncoding(dlEncoding), getEncoding(rlEncoding))); if (!statistics.isEmpty()) { pageHeader.getData_page_header().setStatistics(toParquetStatistics(statistics)); } return pageHeader; }
/**
 * Untyped equality: true only when {@code that} is a DataPageHeader with equal fields
 * (delegates to the typed equals). A null argument fails the instanceof test, matching
 * the original null guard.
 */
@Override
public boolean equals(java.lang.Object that) {
  return (that instanceof DataPageHeader) && this.equals((DataPageHeader) that);
}
// Fragment: tallies this data page's value count into valueReadSoFar, then allocates a ByteBuf
// sized to the compressed page from the pooled allocator and remembers it in lastPage.
// NOTE(review): enclosing method not visible — lastPage presumably exists so the buffer can be
// released later; confirm against the full source before changing.
valueReadSoFar += pageHeader.data_page_header.getNum_values(); ByteBuf buf = allocator.buffer(pageHeader.compressed_page_size); lastPage = buf;
// Deserializes the nested DataPageHeader struct from the Thrift input protocol and marks the
// field as set. (Statement run from a generated read() switch case; the surrounding case label
// and else/skip branch are not visible in this fragment.)
struct.data_page_header = new DataPageHeader(); struct.data_page_header.read(iprot); struct.setData_page_headerIsSet(true);
/**
 * Untyped equality entry point. Rejects null and non-DataPageHeader arguments, then
 * delegates field-by-field comparison to the typed equals overload.
 */
@Override
public boolean equals(Object that) {
  if (!(that instanceof DataPageHeader)) {
    return false;
  }
  return this.equals((DataPageHeader) that);
}
/**
 * Returns the current value of the requested Thrift field as a boxed Object.
 *
 * @param field identifier of the field to read
 * @return the field's value via its typed getter (may be null for unset object fields)
 * @throws IllegalStateException if the field identifier is not one of this struct's fields
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case NUM_VALUES:
      return getNum_values();
    case ENCODING:
      return getEncoding();
    case DEFINITION_LEVEL_ENCODING:
      return getDefinition_level_encoding();
    case REPETITION_LEVEL_ENCODING:
      return getRepetition_level_encoding();
    case STATISTICS:
      return getStatistics();
    default:
      // Unreachable for the known enum constants; guards against future additions.
      throw new IllegalStateException();
  }
}
// NOTE(review): this fragment appears truncated/corrupted — it reads like the body of a
// generated Thrift setFieldValue(_Fields, java.lang.Object) switch, but the intervening
// "break; case ENCODING: if (value == null) {" style separators between each
// unset/set pair are missing, leaving unbalanced "} else {" tokens. Recover the full
// generated source rather than editing this line in place.
case NUM_VALUES: if (value == null) { unsetNum_values(); } else { setNum_values((java.lang.Integer)value); unsetEncoding(); } else { setEncoding((Encoding)value); unsetDefinition_level_encoding(); } else { setDefinition_level_encoding((Encoding)value); unsetRepetition_level_encoding(); } else { setRepetition_level_encoding((Encoding)value); unsetStatistics(); } else { setStatistics((Statistics)value);
// Fragment: counts the page's values into valueReadSoFar, decompresses the page bytes into
// destBuffer, and begins constructing the DataPageV1 result — the constructor argument list is
// cut off at the end of this fragment, so edit only in the full source.
valueReadSoFar += pageHeader.data_page_header.getNum_values(); ByteBuffer destBuffer = uncompressPage(pageHeader, true); return new DataPageV1(
// Generated Thrift reader, field id 5 (DATA_PAGE_HEADER): when the wire type is STRUCT, read
// the nested DataPageHeader and flag it as set; the trailing "} else {" skip-branch continues
// beyond this fragment. NOTE(review): as collapsed onto one line here, everything after the
// inline "//" is commented out — the real generated source spans multiple lines.
case 5: // DATA_PAGE_HEADER if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.data_page_header = new DataPageHeader(); struct.data_page_header.read(iprot); struct.setData_page_headerIsSet(true); } else {
/**
 * Produces a field-by-field duplicate of this struct.
 *
 * @return a new DataPageHeader built from this instance via the copy constructor
 */
public DataPageHeader deepCopy() {
  return new DataPageHeader(this);
}
// Fragment of a generated equals() body: both structs must agree that data_page_header is
// present, and the nested structs must compare equal. NOTE(review): the boolean
// this_present_.../that_present_... setup is declared outside this fragment.
if (!(this_present_data_page_header && that_present_data_page_header)) return false; if (!this.data_page_header.equals(that.data_page_header)) return false;
/**
 * Decodes a v1 data page from the current position and appends the resulting
 * {@link DataPageV1} to {@code pages}.
 *
 * @param pageHeader Thrift header whose data_page_header describes this page
 * @param uncompressedPageSize page size before compression, in bytes
 * @param compressedPageSize number of bytes to consume for the page payload
 * @param pages output list receiving the decoded page
 * @return the number of values stored in this page
 */
private long readDataPageV1(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages)
{
    DataPageHeader header = pageHeader.getData_page_header();
    int valueCount = header.getNum_values();
    // Translate the Thrift encoding enums into the local Encoding enum by name.
    Encoding repetitionLevelEncoding = Encoding.valueOf(header.getRepetition_level_encoding().name());
    Encoding definitionLevelEncoding = Encoding.valueOf(header.getDefinition_level_encoding().name());
    Encoding valuesEncoding = Encoding.valueOf(header.getEncoding().name());
    pages.add(new DataPageV1(
            getSlice(compressedPageSize),
            valueCount,
            uncompressedPageSize,
            MetadataReader.readStats(header.getStatistics(), descriptor.getColumnDescriptor().getType()),
            getParquetEncoding(repetitionLevelEncoding),
            getParquetEncoding(definitionLevelEncoding),
            getParquetEncoding(valuesEncoding)));
    return valueCount;
}
// Generated Thrift reader, field id 5 (DATA_PAGE_HEADER): when the wire type is STRUCT, read
// the nested DataPageHeader and flag it as set; the trailing "} else {" skip-branch continues
// beyond this fragment. NOTE(review): as collapsed onto one line here, everything after the
// inline "//" is commented out — the real generated source spans multiple lines.
case 5: // DATA_PAGE_HEADER if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.data_page_header = new DataPageHeader(); struct.data_page_header.read(iprot); struct.setData_page_headerIsSet(true); } else {
private PageHeader newDataPageHeader( int uncompressedSize, int compressedSize, int valueCount, org.apache.parquet.column.Encoding rlEncoding, org.apache.parquet.column.Encoding dlEncoding, org.apache.parquet.column.Encoding valuesEncoding) { PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); // TODO: pageHeader.crc = ...; pageHeader.setData_page_header(new DataPageHeader( valueCount, getEncoding(valuesEncoding), getEncoding(dlEncoding), getEncoding(rlEncoding))); return pageHeader; }
// Fragment of a generated equals() body: both structs must agree that data_page_header is
// present, and the nested structs must compare equal. NOTE(review): the boolean
// this_present_.../that_present_... setup is declared outside this fragment.
if (!(this_present_data_page_header && that_present_data_page_header)) return false; if (!this.data_page_header.equals(that.data_page_header)) return false;