/**
 * Returns the off-heap memory footprint of the index summary, in bytes.
 * Delegates to {@code indexSummary.getOffHeapSize()}.
 *
 * NOTE(review): the original file contained four byte-identical copies of this
 * method; duplicate method definitions are a compile error in Java, so they
 * have been collapsed into this single definition.
 *
 * @return the number of off-heap bytes held by the index summary
 */
public long getIndexSummaryOffHeapSize()
{
    return indexSummary.getOffHeapSize();
}
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException { out.writeInt(t.minIndexInterval); out.writeInt(t.offsetCount); out.writeLong(t.getOffHeapSize()); if (withSamplingLevel) { out.writeInt(t.samplingLevel); out.writeInt(t.sizeAtFullSampling); } // our on-disk representation treats the offsets and the summary data as one contiguous structure, // in which the offsets are based from the start of the structure. i.e., if the offsets occupy // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing. // In this case adding X to each of the offsets. int baseOffset = t.offsetCount * 4; for (int i = 0 ; i < t.offsetCount ; i++) { int offset = t.offsets.getInt(i * 4) + baseOffset; // our serialization format for this file uses native byte order, so if this is different to the // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN) offset = Integer.reverseBytes(offset); out.writeInt(offset); } out.write(t.entries, 0, t.entriesLength); }
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException { out.writeInt(t.minIndexInterval); out.writeInt(t.offsetCount); out.writeLong(t.getOffHeapSize()); if (withSamplingLevel) { out.writeInt(t.samplingLevel); out.writeInt(t.sizeAtFullSampling); } // our on-disk representation treats the offsets and the summary data as one contiguous structure, // in which the offsets are based from the start of the structure. i.e., if the offsets occupy // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing. // In this case adding X to each of the offsets. int baseOffset = t.offsetCount * 4; for (int i = 0 ; i < t.offsetCount ; i++) { int offset = t.offsets.getInt(i * 4) + baseOffset; // our serialization format for this file uses native byte order, so if this is different to the // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN) offset = Integer.reverseBytes(offset); out.writeInt(offset); } out.write(t.entries, 0, t.entriesLength); }
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException { out.writeInt(t.minIndexInterval); out.writeInt(t.offsetCount); out.writeLong(t.getOffHeapSize()); if (withSamplingLevel) { out.writeInt(t.samplingLevel); out.writeInt(t.sizeAtFullSampling); } // our on-disk representation treats the offsets and the summary data as one contiguous structure, // in which the offsets are based from the start of the structure. i.e., if the offsets occupy // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing. // In this case adding X to each of the offsets. int baseOffset = t.offsetCount * 4; for (int i = 0 ; i < t.offsetCount ; i++) { int offset = t.offsets.getInt(i * 4) + baseOffset; // our serialization format for this file uses native byte order, so if this is different to the // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN) offset = Integer.reverseBytes(offset); out.writeInt(offset); } out.write(t.entries, 0, t.entriesLength); }
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException { out.writeInt(t.minIndexInterval); out.writeInt(t.offsetCount); out.writeLong(t.getOffHeapSize()); if (withSamplingLevel) { out.writeInt(t.samplingLevel); out.writeInt(t.sizeAtFullSampling); } // our on-disk representation treats the offsets and the summary data as one contiguous structure, // in which the offsets are based from the start of the structure. i.e., if the offsets occupy // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing. // In this case adding X to each of the offsets. int baseOffset = t.offsetCount * 4; for (int i = 0 ; i < t.offsetCount ; i++) { int offset = t.offsets.getInt(i * 4) + baseOffset; // our serialization format for this file uses native byte order, so if this is different to the // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN) offset = Integer.reverseBytes(offset); out.writeInt(offset); } out.write(t.entries, 0, t.entriesLength); }
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException { out.writeInt(t.minIndexInterval); out.writeInt(t.offsetCount); out.writeLong(t.getOffHeapSize()); if (withSamplingLevel) { out.writeInt(t.samplingLevel); out.writeInt(t.sizeAtFullSampling); } // our on-disk representation treats the offsets and the summary data as one contiguous structure, // in which the offsets are based from the start of the structure. i.e., if the offsets occupy // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing. // In this case adding X to each of the offsets. int baseOffset = t.offsetCount * 4; for (int i = 0 ; i < t.offsetCount ; i++) { int offset = t.offsets.getInt(i * 4) + baseOffset; // our serialization format for this file uses native byte order, so if this is different to the // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN) offset = Integer.reverseBytes(offset); out.writeInt(offset); } out.write(t.entries, 0, t.entriesLength); }