/**
 * Static factory: reads the header-dependent state from {@code in} and returns a new
 * {@link MonotonicBlockPackedReader}. (Note: this is a factory method, not a constructor;
 * it delegates to the private constructor.)
 *
 * @param in                the input positioned at the packed data
 * @param packedIntsVersion on-disk PackedInts format version
 * @param blockSize         number of values per block
 * @param valueCount        total number of values to expose
 * @param direct            passed through to the constructor; presumably selects a
 *                          direct (off-heap) reader — TODO confirm against the constructor
 * @throws IOException if reading from {@code in} fails
 */
public static MonotonicBlockPackedReader of(IndexInput in, int packedIntsVersion, int blockSize, long valueCount, boolean direct) throws IOException {
  return new MonotonicBlockPackedReader(in, packedIntsVersion, blockSize, valueCount, direct);
}
/**
 * Returns the value at {@code index}: the block's expected baseline
 * (from {@code minValues}/{@code averages}) plus the stored per-slot value.
 */
@Override
public long get(long index) {
  assert index >= 0 && index < valueCount;
  // Split the global index into (block, offset-within-block).
  final int blockIndex = (int) (index >>> blockShift);
  final int inBlock = (int) (index & blockMask);
  final long baseline = expected(minValues[blockIndex], averages[blockIndex], inBlock);
  return baseline + subReaders[blockIndex].get(inBlock);
}
/**
 * Returns an address instance for SORTED_SET ordinal lists for {@code field}.
 * Lazily loads the reader from {@code data} at {@code entry.offset} and caches it
 * (keyed by field name) unless we are merging, accounting for its RAM usage.
 *
 * @throws IOException if reading the packed data fails
 */
private synchronized MonotonicBlockPackedReader getOrdIndexInstance(FieldInfo field, NumericEntry entry) throws IOException {
  MonotonicBlockPackedReader instance = ordIndexInstances.get(field.name);
  if (instance == null) {
    data.seek(entry.offset);
    // count+1 addresses: one extra entry marks the end of the last list
    instance = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count + 1, false);
    if (!merging) {
      ordIndexInstances.put(field.name, instance);
      // Integer.BYTES (== RamUsageEstimator.NUM_BYTES_INT) for map-entry overhead;
      // consistent with getIntervalInstance, which already uses Integer.BYTES
      ramBytesUsed.addAndGet(instance.ramBytesUsed() + Integer.BYTES);
    }
  }
  return instance;
}
// NOTE(review): this span appears to be TWO separate code paths pasted together —
// `packedIntsVersion`, `blockSize`, and `live` are each declared twice, which cannot
// compile in one scope, and the trailing `for` loop is not closed here. Presumably
// these are two branches of a norms-loading switch; verify against the full file.
// First fragment: indirect norms backed by a nested instance.
int packedIntsVersion = data.readVInt(); int blockSize = data.readVInt();
final MonotonicBlockPackedReader live = MonotonicBlockPackedReader.of(data, packedIntsVersion, blockSize, entry.count, false);
final Norms nestedInstance = loadNorms(entry.nested);
final int upperBound = entry.count-1;
// Second fragment: sparse norms — read doc ids into a bit set (loop continues past this view).
int packedIntsVersion = data.readVInt(); int blockSize = data.readVInt();
MonotonicBlockPackedReader live = MonotonicBlockPackedReader.of(data, packedIntsVersion, blockSize, entry.count, true);
final SparseFixedBitSet set = new SparseFixedBitSet(maxDoc);
for (int i = 0; i < live.size(); i++) { int doc = (int) live.get(i); set.set(doc);
/** Debug representation: simple class name plus the number of term addresses. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
  sb.append("(size=").append(termAddresses.size()).append(')');
  return sb.toString();
}
}
/** Sums the RAM usage of both offset readers, treating a missing reader as 0. */
@Override
public long ramBytesUsed() {
  long total = 0;
  if (termOffsets != null) {
    total += termOffsets.ramBytesUsed();
  }
  if (termsDictOffsets != null) {
    total += termsDictOffsets.ramBytesUsed();
  }
  return total;
}
/**
 * Slurps the on-disk index images into memory: copies the raw term bytes and reads the
 * two monotonic offset readers.
 *
 * <p>Fix: the original closed the clone in a {@code finally} that did not cover
 * {@code in.clone()}/{@code seek}, so a throw from {@code seek} leaked the clone.
 * try-with-resources now covers every use of the clone.
 *
 * @param in               main input; a clone is used and closed before returning
 * @param termBytes        destination for the raw term image
 * @param indexStart       file offset of the term bytes
 * @param termsStart       offset recorded for later seeks into the main terms dict
 * @param packedIndexStart file offset where packed offsets begin (end of term bytes)
 * @param packedOffsetsStart unused here — TODO confirm it is consumed elsewhere
 * @param numIndexTerms    number of indexed terms; must be positive
 * @throws IOException if reading the index data fails
 */
public FieldIndexData(IndexInput in, PagedBytes termBytes, long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart, long numIndexTerms) throws IOException {
  this.termsStart = termsStart;
  termBytesStart = termBytes.getPointer();
  this.numIndexTerms = numIndexTerms;
  assert this.numIndexTerms > 0 : "numIndexTerms=" + numIndexTerms;

  // slurp in the images from disk; the clone is closed even if any read throws
  try (IndexInput clone = in.clone()) {
    clone.seek(indexStart);
    final long numTermBytes = packedIndexStart - indexStart;
    termBytes.copy(clone, numTermBytes);

    // records offsets into main terms dict file
    termsDictOffsets = MonotonicBlockPackedReader.of(clone, packedIntsVersion, blocksize, numIndexTerms, false);

    // records offsets into byte[] term data (numIndexTerms+1 entries: one past the end)
    termOffsets = MonotonicBlockPackedReader.of(clone, packedIntsVersion, blocksize, 1 + numIndexTerms, false);
  }
}
/**
 * Returns the value at {@code index}: the block's expected baseline plus the
 * zigzag-style decoded delta stored for that slot.
 */
@Override
public long get(long index) {
  assert index >= 0 && index < valueCount;
  final int b = (int) (index >>> blockShift);
  final int offset = (int) (index & blockMask);
  final long delta = decodeDelta(subReaders[b].get(offset));
  return expected(minValues[b], averages[b], offset) + delta;
}
/**
 * Returns an address instance for variable-length binary values for {@code field}.
 * Lazily loads the reader from {@code data} at {@code bytes.addressesOffset} and caches
 * it (keyed by field name) unless we are merging, accounting for its RAM usage.
 *
 * @throws IOException if reading the packed data fails
 */
private synchronized MonotonicBlockPackedReader getAddressInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader addresses = addressInstances.get(field.name);
  if (addresses == null) {
    data.seek(bytes.addressesOffset);
    // count+1 addresses: one extra entry marks the end of the last value
    addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count + 1, false);
    if (!merging) {
      addressInstances.put(field.name, addresses);
      // Integer.BYTES (== RamUsageEstimator.NUM_BYTES_INT) for map-entry overhead;
      // consistent with getIntervalInstance, which already uses Integer.BYTES
      ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
    }
  }
  return addresses;
}
/**
 * Wires up a compressed-binary view over the given entry: caches the sizes and
 * reverse-index readers, then opens the shared terms enum as the final step.
 */
public CompressedBinaryDocValues(BinaryEntry bytes, MonotonicBlockPackedReader addresses, ReverseTermsIndex index, IndexInput data) throws IOException {
  this.data = data;
  this.addresses = addresses;
  this.maxTermLength = bytes.maxLength;
  this.numValues = bytes.count;
  this.numIndexValues = addresses.size();
  this.reverseTerms = index.terms;
  this.reverseAddresses = index.termAddresses;
  this.numReverseIndexValues = reverseAddresses.size();
  // must run last: presumably reads the fields initialized above — TODO confirm
  this.termsEnum = getTermsEnum(data);
}
/** RAM usage of the delegate reader plus the (optional) addresses reader. */
@Override
public long ramBytesUsed() {
  final long addressBytes = (addresses == null) ? 0 : addresses.ramBytesUsed();
  return reader.ramBytesUsed() + addressBytes;
}
/**
 * Returns a reverse lookup instance for prefix-compressed binary values.
 * Loads the term addresses and frozen term bytes on first use, caching the result
 * (keyed by field name) unless we are merging.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  final ReverseTermsIndex cached = reverseIndexInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  final ReverseTermsIndex index = new ReverseTermsIndex();
  data.seek(bytes.reverseIndexOffset);
  // one address per reverse interval (rounded up)
  final long numAddresses = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
  index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numAddresses, false);
  // the term bytes follow the addresses, prefixed by their length
  final long dataSize = data.readVLong();
  final PagedBytes pagedBytes = new PagedBytes(15);
  pagedBytes.copy(data, dataSize);
  index.terms = pagedBytes.freeze(true);
  if (!merging) {
    reverseIndexInstances.put(field.name, index);
    ramBytesUsed.addAndGet(index.ramBytesUsed());
  }
  return index;
}
/**
 * Returns the value at {@code index}: the expected per-block baseline with the
 * decoded stored delta added on top.
 */
@Override
public long get(long index) {
  assert index >= 0 && index < valueCount;
  final int blockId = (int) (index >>> blockShift);
  final int slot = (int) (index & blockMask);
  long value = expected(minValues[blockId], averages[blockId], slot);
  value += decodeDelta(subReaders[blockId].get(slot));
  return value;
}
/**
 * Returns an address instance for prefix-compressed binary values.
 * One address is stored per interval (rounded up); loaded lazily and cached
 * per field name unless we are merging.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  final MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  data.seek(bytes.addressesOffset);
  final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
  final MonotonicBlockPackedReader addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  if (!merging) {
    addressInstances.put(field.name, addresses);
    // Integer.BYTES accounts for the map-entry overhead
    ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
  }
  return addresses;
}
/**
 * Decodes a block via the superclass, then adds each slot's expected baseline
 * (zero min, per-block average) back onto the raw deltas in place.
 */
@Override
int decodeBlock(int block, long[] dest) {
  final int decoded = super.decodeBlock(block, dest);
  final float avg = averages[block];
  int i = 0;
  while (i < decoded) {
    dest[i] += expected(0, avg, i);
    ++i;
  }
  return decoded;
}
/**
 * Builds a compressed-binary doc values view from a binary entry plus its
 * forward and reverse index readers; finishes by opening the terms enum.
 */
public CompressedBinaryDocValues(BinaryEntry bytes, MonotonicBlockPackedReader addresses, ReverseTermsIndex index, IndexInput data) throws IOException {
  // sizing metadata from the entry and the forward addresses
  this.maxTermLength = bytes.maxLength;
  this.numValues = bytes.count;
  this.addresses = addresses;
  this.numIndexValues = addresses.size();
  this.data = data;
  // reverse-lookup structures
  this.reverseTerms = index.terms;
  this.reverseAddresses = index.termAddresses;
  this.numReverseIndexValues = reverseAddresses.size();
  this.termsEnum = getTermsEnum(data);
}
/** Total RAM used by the reverse index: addresses reader plus frozen term bytes. */
@Override
public long ramBytesUsed() {
  long sum = termAddresses.ramBytesUsed();
  sum += terms.ramBytesUsed();
  return sum;
}