/** Memory accounting: this wrapper owns nothing beyond the delegate. */
@Override
public long ramBytesUsed() {
    return byteArray.ramBytesUsed();
}
}
/** Resize the array to the exact provided size. */
public ByteArray resize(ByteArray array, long size) {
    if (array instanceof BigByteArray) {
        // The paged implementation can grow/shrink without a full copy.
        return resizeInPlace((BigByteArray) array, size);
    }
    // Single-array wrapper: allocate a replacement, copy what fits, release the old one.
    final AbstractArray abstractArray = (AbstractArray) array;
    final ByteArray replacement = newByteArray(size, abstractArray.clearOnResize);
    final byte[] source = ((ByteArrayWrapper) array).array;
    final int copyLength = (int) Math.min(source.length, replacement.size());
    replacement.set(0, source, 0, copyLength);
    abstractArray.close();
    return replacement;
}
/** Returns the byte stored at {@code offset + index} of the backing array. */
@Override
public byte get(int index) {
    return byteArray.get(offset + index);
}
/** Appends one byte, growing the paged backing store if required. */
@Override
public void writeByte(byte b) throws IOException {
    // Long arithmetic so a near-Integer.MAX_VALUE count does not overflow.
    ensureCapacity(count + 1L);
    bytes.set(count, b);
    count += 1;
}
// Number of buckets the current runLens allocation can hold: total register
// slots divided by 2^p registers per bucket (hence the unsigned shift by p).
public long maxBucket() { return runLens.size() >>> p; }
// Promotes a bucket from the linear-counting representation (a hash set of
// encoded hashes) to dense HLL registers: zeroes the bucket's m registers,
// replays every previously collected encoded value into them, then flips the
// per-bucket algorithm bit. The values iterator is released even on failure.
void upgradeToHll(long bucket) { ensureCapacity(bucket + 1); final IntArray values = hashSet.values(bucket); try { runLens.fill(bucket << p, (bucket << p) + m, (byte) 0); for (long i = 0; i < values.size(); ++i) { final int encoded = values.get(i); collectHllEncoded(bucket, encoded); } algorithm.set(bucket); } finally { Releasables.close(values); } }
// Iterator over the slice, one page-sized fragment at a time. `slice` is
// pointed at the next fragment via byteArray.get(...), which returns whether
// the bytes had to be copied ("materialized") — the assert documents that a
// page-aligned iteration should always be able to reference pages directly.
// Returns null once the remaining length reaches zero.
@Override public BytesRef next() throws IOException { if (nextFragmentSize != 0) { final boolean materialized = byteArray.get(offset + position, nextFragmentSize, slice); assert materialized == false : "iteration should be page aligned but array got materialized"; position += nextFragmentSize; final int remaining = length - position; nextFragmentSize = Math.min(remaining, PAGE_SIZE); return slice; } else { assert nextFragmentSize == 0 : "fragmentSize expected [0] but was: [" + nextFragmentSize + "]"; return null; // we are done with this iteration } } };
// Writes a 32-bit value into the byte-backed storage: serializes it through
// the reusable spare buffer, then bulk-copies its 4 bytes to the slot
// computed by index(bucket, index).
// NOTE(review): the byte order is whatever writeSpare was configured with and
// must match the corresponding get path — confirm against the reader.
private void set(long bucket, int index, int value) { writeSpare.putInt(0, value); runLens.set(index(bucket, index), writeSpare.array(), 0, 4); }
// For a paged BigByteArray the resize happens in place; otherwise a new array
// is allocated, the overlapping prefix is copied, and the old one is closed.
/** Resize the array to the exact provided size. */ public ByteArray resize(ByteArray array, long size) { if (array instanceof BigByteArray) { return resizeInPlace((BigByteArray) array, size); } else { AbstractArray arr = (AbstractArray) array; final ByteArray newArray = newByteArray(size, arr.clearOnResize); final byte[] rawArray = ((ByteArrayWrapper) array).array; newArray.set(0, rawArray, 0, (int) Math.min(rawArray.length, newArray.size())); arr.close(); return newArray; } }
@Override public void reset() { // shrink list of pages if (bytes.size() > PageCacheRecycler.PAGE_SIZE_IN_BYTES) { bytes = bigArrays.resize(bytes, PageCacheRecycler.PAGE_SIZE_IN_BYTES); } // go back to start count = 0; }
// Pure pass-through: fills [fromIndex, toIndex) of the delegate with value.
@Override public void fill(long fromIndex, long toIndex, byte value) { in.fill(fromIndex, toIndex, value); }
@Override public BytesRef toBytesRef() { BytesRef bref = new BytesRef(); // if length <= pagesize this will dereference the page, or materialize the byte[] byteArray.get(offset, length, bref); return bref; }
// Merges other's otherBucket into this sketch's thisBucket. Both sketches
// must share the same precision p. If the other bucket is still in linear
// counting, each stored encoded hash is replayed into this bucket using
// whichever representation this bucket currently has. If the other bucket is
// already HLL, this bucket is first upgraded (if needed) and then each of the
// m registers takes the max of the two run lengths.
public void merge(long thisBucket, HyperLogLogPlusPlus other, long otherBucket) { if (p != other.p) { throw new IllegalArgumentException(); } ensureCapacity(thisBucket + 1); if (other.algorithm.get(otherBucket) == LINEAR_COUNTING) { final IntArray values = other.hashSet.values(otherBucket); try { for (long i = 0; i < values.size(); ++i) { final int encoded = values.get(i); if (algorithm.get(thisBucket) == LINEAR_COUNTING) { collectLcEncoded(thisBucket, encoded); } else { collectHllEncoded(thisBucket, encoded); } } } finally { Releasables.close(values); } } else { if (algorithm.get(thisBucket) != HYPERLOGLOG) { upgradeToHll(thisBucket); } final long thisStart = thisBucket << p; final long otherStart = otherBucket << p; for (int i = 0; i < m; ++i) { runLens.set(thisStart + i, (byte) Math.max(runLens.get(thisStart + i), other.runLens.get(otherStart + i))); } } }
@Override public void writeBytes(byte[] b, int offset, int length) { // nothing to copy if (length == 0) { return; } // illegal args: offset and/or length exceed array size if (b.length < (offset + length)) { throw new IllegalArgumentException("Illegal offset " + offset + "/length " + length + " for byte[] of length " + b.length); } // get enough pages for new size ensureCapacity(((long) count) + length); // bulk copy bytes.set(count, b, offset, length); // advance count += length; }
// For a paged BigByteArray the resize happens in place; otherwise a new array
// is allocated, the overlapping prefix is copied, and the old one is closed.
/** Resize the array to the exact provided size. */ public ByteArray resize(ByteArray array, long size) { if (array instanceof BigByteArray) { return resizeInPlace((BigByteArray) array, size); } else { AbstractArray arr = (AbstractArray) array; final ByteArray newArray = newByteArray(size, arr.clearOnResize); final byte[] rawArray = ((ByteArrayWrapper) array).array; newArray.set(0, rawArray, 0, (int) Math.min(rawArray.length, newArray.size())); arr.close(); return newArray; } }
/**
 * Grow an array to a size that is larger than {@code minSize}, preserving
 * content, and potentially reusing part of the provided array.
 */
public ByteArray grow(ByteArray array, long minSize) {
    if (array.size() >= minSize) {
        return array; // already large enough
    }
    // Over-allocate to amortize the cost of repeated grows, then resize exactly.
    final long target = overSize(minSize, PageCacheRecycler.BYTE_PAGE_SIZE, 1);
    return resize(array, target);
}
// Promotes a bucket from the linear-counting representation (a hash set of
// encoded hashes) to dense HLL registers: zeroes the bucket's m registers,
// replays every previously collected encoded value into them, then flips the
// per-bucket algorithm bit. The values iterator is released even on failure.
void upgradeToHll(long bucket) { ensureCapacity(bucket + 1); final IntArray values = hashSet.values(bucket); try { runLens.fill(bucket << p, (bucket << p) + m, (byte) 0); for (long i = 0; i < values.size(); ++i) { final int encoded = values.get(i); collectHllEncoded(bucket, encoded); } algorithm.set(bucket); } finally { Releasables.close(values); } }