/** Appends a single byte at the current write position and advances it by one. */
@Override
public void writeByte(byte b) throws IOException {
    // Long arithmetic so capacity growth cannot overflow int.
    final long required = count + 1L;
    ensureCapacity(required);
    bytes.set(count, b);
    count += 1;
}
/**
 * Updates one HLL register for the given bucket, keeping the maximum
 * run length observed so far for that register.
 */
private void collectHll(long bucket, long index, int runLen) {
    // Registers of bucket b occupy the contiguous range [b << p, (b+1) << p).
    final long registerIndex = (bucket << p) + index;
    final int current = runLens.get(registerIndex);
    final int max = runLen > current ? runLen : current;
    runLens.set(registerIndex, (byte) max);
}
/**
 * Stores {@code value} as four raw bytes at the given bucket/index slot,
 * using the shared scratch buffer to serialize the int.
 */
private void set(long bucket, int index, int value) {
    writeSpare.putInt(0, value);
    final byte[] scratch = writeSpare.array();
    runLens.set(index(bucket, index), scratch, 0, Integer.BYTES);
}
@Override public void writeBytes(byte[] b, int offset, int length) { // nothing to copy if (length == 0) { return; } // illegal args: offset and/or length exceed array size if (b.length < (offset + length)) { throw new IllegalArgumentException("Illegal offset " + offset + "/length " + length + " for byte[] of length " + b.length); } // get enough pages for new size ensureCapacity(((long) count) + length); // bulk copy bytes.set(count, b, offset, length); // advance count += length; }
/**
 * Resize the array to the exact provided size.
 *
 * Ownership of {@code array} transfers to this call: on the copy path the
 * original is closed and a new instance is returned; callers must use the
 * returned handle and must not touch {@code array} afterwards.
 */
public ByteArray resize(ByteArray array, long size) {
    if (array instanceof BigByteArray) {
        // Paged implementation can be resized in place without a full copy.
        return resizeInPlace((BigByteArray) array, size);
    } else {
        AbstractArray arr = (AbstractArray) array;
        // Allocate a replacement, preserving the clear-on-resize flag.
        final ByteArray newArray = newByteArray(size, arr.clearOnResize);
        final byte[] rawArray = ((ByteArrayWrapper) array).array;
        // Copy the overlapping prefix; min() covers both grow and shrink.
        newArray.set(0, rawArray, 0, (int) Math.min(rawArray.length, newArray.size()));
        arr.close();
        return newArray;
    }
}
// Appends a new key at slot {@code id}, growing the three parallel big arrays
// (bytes, startOffsets, hashes) as needed. Keys must be appended in id order:
// each key's bytes start exactly where the previous key's bytes ended.
private void append(long id, BytesRef key, int code) {
    assert size == id;
    final long startOffset = startOffsets.get(size);
    // Grow the byte store and copy the key's bytes at its start offset.
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // Record the end offset (which is also the next key's start); slot size+1
    // must exist, hence growing to size + 2.
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    // Store the key's hash code alongside — presumably to avoid recomputing
    // it on lookup/rehash; confirm against callers.
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}
/**
 * Bulk write: copies {@code len} bytes from {@code buf} starting at
 * {@code offset} into the wrapped array at {@code index}. Pure delegation.
 */
@Override
public void set(long index, byte[] buf, int offset, int len) {
    in.set(index, buf, offset, len);
}
/**
 * Merges the state of {@code other}'s bucket into this sketch's bucket.
 * Both sketches must share the same precision {@code p}.
 *
 * @throws IllegalArgumentException if the precisions differ (incompatible
 *         register layouts cannot be merged)
 */
public void merge(long thisBucket, HyperLogLogPlusPlus other, long otherBucket) {
    if (p != other.p) {
        throw new IllegalArgumentException();
    }
    ensureCapacity(thisBucket + 1);
    if (other.algorithm.get(otherBucket) == LINEAR_COUNTING) {
        // Other bucket is in sparse (linear counting) mode: replay each of its
        // encoded hashes into this bucket using whichever mode this bucket is
        // currently in. Note the mode is re-read per element, since collecting
        // may upgrade this bucket mid-loop.
        final IntArray values = other.hashSet.values(otherBucket);
        try {
            for (long i = 0; i < values.size(); ++i) {
                final int encoded = values.get(i);
                if (algorithm.get(thisBucket) == LINEAR_COUNTING) {
                    collectLcEncoded(thisBucket, encoded);
                } else {
                    collectHllEncoded(thisBucket, encoded);
                }
            }
        } finally {
            // values is a releasable view; always free it.
            Releasables.close(values);
        }
    } else {
        // Other bucket is dense HLL: promote this bucket if needed, then take
        // the register-wise maximum of run lengths (the standard HLL merge).
        if (algorithm.get(thisBucket) != HYPERLOGLOG) {
            upgradeToHll(thisBucket);
        }
        final long thisStart = thisBucket << p;
        final long otherStart = otherBucket << p;
        for (int i = 0; i < m; ++i) {
            runLens.set(thisStart + i, (byte) Math.max(runLens.get(thisStart + i), other.runLens.get(otherStart + i)));
        }
    }
}
/**
 * Deserializes a single-bucket sketch previously written to the stream.
 * Wire format: vInt precision, boolean mode flag, then either
 * (vLong count + that many int-encoded hashes) for linear counting, or
 * m run-length bytes for dense HLL. Field order must match the writer.
 */
public static HyperLogLogPlusPlus readFrom(StreamInput in, BigArrays bigArrays) throws IOException {
    final int precision = in.readVInt();
    // One bucket only; all deserialized state goes into bucket 0.
    HyperLogLogPlusPlus counts = new HyperLogLogPlusPlus(precision, bigArrays, 1);
    final boolean algorithm = in.readBoolean();
    if (algorithm == LINEAR_COUNTING) {
        counts.algorithm.clear(0);
        // Sparse representation: a count followed by that many encoded hashes.
        final long size = in.readVLong();
        for (long i = 0; i < size; ++i) {
            final int encoded = in.readInt();
            counts.hashSet.add(0, encoded);
        }
    } else {
        counts.algorithm.set(0);
        // Dense representation: one run-length byte per register (m registers).
        for (int i = 0; i < counts.m; ++i) {
            counts.runLens.set(i, in.readByte());
        }
    }
    return counts;
}
/** Writes one byte at the current position, growing storage first. */
@Override
public void writeByte(byte b) throws IOException {
    ensureCapacity(count + 1L);  // long literal avoids int overflow
    bytes.set(count, b);
    ++count;
}
/** Appends {@code b} to the stream and bumps the write position. */
@Override
public void writeByte(byte b) throws IOException {
    final long needed = count + 1L;
    ensureCapacity(needed);
    bytes.set(count, b);
    count++;
}
/** Single-byte append: ensure room for one more byte, store it, advance. */
@Override
public void writeByte(byte b) throws IOException {
    ensureCapacity(count + 1L);
    bytes.set(count, b);
    count += 1;
}
/** Writes {@code value} as 4 raw bytes into the slot for (bucket, index). */
private void set(long bucket, int index, int value) {
    // Serialize through the shared scratch buffer, then bulk-copy its bytes.
    writeSpare.putInt(0, value);
    runLens.set(index(bucket, index), writeSpare.array(), 0, Integer.BYTES);
}
/** Stores a 32-bit value at the (bucket, index) slot as four bytes. */
private void set(long bucket, int index, int value) {
    writeSpare.putInt(0, value);
    final long slot = index(bucket, index);
    runLens.set(slot, writeSpare.array(), 0, 4);
}
/**
 * Appends a single byte at the current write position.
 *
 * Fix: use {@code count + 1L} (long arithmetic) instead of {@code count + 1}
 * so the capacity computation cannot wrap to a negative int before widening
 * when {@code count == Integer.MAX_VALUE}; this also matches the sibling
 * writeByte implementations in this file.
 */
@Override
public void writeByte(byte b) throws IOException {
    ensureCapacity(count + 1L);
    bytes.set(count, b);
    count++;
}
/**
 * Encodes {@code value} into the scratch buffer and copies its four bytes
 * into the backing byte array at the computed slot.
 */
private void set(long bucket, int index, int value) {
    writeSpare.putInt(0, value);
    byte[] raw = writeSpare.array();
    runLens.set(index(bucket, index), raw, 0, 4);
}
/**
 * Resize the array to the exact provided size.
 *
 * Takes ownership of {@code array}: when a copy is required the input is
 * closed and the replacement is returned, so callers must only use the
 * returned handle afterwards.
 */
public ByteArray resize(ByteArray array, long size) {
    if (array instanceof BigByteArray) {
        // Paged big array supports in-place resize (no element copy here).
        return resizeInPlace((BigByteArray) array, size);
    } else {
        AbstractArray arr = (AbstractArray) array;
        // Fresh array with the same clear-on-resize behavior as the original.
        final ByteArray newArray = newByteArray(size, arr.clearOnResize);
        final byte[] rawArray = ((ByteArrayWrapper) array).array;
        // Copy only the overlapping prefix (handles both grow and shrink).
        newArray.set(0, rawArray, 0, (int) Math.min(rawArray.length, newArray.size()));
        arr.close();
        return newArray;
    }
}
// Appends key bytes, end offset, and hash code for slot {@code id}, growing
// each of the three parallel arrays just before writing. Append-only and
// strictly sequential: the assert enforces ids arrive in order, since each
// key's byte range begins where the previous key's range ended.
private void append(long id, BytesRef key, int code) {
    assert size == id;
    final long startOffset = startOffsets.get(size);
    // Byte store: ensure room for this key, then copy it in.
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // Offsets: slot size+1 holds this key's end offset, so grow to size + 2.
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    // Hashes: cache the caller-supplied code for this id — presumably to
    // avoid recomputation on lookup; verify against callers.
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}
// Sequential append of (key bytes, end offset, hash) for slot {@code id}.
// The three backing arrays are grown lazily right before each write; field
// reassignment is required because grow() may return a new array instance.
private void append(long id, BytesRef key, int code) {
    assert size == id; // ids must be appended in order
    final long startOffset = startOffsets.get(size);
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // End offset of key id doubles as start offset of key id+1.
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}
// Writes the id-th key into the parallel big arrays: its bytes into the
// byte store, its end offset into startOffsets, its hash into hashes.
// Callers must append ids strictly in order (enforced by the assert),
// because offsets are computed cumulatively from the previous entry.
private void append(long id, BytesRef key, int code) {
    assert size == id;
    final long startOffset = startOffsets.get(size);
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // Grow to size + 2 so that slot size + 1 (this key's end) exists.
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}