/**
 * Stores {@code id} for the slot at {@code index} and returns the id previously
 * held there. Ids are persisted shifted by +1 so that a stored 0 means "empty",
 * which is why the new value is written as {@code id + 1} and the old one is
 * returned as {@code previous - 1} (-1 therefore signals a previously free slot).
 */
protected final long id(long index, long id) {
    final long previous = ids.set(index, id + 1);
    return previous - 1;
}
/**
 * Persists the value currently being visited into the given slot, first making
 * sure the backing big-array is large enough to address it.
 */
@Override
void copyCurrent(int slot) {
    // Grow returns the (possibly reallocated) array; always reassign.
    values = bigArrays.grow(values, slot + 1);
    values.set(slot, currentValue);
}
/**
 * Records {@code key} at position {@code id}, growing the keys array on demand.
 */
private void append(long id, long key) {
    // Grow may reallocate; keep the returned reference.
    keys = bigArrays.grow(keys, id + 1);
    keys.set(id, key);
}
/**
 * Frees the entry at {@code index} and re-inserts its key. The slot is emptied by
 * writing id -1 (stored internally as 0, the "free" marker); the displaced key is
 * then handed to {@link #reset} together with its id.
 */
@Override
protected void removeAndAdd(long index) {
    final long removedId = id(index, -1);
    assert removedId >= 0;
    final long removedKey = keys.set(removedId, 0);
    reset(removedKey, removedId);
}
/**
 * Sets ({@code bit == true}) or clears the bit at {@code index} inside the
 * growable array of 64-bit words.
 */
private void fill(int index, boolean bit) {
    final int word = index >> 6; // 64 bits per long word
    bits = bigArrays.grow(bits, word + 1);
    // Java's long shift count is implicitly taken mod 64, so this selects
    // bit (index % 64) within the word.
    final long mask = 1L << index;
    if (bit) {
        bits.set(word, bits.get(word) | mask);
    } else {
        bits.set(word, bits.get(word) & ~mask);
    }
}
// Persists the current composite value into `slot`, tracking "missing" values
// via the `bits` set when missingBucket is enabled.
// Invariant enforced by the assert below: missingCurrentValue can only be true
// when missingBucket is true.
@Override void copyCurrent(int slot) {
    values = bigArrays.grow(values, slot+1);
    if (missingBucket && missingCurrentValue) {
        // Current value is "missing": clear the presence bit, store nothing.
        bits.clear(slot);
    } else {
        assert missingCurrentValue == false;
        if (missingBucket) {
            // Mark the slot as holding a real (non-missing) value.
            bits.set(slot);
        }
        values.set(slot, currentValue);
    }
}
/** Resize the array to the exact provided size. */
public LongArray resize(LongArray array, long size) {
    // Paged implementations can be resized without copying every element.
    if (array instanceof BigLongArray) {
        return resizeInPlace((BigLongArray) array, size);
    }
    // Otherwise allocate a fresh array and copy the overlapping prefix.
    final AbstractArray impl = (AbstractArray) array;
    final LongArray copy = newLongArray(size, impl.clearOnResize);
    final long limit = Math.min(size, array.size());
    for (long i = 0; i < limit; ++i) {
        copy.set(i, array.get(i));
    }
    array.close();
    return copy;
}
// Builds a hash table that maps BytesRef keys to dense long ids.
// All backing arrays start sized for `capacity` keys and grow on demand.
public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // startOffsets[i] is where key i's bytes begin in `bytes`; entry 0 is the
    // 0 sentinel so the first key starts at offset 0.
    startOffsets = bigArrays.newLongArray(capacity + 1, false);
    startOffsets.set(0, 0);
    // NOTE(review): 3 bytes per key looks like an initial sizing heuristic —
    // the array is grown as keys are appended; confirm against append().
    bytes = bigArrays.newByteArray(capacity * 3, false);
    // One cached hash code per id.
    hashes = bigArrays.newIntArray(capacity, false);
    spare = new BytesRef();
}
// Inserts or updates `key` -> `value` using open addressing with probing.
// Returns the previous value for the key, or null if the key was newly added.
// The loop optimistically writes `value` into each probed slot and then decides
// from the displaced previous value whether the slot was free, belonged to this
// key, or belonged to another key (in which case the write is undone).
private T set(long key, T value) {
    if (value == null) {
        throw new IllegalArgumentException("Null values are not supported");
    }
    for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
        final T previous = values.set(i, value);
        if (previous == null) {
            // slot was free
            keys.set(i, key);
            ++size;
            return null;
        } else if (key == keys.get(i)) {
            // we just updated the value
            return previous;
        } else {
            // not the right key, repair and continue
            values.set(i, previous);
        }
    }
}
// Appends the bytes of `key` after the last stored key and records its hash
// `code`. Ids are assigned densely, hence the precondition that `id` equals the
// current size.
private void append(long id, BytesRef key, int code) {
    assert size == id;
    final long startOffset = startOffsets.get(size);
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // startOffsets[size + 1] marks the end of this key / start of the next one.
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}
// Fragment of an enclosing method not visible here: stores the accumulated
// total term frequency for the term ordinal currently being processed.
// NOTE(review): confirm currentTermOrd is within termsTotalFreqs' bounds at the call site.
termsTotalFreqs.set(currentTermOrd, currentTotalTermFreq);
// Fragment of an enclosing method not visible here: stores the encoded centroid
// point for this bucket.
// NOTE(review): argument order suggests pt = [lon, lat] (encodeLatLon(pt[1], pt[0])) —
// verify against the caller and InternalGeoCentroid.encodeLatLon's signature.
centroids.set(bucket, InternalGeoCentroid.encodeLatLon(pt[1], pt[0]));
// Persists the value currently being visited into `slot`, growing the backing
// big-array first so the slot is addressable.
@Override void copyCurrent(int slot) {
    values = bigArrays.grow(values, slot+1);
    values.set(slot, currentValue);
}
// Finalizes cardinality collection: (1) unions the per-bucket ordinal bitsets so
// each distinct term is hashed exactly once, (2) caches a 64-bit murmur hash per
// ordinal, (3) replays each bucket's ordinals into the `counts` sketch using the
// cached hashes.
@Override
public void postCollect() throws IOException {
    // Union of every ordinal seen in any bucket.
    final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
    for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
        final FixedBitSet bits = visitedOrds.get(bucket);
        if (bits != null) {
            allVisitedOrds.or(bits);
        }
    }
    final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128();
    try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
        // Hash each visited term's bytes once, keyed by ordinal. The loop uses
        // nextSetBit only while ord + 1 < maxOrd to avoid an out-of-range query.
        for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
            final BytesRef value = values.lookupOrd(ord);
            org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
            hashes.set(ord, hash.h1); // only the low 64 bits of the 128-bit hash are kept
        }
        // Second pass: feed every bucket's ordinals into the cardinality counter.
        for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
            final FixedBitSet bits = visitedOrds.get(bucket);
            if (bits != null) {
                for (int ord = bits.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
                    counts.collect(bucket, hashes.get(ord));
                }
            }
        }
    }
}
// Frees the entry at `index` (id -1 marks the slot empty) and re-inserts the
// displaced key/id pair via reset().
@Override protected void removeAndAdd(long index) {
    final long id = id(index, -1);
    assert id >= 0;
    final long key = keys.set(id, 0);
    reset(key, id);
}
// Empties the slot at `index` by writing id -1, then hands the displaced key and
// its id to reset() for re-insertion.
@Override protected void removeAndAdd(long index) {
    final long id = id(index, -1);
    assert id >= 0; // the slot must have been occupied
    final long key = keys.set(id, 0);
    reset(key, id);
}
// Removes the entry stored at `index` and re-adds its key: id(index, -1) frees
// the slot and returns the old id, whose key is then passed to reset().
@Override protected void removeAndAdd(long index) {
    final long id = id(index, -1);
    assert id >= 0;
    final long key = keys.set(id, 0); // clear the key slot, keeping the old key
    reset(key, id);
}
// Sets (bit == true) or clears the bit at `index` in the growable array of
// 64-bit words.
private void fill(int index, boolean bit) {
    int wordNum = index >> 6; // 64 bits per long word
    bits = bigArrays.grow(bits,wordNum+1);
    long bitmask = 1L << index; // long shift count is implicitly mod 64
    long value = bit ? bits.get(wordNum) | bitmask : bits.get(wordNum) & ~bitmask;
    bits.set(wordNum, value);
}
// Constructs a BytesRef -> dense id hash table; backing arrays are sized for
// `capacity` keys up front and grown lazily afterwards.
public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // Offsets into `bytes`, one per key plus a leading 0 sentinel.
    startOffsets = bigArrays.newLongArray(capacity + 1, false);
    startOffsets.set(0, 0);
    // NOTE(review): initial estimate of 3 bytes per key — grown on demand; confirm.
    bytes = bigArrays.newByteArray(capacity * 3, false);
    // Per-id cached hash codes.
    hashes = bigArrays.newIntArray(capacity, false);
    spare = new BytesRef();
}
// Writes `key`'s bytes immediately after the previously stored key and records
// its hash `code` under `id`. Ids must be dense: `id` equals the current size.
private void append(long id, BytesRef key, int code) {
    assert size == id;
    final long startOffset = startOffsets.get(size);
    bytes = bigArrays.grow(bytes, startOffset + key.length);
    bytes.set(startOffset, key.bytes, key.offset, key.length);
    // Record where the next key will start (== end of this one).
    startOffsets = bigArrays.grow(startOffsets, size + 2);
    startOffsets.set(size + 1, startOffset + key.length);
    hashes = bigArrays.grow(hashes, id + 1);
    hashes.set(id, code);
}