@Override
protected void removeAndAdd(long index) {
    // Called by remove() for every used slot that follows a freed slot:
    // take the entry out and insert it again so its probe chain no longer
    // crosses the hole left by the removal.
    final long key = keys.get(index);
    final T value = values.set(index, null);
    --size; // set() below increments size again on re-insertion
    final T removed = set(key, value);
    assert removed == null;
}
@Override
public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx) {
    // Drop every cached per-bucket leaf collector: they were built against
    // the previous segment and must be recreated for this reader context.
    for (long i = 0; i < collectors.size(); ++i) {
        collectors.set(i, null);
    }
    // The snippet was truncated here; the method continues by returning a
    // fresh LeafBucketCollector whose collect(doc, bucket) body is the
    // per-bucket dispatch shown further below.
    return new LeafBucketCollector() {
        Scorer scorer;

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            this.scorer = scorer;
        }

        @Override
        public void collect(int doc, long bucket) throws IOException {
            // see the collect(int doc, long bucket) snippet below
        }
    };
}
private T set(long key, T value) {
    if (value == null) {
        throw new IllegalArgumentException("Null values are not supported");
    }
    for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
        final T previous = values.set(i, value);
        if (previous == null) {
            // slot was free
            keys.set(i, key);
            ++size;
            return null;
        } else if (key == keys.get(i)) {
            // we just updated the value
            return previous;
        } else {
            // not the right key, repair and continue
            values.set(i, previous);
        }
    }
}
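The loop above is classic open addressing with linear probing: a key hashes to a home slot and walks forward until it finds either a free slot or its own key. A minimal self-contained sketch of the same insert (a hypothetical class, plain arrays instead of BigArrays, identity hashing, and assuming the table is grown before it ever fills up, as the real class does):

final class ProbingMapSketch<T> {
    private final long[] keys;
    private final Object[] values;
    private final long mask; // capacity - 1; capacity must be a power of two
    private long size;

    ProbingMapSketch(int capacity) {
        keys = new long[capacity];
        values = new Object[capacity];
        mask = capacity - 1;
    }

    @SuppressWarnings("unchecked")
    T set(long key, T value) {
        for (long i = key & mask; ; i = (i + 1) & mask) { // wrap around the table
            final int slot = (int) i;
            if (values[slot] == null) {      // free slot: insert
                keys[slot] = key;
                values[slot] = value;
                ++size;
                return null;
            } else if (keys[slot] == key) {  // same key: replace the value
                final T previous = (T) values[slot];
                values[slot] = value;
                return previous;
            }
            // occupied by another key: probe the next slot
        }
    }
}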
MultiBucketAggregatorWrapper(BigArrays bigArrays, SearchContext context, Aggregator parent,
        AggregatorFactory<?> factory, Aggregator first) {
    this.bigArrays = bigArrays;
    this.parent = parent;
    this.factory = factory;
    this.first = first;
    context.addReleasable(this, Lifetime.PHASE);
    aggregators = bigArrays.newObjectArray(1);
    aggregators.set(0, first);
    collectors = bigArrays.newObjectArray(1);
}
public void collect(int docId, long parentBucket) throws IOException {
    perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1);
    // Index with the long ordinal directly; casting it to int could
    // silently truncate large bucket ordinals.
    PerParentBucketSamples sampler = perBucketSamples.get(parentBucket);
    if (sampler == null) {
        sampler = new PerParentBucketSamples(parentBucket, currentScorer, readerContext);
        perBucketSamples.set(parentBucket, sampler);
    }
    sampler.collect(docId);
    maxDocId = Math.max(maxDocId, docId);
}
/** Resize the array to the exact provided size. */
public <T> ObjectArray<T> resize(ObjectArray<T> array, long size) {
    if (array instanceof BigObjectArray) {
        return resizeInPlace((BigObjectArray<T>) array, size);
    } else {
        final ObjectArray<T> newArray = newObjectArray(size);
        for (long i = 0, end = Math.min(size, array.size()); i < end; ++i) {
            newArray.set(i, array.get(i));
        }
        array.close();
        return newArray;
    }
}
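A hedged usage sketch of the contract: resize() either adjusts the backing pages in place or allocates a fresh array, copies min(oldSize, newSize) elements, and releases the old array, so callers must hold on to only the returned reference. Here bigArrays is assumed to be any existing BigArrays instance (e.g. BigArrays.NON_RECYCLING_INSTANCE in tests):

ObjectArray<String> array = bigArrays.newObjectArray(4);
array.set(0, "a");
array = bigArrays.resize(array, 2);  // shrink: elements at indices >= 2 are dropped
array = bigArrays.resize(array, 16); // grow: new slots read back as null
assert "a".equals(array.get(0)) && array.get(10) == null;
array.close(); // BigArrays allocations are Releasable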
@Override
void copyCurrent(int slot) {
    values = bigArrays.grow(values, slot + 1);
    valueBuilders = bigArrays.grow(valueBuilders, slot + 1);
    BytesRefBuilder builder = valueBuilders.get(slot);
    int byteSize = builder == null ? 0 : builder.bytes().length;
    if (builder == null) {
        builder = new BytesRefBuilder();
        valueBuilders.set(slot, builder);
    }
    if (missingBucket && currentValue == null) {
        values.set(slot, null);
    } else {
        assert currentValue != null;
        builder.copyBytes(currentValue);
        // charge only the growth of the backing buffer to the circuit breaker
        breakerConsumer.accept(builder.bytes().length - byteSize);
        values.set(slot, builder.get());
    }
}
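The byteSize bookkeeping exists so that overwriting a slot charges the breaker only for new allocation, not for reused buffer space. A small sketch of the BytesRefBuilder behaviour this relies on (Lucene API; the delta is what copyCurrent would pass to breakerConsumer):

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

BytesRefBuilder builder = new BytesRefBuilder();
builder.copyBytes(new BytesRef("first"));
int before = builder.bytes().length;                     // current backing-buffer size
builder.copyBytes(new BytesRef("a much longer value"));  // buffer may have to grow
int delta = builder.bytes().length - before;             // 0 if the buffer was reused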
/**
 * Remove the entry which has this key in the hash table and return the
 * associated value or null if there was no entry associated with this key.
 */
public T remove(long key) {
    for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
        final T previous = values.set(i, null);
        if (previous == null) {
            return null;
        } else if (keys.get(i) == key) {
            --size;
            for (long j = nextSlot(i, mask); used(j); j = nextSlot(j, mask)) {
                removeAndAdd(j);
            }
            return previous;
        } else {
            // repair and continue
            values.set(i, previous);
        }
    }
}
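The inner re-insertion loop is what keeps linear probing correct after a deletion. Worked example with capacity 8 and identity hashing: key 8 hashes to slot 0, and key 16, colliding, probes forward to slot 1. Removing key 8 empties slot 0; a later lookup of key 16 would start at slot 0, see an empty slot, and wrongly conclude the key is absent. Re-adding every used entry after the hole (removeAndAdd above) moves key 16 back into slot 0 and keeps it reachable.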
@Override
public void collect(int doc, long bucket) throws IOException {
    collectors = bigArrays.grow(collectors, bucket + 1);
    LeafBucketCollector collector = collectors.get(bucket);
    if (collector == null) {
        aggregators = bigArrays.grow(aggregators, bucket + 1);
        Aggregator aggregator = aggregators.get(bucket);
        if (aggregator == null) {
            aggregator = factory.create(parent, true);
            aggregator.preCollection();
            aggregators.set(bucket, aggregator);
        }
        collector = aggregator.getLeafCollector(ctx);
        if (scorer != null) {
            // Passing a null scorer can cause an unexpected NPE later on,
            // at a point that can't easily be traced back to the fact that
            // a null scorer was supplied here.
            collector.setScorer(scorer);
        }
        collectors.set(bucket, collector);
    }
    collector.collect(doc, 0);
}
@Override
public void collect(int doc, long bucketOrd) throws IOException {
    visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1);
    FixedBitSet bits = visitedOrds.get(bucketOrd);
    if (bits == null) {
        bits = new FixedBitSet(maxOrd);
        visitedOrds.set(bucketOrd, bits);
    }
    if (values.advanceExact(doc)) {
        for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
            bits.set((int) ord);
        }
    }
}
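After collection, each bucket's distinct-value count is simply the number of set bits in its FixedBitSet. A minimal sketch of the behaviour this relies on (Lucene API; maxOrd of 8 is an arbitrary example value):

import org.apache.lucene.util.FixedBitSet;

FixedBitSet bits = new FixedBitSet(8); // one bit per ordinal in the segment
bits.set(3);
bits.set(5);
bits.set(3);                           // re-visiting an ordinal is idempotent
assert bits.cardinality() == 2;        // two distinct ordinals observed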
@Override
public void collect(int doc, long bucket) throws IOException {
    states = bigArrays.grow(states, bucket + 1);
    TDigestState state = states.get(bucket);
    if (state == null) {
        state = new TDigestState(compression);
        states.set(bucket, state);
    }
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.add(values.nextValue());
        }
    }
}
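TDigestState wraps the t-digest sketch, so each bucket accumulates values in sublinear space and can answer quantile queries afterwards. A hedged sketch using the underlying t-digest library directly (this assumes the com.tdunning:t-digest dependency; TDigestState itself is an internal wrapper around it):

import com.tdunning.math.stats.TDigest;

TDigest digest = TDigest.createAvlTreeDigest(100.0); // compression = 100
for (double v : new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }) {
    digest.add(v);
}
double median = digest.quantile(0.5); // approximately 5.5 for this data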
@Override
public void collect(int doc, long bucket) throws IOException {
    valueSketches = bigArrays.grow(valueSketches, bucket + 1);
    TDigestState valueSketch = valueSketches.get(bucket);
    if (valueSketch == null) {
        valueSketch = new TDigestState(compression);
        valueSketches.set(bucket, valueSketch);
    }
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            final double value = values.nextValue();
            valueSketch.add(value);
        }
    }
}
@Override
public void collect(int doc, long bucket) throws IOException {
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as the data
        // range increases. Resize operations should be rare as the histogram
        // buckets are exponential (on the top level). In the future we could
        // expose the range as an option on the request so the histogram can
        // be fixed at initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.recordValue(values.nextValue());
        }
    }
}
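For reference, a hedged sketch of the HdrHistogram API used above: three significant value digits bounds the relative error of recorded values at roughly 0.1%, and auto-resize lets the tracked range grow on demand instead of being fixed at construction:

import org.HdrHistogram.DoubleHistogram;

DoubleHistogram histogram = new DoubleHistogram(3); // 3 significant value digits
histogram.setAutoResize(true);   // let the covered range grow with the data
histogram.recordValue(12.5);
histogram.recordValue(250.0);
histogram.recordValue(10_000.0);
double p99 = histogram.getValueAtPercentile(99.0);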