/**
 * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the
 * docCounts needs to be re-sized: the caller guarantees {@code bucketOrd} is already within
 * the current capacity of {@code docCounts}.
 */
public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
    // Count the doc against the bucket, then propagate it to sub-aggregations
    // under the same bucket ordinal.
    docCounts.increment(bucketOrd, 1);
    subCollector.collect(doc, bucketOrd);
}
/**
 * Utility method to increment the doc counts of the given bucket (identified by the bucket
 * ordinal) by {@code inc}. Unlike {@code collectExistingBucket} this first ensures the
 * backing array is large enough, so it is safe for never-before-seen ordinals.
 */
public final void incrementBucketDocCount(long bucketOrd, int inc) {
    // Ensure capacity for index bucketOrd before touching the counter.
    docCounts = bigArrays.grow(docCounts, bucketOrd + 1);
    docCounts.increment(bucketOrd, inc);
}
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    if (singleValues.advanceExact(doc) == false) {
        // Document has no value for this field; nothing to count.
        return;
    }
    // +1 shifts segment ordinals up by one — slot 0 presumably reserved
    // (e.g. for docs without a value); same shift as the sibling collectors.
    segmentDocCounts.increment(singleValues.ordValue() + 1, 1);
}
};
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    if (segmentOrds.advanceExact(doc) == false) {
        return; // no values for this doc
    }
    // Walk every ordinal of this (multi-valued) doc.
    long ord = segmentOrds.nextOrd();
    while (ord != NO_MORE_ORDS) {
        // +1 shift keeps slot 0 free, matching the single-valued collector.
        segmentDocCounts.increment(ord + 1, 1);
        ord = segmentOrds.nextOrd();
    }
}
};
if (topSlot != null) { docCounts.increment(topSlot, 1); return topSlot;
/**
 * Rebuilds {@code docCounts} after a bucket merge: every old ordinal {@code i} has its
 * count moved to {@code mergeMap[i]} in a freshly allocated array of {@code newNumBuckets}
 * slots. The old array is released via try-with-resources.
 */
public final void mergeBuckets(long[] mergeMap, long newNumBuckets) {
    try (IntArray oldCounts = docCounts) {
        docCounts = bigArrays.newIntArray(newNumBuckets, true);
        docCounts.fill(0, newNumBuckets, 0);
        for (int oldOrd = 0; oldOrd < oldCounts.size(); oldOrd++) {
            final int count = oldCounts.get(oldOrd);
            if (count != 0) {
                // Several old ordinals may map to the same new one, hence increment.
                docCounts.increment(mergeMap[oldOrd], count);
            }
        }
    }
}
/** * Add <code>k</code> to the hash table associated with <code>bucket</code>. * Return {@code -1} if the value was already in the set or the new set size if it was added. */ public int add(long bucket, int k) { sizes = bigArrays.grow(sizes, bucket + 1); assert k != 0; for (int i = (k & mask); ; i = (i + 1) & mask) { final int v = get(bucket, i); if (v == 0) { // means unused, take it! set(bucket, i, k); return sizes.increment(bucket, 1); } else if (v == k) { // k is already in the set return -1; } } }
/**
 * Utility method to increment the doc counts of the given bucket (identified by the bucket
 * ordinal) by {@code inc}. Unlike {@code collectExistingBucket} this first ensures the
 * backing array is large enough, so it is safe for never-before-seen ordinals.
 */
public final void incrementBucketDocCount(long bucketOrd, int inc) {
    // Ensure capacity for index bucketOrd before touching the counter.
    docCounts = bigArrays.grow(docCounts, bucketOrd + 1);
    docCounts.increment(bucketOrd, inc);
}
@Override
public int increment(long index, int inc) {
    // Pure delegation to the wrapped array; no extra bookkeeping here.
    final int updated = in.increment(index, inc);
    return updated;
}
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    // NOTE(review): getOrd presumably returns -1 for docs without a value, so the
    // +1 shift maps them onto slot 0 — confirm against the doc-values API in use.
    final int ord = singleValues.getOrd(doc);
    segmentDocCounts.increment(ord + 1, 1);
}
};
/**
 * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the
 * docCounts needs to be re-sized: the caller guarantees {@code bucketOrd} is already within
 * the current capacity of {@code docCounts}.
 */
public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
    // Count the doc against the bucket, then propagate it to sub-aggregations
    // under the same bucket ordinal.
    docCounts.increment(bucketOrd, 1);
    subCollector.collect(doc, bucketOrd);
}
/**
 * Utility method to increment the doc counts of the given bucket (identified by the bucket
 * ordinal) by {@code inc}. Unlike {@code collectExistingBucket} this first ensures the
 * backing array is large enough, so it is safe for never-before-seen ordinals.
 */
public final void incrementBucketDocCount(long bucketOrd, int inc) {
    // Ensure capacity for index bucketOrd before touching the counter.
    docCounts = bigArrays.grow(docCounts, bucketOrd + 1);
    docCounts.increment(bucketOrd, inc);
}
/**
 * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the
 * docCounts needs to be re-sized: the caller guarantees {@code bucketOrd} is already within
 * the current capacity of {@code docCounts}.
 */
public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
    // Count the doc against the bucket, then propagate it to sub-aggregations
    // under the same bucket ordinal.
    docCounts.increment(bucketOrd, 1);
    subCollector.collect(doc, bucketOrd);
}
/**
 * Utility method to increment the doc counts of the given bucket (identified by the bucket
 * ordinal) by {@code inc}. Unlike {@code collectExistingBucket} this first ensures the
 * backing array is large enough, so it is safe for never-before-seen ordinals.
 */
public final void incrementBucketDocCount(long bucketOrd, int inc) {
    // Ensure capacity for index bucketOrd before touching the counter.
    docCounts = bigArrays.grow(docCounts, bucketOrd + 1);
    docCounts.increment(bucketOrd, inc);
}
/**
 * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the
 * docCounts needs to be re-sized: the caller guarantees {@code bucketOrd} is already within
 * the current capacity of {@code docCounts}.
 */
public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
    // Count the doc against the bucket, then propagate it to sub-aggregations
    // under the same bucket ordinal.
    docCounts.increment(bucketOrd, 1);
    subCollector.collect(doc, bucketOrd);
}
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    if (singleValues.advanceExact(doc) == false) {
        // Document has no value for this field; nothing to count.
        return;
    }
    // +1 shifts segment ordinals up by one — slot 0 presumably reserved
    // (e.g. for docs without a value); same shift as the sibling collectors.
    segmentDocCounts.increment(singleValues.ordValue() + 1, 1);
}
};
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    // Position the ordinals view on this doc (legacy multi-valued API).
    segmentOrds.setDocument(doc);
    final int numOrds = segmentOrds.cardinality();
    for (int i = 0; i < numOrds; i++) {
        final long segmentOrd = segmentOrds.ordAt(i);
        // +1 shift: slot 0 presumably reserved — TODO confirm against the
        // other per-segment collectors that use the same offset.
        segmentDocCounts.increment(segmentOrd + 1, 1);
    }
}
};
@Override
public void collect(int doc, long bucket) throws IOException {
    assert bucket == 0;
    if (segmentOrds.advanceExact(doc) == false) {
        return; // no values for this doc
    }
    // Walk every ordinal of this (multi-valued) doc.
    long ord = segmentOrds.nextOrd();
    while (ord != NO_MORE_ORDS) {
        // +1 shift keeps slot 0 free, matching the single-valued collector.
        segmentDocCounts.increment(ord + 1, 1);
        ord = segmentOrds.nextOrd();
    }
}
};
@Override
protected void existingParent(long parentIdx) throws IOException {
    // Keep only the best (maximum) child score seen for this parent.
    final float score = scorer.score();
    if (score > scores.get(parentIdx)) {
        scores.set(parentIdx, score);
    }
    // One more matching child for this parent.
    occurrences.increment(parentIdx, 1);
}
}
/**
 * Rebuilds {@code docCounts} after a bucket merge: every old ordinal {@code i} has its
 * count moved to {@code mergeMap[i]} in a freshly allocated array of {@code newNumBuckets}
 * slots. The old array is released via try-with-resources.
 */
public final void mergeBuckets(long[] mergeMap, long newNumBuckets) {
    try (IntArray oldCounts = docCounts) {
        docCounts = bigArrays.newIntArray(newNumBuckets, true);
        docCounts.fill(0, newNumBuckets, 0);
        for (int oldOrd = 0; oldOrd < oldCounts.size(); oldOrd++) {
            final int count = oldCounts.get(oldOrd);
            if (count != 0) {
                // Several old ordinals may map to the same new one, hence increment.
                docCounts.increment(mergeMap[oldOrd], count);
            }
        }
    }
}