/**
 * Creates a read-only view over the state captured from the given
 * {@link PagedBytes}: scalar parameters are copied and the block array is
 * snapshotted up to {@code numBlocks}.
 */
private Reader(PagedBytes pagedBytes) {
  blockBits = pagedBytes.blockBits;
  blockMask = pagedBytes.blockMask;
  blockSize = pagedBytes.blockSize;
  bytesUsedPerBlock = pagedBytes.bytesUsedPerBlock;
  blocks = ArrayUtil.copyOfSubArray(pagedBytes.blocks, 0, pagedBytes.numBlocks);
}
/**
 * Returns a copy of the first {@code docCount} cached scores, or {@code null}
 * when no docs were cached at all.
 */
float[] cachedScores() {
  if (docs == null) {
    return null;
  }
  return ArrayUtil.copyOfSubArray(scores, 0, docCount);
}
}
/**
 * Returns a copy of the first {@code docCount} cached doc ids, or {@code null}
 * when no docs were cached at all.
 */
int[] cachedDocs() {
  if (docs == null) {
    return null;
  }
  return ArrayUtil.copyOfSubArray(docs, 0, docCount);
}
/**
 * Builds the {@link BlendedTermQuery} from everything added so far.
 * Only the first {@code numTerms} entries of the internal parallel arrays
 * (terms, boosts, contexts) are live; each is copied so the query owns
 * its own state.
 */
public BlendedTermQuery build() {
  return new BlendedTermQuery(
      ArrayUtil.copyOfSubArray(terms, 0, numTerms),
      ArrayUtil.copyOfSubArray(boosts, 0, numTerms),
      ArrayUtil.copyOfSubArray(contexts, 0, numTerms),
      rewriteMethod);
}
/**
 * Creates a new BytesRef that points to a copy of the bytes from
 * {@code other}.
 * <p>
 * The returned BytesRef will have a length of {@code other.length}
 * and an offset of zero.
 */
public static BytesRef deepCopyOf(BytesRef other) {
  final byte[] copy =
      ArrayUtil.copyOfSubArray(other.bytes, other.offset, other.offset + other.length);
  return new BytesRef(copy, 0, other.length);
}
/**
 * Creates a new LongsRef that points to a copy of the longs from
 * <code>other</code>
 * <p>
 * The returned LongsRef will have a length of other.length
 * and an offset of zero.
 */
public static LongsRef deepCopyOf(LongsRef other) {
  // Copy only the live window [offset, offset + length) so the result is
  // independent of the (possibly shared) backing array of `other`.
  return new LongsRef(
      ArrayUtil.copyOfSubArray(other.longs, other.offset, other.offset + other.length),
      0,
      other.length);
}
/**
 * Creates a new CharsRef that points to a copy of the chars from
 * {@code other}.
 * <p>
 * The returned CharsRef will have a length of {@code other.length}
 * and an offset of zero.
 */
public static CharsRef deepCopyOf(CharsRef other) {
  final char[] copy =
      ArrayUtil.copyOfSubArray(other.chars, other.offset, other.offset + other.length);
  return new CharsRef(copy, 0, other.length);
}
/**
 * Creates a new IntsRef that points to a copy of the ints from
 * {@code other}.
 * <p>
 * The returned IntsRef will have a length of {@code other.length}
 * and an offset of zero.
 */
public static IntsRef deepCopyOf(IntsRef other) {
  final int[] copy =
      ArrayUtil.copyOfSubArray(other.ints, other.offset, other.offset + other.length);
  return new IntsRef(copy, 0, other.length);
}
/**
 * Builds a new {@link CharsRef} holding a private copy of this builder's
 * current content.
 */
public CharsRef toCharsRef() {
  final char[] copy = ArrayUtil.copyOfSubArray(ref.chars, 0, ref.length);
  return new CharsRef(copy, 0, ref.length);
}
/**
 * Builds a new {@link BytesRef} holding a private copy of this buffer's
 * current content.
 */
public BytesRef toBytesRef() {
  final byte[] copy = ArrayUtil.copyOfSubArray(ref.bytes, 0, ref.length);
  return new BytesRef(copy);
}
// bits carries exactly ID_LENGTH payload bytes behind one leading zero byte
// (both facts checked by the asserts); strip that leading byte so the caller
// receives exactly ID_LENGTH bytes.
assert bits.length == ID_LENGTH + 1;
assert bits[0] == 0;
return ArrayUtil.copyOfSubArray(bits, 1, bits.length);
} else {
// NOTE(review): presumably the id is assembled into this fixed-size buffer
// below — the rest of this branch is outside the visible chunk, confirm there.
byte[] result = new byte[ID_LENGTH];
@Override public final String toString(String field) { final StringBuilder sb = new StringBuilder(); if (this.field.equals(field) == false) { sb.append(this.field); sb.append(':'); } // print ourselves as "range per dimension" for (int i = 0; i < numDims; i++) { if (i > 0) { sb.append(','); } int startOffset = bytesPerDim * i; sb.append('['); sb.append(toString(i, ArrayUtil.copyOfSubArray(lowerPoint, startOffset, startOffset + bytesPerDim))); sb.append(" TO "); sb.append(toString(i, ArrayUtil.copyOfSubArray(upperPoint, startOffset, startOffset + bytesPerDim))); sb.append(']'); } return sb.toString(); }
/**
 * Freezes this builder into an immutable {@link MonotonicLongValues}.
 * Destructive: the pending page is flushed and released, so no further
 * values may be added afterwards.
 */
@Override
public MonotonicLongValues build() {
  finish();
  pending = null;
  // Trim the page-parallel arrays down to the pages actually filled.
  final PackedInts.Reader[] pages = ArrayUtil.copyOfSubArray(this.values, 0, valuesOff);
  final long[] pageMins = ArrayUtil.copyOfSubArray(this.mins, 0, valuesOff);
  final float[] pageAverages = ArrayUtil.copyOfSubArray(this.averages, 0, valuesOff);
  final long ramBytesUsed = MonotonicLongValues.BASE_RAM_BYTES_USED
      + RamUsageEstimator.sizeOf(pages)
      + RamUsageEstimator.sizeOf(pageMins)
      + RamUsageEstimator.sizeOf(pageAverages);
  return new MonotonicLongValues(pageShift, pageMask, pages, pageMins, pageAverages, size, ramBytesUsed);
}
/**
 * Freezes this builder into an immutable {@link DeltaPackedLongValues}.
 * Destructive: the pending page is flushed and released, so no further
 * values may be added afterwards.
 */
@Override
public DeltaPackedLongValues build() {
  finish();
  pending = null;
  // Trim the page-parallel arrays down to the pages actually filled.
  final PackedInts.Reader[] pages = ArrayUtil.copyOfSubArray(this.values, 0, valuesOff);
  final long[] pageMins = ArrayUtil.copyOfSubArray(this.mins, 0, valuesOff);
  final long ramBytesUsed = DeltaPackedLongValues.BASE_RAM_BYTES_USED
      + RamUsageEstimator.sizeOf(pages)
      + RamUsageEstimator.sizeOf(pageMins);
  return new DeltaPackedLongValues(pageShift, pageMask, pages, pageMins, size, ramBytesUsed);
}
// Store a right-sized copy: only the first nonZeroLongCount longs of the
// caller's buffer are live. NOTE(review): presumably slot i4096 held no
// non-zero longs before this (the running count is merely incremented, not
// adjusted for an old value) — confirm against the enclosing method.
this.bits[i4096] = ArrayUtil.copyOfSubArray(bits, 0, nonZeroLongCount);
this.nonZeroLongCount += nonZeroLongCount;
return;
/** Build a {@link PackedLongValues} instance that contains values that
 * have been added to this builder. This operation is destructive. */
public PackedLongValues build() {
  finish();
  pending = null;
  // Trim the page array down to the pages actually filled.
  final PackedInts.Reader[] pages = ArrayUtil.copyOfSubArray(this.values, 0, valuesOff);
  final long ramBytesUsed = PackedLongValues.BASE_RAM_BYTES_USED
      + RamUsageEstimator.sizeOf(pages);
  return new PackedLongValues(pageShift, pageMask, pages, size, ramBytesUsed);
}
// Builds the per-term statistics needed to score this phrase. Left
// byte-identical: the interleaving of TermContext construction (always, into
// this.states) with conditional stats collection (only when needsScores) is
// order-sensitive.
@Override
protected Similarity.SimWeight getStats(IndexSearcher searcher) throws IOException {
  final int[] positions = PhraseQuery.this.getPositions();
  // Precondition checks: rewrite() reduces 0/1-term phrases and normalizes
  // positions so the first is 0; anything else here is a caller error.
  if (positions.length < 2) {
    throw new IllegalStateException("PhraseWeight does not support less than 2 terms, call rewrite first");
  } else if (positions[0] != 0) {
    throw new IllegalStateException("PhraseWeight requires that the first position is 0, call rewrite first");
  }
  final IndexReaderContext context = searcher.getTopReaderContext();
  // states[i] is always populated (needed for scoring later), even when
  // statistics are not being collected.
  states = new TermContext[terms.length];
  TermStatistics termStats[] = new TermStatistics[terms.length];
  // termUpTo counts how many non-null statistics were actually gathered;
  // termStats may end up shorter than terms when searcher.termStatistics
  // returns null for a term.
  int termUpTo = 0;
  for (int i = 0; i < terms.length; i++) {
    final Term term = terms[i];
    states[i] = TermContext.build(context, term);
    if (needsScores) {
      TermStatistics termStatistics = searcher.termStatistics(term, states[i]);
      if (termStatistics != null) {
        termStats[termUpTo++] = termStatistics;
      }
    }
  }
  if (termUpTo > 0) {
    // Trim termStats to the entries actually filled before handing it to the
    // similarity.
    return similarity.computeWeight(boost, searcher.collectionStatistics(field), ArrayUtil.copyOfSubArray(termStats, 0, termUpTo));
  } else {
    return null; // no terms at all, we won't use similarity
  }
}
/**
 * Re-buckets the ordinals of {@code oldValues} by the sorted doc order:
 * the returned array is indexed by new doc id and holds, per doc, a
 * right-sized copy of that doc's ordinals (entries stay {@code null} for
 * docs without values).
 */
private long[][] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedSetDocValues oldValues) throws IOException {
  final long[][] ords = new long[maxDoc][];
  for (int docID = oldValues.nextDoc(); docID != NO_MORE_DOCS; docID = oldValues.nextDoc()) {
    final int newDocID = sortMap.oldToNew(docID);
    // Accumulate this doc's ordinals into a growable scratch buffer.
    long[] scratch = new long[1];
    int count = 0;
    for (long ord = oldValues.nextOrd(); ord != NO_MORE_ORDS; ord = oldValues.nextOrd()) {
      if (count == scratch.length) {
        scratch = ArrayUtil.grow(scratch);
      }
      scratch[count++] = ord;
    }
    ords[newDocID] = ArrayUtil.copyOfSubArray(scratch, 0, count);
  }
  return ords;
}
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer;
  final long start;
  final long count;

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    return new StringBuilder("PathSlice(start=")
        .append(start)
        .append(" count=")
        .append(count)
        .append(" writer=")
        .append(writer)
        .append(")")
        .toString();
  }
}
// Snapshot the live prefix of the buffer into this block's doc-id set.
// NOTE(review): presumably `buffer` is a scratch array reused for the next
// block, which is why a trimmed copy (not the buffer itself) is stored —
// confirm in the enclosing loop.
sets[currentBlock] = new ShortArrayDocIdSet(ArrayUtil.copyOfSubArray(buffer, 0, currentBlockCardinality));