/**
 * Refills the doc-delta and freq buffers with the next batch of postings.
 * Decodes a full packed block when one remains; otherwise falls back to the
 * singleton or vInt encodings used for short postings lists.
 */
private void refillDocs() throws IOException {
  final int remaining = docFreq - docUpto;
  assert remaining > 0;
  if (remaining >= BLOCK_SIZE) {
    // Full packed block of doc deltas, followed by a full block of freqs.
    forUtil.readBlock(docIn, encoded, docDeltaBuffer);
    forUtil.readBlock(docIn, encoded, freqBuffer);
  } else if (docFreq == 1) {
    // Singleton term: the sole doc id and freq come from the term metadata.
    docDeltaBuffer[0] = singletonDocID;
    freqBuffer[0] = (int) totalTermFreq;
  } else {
    // Tail block: deltas and freqs were written as interleaved vInts.
    readVIntBlock(docIn, docDeltaBuffer, freqBuffer, remaining, true);
  }
  docBufferUpto = 0;
}
/** Returns an independent copy of this term state. */
@Override
public IntBlockTermState clone() {
  final IntBlockTermState copy = new IntBlockTermState();
  copy.copyFrom(this);
  return copy;
}
/** Returns a fresh, empty {@code IntBlockTermState}, typed as the {@code BlockTermState} base. */
@Override
public BlockTermState newTermState() {
  return new IntBlockTermState();
}
/**
 * Refills the doc-delta buffer (and, when the caller asked for freqs, the
 * freq buffer) with up to BLOCK_SIZE entries. If freqs are indexed but not
 * needed, the encoded freq block is skipped instead of decoded.
 */
private void refillDocs() throws IOException {
  final int left = docFreq - docUpto;
  assert left > 0;
  if (left >= BLOCK_SIZE) {
    // Full packed block of doc deltas.
    forUtil.readBlock(docIn, encoded, docDeltaBuffer);
    if (indexHasFreq) {
      if (needsFreq) {
        forUtil.readBlock(docIn, encoded, freqBuffer);
      } else {
        forUtil.skipBlock(docIn); // skip over freqs
      }
    }
  } else if (docFreq == 1) {
    // Singleton term: doc id and freq come from the term metadata.
    docDeltaBuffer[0] = singletonDocID;
    freqBuffer[0] = (int) totalTermFreq;
  } else {
    // Read vInts:
    readVIntBlock(docIn, docDeltaBuffer, freqBuffer, left, indexHasFreq);
  }
  docBufferUpto = 0;
}
/**
 * Advances the position stream past positions belonging to already-skipped
 * docs. {@code posPendingCount} counts positions owed but not yet consumed;
 * everything except the current doc's {@code freq} positions is discarded.
 */
private void skipPositions() throws IOException {
  // Skip positions now:
  int toSkip = posPendingCount - freq;
  final int leftInBlock = BLOCK_SIZE - posBufferUpto;
  if (toSkip < leftInBlock) {
    // Target lies inside the currently-decoded block; just move the cursor.
    posBufferUpto += toSkip;
  } else {
    // Consume the rest of this block, then skip whole encoded blocks
    // without decoding them.
    toSkip -= leftInBlock;
    while (toSkip >= BLOCK_SIZE) {
      // Must never skip past the last (vInt-encoded) position block.
      assert posIn.getFilePointer() != lastPosBlockFP;
      forUtil.skipBlock(posIn);
      toSkip -= BLOCK_SIZE;
    }
    // Decode the block containing the target position and index into it.
    refillPositions();
    posBufferUpto = toSkip;
  }
  position = 0;
}
/**
 * Sets the values for the current skip data: the current doc id, the file
 * pointers into the doc/pos/pay streams, and the in-block offsets, then
 * delegates to {@code bufferSkip(numDocs)} to record the skip entry.
 */
public void bufferSkip(int doc, int numDocs, long posFP, long payFP, int posBufferUpto, int payloadByteUpto) throws IOException {
  initSkip();
  this.curDoc = doc;
  this.curDocPointer = docOut.getFilePointer();
  this.curPosPointer = posFP;
  this.curPayPointer = payFP;
  this.curPosBufferUpto = posBufferUpto;
  this.curPayloadByteUpto = payloadByteUpto;
  bufferSkip(numDocs);
}
// NOTE(review): fragment of the block-flush path — enclosing method not fully
// visible here. Flush the buffered skip entry for the previous block, then
// write the next packed block of doc deltas (and freqs, when indexed).
skipWriter.bufferSkip(lastBlockDocID, docCount, lastBlockPosFP, lastBlockPayFP, lastBlockPosBufferUpto, lastBlockPayloadByteUpto);
forUtil.writeBlock(docDeltaBuffer, encoded, docOut);
if (writeFreqs) {
  forUtil.writeBlock(freqBuffer, encoded, docOut);
/**
 * Returns a stored-fields writer for the configured compression mode, after
 * stamping the mode into the segment attributes so the matching reader
 * implementation can be selected at read time.
 *
 * @throws IllegalStateException if this segment was already stamped with a
 *         different compression mode
 */
@Override
public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException {
  String previous = si.putAttribute(MODE_KEY, mode.name());
  if (previous != null && previous.equals(mode.name()) == false) {
    // Bug fix: the message used to concatenate si.name and "old=" with no
    // separator, rendering as "...segment: _0old=BEST_SPEED, new=...".
    throw new IllegalStateException("found existing value for " + MODE_KEY + " for segment: " + si.name
        + ", old=" + previous + ", new=" + mode.name());
  }
  return impl(mode).fieldsWriter(directory, si, context);
}
/**
 * Returns a stored-fields reader matching the compression mode recorded in
 * the segment attributes when the segment was written.
 *
 * @throws IllegalStateException if the segment carries no mode attribute
 */
@Override
public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException {
  final String value = si.getAttribute(MODE_KEY);
  if (value != null) {
    return impl(Mode.valueOf(value)).fieldsReader(directory, si, fn, context);
  }
  throw new IllegalStateException("missing value for " + MODE_KEY + " for segment: " + si.name);
}
/**
 * Instantiates a new codec, specifying the stored fields compression
 * mode to use.
 * @param mode stored fields compression mode to use for newly
 *        flushed/merged segments; must not be {@code null}
 * @throws NullPointerException if {@code mode} is {@code null}
 */
public Lucene70Codec(Mode mode) {
  super("Lucene70");
  // Name the argument in the null check so a failure identifies the culprit.
  this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode, "mode"));
}
/** Returns the format name together with its packed-block size, e.g. {@code Name(blocksize=128)}. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder(getName());
  sb.append("(blocksize=").append(BLOCK_SIZE).append(')');
  return sb.toString();
}
/**
 * Advances to the next document, accumulating the buffered delta into the
 * absolute doc id and picking up its freq; refills the buffers when the
 * current block is exhausted. Returns NO_MORE_DOCS at the end of the list.
 */
@Override
public int nextDoc() throws IOException {
  if (docUpto == docFreq) {
    return doc = NO_MORE_DOCS;
  }
  if (docBufferUpto == BLOCK_SIZE) {
    refillDocs();
  }
  final int slot = docBufferUpto++;
  accum += docDeltaBuffer[slot]; // deltas accumulate into absolute doc ids
  freq = freqBuffer[slot];
  docUpto++;
  doc = accum;
  return doc;
}
/** Opens a {@code Lucene50CompoundReader} over the given segment's compound file. */
@Override
public Directory getCompoundReader(Directory dir, SegmentInfo si, IOContext context) throws IOException {
  return new Lucene50CompoundReader(dir, si, context);
}
/**
 * Advances to the next document, refilling the buffers when exhausted, and
 * credits the doc's freq to {@code posPendingCount} so its positions can be
 * decoded or skipped lazily. Returns NO_MORE_DOCS at the end of the list.
 */
@Override
public int nextDoc() throws IOException {
  if (docUpto == docFreq) {
    return doc = NO_MORE_DOCS;
  }
  if (docBufferUpto == BLOCK_SIZE) {
    refillDocs();
  }
  accum += docDeltaBuffer[docBufferUpto]; // deltas accumulate into absolute doc ids
  freq = freqBuffer[docBufferUpto];
  posPendingCount += freq; // positions for this doc are consumed lazily
  docBufferUpto++;
  docUpto++;
  doc = accum;
  position = 0; // reset per-doc position cursor
  return doc;
}
/**
 * Advances to the next document, refilling the buffers when exhausted, and
 * credits the doc's freq to {@code posPendingCount} for lazy position
 * decoding. Also resets the per-doc offset state for this offsets-enabled
 * enum. Returns NO_MORE_DOCS at the end of the list.
 */
@Override
public int nextDoc() throws IOException {
  if (docUpto == docFreq) {
    return doc = NO_MORE_DOCS;
  }
  if (docBufferUpto == BLOCK_SIZE) {
    refillDocs();
  }
  accum += docDeltaBuffer[docBufferUpto]; // deltas accumulate into absolute doc ids
  freq = freqBuffer[docBufferUpto];
  posPendingCount += freq; // positions for this doc are consumed lazily
  docBufferUpto++;
  docUpto++;
  doc = accum;
  position = 0; // reset per-doc position cursor
  lastStartOffset = 0; // start offsets are delta-coded per doc
  return doc;
}
/** Returns an array of strings, one for each file in the directory. */ @Override public String[] listAll() { ensureOpen(); String[] res = entries.keySet().toArray(new String[entries.size()]); // Add the segment name for (int i = 0; i < res.length; i++) { res[i] = segmentName + res[i]; } return res; }
@Override public int setField(FieldInfo fieldInfo) { super.setField(fieldInfo); skipWriter.setField(writePositions, writeOffsets, writePayloads); lastState = emptyState; if (writePositions) { if (writePayloads || writeOffsets) { return 3; // doc + pos + pay FP } else { return 2; // doc + pos FP } } else { return 1; // doc FP } }
/**
 * Starts a new term: snapshots the current file pointers of the doc, pos and
 * pay output streams (so the term metadata can record where its postings
 * begin) and resets the per-term doc/skip state.
 */
@Override
public void startTerm() {
  docStartFP = docOut.getFilePointer();
  if (writePositions) {
    posStartFP = posOut.getFilePointer();
    if (writePayloads || writeOffsets) {
      payStartFP = payOut.getFilePointer();
    }
  }
  lastDocID = 0;
  lastBlockDocID = -1; // no packed block flushed yet for this term
  skipWriter.resetSkip();
}
/**
 * Refills the doc-delta and freq buffers with up to BLOCK_SIZE postings,
 * choosing between packed-block, singleton, and vInt tail encodings.
 */
private void refillDocs() throws IOException {
  final int left = docFreq - docUpto;
  assert left > 0;
  if (left >= BLOCK_SIZE) {
    // Full packed block of doc deltas, then a full block of freqs.
    forUtil.readBlock(docIn, encoded, docDeltaBuffer);
    forUtil.readBlock(docIn, encoded, freqBuffer);
  } else if (docFreq == 1) {
    // Singleton term: doc id and freq were inlined in the term metadata.
    docDeltaBuffer[0] = singletonDocID;
    freqBuffer[0] = (int) totalTermFreq;
  } else {
    // Read vInts:
    readVIntBlock(docIn, docDeltaBuffer, freqBuffer, left, true);
  }
  docBufferUpto = 0;
}
/** Returns a fresh, empty {@code IntBlockTermState}. */
@Override
public IntBlockTermState newTermState() {
  return new IntBlockTermState();
}