@Override
public void resetSkip() {
  // Capture the current file pointer of each stream as the baseline for
  // the skip entries that follow.
  lastDocFP = docOut.getFilePointer();
  if (fieldHasPositions) {
    lastPosFP = posOut.getFilePointer();
    if (fieldHasPayloads || fieldHasOffsets) {
      lastPayFP = payOut.getFilePointer();
    }
  }
  // Skip structures will be lazily (re-)initialized on first use.
  initialized = false;
}
@Override
public long getFilePointer() {
  // Pure pass-through to the wrapped output.
  final long fp = delegate.getFilePointer();
  return fp;
}
@Override public void finishDoc() throws IOException { // Since we don't know df for current term, we had to buffer // those skip data for each block, and when a new doc comes, // write them to skip file. if (docBufferUpto == BLOCK_SIZE) { lastBlockDocID = lastDocID; if (posOut != null) { if (payOut != null) { lastBlockPayFP = payOut.getFilePointer(); } lastBlockPosFP = posOut.getFilePointer(); lastBlockPosBufferUpto = posBufferUpto; lastBlockPayloadByteUpto = payloadByteUpto; } docBufferUpto = 0; } }
/**
 * Creates a writer for {@code numValues} monotonically increasing values,
 * buffered and flushed in blocks of {@code 2^blockShift} entries.
 *
 * @param metaOut   output that receives per-block metadata
 * @param dataOut   output that receives the packed data
 * @param numValues total number of values that will be added
 * @param blockShift log2 of the block size; must be in [2, 30]
 * @throws IllegalArgumentException if {@code blockShift} is out of range
 */
DirectMonotonicWriter(IndexOutput metaOut, IndexOutput dataOut, long numValues, int blockShift) {
  this.meta = metaOut;
  this.data = dataOut;
  this.numValues = numValues;
  if (blockShift < 2 || blockShift > 30) {
    // Bug fix: the message previously claimed the valid range was [3-30]
    // while the check actually enforces [2-30]; keep the two consistent.
    throw new IllegalArgumentException("blockShift must be in [2-30], got " + blockShift);
  }
  final int blockSize = 1 << blockShift;
  this.buffer = new long[blockSize];
  this.bufferSize = 0;
  // Remember where this writer starts in the data file so that block
  // offsets can be written relative to it.
  this.baseDataPointer = dataOut.getFilePointer();
}
@Override
public void startTerm() {
  // Record where this term's postings begin in each stream.
  docStartFP = docOut.getFilePointer();
  if (writePositions) {
    posStartFP = posOut.getFilePointer();
    if (writeOffsets || writePayloads) {
      payStartFP = payOut.getFilePointer();
    }
  }
  lastBlockDocID = -1;
  lastDocID = 0;
  skipWriter.resetSkip();
}
public long finish() throws IOException { if (leafCount > 0) { writeLeafBlock(); leafCount = 0; } if (valueCount == 0) { return -1; } pointCount = valueCount; long indexFP = out.getFilePointer(); int numInnerNodes = leafBlockStartValues.size(); //System.out.println("BKDW: now rotate numInnerNodes=" + numInnerNodes + " leafBlockStarts=" + leafBlockStartValues.size()); byte[] index = new byte[(1+numInnerNodes) * (1+bytesPerDim)]; rotateToTree(1, 0, numInnerNodes, index, leafBlockStartValues); long[] arr = new long[leafBlockFPs.size()]; for(int i=0;i<leafBlockFPs.size();i++) { arr[i] = leafBlockFPs.get(i); } writeIndex(out, maxPointsInLeafNode, arr, index); return indexFP; }
/**
 * Sets the values for the current skip data, then buffers a skip entry for
 * the given number of docs.
 */
public void bufferSkip(int doc, int numDocs, long posFP, long payFP, int posBufferUpto, int payloadByteUpto) throws IOException {
  initSkip();
  curDoc = doc;
  curDocPointer = docOut.getFilePointer();
  curPosPointer = posFP;
  curPayPointer = payFP;
  curPosBufferUpto = posBufferUpto;
  curPayloadByteUpto = payloadByteUpto;
  bufferSkip(numDocs);
}
@Override public void finish(FieldInfos fis, int numDocs) throws IOException { if (!pendingDocs.isEmpty()) { flush(); numDirtyChunks++; // incomplete: we had to force this flush } if (numDocs != this.numDocs) { throw new RuntimeException("Wrote " + this.numDocs + " docs, finish called with numDocs=" + numDocs); } indexWriter.finish(numDocs, vectorsStream.getFilePointer()); vectorsStream.writeVLong(numChunks); vectorsStream.writeVLong(numDirtyChunks); CodecUtil.writeFooter(vectorsStream); }
/** * Writes the buffered skip lists to the given output. * * @param output the IndexOutput the skip lists shall be written to * @return the pointer the skip list starts */ public long writeSkip(IndexOutput output) throws IOException { long skipPointer = output.getFilePointer(); //System.out.println("skipper.writeSkip fp=" + skipPointer); if (skipBuffer == null || skipBuffer.length == 0) return skipPointer; for (int level = numberOfSkipLevels - 1; level > 0; level--) { long length = skipBuffer[level].getFilePointer(); if (length > 0) { output.writeVLong(length); skipBuffer[level].writeTo(output); } } skipBuffer[0].writeTo(output); return skipPointer; } }
@Override public void finish(FieldInfos fis, int numDocs) throws IOException { if (numBufferedDocs > 0) { flush(); numDirtyChunks++; // incomplete: we had to force this flush } else { assert bufferedDocs.getPosition() == 0; } if (docBase != numDocs) { throw new RuntimeException("Wrote " + docBase + " docs, finish called with numDocs=" + numDocs); } indexWriter.finish(numDocs, fieldsStream.getFilePointer()); fieldsStream.writeVLong(numChunks); fieldsStream.writeVLong(numDirtyChunks); CodecUtil.writeFooter(fieldsStream); assert bufferedDocs.getPosition() == 0; }
meta.writeLong(data.getFilePointer() - baseDataPointer); if (maxDelta == 0) { meta.writeByte((byte) 0);
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer;
  final long start;
  final long count;

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("PathSlice(start=");
    sb.append(start);
    sb.append(" count=").append(count);
    sb.append(" writer=").append(writer);
    sb.append(')');
    return sb.toString();
  }
}
private void writeTermsIndex(SortedSetDocValues values) throws IOException { final long size = values.getValueCount(); meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); long start = data.getFilePointer(); writer.finish(); meta.writeLong(start); meta.writeLong(data.getFilePointer() - start); start = data.getFilePointer(); addressBuffer.writeTo(data); meta.writeLong(start); meta.writeLong(data.getFilePointer() - start);
meta.writeLong(0L); } else { long offset = data.getFilePointer(); meta.writeLong(offset); values = normsProducer.getNorms(field); IndexedDISI.writeBitSet(values, data); meta.writeLong(data.getFilePointer() - offset); meta.writeLong(min); } else { meta.writeLong(data.getFilePointer()); values = normsProducer.getNorms(field); writeValues(values, numBytesPerValue, data);
@Override
public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
  meta.writeInt(field.number);
  meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC);

  final long[] stats = writeValues(field, valuesProducer);
  final int numDocsWithField = Math.toIntExact(stats[0]);
  final long numValues = stats[1];
  assert numValues >= numDocsWithField;

  meta.writeInt(numDocsWithField);
  if (numValues > numDocsWithField) {
    // At least one doc is multi-valued: write a monotonic address table
    // mapping each doc ordinal to the start of its values.
    final long addressesStart = data.getFilePointer();
    meta.writeLong(addressesStart);
    meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT);

    final DirectMonotonicWriter addressesWriter = DirectMonotonicWriter.getInstance(meta, data, numDocsWithField + 1L, DIRECT_MONOTONIC_BLOCK_SHIFT);
    long cumulative = 0;
    addressesWriter.add(cumulative);
    final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field);
    for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
      cumulative += values.docValueCount();
      addressesWriter.add(cumulative);
    }
    addressesWriter.finish();
    meta.writeLong(data.getFilePointer() - addressesStart);
  }
}
private void flush() throws IOException { indexWriter.writeIndex(numBufferedDocs, fieldsStream.getFilePointer()); // transform end offsets into lengths final int[] lengths = endOffsets; for (int i = numBufferedDocs - 1; i > 0; --i) { lengths[i] = endOffsets[i] - endOffsets[i - 1]; assert lengths[i] >= 0; } final boolean sliced = bufferedDocs.getPosition() >= 2 * chunkSize; writeHeader(docBase, numBufferedDocs, numStoredFields, lengths, sliced); // compress stored fields to fieldsStream if (sliced) { // big chunk, slice it for (int compressed = 0; compressed < bufferedDocs.getPosition(); compressed += chunkSize) { compressor.compress(bufferedDocs.getBytes(), compressed, Math.min(chunkSize, bufferedDocs.getPosition() - compressed), fieldsStream); } } else { compressor.compress(bufferedDocs.getBytes(), 0, bufferedDocs.getPosition(), fieldsStream); } // reset docBase += numBufferedDocs; numBufferedDocs = 0; bufferedDocs.reset(); numChunks++; }
CodecUtil.writeIndexHeader(indexStream, codecNameIdx, VERSION_CURRENT, si.getId(), segmentSuffix); CodecUtil.writeIndexHeader(fieldsStream, codecNameDat, VERSION_CURRENT, si.getId(), segmentSuffix); assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == fieldsStream.getFilePointer(); assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
indexWriter.writeIndex(chunkDocs, vectorsStream.getFilePointer());
long startOffset = data.getFilePointer(); try (ChecksumIndexInput in = dir.openChecksumInput(file, IOContext.READONCE)) { data.writeLong(checksum); long endOffset = data.getFilePointer();
offsets[i] = out.getFilePointer(); addPositions(in, out); i++;