/**
 * Applies a doc-values update for the configured soft-deletes field and records the new
 * doc-values generation. NOTE(review): this looks like Lucene's
 * PendingSoftDeletes.onDocValuesUpdate — confirm against upstream.
 */
@Override
void onDocValuesUpdate(FieldInfo info, DocValuesFieldUpdates.Iterator iterator) throws IOException {
  if (this.field.equals(info.name)) {
    // Fold the soft-delete updates into the mutable live-docs bits and track how many
    // new deletes they produced.
    pendingDeleteCount += applySoftDeletes(iterator, getMutableBits());
    assert assertPendingDeletes();
    // Account the new deletes on the segment's soft-delete count, then discard our own
    // pending-changes state since the update is now reflected in the segment info.
    this.info.setSoftDelCount(this.info.getSoftDelCount() + pendingDeleteCount);
    super.dropChanges();
  }
  // Generations must be strictly increasing; -2 is the "uninitialized" sentinel.
  assert dvGeneration < info.getDocValuesGen() : "we have seen this generation update already: " + dvGeneration + " vs. " + info.getDocValuesGen();
  assert dvGeneration != -2 : "docValues generation is still uninitialized";
  dvGeneration = info.getDocValuesGen();
}
/**
 * Checks whether any of the given doc-values fields has been updated in this leaf.
 * A field whose doc-values generation is above {@code -1} has received an update.
 *
 * @param ctx    the leaf whose field infos are inspected
 * @param fields the doc-values field names to check
 * @return {@code true} if none of the specified fields has been updated
 */
public static boolean isCacheable(LeafReaderContext ctx, String... fields) {
  for (int i = 0; i < fields.length; i++) {
    final FieldInfo fi = ctx.reader().getFieldInfos().fieldInfo(fields[i]);
    if (fi != null && fi.getDocValuesGen() > -1) {
      return false;
    }
  }
  return true;
}
}
continue; long docValuesGen = fi.getDocValuesGen(); if (docValuesGen == -1) { if (baseProducer == null) {
private DocValuesConsumer getInstance(FieldInfo field) throws IOException { DocValuesFormat format = null; if (field.getDocValuesGen() != -1) { final String formatName = field.getAttribute(PER_FIELD_FORMAT_KEY); if (field.getDocValuesGen() == -1 && previousValue != null) { throw new IllegalStateException("found existing value for " + PER_FIELD_FORMAT_KEY + ", field=" + field.name + ", old=" + previousValue + ", new=" + formatName); if (field.getDocValuesGen() != -1) { final String suffixAtt = field.getAttribute(PER_FIELD_SUFFIX_KEY); if (field.getDocValuesGen() == -1 && previousValue != null) { throw new IllegalStateException("found existing value for " + PER_FIELD_SUFFIX_KEY + ", field=" + field.name + ", old=" + previousValue + ", new=" + suffix);
/**
 * Lazily initializes the doc-values generation for the soft-deletes field.
 * A generation of -2 marks "uninitialized"; after this call it is either the
 * field's actual generation, or -1 when the field has no doc values on disk.
 */
private void ensureInitialized(IOSupplier<CodecReader> readerIOSupplier) throws IOException {
  if (dvGeneration == -2) {
    FieldInfos fieldInfos = readFieldInfos();
    FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
    // we try to only open a reader if it's really necessary ie. indices that are mainly append only might have
    // big segments that don't even have any docs in the soft deletes field. In such a case it's simply
    // enough to look at the FieldInfo for the field and check if the field has DocValues
    if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.NONE) {
      // in order to get accurate numbers we need to have a least one reader see here.
      onNewReader(readerIOSupplier.get(), info);
    } else {
      // we are safe here since we don't have any doc values for the soft-delete field on disk
      // no need to open a new reader
      dvGeneration = fieldInfo == null ? -1 : fieldInfo.getDocValuesGen();
    }
  }
}
clone.putAttribute(e.getKey(), e.getValue()); clone.setDocValuesGen(fi.getDocValuesGen());
builder.add(fieldInfo, fieldInfo.getDocValuesGen()); fieldToReader.put(fieldInfo.name, reader);
/**
 * Writes the given field infos to a per-segment file: index header, field count,
 * then for each field its name, number, packed flag bits, index options byte,
 * doc-values type byte, doc-values generation, and attributes map, followed by a
 * codec footer. The write order must match the corresponding reader exactly.
 */
@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (IndexOutput output = directory.createOutput(fileName, context)) {
    CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
    output.writeVInt(infos.size());
    for (FieldInfo fi : infos) {
      // Fail fast on an inconsistent FieldInfo before writing any of its bytes.
      fi.checkConsistency();
      output.writeString(fi.name);
      output.writeVInt(fi.number);
      byte bits = 0x0;
      if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
      if (fi.omitsNorms()) bits |= OMIT_NORMS;
      if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
      output.writeByte(bits);
      output.writeByte(indexOptionsByte(fi.getIndexOptions()));
      // pack the DV type and hasNorms in one byte
      output.writeByte(docValuesByte(fi.getDocValuesType()));
      output.writeLong(fi.getDocValuesGen());
      output.writeMapOfStrings(fi.attributes());
    }
    CodecUtil.writeFooter(output);
  }
}
&& fieldInfo.getDocValuesGen() == -1 && fieldInfo.getDocValuesType() != DocValuesType.NONE); final boolean isFullyHardDeleted = newSegment.getDelCount() == newSegment.info.maxDoc();
output.writeLong(fi.getDocValuesGen()); output.writeMapOfStrings(fi.attributes()); output.writeVInt(fi.getPointDataDimensionCount());
public static int hashCode(FieldInfo fi) { int h = 17; h = h * 31 + fi.number; h = h * 31 + fi.name.hashCode(); h = h * 31 + (int) fi.getDocValuesGen(); // skip attributes on purpose for performance return h; }
public void handleMaintenance() { if (this.infoCache.size() > MAX_FIELD_INFO_COUNT) { // remove only fields with docValues: the rest will be present everywhere this.infoCache.values().removeIf(fieldInfo -> fieldInfo.getDocValuesGen() >= 0); } if (this.infosCache.size() > MAX_INFOS_COUNT) { this.infosCache.clear(); } } }
void checkCanWrite(FieldInfo field) { if ((field.getDocValuesType() == DocValuesType.NUMERIC || field.getDocValuesType() == DocValuesType.BINARY) && field.getDocValuesGen() != -1) { // ok } else { throw new UnsupportedOperationException("this codec can only be used for reading"); } } }
void checkCanWrite(FieldInfo field) { if ((field.getDocValuesType() == DocValuesType.NUMERIC || field.getDocValuesType() == DocValuesType.BINARY) && field.getDocValuesGen() != -1) { // ok } else { throw new UnsupportedOperationException("this codec can only be used for reading"); } } }
void checkCanWrite(FieldInfo field) { if ((field.getDocValuesType() == DocValuesType.NUMERIC || field.getDocValuesType() == DocValuesType.BINARY) && field.getDocValuesGen() != -1) { // ok } else { throw new UnsupportedOperationException("this codec can only be used for reading"); } } }
/**
 * Applies a doc-values update for the configured soft-deletes field and records the new
 * doc-values generation. NOTE(review): this looks like Lucene's
 * PendingSoftDeletes.onDocValuesUpdate — confirm against upstream.
 */
@Override
void onDocValuesUpdate(FieldInfo info, DocValuesFieldUpdates.Iterator iterator) throws IOException {
  if (this.field.equals(info.name)) {
    // Fold the soft-delete updates into the mutable live-docs bits and track how many
    // new deletes they produced.
    pendingDeleteCount += applySoftDeletes(iterator, getMutableBits());
    assert assertPendingDeletes();
    // Account the new deletes on the segment's soft-delete count, then discard our own
    // pending-changes state since the update is now reflected in the segment info.
    this.info.setSoftDelCount(this.info.getSoftDelCount() + pendingDeleteCount);
    super.dropChanges();
  }
  // Generations must be strictly increasing; -2 is the "uninitialized" sentinel.
  assert dvGeneration < info.getDocValuesGen() : "we have seen this generation update already: " + dvGeneration + " vs. " + info.getDocValuesGen();
  assert dvGeneration != -2 : "docValues generation is still uninitialized";
  dvGeneration = info.getDocValuesGen();
}
/**
 * Reports whether all of the named doc-values fields are still in their original
 * state in this leaf; any field with a doc-values generation greater than -1 has
 * been updated and makes the result non-cacheable.
 *
 * @param ctx    the leaf whose field infos are inspected
 * @param fields the doc-values field names to check
 * @return {@code true} if none of the specified fields has been updated
 */
public static boolean isCacheable(LeafReaderContext ctx, String... fields) {
  for (int i = 0; i < fields.length; i++) {
    final FieldInfo fi = ctx.reader().getFieldInfos().fieldInfo(fields[i]);
    final boolean updated = fi != null && fi.getDocValuesGen() > -1;
    if (updated) {
      return false;
    }
  }
  return true;
}
}
/**
 * Structural equality over the compared {@link FieldInfo} properties: number,
 * name, doc-values generation, point dimensions/bytes, index options, and the
 * boolean flags. Attributes are intentionally not compared.
 */
public static boolean equals(FieldInfo a, FieldInfo b) {
  if (a.number != b.number || !a.name.equals(b.name)) {
    return false;
  }
  if (a.getDocValuesGen() != b.getDocValuesGen()) {
    return false;
  }
  if (a.getPointDimensionCount() != b.getPointDimensionCount()
      || a.getPointNumBytes() != b.getPointNumBytes()) {
    return false;
  }
  if (a.getIndexOptions() != b.getIndexOptions()) {
    return false;
  }
  return a.hasPayloads() == b.hasPayloads()
      && a.hasVectors() == b.hasVectors()
      && a.omitsNorms() == b.omitsNorms()
      && a.hasNorms() == b.hasNorms();
}
/**
 * Lazily initializes the doc-values generation for the soft-deletes field.
 * A generation of -2 marks "uninitialized"; after this call it is either the
 * field's actual generation, or -1 when the field has no doc values on disk.
 */
private void ensureInitialized(IOSupplier<CodecReader> readerIOSupplier) throws IOException {
  if (dvGeneration == -2) {
    FieldInfos fieldInfos = readFieldInfos();
    FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
    // we try to only open a reader if it's really necessary ie. indices that are mainly append only might have
    // big segments that don't even have any docs in the soft deletes field. In such a case it's simply
    // enough to look at the FieldInfo for the field and check if the field has DocValues
    if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.NONE) {
      // in order to get accurate numbers we need to have a least one reader see here.
      onNewReader(readerIOSupplier.get(), info);
    } else {
      // we are safe here since we don't have any doc values for the soft-delete field on disk
      // no need to open a new reader
      dvGeneration = fieldInfo == null ? -1 : fieldInfo.getDocValuesGen();
    }
  }
}
/**
 * Snapshot constructor: delegates to the canonical constructor, copying every
 * tracked property out of the live {@link FieldInfo} so later mutation of the
 * original cannot affect this status.
 */
private FieldInfoStatus(final FieldInfo info) {
  this(info.number, info.omitsNorms(), info.hasNorms(), info.hasPayloads(), info.hasVectors(),
      info.getDocValuesGen(), info.getDocValuesType(), info.getIndexOptions(),
      info.getPointDimensionCount(), info.getPointNumBytes());
}