void setInvertState() { invertState = new FieldInvertState(indexCreatedVersionMajor, fieldInfo.name); termsHashPerField = termsHash.addField(invertState, fieldInfo); if (fieldInfo.omitsNorms() == false) { assert norms == null; // Even if no documents actually succeed in setting a norm, we still write norms for this segment: norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed); } }
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Rebuild a field type that mirrors the original field's indexing options.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  doc.add(new StoredField(fieldInfo.name, text, fieldType));
}
public void finish() throws IOException { if (fieldInfo.omitsNorms() == false) { long normValue; if (invertState.length == 0) { // the field exists in this document, but it did not have // any indexed tokens, so we assign a default value of zero // to the norm normValue = 0; } else { normValue = similarity.computeNorm(invertState); } norms.addValue(docState.docID, normValue); } termsHashPerField.finish(); }
public FieldInfo add(FieldInfo fi, long dvGen) { // IMPORTANT - reuse the field number if possible for consistent field numbers across segments return addOrUpdateInternal(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), dvGen, fi.getPointDataDimensionCount(), fi.getPointIndexDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField()); }
// Flushes accumulated norms for all fields of this segment through the codec's
// NormsConsumer. Uses a success flag so the consumer is closed normally on the
// happy path but closed exception-suppressing on failure.
private void writeNorms(SegmentWriteState state, Sorter.DocMap sortMap) throws IOException {
  boolean success = false;
  NormsConsumer normsConsumer = null;
  try {
    if (state.fieldInfos.hasNorms()) {
      NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
      assert normsFormat != null;
      normsConsumer = normsFormat.normsConsumer(state);
      for (FieldInfo fi : state.fieldInfos) {
        PerField perField = getPerField(fi.name);
        assert perField != null;
        // we must check the final value of omitNorms for the fieldinfo: it could have
        // changed for this field since the first time we added it.
        if (fi.omitsNorms() == false && fi.getIndexOptions() != IndexOptions.NONE) {
          assert perField.norms != null: "field=" + fi.name;
          perField.norms.finish(state.segmentInfo.maxDoc());
          perField.norms.flush(state, sortMap, normsConsumer);
        }
      }
    }
    success = true;
  } finally {
    // On success, close normally (propagating close errors); otherwise close
    // while suppressing secondary exceptions so the original failure wins.
    if (success) {
      IOUtils.close(normsConsumer);
    } else {
      IOUtils.closeWhileHandlingException(normsConsumer);
    }
  }
}
// Serializes the segment's FieldInfos to the field-infos file. The write order
// (header, count, then per-field name/number/flags/options/doc-values/gen/attrs,
// then footer) defines the on-disk format and must not be reordered.
@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (IndexOutput output = directory.createOutput(fileName, context)) {
    CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
    output.writeVInt(infos.size());
    for (FieldInfo fi : infos) {
      fi.checkConsistency();
      output.writeString(fi.name);
      output.writeVInt(fi.number);
      // Pack the per-field boolean flags into one byte.
      byte bits = 0x0;
      if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
      if (fi.omitsNorms()) bits |= OMIT_NORMS;
      if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
      output.writeByte(bits);
      output.writeByte(indexOptionsByte(fi.getIndexOptions()));
      // pack the DV type and hasNorms in one byte
      output.writeByte(docValuesByte(fi.getDocValuesType()));
      output.writeLong(fi.getDocValuesGen());
      output.writeMapOfStrings(fi.attributes());
    }
    CodecUtil.writeFooter(output);
  }
}
// Fold the per-field boolean flags into the packed bits byte.
if (fi.omitsNorms()) bits |= OMIT_NORMS;
if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
if (fi.isSoftDeletesField()) bits |= SOFT_DELETES_FIELD;
@Override
public NumericDocValues getNormValues(String field) {
  final Info info = fields.get(field);
  // Unknown fields and fields that omit norms both yield no norm values.
  if (info != null && !info.fieldInfo.omitsNorms()) {
    return info.getNormDocValues();
  }
  return null;
}
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Reconstruct a field type that carries over the original indexing options.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  doc.add(new Field(fieldInfo.name, text, fieldType));
}
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Mirror the stored field's original indexing options on a new field type.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  getDocument().add(new Field(fieldInfo.name, text, fieldType));
}
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Mirror the stored field's original indexing options on a new field type.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  getDocument().add(new Field(fieldInfo.name, text, fieldType));
}
public void finish() throws IOException {
  // Record a norm only for fields that keep norms and produced at least one token.
  final boolean writeNorm = !fieldInfo.omitsNorms() && invertState.length != 0;
  if (writeNorm) {
    norms.addValue(docState.docID, similarity.computeNorm(invertState));
  }
  termsHashPerField.finish();
}
void setInvertState() { invertState = new FieldInvertState(fieldInfo.name); termsHashPerField = termsHash.addField(invertState, fieldInfo); if (fieldInfo.omitsNorms() == false) { assert norms == null; // Even if no documents actually succeed in setting a norm, we still write norms for this segment: norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed); } }
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Mirror the stored field's original indexing options on a new field type.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  getDocument().add(new Field(fieldInfo.name, text, fieldType));
}
void setInvertState() { invertState = new FieldInvertState(indexCreatedVersionMajor, fieldInfo.name); termsHashPerField = termsHash.addField(invertState, fieldInfo); if (fieldInfo.omitsNorms() == false) { assert norms == null; // Even if no documents actually succeed in setting a norm, we still write norms for this segment: norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed); } }
@Override
public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
  // Rebuild a field type that mirrors the original field's indexing options.
  final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setStoreTermVectors(fieldInfo.hasVectors());
  fieldType.setOmitNorms(fieldInfo.omitsNorms());
  fieldType.setIndexOptions(fieldInfo.getIndexOptions());
  final String text = new String(value, StandardCharsets.UTF_8);
  doc.add(new StoredField(fieldInfo.name, text, fieldType));
}
public FieldInfo add(FieldInfo fi) { // IMPORTANT - reuse the field number if possible for consistent field numbers across segments return addOrUpdateInternal(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType()); }
// True when every compared attribute of the two FieldInfos matches.
public static boolean equals(FieldInfo a, FieldInfo b) {
  if (a.number != b.number || !a.name.equals(b.name)) {
    return false;
  }
  return a.getDocValuesGen() == b.getDocValuesGen()
      && a.getPointDimensionCount() == b.getPointDimensionCount()
      && a.getPointNumBytes() == b.getPointNumBytes()
      && a.getIndexOptions() == b.getIndexOptions()
      && a.hasPayloads() == b.hasPayloads()
      && a.hasVectors() == b.hasVectors()
      && a.omitsNorms() == b.omitsNorms()
      && a.hasNorms() == b.hasNorms();
}
// Snapshots the relevant attributes of a FieldInfo by delegating to the
// canonical constructor; the this(...) call must remain the first statement.
private FieldInfoStatus(final FieldInfo info) {
  this(info.number, info.omitsNorms(), info.hasNorms(), info.hasPayloads(), info.hasVectors(), info.getDocValuesGen(), info.getDocValuesType(), info.getIndexOptions(), info.getPointDimensionCount(), info.getPointNumBytes());
}
public FieldInfo add(FieldInfo fi, long dvGen) { // IMPORTANT - reuse the field number if possible for consistent field numbers across segments return addOrUpdateInternal(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), dvGen, fi.getPointDataDimensionCount(), fi.getPointIndexDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField()); }