List<SegmentCommitInfo> createBackupSegmentInfos() {
  final List<SegmentCommitInfo> list = new ArrayList<>(size());
  for (final SegmentCommitInfo info : this) {
    assert info.info.getCodec() != null;
    list.add(info.clone());
  }
  return list;
}
/** Returns all files in use by this segment. */
public Collection<String> files() throws IOException {
  // Start from the wrapped info's files:
  Collection<String> files = new HashSet<>(info.files());

  // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
  // updates) and then maybe even be able to remove LiveDocsFormat.files().

  // Must separately add any live docs files:
  info.getCodec().liveDocsFormat().files(this, files);

  // Must separately add any field-updates files:
  for (Set<String> updateFiles : dvUpdatesFiles.values()) {
    files.addAll(updateFiles);
  }

  // Must separately add fieldInfos files:
  files.addAll(fieldInfosFiles);

  return files;
}
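For context, a minimal usage sketch (hypothetical driver code, not part of Lucene): unioning each segment's files() yields every file a commit depends on, which is essentially what SegmentInfos does on top of this method. The class name here is made up for illustration.

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;

final class CommitFilesSketch {
  // Union of every segment's files; note the segments_N file itself is not included.
  static Set<String> allSegmentFiles(SegmentInfos infos) throws IOException {
    Set<String> all = new HashSet<>();
    for (SegmentCommitInfo sci : infos) {
      all.addAll(sci.files()); // base files + live docs + DV updates + field infos
    }
    return all;
  }
}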
/**
 * Returns a copy of this instance, also copying each
 * SegmentInfo.
 */
@Override
public SegmentInfos clone() {
  try {
    final SegmentInfos sis = (SegmentInfos) super.clone();
    // Deep clone; first recreate all collections:
    sis.segments = new ArrayList<>(size());
    for (final SegmentCommitInfo info : this) {
      assert info.info.getCodec() != null;
      // Don't access segments directly; use the add method!
      sis.add(info.clone());
    }
    sis.userData = new HashMap<>(userData);
    return sis;
  } catch (CloneNotSupportedException e) {
    throw new RuntimeException("should not happen", e);
  }
}
private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, Directory dir,
    final Long gen, FieldInfos infos) throws IOException {
  Directory dvDir = dir;
  String segmentSuffix = "";
  if (gen.longValue() != -1) {
    dvDir = si.info.dir; // gen'd files are written outside CFS, so use the SegmentInfo directory
    segmentSuffix = Long.toString(gen.longValue(), Character.MAX_RADIX);
  }

  // Set SegmentReadState to list only the fields that are relevant to that gen:
  SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, IOContext.READ, segmentSuffix);
  DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat();
  return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
    @SuppressWarnings("synthetic-access")
    @Override
    protected void release() throws IOException {
      object.close();
      synchronized (SegmentDocValues.this) {
        genDVProducers.remove(gen);
      }
    }
  };
}
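The segmentSuffix encoding above is just the generation number rendered in base 36 (Character.MAX_RADIX is 36), with gen -1 meaning "no gen'd files". A standalone, assumption-free illustration:

public class GenSuffixDemo {
  public static void main(String[] args) {
    long gen = 123456L;
    // -1 means "no gen'd files"; otherwise the gen is encoded in base 36
    String segmentSuffix = gen == -1 ? "" : Long.toString(gen, Character.MAX_RADIX);
    System.out.println(segmentSuffix); // prints "2n9c"
  }
}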
/** Initializes the most recent FieldInfos for the current commit. */
private FieldInfos initFieldInfos() throws IOException {
  if (!si.hasFieldUpdates()) {
    return core.coreFieldInfos;
  } else {
    // Gen'd field infos are always written outside of CFS:
    FieldInfosFormat fisFormat = si.info.getCodec().fieldInfosFormat();
    final String segmentSuffix = Long.toString(si.getFieldInfosGen(), Character.MAX_RADIX);
    return fisFormat.read(si.info.dir, si.info, segmentSuffix, IOContext.READONCE);
  }
}
private FieldInfos readFieldInfos() throws IOException {
  SegmentInfo segInfo = info.info;
  Directory dir = segInfo.dir;
  if (info.hasFieldUpdates() == false) {
    // No gen'd updates: read the original field infos, which live inside
    // the compound file when one is used.
    Closeable toClose;
    if (segInfo.getUseCompoundFile()) {
      toClose = dir = segInfo.getCodec().compoundFormat().getCompoundReader(segInfo.dir, segInfo, IOContext.READONCE);
    } else {
      toClose = null;
      dir = segInfo.dir;
    }
    try {
      return segInfo.getCodec().fieldInfosFormat().read(dir, segInfo, "", IOContext.READONCE);
    } finally {
      IOUtils.close(toClose);
    }
  } else {
    // Gen'd field infos are always written outside of CFS:
    FieldInfosFormat fisFormat = segInfo.getCodec().fieldInfosFormat();
    final String segmentSuffix = Long.toString(info.getFieldInfosGen(), Character.MAX_RADIX);
    return fisFormat.read(dir, segInfo, segmentSuffix, IOContext.READONCE);
  }
}
/**
 * NOTE: this method creates a compound file for all files returned by
 * info.files(). While, in general, those files may include separate norms and
 * deletion files, this SegmentInfo must not reference such files when this
 * method is called, because they are not allowed within a compound file.
 */
static final void createCompoundFile(InfoStream infoStream, TrackingDirectoryWrapper directory,
    final SegmentInfo info, IOContext context, IOUtils.IOConsumer<Collection<String>> deleteFiles)
    throws IOException {
  // Maybe this check is not needed, but why take the risk?
  if (!directory.getCreatedFiles().isEmpty()) {
    throw new IllegalStateException("pass a clean trackingdir for CFS creation");
  }

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "create compound file");
  }

  // Now merge all added files:
  boolean success = false;
  try {
    info.getCodec().compoundFormat().write(directory, info, context);
    success = true;
  } finally {
    if (!success) {
      // Safe: these files must exist
      deleteFiles.accept(directory.getCreatedFiles());
    }
  }

  // Replace all previous files with the CFS/CFE files:
  info.setFiles(new HashSet<>(directory.getCreatedFiles()));
}
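The try/finally shape here is a general "all or nothing" idiom: track everything that gets created, and delete it if anything throws before success is set. The same pattern in isolation, as a plain-Java sketch (no Lucene types; the class and file handling are illustrative only):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

final class AllOrNothingWriter {
  static void writeAll(Path dir, List<String> names) throws IOException {
    List<Path> created = new ArrayList<>();
    boolean success = false;
    try {
      for (String name : names) {
        Path p = dir.resolve(name);
        Files.createFile(p);
        created.add(p); // track what we created so far
      }
      success = true;
    } finally {
      if (!success) {
        for (Path p : created) {
          Files.deleteIfExists(p); // roll back partial output
        }
      }
    }
  }
}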
PointsFormat fmt = state.segmentInfo.getCodec().pointsFormat();
if (fmt == null) {
  throw new IllegalStateException("field=\"" + perField.fieldInfo.name
      + "\" was indexed as points but codec does not support points");
}
DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat();
dvConsumer = fmt.fieldsConsumer(state);
static FieldInfos readFieldInfos(SegmentCommitInfo si) throws IOException {
  Codec codec = si.info.getCodec();
  FieldInfosFormat reader = codec.fieldInfosFormat();

  if (si.hasFieldUpdates()) {
    // There are updates, so read the latest gen'd field infos (always outside of CFS):
    final String segmentSuffix = Long.toString(si.getFieldInfosGen(), Character.MAX_RADIX);
    return reader.read(si.info.dir, si.info, segmentSuffix, IOContext.READONCE);
  } else if (si.info.getUseCompoundFile()) {
    // Compound file: open it and read the field infos from inside.
    try (Directory cfs = codec.compoundFormat().getCompoundReader(si.info.dir, si.info, IOContext.DEFAULT)) {
      return reader.read(cfs, si.info, "", IOContext.READONCE);
    }
  } else {
    // No compound file:
    return reader.read(si.info.dir, si.info, "", IOContext.READONCE);
  }
}
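A distilled restatement of the three-way dispatch above (the helper name is hypothetical, for illustration only): gen'd field infos always come from the segment directory with a base-36 suffix, while gen-less ones come from inside the compound file when one is used.

// Hypothetical helper summarizing where field infos are read from:
static String fieldInfosSource(boolean hasFieldUpdates, boolean useCompoundFile, long fieldInfosGen) {
  if (hasFieldUpdates) {
    return "segment dir, suffix=" + Long.toString(fieldInfosGen, Character.MAX_RADIX);
  } else if (useCompoundFile) {
    return "compound reader, suffix=\"\"";
  } else {
    return "segment dir, suffix=\"\"";
  }
}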
FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state);
boolean success = false;
try {
SegmentMerger(List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
    FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
  if (context.context != IOContext.Context.MERGE) {
    throw new IllegalArgumentException("IOContext.context should be MERGE; got: " + context.context);
  }
  mergeState = new MergeState(readers, segmentInfo, infoStream);
  directory = dir;
  this.codec = segmentInfo.getCodec();
  this.context = context;
  this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);

  // Compute the minimum version across all readers; a null leaf version
  // (a segment written before version tracking) forces the merged min to null.
  Version minVersion = Version.LATEST;
  for (CodecReader reader : readers) {
    Version leafMinVersion = reader.getMetaData().getMinVersion();
    if (leafMinVersion == null) {
      minVersion = null;
      break;
    }
    if (minVersion.onOrAfter(leafMinVersion)) {
      minVersion = leafMinVersion;
    }
  }
  assert segmentInfo.minVersion == null : "The min version should be set by SegmentMerger for merged segments";
  segmentInfo.minVersion = minVersion;

  if (mergeState.infoStream.isEnabled("SM")) {
    if (segmentInfo.getIndexSort() != null) {
      mergeState.infoStream.message("SM", "index sort during merge: " + segmentInfo.getIndexSort());
    }
  }
}
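The min-version loop above folds leaf versions into a single minimum, where one null leaf poisons the whole result. The same logic in isolation, as a sketch (the class and method names are made up for illustration):

import java.util.List;
import org.apache.lucene.util.Version;

final class MinVersionSketch {
  static Version fold(List<Version> leafVersions) {
    Version min = Version.LATEST;
    for (Version v : leafVersions) {
      if (v == null) {
        return null; // one unknown leaf means the merged minimum is unknown
      }
      if (min.onOrAfter(v)) {
        min = v; // v is the same or older, so it becomes the new minimum
      }
    }
    return min;
  }
}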
SegmentCoreReaders(Directory dir, SegmentCommitInfo si, IOContext context) throws IOException {
  final Codec codec = si.info.getCodec();
  final Directory cfsDir; // confusing name: if (cfs) it's the cfsDir, otherwise it's the segment's directory.
  boolean success = false;
  try {
    if (si.info.getUseCompoundFile()) {
      cfsDir = cfsReader = codec.compoundFormat().getCompoundReader(dir, si.info, context);
    } else {
      cfsReader = null;
      cfsDir = dir;
    }

    fieldsReaderOrig = si.info.getCodec().storedFieldsFormat().fieldsReader(cfsDir, si.info, coreFieldInfos, context);

    if (coreFieldInfos.hasVectors()) { // open term vector files only as needed
      termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
    } else {
      termVectorsReaderOrig = null;
    }
if (oldReader.isNRT) {
  // We must load liveDocs/DV updates from disk:
  Bits liveDocs = commitInfo.hasDeletions()
      ? commitInfo.info.getCodec().liveDocsFormat().readLiveDocs(commitInfo.info.dir, commitInfo, IOContext.READONCE)
      : null;
  newReaders[i] = new SegmentReader(commitInfo, oldReader, liveDocs, liveDocs,
      commitInfo.info.maxDoc(), false); // this is not an NRT reader!
} else {
  Bits liveDocs = commitInfo.hasDeletions()
      ? commitInfo.info.getCodec().liveDocsFormat().readLiveDocs(commitInfo.info.dir, commitInfo, IOContext.READONCE)
      : null;
  newReaders[i] = new SegmentReader(commitInfo, oldReader, liveDocs, liveDocs,
      commitInfo.info.maxDoc(), false);
}
out.writeString(si.getCodec().getName());
out.writeLong(siPerCommit.getDelGen());
int delCount = siPerCommit.getDelCount();
private void writeNorms(SegmentWriteState state, Sorter.DocMap sortMap) throws IOException {
  boolean success = false;
  NormsConsumer normsConsumer = null;
  try {
    if (state.fieldInfos.hasNorms()) {
      NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
      assert normsFormat != null;
      normsConsumer = normsFormat.normsConsumer(state);

      for (FieldInfo fi : state.fieldInfos) {
        PerField perField = getPerField(fi.name);
        assert perField != null;

        // We must check the final value of omitNorms for the FieldInfo: it could have
        // changed for this field since the first time we added it.
        if (fi.omitsNorms() == false && fi.getIndexOptions() != IndexOptions.NONE) {
          assert perField.norms != null : "field=" + fi.name;
          perField.norms.finish(state.segmentInfo.maxDoc());
          perField.norms.flush(state, sortMap, normsConsumer);
        }
      }
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(normsConsumer);
    } else {
      IOUtils.closeWhileHandlingException(normsConsumer);
    }
  }
}
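The split close in the finally block is deliberate: on success, close() may throw and the error should propagate; on failure, closeWhileHandlingException() suppresses secondary exceptions so the original one isn't masked. A minimal sketch of the same idiom (the class and the hypothetical resource are for illustration only):

import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.IOUtils;

final class CloseIdiomSketch {
  static void process(Closeable resource) throws IOException {
    boolean success = false;
    try {
      // ... do work with the resource ...
      success = true;
    } finally {
      if (success) {
        IOUtils.close(resource); // may throw; there is no prior exception to mask
      } else {
        IOUtils.closeWhileHandlingException(resource); // keep the original exception
      }
    }
  }
}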
    info.info.getUseCompoundFile(), info.info.getCodec(), info.info.getDiagnostics(),
    info.info.getId(), info.info.getAttributes(), info.info.getIndexSort());

SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(),
    info.getSoftDelCount(), info.getDelGen(),
Codec codec = info.info.getCodec();
codec.liveDocsFormat().writeLiveDocs(liveDocs, trackingDir, info, pendingDeleteCount, IOContext.DEFAULT);
success = true;
final Codec codec = si.info.getCodec();
try {
  if (si.hasDeletions()) {
Codec codec = info.info.getCodec();
final FixedBitSet bits;
if (sortMap == null) {