/**
 * Keeps whatever compound-file decision is already recorded on the new segment,
 * deferring entirely to the segment's own flag.
 */
@Override
public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment, MergeContext mergeContext) {
  final boolean recordedSetting = newSegment.info.getUseCompoundFile();
  return recordedSetting;
}
/** Used for debugging. Format may suddenly change.
 *
 * <p>Current format looks like
 * <code>_a(3.1):c45/4:[indexSort=&lt;long: "timestamp"&gt;]</code>, which means
 * the segment's name is <code>_a</code>; it was created with Lucene 3.1 (or
 * '?' if it's unknown); it's using compound file
 * format (would be <code>C</code> if not compound); it
 * has 45 documents; it has 4 deletions (this part is
 * left off when there are no deletions); and it records an index sort on the
 * timestamp field (this part is omitted for unsorted segments). */
public String toString(int delCount) {
  StringBuilder s = new StringBuilder();
  // name(version):  — '?' stands in when the creating Lucene version is unknown
  s.append(name).append('(').append(version == null ? "?" : version).append(')').append(':');
  // lowercase 'c' = compound file, uppercase 'C' = non-compound
  char cfs = getUseCompoundFile() ? 'c' : 'C';
  s.append(cfs);
  s.append(maxDoc);
  // '/<delCount>' is appended only when the caller reports deletions
  if (delCount != 0) {
    s.append('/').append(delCount);
  }
  // ':[indexSort=...]' is appended only for sorted segments
  if (indexSort != null) {
    s.append(":[indexSort=");
    s.append(indexSort);
    s.append(']');
  }
  // TODO: we could append toString of attributes() here?
  return s.toString();
}
/** Returns true if this single info is already fully merged: it has no
 * pending deletes and its compound-file setting matches what this merge
 * policy would choose for it.
 * NOTE(review): unlike older variants of this method, no "same dir as the
 * writer" check is performed here despite what earlier comments claimed —
 * confirm that is intentional. */
protected final boolean isMerged(SegmentInfos infos, SegmentCommitInfo info, MergeContext mergeContext) throws IOException {
  assert mergeContext != null;
  int delCount = mergeContext.numDeletesToMerge(info);
  // sanity-check the reported deletion count against the commit info
  assert assertDelCount(delCount, info);
  // short-circuit: useCompoundFile(...) is only consulted when there are no deletes
  return delCount == 0 && useCompoundFile(infos, info, mergeContext) == info.info.getUseCompoundFile();
}
static FieldInfos readFieldInfos(SegmentCommitInfo si) throws IOException { Codec codec = si.info.getCodec(); FieldInfosFormat reader = codec.fieldInfosFormat(); if (si.hasFieldUpdates()) { // there are updates, we read latest (always outside of CFS) final String segmentSuffix = Long.toString(si.getFieldInfosGen(), Character.MAX_RADIX); return reader.read(si.info.dir, si.info, segmentSuffix, IOContext.READONCE); } else if (si.info.getUseCompoundFile()) { // cfs try (Directory cfs = codec.compoundFormat().getCompoundReader(si.info.dir, si.info, IOContext.DEFAULT)) { return reader.read(cfs, si.info, "", IOContext.READONCE); } } else { // no cfs return reader.read(si.info.dir, si.info, "", IOContext.READONCE); } }
private FieldInfos readFieldInfos() throws IOException { SegmentInfo segInfo = info.info; Directory dir = segInfo.dir; if (info.hasFieldUpdates() == false) { // updates always outside of CFS Closeable toClose; if (segInfo.getUseCompoundFile()) { toClose = dir = segInfo.getCodec().compoundFormat().getCompoundReader(segInfo.dir, segInfo, IOContext.READONCE); } else { toClose = null; dir = segInfo.dir; } try { return segInfo.getCodec().fieldInfosFormat().read(dir, segInfo, "", IOContext.READONCE); } finally { IOUtils.close(toClose); } } else { FieldInfosFormat fisFormat = segInfo.getCodec().fieldInfosFormat(); final String segmentSuffix = Long.toString(info.getFieldInfosGen(), Character.MAX_RADIX); return fisFormat.read(dir, segInfo, segmentSuffix, IOContext.READONCE); } }
// NOTE(review): incomplete fragment — the remainder of this try block is not visible here.
// Presumably reopens the SegmentReader whenever the commit's compound-file setting differs
// from the setting the old reader was opened with (or there is no old reader) — confirm
// against the full method.
try { SegmentReader newReader; if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) {
// NOTE(review): incomplete fragment of a per-segment diagnostics dump (CheckIndex-style):
// logs codec, compound-file flag and file count while mirroring the same values into
// segInfoStat; surrounding method not visible — do not assume more than what is shown.
msg(infoStream, "    codec=" + codec); segInfoStat.codec = codec; msg(infoStream, "    compound=" + info.info.getUseCompoundFile()); segInfoStat.compound = info.info.getUseCompoundFile(); msg(infoStream, "    numFiles=" + info.files().size()); Sort indexSort = info.info.getIndexSort();
// NOTE(review): incomplete fragment — trailing arguments of a SegmentInfo copy
// (carrying over compound-file flag, codec, diagnostics, id, attributes and index sort)
// followed by a SegmentCommitInfo rebuilt around it; the constructor opening and the
// remaining commit-info arguments are outside this view.
info.info.getUseCompoundFile(), info.info.getCodec(), info.info.getDiagnostics(), info.info.getId(), info.info.getAttributes(), info.info.getIndexSort()); SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getSoftDelCount(), info.getDelGen(),
// NOTE(review): incomplete fragment of a segment-info serializer — writes maxDoc, the
// compound-file flag as a YES/NO byte, and the diagnostics map; the enclosing method and
// how the collected file set is written are not visible here.
output.writeInt(si.maxDoc()); output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); output.writeMapOfStrings(si.getDiagnostics()); Set<String> files = si.files();
// NOTE(review): incomplete fragment — opens a compound reader when the segment uses CFS,
// aliasing it as both cfsDir and cfsReader (presumably so the reader can be closed later
// while cfsDir is used for reads); the else branch and cleanup are outside this view.
if (si.info.getUseCompoundFile()) { cfsDir = cfsReader = codec.compoundFormat().getCompoundReader(dir, si.info, context); } else {
private void fillSegmentInfo(SegmentReader segmentReader, boolean verbose, boolean search, Map<String, Segment> segments) { SegmentCommitInfo info = segmentReader.getSegmentInfo(); assert segments.containsKey(info.info.name) == false; Segment segment = new Segment(info.info.name); segment.search = search; segment.docCount = segmentReader.numDocs(); segment.delDocCount = segmentReader.numDeletedDocs(); segment.version = info.info.getVersion(); segment.compound = info.info.getUseCompoundFile(); try { segment.sizeInBytes = info.sizeInBytes(); } catch (IOException e) { logger.trace(() -> new ParameterizedMessage("failed to get size for [{}]", info.info.name), e); } segment.memoryInBytes = segmentReader.ramBytesUsed(); segment.segmentSort = info.info.getIndexSort(); if (verbose) { segment.ramTree = Accountables.namedAccountable("root", segmentReader); } segment.attributes = info.info.getAttributes(); // TODO: add more fine grained mem stats values to per segment info here segments.put(info.info.name, segment); }
// NOTE(review): incomplete fragment — copies deletion count, version and compound-file
// flag from the commit info into a Segment stats object; the enclosing method, the
// remainder of this try block and its catch are outside this view.
segment.delDocCount = info.getDelCount(); segment.version = info.info.getVersion(); segment.compound = info.info.getUseCompoundFile(); try { segment.sizeInBytes = info.sizeInBytes();
// NOTE(review): incomplete fragment — opening of a method that collects per-file sizes for
// a segment, branching on whether the segment uses a compound file; the body of both
// branches and the return are outside this view.
private ImmutableOpenMap<String, Long> getSegmentFileSizes(SegmentReader segmentReader) { Directory directory = null; SegmentCommitInfo segmentCommitInfo = segmentReader.getSegmentInfo(); boolean useCompoundFile = segmentCommitInfo.info.getUseCompoundFile(); if (useCompoundFile) { try {
/** Returns true if this single info is already fully merged: it has no
 * deletions and its compound-file setting matches this policy's current
 * compound-file setting. */
@Override
protected boolean isMerged(SegmentInfoPerCommit info) throws IOException {
  if (info.hasDeletions()) {
    return false;
  }
  return info.info.getUseCompoundFile() == getUseCompoundFile();
}
/** Returns true if this single info is already merged: it has no deletions
 * and its compound-file setting matches this policy's current compound-file
 * setting. (NOTE(review): despite the older comment wording, no directory or
 * norms check is performed here.) */
@Override protected boolean isMerged(SegmentInfoPerCommit info) throws IOException { return !info.hasDeletions() && info.info.getUseCompoundFile() == getUseCompoundFile(); }
/** Returns true if this single info is already fully merged: it has no
 * pending deletes and its compound-file setting matches what this merge
 * policy would choose for it. */
protected final boolean isMerged(SegmentInfos infos, SegmentCommitInfo info, MergeContext mergeContext) throws IOException {
  assert mergeContext != null;
  final int pendingDeletes = mergeContext.numDeletesToMerge(info);
  assert assertDelCount(pendingDeletes, info);
  if (pendingDeletes != 0) {
    // preserve short-circuit: useCompoundFile(...) is only consulted when there are no deletes
    return false;
  }
  return useCompoundFile(infos, info, mergeContext) == info.info.getUseCompoundFile();
}
/** Returns true if this single info is optimized: it has no deletions, no
 * separate norms, lives in the writer's directory, and matches the current
 * compound-file setting. */
private boolean isOptimized(IndexWriter writer, SegmentInfo info) throws IOException {
  if (info.hasDeletions()) {
    return false;
  }
  if (info.hasSeparateNorms()) {
    return false;
  }
  if (info.dir != writer.getDirectory()) {
    return false;
  }
  return info.getUseCompoundFile() == useCompoundFile;
}
/** Returns true if this single info is already fully merged: it has no
 * pending deletes, lives in the writer's directory, and matches the
 * compound-file setting this policy would choose for it. */
protected final boolean isMerged(SegmentInfos infos, SegmentCommitInfo info, IndexWriter writer) throws IOException {
  assert writer != null;
  if (writer.numDeletedDocs(info) > 0) {
    return false;
  }
  if (info.info.dir != writer.getDirectory()) {
    return false;
  }
  // only consulted once the cheaper checks above have passed, preserving the
  // original short-circuit evaluation order
  return useCompoundFile(infos, info, writer) == info.info.getUseCompoundFile();
}
/** Returns true if this single info is optimized: it has no deletions, no
 * separate norms, is in the same dir as the writer, and matches the current
 * compound file setting. */ private boolean isOptimized(IndexWriter writer, SegmentInfo info) throws IOException { return !info.hasDeletions() && !info.hasSeparateNorms() && info.dir == writer.getDirectory() && info.getUseCompoundFile() == useCompoundFile; }
/** Returns true if this single info is optimized: it has no deletions, no
 * separate norms, lives in the writer's directory, and matches the current
 * compound-file setting. */
@Override
protected boolean isMerged(SegmentInfo info) throws IOException {
  final IndexWriter w = writer.get();
  if (info.hasDeletions() || info.hasSeparateNorms()) {
    return false;
  }
  return info.dir == w.getDirectory() && info.getUseCompoundFile() == getUseCompoundFile();
}