@Override
public IndexWriterConfig newInstance() {
  IndexWriterConfig verboseConfig = IndexWriterConfigs.standard();
  verboseConfig.setCodec(Codec.getDefault());
  return verboseConfig;
}
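For context, here is a minimal sketch of how a codec is wired into an IndexWriterConfig from client code. The index path and analyzer are placeholders, and plain `new IndexWriterConfig(...)` stands in for the `IndexWriterConfigs.standard()` test helper above:

import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class CodecConfigSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"))) {
      IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
      iwc.setCodec(Codec.getDefault()); // same call as in the snippet above
      try (IndexWriter writer = new IndexWriter(dir, iwc)) {
        writer.commit(); // the resulting segments_N file records the codec by name
      }
    }
  }
}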
private static Codec readCodec(DataInput input) throws IOException {
  final String name = input.readString();
  try {
    return Codec.forName(name);
  } catch (IllegalArgumentException e) {
    // maybe it's an old default codec that moved
    if (name.startsWith("Lucene")) {
      throw new IllegalArgumentException(
          "Could not load codec '" + name + "'. Did you forget to add lucene-backward-codecs.jar?", e);
    }
    throw e;
  }
}
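The codec name stored in the segments file is resolved through Java's SPI at read time, which is why a missing lucene-backward-codecs.jar surfaces as an IllegalArgumentException. A small self-contained sketch of that lookup; the hard-coded `name` is illustrative, since the real code reads it from the segments_N file:

import org.apache.lucene.codecs.Codec;

public class CodecLookupSketch {
  public static void main(String[] args) {
    String name = "Lucene70"; // illustrative; SegmentInfos reads this from the index
    try {
      Codec codec = Codec.forName(name); // SPI lookup of a registered codec
      System.out.println("loaded codec: " + codec.getName());
    } catch (IllegalArgumentException e) {
      // Old names such as "Lucene62" resolve only with lucene-backward-codecs.jar on the classpath.
      System.err.println("codec '" + name + "' is not registered: " + e.getMessage());
    }
  }
}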
private FieldInfos readFieldInfos() throws IOException {
  SegmentInfo segInfo = info.info;
  Directory dir = segInfo.dir;
  if (info.hasFieldUpdates() == false) {
    // updates always outside of CFS
    Closeable toClose;
    if (segInfo.getUseCompoundFile()) {
      toClose = dir = segInfo.getCodec().compoundFormat().getCompoundReader(segInfo.dir, segInfo, IOContext.READONCE);
    } else {
      toClose = null;
      dir = segInfo.dir;
    }
    try {
      return segInfo.getCodec().fieldInfosFormat().read(dir, segInfo, "", IOContext.READONCE);
    } finally {
      IOUtils.close(toClose);
    }
  } else {
    FieldInfosFormat fisFormat = segInfo.getCodec().fieldInfosFormat();
    final String segmentSuffix = Long.toString(info.getFieldInfosGen(), Character.MAX_RADIX);
    return fisFormat.read(dir, segInfo, segmentSuffix, IOContext.READONCE);
  }
}
SegmentCoreReaders(Directory dir, SegmentCommitInfo si, IOContext context) throws IOException {
  final Codec codec = si.info.getCodec();
  final Directory cfsDir; // confusing name: if (cfs) it's the cfsdir, otherwise it's the segment's directory.
  boolean success = false;
  if (si.info.getUseCompoundFile()) {
    cfsDir = cfsReader = codec.compoundFormat().getCompoundReader(dir, si.info, context);
  } else {
    cfsReader = null;
    cfsDir = dir;
  }
  coreFieldInfos = codec.fieldInfosFormat().read(cfsDir, si.info, "", context);
  final PostingsFormat format = codec.postingsFormat();
  // …
  if (coreFieldInfos.hasNorms()) {
    normsProducer = codec.normsFormat().normsProducer(segmentReadState);
    assert normsProducer != null;
  } else {
    normsProducer = null;
  }
  fieldsReaderOrig = si.info.getCodec().storedFieldsFormat().fieldsReader(cfsDir, si.info, coreFieldInfos, context);
  if (coreFieldInfos.hasVectors()) {
    termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
  } else {
    termVectorsReaderOrig = null;
  }
  if (coreFieldInfos.hasPointValues()) {
    pointsReader = codec.pointsFormat().fieldsReader(segmentReadState);
  } else {
    pointsReader = null;
  }
  // …
}
final Codec codec = info.info.getCodec();
final DocValuesFormat docValuesFormat = codec.docValuesFormat();
// …
fieldInfosFiles = writeFieldInfosGen(fieldInfos, trackingDir, codec.fieldInfosFormat());
// …
} finally {
  if (reader != this.reader) {
    // …
private ImmutableOpenMap<String, Long> getSegmentFileSizes(SegmentReader segmentReader) {
  Directory directory = null;
  SegmentCommitInfo segmentCommitInfo = segmentReader.getSegmentInfo();
  boolean useCompoundFile = segmentCommitInfo.info.getUseCompoundFile();
  if (useCompoundFile) {
    try {
      directory = engineConfig.getCodec().compoundFormat().getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READ);
    } catch (IOException e) {
      logger.warn(() -> new ParameterizedMessage("Error when opening compound reader for Directory [{}] and SegmentCommitInfo [{}]", segmentReader.directory(), segmentCommitInfo), e);
      // …
    }
  }
  // …
  if (useCompoundFile) {
    try {
      files = directory.listAll();
    } catch (IOException e) {
      final Directory finalDirectory = directory;
      // …
    }
  }
  // …
  long length = 0L;
  try {
    length = directory.fileLength(file);
  } catch (NoSuchFileException | FileNotFoundException e) {
    final Directory finalDirectory = directory;
    // …
  }
  // …
  try {
    directory.close();
  } catch (IOException e) {
    final Directory finalDirectory = directory;
    // …
  }
segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT);
CodecUtil.writeIndexHeader(segnOutput, "segments", VERSION_CURRENT,
    StringHelper.randomId(), Long.toString(nextGeneration, Character.MAX_RADIX));
// …
Version segmentVersion = siPerCommit.info.getVersion();
if (minSegmentVersion == null || segmentVersion.onOrAfter(minSegmentVersion) == false) {
  minSegmentVersion = segmentVersion;
}
// …
SegmentInfo si = siPerCommit.info;
segnOutput.writeString(si.name);
byte[] segmentID = si.getId();
segnOutput.writeBytes(segmentID, segmentID.length);
segnOutput.writeString(si.getCodec().getName());
segnOutput.writeLong(siPerCommit.getDelGen());
int delCount = siPerCommit.getDelCount();
// …
CodecUtil.writeFooter(segnOutput);
segnOutput.close();
directory.sync(Collections.singleton(segmentFileName));
success = true;
// … (finally block elided)
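The writeIndexHeader/writeFooter pair frames every codec file with a magic number, format name, version, object id, and a trailing checksum. A self-contained sketch of the same idiom against a throwaway file; the directory path, file name, and "ExampleFormat" name are all illustrative:

import java.nio.file.Paths;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.StringHelper;

public class HeaderFooterSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"))) {
      try (IndexOutput out = dir.createOutput("example.dat", IOContext.DEFAULT)) {
        // header: magic, format name, version, object id, suffix
        CodecUtil.writeIndexHeader(out, "ExampleFormat", 0, StringHelper.randomId(), "");
        out.writeString("payload");
        // footer: magic, algorithm id, CRC32 checksum of everything written above
        CodecUtil.writeFooter(out);
      }
    }
  }
}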
Version segmentVersion = siPerCommit.info.getVersion();
if (minSegmentVersion == null || segmentVersion.onOrAfter(minSegmentVersion) == false) {
  minSegmentVersion = segmentVersion;
}
// …
byte[] segmentID = si.getId();
if (segmentID.length != StringHelper.ID_LENGTH) {
  throw new IllegalStateException("cannot write segment: invalid id segment="
      + si.name + " id=" + StringHelper.idToString(segmentID));
}
// …
out.writeString(si.getCodec().getName());
out.writeLong(siPerCommit.getDelGen());
int delCount = siPerCommit.getDelCount();
if (oldReader != null && Arrays.equals(commitInfo.info.getId(), oldReader.getSegmentInfo().info.getId()) == false) {
  throw new IllegalStateException("same segment " + commitInfo.info.name
      + " has invalid doc count change; likely you are re-opening a reader after illegally removing index files yourself and building a new index in their place. Use IndexWriter.deleteAll or open a new IndexWriter using OpenMode.CREATE instead");
}
try {
  SegmentReader newReader;
  if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) {
    // …
  } else {
    if (oldReader.isNRT) {
      Bits liveDocs = commitInfo.hasDeletions()
          ? commitInfo.info.getCodec().liveDocsFormat().readLiveDocs(commitInfo.info.dir, commitInfo, IOContext.READONCE)
          : null;
      newReaders[i] = new SegmentReader(commitInfo, oldReader, liveDocs, liveDocs, /* … */
    } else {
      // …
      Bits liveDocs = commitInfo.hasDeletions()
          ? commitInfo.info.getCodec().liveDocsFormat().readLiveDocs(commitInfo.info.dir, commitInfo, IOContext.READONCE)
          : null;
      newReaders[i] = new SegmentReader(commitInfo, oldReader, liveDocs, liveDocs, /* … */
public CodecService(@Nullable MapperService mapperService, Logger logger) {
  final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
  if (mapperService == null) {
    codecs.put(DEFAULT_CODEC, new Lucene70Codec());
    codecs.put(BEST_COMPRESSION_CODEC, new Lucene70Codec(Mode.BEST_COMPRESSION));
  } else {
    codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
    codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger));
  }
  codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault());
  for (String codec : Codec.availableCodecs()) {
    codecs.put(codec, Codec.forName(codec));
  }
  this.codecs = codecs.immutableMap();
}
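The Elasticsearch-side registry above is seeded from Lucene's own SPI registry. A short sketch of what that underlying registry exposes; the printed names depend entirely on which jars are on the classpath:

import org.apache.lucene.codecs.Codec;

public class ListCodecs {
  public static void main(String[] args) {
    // e.g. "Lucene70", "SimpleText", plus anything from lucene-backward-codecs
    for (String name : Codec.availableCodecs()) {
      System.out.println(name + " -> " + Codec.forName(name).getClass().getName());
    }
    System.out.println("default: " + Codec.getDefault().getName());
  }
}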
assert liveDocs != null;
assert liveDocs.length() == info.info.maxDoc();
Codec codec = info.info.getCodec();
codec.liveDocsFormat().writeLiveDocs(liveDocs, trackingDir, info, pendingDeleteCount, IOContext.DEFAULT);
success = true;
// … (finally block elided)
IOContext context = new IOContext(new FlushInfo(newSegment.info.maxDoc(), newSegment.sizeInBytes()));
Set<String> originalFiles = newSegment.info.files();
newSegment.info.setUseCompoundFile(true);
// …
codec.segmentInfoFormat().write(directory, newSegment.info, context);
// …
bits = sortLiveDocs(flushedSegment.liveDocs, sortMap);
codec.liveDocsFormat().writeLiveDocs(bits, directory, info, delCount, context);
newSegment.setDelCount(delCount);
newSegment.advanceDelGen();
private void writeNorms(SegmentWriteState state, Sorter.DocMap sortMap) throws IOException {
  boolean success = false;
  NormsConsumer normsConsumer = null;
  try {
    if (state.fieldInfos.hasNorms()) {
      NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
      assert normsFormat != null;
      normsConsumer = normsFormat.normsConsumer(state);

      for (FieldInfo fi : state.fieldInfos) {
        PerField perField = getPerField(fi.name);
        assert perField != null;

        // we must check the final value of omitNorms for the fieldinfo: it could have
        // changed for this field since the first time we added it.
        if (fi.omitsNorms() == false && fi.getIndexOptions() != IndexOptions.NONE) {
          assert perField.norms != null : "field=" + fi.name;
          perField.norms.finish(state.segmentInfo.maxDoc());
          perField.norms.flush(state, sortMap, normsConsumer);
        }
      }
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(normsConsumer);
    } else {
      IOUtils.closeWhileHandlingException(normsConsumer);
    }
  }
}
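The success-flag pattern in writeNorms is a recurring Lucene idiom: on the happy path the consumer is closed normally, while on failure close() exceptions are suppressed so they cannot mask the primary one. A generic sketch of the idiom; open() and doWork() are placeholders standing in for the norms consumer and the per-field flush loop:

import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.IOUtils;

public class SuccessFlagIdiom {
  // placeholder resource standing in for e.g. a NormsConsumer
  static Closeable open() { return () -> {}; }
  static void doWork(Closeable c) throws IOException { /* per-field work would go here */ }

  public static void main(String[] args) throws IOException {
    boolean success = false;
    Closeable consumer = null;
    try {
      consumer = open();
      doWork(consumer);
      success = true;
    } finally {
      if (success) {
        IOUtils.close(consumer);                       // propagate close() failures
      } else {
        IOUtils.closeWhileHandlingException(consumer); // don't mask the primary exception
      }
    }
  }
}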
this.si = si.clone();
this.originalSi = si;
this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort());
final Codec codec = si.info.getCodec();
try {
  if (si.hasDeletions()) {
    hardLiveDocs = liveDocs = codec.liveDocsFormat().readLiveDocs(directory(), si, IOContext.READONCE);
  } else {
    assert si.getDelCount() == 0;
    // …
int maxDoc = state.segmentInfo.maxDoc();
DocValuesConsumer dvConsumer = null;
boolean success = false;
// …
DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat();
dvConsumer = fmt.fieldsConsumer(state);
/**
 * NOTE: this method creates a compound file for all files returned by
 * info.files(). While, generally, this may include separate norms and
 * deletion files, this SegmentInfo must not reference such files when this
 * method is called, because they are not allowed within a compound file.
 */
static final void createCompoundFile(InfoStream infoStream, TrackingDirectoryWrapper directory,
    final SegmentInfo info, IOContext context, IOUtils.IOConsumer<Collection<String>> deleteFiles) throws IOException {

  // maybe this check is not needed, but why take the risk?
  if (!directory.getCreatedFiles().isEmpty()) {
    throw new IllegalStateException("pass a clean trackingdir for CFS creation");
  }

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "create compound file");
  }
  // Now merge all added files
  boolean success = false;
  try {
    info.getCodec().compoundFormat().write(directory, info, context);
    success = true;
  } finally {
    if (!success) {
      // Safe: these files must exist
      deleteFiles.accept(directory.getCreatedFiles());
    }
  }

  // Replace all previous files with the CFS/CFE files:
  info.setFiles(new HashSet<>(directory.getCreatedFiles()));
}
/** Returns all files in use by this segment. */
public Collection<String> files() throws IOException {
  // Start from the wrapped info's files:
  Collection<String> files = new HashSet<>(info.files());

  // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
  // updates) and then maybe even be able to remove LiveDocsFormat.files().

  // Must separately add any live docs files:
  info.getCodec().liveDocsFormat().files(this, files);

  // must separately add any field updates files
  for (Set<String> updatefiles : dvUpdatesFiles.values()) {
    files.addAll(updatefiles);
  }

  // must separately add fieldInfos files
  files.addAll(fieldInfosFiles);

  return files;
}
String name = Codec.getDefault().getName();
if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
  // …
  LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
  assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
  assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
}
// …
if (randomBoolean()) {
  engine.config().setEnableGcDeletes(false);
int maxDoc = state.segmentInfo.maxDoc();
long t0 = System.nanoTime();
writeNorms(state, sortMap);
// …
docWriter.codec.fieldInfosFormat().write(state.directory, state.segmentInfo, "", state.fieldInfos, IOContext.DEFAULT);
if (docState.infoStream.isEnabled("IW")) {
  docState.infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write fieldInfos");