private static SegmentCommitInfo getSegmentCommitInfo()
{
    SegmentInfo segmentInfo = new SegmentInfo( mock( Directory.class ), Version.LATEST, "test", Integer.MAX_VALUE, true,
            mock( Codec.class ), MapUtil.stringMap(), RandomUtils.nextBytes( 16 ), MapUtil.stringMap() );
    return new SegmentCommitInfo( segmentInfo, 1, 1L, 1L, 1L );
}
SegmentInfo si = new SegmentInfo(directoryOrig, Version.LATEST, null, mergeSegmentName, -1, false, codec,
    Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
Map<String,String> details = new HashMap<>();
details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
deleteSlice = deleteQueue.newSlice();
segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, Version.LATEST, segmentName, -1, false, codec,
    Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), indexWriterConfig.getIndexSort());
assert numDocsInRAM == 0;
if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), info.info.getMinVersion(), segName,
    info.info.maxDoc(), info.info.getUseCompoundFile(), info.info.getCodec(), info.info.getDiagnostics(),
    info.info.getId(), info.info.getAttributes(), info.info.getIndexSort());
SegmentInfo info = new SegmentInfo(directoryOrig, Version.LATEST, null, mergedName, -1, false, codec,
    Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID,
    attributes, indexSort);
si.setFiles(files);
} catch (Throwable exception) {
si = new SegmentInfo(dir, version, minVersion, segment, docCount, isCompoundFile, null, diagnostics, segmentID,
    attributes, indexSort);
si.setFiles(files);
} catch (Throwable exception) {
public Object clone() {
  SegmentInfo si = new SegmentInfo(name, docCount, dir);
  si.isCompoundFile = isCompoundFile;
  si.delGen = delGen;
  si.delCount = delCount;
  si.preLockless = preLockless;
  si.hasSingleNormFile = hasSingleNormFile;
  if (normGen != null) {
    si.normGen = (long[]) normGen.clone();
  }
  si.docStoreOffset = docStoreOffset;
  si.docStoreSegment = docStoreSegment;
  si.docStoreIsCompoundFile = docStoreIsCompoundFile;
  return si;
}
/**
 * Adds a document to this index, using the provided analyzer instead of the
 * value of {@link #getAnalyzer()}. If the document contains more than
 * {@link #maxFieldLength} terms for a given field, the remainder are
 * discarded.
 */
public void addDocument(Document doc, Analyzer analyzer) throws IOException {
  DocumentWriter dw = new DocumentWriter(ramDirectory, analyzer, similarity, maxFieldLength);
  String segmentName = newSegmentName();
  dw.addDocument(segmentName, doc);
  synchronized (this) {
    segmentInfos.addElement(new SegmentInfo(segmentName, 1, ramDirectory));
    maybeMergeSegments();
  }
}
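The method buffers the document in the writer's RAMDirectory under a fresh segment name and then lets maybeMergeSegments() fold the single-document segment into larger ones. A minimal, hypothetical caller sketch, assuming the same era's IndexWriter(Directory, Analyzer, boolean) constructor, Field.Text factory, and SimpleAnalyzer/StopAnalyzer; none of these names come from the snippet above:

// Hypothetical usage sketch: index one document with a per-document analyzer
// that overrides the analyzer the writer was opened with.
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
Document doc = new Document();
doc.add(Field.Text("contents", "per-document analysis example"));
writer.addDocument(doc, new StopAnalyzer());   // only this document is analyzed with StopAnalyzer
writer.close();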
public void split(Path destDir, String[] segs) throws IOException {
  Files.createDirectories(destDir);
  FSDirectory destFSDir = FSDirectory.open(destDir);
  SegmentInfos destInfos = new SegmentInfos();
  destInfos.counter = infos.counter;
  for (String n : segs) {
    SegmentCommitInfo infoPerCommit = getInfo(n);
    SegmentInfo info = infoPerCommit.info;
    // Same info just changing the dir:
    SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.maxDoc(),
        info.getUseCompoundFile(), info.getCodec(), info.getDiagnostics(), info.getId(),
        new HashMap<String,String>());
    destInfos.add(new SegmentCommitInfo(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getDelGen(),
        infoPerCommit.getFieldInfosGen(), infoPerCommit.getDocValuesGen()));
    // now copy files over
    Collection<String> files = infoPerCommit.files();
    for (final String srcName : files) {
      Path srcFile = dir.resolve(srcName);
      Path destFile = destDir.resolve(srcName);
      Files.copy(srcFile, destFile);
    }
  }
  destInfos.changed();
  destInfos.commit(destFSDir);
  // System.out.println("destDir:"+destDir.getAbsolutePath());
  }
}
public void split(Path destDir, String[] segs) throws IOException {
  Files.createDirectories(destDir);
  FSDirectory destFSDir = FSDirectory.open(destDir);
  SegmentInfos destInfos = new SegmentInfos(infos.getIndexCreatedVersionMajor());
  destInfos.counter = infos.counter;
  for (String n : segs) {
    SegmentCommitInfo infoPerCommit = getInfo(n);
    SegmentInfo info = infoPerCommit.info;
    // Same info just changing the dir:
    SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.getMinVersion(), info.name,
        info.maxDoc(), info.getUseCompoundFile(), info.getCodec(), info.getDiagnostics(), info.getId(),
        new HashMap<>(), null);
    destInfos.add(new SegmentCommitInfo(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getSoftDelCount(),
        infoPerCommit.getDelGen(), infoPerCommit.getFieldInfosGen(), infoPerCommit.getDocValuesGen()));
    // now copy files over
    Collection<String> files = infoPerCommit.files();
    for (final String srcName : files) {
      Path srcFile = dir.resolve(srcName);
      Path destFile = destDir.resolve(srcName);
      Files.copy(srcFile, destFile);
    }
  }
  destInfos.changed();
  destInfos.commit(destFSDir);
  // System.out.println("destDir:"+destDir.getAbsolutePath());
  }
}
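Both split variants rebuild each selected segment's SegmentInfo against the destination directory, re-wrap it in a SegmentCommitInfo with the original deletion and generation counters, copy the segment files verbatim, and finally commit a fresh SegmentInfos in destDir; the source index is never modified. A hypothetical caller sketch, assuming the enclosing class is Lucene's IndexSplitter tool with a Path-taking constructor (the class name, segment names, and paths are assumptions, not taken from the snippet):

// Hypothetical usage sketch: copy two named segments of an existing index
// into a brand-new index directory. Requires java.nio.file.Paths.
IndexSplitter splitter = new IndexSplitter(Paths.get("/path/to/source-index"));
splitter.split(Paths.get("/path/to/dest-index"), new String[] { "_0", "_3" });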
@Override
public SegmentInfo read(Directory dir, String segment, byte segmentID[], IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    int codecVersion = CodecUtil.checkHeader(input, Lucene46SegmentInfoFormat.CODEC_NAME,
        Lucene46SegmentInfoFormat.VERSION_START, Lucene46SegmentInfoFormat.VERSION_CURRENT);
    final Version version;
    try {
      version = Version.parse(input.readString());
    } catch (ParseException pe) {
      throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
    }
    final int docCount = input.readInt();
    if (docCount < 0) {
      throw new CorruptIndexException("invalid docCount: " + docCount, input);
    }
    final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
    final Map<String,String> diagnostics = Collections.unmodifiableMap(input.readStringStringMap());
    final Set<String> files = Collections.unmodifiableSet(input.readStringSet());
    if (codecVersion >= Lucene46SegmentInfoFormat.VERSION_CHECKSUM) {
      CodecUtil.checkFooter(input);
    } else {
      CodecUtil.checkEOF(input);
    }
    final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics,
        null, Collections.<String,String>emptyMap());
    si.setFiles(files);
    return si;
  }
}
deleteSlice = deleteQueue.newSlice();
segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, segmentName, -1, false, codec,
    Collections.<String,String>emptyMap(), StringHelper.randomId(), new HashMap<String,String>());
assert numDocsInRAM == 0;
if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
SegmentInfo si = new SegmentInfo(input.readString(), input.readInt(), directory);
addElement(si);
SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), segName, info.info.maxDoc(),
    info.info.getUseCompoundFile(), info.info.getCodec(), info.info.getDiagnostics(), info.info.getId(),
    info.info.getAttributes());
/** Merges the provided indexes into this index.
 * <p>After this completes, the index is optimized.</p>
 * <p>The provided IndexReaders are not closed.</p> */
public synchronized void addIndexes(IndexReader[] readers) throws IOException {
  optimize();                                     // start with zero or 1 seg
  String mergedName = newSegmentName();
  SegmentMerger merger = new SegmentMerger(directory, mergedName, false);
  if (segmentInfos.size() == 1)                   // add existing index, if any
    merger.add(new SegmentReader(segmentInfos.info(0)));
  for (int i = 0; i < readers.length; i++)        // add new indexes
    merger.add(readers[i]);
  int docCount = merger.merge();                  // merge 'em
  segmentInfos.setSize(0);                        // pop old infos & add new
  segmentInfos.addElement(new SegmentInfo(mergedName, docCount, directory));
  synchronized (directory) {                      // in- & inter-process sync
    new Lock.With(directory.makeLock("commit.lock"), COMMIT_LOCK_TIMEOUT) {
      public Object doBody() throws IOException {
        segmentInfos.write(directory);            // commit changes
        return null;
      }
    }.run();
  }
}
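Because the method starts with optimize() and ends by replacing the whole SegmentInfos list with the single merged segment, a caller only has to hand in open readers; they are merged into the writer's index but left open. A hypothetical caller sketch, assuming the same era's static IndexReader.open(Directory) factory (the writer and directory variable names are placeholders, not from the snippet):

// Hypothetical usage sketch: merge two other on-disk indexes into this writer's index.
IndexReader[] readers = new IndexReader[] {
    IndexReader.open(otherDirectory1),
    IndexReader.open(otherDirectory2)
};
writer.addIndexes(readers);   // merges everything into one segment; readers stay open
readers[0].close();
readers[1].close();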
final Map<String,String> attributes = input.readMapOfStrings();
si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID,
    attributes, null);
si.setFiles(files);
} catch (Throwable exception) {