@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  return in.createOutput(name, context);
}

@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  IndexOutput output = in.createOutput(name, context);
  createdFileNames.add(name);
  return output;
}

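The wrapper above records every file it creates, matching the shape of Lucene's TrackingDirectoryWrapper. A minimal usage sketch, assuming the standard org.apache.lucene.store imports (Directory, FSDirectory, IOContext, IndexOutput, TrackingDirectoryWrapper) plus java.nio.file.Paths; the path and file name are illustrative:

static void demoTracking() throws IOException {
  try (Directory dir = FSDirectory.open(Paths.get("/tmp/index"))) {
    TrackingDirectoryWrapper tracking = new TrackingDirectoryWrapper(dir);
    try (IndexOutput out = tracking.createOutput("_0.tmp", IOContext.DEFAULT)) {
      out.writeInt(42);
    }
    // getCreatedFiles() now contains "_0.tmp"; on failure, a caller can delete
    // exactly the partial files this wrapper saw being created.
    assert tracking.getCreatedFiles().contains("_0.tmp");
  }
}
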
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  return delegate.createOutput(name, context);
}

@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  writeLock.ensureValid();
  return in.createOutput(name, context);
}

@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  return getDirectory(name).createOutput(name, context);
}

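The variant above routes each file to one of several underlying directories by name, in the style of Lucene's FileSwitchDirectory. A hedged sketch of what such a getDirectory helper might look like; primaryExtensions, primaryDir, and secondaryDir are assumed fields, not part of the original snippet:

// Illustrative routing: files whose extension is in primaryExtensions go to
// primaryDir, everything else to secondaryDir.
private Directory getDirectory(String name) {
  int i = name.lastIndexOf('.');
  String ext = i == -1 ? "" : name.substring(i + 1);
  return primaryExtensions.contains(ext) ? primaryDir : secondaryDir;
}
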
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  ensureOpen();

  // This Directory is only supposed to be used during merging,
  // so all writes should have MERGE context, else there is a bug
  // somewhere that is failing to pass down the right IOContext:
  assert context.context == IOContext.Context.MERGE : "got context=" + context.context;

  // Because rateLimiter is bound to a particular merge thread, this method should
  // always be called from that context. Verify this.
  assert mergeThread == Thread.currentThread() :
      "Not the same merge thread, current=" + Thread.currentThread() + ", expected=" + mergeThread;

  return new RateLimitedIndexOutput(rateLimiter, in.createOutput(name, context));
}

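Both asserts here check internal invariants rather than user input: this wrapper exists only to throttle merge writes, and its rate limiter is bound to a single merge thread, so a non-MERGE context or a call from another thread indicates a bug in the caller, not a recoverable condition.
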
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  if (VERBOSE) {
    System.out.println("nrtdir.createOutput name=" + name);
  }
  if (doCacheWrite(name, context)) {
    if (VERBOSE) {
      System.out.println("  to cache");
    }
    return cache.createOutput(name, context);
  } else {
    return in.createOutput(name, context);
  }
}

@Override
public void finish() throws IOException {
  if (finished) {
    throw new IllegalStateException("already finished");
  }
  finished = true;
  CodecUtil.writeFooter(dataOut);

  String indexFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name,
                                                        writeState.segmentSuffix,
                                                        Lucene60PointsFormat.INDEX_EXTENSION);
  // Write index file
  try (IndexOutput indexOut = writeState.directory.createOutput(indexFileName, writeState.context)) {
    CodecUtil.writeIndexHeader(indexOut,
                               Lucene60PointsFormat.META_CODEC_NAME,
                               Lucene60PointsFormat.INDEX_VERSION_CURRENT,
                               writeState.segmentInfo.getId(),
                               writeState.segmentSuffix);
    int count = indexFPs.size();
    indexOut.writeVInt(count);
    for (Map.Entry<String,Long> ent : indexFPs.entrySet()) {
      FieldInfo fieldInfo = writeState.fieldInfos.fieldInfo(ent.getKey());
      if (fieldInfo == null) {
        throw new IllegalStateException("wrote field=\"" + ent.getKey() + "\" but that field doesn't exist in FieldInfos");
      }
      indexOut.writeVInt(fieldInfo.number);
      indexOut.writeVLong(ent.getValue());
    }
    CodecUtil.writeFooter(indexOut);
  }
}

/**
 * Copies an existing {@code src} file from directory {@code from}
 * to a non-existent file {@code dest} in this directory.
 */
public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
  boolean success = false;
  try (IndexInput is = from.openInput(src, context);
       IndexOutput os = createOutput(dest, context)) {
    os.copyBytes(is, is.length());
    success = true;
  } finally {
    if (!success) {
      IOUtils.deleteFilesIgnoringExceptions(this, dest);
    }
  }
}

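A minimal usage sketch for copyFrom, assuming the standard org.apache.lucene.store imports and two on-disk indexes; the paths and the "_0.cfs" file name are illustrative:

try (Directory src = FSDirectory.open(Paths.get("/tmp/srcIndex"));
     Directory dst = FSDirectory.open(Paths.get("/tmp/dstIndex"))) {
  // Copies the file byte-for-byte; if the copy fails midway, the finally
  // block above deletes the partial destination file.
  dst.copyFrom(src, "_0.cfs", "_0.cfs", IOContext.DEFAULT);
}
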
Lucene70NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeIndexHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeIndexHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    maxDoc = state.segmentInfo.maxDoc();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}

private synchronized void persist() throws IOException {
  String fileName = SNAPSHOTS_PREFIX + nextWriteGen;
  IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT);
  boolean success = false;
  try {
    CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
    out.writeVInt(refCounts.size());
    for (Entry<Long,Integer> ent : refCounts.entrySet()) {
      out.writeVLong(ent.getKey());
      out.writeVInt(ent.getValue());
    }
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
      IOUtils.deleteFilesIgnoringExceptions(dir, fileName);
    } else {
      IOUtils.close(out);
    }
  }

  dir.sync(Collections.singletonList(fileName));

  if (nextWriteGen > 0) {
    String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1);
    // exception OK: likely it didn't exist
    IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile);
  }

  nextWriteGen++;
}

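Note the crash-safety ordering in persist(): the new snapshots file is fully written and closed, then fsynced via dir.sync, and only then is the previous generation deleted, so a crash at any point leaves at least one complete generation on disk.
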
/** expert: Creates a new writer */
public Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeIndexHeader(data, dataCodec, Lucene70DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeIndexHeader(meta, metaCodec, Lucene70DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    maxDoc = state.segmentInfo.maxDoc();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}

/** Full constructor */
public Lucene60PointsWriter(SegmentWriteState writeState, int maxPointsInLeafNode, double maxMBSortInHeap) throws IOException {
  assert writeState.fieldInfos.hasPointValues();
  this.writeState = writeState;
  this.maxPointsInLeafNode = maxPointsInLeafNode;
  this.maxMBSortInHeap = maxMBSortInHeap;
  String dataFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name,
                                                       writeState.segmentSuffix,
                                                       Lucene60PointsFormat.DATA_EXTENSION);
  dataOut = writeState.directory.createOutput(dataFileName, writeState.context);
  boolean success = false;
  try {
    CodecUtil.writeIndexHeader(dataOut,
                               Lucene60PointsFormat.DATA_CODEC_NAME,
                               Lucene60PointsFormat.DATA_VERSION_CURRENT,
                               writeState.segmentInfo.getId(),
                               writeState.segmentSuffix);
    success = true;
  } finally {
    if (success == false) {
      IOUtils.closeWhileHandlingException(dataOut);
    }
  }
}

private void write(Directory directory) throws IOException {
  long nextGeneration = getNextPendingGeneration();
  String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS, "", nextGeneration);

  // Always advance the generation on write:
  generation = nextGeneration;

  IndexOutput segnOutput = null;
  boolean success = false;
  try {
    segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT);
    write(directory, segnOutput);
    segnOutput.close();
    directory.sync(Collections.singleton(segmentFileName));
    success = true;
  } finally {
    if (success) {
      pendingCommit = true;
    } else {
      // We hit an exception above; try to close the file
      // but suppress any exception:
      IOUtils.closeWhileHandlingException(segnOutput);
      // Try not to leave a truncated segments_N file in
      // the index:
      IOUtils.deleteFilesIgnoringExceptions(directory, segmentFileName);
    }
  }
}

termsOut = state.directory.createOutput(termsName, state.context);
boolean success = false;
IndexOutput indexOut = null;
// (excerpt: the enclosing try/finally that closes termsOut and indexOut
// while success is still false is elided here)
indexOut = state.directory.createOutput(indexName, state.context);
CodecUtil.writeIndexHeader(indexOut,
                           BlockTreeTermsReader.TERMS_INDEX_CODEC_NAME,
                           BlockTreeTermsReader.VERSION_CURRENT,
                           state.segmentInfo.getId(),
                           state.segmentSuffix);

@Override
public void writeLiveDocs(Bits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context) throws IOException {
  long gen = info.getNextDelGen();
  String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
  int delCount = 0;
  try (IndexOutput output = dir.createOutput(name, context)) {
    CodecUtil.writeIndexHeader(output, CODEC_NAME, VERSION_CURRENT, info.info.getId(),
                               Long.toString(gen, Character.MAX_RADIX));
    final int longCount = FixedBitSet.bits2words(bits.length());
    for (int i = 0; i < longCount; ++i) {
      long currentBits = 0;
      for (int j = i << 6, end = Math.min(j + 63, bits.length() - 1); j <= end; ++j) {
        if (bits.get(j)) {
          currentBits |= 1L << j; // mod 64
        } else {
          delCount += 1;
        }
      }
      output.writeLong(currentBits);
    }
    CodecUtil.writeFooter(output);
  }
  if (delCount != info.getDelCount() + newDelCount) {
    throw new CorruptIndexException("bits.deleted=" + delCount +
        " info.delcount=" + info.getDelCount() + " newdelcount=" + newDelCount, name);
  }
}

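The "mod 64" comment above is load-bearing: Java's long shifts use only the low six bits of the shift count (JLS 15.19), so 1L << j is equivalent to 1L << (j & 63), and each bit lands at the correct position inside the current 64-bit word without an explicit mask.
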
private void unCache(String fileName) throws IOException {
  // Only let one thread uncache at a time; this only
  // happens during commit() or close():
  synchronized (uncacheLock) {
    if (VERBOSE) {
      System.out.println("nrtdir.unCache name=" + fileName);
    }
    if (!cache.fileNameExists(fileName)) {
      // Another thread beat us...
      return;
    }
    assert slowFileExists(in, fileName) == false :
        "fileName=" + fileName + " exists both in cache and in delegate";
    final IOContext context = IOContext.DEFAULT;
    final IndexOutput out = in.createOutput(fileName, context);
    // Read the cached copy back and stream it into the delegate directory:
    IndexInput cacheInput = null;
    try {
      cacheInput = cache.openInput(fileName, context);
      out.copyBytes(cacheInput, cacheInput.length());
    } finally {
      IOUtils.close(cacheInput, out);
    }

    // Lock order: uncacheLock -> this
    synchronized (this) {
      // Must sync here because other sync methods have
      // if (cache.fileNameExists(name)) { ... } else { ... }:
      cache.deleteFile(fileName);
    }
  }
}

String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION);
String entriesFile = IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION);

try (IndexOutput data = dir.createOutput(dataFile, context);
     IndexOutput entries = dir.createOutput(entriesFile, context)) {
  CodecUtil.writeIndexHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), "");
  CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");

@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (IndexOutput output = directory.createOutput(fileName, context)) {
    CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
    output.writeVInt(infos.size());
    for (FieldInfo fi : infos) {
      fi.checkConsistency();

      output.writeString(fi.name);
      output.writeVInt(fi.number);

      byte bits = 0x0;
      if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
      if (fi.omitsNorms()) bits |= OMIT_NORMS;
      if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
      output.writeByte(bits);

      output.writeByte(indexOptionsByte(fi.getIndexOptions()));

      // pack the DV type and hasNorms in one byte
      output.writeByte(docValuesByte(fi.getDocValuesType()));
      output.writeLong(fi.getDocValuesGen());
      output.writeMapOfStrings(fi.attributes());
    }
    CodecUtil.writeFooter(output);
  }
}

@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (IndexOutput output = directory.createOutput(fileName, context)) {
    CodecUtil.writeIndexHeader(output, Lucene60FieldInfosFormat.CODEC_NAME, Lucene60FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
    output.writeVInt(infos.size());