try {
  final long dirStart = termsOut.getFilePointer();
  final long indexDirStart = indexOut.getFilePointer();

  termsOut.writeVInt(fields.size());

  for (FieldMetaData field : fields) {
    termsOut.writeVInt(field.fieldInfo.number);
    assert field.numTerms > 0;
    termsOut.writeVLong(field.numTerms);
    termsOut.writeVInt(field.rootCode.length);
    termsOut.writeBytes(field.rootCode.bytes, field.rootCode.offset, field.rootCode.length);
    assert field.fieldInfo.getIndexOptions() != IndexOptions.NONE;
    if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
      termsOut.writeVLong(field.sumTotalTermFreq);
    }
    termsOut.writeVLong(field.sumDocFreq);
    termsOut.writeVInt(field.docCount);
    termsOut.writeVInt(field.longsSize);
    indexOut.writeVLong(field.indexStartFP);
    writeBytesRef(termsOut, field.minTerm);
    writeBytesRef(termsOut, field.maxTerm);
  }
  // ...
@Override
public void close() throws IOException {
  dataOut.close();
}
private void addTermsDict(SortedSetDocValues values) throws IOException {
  final long size = values.getValueCount();
  meta.writeVLong(size);
  meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT);
  meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT);
  long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT;
  DirectMonotonicWriter writer = DirectMonotonicWriter.getInstance(meta, addressBuffer, numBlocks, DIRECT_MONOTONIC_BLOCK_SHIFT);

  BytesRefBuilder previous = new BytesRefBuilder();
  long ord = 0;
  long start = data.getFilePointer();
  int maxLength = 0;
  TermsEnum iterator = values.termsEnum();
  for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
    if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) {
      // first term of each block is written in full
      writer.add(data.getFilePointer() - start);
      data.writeVInt(term.length);
      data.writeBytes(term.bytes, term.offset, term.length);
    } else {
      // other terms are prefix-compressed against the previous term
      final int prefixLength = StringHelper.bytesDifference(previous.get(), term);
      final int suffixLength = term.length - prefixLength;
      data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4)));
      if (prefixLength >= 15) {
        data.writeVInt(prefixLength - 15);
      }
      if (suffixLength >= 16) {
        data.writeVInt(suffixLength - 16);
      }
      data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength);
    }
    maxLength = Math.max(maxLength, term.length);
    previous.copyBytes(term);
    ++ord;
  }
  writer.finish();
  meta.writeInt(maxLength);
  meta.writeLong(start);
  // ...
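The packed byte above carries min(prefixLength, 15) in the low nibble and min(suffixLength - 1, 15) in the high nibble, with any overflow spilled into following VInts. A minimal decode sketch for one prefix-compressed entry, mirroring that encoding (hypothetical helper, not part of Lucene's API; `previous` is assumed to hold the prior term's bytes):

// Hypothetical helper: decodes one prefix-compressed term entry,
// reversing the nibble-packing done by the writer above.
static byte[] readCompressedTerm(DataInput in, byte[] previous) throws IOException {
  int token = Byte.toUnsignedInt(in.readByte());
  int prefixLength = token & 0x0F;
  int suffixLength = 1 + (token >>> 4);
  if (prefixLength == 15) prefixLength += in.readVInt();  // overflow beyond 15
  if (suffixLength == 16) suffixLength += in.readVInt();  // overflow beyond 16
  byte[] term = new byte[prefixLength + suffixLength];
  System.arraycopy(previous, 0, term, 0, prefixLength);   // shared prefix
  in.readBytes(term, prefixLength, suffixLength);         // new suffix
  return term;
}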
private static void writeBytesRef(IndexOutput out, BytesRef bytes) throws IOException {
  out.writeVInt(bytes.length);
  out.writeBytes(bytes.bytes, bytes.offset, bytes.length);
}
@Override
public void append(byte[] packedValue, long ord, int docID) throws IOException {
  assert packedValue.length == packedBytesLength;
  out.writeBytes(packedValue, 0, packedValue.length);
  out.writeInt(docID);
  if (singleValuePerDoc == false) {
    if (longOrds) {
      out.writeLong(ord);
    } else {
      assert ord <= Integer.MAX_VALUE;
      out.writeInt((int) ord);
    }
  }
  count++;
  assert expectedCount == 0 || count <= expectedCount;
}
private void writeIndex(IndexOutput out, int countPerLeaf, int numLeaves, byte[] packedIndex) throws IOException {
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(numDataDims);
  out.writeVInt(numIndexDims);
  out.writeVInt(countPerLeaf);
  out.writeVInt(bytesPerDim);

  assert numLeaves > 0;
  out.writeVInt(numLeaves);
  out.writeBytes(minPackedValue, 0, packedIndexBytesLength);
  out.writeBytes(maxPackedValue, 0, packedIndexBytesLength);

  out.writeVLong(pointCount);
  out.writeVInt(docsSeen.cardinality());
  out.writeVInt(packedIndex.length);
  out.writeBytes(packedIndex, 0, packedIndex.length);
}
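A reader has to consume these fields in exactly the order they were written. A minimal read sketch under that assumption (same CODEC_NAME/VERSION constants; the locals are illustrative, this is not Lucene's actual reader):

// Sketch: read the index metadata back in write order.
// Assumes packedIndexBytesLength == numIndexDims * bytesPerDim.
static void readIndexMetadata(IndexInput in) throws IOException {
  CodecUtil.checkHeader(in, CODEC_NAME, VERSION_CURRENT, VERSION_CURRENT);
  int numDataDims = in.readVInt();
  int numIndexDims = in.readVInt();
  int countPerLeaf = in.readVInt();
  int bytesPerDim = in.readVInt();
  int numLeaves = in.readVInt();
  byte[] minPackedValue = new byte[numIndexDims * bytesPerDim];
  byte[] maxPackedValue = new byte[numIndexDims * bytesPerDim];
  in.readBytes(minPackedValue, 0, minPackedValue.length);
  in.readBytes(maxPackedValue, 0, maxPackedValue.length);
  long pointCount = in.readVLong();
  int docCount = in.readVInt();
  byte[] packedIndex = new byte[in.readVInt()];
  in.readBytes(packedIndex, 0, packedIndex.length);
}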
private int createFile(Directory dir, String fileName) throws IOException {
  int size = rnd.nextInt(1000) + 1;
  byte[] data = randomBytes(size);
  IndexOutput o = dir.createOutput(fileName, IOContext.DEFAULT);
  o.writeBytes(data, data.length);
  o.close();
  return size;
}
private void checkStream(int size, int buffer) throws IOException {
  Random rand = new Random();
  byte[] data = new byte[size];
  rand.nextBytes(data);

  Directory dir = new RAMDirectory();
  IndexOutput out = dir.createOutput("test");
  out.writeBytes(data, data.length);
  out.close();

  InputStream in = new IndexInputStream(dir.openInput("test"));
  if (buffer != 0) {
    in = new BufferedInputStream(in, buffer);
  }

  byte[] buf = new byte[3];
  int len;
  int pos = 0;
  while ((len = in.read(buf)) > -1) {
    for (int i = 0; i < len; i++, pos++) {
      assertEquals(data[pos], buf[i]);
    }
  }
  in.close();

  // assert length
  assertEquals(data.length, pos);
}
@Test
public void testIndexWritingAndFinding() throws IOException, InterruptedException {
  verifyBoth(cache0, cache1);
  IndexOutput indexOutput = dirA.createOutput(filename, IOContext.DEFAULT);
  indexOutput.writeString("no need to write, nobody ever will read this");
  indexOutput.close();
  assertFileExistsHavingRLCount(filename, 1, true);
  IndexInput openInput = dirB.openInput(filename, IOContext.DEFAULT);
  assertFileExistsHavingRLCount(filename, 2, true);
  dirA.deleteFile(filename);
  assertFileExistsHavingRLCount(filename, 1, false);
  // Lucene does use clone() - the lock implementation ignores it, as a clone is
  // created on locked segments and released before the close on the parent object
  IndexInput clone = (IndexInput) openInput.clone();
  assertFileExistsHavingRLCount(filename, 1, false);
  clone.close();
  assertFileExistsHavingRLCount(filename, 1, false);
  openInput.close();
  assertFileNotExists(filename);
  dirA.close();
  dirB.close();
  verifyBoth(cache0, cache1);
}
private void copy(Directory src, Directory dist, String name) throws IOException {
  byte[] buf = new byte[bufferSize];
  IndexOutput os = null;
  IndexInput is = null;
  try {
    os = dist.createOutput(name);
    is = src.openInput(name);
    long len = is.length();
    long readCount = 0;
    while (readCount < len) {
      int toRead = readCount + bufferSize > len ? (int) (len - readCount) : bufferSize;
      is.readBytes(buf, 0, toRead);
      os.writeBytes(buf, toRead);
      readCount += toRead;
    }
  } finally {
    // graceful cleanup
    try {
      if (os != null) os.close();
    } finally {
      if (is != null) is.close();
    }
  }
}
public void testWriteChunks() throws Exception {
  final int BUFFER_SIZE = 64;
  Cache cache = cacheManager.getCache();
  Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME).chunkSize(BUFFER_SIZE).create();

  IndexOutput io = dir.createOutput("MyNewFile.txt", IOContext.DEFAULT);
  io.writeByte((byte) 66);
  io.writeByte((byte) 69);
  io.close();

  assertTrue(Arrays.asList(dir.listAll()).contains("MyNewFile.txt"));
  assertNotNull(cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE, -1)));

  // test contents by reading:
  byte[] buf = new byte[9];
  IndexInput ii = dir.openInput("MyNewFile.txt", IOContext.DEFAULT);
  ii.readBytes(buf, 0, (int) ii.length());
  ii.close();
  assertEquals(new String(new byte[] { 66, 69 }), new String(buf).trim());

  String testText = "This is some rubbish again that will span more than one chunk - one hopes. Who knows, maybe even three or four chunks.";
  io = dir.createOutput("MyNewFile.txt", IOContext.DEFAULT);
  ((InfinispanIndexOutput) io).seek(0);
  io.writeBytes(testText.getBytes(), 0, testText.length());
  io.close();

  // now compare.
  byte[] chunk1 = (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE, -1));
  byte[] chunk2 = (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 1, BUFFER_SIZE, -1));
  assert null != chunk1;
  assert null != chunk2;
  assert testText.equals(new String(chunk1) + new String(chunk2).trim());

  dir.close();
  DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
}
@Override
public void close() throws IOException {
  flushBuffer();
  _tmpOutput.close();
  IndexInput input = _directory.openInput(_name + Z_TMP);
  try {
    long len = input.length();
    long readCount = 0;
    while (readCount < len) {
      int toRead = readCount + _buffer.length > len ? (int) (len - readCount) : _buffer.length;
      input.readBytes(_buffer, 0, toRead);
      _output.writeBytes(_buffer, toRead);
      readCount += toRead;
    }
    _output.writeInt(_blockCount);
    _output.writeInt(_buffer.length);
    _output.writeLong(_position);
    _output.writeLong(VERSION);
  } finally {
    try {
      _output.close();
    } finally {
      input.close();
    }
  }
  _directory.deleteFile(_name + Z_TMP);
  _compressor.end();
}
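Note the trailer this writer leaves behind: after the copied data come two ints and two longs (4 + 4 + 8 + 8 = 24 bytes), so a reader can recover the metadata by seeking to length() - 24. A minimal sketch under that assumption (hypothetical reader; the meaning of _position is assumed from the writer above):

// Sketch: read back the 24-byte trailer appended in close().
static void readTrailer(Directory directory, String name) throws IOException {
  IndexInput in = directory.openInput(name);
  try {
    in.seek(in.length() - 24);        // 4 + 4 + 8 + 8 bytes
    int blockCount = in.readInt();
    int blockSize = in.readInt();
    long position = in.readLong();    // assumed: uncompressed length
    long version = in.readLong();
  } finally {
    in.close();
  }
}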
String entriesFile = IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION);
try (IndexOutput data = dir.createOutput(dataFile, context);
     IndexOutput entries = dir.createOutput(entriesFile, context)) {
  CodecUtil.writeIndexHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), "");
  CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");

  // write the number of files, then one (name, offset, length) entry per file
  entries.writeVInt(si.files().size());
  for (String file : si.files()) {
    long startOffset = data.getFilePointer();
    try (IndexInput in = dir.openInput(file, IOContext.READONCE)) {
      data.copyBytes(in, in.length());
    }
    long endOffset = data.getFilePointer();
    long length = endOffset - startOffset;
    entries.writeString(IndexFileNames.stripSegmentName(file));
    entries.writeLong(startOffset);
    entries.writeLong(length);
  }

  CodecUtil.writeFooter(data);
  CodecUtil.writeFooter(entries);
}
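The read side walks the entries file in the same shape. A minimal sketch (same ENTRY_CODEC/version constants assumed; the helper and map are illustrative, not the actual compound-file reader):

// Sketch: read the (name -> offset/length) table back from the entries file.
static Map<String, long[]> readEntries(Directory dir, String entriesFile, SegmentInfo si) throws IOException {
  Map<String, long[]> table = new HashMap<>();
  try (ChecksumIndexInput in = dir.openChecksumInput(entriesFile, IOContext.READONCE)) {
    CodecUtil.checkIndexHeader(in, ENTRY_CODEC, VERSION_CURRENT, VERSION_CURRENT, si.getId(), "");
    int numEntries = in.readVInt();
    for (int i = 0; i < numEntries; i++) {
      String name = in.readString();
      table.put(name, new long[] { in.readLong(), in.readLong() }); // offset, length
    }
    CodecUtil.checkFooter(in);
  }
  return table;
}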
@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (IndexOutput output = directory.createOutput(fileName, context)) {
    CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
    output.writeVInt(infos.size());
    for (FieldInfo fi : infos) {
      fi.checkConsistency();

      output.writeString(fi.name);
      output.writeVInt(fi.number);

      byte bits = 0x0;
      if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
      if (fi.omitsNorms()) bits |= OMIT_NORMS;
      if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
      output.writeByte(bits);

      output.writeByte(indexOptionsByte(fi.getIndexOptions()));

      // pack the DV type and hasNorms in one byte
      output.writeByte(docValuesByte(fi.getDocValuesType()));
      output.writeLong(fi.getDocValuesGen());
      output.writeMapOfStrings(fi.attributes());
    }
    CodecUtil.writeFooter(output);
  }
}
byte[] assertWrites(Directory dir, int blobSize) throws IOException {
  byte[] data = randomBytes(fileSize);
  IndexOutput o = dir.createOutput("test", IOContext.DEFAULT);
  o.writeBytes(data, data.length);
  o.close();
  assertTrue(dir.fileExists("test"));
  assertEquals(fileSize, dir.fileLength("test"));

  IndexInput i = dir.openInput("test", IOContext.DEFAULT);
  assertEquals(fileSize, i.length());
  byte[] result = new byte[fileSize];
  i.readBytes(result, 0, result.length);
  assertTrue(Arrays.equals(data, result));

  NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test");
  assertEquals(blobSize, testNode.getProperty(PROP_BLOB_SIZE).getValue(Type.LONG).longValue());
  assertBlobSizeInWrite(testNode.getProperty(JCR_DATA), blobSize, fileSize);
  return data;
}
String entriesFile = IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION);
try (IndexOutput data = dir.createOutput(dataFile, context);
     IndexOutput entries = dir.createOutput(entriesFile, context)) {
  CodecUtil.writeIndexHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), "");
  CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");

  entries.writeVInt(si.files().size());
  for (String file : si.files()) {
    long startOffset = data.getFilePointer();
    try (ChecksumIndexInput in = dir.openChecksumInput(file, IOContext.READONCE)) {
      // copy the index header, verifying that its id matches our own
      CodecUtil.verifyAndCopyIndexHeader(in, data, si.getId());

      // copy all bytes except the footer
      long numBytesToCopy = in.length() - CodecUtil.footerLength() - in.getFilePointer();
      data.copyBytes(in, numBytesToCopy);

      // verify the incoming file's checksum, then rewrite the footer with it
      long checksum = CodecUtil.checkFooter(in);
      data.writeInt(CodecUtil.FOOTER_MAGIC);
      data.writeInt(0);
      data.writeLong(checksum);
    }
    long endOffset = data.getFilePointer();
    long length = endOffset - startOffset;
    entries.writeString(IndexFileNames.stripSegmentName(file));
    entries.writeLong(startOffset);
    entries.writeLong(length);
  }
  // ...
synchronized private void persist() throws IOException {
  String fileName = SNAPSHOTS_PREFIX + nextWriteGen;
  IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT);
  boolean success = false;
  try {
    CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
    out.writeVInt(refCounts.size());
    for (Entry<Long, Integer> ent : refCounts.entrySet()) {
      out.writeVLong(ent.getKey());
      out.writeVInt(ent.getValue());
    }
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
      IOUtils.deleteFilesIgnoringExceptions(dir, fileName);
    } else {
      IOUtils.close(out);
    }
  }

  dir.sync(Collections.singletonList(fileName));

  if (nextWriteGen > 0) {
    String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1);
    // exception OK: likely it didn't exist
    IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile);
  }

  nextWriteGen++;
}
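Loading the map back follows the same layout: header, count, then (gen, refCount) pairs. A minimal sketch (hypothetical loader, assuming the same CODEC_NAME/VERSION constants; not copied from Lucene):

// Sketch: read the snapshot refCounts written by persist().
private Map<Long, Integer> loadRefCounts(String fileName) throws IOException {
  Map<Long, Integer> m = new HashMap<>();
  try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) {
    CodecUtil.checkHeader(in, CODEC_NAME, VERSION_CURRENT, VERSION_CURRENT);
    int count = in.readVInt();
    for (int i = 0; i < count; i++) {
      m.put(in.readVLong(), in.readVInt());
    }
  }
  return m;
}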
private void testOn(Directory dir, int writeSize, int readSize, Cache cache) throws IOException {
  if (cache != null) cache.clear(); // needed to make sure no chunks are left over in case of the Infinispan implementation
  final String filename = "chunkTest";
  IndexOutput indexOutput = dir.createOutput(filename, IOContext.DEFAULT);
  byte[] toWrite = fillBytes(writeSize);
  indexOutput.writeBytes(toWrite, writeSize);
  indexOutput.close();
  if (cache != null) {
    AssertJUnit.assertEquals(writeSize, DirectoryIntegrityCheck.deepCountFileSize(new FileCacheKey(INDEXNAME, filename, -1), cache, -1));
  }
  AssertJUnit.assertEquals(writeSize, indexOutput.getFilePointer());

  byte[] results = new byte[readSize];
  IndexInput openInput = dir.openInput(filename, IOContext.DEFAULT);
  try {
    openInput.readBytes(results, 0, readSize);
    for (int i = 0; i < writeSize && i < readSize; i++) {
      AssertJUnit.assertEquals(results[i], toWrite[i]);
    }
    if (readSize > writeSize) {
      AssertJUnit.fail("should have thrown an IOException for reading past EOF");
    }
  } catch (IOException ioe) {
    if (readSize <= writeSize) {
      AssertJUnit.fail("should not have thrown an IOException" + ioe.getMessage());
    }
  }
}
@Override
public void close() throws IOException {
  try {
    /*
     * write the offsets per field such that we know where
     * we need to load the FSTs from
     */
    long pointer = output.getFilePointer();
    output.writeVInt(fieldOffsets.size());
    for (Map.Entry<String, Long> entry : fieldOffsets.entrySet()) {
      output.writeString(entry.getKey());
      output.writeVLong(entry.getValue());
    }
    output.writeLong(pointer);
    CodecUtil.writeFooter(output);
  } finally {
    IOUtils.close(output);
  }
}
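Because the table's start offset is written as a fixed-width long immediately before the footer, a reader can find the table without scanning: seek to length() - footerLength() - 8, read the pointer, then seek to it. A minimal sketch under that assumption (hypothetical reader; names are illustrative):

// Sketch: locate and read the per-field offset table written in close().
static Map<String, Long> readFieldOffsets(IndexInput in) throws IOException {
  in.seek(in.length() - CodecUtil.footerLength() - 8); // fixed-width pointer before the footer
  long pointer = in.readLong();
  in.seek(pointer);
  Map<String, Long> fieldOffsets = new HashMap<>();
  int numFields = in.readVInt();
  for (int i = 0; i < numFields; i++) {
    fieldOffsets.put(in.readString(), in.readVLong());
  }
  return fieldOffsets;
}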
CodecUtil.writeIndexHeader(out, "segments", VERSION_CURRENT, StringHelper.randomId(), Long.toString(generation, Character.MAX_RADIX));
out.writeVInt(Version.LATEST.major);
out.writeVInt(Version.LATEST.minor);
out.writeVInt(Version.LATEST.bugfix);
out.writeVInt(indexCreatedVersionMajor);

out.writeLong(version);
out.writeVLong(counter); // write counter
out.writeInt(size());

out.writeVInt(minSegmentVersion.major);
out.writeVInt(minSegmentVersion.minor);
out.writeVInt(minSegmentVersion.bugfix);

// per-segment entries, written for each SegmentCommitInfo
out.writeString(si.name);
byte[] segmentID = si.getId();
if (segmentID.length != StringHelper.ID_LENGTH) {
  throw new IllegalStateException("cannot write segment: invalid id segment=" + si.name + " id=" + StringHelper.idToString(segmentID));
}
out.writeBytes(segmentID, segmentID.length);
out.writeString(si.getCodec().getName());
out.writeLong(siPerCommit.getDelGen());
int delCount = siPerCommit.getDelCount();
if (delCount < 0 || delCount > si.maxDoc()) {
  throw new IllegalStateException("cannot write segment: invalid maxDoc segment=" + si.name + " maxDoc=" + si.maxDoc() + " delCount=" + delCount);
}
out.writeInt(delCount);