/**
 * Verifies the directory structure while tolerating leftover files.
 * <p>
 * Lucene 5 creates temporary "pending_segments_n" files during commit that get renamed to
 * "segment_n", and since the NoopSegmentReadLocker never deletes files, some garbage are
 * left behind — those names are passed in {@code ignoring} so the check skips them.
 *
 * @param cache    the cache backing the index
 * @param ignoring file names to exclude from the integrity check
 */
void verifyIgnoringFiles(Cache cache, Set<String> ignoring) {
   DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEX_NAME, ignoring);
}
/**
 * Asserts on both cluster nodes that {@code fileName} exists with the expected
 * read-lock count, chunk size {@code CHUNK_SIZE} and FAT registration state.
 *
 * @param fileName              the index file to check
 * @param expectedReadcount     expected read-lock counter value
 * @param expectRegisteredInFat whether the file must be listed in the file list (FAT)
 */
void assertFileExistsHavingRLCount(String fileName, int expectedReadcount, boolean expectRegisteredInFat) {
   // Same expectation must hold on each node's cache.
   for (Cache nodeCache : new Cache[]{cache0, cache1}) {
      DirectoryIntegrityCheck.assertFileExistsHavingRLCount(nodeCache, fileName, INDEX_NAME, expectedReadcount, CHUNK_SIZE, expectRegisteredInFat, -1);
   }
}
/**
 * Asserts on both cluster nodes that {@code fileName} is (eventually) gone,
 * waiting up to 10 seconds per node for asynchronous deletions to complete.
 *
 * @param fileName the index file expected to be absent
 * @throws InterruptedException if interrupted while waiting for the deletion
 */
void assertFileNotExists(String fileName) throws InterruptedException {
   // Check each node's cache with the same 10s timeout.
   for (Cache nodeCache : new Cache[]{cache0, cache1}) {
      DirectoryIntegrityCheck.assertFileNotExists(nodeCache, INDEX_NAME, fileName, 10000L, -1);
   }
}
final long finalSize = REPEATABLE_BUFFER_SIZE + someTextAsBytes.length; assert ((InfinispanIndexOutput)io).length() == finalSize; assert ((InfinispanIndexOutput)io).length() == DirectoryIntegrityCheck.deepCountFileSize(new FileCacheKey(INDEXNAME,fileName, -1), cache, -1); DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
private void testOn(Directory dir, int writeSize, int readSize, Cache cache) throws IOException { if (cache != null) cache.clear();//needed to make sure no chunks are left over in case of Infinispan implementation final String filename = "chunkTest"; IndexOutput indexOutput = dir.createOutput(filename, IOContext.DEFAULT); byte[] toWrite = fillBytes(writeSize); indexOutput.writeBytes(toWrite, writeSize); indexOutput.close(); if (cache != null) { AssertJUnit.assertEquals(writeSize, DirectoryIntegrityCheck.deepCountFileSize(new FileCacheKey(INDEXNAME, filename, -1), cache, -1)); } AssertJUnit.assertEquals(writeSize, indexOutput.getFilePointer()); byte[] results = new byte[readSize]; IndexInput openInput = dir.openInput(filename, IOContext.DEFAULT); try { openInput.readBytes(results, 0, readSize); for (int i = 0; i < writeSize && i < readSize; i++) { AssertJUnit.assertEquals(results[i], toWrite[i]); } if (readSize > writeSize) AssertJUnit.fail("should have thrown an IOException for reading past EOF"); } catch (IOException ioe) { if (readSize <= writeSize) AssertJUnit.fail("should not have thrown an IOException" + ioe.getMessage()); } }
FileMetadata metadata = (FileMetadata) value; long totalFileSize = metadata.getSize(); long actualFileSize = deepCountFileSize(fileCacheKey, cache, affinitySegmentId); assertEquals(actualFileSize, totalFileSize); if (!ignoreFiles.contains(fileCacheKey.getFileName())) {
/**
 * Runs the full directory-structure integrity check against both nodes' caches.
 *
 * @param cache0 cache of the first node
 * @param cache1 cache of the second node
 */
void verifyBoth(Cache cache0, Cache cache1) {
   for (Cache nodeCache : new Cache[]{cache0, cache1}) {
      DirectoryIntegrityCheck.verifyDirectoryStructure(nodeCache, INDEX_NAME);
   }
}
/**
 * Convenience overload: verifies the directory structure while skipping the file names
 * listed in {@code ignoreFiles}, assuming no stress test ran and the default (-1)
 * affinity segment id.
 *
 * @param cache       the cache to inspect
 * @param indexName   the name of the unique index stored in the cache
 * @param ignoreFiles file names excluded from the garbage check
 */
public static void verifyDirectoryStructure(Cache cache, String indexName, Set<String> ignoreFiles) {
   verifyDirectoryStructure(cache, indexName, false, ignoreFiles, -1);
}
/**
 * Verifies that no garbage elements are left over in the cache and that for each type of
 * object the expected value is stored, including that the size metadata matches the bytes
 * actually used in chunks. Assumes a single index lives in the inspected cache and that it
 * is not currently opened by any IndexReader or IndexWriter.
 *
 * @param cache     the cache to inspect
 * @param indexName the name of the unique index stored in the cache
 */
public static void verifyDirectoryStructure(Cache cache, String indexName) {
   // Non-stress run, no ignored files, default affinity segment.
   verifyDirectoryStructure(cache, indexName, false, Collections.emptySet(), -1);
}
/**
 * Convenience overload: verifies the directory structure with no ignored files,
 * optionally relaxing checks after a stress test run.
 *
 * @param cache             the cache to inspect
 * @param indexName         the name of the unique index stored in the cache
 * @param wasAStressTest    true when a stress test ran against this index
 * @param affinitySegmentId affinity segment id used by the keys, or -1 when unused
 */
public static void verifyDirectoryStructure(Cache cache, String indexName, boolean wasAStressTest, int affinitySegmentId) {
   verifyDirectoryStructure(cache, indexName, wasAStressTest, Collections.emptySet(), affinitySegmentId);
}
/**
 * Runs the shared stress-test workload against a store-backed clustered directory,
 * then verifies the cache contents (stress-test mode relaxes some checks).
 */
@Test
public void stressTestOnStore() throws InterruptedException, IOException {
   cache = cacheManager.getCache();
   assert cache != null;
   Directory directory = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, indexName).create();
   PerformanceCompareStressTest.stressTestDirectory(directory, "InfinispanClusteredWith-Store");
   DirectoryIntegrityCheck.verifyDirectoryStructure(cache, indexName, true, -1);
}
/**
 * Helper for testReadWholeFile test:
 * creates a file and then verifies its readability in specific corner cases.
 * Then reuses the same parameters to verify the file rename capabilities.
 */
private void verifyOnBuffer(final String fileName, final int fileSize, final int bufferSize, Cache cache, Directory dir, final int readBuffer) throws IOException {
   createFileWithRepeatableContent(dir, fileName, fileSize);
   checkReadableAndIntact(dir, fileName, fileSize, readBuffer, cache);
   // Renaming must preserve content and readability under the new name only.
   final String renamedFileName = fileName + ".bak";
   ((DirectoryExtensions) dir).renameFile(fileName, renamedFileName);
   checkReadableAndIntact(dir, renamedFileName, fileSize, readBuffer, cache);
   List<String> allFiles = Arrays.asList(dir.listAll());
   assert allFiles.contains(renamedFileName);
   assert !allFiles.contains(fileName);
}

/** Asserts byte-wise and buffered reads work for the file and the directory structure is intact. */
private void checkReadableAndIntact(Directory dir, String fileName, int fileSize, int readBuffer, Cache cache) throws IOException {
   assertReadByteWorkingCorrectly(dir, fileName, fileSize);
   assertReadBytesWorkingCorrectly(dir, fileName, fileSize, readBuffer);
   DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
}
/**
 * Exercises whole-file reads across the chunking corner cases: a file smaller than one
 * chunk, one spanning many chunks, one whose last chunk is exactly full, and one whose
 * last chunk holds a single byte.
 */
@Test
public void testReadWholeFile() throws IOException {
   final int chunkSize = 64;
   Cache cache = cacheManager.getCache();
   Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
         .chunkSize(chunkSize)
         .overrideSegmentReadLocker(makeTestableReadLocker(cache, INDEXNAME))
         .create();

   // File smaller than a single chunk.
   verifyOnBuffer("SingleChunk.txt", 61, chunkSize, cache, dir, 15);

   // File spanning many chunks.
   final int veryBigFileSize = 10000;
   assert chunkSize < veryBigFileSize;
   verifyOnBuffer("MultipleChunks.txt", veryBigFileSize, chunkSize, cache, dir, 33);

   // File whose last chunk is completely filled.
   final int lastChunkFilledFileSize = 256;
   assert (lastChunkFilledFileSize % chunkSize) == 0;
   verifyOnBuffer("LastChunkFilled.txt", lastChunkFilledFileSize, chunkSize, cache, dir, 11);
   assertHasNChunks(4, cache, INDEXNAME, "LastChunkFilled.txt.bak", chunkSize);
   DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);

   // File whose last chunk contains a single byte.
   final int lonelyByteFileSize = 257;
   assert (lonelyByteFileSize % chunkSize) == 1;
   verifyOnBuffer("LonelyByteInLastChunk.txt", lonelyByteFileSize, chunkSize, cache, dir, 12);
   assertHasNChunks(5, cache, INDEXNAME, "LonelyByteInLastChunk.txt.bak", chunkSize);

   dir.close();
   DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
}
public void testWriteChunks() throws Exception { final int BUFFER_SIZE = 64; Cache cache = cacheManager.getCache(); Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME).chunkSize(BUFFER_SIZE).create(); IndexOutput io = dir.createOutput("MyNewFile.txt", IOContext.DEFAULT); io.writeByte((byte) 66); io.writeByte((byte) 69); io.close(); assertTrue(Arrays.asList(dir.listAll()).contains("MyNewFile.txt")); assertNotNull(cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE, -1))); // test contents by reading: byte[] buf = new byte[9]; IndexInput ii = dir.openInput("MyNewFile.txt", IOContext.DEFAULT); ii.readBytes(buf, 0, (int) ii.length()); ii.close(); assertEquals(new String(new byte[] { 66, 69 }), new String(buf).trim()); String testText = "This is some rubbish again that will span more than one chunk - one hopes. Who knows, maybe even three or four chunks."; io = dir.createOutput("MyNewFile.txt", IOContext.DEFAULT); ((InfinispanIndexOutput)io).seek(0); io.writeBytes(testText.getBytes(), 0, testText.length()); io.close(); // now compare. byte[] chunk1 = (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE, -1)); byte[] chunk2 = (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 1, BUFFER_SIZE, -1)); assert null != chunk1; assert null != chunk2; assert testText.equals(new String(chunk1) + new String(chunk2).trim()); dir.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME); }
@Test public void testWriteChunksDefaultChunks() throws Exception { Cache cache = cacheManager.getCache(); Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME).create(); final String testText = "This is some rubbish"; final byte[] testTextAsBytes = testText.getBytes(); IndexOutput io = dir.createOutput("MyNewFile.txt", IOContext.DEFAULT); io.writeByte((byte) 1); io.writeByte((byte) 2); io.writeByte((byte) 3); io.writeBytes(testTextAsBytes, testTextAsBytes.length); io.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME); FileCacheKey fileCacheKey = new FileCacheKey(INDEXNAME, "MyNewFile.txt", -1); assert null != cache.get(fileCacheKey); FileMetadata metadata = (FileMetadata) cache.get(fileCacheKey); AssertJUnit.assertEquals(testTextAsBytes.length + 3, metadata.getSize()); assert null != cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, DirectoryBuilderImpl.DEFAULT_BUFFER_SIZE, -1)); // test contents by reading: IndexInput ii = dir.openInput("MyNewFile.txt", IOContext.DEFAULT); assert ii.readByte() == 1; assert ii.readByte() == 2; assert ii.readByte() == 3; byte[] buf = new byte[testTextAsBytes.length]; ii.readBytes(buf, 0, testTextAsBytes.length); ii.close(); assert testText.equals(new String(buf).trim()); dir.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME); }
DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
@Test public void testReadRandomSampleFile() throws IOException { final int BUFFER_SIZE = 64; Cache cache = cacheManager.getCache(); Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME).chunkSize(BUFFER_SIZE).create(); final int FILE_SIZE = 1000; assert BUFFER_SIZE < FILE_SIZE; createFileWithRepeatableContent(dir, "RandomSampleFile.txt", FILE_SIZE); IndexInput indexInput = dir.openInput("RandomSampleFile.txt", IOContext.DEFAULT); assert indexInput.length() == FILE_SIZE; RepeatableLongByteSequence bytesGenerator = new RepeatableLongByteSequence(); Random r = new Random(); long seekPoint = 0; // Now it reads some random byte and it compares to the expected byte for (int i = 0; i < FILE_SIZE; i++) { if (seekPoint == i) { byte expectedByte = bytesGenerator.nextByte(); byte actualByte = indexInput.readByte(); assert expectedByte == actualByte; seekPoint = indexInput.getFilePointer() + r.nextInt(10); indexInput.seek(seekPoint); } else { bytesGenerator.nextByte(); } } indexInput.close(); dir.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME); }
@Test public void testIndexWritingAndFinding() throws IOException { final String indexName = "indexName"; final Cache cache0 = cache(0, "lucene"); final Cache cache1 = cache(1, "lucene"); TestSegmentReadLocker readLocker = new TestSegmentReadLocker(cache0, cache0, cache0, indexName); Directory dirA = DirectoryBuilder.newDirectoryInstance(cache0, cache0, cache0, indexName).overrideSegmentReadLocker(readLocker).create(); Directory dirB = DirectoryBuilder.newDirectoryInstance(cache1, cache1, cache1, indexName).overrideSegmentReadLocker(readLocker).create(); writeTextToIndex(dirA, 0, "hi from node A"); assertTextIsFoundInIds(dirA, "hi", 0); assertTextIsFoundInIds(dirB, "hi", 0); writeTextToIndex(dirB, 1, "hello node A, how are you?"); assertTextIsFoundInIds(dirA, "hello", 1); assertTextIsFoundInIds(dirB, "hello", 1); assertTextIsFoundInIds(dirA, "node", 1, 0); // node is keyword in both documents id=0 and id=1 assertTextIsFoundInIds(dirB, "node", 1, 0); removeByTerm(dirA, "from"); assertTextIsFoundInIds(dirB, "node", 1); dirA.close(); dirB.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache0, "indexName"); DirectoryIntegrityCheck.verifyDirectoryStructure(cache1, "indexName"); }
@Test @SuppressWarnings("unchecked") public void testIndexWritingAndFinding() throws IOException { final String indexName = "indexName"; final Cache<?,?> cache0 = cache(0, "lucene"); final Cache<?,?> cache1 = cache(1, "lucene"); TestSegmentReadLocker testSegmentReadLocker = new TestSegmentReadLocker((Cache<Object, Integer>) cache0, cache0, cache0, indexName); Directory dirA = DirectoryBuilder.newDirectoryInstance(cache0, cache0, cache0, indexName).overrideSegmentReadLocker(testSegmentReadLocker).create(); Directory dirB = DirectoryBuilder.newDirectoryInstance(cache1, cache1, cache1, indexName).overrideSegmentReadLocker(testSegmentReadLocker).create(); writeTextToIndex(dirA, 0, "hi from node A"); assertTextIsFoundInIds(dirA, "hi", 0); assertTextIsFoundInIds(dirB, "hi", 0); writeTextToIndex(dirB, 1, "hello node A, how are you?"); assertTextIsFoundInIds(dirA, "hello", 1); assertTextIsFoundInIds(dirB, "hello", 1); assertTextIsFoundInIds(dirA, "node", 1, 0); // node is keyword in both documents id=0 and id=1 assertTextIsFoundInIds(dirB, "node", 1, 0); removeByTerm(dirA, "from"); assertTextIsFoundInIds(dirB, "node", 1); dirA.close(); dirB.close(); DirectoryIntegrityCheck.verifyDirectoryStructure(cache0, "indexName"); DirectoryIntegrityCheck.verifyDirectoryStructure(cache1, "indexName"); }