/**
 * Loads all "entries" from the CacheLoader; considering this is actually a Lucene index,
 * that's going to transform segments into entries in a specific order, simplest entries first.
 *
 * @param entriesCollector loaded entries are collected in this set
 * @param maxEntries limits the number of entries loaded
 * @param marshaller used to wrap each loaded value in a MarshalledEntry
 * @throws PersistenceException
 */
protected <K, V> void loadAllEntries(final Set<MarshalledEntry<K, V>> entriesCollector, final int maxEntries, StreamingMarshaller marshaller) {
   int existingElements = entriesCollector.size();
   int toLoadElements = maxEntries - existingElements;
   if (toLoadElements <= 0) {
      return;
   }
   // Gather candidate keys first (no exclusions), then materialize each value.
   HashSet<IndexScopedKey> keysCollector = new HashSet<>();
   loadSomeKeys(keysCollector, Collections.emptySet(), toLoadElements);
   for (IndexScopedKey key : keysCollector) {
      Object value = load(key);
      // A key might have been removed concurrently; skip missing values.
      if (value != null) {
         MarshalledEntry cacheEntry = new MarshalledEntryImpl(key, value, null, marshaller);
         entriesCollector.add(cacheEntry);
      }
   }
}
@Override
public MarshalledEntry load(final Object key) {
   if (key instanceof IndexScopedKey) {
      final IndexScopedKey indexKey = (IndexScopedKey) key;
      DirectoryLoaderAdaptor directoryAdaptor = getDirectory(indexKey);
      Object value = directoryAdaptor.load(indexKey);
      if (value != null) {
         return ctx.getMarshalledEntryFactory().newMarshalledEntry(key, value, null);
      } else {
         return null;
      }
   } else {
      // Keys of unexpected types are not an error: this loader only serves index-scoped keys.
      log.cacheLoaderIgnoringKey(key);
      return null;
   }
}
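/*
 * A minimal usage sketch for the two methods above. Everything named here
 * (cacheLoader, marshaller, the index/file names and the buffer/segment
 * values) is an illustrative assumption, not taken from this code; and since
 * loadAllEntries is protected, this would run from a subclass or a
 * same-package test.
 */
Set<MarshalledEntry<Object, Object>> entries = new HashSet<>();
cacheLoader.loadAllEntries(entries, 100, marshaller);   // bulk-load at most 100 entries

// A single segment chunk can also be fetched directly via its scoped key:
MarshalledEntry entry = cacheLoader.load(new ChunkCacheKey("myIndex", "_0.cfs", 0, 16384, -1));
if (entry != null) {
   byte[] chunk = (byte[]) entry.getValue();   // chunk values are plain byte arrays
}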
public void testSmallChunkLoading() throws IOException {
   Directory mockDirectory = createMockDirectory();
   DirectoryLoaderAdaptor adaptor = new DirectoryLoaderAdaptor(mockDirectory, INDEX_NAME, AUTO_BUFFER, -1);

   // Full-size chunks: the first and an arbitrary middle chunk both span AUTO_BUFFER bytes.
   Object loaded = adaptor.load(new ChunkCacheKey(INDEX_NAME, FILE_NAME, 0, AUTO_BUFFER, segmentId));
   AssertJUnit.assertTrue(loaded instanceof byte[]);
   AssertJUnit.assertEquals(AUTO_BUFFER, ((byte[]) loaded).length);

   loaded = adaptor.load(new ChunkCacheKey(INDEX_NAME, FILE_NAME, 5, AUTO_BUFFER, segmentId));
   AssertJUnit.assertTrue(loaded instanceof byte[]);
   AssertJUnit.assertEquals(AUTO_BUFFER, ((byte[]) loaded).length);

   // The last chunk holds only the remainder of the file.
   final int lastChunk = (int) (TEST_SIZE / AUTO_BUFFER);
   final long lastChunkSize = TEST_SIZE % AUTO_BUFFER;
   AssertJUnit.assertEquals(9, lastChunkSize);
   loaded = adaptor.load(new ChunkCacheKey(INDEX_NAME, FILE_NAME, lastChunk, AUTO_BUFFER, segmentId));
   AssertJUnit.assertTrue(loaded instanceof byte[]);
   AssertJUnit.assertEquals(lastChunkSize, ((byte[]) loaded).length);
}
public void testAutoChunkingOnLargeFiles() throws IOException {
   Directory mockDirectory = createMockDirectory();
   FileCacheKey k = new FileCacheKey(INDEX_NAME, FILE_NAME, segmentId);
   DirectoryLoaderAdaptor adaptor = new DirectoryLoaderAdaptor(mockDirectory, INDEX_NAME, AUTO_BUFFER, -1);

   // Loading the file key yields metadata describing size and chunking, not the bytes themselves.
   Object loaded = adaptor.load(k);
   AssertJUnit.assertTrue(loaded instanceof FileMetadata);
   FileMetadata metadata = (FileMetadata) loaded;
   AssertJUnit.assertEquals(TEST_SIZE, metadata.getSize());
   AssertJUnit.assertEquals(AUTO_BUFFER, metadata.getBufferSize());
}
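/*
 * Both tests rely on a createMockDirectory() helper that is not shown here.
 * Below is a sketch of one plausible implementation, under explicit
 * assumptions: a Lucene 4.x-style RAMDirectory, TEST_SIZE as the file length
 * in bytes and FILE_NAME as the single file to create. This is illustrative
 * only, not the project's actual helper. Requires org.apache.lucene.store
 * (Directory, RAMDirectory, IndexOutput, IOContext) and java.util.Random.
 */
private Directory createMockDirectory() throws IOException {
   Directory directory = new RAMDirectory();
   IndexOutput output = directory.createOutput(FILE_NAME, IOContext.DEFAULT);
   byte[] content = new byte[(int) TEST_SIZE];
   new Random(7L).nextBytes(content);   // deterministic pseudo-random content
   output.writeBytes(content, content.length);
   output.close();
   return directory;
}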