/**
 * Builds a closeable iterator over the filter results: the entry stream is run
 * through the filter/converter produced by {@code createFilter()}, then unwrapped
 * to just the {@link ObjectFilter.FilterResult} values.
 */
@Override
protected CloseableIterator<ObjectFilter.FilterResult> getIterator() {
   CacheStream<CacheEntry<?, ObjectFilter.FilterResult>> filtered =
         (CacheStream<CacheEntry<?, ObjectFilter.FilterResult>>) CacheFilters.filterAndConvert(
               cache.cacheEntrySet().stream(), createFilter());
   return Closeables.iterator(filtered.map(CacheEntry::getValue));
}
// Verifies that a newly added listener receives the cache's current contents as
// initial-state events. The underlying cache stream is mocked, so the ten seeded
// ImmortalCacheEntry instances are exactly what the listener must observe.
private void testSimpleCacheStarting(final StateListener<String, String> listener) {
   // Seed deterministic entries: key-0..key-9 -> value-0..value-9.
   final List<CacheEntry<String, String>> initialValues = new ArrayList<>(10);
   for (int i = 0; i < 10; i++) {
      String key = "key-" + i;
      String value = "value-" + i;
      initialValues.add(new ImmortalCacheEntry(key, value));
   }
   // Stub the entry-set stream so that iterating it yields the seeded entries.
   // doReturn(..).when(..) is used (rather than when(..).thenReturn(..)) so the
   // stubbed iterator() is not invoked during stubbing.
   CacheStream mockStream = mockStream();
   doReturn(initialValues.iterator()).when(mockStream).iterator();
   when(mockCache.withEncoding(any(Class.class), any(Class.class)).cacheEntrySet().stream()).thenReturn(mockStream);
   n.addListener(listener);
   // The listener must have been handed one event per seeded entry.
   verifyEvents(isClustered(listener), listener, initialValues);
}
// Verifies that the user-supplied filter/converter are NOT invoked while the
// initial-state iteration runs: the stream itself is mocked, so any invocation
// of the mocks would come from the notifier, which must not call them here.
private void testFilterConverterUnusedDuringIteration(final StateListener<String, String> listener) {
   // Seed deterministic entries: key-0..key-9 -> value-0..value-9.
   final List<CacheEntry<String, String>> initialValues = new ArrayList<CacheEntry<String, String>>(10);
   for (int i = 0; i < 10; i++) {
      String key = "key-" + i;
      String value = "value-" + i;
      initialValues.add(new ImmortalCacheEntry(key, value));
   }
   // Note we don't actually use the filter/converter to retrieve values since it is being mocked, thus we can assert
   // the filter/converter are not used by us
   CacheStream mockStream = mockStream();
   doReturn(initialValues.iterator()).when(mockStream).iterator();
   when(mockCache.withEncoding(any(Class.class), any(Class.class)).cacheEntrySet().stream()).thenReturn(mockStream);
   // Serializable mocks: in clustered mode the filter/converter travel to other nodes.
   CacheEventFilter filter = mock(CacheEventFilter.class, withSettings().serializable());
   CacheEventConverter converter = mock(CacheEventConverter.class, withSettings().serializable());
   n.addListener(listener, filter, converter);
   verifyEvents(isClustered(listener), listener, initialValues);
   // Neither mock may have been touched during the initial-state transfer.
   verify(filter, never()).accept(any(), any(), any(Metadata.class), any(), any(Metadata.class), any(EventType.class));
   verify(converter, never()).convert(any(), any(), any(Metadata.class), any(), any(Metadata.class), any(EventType.class));
}
@Test public void testFilterWithStoreAsBinary() throws InterruptedException, ExecutionException, TimeoutException { Cache<MagicKey, String> cache0 = cache(0, CACHE_NAME); Cache<MagicKey, String> cache1 = cache(1, CACHE_NAME); Cache<MagicKey, String> cache2 = cache(2, CACHE_NAME); Map<MagicKey, String> originalValues = new HashMap<>(); originalValues.put(new MagicKey(cache0), "cache0"); originalValues.put(new MagicKey(cache1), "cache1"); originalValues.put(new MagicKey(cache2), "cache2"); cache0.putAll(originalValues); // Try filter for all values Iterator<CacheEntry<MagicKey, String>> iterator = cache1.getAdvancedCache().cacheEntrySet().stream(). filter(CacheFilters.predicate(new MagicKeyStringFilter(originalValues))).iterator(); // we need this count since the map will replace same key'd value int count = 0; Map<MagicKey, String> results = new HashMap<MagicKey, String>(); while (iterator.hasNext()) { Map.Entry<MagicKey, String> entry = iterator.next(); results.put(entry.getKey(), entry.getValue()); count++; } assertEquals(count, 3); assertEquals(originalValues, results); }
@Test public void simpleTestRemoteFilter() { Map<Object, String> values = putValuesInCache(); Iterator<Map.Entry<Object, String>> iter = values.entrySet().iterator(); Map.Entry<Object, String> excludedEntry = iter.next(); // Remove it so comparison below will be correct iter.remove(); Cache<MagicKey, String> cache = cache(1, CACHE_NAME); Iterator<CacheEntry<MagicKey, String>> iterator = cache.getAdvancedCache().cacheEntrySet().stream().filter( CacheFilters.predicate(new KeyFilterAsKeyValueFilter<>(new CollectionKeyFilter<>( Collections.singleton(excludedEntry.getKey()))))).iterator(); Map<MagicKey, String> results = mapFromIterator(iterator); assertEquals(values, results); } }
/**
 * Iterating the entry set from a node other than the writer must still see
 * every value that was inserted.
 */
@Test
public void simpleTestIteratorFromOtherNode() {
   Map<Object, String> expected = putValuesInCache();
   Cache<MagicKey, String> cache = cache(1, CACHE_NAME);
   Iterator<CacheEntry<MagicKey, String>> it = cache.getAdvancedCache().cacheEntrySet().stream().iterator();
   Map<MagicKey, String> actual = mapFromIterator(it);
   assertEquals(expected, actual);
}
// Verifies that initial-state events delivered to a clustered listener carry the
// entry Metadata, and that the metadata round-trips intact (lifespan/maxIdle).
public void testMetadataAvailable() {
   // Seed entries with per-entry metadata.
   // NOTE(review): assuming the TransientMortalCacheEntry ctor here is
   // (key, value, maxIdle=i, lifespan=-1, created) — consistent with the
   // assertions at the bottom; confirm against the ctor signature.
   final List<CacheEntry<String, String>> initialValues = new ArrayList<>(10);
   for (int i = 0; i < 10; i++) {
      String key = "key-" + i;
      String value = "value-" + i;
      initialValues.add(new TransientMortalCacheEntry(key, value, i, -1, System.currentTimeMillis()));
   }
   // Note we don't actually use the filter/converter to retrieve values since it is being mocked, thus we can assert
   // the filter/converter are not used by us
   CacheStream mockStream = mockStream();
   doReturn(initialValues.iterator()).when(mockStream).iterator();
   when(mockCache.withEncoding(any(Class.class), any(Class.class)).cacheEntrySet().stream()).thenReturn(mockStream);
   CacheEventFilter filter = mock(CacheEventFilter.class, withSettings().serializable());
   CacheEventConverter converter = mock(CacheEventConverter.class, withSettings().serializable());
   StateListener<String, String> listener = new StateListenerClustered();
   n.addListener(listener, filter, converter);
   verifyEvents(isClustered(listener), listener, initialValues);
   // Each event must expose the metadata the entry was created with.
   // (TestNG assertEquals argument order is (actual, expected).)
   for (CacheEntryEvent<String, String> event : listener.events) {
      String key = event.getKey();
      Metadata metadata = event.getMetadata();
      assertNotNull(metadata);
      assertEquals(metadata.lifespan(), -1);
      // key is "key-<i>"; substring(4) recovers <i>, which was used as maxIdle.
      assertEquals(metadata.maxIdle(), Long.parseLong(key.substring(4)));
   }
}
@Test public void testFilterAndConverterCombined() { Map<Object, String> values = putValuesInCache(); Iterator<Map.Entry<Object, String>> iter = values.entrySet().iterator(); Map.Entry<Object, String> excludedEntry = iter.next(); // Remove it so comparison below will be correct iter.remove(); Cache<MagicKey, String> cache = cache(0, CACHE_NAME); KeyValueFilterConverter<MagicKey, String, String> filterConverter = new CompositeKeyValueFilterConverter<>( new KeyFilterAsKeyValueFilter<>(new CollectionKeyFilter<>(Collections.singleton(excludedEntry.getKey()))), new StringTruncator(2, 5)); try (CacheStream<CacheEntry<MagicKey, String>> stream = CacheFilters.filterAndConvert( cache.getAdvancedCache().cacheEntrySet().stream(), filterConverter)) { Map<MagicKey, String> results = mapFromStream(stream); assertEquals(values.size(), results.size()); for (Map.Entry<Object, String> entry : values.entrySet()) { assertEquals(entry.getValue().substring(2, 7), results.get(entry.getKey())); } } }
@Test public void simpleTestLocalFilter() { Map<Object, String> values = putValuesInCache(); Iterator<Map.Entry<Object, String>> iter = values.entrySet().iterator(); Map.Entry<Object, String> excludedEntry = iter.next(); // Remove it so comparison below will be correct iter.remove(); Cache<MagicKey, String> cache = cache(0, CACHE_NAME); KeyValueFilter<MagicKey, String> filter = new KeyFilterAsKeyValueFilter<>(new CollectionKeyFilter<>( Collections.singleton(excludedEntry.getKey()))); Iterator<CacheEntry<MagicKey, String>> iterator = cache.getAdvancedCache().cacheEntrySet().stream().filter( CacheFilters.predicate(filter)).iterator(); Map<MagicKey, String> results = mapFromIterator(iterator); assertEquals(values, results); }
@Override public void forEachTuple( ModelConsumer consumer, TupleTypeContext tupleTypeContext, EntityKeyMetadata entityKeyMetadata ) { Set<Bucket<EK>> buckets = getCacheManager().getWorkBucketsFor( entityKeyMetadata ); for ( Bucket<EK> bucket : buckets ) { Map<EK, Map<String, Object>> queryResult = new HashMap<>(); List<CacheEntry<EK, Map<String, Object>>> collect = bucket.getCache().getAdvancedCache().cacheEntrySet() .stream() .filter( getKeyProvider().getFilter( entityKeyMetadata ) ) // also collector needs to be Serializable (for non local caches) .collect( CacheCollectors.serializableCollector( () -> Collectors.toList() ) ); for ( CacheEntry<EK, Map<String, Object>> entry : collect ) { queryResult.put( entry.getKey(), entry.getValue() ); } // At runtime values of queryResult will be members of class org.infinispan.atomic.impl.AtomicKeySetImpl // this is because of the new implementation of FineGrainedAtomicMap Infinispan class (since 9.1) // query result return anyway valid keys, the values will be reloaded later by the InfinispanTupleIterator InfinispanTuplesSupplier<EK> supplier = new InfinispanTuplesSupplier( bucket.getCache(), queryResult ); consumer.consume( supplier ); } }
@Override public void forEachTuple( ModelConsumer consumer, TupleTypeContext tupleTypeContext, EntityKeyMetadata entityKeyMetadata ) { Set<Bucket<EK>> buckets = getCacheManager().getWorkBucketsFor( entityKeyMetadata ); for ( Bucket<EK> bucket : buckets ) { Map<EK, Map<String, Object>> queryResult = new HashMap<>(); List<CacheEntry<EK, Map<String, Object>>> collect = bucket.getCache().getAdvancedCache().cacheEntrySet() .stream() .filter( getKeyProvider().getFilter( entityKeyMetadata ) ) // also collector needs to be Serializable (for non local caches) .collect( CacheCollectors.serializableCollector( () -> Collectors.toList() ) ); for ( CacheEntry<EK, Map<String, Object>> entry : collect ) { queryResult.put( entry.getKey(), entry.getValue() ); } // At runtime values of queryResult will be members of class org.infinispan.atomic.impl.AtomicKeySetImpl // this is because of the new implementation of FineGrainedAtomicMap Infinispan class (since 9.1) // query result return anyway valid keys, the values will be reloaded later by the InfinispanTupleIterator InfinispanTuplesSupplier<EK> supplier = new InfinispanTuplesSupplier( bucket.getCache(), queryResult ); consumer.consume( supplier ); } }
public void testExpiredEntryNotReturned() throws InterruptedException { Cache<Object, String> cache = cache(0, CACHE_NAME); // First put some values in there Map<Object, String> valuesInserted = new LinkedHashMap<Object, String>(); for (int i = 0; i < 5; ++i) { Object key = i; String value = key + " stay in cache"; cache.put(key, value); valuesInserted.put(key, value); } int expectedTime = 2; // Now we insert a value that will expire in 2 seconds cache.put("expired", "this shouldn't be returned", expectedTime, TimeUnit.SECONDS); // We have to wait the time limit to make sure it is evicted before proceeding Thread.sleep(TimeUnit.SECONDS.toMillis(expectedTime) + 50); Map<Object, String> results; try (CacheStream<CacheEntry<Object, String>> stream = cache.getAdvancedCache().cacheEntrySet().stream()) { results = mapFromStream(stream); } assertEquals(valuesInserted, results); } }
// NOTE(review): incomplete fragment — this line continues an argument list begun
// above this chunk (presumably constructing a composite filter/converter whose
// converter truncates values to substring(2, 7)) and the try-block it opens is
// closed below this chunk; confirm against the full method before editing.
new StringTruncator(2, 5)); try (CacheStream<CacheEntry<Object, String>> stream = CacheFilters.filterAndConvert( cache.getAdvancedCache().cacheEntrySet().stream(), filterConverter)) { Map<Object, String> results = mapFromStream(stream); assertEquals(values.size(), results.size());
/**
 * A value written inside an ongoing transaction must be visible to the stream
 * and must flow through the converter like every pre-existing entry.
 */
@Test
public void testConverterWithExistingTransaction() throws NotSupportedException, SystemException {
   Map<Object, String> expected = putValuesInCache();
   Cache<Object, String> cache = cache(0, CACHE_NAME);
   TransactionManager tm = tm(cache);
   tm.begin();
   try {
      Object key = "converted-key";
      String value = "converted-value";
      expected.put(key, value);
      cache.put(key, "converted-value");
      // Accept everything, then truncate each value to substring(2, 7).
      try (CacheStream<CacheEntry<Object, String>> stream = cache.getAdvancedCache().cacheEntrySet().stream()
            .filter(CacheFilters.predicate(AcceptAllKeyValueFilter.getInstance()))
            .map(CacheFilters.function(new StringTruncator(2, 5)))) {
         Map<Object, String> results = mapFromStream(stream);
         assertEquals(expected.size(), results.size());
         for (Map.Entry<Object, String> entry : expected.entrySet()) {
            assertEquals(entry.getValue().substring(2, 7), results.get(entry.getKey()));
         }
      }
   } finally {
      // Never commit: the test must not leak the transactional write.
      tm.rollback();
   }
}
public static <K, V> MapCollectableCloseableIterable<K, V> entrySet(AdvancedCache<K, V> cache, KeyValueFilter<K, V> filter) { if (cache.getCacheConfiguration().transaction().transactionMode().isTransactional()) { // Dummy read to enlist the LocalTransaction as workaround for ISPN-5676 cache.containsKey(false); } // HHH-10023: we can't use values() CloseableIterator<CacheEntry<K, V>> iterator = Closeables.iterator( cache.cacheEntrySet().stream().filter(CacheFilters.predicate(filter))); return new MapCollectableCloseableIterableImpl<K, V>(iterator); }
@Test public void testFilterWithStoreAsBinaryPartialKeys() throws InterruptedException, ExecutionException, TimeoutException { Cache<MagicKey, String> cache0 = cache(0, CACHE_NAME); Cache<MagicKey, String> cache1 = cache(1, CACHE_NAME); Cache<MagicKey, String> cache2 = cache(2, CACHE_NAME); MagicKey findKey = new MagicKey(cache1); Map<MagicKey, String> originalValues = new HashMap<>(); originalValues.put(new MagicKey(cache0), "cache0"); originalValues.put(findKey, "cache1"); originalValues.put(new MagicKey(cache2), "cache2"); cache0.putAll(originalValues); // Try filter for all values Iterator<CacheEntry<MagicKey, String>> iterator = cache1.getAdvancedCache().cacheEntrySet().stream(). filter(CacheFilters.predicate(new MagicKeyStringFilter(Collections.singletonMap(findKey, "cache1")))).iterator(); CacheEntry<MagicKey, String> entry = iterator.next(); AssertJUnit.assertEquals(findKey, entry.getKey()); AssertJUnit.assertEquals("cache1", entry.getValue()); assertFalse(iterator.hasNext()); }
public static <K, V, T> MapCollectableCloseableIterable<K, T> entrySet(AdvancedCache<K, V> cache, KeyValueFilter<K, V> filter, Converter<K, V, T> converter) { if (cache.getCacheConfiguration().transaction().transactionMode().isTransactional()) { // Dummy read to enlist the LocalTransaction as workaround for ISPN-5676 cache.containsKey(false); } // HHH-10023: we can't use values() CloseableIterator<CacheEntry<K, T>> it = Closeables.iterator(cache.cacheEntrySet().stream() .filter(CacheFilters.predicate(filter)) .map(CacheFilters.function(converter))); return new MapCollectableCloseableIterableImpl<K, T>(it); }
/**
 * A key written inside an ongoing transaction but excluded by the filter must
 * not appear; the result equals the pre-transaction contents only.
 */
public void testFilterWithExistingTransaction() throws Exception {
   Map<Object, String> expected = putValueInEachCache(3);
   Cache<Object, String> cache = cache(0, CACHE_NAME);
   TransactionManager tm = tm(cache);
   tm.begin();
   try {
      Object key = "filtered-key";
      cache.put(key, "filtered-value");
      Iterator<CacheEntry<Object, String>> it = cache.getAdvancedCache().cacheEntrySet().stream()
            .filter(CacheFilters.predicate(new KeyFilterAsKeyValueFilter<>(
                  new CollectionKeyFilter<>(Collections.singleton(key)))))
            .iterator();
      assertEquals(expected, mapFromIterator(it));
   } finally {
      // Never commit: the transactional write must not leak to other tests.
      tm.rollback();
   }
}
public void testStreamWithMissedKeyInTransaction() throws Exception { AdvancedCache<Object, String> cache = advancedCache(0, CACHE_NAME); TransactionManager tm = tm(cache); tm.begin(); try { Object localMissingKey = new MagicKey("key1", cache); Object remoteMissingKey = new MagicKey("key2", cache(1, CACHE_NAME)); assertFalse(cache.containsKey(localMissingKey)); assertFalse(cache.containsKey(remoteMissingKey)); Iterator<CacheEntry<Object, String>> iterator = cache.getAdvancedCache().cacheEntrySet().stream().iterator(); Map<Object, String> results = mapFromIterator(iterator); assertEquals(Collections.emptyMap(), results); // size() also uses streams internally assertEquals(0, cache.size()); } finally { tm.rollback(); } } }
/**
 * An Ickle query filter must return exactly the matching entries (here: the two
 * people aged 30 and 31 with a null blurb) when applied over the entry stream.
 */
@Test
public void testFilter() {
   final boolean clustered = cache(0).getCacheConfiguration().clustering().cacheMode().isClustered();
   // Insert 10 people aged 30..39, spread across the nodes.
   for (int i = 0; i < 10; ++i) {
      Person person = new Person();
      person.setName("John");
      person.setAge(i + 30);
      Cache<Object, Person> cache = cache(i % numNodes);
      Object key = clustered ? new MagicKey(cache) : i;
      cache.put(key, person);
   }
   IckleFilterAndConverter filterAndConverter = new IckleFilterAndConverter<Object, Person>(
         "from org.infinispan.query.test.Person where blurb is null and age <= 31", null, ReflectionMatcher.class);
   Stream<CacheEntry<Object, Object>> stream = cache(0).getAdvancedCache().cacheEntrySet().stream();
   CloseableIterator<Map.Entry<Object, ObjectFilter.FilterResult>> it =
         Closeables.iterator(CacheFilters.filterAndConvert(stream, filterAndConverter).iterator());
   Map<Object, ObjectFilter.FilterResult> results = mapFromIterator(it);
   // Only ages 30 and 31 satisfy "age <= 31".
   assertEquals(2, results.size());
   for (ObjectFilter.FilterResult result : results.values()) {
      Person matched = (Person) result.getInstance();
      assertNull(matched.getBlurb());
      assertTrue(matched.getAge() <= 31);
   }
}