/**
 * Removes the head of the waiter queue for {@code lockName} and, if another
 * waiter remains, hands the lock to the new head of the queue.
 */
private void nextWaiter(String lockName) {
  // Atomically pop the first waiter; map the entry to null when the queue empties.
  List<LockWaiter> remaining = waitersMap.compute(lockName, (name, queue) -> {
    if (queue == null || queue.size() == 1) {
      return null;
    }
    return new ArrayList<>(queue.subList(1, queue.size()));
  });
  if (remaining != null) {
    // The new head of the queue acquires the lock next.
    remaining.get(0).acquireLock();
  }
}
}
public void testComputeEviction() { // b/80241237 Cache<String, String> c = CacheBuilder.newBuilder().maximumSize(1).build(); assertThat(c.asMap().compute("hash-1", (k, v) -> "a")).isEqualTo("a"); assertThat(c.asMap().compute("hash-1", (k, v) -> "b")).isEqualTo("b"); assertThat(c.asMap().compute("hash-1", (k, v) -> "c")).isEqualTo("c"); assertThat(c.size()).isEqualTo(1); assertThat(c.asMap().computeIfAbsent("hash-2", k -> "")).isEqualTo(""); }
// compute() on a size-1 Caffeine-backed cache must keep eviction bookkeeping
// consistent across repeated replacements of the same key.
public void testComputeEviction() {
  Cache<String, String> cache =
      CaffeinatedGuava.build(
          Caffeine.newBuilder().executor(MoreExecutors.directExecutor()).maximumSize(1));
  assertThat(cache.asMap().compute("hash-1", (k, v) -> "a")).isEqualTo("a");
  assertThat(cache.asMap().compute("hash-1", (k, v) -> "b")).isEqualTo("b");
  assertThat(cache.asMap().compute("hash-1", (k, v) -> "c")).isEqualTo("c");
  assertThat(cache.size()).isEqualTo(1);
  // A fresh key can still be inserted after the replacements above.
  assertThat(cache.asMap().computeIfAbsent("hash-2", k -> "")).isEqualTo("");
}
/**
 * Delegates {@code compute} to the backing map, wrapping the remapping
 * function so it is type-checked before use.
 */
@Override
public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
  // typeChecked is defined elsewhere in this class — presumably enforces
  // runtime key/value type constraints on the function's inputs/outputs.
  return map.compute(key, typeChecked(remappingFunction));
}
/** Forwarding implementation: defers {@code compute} to the delegate map. */
@Override
public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
  return delegate().compute(key, remappingFunction);
}

// NOTE(review): the merge override's parameter list and body continue beyond
// this chunk; only the start of its signature is visible here.
@Override
public V merge(K key, V value,
/**
 * Enqueues a waiter for the named lock. If the queue was previously empty,
 * the new waiter immediately attempts to acquire the lock.
 */
public void acquire(Context context, String name, long timeout, Handler<AsyncResult<Lock>> handler) {
  LockWaiter waiter = new LockWaiter(context, name, timeout, handler);
  // Atomically append the waiter via a copy-on-write of the queue.
  List<LockWaiter> queue = waitersMap.compute(name, (lockName, existing) -> {
    List<LockWaiter> updated;
    if (existing == null) {
      updated = new ArrayList<>(1);
    } else {
      updated = new ArrayList<>(existing.size() + 1);
      updated.addAll(existing);
    }
    updated.add(waiter);
    return updated;
  });
  // Only the sole waiter in a fresh queue tries to take the lock right away.
  if (queue.size() == 1) {
    queue.get(0).acquireLock();
  }
}
@Override
public void run() {
  // Restore the previous node object under its original name, then refresh
  // Jenkins' derived state (computer list and label cache) to match.
  nodes.compute(node.getNodeName(), (ignoredNodeName, ignoredNode) -> oldNode);
  jenkins.updateComputerList();
  jenkins.trimLabels();
}
});
// An exception thrown by the remapping function must propagate out of compute().
public void testComputeExceptionally() {
  try {
    doParallelCacheOp(count, n -> {
      cache.asMap().compute(key, (k, v) -> {
        throw new RuntimeException();
      });
    });
    fail("Should not get here");
  } catch (RuntimeException expected) {
    // Expected: the remapping function's exception reaches the caller.
  }
}
}
public void testUpdates() { cache.put(key, "1"); // simultaneous update for same key, some null, some non-null doParallelCacheOp( count, n -> { cache.asMap().compute(key, (k, v) -> n % 2 == 0 ? v + delimiter + n : null); }); assertTrue(1 >= cache.size()); }
public void testCompute() { cache.put(key, "1"); // simultaneous deletion doParallelCacheOp( count, n -> { cache.asMap().compute(key, (k, v) -> null); }); assertEquals(0, cache.size()); }
// compute() must rethrow an exception raised by the remapping function.
public void testComputeExceptionally() {
  try {
    doParallelCacheOp(count, n -> {
      cache.asMap().compute(key, (k, v) -> {
        throw new RuntimeException();
      });
    });
    fail("Should not get here");
  } catch (RuntimeException expected) {
    // Expected path: the remapping function's exception propagates.
  }
}
}
/**
 * compute removes the mapping when the key is present and the remapping
 * function returns null.
 */
public void testCompute4() {
  ConcurrentMap map = map5();
  map.compute(one, (k, v) -> null);
  assertFalse(map.containsKey(one));
}
/**
 * compute leaves an absent key absent when the remapping function returns
 * null — no mapping is created.
 */
public void testCompute() {
  ConcurrentMap map = map5();
  map.compute(six, (k, v) -> null);
  assertFalse(map.containsKey(six));
}
/**
 * Applies the event to the cached instance snapshot. If applying fails, the
 * snapshot is rebuilt from the full event history as a fallback.
 *
 * @param event the event to fold into the snapshot
 * @return a Mono completing when the snapshot has been updated
 */
protected Mono<Void> updateSnapshot(InstanceEvent event) {
  return Mono.<Void>fromRunnable(() -> snapshots.compute(event.getInstance(), (id, existing) -> {
    // Start from the cached snapshot, or a fresh instance if none exists yet.
    Instance snapshot = existing != null ? existing : Instance.create(id);
    return snapshot.apply(event);
  })).onErrorResume(ex -> {
    log.warn(
        "Error while updating the snapshot with event {}. Recomputing instance snapshot from event history.",
        event,
        ex
    );
    return recomputeSnapshot(event.getInstance());
  });
}
public void testUpdates() { cache.put(key, "1"); // simultaneous update for same key, some null, some non-null doParallelCacheOp(count, n -> { cache.asMap().compute(key, (k, v) -> n % 2 == 0 ? v + delimiter + n : null); }); assertTrue(1 >= cache.size()); }
/**
 * compute replaces the existing value when the key is present and returns
 * the newly computed value.
 */
public void testCompute3() {
  ConcurrentMap map = map5();
  assertEquals("Z", map.compute(one, (k, v) -> "Z"));
}
/**
 * compute inserts a new mapping when the key is absent and returns the
 * computed value.
 */
public void testCompute2() {
  ConcurrentMap map = map5();
  assertEquals("Z", map.compute(six, (k, v) -> "Z"));
}
public void testCompute() { cache.put(key, "1"); // simultaneous deletion doParallelCacheOp(count, n -> { cache.asMap().compute(key, (k, v) -> null); }); assertEquals(0, cache.size()); }
// Verifies that a weigher-based cache recomputes an entry's weight when
// compute() replaces its value, across all expiry configurations.
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY,
    maximumSize = Maximum.FULL, weigher = CacheWeigher.COLLECTION,
    expiryTime = Expire.ONE_MINUTE,
    mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
    expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
    expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
    expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
public void compute_weighted(Cache<Integer, List<Integer>> cache, CacheContext context) {
  cache.put(1, ImmutableList.of(1));
  // Advance the test clock past the expiry window so compute() runs against
  // an expired entry.
  context.ticker().advance(1, TimeUnit.MINUTES);
  cache.asMap().compute(1, (k, v) -> ImmutableList.of(1, 2, 3));
  // COLLECTION weigher: weight equals the list size, so the new weight is 3.
  assertThat(cache.policy().eviction().get().weightedSize().getAsLong(), is(3L));
}
/**
 * Unregisters the holder from its address. Returns {@code true} when this was
 * the last handler registered for that address (the map entry was removed).
 */
private <T> boolean removeLocalRegistration(HandlerHolder<T> holder) {
  String address = holder.getHandler().address();
  // Atomically drop the holder from the sequence; remove the map entry when
  // the sequence becomes empty. compute() returning null means "no handlers left".
  boolean last = handlerMap.compute(address, (addr, sequence) -> {
    if (sequence == null) {
      return null;
    }
    ConcurrentCyclicSequence<HandlerHolder> remaining = sequence.remove(holder);
    return remaining.size() == 0 ? null : remaining;
  }) == null;
  // Only the first successful removal detaches the close hook.
  if (holder.setRemoved()) {
    holder.getContext().removeCloseHook(new HandlerEntry<>(address, holder.getHandler()));
  }
  return last;
}