/**
 * Increments the reference count tracked for the given UGI in
 * {@code ugiReferenceCounter}, starting at 1 when the UGI is first seen.
 *
 * @param ugi the user/group information whose reference count is bumped
 */
private void incrementUgiReference(UserGroupInformation ugi) {
    // Integer::sum replaces the anonymous BiFunction whose "++oldvalue" ignored
    // the supplied merge value. Behavior is identical for an increment of 1,
    // but this form is correct for any increment and is the standard idiom.
    ugiReferenceCounter.merge(ugi, 1, Integer::sum);
}
/**
 * Atomically merges {@code value} under {@code key} by delegating to the backing map.
 * Semantics follow {@link java.util.Map#merge}: an absent key stores the value as-is;
 * otherwise the remapping function combines the existing and new values, and a
 * {@code null} result removes the mapping.
 *
 * @param key key to merge the value under
 * @param value value to insert or combine with the existing one
 * @param remappingFunction function combining the existing and the new value
 * @return the value now associated with the key, or {@code null} if it was removed
 */
public V merge(final K key, final V value, final BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
    return backingMap.merge(key, value, remappingFunction);
}
/**
 * Updates aggregate query metrics and, when detail collection is enabled
 * ({@code detailMetricsSz > 0}), records a per-query detail entry.
 * EXPLAIN queries are deliberately excluded from detail collection.
 *
 * @param qryType Query type.
 * @param qry Query description.
 * @param startTime Query start time.
 * @param duration Execution duration.
 * @param failed {@code True} if query execution failed.
 */
public void collectMetrics(GridCacheQueryType qryType, String qry, long startTime, long duration, boolean failed) {
    metrics.update(duration, failed);

    if (detailMetricsSz > 0) {
        // Do not collect metrics for EXPLAIN queries.
        if (qryType == SQL_FIELDS && !F.isEmpty(qry)) {
            int off = 0;
            int len = qry.length();

            // Skip leading whitespace before testing for the EXPLAIN keyword.
            while (off < len && Character.isWhitespace(qry.charAt(off)))
                off++;

            // Case-insensitive prefix match against "EXPLAIN".
            if (qry.regionMatches(true, off, "EXPLAIN", 0, 7))
                return;
        }

        GridCacheQueryDetailMetricsAdapter m = new GridCacheQueryDetailMetricsAdapter(qryType, qry, cctx.name(), startTime, duration, failed);

        GridCacheQueryDetailMetricsKey key = m.key();

        // Merge with any existing detail entry for the same query signature.
        detailMetrics.merge(key, m, QRY_DETAIL_METRICS_MERGE_FX);
    }
}
/**
 * Records one finished execution of the given query into the query history,
 * merging with any existing entry for the same query key. History collection
 * is disabled entirely when {@code histSz <= 0}; the table is shrunk once it
 * grows past {@code histSz}.
 *
 * @param runningQryInfo Info about the query that just finished.
 * @param failed {@code True} if query execution failed.
 */
void collectMetrics(GridRunningQueryInfo runningQryInfo, boolean failed) {
    if (histSz <= 0)
        return;

    String qry = runningQryInfo.query();
    String schema = runningQryInfo.schemaName();
    boolean loc = runningQryInfo.local();
    long startTime = runningQryInfo.startTime();
    // NOTE(review): duration uses the wall clock; assumes startTime is also
    // currentTimeMillis-based — confirm against GridRunningQueryInfo.
    long duration = System.currentTimeMillis() - startTime;

    QueryHistoryMetrics m = new QueryHistoryMetrics(qry, schema, loc, startTime, duration, failed);

    QueryHistoryMetrics mergedMetrics = qryMetrics.merge(m.key(), m, QueryHistoryMetrics::aggregateWithNew);

    // Shrink only when this entry was touched and the history overflowed.
    if (touch(mergedMetrics) && qryMetrics.size() > histSz)
        shrink();
}
// Count one occurrence for this centroid (starts at 1 when absent). The cast
// selects Ignite's serializable BiFunction variant expected by this merge API.
res.counts.merge(centroidIdx, 1, (IgniteBiFunction<Integer, Integer, Integer>)(i1, i2) -> i1 + i2);
});

// Increment the per-centroid counter (starts at 1 when absent). The cast to
// IgniteBiFunction matches the serializable signature this map's merge expects.
res.counts.merge(centroidIdx, 1, (IgniteBiFunction<Integer, Integer, Integer>)(i1, i2) -> i1 + i2);
/**
 * Merges {@code value} under {@code key}, delegating directly to the backing map.
 * Follows {@link java.util.Map#merge} semantics: insert when absent, combine via
 * the remapping function when present, remove when the function yields {@code null}.
 *
 * @param key key to merge under
 * @param value value to insert or combine
 * @param remappingFunction combiner for existing and new values
 * @return the resulting value for the key, or {@code null} if the mapping was removed
 */
public V merge(final K key, final V value, final BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
    return backingMap.merge(key, value, remappingFunction);
}
/**
 * Pass-through merge: forwards the call unchanged to the backing map, so the
 * atomicity guarantees are those of the backing map's {@code merge} implementation.
 *
 * @param key key to merge under
 * @param value value to insert when absent, or to combine with the current value
 * @param remappingFunction combiner invoked as (oldValue, value) when the key exists
 * @return the value now mapped to the key, or {@code null} if the mapping was removed
 */
public V merge(final K key, final V value, final BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
    return backingMap.merge(key, value, remappingFunction);
}
/**
 * Bumps the unsolicited-message counter for {@code addr}, saturating at
 * {@code throttleSaturation}, and reports whether the counter BEFORE this
 * update had already exceeded {@code throttleThreshold}.
 *
 * @param addr peer address being tracked
 * @return true if it should be throttled
 */
boolean updateAndCheckThrottle(InetAddress addr) {
    // Map.merge invokes the remapping function as (oldValue, newValue). The
    // original lambda ignored its first argument (the accumulated old value)
    // and computed v + throttleIncrement where v is always the constant
    // throttleIncrement, so the counter could never advance past
    // 2 * throttleIncrement. Summing prev + inc restores the intended
    // saturating accumulation.
    long oldVal = unsolicitedThrottle.merge(addr, throttleIncrement,
                                            (prev, inc) -> Math.min(prev + inc, throttleSaturation))
                  - throttleIncrement;
    // NOTE(review): once saturated, "merged - increment" no longer equals the
    // true prior value, but it still exceeds the threshold, so the throttling
    // decision is unaffected.
    return oldVal > throttleThreshold;
}
// Track how many times each value has occurred, and print a value at the exact
// moment its count reaches 2 (i.e. the first time it repeats).
ConcurrentHashMap<Integer, Integer> counts = new ConcurrentHashMap<>();
for (int value : new int[] {1, 2, 2, 3, 4, 5, 5}) {
    // merge returns the updated count: 1 on first sight, 2 on the second, ...
    if (counts.merge(value, 1, Integer::sum) == 2) {
        System.out.println(value);
    }
}
/**
 * Removes {@code key} from the posting list stored under {@code indexKey},
 * dropping the index entry entirely once its list becomes empty.
 *
 * @param indexKey index entry whose posting list is updated
 * @param key value to remove from that posting list
 */
private void removeValueFromIndex(Bytes indexKey, Bytes key) {
    data.merge(indexKey, Collections.singletonList(key), (actual, removals) -> {
        // The previous size() == 1 shortcut returned null (deleting the whole
        // entry) without checking WHICH element was stored, so removing a key
        // that was never indexed could wipe an unrelated posting. Verify
        // membership first. The shortcut itself must stay: a single-element
        // list may be the immutable singletonList stored on first insert, on
        // which removeAll would throw UnsupportedOperationException.
        if (actual.size() == 1) {
            return actual.containsAll(removals) ? null : actual;
        }
        // NOTE(review): mutates the stored list in place, matching the original
        // behavior; lists reaching this branch are the mutable copies built by
        // addValueToIndex.
        actual.removeAll(removals);
        // Drop the entry if duplicates made the list empty after removal.
        return actual.isEmpty() ? null : actual;
    });
}
/**
 * Appends {@code key} to the posting list stored under {@code indexKey},
 * creating a fresh single-element list when the index entry does not exist yet.
 *
 * @param indexKey index entry whose posting list is extended
 * @param key value to append to that posting list
 */
private void addValueToIndex(Bytes indexKey, Bytes key) {
    data.merge(indexKey, Collections.singletonList(key), (existing, addition) -> {
        // Build a new mutable list rather than mutating the stored one: the
        // stored value may be the immutable singletonList from the first insert.
        List<Bytes> combined = new ArrayList<>(existing);
        combined.addAll(addition);
        return combined;
    });
}
@Override
public Collection<PostCheckpointAction> writeTables(String tableSpace, LogSequenceNumber sequenceNumber, List<Table> tables, List<Index> indexlist) throws DataStorageManagerException {
    // NOTE(review): performing a second merge (indexesByTablespace) inside the
    // remapping function of the first merge is a side effect inside a function
    // the map may evaluate under its own lock — confirm this is safe for the
    // map implementation in use. (Definition truncated in this view.)
    tablesByTablespace.merge(tableSpace, tables, new BiFunction<List<Table>, List<Table>, List<Table>>() {
        @Override
        public List<Table> apply(List<Table> before, List<Table> after) {
            indexesByTablespace.merge(tableSpace, indexlist, new BiFunction<List<Index>, List<Index>, List<Index>>() {
                @Override
                public List<Index> apply(List<Index> before, List<Index> after) {
map.merge(23, c, (oldHashMap, newHashMap) -> { oldHashMap.merge (1, c.get(1), (oldV, newV) -> { if (oldV < newV) return newV; else return oldV; map.merge (23, r, (oldHashMap, newHashMap) -> { oldHashMap.merge(1, newHashMap.get(1), (oldV, newV) -> {if (oldV < newV) return newV; else return oldV;}); return oldHashMap;
/**
 * Parses {@code newToken} of the form {@code "<range>:<lsn>"} and merges the
 * parsed session token into {@code oldTokens} under that range, combining it
 * with any existing token via {@code ISessionToken.merge}. No-op when the
 * token string is empty.
 *
 * @param newToken raw session token string from the service
 * @param oldTokens per-range session tokens to update
 */
private void compareAndSetToken(String newToken, ConcurrentHashMap<String, ISessionToken> oldTokens) {
    if (StringUtils.isNotEmpty(newToken)) {
        String[] newTokenParts = newToken.split(":");
        if (newTokenParts.length == 2) {
            String range = newTokenParts[0];
            ISessionToken newLSN = SessionTokenHelper.parse(newTokenParts[1]);
            oldTokens.merge(range, newLSN, (oldSessionToken, newSessionToken) -> {
                try {
                    // NOTE(review): Map.merge never invokes the remapping
                    // function with a null old value, so this branch looks
                    // unreachable — confirm before removing.
                    if (oldSessionToken == null) {
                        return newSessionToken;
                    }
                    return oldSessionToken.merge(newSessionToken);
                } catch (DocumentClientException e) {
                    // Checked exceptions cannot escape the BiFunction; rethrow unchecked.
                    throw new IllegalStateException(e);
                }
            });
        } else {
            // Anything other than exactly one ':' is treated as a service bug.
            assert false : "service returned an invalid session token";
        }
    }
}
/**
 * Forces refresh of the cached item if it is not being refreshed at the moment.
 *
 * @param key cache key to refresh
 * @param singleValueInitFunc factory producing the new value computation
 */
public void refresh(
        TKey key,
        Func0<Single<TValue>> singleValueInitFunc) {
    logger.debug("refreshing cache[{}]", key);

    AsyncLazy<TValue> initialLazyValue = values.get(key);
    // Only replace entries whose computation already completed (success or
    // failure); an in-flight refresh is left alone.
    if (initialLazyValue != null && (initialLazyValue.isSucceeded() || initialLazyValue.isFaulted())) {
        AsyncLazy<TValue> newLazyValue = new AsyncLazy<>(singleValueInitFunc);

        // Update the new task in the cache,
        // but only if the entry is still the one we inspected above; otherwise a
        // concurrent writer got there first and its value is kept.
        values.merge(key, newLazyValue, (lazyValue1, lazyValu2) -> lazyValue1 == initialLazyValue ? lazyValu2 : lazyValue1);
    }
}
}
/**
 * Updates the aggregate metrics and, when per-query detail collection is
 * enabled, merges a detail entry for this execution. SQL EXPLAIN statements
 * are skipped so that diagnostic runs do not pollute the detail metrics.
 *
 * @param qryType Query type.
 * @param qry Query description.
 * @param startTime Query start time.
 * @param duration Execution duration.
 * @param failed {@code True} if query execution failed.
 */
public void collectMetrics(GridCacheQueryType qryType, String qry, long startTime, long duration, boolean failed) {
    metrics.update(duration, failed);

    if (detailMetricsSz > 0) {
        // Do not collect metrics for EXPLAIN queries.
        if (qryType == SQL_FIELDS && !F.isEmpty(qry)) {
            int off = 0;
            int len = qry.length();

            // Advance past any leading whitespace.
            while (off < len && Character.isWhitespace(qry.charAt(off)))
                off++;

            // Case-insensitive check for the "EXPLAIN" prefix.
            if (qry.regionMatches(true, off, "EXPLAIN", 0, 7))
                return;
        }

        GridCacheQueryDetailMetricsAdapter m = new GridCacheQueryDetailMetricsAdapter(qryType, qry, cctx.name(), startTime, duration, failed);

        GridCacheQueryDetailMetricsKey key = m.key();

        // Combine with the existing entry for this query signature, if any.
        detailMetrics.merge(key, m, QRY_DETAIL_METRICS_MERGE_FX);
    }
}
/**
 * Delegates matching to the underlying matcher while recording the time spent,
 * accumulated per {@code type + "-" + transformer} key.
 */
@Override
public boolean matches(T target) {
    return LatencyUtils.withTimeSpent(() -> underlyingMatcher.matches(target), (timeSpentInNanoseconds) -> {
        val key = type + "-" + transformer;
        // Merge the new sample into the running metrics for this matcher key.
        // NOTE(review): the remapping function merges only current.getTime()
        // and discards the rest of `current` — confirm TypeMatcherMetrics.merge
        // also accounts for the invocation count.
        accumulatedTimeByType.merge(key, new TypeMatcherMetrics(timeSpentInNanoseconds, 1), (acc, current) -> acc.merge(current.getTime()));
    });
}
/**
 * Verifies ConcurrentHashMap.merge with CRDT grow-only sets: the first merge
 * inserts the set unchanged (key absent), the second combines the new set with
 * the existing one through the merge function.
 */
@SuppressWarnings("rawtypes")
@Test
public void mergeTest() {
    ConcurrentHashMap<String, Crdt> a = new ConcurrentHashMap<>();
    GrowOnlySet<String> gset = new GrowOnlySet<>(Arrays.asList("a", "b"));
    // Key absent: merge stores the value as-is and returns it.
    Assert.assertEquals(gset, a.merge("a", gset, new CrdtBiFunctionMerge()));
    GrowOnlySet<String> over = new GrowOnlySet<>(Arrays.asList("b", "d"));
    // Key present: the merge function unions both sets.
    Assert.assertEquals(new GrowOnlySet<>(Arrays.asList("a", "b", "d")), a.merge("a", over, CrdtBiFunctionMerge::applyStatic));
}
}
newEntry.failures = 1;
// NOTE(review): Map.merge passes (oldValue, newValue) to the remapping
// function, so the parameter named `k` is actually the PREVIOUS CacheEntry and
// `oldEntry` is the newly supplied `newEntry`. Reading oldEntry.created below
// therefore copies the NEW entry's creation time, not the cached one — confirm
// intent and rename/fix. (Definition truncated in this view.)
map.merge(addr, newEntry, (k, oldEntry) -> {
    CacheEntry updatedEntry = new CacheEntry();
    updatedEntry.created = oldEntry.created;