/**
 * Wraps the given key according to the requested reference strength.
 *
 * @param key the key to wrap
 * @param keyType the reference strength to use for the key
 * @param refQueue queue notified by the GC when the referent is cleared
 * @return a {@code SoftKeyReference} or {@code WeakKeyReference} wrapping the key,
 *         or the key itself for any other (strong) reference type
 */
final Object newKeyReference(K key, ReferenceType keyType, ReferenceQueue<Object> refQueue) {
	// SOFT and WEAK are mutually exclusive, so check order does not matter.
	if ( keyType == ReferenceType.SOFT ) {
		return new SoftKeyReference<K>( key, hash, refQueue );
	}
	else if ( keyType == ReferenceType.WEAK ) {
		return new WeakKeyReference<K>( key, hash, refQueue );
	}
	// Strong reference: no wrapper needed.
	return key;
}
/**
 * Removes every entry in the given set from the owning segment: each entry is
 * first evicted from the LIRS bookkeeping structures, then the segment's
 * eviction listener is notified, and finally the mapping itself is removed.
 *
 * @param evicted the entries chosen for eviction
 */
private void removeFromSegment(Set<HashEntry<K, V>> evicted) {
	for ( HashEntry<K, V> entry : evicted ) {
		final LIRSHashEntry<K, V> lirsEntry = (LIRSHashEntry<K, V>) entry;
		lirsEntry.evict();
		// Notify before physically removing the mapping from the segment.
		segment.evictionListener.onEntryChosenForEviction( entry.value );
		segment.remove( entry.key, entry.hash, null );
	}
}
/**
 * {@inheritDoc}
 * <p>
 * NOTE(review): despite the interface contract, this implementation does NOT
 * throw {@link NullPointerException} for a {@code null} key or value — it
 * simply returns {@code false}, since a null key/value can never be mapped.
 */
@Override public boolean remove(Object key, Object value) { if ( key == null || value == null ) { return false; } int hash = hashOf( key ); return segmentFor( hash ).remove( key, hash, value, false ) != null; }
/**
 * Returns the names of all queries for which statistics have been collected,
 * i.e. the keys of {@code queryStatsMap} converted to a string array.
 * Ordering follows the map's key-set iteration order.
 */
@Override public String[] getQueries() { return ArrayHelper.toStringArray( queryStatsMap.keySet() ); }
/**
 * Sets the condition to compare each left-hand-side column against the single
 * right-hand-side expression, which is repeated once per LHS column.
 *
 * @param lhs the left-hand-side column names
 * @param rhs the right-hand-side expression, duplicated to match {@code lhs.length}
 * @return {@code this}, for method chaining
 */
public ConditionFragment setCondition(String[] lhs, String rhs) {
	// Expand the single RHS expression so the two sides pair up element-wise.
	this.rhs = ArrayHelper.fillArray( rhs, lhs.length );
	this.lhs = lhs;
	return this;
}
/**
 * Calculates the hash of a character array (first level only), folding each
 * element into a running seed via the two-argument {@code hash} helper.
 *
 * @param array the characters to hash
 * @return the accumulated hash value
 */
public static int hash(char[] array) {
	int result = SEED;
	for ( int i = 0; i < array.length; i++ ) {
		result = hash( result, array[i] );
	}
	return result;
}
/**
 * Wraps the given value according to the requested reference strength.
 *
 * @param value the value to wrap
 * @param valueType the reference strength to use for the value
 * @param refQueue queue notified by the GC when the referent is cleared
 * @return a {@code SoftValueReference} or {@code WeakValueReference} wrapping
 *         the value, or the value itself for any other (strong) reference type
 */
final Object newValueReference(V value, ReferenceType valueType, ReferenceQueue<Object> refQueue) {
	// SOFT and WEAK are mutually exclusive, so check order does not matter.
	if ( valueType == ReferenceType.SOFT ) {
		return new SoftValueReference<V>( value, keyRef, hash, refQueue );
	}
	else if ( valueType == ReferenceType.WEAK ) {
		return new WeakValueReference<V>( value, keyRef, hash, refQueue );
	}
	// Strong reference: no wrapper needed.
	return value;
}
/**
 * Create a properly sized {@link ConcurrentHashMap} based on the given expected number of elements.
 * <p>
 * Delegates to the two-argument overload using the default {@code LOAD_FACTOR}.
 *
 * @param expectedNumberOfElements The expected number of elements for the created map
 * @param <K> The map key type
 * @param <V> The map value type
 *
 * @return The created map.
 */ public static <K, V> ConcurrentHashMap<K, V> concurrentMap(int expectedNumberOfElements) { return concurrentMap( expectedNumberOfElements, LOAD_FACTOR ); }
/**
 * Moves this entry to the top of the stack.
 * <p>
 * The entry is first unlinked from its current stack position (its own links
 * are left dangling by {@code tempRemoveFromStack}), then re-linked directly
 * after the header sentinel — i.e. at the top of the stack.
 */ private void moveToStackTop() { tempRemoveFromStack(); addToStackBefore( owner.header.nextInStack ); }
/**
 * Moves this entry from the stack to the queue, marking it cold
 * (as hot entries must remain in the stack). This should only be called
 * on resident entries, as non-resident entries should not be made resident.
 * The bottom entry on the queue is always hot due to stack pruning.
 * <p>
 * Order matters: the entry is detached from the stack first, then
 * {@code cold()} updates its state and appends it to the queue end.
 */ private void migrateToQueue() { removeFromStack(); cold(); }
/**
 * Temporarily removes this entry from the queue by splicing its neighbors
 * together. This entry's own links are deliberately left untouched, so this
 * must only be called when the node's links will be rewritten afterwards.
 */
private void tempRemoveFromQueue() {
	// Nothing to do when the entry is not currently linked into the queue.
	if ( !inQueue() ) {
		return;
	}
	previousInQueue.nextInQueue = nextInQueue;
	nextInQueue.previousInQueue = previousInQueue;
}
/**
 * Returns an enumeration of the values in this table.
 *
 * @return an enumeration of the values in this table
 *
 * @see #values()
 */
public Enumeration<V> elements() {
	final ValueIterator valueEnumeration = new ValueIterator();
	return valueEnumeration;
}
/**
 * Make a (shallow) copy of query spaces to be synchronized.
 *
 * @param synchronizedQuerySpaces The query spaces
 *
 * @return The copy
 */
public static Set<String> copy(Set<String> synchronizedQuerySpaces) {
	final Set<String> copied = CollectionHelper.makeCopy( synchronizedQuerySpaces );
	return copied;
}
/**
 * Marks this entry as cold (HIR resident) and moves it to the end of the
 * queue. If the entry was previously hot (LIR resident), the owner's hot-set
 * size is decremented accordingly.
 */
private void cold() {
	final boolean wasHot = state == Recency.LIR_RESIDENT;
	if ( wasHot ) {
		// Demoting a hot entry shrinks the hot working set.
		owner.hotSize--;
	}
	state = Recency.HIR_RESIDENT;
	moveToQueueEnd();
}
/**
 * Fully removes this entry from the queue: neighbors are spliced together
 * first, then this entry's own links are cleared.
 */
private void removeFromQueue() {
	// tempRemoveFromQueue reads our links, so it must run before clearing them.
	tempRemoveFromQueue();
	nextInQueue = null;
	previousInQueue = null;
}
/**
 * Handles a cache miss for the given entry under the LIRS policy: records the
 * miss on the entry, removes any entries it chose for eviction from the
 * segment, and returns that evicted set to the caller.
 *
 * @param en the entry that missed
 * @return the entries evicted as a result of this miss
 */
@Override
public Set<HashEntry<K, V>> onEntryMiss(HashEntry<K, V> en) {
	final LIRSHashEntry<K, V> missedEntry = (LIRSHashEntry<K, V>) en;
	final Set<HashEntry<K, V>> evictedEntries = missedEntry.miss();
	removeFromSegment( evictedEntries );
	return evictedEntries;
}
/**
 * Calculates the hash of a byte array (first level only), folding each
 * element into a running seed via the two-argument {@code hash} helper.
 *
 * @param bytes the bytes to hash
 * @return the accumulated hash value
 */
public static int hash(byte[] bytes) {
	int result = SEED;
	int index = 0;
	while ( index < bytes.length ) {
		result = hash( result, bytes[index] );
		index++;
	}
	return result;
}