/**
 * Adds every mapping from {@code m} to this map.
 * Copy-on-write: the current entries and the new ones are gathered into a
 * fresh {@link WeakHashMap}, which then replaces the backing map wrapped
 * as an unmodifiable view, so concurrent readers never see a partial update.
 *
 * @param m the mappings to merge in
 */
@Override
public synchronized void putAll(Map<? extends K, ? extends V> m) {
    WeakHashMap<K, V> updated = new WeakHashMap<K, V>(map);
    updated.putAll(m);
    map = Collections.unmodifiableMap(updated);
}
/**
 * Copies all entries of {@code m} into the backing map.
 *
 * @param m the map whose entries are added
 * @see java.util.Hashtable#putAll(java.util.Map)
 */
@Override
public void putAll(Map<? extends K, ? extends V> m) {
    map.putAll(m);
}
/**
 * Merges all mappings of {@code m} into this map using a copy-on-write
 * swap: build a new {@link WeakHashMap} seeded with the current contents,
 * add the incoming entries, then publish it behind an unmodifiable wrapper.
 *
 * @param m the mappings to add
 */
@Override
public synchronized void putAll(Map<? extends K, ? extends V> m) {
    // Never mutate the published map in place; readers hold references to it.
    WeakHashMap<K, V> replacement = new WeakHashMap<K, V>(map);
    replacement.putAll(m);
    map = Collections.unmodifiableMap(replacement);
}
/**
 * Bulk-inserts the entries of {@code m}. The backing map is replaced
 * atomically (under this method's lock) by a new unmodifiable snapshot
 * containing the old entries plus the new ones.
 *
 * @param m the entries to insert
 */
@Override
public synchronized void putAll(Map<? extends K, ? extends V> m) {
    WeakHashMap<K, V> snapshot = new WeakHashMap<K, V>(map);
    snapshot.putAll(m);
    map = Collections.unmodifiableMap(snapshot);
}
/**
 * Creates a <tt>WeakHashMap</tt> containing the same mappings as the
 * given map, with the default load factor (0.75) and an initial capacity
 * large enough to hold those mappings without rehashing.
 *
 * @param m the map whose mappings are to be placed in this map
 * @throws NullPointerException if the specified map is null
 * @since 1.3
 */
public WeakHashMap(Map<? extends K, ? extends V> m) {
    // Capacity sized so that m's entries fit under the default load factor,
    // with a floor of 16.
    this(Math.max(16, (int) (m.size() / DEFAULT_LOAD_FACTOR) + 1), DEFAULT_LOAD_FACTOR);
    putAll(m);
}
/**
 * Builds a <tt>WeakHashMap</tt> pre-populated with the mappings of
 * {@code m}. Uses the default load factor (0.75) and an initial capacity
 * sufficient to hold all of {@code m}'s entries.
 *
 * @param m the map whose mappings are to be placed in this map
 * @throws NullPointerException if the specified map is null
 * @since 1.3
 */
public WeakHashMap(Map<? extends K, ? extends V> m) {
    this(Math.max(16, (int) (m.size() / DEFAULT_LOAD_FACTOR) + 1), DEFAULT_LOAD_FACTOR);
    putAll(m);
}
public static long calculateHash(Method method) { Map methodHashes = (Map)hashMap.get(method.getDeclaringClass()); if (methodHashes == null) { methodHashes = getInterfaceHashes(method.getDeclaringClass()); // Copy and add WeakHashMap newHashMap = new WeakHashMap(); newHashMap.putAll(hashMap); newHashMap.put(method.getDeclaringClass(), methodHashes); hashMap = newHashMap; } return ((Long)methodHashes.get(method.toString())).longValue(); }
/**
 * Returns a copy of this set. The clone gets its own backing
 * {@link WeakHashMap} populated with this set's entries, so the two sets
 * evolve independently afterwards.
 *
 * @return a copy of this set
 */
@Override
public Object clone() {
    try {
        @SuppressWarnings("unchecked")
        WeakHashSet<E> newSet = (WeakHashSet<E>) super.clone();
        newSet.map = new WeakHashMap<E, Object>();
        newSet.map.putAll(map);
        return newSet;
    } catch (CloneNotSupportedException e) {
        // Cannot happen: this class implements Cloneable. Preserve the
        // cause anyway (the original threw a bare InternalError, losing it).
        throw new InternalError(e);
    }
}
public static long calculateHash(Method method) { Map methodHashes = (Map)hashMap.get(method.getDeclaringClass()); if (methodHashes == null) { methodHashes = getInterfaceHashes(method.getDeclaringClass()); // Copy and add WeakHashMap newHashMap = new WeakHashMap(); newHashMap.putAll(hashMap); newHashMap.put(method.getDeclaringClass(), methodHashes); hashMap = newHashMap; } return ((Long)methodHashes.get(method.toString())).longValue(); }
public static long calculateHash(Method method) { Map<String, Long> methodHashes = methodHashesByName.get(method.getDeclaringClass()); if (methodHashes == null) { methodHashes = getInterfaceHashes(method.getDeclaringClass()); // Copy and add WeakHashMap<Class<?>, Map<String, Long>> newHashMap = new WeakHashMap<Class<?>, Map<String, Long>>(); newHashMap.putAll(methodHashesByName); newHashMap.put(method.getDeclaringClass(), methodHashes); methodHashesByName = newHashMap; } return methodHashes.get(method.toString()).longValue(); }
/**
 * Copy constructor. Creates a fresh lock, then — if {@code src} is
 * non-null — snapshots the source writer's state while holding the
 * source's lock: charset, current file and writer, a clone of the
 * rollover policy ({@code lfPolicy}), the max file size, and the pending
 * write queue together with its per-entry size map.
 *
 * NOTE(review): {@code this.writeQueue} is synchronized on without a null
 * check even though {@code src.writeQueue} is null-checked — assumes this
 * instance's queue is always initialized at field declaration; verify.
 * NOTE(review): when {@code src.lfPolicy} is null the ternary assigns
 * {@code this.lfPolicy} to itself (a deliberate keep-default no-op,
 * presumably).
 *
 * @param src writer to copy state from; may be null, leaving defaults
 */
public BaseLogFileWriter( BaseLogFileWriter src ){ lock = new ReentrantLock(); if( src!=null ){ try{ src.lock.lock(); this.charset = src.charset; this.currentFile = src.currentFile; this.currentWriter = src.currentWriter; this.lfPolicy = src.lfPolicy!=null ? src.lfPolicy.clone() : this.lfPolicy; this.maxCurrentFileSize = src.maxCurrentFileSize; if( src.writeQueue!=null ){ synchronized( src.writeQueue ){ synchronized( this.writeQueue ){ this.writeQueue.addAll(src.writeQueue); if( src.writeQueueSize!=null )this.writeQueueSize.putAll(src.writeQueueSize); } } } }finally{ src.lock.unlock(); } } }
@Override public WeakHashSet<T> clone() { //WeakHashSet<T> newSet = (WeakHashSet<T>) super.clone(); //newSet.map = (WeakHashMap<T, Object>) map.clone(); WeakHashSet<T> newSet = new WeakHashSet<T>(map.size()); newSet.map.putAll(this.map); return newSet; }
@Override public WeakHashSet<T> clone() { //WeakHashSet<T> newSet = (WeakHashSet<T>) super.clone(); //newSet.map = (WeakHashMap<T, Object>) map.clone(); WeakHashSet<T> newSet = new WeakHashSet<T>(map.size()); newSet.map.putAll(this.map); return newSet; }
/**
 * Rebuilds the native-item-to-wrapper map from the native menu's current
 * items, discarding entries whose items are no longer in the menu.
 * No-op when the map is already empty.
 */
public void invalidate() {
    if (mNativeMap.isEmpty()) {
        return;
    }
    final WeakHashMap<android.view.MenuItem, MenuItem> refreshed =
            new WeakHashMap<android.view.MenuItem, MenuItem>(mNativeMap.size());
    // Keep only the wrappers whose native items are still in the menu.
    for (int index = 0; index < mNativeMenu.size(); index++) {
        final android.view.MenuItem nativeItem = mNativeMenu.getItem(index);
        refreshed.put(nativeItem, mNativeMap.get(nativeItem));
    }
    mNativeMap.clear();
    mNativeMap.putAll(refreshed);
}
/**
 * When running in the microcontainer with aspects installed as beans, a
 * ClassProxyContainer will be created per bean to check if this bean needs
 * interceptors; each container creates a subscribed domain for matching.
 * This subscribed domain is added to a queue, which is drained into the
 * main map here when we need to iterate over the subscribed domains.
 *
 * Uses a reference count ({@code subscribedDomainQueueRef}) so the queue is
 * only drained when no iteration is in progress: {@code increment == true}
 * raises the count after a possible drain; {@code increment == false}
 * lowers it first. All of this happens under the
 * {@code subscribedSubDomains} monitor.
 *
 * @param increment true to acquire a reference, false to release one
 * @return true if queued sub-domains were copied into the main map
 */
private boolean copySubDomainsFromQueue(boolean increment) { boolean copied = false; initSubscribedSubDomainsMap(); synchronized (subscribedSubDomains) { if (!increment && subscribedDomainQueueRef > 0) subscribedDomainQueueRef--; if (subscribedDomainQueueRef == 0 && subscribedSubDomainsQueue.size() > 0){ subscribedSubDomains.putAll(subscribedSubDomainsQueue); subscribedSubDomainsQueue.clear(); copied = true; } if (increment) subscribedDomainQueueRef++; } return copied; }
// Copy reader state from the sample instance.
// NOTE(review): the original copied beanReadersOrder from itself
// (Arrays.copyOf(beanReadersOrder, ...)), discarding sample's order —
// copying from sample matches the surrounding sample.* copies; confirm intent.
beanReaders.addAll(sample.beanReaders);
beanReadersOrder = Arrays.copyOf(sample.beanReadersOrder, sample.beanReadersOrder.length);
failedPropertyEditor.putAll(sample.failedPropertyEditor);