// Re-initialises the used-messages map with the given starting capacity.
// The null-values list is reset to a fresh empty list only when one was
// previously allocated — otherwise it deliberately stays null (lazy alloc).
// NOTE(review): raw-typed LightHashMap/ArrayList matches the file's style.
public void clearUsedMessagesMap(int initialCapacity) {
    used_messages = new LightHashMap(initialCapacity);
    if (null_values != null) {
        null_values = new ArrayList(0);
    }
}
}
/**
 * Returns the value previously associated with {@code key} via
 * {@code setUserData}, or {@code null} when no user-data map exists yet
 * or the key is absent. Thread-safe via synchronization on {@code this}.
 */
@Override
public Object getUserData( Object key )
{
    synchronized( this ){
        return( user_data == null ? null : user_data.get( key ));
    }
}
}
/**
 * Associates {@code data} with {@code key} in the per-object user-data map,
 * allocating the map lazily on the first write. Thread-safe via
 * synchronization on {@code this}.
 */
@Override
public void setUserData( Object key, Object data )
{
    synchronized( this ){
        if ( user_data == null ){
            // first write — allocate the backing map on demand
            user_data = new LightHashMap<>();
        }
        user_data.put( key, data );
    }
}
/**
 * Removes the mapping for {@code key} (a {@code null} key is translated to
 * the internal NULLKEY sentinel) and returns the previous value, or
 * {@code null} when the map is empty or holds no such key.
 */
@Override
public T remove(Object key) {
    if (size == 0) {
        return null;
    }
    final Object lookupKey = (key == null) ? NULLKEY : key;
    final int slot = findIndex(lookupKey);
    if (!keysEqual(data[slot], lookupKey)) {
        // probe ended on a free/tombstone/other slot — key not present
        return null;
    }
    return (T) removeForIndex(slot);
}
/**
 * Returns whether a mapping for {@code key} exists ({@code null} keys are
 * translated to the internal NULLKEY sentinel). Uses the non-modifying
 * probe so the table is left untouched.
 */
@Override
public boolean containsKey(Object key) {
    if (size == 0) {
        return false;
    }
    final Object probeKey = (key == null) ? NULLKEY : key;
    final int slot = nonModifyingFindIndex(probeKey);
    return keysEqual(probeKey, data[slot]);
}
/**
 * Inserts or replaces the mapping for {@code key} and returns the previous
 * value (or {@code null}). Capacity is grown first so the insert itself can
 * never exceed the load factor.
 */
@Override
public T put(final Object key, final Object value) {
    checkCapacity(1);
    final Object previous = add(key, value, false);
    return (T) previous;
}
// NOTE(review): this fragment appears truncated/garbled — the method body's
// opening '{' is missing after the signature and the method neither returns
// 'res' nor closes within view. Preserved byte-for-byte; reconcile against
// the full original before editing. What is visible: builds a per-peer cache
// entry ("ip"/"src"/"port"/"udpport"/"httpport"/"prot"/"azver") for tracker
// cache export, then compacts the entry map at a 0.9 load factor.
protected Map exportTrackerCache() Map res = new LightHashMap(1); LightHashMap entry = new LightHashMap(); entry.put( "ip", peer.getAddress().getBytes()); entry.put( "src", peer.getSource().getBytes()); entry.put( "port", new Long(peer.getPort())); entry.put( "udpport", new Long( udp_port)); entry.put( "httpport", new Long( http_port)); entry.put( "prot", new Long(peer.getProtocol())); entry.put( "azver", new Long( az_ver )); entry.compactify(0.9f);
/**
 * Stores — or, when {@code value} is {@code null}, removes — a transient
 * taggable property. The backing map is created on first write and discarded
 * again once its last entry is removed, keeping idle objects lightweight.
 */
@Override
public void setTaggableTransientProperty(String key, Object value) {
    synchronized (TTP_KEY) {
        LightHashMap<String, Object> props =
            (LightHashMap<String, Object>) getUserData(TTP_KEY);
        if (props != null) {
            if (value != null) {
                props.put(key, value);
            } else {
                props.remove(key);
                if (props.size() == 0) {
                    // last property gone — drop the empty map entirely
                    setUserData(TTP_KEY, null);
                }
            }
        } else if (value != null) {
            // first property for this object — allocate and register the map
            props = new LightHashMap<>();
            props.put(key, value);
            setUserData(TTP_KEY, props);
        }
        // else: removing from a non-existent map is a no-op
    }
}
// NOTE(review): fragment of a micro-benchmark harness — statements extracted
// from a larger method; the fill loops and some timing prints referenced by
// the labels ("fill:", "transfer to hashmap") appear to have been elided.
final Map m2 = new LightHashMap();
System.out.println("fill:");
time = System.currentTimeMillis();
System.out.println("compactify light map");
time = System.currentTimeMillis();
((LightHashMap) m2).compactify(0.90f);
System.out.println(System.currentTimeMillis() - time);
System.out.println("transfer to hashmap");
System.out.println("transfer to lighthashmap");
time = System.currentTimeMillis();
// copy-constructed purely for timing; result intentionally discarded
new LightHashMap(m1);
System.out.println(System.currentTimeMillis() - time);
time = System.currentTimeMillis();
new LightHashMap(m2);
System.out.println(System.currentTimeMillis() - time);
System.out.println("remove entry by entry");
// NOTE(review): benchmark-driver fragment — runs test() twice with an
// explicit GC between runs (presumably to reduce warm-up and garbage skew
// in the second run's timings), then allocates the map used by subsequent
// out-of-view benchmark steps.
test();
System.out.println("-------------------------------------");
System.gc();
test();
final Map m2 = new LightHashMap();
/**
 * Core insert primitive: stores {@code value} under {@code key} ({@code null}
 * mapped to the NULLKEY sentinel) and returns the value previously held in
 * that slot. With {@code bulkAdd} the non-modifying probe is used — assumes
 * the caller has already ensured sufficient capacity (TODO confirm).
 */
private Object add(Object key, final Object value, final boolean bulkAdd) {
    final Object storeKey = (key == null) ? NULLKEY : key;
    final int slot = bulkAdd ? nonModifyingFindIndex(storeKey) : findIndex(storeKey);
    final Object previous = data[slot + 1];
    final Object occupant = data[slot];
    if (occupant == null || occupant == THOMBSTONE) {
        // free or tombstoned slot — this is a brand-new mapping
        data[slot] = storeKey;
        size++;
    }
    // value always lives in the slot directly after its key
    data[slot + 1] = value;
    return previous;
}
// NOTE(review): garbled fragment — this looks like the interior of
// findIndex()'s probing loop (compare nonModifyingFindIndex): the while-body
// braces, tombstone bookkeeping, and probe-advance statements appear to be
// missing, so as written the loop body is only the compactify-if and the
// statements after it are dead w.r.t. thombStoneIndex (set to -1 right before
// the != -1 test). Preserved byte-for-byte; reconcile against the full
// findIndex implementation before changing.
final int thombStoneThreshold = Math.min((data.length>>1)-size, 100);
while (data[newIndex] != null && !keysEqual(data[newIndex], keyToFind))
    if(thombStoneCount * 2 > thombStoneThreshold)
        compactify(0.f);
thombStoneIndex = -1;
probe = 0;
if (thombStoneIndex != -1 && !keysEqual(data[newIndex], keyToFind))
    return thombStoneIndex;
return newIndex;
/**
 * Ensures the table can absorb {@code n} more entries without exceeding the
 * load factor, doubling the bucket count as many times as necessary.
 */
private void checkCapacity(final int n) {
    // data holds key/value pairs, so bucket count is half the array length
    final int capacity = data.length >> 1;
    final int required = size + n;
    if (required < capacity * loadFactor) {
        return; // still within the load factor — nothing to do
    }
    int grown = capacity;
    do {
        grown <<= 1;
    } while (grown * loadFactor < required);
    adjustCapacity(grown);
}
/**
 * Returns the value mapped to {@code key} ({@code null} keys translated to
 * the NULLKEY sentinel), or {@code null} when absent. Uses the non-modifying
 * probe; the value occupies the slot directly after its key.
 */
@Override
public T get(Object key) {
    final Object lookupKey = (key == null) ? NULLKEY : key;
    return (T) data[nonModifyingFindIndex(lookupKey) + 1];
}
/**
 * Rebuilds the backing array at {@code newSize} buckets (two slots per
 * bucket), re-inserting every live entry; tombstones are dropped as a side
 * effect of the rebuild.
 */
private void adjustCapacity(final int newSize) {
    final Object[] previous = data;
    data = new Object[newSize * 2];
    size = 0; // add() re-counts each live entry during re-insertion
    for (int slot = 0; slot < previous.length; slot += 2) {
        final Object k = previous[slot];
        if (k != null && k != THOMBSTONE) {
            // bulk-add: table was just sized, no capacity check needed
            add(k, previous[slot + 1], true);
        }
    }
}
/**
 * Probes for {@code keyToFind} without mutating the table. Returns the index
 * of the matching key when present; otherwise the first tombstone seen along
 * the probe path (preferred insertion point) or, failing that, the slot the
 * probe terminated on.
 */
private int nonModifyingFindIndex(final Object keyToFind) {
    // shifted left because keys occupy even slots (value at key index + 1)
    final int hash = keyToFind.hashCode() << 1;
    /* disabled HashMap-style supplemental hash mixing:
     * hash ^= (hash >>> 20) ^ (hash >>> 12);
     * hash ^= (hash >>> 7) ^ (hash >>> 4); */
    int probe = 1;
    int newIndex = hash & (data.length - 1);
    int thombStoneIndex = -1;
    // search until we find a free entry or an entry matching the key to
    // insert; quadratic probing, bounded by the bucket count (length/2)
    while (data[newIndex] != null && !keysEqual(data[newIndex], keyToFind) && probe < (data.length>>1)) {
        // remember the first tombstone so an insert can reuse that slot
        if(data[newIndex] == THOMBSTONE && thombStoneIndex == -1)
            thombStoneIndex = newIndex;
        newIndex = (hash + probe + probe * probe) & (data.length - 1);
        probe++;
    }
    // key absent -> prefer the recorded tombstone over the terminating slot
    if (thombStoneIndex != -1 && !keysEqual(data[newIndex], keyToFind))
        return thombStoneIndex;
    return newIndex;
}
// NOTE(review): truncated fragment — the if-block opened here never closes
// within view. A fresh map's put() returning non-null looks unreachable from
// what is visible; presumably this guards an exceptional path — confirm
// against the full original. compactify(-0.9f): per the compactify javadoc,
// a negative load factor means "only shrink when it reduces storage".
LightHashMap tempMap = new LightHashMap();
if ( tempMap.put( key, value) != null ){
    tempMap.compactify(-0.9f);
@Override public void putAll(final Map m) { checkCapacity(m.size()); for (final Iterator it = m.entrySet().iterator(); it.hasNext();) { final Map.Entry entry = (Map.Entry) it.next(); add(entry.getKey(), entry.getValue(),true); } // compactify in case we overestimated the new size due to redundant entries //compactify(0.f); }
/**
 * Shrinks the internal storage to the smallest power-of-two size that still
 * honours the load factor — useful after removing many entries.
 *
 * @param compactingLoadFactor
 *            load factor for the compacting operation. Use 0f to compact
 *            with the load factor specified during instantiation. Pass the
 *            desired load factor negated to compact only when doing so
 *            would actually reduce the storage size.
 */
public void compactify(float compactingLoadFactor) {
    float targetLoad = Math.abs(compactingLoadFactor);
    if (targetLoad <= 0.f || targetLoad >= 1.f) {
        targetLoad = loadFactor; // out-of-range -> instance load factor
    }
    // smallest power-of-two bucket count keeping size+1 under the load
    int targetCapacity = 1;
    while (targetCapacity * targetLoad < (size + 1)) {
        targetCapacity <<= 1;
    }
    final boolean wouldShrink = targetCapacity < data.length / 2;
    // negative argument = shrink-only mode; non-negative always rebuilds
    if (wouldShrink || compactingLoadFactor >= 0.f) {
        adjustCapacity(targetCapacity);
    }
}
/**
 * Records an additional named property, lazily creating the backing map on
 * the first write (the field stays {@code null} until then — hence its name).
 */
protected void setAdditionalProperty( String name, Object value ) {
    if ( additional_properties_maybe_null == null ){
        additional_properties_maybe_null = new LightHashMap();
    }
    additional_properties_maybe_null.put( name, value );
}