private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // Rebuild to newCapacity.
    // This means that our current tree becomes a leftmost subtree
    // of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    // node 1     => 8      (+= 7  = 2^0 * (2^3 - 1))
    // nodes 2..3 => 16..17 (+= 14 = 2^1 * (2^3 - 1))
    // nodes 4..7 => 32..35 (+= 28 = 2^2 * (2^3 - 1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    long[] keys = node2count.keySet().toLongArray();
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
/**
 * Restores P2 at the node and upward along the spine. Note that P2 can
 * vanish at some nodes sideways as a result of this. We'll fix that
 * later in compressFully when needed.
 */
private void compressUpward(long node) {
    double threshold = Math.floor(size / compressionFactor);
    long atNode = get(node);
    while (!isRoot(node)) {
        if (atNode > threshold) {
            break;
        }
        long atSibling = get(sibling(node));
        if (atNode + atSibling > threshold) {
            break;
        }
        long atParent = get(parent(node));
        if (atNode + atSibling + atParent > threshold) {
            break;
        }
        // Fold the node and its sibling into the parent.
        node2count.addTo(parent(node), atNode + atSibling);
        node2count.remove(node);
        if (atSibling > 0) {
            node2count.remove(sibling(node));
        }
        node = parent(node);
        atNode = atParent + atNode + atSibling;
    }
}
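// A worked instance of the threshold check above (illustrative numbers, not
// from the source): with size = 100 and compressionFactor = 20, threshold =
// floor(100 / 20) = 5. A node with count 2, sibling count 1 and parent count
// 2 sums to 5 <= threshold, so node and sibling are folded into the parent,
// which then holds 5, and the loop retries one level further up the spine.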
public static byte[] serialize(QDigest d) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream s = new DataOutputStream(bos);
    try {
        s.writeLong(d.size);
        s.writeDouble(d.compressionFactor);
        s.writeLong(d.capacity);
        s.writeInt(d.node2count.size());
        for (long k : d.node2count.keySet()) {
            s.writeLong(k);
            s.writeLong(d.node2count.get(k));
        }
        return bos.toByteArray();
    } catch (IOException e) {
        // Should never happen
        throw new RuntimeException(e);
    }
}
final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
// Absent keys return 0 (the fastutil default), so addTo() below starts each
// count at zero; a get-then-put with a default of -1 would undercount by one.
for (final LongIterator i = values.iterator(); i.hasNext();) {
    v = i.nextLong();
    counts.addTo(v, 1);
    size = Fast.length(v);
    if (size > w) w = size; // track the maximum bit width seen
}

final int m = counts.size();
final long[] keysArray = counts.keySet().toLongArray(new long[m]);
// Sort keys by decreasing frequency; Long.compare avoids the overflow risk
// of subtracting two long counts inside the comparator.
LongArrays.quickSort(keysArray, 0, keysArray.length,
        (a, b) -> Long.compare(counts.get(b), counts.get(a)));
for (int i = 0; i < keysArray.length; i++) mean += i * counts.get(keysArray[i]);
rankMean = (double)mean / n;
final long c = counts.get(keysArray[pos++]);
post -= c;
counts.clear();
counts.trim();
// Keep the (1 << best) - 1 most frequent keys and remap them to small codes.
escape = (1 << best) - 1;
System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
map.defaultReturnValue(-1);
for (int i = 0; i < escape; i++) map.put(remap[i], i);
AllocationMetricCollector(Method method, ThreadMXBean threadMXBean) {
    this.getThreadAllocatedBytes = method;
    this.threadMXBean = threadMXBean;
    // Per-thread byte counts from the previous sample; threads not yet
    // sampled report NO_DATA via the default return value.
    previousResults = new Long2LongOpenHashMap();
    previousResults.defaultReturnValue(NO_DATA);
}
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException(
                "Compression factors must be the same: " +
                "left is " + a.compressionFactor + ", " +
                "right is " + b.compressionFactor);
    }
    if (a.capacity > b.capacity) {
        return unionOf(b, a);
    }
    QDigest res = new QDigest(a.compressionFactor);
    res.capacity = a.capacity;
    res.size = a.size + b.size;
    for (long k : a.node2count.keySet()) {
        res.node2count.put(k, a.node2count.get(k));
    }
    if (b.capacity > res.capacity) {
        res.rebuildToCapacity(b.capacity);
    }
    for (long k : b.node2count.keySet()) {
        res.node2count.put(k, b.get(k) + res.get(k));
    }
    res.compressFully();
    return res;
}
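// Usage sketch (hedged; assumes two digests built with the same
// compressionFactor via the offer(...) method shown elsewhere in this class):
//
//   QDigest shard1 = new QDigest(64);
//   QDigest shard2 = new QDigest(64);
//   // ... offer values into each shard independently ...
//   QDigest merged = QDigest.unionOf(shard1, shard2);
//   // merged approximates quantiles over the combined stream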
/** Creates a new subset lexical strategy.
 * @param subset the subset of terms. */
public FrequencyLexicalStrategy( final LongSet subset ) {
    final long[] t = subset.toLongArray();
    Arrays.sort( t );
    localNumber = new Long2LongOpenHashMap();
    localNumber.defaultReturnValue( -1 );
    for( int i = 0; i < t.length; i++ ) localNumber.put( t[ i ], i );
}
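// Worked example (hypothetical subset, not from the source): for
// subset = {9, 2, 5} the sorted array is t = [2, 5, 9], so localNumber maps
// 2 -> 0, 5 -> 1 and 9 -> 2, while any term outside the subset yields the
// default value -1.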
private static Long2LongOpenHashMap mapRemoteVertices(
        IPartitionBuilder partitionBuilder,
        LongArrayList remoteVertices) throws IOException {
    // map remote vertices
    Long2LongOpenHashMap remoteVerticesMappings =
            new Long2LongOpenHashMap(remoteVertices.size(), 1f);
    for (ISerializablePartition partition : partitionBuilder.getPartitions()) {
        for (long remoteVertexId : remoteVertices) {
            ISubgraph subgraph = partition.getSubgraphForVertex(remoteVertexId);
            if (subgraph != null && !subgraph.getVertex(remoteVertexId).isRemote()) {
                assert (!remoteVerticesMappings.containsKey(remoteVertexId));
                remoteVerticesMappings.put(remoteVertexId, subgraph.getId());
            }
        }
    }
    return remoteVerticesMappings;
}
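// Design note (one reading of the constructor above, not stated in the
// source): the load factor of 1f packs the table as densely as fastutil
// allows, trading longer probe sequences for memory; that is defensible here
// because the expected size is known up front (remoteVertices.size()) and the
// map is built once, then only read.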
public static QDigest deserialize(byte[] b) {
    ByteArrayInputStream bis = new ByteArrayInputStream(b);
    DataInputStream s = new DataInputStream(bis);
    try {
        long size = s.readLong();
        double compressionFactor = s.readDouble();
        long capacity = s.readLong();
        int count = s.readInt();
        QDigest d = new QDigest(compressionFactor);
        d.size = size;
        d.capacity = capacity;
        for (int i = 0; i < count; ++i) {
            long k = s.readLong();
            long n = s.readLong();
            d.node2count.put(k, n);
        }
        return d;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
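// Round-trip sketch combining serialize(...) and deserialize(...) above (a
// usage example, not part of the class):
//
//   byte[] bytes = QDigest.serialize(digest);
//   QDigest copy = QDigest.deserialize(bytes);
//   // copy carries the same size, compression factor, capacity and node counts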
frequencies = new Long2LongOpenHashMap();
for (final long v : values) frequencies.addTo(v, 1);
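// Minimal sketch of the addTo semantics relied on above: for an absent key,
// addTo starts from the map's default return value (0 unless changed), so the
// first increment stores 1 and no get-then-put round trip is needed.
//
//   Long2LongOpenHashMap freq = new Long2LongOpenHashMap();
//   freq.addTo(42L, 1); // 42 -> 1
//   freq.addTo(42L, 1); // 42 -> 2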
/** Returns the count stored at {@code node}, or 0 if the node is absent. */
private long get(long node) {
    return node2count.get(node);
}
@Override
public void offer(long value) {
    if (value < 0 || value > Long.MAX_VALUE / 2) {
        throw new IllegalArgumentException(
                "Can only accept values in the range 0.." + Long.MAX_VALUE / 2 +
                ", got " + value);
    }
    // Rebuild if the value is too large for the current tree height
    if (value >= capacity) {
        rebuildToCapacity(Long.highestOneBit(value) << 1);
    }
    long leaf = value2leaf(value);
    node2count.addTo(leaf, 1);
    size++;
    // Always compress at the inserted node, and recompress fully
    // if the tree becomes too large.
    // This is one sensible strategy which is both fast and keeps
    // the tree reasonably small (within the theoretical bound of 3k nodes).
    compressUpward(leaf);
    if (node2count.size() > 3 * compressionFactor) {
        compressFully();
    }
}
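// Usage sketch (hedged: getQuantile(double) is assumed from the surrounding
// q-digest implementation and is not shown in this excerpt):
//
//   QDigest digest = new QDigest(64);       // compressionFactor = 64
//   for (long v = 0; v < 1000; v++) digest.offer(v);
//   long p50 = digest.getQuantile(0.5);     // approximate median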
@Override
public LongArrayList recommendInternal(ClickData clickData) {
    // Filter out items that have not received at least one click in the last time frame.
    // First, retrieve the recommendation results of the underlying algorithm.
    LongArrayList rec = mainStrategy.recommendInternal(clickData);
    // Create lists of filtered items and retained items.
    LongArrayList filteredRec = new LongArrayList();
    LongArrayList filteredRecNotMatch = new LongArrayList();
    // Iterate over the recommendation list of the underlying strategy.
    for (int j = 0; j < rec.size(); j++) {
        long i = rec.getLong(j);
        // Filter items whose last-clicked timestamp is too old.
        if (itemClickTime.containsKey(i)
                && (clickData.click.timestamp.getTime() - itemClickTime.get(i)) < filterTime) {
            filteredRec.add(i);
        } else if (fallback) {
            // If we have a fallback, add the filtered item to the fallback list.
            filteredRecNotMatch.add(i);
        }
    }
    // Merge the filtered list with the fallback list (empty in case fallback == false).
    filteredRec.addAll(filteredRecNotMatch);
    // Return the filtered list.
    return filteredRec;
}
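// Illustration with hypothetical numbers: given filterTime = 3_600_000 (one
// hour) and a click at 12:00, an item last clicked at 11:30 is kept, while an
// item last clicked at 10:00 is dropped and re-appended at the tail of the
// result only when fallback == true.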
@Override
public void putAll(Map<? extends Long, ? extends Long> m) {
    if (f <= .5) ensureCapacity(m.size()); // The resulting map will be sized for m.size() elements
    else tryCapacity(size() + m.size()); // The resulting map will be tentatively sized for size() + m.size() elements
    super.putAll(m);
}
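// Usage sketch (hypothetical values): bulk insertion sizes the table once up
// front instead of rehashing repeatedly during the copy:
//
//   Long2LongOpenHashMap dst = new Long2LongOpenHashMap();
//   Map<Long, Long> src = Map.of(1L, 10L, 2L, 20L, 3L, 30L);
//   dst.putAll(src); // one capacity adjustment, then the element-wise copy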
@Override
public boolean remove(long k) {
    final int oldSize = size;
    Long2LongOpenHashMap.this.remove(k);
    return size != oldSize;
}

@Override
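// Design note (one reading of the idiom above): comparing size before and
// after is the reliable success signal here, because the value returned by
// the backing remove(k) can legitimately equal the default return value,
// making it ambiguous as an "absent key" indicator.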
/** Adds a triple to this store.
 *
 * @param triple the triple to be added.
 * @param value the associated value.
 */
private void add(final long[] triple, final long value) throws IOException {
    final int chunk = (int)(triple[0] >>> DISK_CHUNKS_SHIFT);
    count[chunk]++;
    checkedForDuplicates = false;
    if (DEBUG) System.err.println("Adding " + Arrays.toString(triple));
    writeLong(triple[0], byteBuffer[chunk], writableByteChannel[chunk]);
    writeLong(triple[1], byteBuffer[chunk], writableByteChannel[chunk]);
    writeLong(triple[2], byteBuffer[chunk], writableByteChannel[chunk]);
    if (hashMask == 0) writeLong(value, byteBuffer[chunk], writableByteChannel[chunk]);
    if (filteredSize != -1 && (filter == null || filter.evaluate(triple))) filteredSize++;
    if (value2FrequencyMap != null) value2FrequencyMap.addTo(value, 1);
    size++;
}
Long2LongMap newResults = new Long2LongOpenHashMap();
newResults.defaultReturnValue(NO_DATA);
for (int i = 0; i < allThreadIds.length; i++) {