public static QDigest deserialize(byte[] b) {
    ByteArrayInputStream bis = new ByteArrayInputStream(b);
    DataInputStream s = new DataInputStream(bis);
    try {
        // header: size, compression factor, capacity, number of entries
        long size = s.readLong();
        double compressionFactor = s.readDouble();
        long capacity = s.readLong();
        int count = s.readInt();
        QDigest d = new QDigest(compressionFactor);
        d.size = size;
        d.capacity = capacity;
        // body: count (node, count) pairs
        for (int i = 0; i < count; ++i) {
            long k = s.readLong();
            long n = s.readLong();
            d.node2count.put(k, n);
        }
        return d;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
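// Hedged usage sketch for the deserializer above. It assumes the companion
// members QDigest.serialize(QDigest), offer(long) and getQuantile(double)
// exist on this class (as in the stream-lib QDigest); they are not shown in
// the snippet above, so treat this as an illustration, not part of the original code.
static void roundTripExample() {
    QDigest original = new QDigest(32);            // compression factor 32
    for (long v = 0; v < 1000; v++) {
        original.offer(v);                         // assumed insertion method
    }
    byte[] bytes = QDigest.serialize(original);    // assumed counterpart of deserialize
    QDigest copy = QDigest.deserialize(bytes);
    long median = copy.getQuantile(0.5);           // quantile query survives the round trip
    System.out.println("median ~ " + median);
}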
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException(
                "Compression factors must be the same: "
                        + "left is " + a.compressionFactor + ", "
                        + "right is " + b.compressionFactor);
    }
    // ensure a is the digest with the smaller (or equal) capacity
    if (a.capacity > b.capacity) {
        return unionOf(b, a);
    }
    QDigest res = new QDigest(a.compressionFactor);
    res.capacity = a.capacity;
    res.size = a.size + b.size;
    // copy a's counts, then grow to b's capacity and add b's counts
    for (long k : a.node2count.keySet()) {
        res.node2count.put(k, a.node2count.get(k));
    }
    if (b.capacity > res.capacity) {
        res.rebuildToCapacity(b.capacity);
    }
    for (long k : b.node2count.keySet()) {
        res.node2count.put(k, b.get(k) + res.get(k));
    }
    res.compressFully();
    return res;
}
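// Hedged sketch of how unionOf can be used to merge per-shard digests into one
// summary; offer(long) and getQuantile(double) are assumed to exist on this
// class (as in stream-lib) and are not part of the snippet above.
static void unionExample() {
    QDigest shardA = new QDigest(64);
    QDigest shardB = new QDigest(64);              // compression factors must match
    for (long v = 0; v < 500; v++) shardA.offer(v);
    for (long v = 500; v < 1000; v++) shardB.offer(v);
    QDigest merged = QDigest.unionOf(shardA, shardB);
    long p99 = merged.getQuantile(0.99);           // quantile over the combined stream
    System.out.println("p99 ~ " + p99);
}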
private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // rebuild to newLogCapacity.
    // This means that our current tree becomes a leftmost subtree
    // of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    // node 1 => 8 (+= 7 = 2^0*(2^3-1))
    // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1))
    // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
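// Small self-contained check of the index arithmetic used above: growing from
// capacity 4 (logCapacity 2) to 32 (logCapacity 5) gives scaleR = 32/4 - 1 = 7,
// and each node k shifts by scaleL * scaleR, where scaleL is the largest power
// of two <= k. This only re-derives the worked example from the comment.
static void rebuildIndexExample() {
    long capacity = 4, newCapacity = 32;
    long scaleR = newCapacity / capacity - 1;      // 7
    for (long k : new long[] {1, 2, 3, 4, 7}) {
        long scaleL = 1;
        while (scaleL <= k / 2) scaleL <<= 1;      // 2^(depth of node k)
        System.out.println(k + " -> " + (k + scaleL * scaleR));
        // prints 1 -> 8, 2 -> 16, 3 -> 17, 4 -> 32, 7 -> 35
    }
}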
/**
 * Creates a new hash map using the elements of two parallel arrays.
 *
 * @param k the array of keys of the new hash map.
 * @param v the array of corresponding values in the new hash map.
 * @param f the load factor.
 * @throws IllegalArgumentException if {@code k} and {@code v} have different lengths.
 */
public Long2LongOpenHashMap(final long[] k, final long[] v, final float f) {
    this(k.length, f);
    if (k.length != v.length)
        throw new IllegalArgumentException(
                "The key array and the value array have different lengths ("
                        + k.length + " and " + v.length + ")");
    for (int i = 0; i < k.length; i++)
        this.put(k[i], v[i]);
}
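// Brief usage sketch for the parallel-array constructor above; the only API it
// relies on (get, the constructor itself) already appears in the snippets in
// this listing. The key/value data is made up for the example.
static void parallelArrayConstructorExample() {
    long[] keys   = {10L, 20L, 30L};
    long[] values = {1L, 2L, 3L};
    Long2LongOpenHashMap map = new Long2LongOpenHashMap(keys, values, 0.75f);
    long v = map.get(20L);                         // 2
    // mismatched lengths throw IllegalArgumentException:
    // new Long2LongOpenHashMap(new long[] {1L}, new long[] {}, 0.75f);
}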
@Override
protected void trainInternal(List<Item> items, List<ClickData> clickData) {
    // for each click, update the last-clicked time of the item that has been clicked
    for (ClickData c : clickData) {
        itemClickTime.put(c.click.item.id, c.click.timestamp.getTime());
    }
    // let the underlying algorithm train
    mainStrategy.train(items, clickData);
}
@Override
protected void trainInternal(List<Item> items, List<ClickData> clickData) {
    // for each click, extract the item's publication time and add it to the map
    for (ClickData c : clickData) {
        timestampMap.put(c.click.item.id, c.click.item.createdAt.getTime());
    }
    // let the underlying algorithm train
    mainStrategy.train(items, clickData);
}
/** Creates a new subset lexical strategy.
 * @param subset the subset of terms.
 */
public FrequencyLexicalStrategy( final LongSet subset ) {
    final long[] t = subset.toLongArray();
    Arrays.sort( t );
    localNumber = new Long2LongOpenHashMap();
    localNumber.defaultReturnValue( -1 );
    for( int i = 0; i < t.length; i++ ) localNumber.put( t[ i ], i );
}
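// Hedged illustration of the mapping the constructor above builds: the terms
// in the subset are sorted and assigned dense local numbers 0..n-1, and any
// term outside the subset maps to -1 (the default return value). The
// LongOpenHashSet used here is just a convenient LongSet implementation and
// is not part of the original snippet.
static void subsetMappingExample() {
    LongSet subset = new LongOpenHashSet(new long[] {42L, 7L, 99L});
    long[] t = subset.toLongArray();
    Arrays.sort(t);                                 // 7, 42, 99
    Long2LongOpenHashMap localNumber = new Long2LongOpenHashMap();
    localNumber.defaultReturnValue(-1);
    for (int i = 0; i < t.length; i++) localNumber.put(t[i], i);
    // localNumber.get(7)  == 0, localNumber.get(42) == 1,
    // localNumber.get(99) == 2, localNumber.get(5)  == -1
}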
private static Long2LongOpenHashMap mapRemoteVertices(IPartitionBuilder partitionBuilder,
        LongArrayList remoteVertices) throws IOException {
    // map remote vertices
    Long2LongOpenHashMap remoteVerticesMappings = new Long2LongOpenHashMap(remoteVertices.size(), 1f);
    for (ISerializablePartition partition : partitionBuilder.getPartitions()) {
        for (long remoteVertexId : remoteVertices) {
            ISubgraph subgraph = partition.getSubgraphForVertex(remoteVertexId);
            if (subgraph != null && !subgraph.getVertex(remoteVertexId).isRemote()) {
                assert (!remoteVerticesMappings.containsKey(remoteVertexId));
                remoteVerticesMappings.put(remoteVertexId, subgraph.getId());
            }
        }
    }
    return remoteVerticesMappings;
}
this.singles.instance_materials.put(m_id, m);
this.singles.instance_shaders.put(s_id, shader);
this.singles.instance_to_material.put(i_id, m_id);
this.batches.instance_materials.put(m_id, m);
this.batches.instance_shaders.put(s_id, shader);
this.batches.instance_to_material.put(i_id, m_id);
// Two fragments from the enclosing codec-construction code; v, counts, size,
// w, escape and remap are fields/locals of the surrounding context. The
// original one-liner lost the brace closing the first loop and reused the
// name i for both the iterator and the inner index, which would not compile;
// the fragments are separated here and the loop is closed.
// fragment 1: collect value frequencies and the maximum bit length
for (final LongIterator i = values.iterator(); i.hasNext();) {
    v = i.nextLong();
    counts.put(v, counts.get(v) + 1);
    size = Fast.length(v);
    if (size > w) w = size;
}
// fragment 2: map the first escape remapped symbols to their positions
final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
map.defaultReturnValue(-1);
for (int i = 0; i < escape; i++) map.put(remap[i], i);
// fragment: track the maximum bit width and occurrence count of each value;
// i, width, counts and l belong to the enclosing context
final long value = i.nextLong();
width = Math.max(width, Fast.mostSignificantBit(value) + 1);
counts.put(value, counts.get(value) + 1);
l++;
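// Self-contained sketch of the frequency-counting idiom used in the fragments
// above: Long2LongOpenHashMap's default return value of 0 lets get(v) + 1
// work for keys that have not been seen yet. The input array is made up for
// the example.
static void frequencyCountExample() {
    long[] values = {3L, 5L, 3L, 7L, 3L, 5L};
    Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    for (long v : values) {
        counts.put(v, counts.get(v) + 1);          // missing keys read as 0
    }
    // counts.get(3) == 3, counts.get(5) == 2, counts.get(7) == 1
}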