// Count stored at the given tree node; fastutil's Long2LongOpenHashMap
// returns its default value (0 unless changed) for absent keys.
private long get(long node) {
    return node2count.get(node);
}
public static byte[] serialize(QDigest d) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream s = new DataOutputStream(bos);
    try {
        s.writeLong(d.size);
        s.writeDouble(d.compressionFactor);
        s.writeLong(d.capacity);
        s.writeInt(d.node2count.size());
        for (long k : d.node2count.keySet()) {
            s.writeLong(k);
            s.writeLong(d.node2count.get(k));
        }
        return bos.toByteArray();
    } catch (IOException e) {
        // Should never happen
        throw new RuntimeException(e);
    }
}
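// A hedged sketch of the matching read side, not part of the original listing:
// it reads the fields back in the same order serialize() wrote them (size,
// compressionFactor, capacity, entry count, then key/value pairs) and assumes
// it lives in the same class, so d.size, d.capacity and d.node2count are
// accessible. The method name is illustrative.
public static QDigest deserialize(byte[] b) {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(b));
    try {
        long size = in.readLong();
        double compressionFactor = in.readDouble();
        long capacity = in.readLong();
        int count = in.readInt();
        QDigest d = new QDigest(compressionFactor);
        d.size = size;
        d.capacity = capacity;
        for (int i = 0; i < count; i++) {
            long k = in.readLong();
            long v = in.readLong();
            d.node2count.put(k, v);
        }
        return d;
    } catch (IOException e) {
        // Should never happen: the stream is backed by an in-memory array.
        throw new RuntimeException(e);
    }
}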
public List<long[]> toAscRanges() {
    List<long[]> ranges = new ArrayList<long[]>();
    for (long key : node2count.keySet()) {
        ranges.add(new long[]{rangeLeft(key), rangeRight(key), node2count.get(key)});
    }
    Collections.sort(ranges, RANGES_COMPARATOR);
    return ranges;
}
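// RANGES_COMPARATOR is referenced above but not shown in this listing. A hedged
// sketch of one plausible definition for the {left, right, count} triples:
// ascending by right endpoint, ties broken by range width (narrower first).
// Illustrative only; not necessarily the library's exact comparator.
private static final Comparator<long[]> RANGES_COMPARATOR = new Comparator<long[]>() {
    @Override
    public int compare(long[] ra, long[] rb) {
        if (ra[1] != rb[1]) {
            return ra[1] < rb[1] ? -1 : 1;
        }
        long widthA = ra[1] - ra[0];
        long widthB = rb[1] - rb[0];
        return widthA == widthB ? 0 : (widthA < widthB ? -1 : 1);
    }
};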
private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // Rebuild to newLogCapacity.
    // This means that our current tree becomes a leftmost subtree
    // of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    // node 1 => 8 (+= 7 = 2^0*(2^3-1))
    // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1))
    // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
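// Standalone check of the offset arithmetic from the comment above, for the
// logCapacity 2 -> 5 example (capacity 4 -> 32, so scaleR = 32/4 - 1 = 7).
// scaleL tracks the largest power of two not exceeding k, i.e. the node's layer.
public static void main(String[] args) {
    long scaleR = 32 / 4 - 1; // 7
    long scaleL = 1;
    for (long k = 1; k <= 7; k++) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        System.out.println(k + " => " + (k + scaleL * scaleR));
        // prints 1 => 8, 2 => 16, 3 => 17, 4 => 32, ..., 7 => 35
    }
}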
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException(
                "Compression factors must be the same: " +
                "left is " + a.compressionFactor + ", " +
                "right is " + b.compressionFactor);
    }
    if (a.capacity > b.capacity) {
        return unionOf(b, a);
    }
    QDigest res = new QDigest(a.compressionFactor);
    res.capacity = a.capacity;
    res.size = a.size + b.size;
    for (long k : a.node2count.keySet()) {
        res.node2count.put(k, a.node2count.get(k));
    }
    if (b.capacity > res.capacity) {
        res.rebuildToCapacity(b.capacity);
    }
    for (long k : b.node2count.keySet()) {
        res.node2count.put(k, b.get(k) + res.get(k));
    }
    res.compressFully();
    return res;
}
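// Hedged usage sketch: merging two digests and querying the result. Assumes
// the public QDigest API offers offer(long) and getQuantile(double), as in
// stream-lib; the compression factor and value ranges are illustrative.
QDigest d1 = new QDigest(32);
QDigest d2 = new QDigest(32);
for (long v = 0; v < 1000; v++) d1.offer(v);
for (long v = 1000; v < 2000; v++) d2.offer(v);
QDigest merged = QDigest.unionOf(d1, d2); // same compressionFactor, so this is legal
long median = merged.getQuantile(0.5);    // roughly 1000, within the digest's error bound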
// Local number for a global number; the map's missing-key marker is -1,
// which is mapped to 0 here.
public long localNumber( final long globalNumber ) {
    long n = localNumber.get( globalNumber );
    return n == -1 ? 0 : n;
}
// 1 if the global number is absent from the local map, 0 otherwise.
public int localIndex( final long globalNumber ) {
    return localNumber.get( globalNumber ) == -1 ? 1 : 0;
}
@Override
public long getLong(final long index) {
    // Remap the underlying value through the map; -1 marks a missing key,
    // in which case the escape value is returned instead.
    final long value = map.get(values.getLong(index));
    return value == -1 ? escape : value;
}
@Override
public LongArrayList recommendInternal(ClickData clickData) {
    // Filter out items that have not received at least one click in the last time frame.
    // First, retrieve the recommendation results of the underlying algorithm.
    LongArrayList rec = mainStrategy.recommendInternal(clickData);
    // Create lists of retained items and filtered-out items.
    LongArrayList filteredRec = new LongArrayList();
    LongArrayList filteredRecNotMatch = new LongArrayList();
    // Iterate over the recommendation list of the underlying strategy.
    for (int j = 0; j < rec.size(); j++) {
        long i = rec.getLong(j);
        // Filter items whose last-clicked timestamp is too old.
        if (itemClickTime.containsKey(i)
                && (clickData.click.timestamp.getTime() - itemClickTime.get(i)) < filterTime) {
            filteredRec.add(i);
        } else if (fallback) {
            // If we have a fallback, add the filtered item to the fallback list.
            filteredRecNotMatch.add(i);
        }
    }
    // Merge the retained list with the fallback list (empty in case fallback == false).
    filteredRec.addAll(filteredRecNotMatch);
    // Return the filtered list.
    return filteredRec;
}
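// The recency test above can be read in isolation. A minimal sketch, assuming
// itemClickTime maps item ids to last-click timestamps in milliseconds and
// filterTime is the window length in milliseconds; the helper name is illustrative.
private boolean clickedRecently(long itemId, long nowMillis) {
    return itemClickTime.containsKey(itemId)
            && nowMillis - itemClickTime.get(itemId) < filterTime;
}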