/**
 * Serializes a digest into a flat byte array.
 *
 * Layout: size (long), compressionFactor (double), capacity (long),
 * entry count (int), then one (nodeId, count) long pair per stored node,
 * in the map's iteration order (unspecified).
 *
 * @param d the digest to serialize
 * @return the serialized byte form
 */
public static byte[] serialize(QDigest d) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream s = new DataOutputStream(bos);
    try {
        s.writeLong(d.size);
        s.writeDouble(d.compressionFactor);
        s.writeLong(d.capacity);
        s.writeInt(d.node2count.size());
        for (long k : d.node2count.keySet()) {
            s.writeLong(k);
            // NOTE(review): this does a second hash lookup per key; iterating
            // entries would avoid it — left unchanged in this doc-only pass.
            s.writeLong(d.node2count.get(k));
        }
        return bos.toByteArray();
    } catch (IOException e) {
        // Should never happen: a DataOutputStream over a
        // ByteArrayOutputStream performs no real I/O.
        throw new RuntimeException(e);
    }
}
/**
 * Returns the digest contents as {left, right, count} triples, one per
 * stored node, sorted by {@code RANGES_COMPARATOR} (ascending ranges).
 *
 * @return a freshly allocated, sorted list of 3-element long arrays
 */
public List<long[]> toAscRanges() {
    List<long[]> ranges = new ArrayList<long[]>();
    for (long key : node2count.keySet()) {
        // Each triple: [rangeLeft, rangeRight, count-at-node].
        ranges.add(new long[]{rangeLeft(key), rangeRight(key), node2count.get(key)});
    }
    Collections.sort(ranges, RANGES_COMPARATOR);
    return ranges;
}
/**
 * Restores the q-digest compression invariant ("property 2") at every
 * stored node by compressing each non-root node downward.
 */
private void compressFully() {
    // Restore property 2 at each node.
    // Snapshot the key set first: compressDownward mutates node2count,
    // so we must not iterate the live key set.
    Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]);
    for (long node : allNodes) {
        // The root node is not compressible: it has no parent and no sibling
        if (!isRoot(node)) {
            compressDownward(node);
        }
    }
}
/**
 * Merges two digests into a new one. Both inputs are left unmodified.
 *
 * The arguments are normalized so the smaller-capacity digest comes
 * first; its tree is copied, grown to the larger capacity if needed,
 * then the second digest's counts are added node-by-node and the result
 * is re-compressed.
 *
 * @param a first digest
 * @param b second digest
 * @return a new digest representing the union of a and b
 * @throws IllegalArgumentException if the compression factors differ
 */
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException(
                "Compression factors must be the same: " +
                        "left is " + a.compressionFactor + ", " +
                        "right is " + b.compressionFactor);
    }
    // Normalize argument order: ensure a.capacity <= b.capacity.
    if (a.capacity > b.capacity) {
        return unionOf(b, a);
    }
    QDigest res = new QDigest(a.compressionFactor);
    res.capacity = a.capacity;
    res.size = a.size + b.size;
    // Copy a's counts verbatim.
    for (long k : a.node2count.keySet()) {
        res.node2count.put(k, a.node2count.get(k));
    }
    // Grow the result tree so b's node ids are addressable in it.
    if (b.capacity > res.capacity) {
        res.rebuildToCapacity(b.capacity);
    }
    // Add b's counts on top; get() presumably returns 0 for absent
    // nodes — TODO confirm against its definition.
    for (long k : b.node2count.keySet()) {
        res.node2count.put(k, b.get(k) + res.get(k));
    }
    res.compressFully();
    return res;
}
/**
 * Rebuilds the tree so it covers {@code newCapacity} values, making the
 * current tree the leftmost subtree of the new, larger tree, then
 * re-compresses.
 *
 * @param newCapacity the new capacity; assumed to be a larger power-of-two
 *                    multiple of the current capacity — TODO confirm callers
 */
private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // rebuild to newLogCapacity.
    // This means that our current tree becomes a leftmost subtree
    // of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    // node 1 => 8 (+= 7 = 2^0*(2^3-1))
    // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1))
    // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    // Sorting lets scaleL track the current layer monotonically:
    // scaleL is the smallest power of two > k/2, i.e. 2^(depth of k).
    Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        // Shift node k right by (its layer width) * (capacity growth - 1).
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
public static byte[] serialize(QDigest d) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream s = new DataOutputStream(bos); try { s.writeLong(d.size); s.writeDouble(d.compressionFactor); s.writeLong(d.capacity); s.writeInt(d.node2count.size()); for (long k : d.node2count.keySet()) { s.writeLong(k); s.writeLong(d.node2count.get(k)); } return bos.toByteArray(); } catch (IOException e) { // Should never happen throw new RuntimeException(e); } }
public static byte[] serialize(QDigest d) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream s = new DataOutputStream(bos); try { s.writeLong(d.size); s.writeDouble(d.compressionFactor); s.writeLong(d.capacity); s.writeInt(d.node2count.size()); for (long k : d.node2count.keySet()) { s.writeLong(k); s.writeLong(d.node2count.get(k)); } return bos.toByteArray(); } catch (IOException e) { // Should never happen throw new RuntimeException(e); } }
/**
 * Dumps all stored nodes as {left, right, count} triples in the order
 * imposed by {@code RANGES_COMPARATOR}.
 *
 * @return a new sorted list of triples
 */
public List<long[]> toAscRanges() {
    List<long[]> result = new ArrayList<long[]>();
    for (long node : node2count.keySet()) {
        result.add(new long[]{rangeLeft(node), rangeRight(node), node2count.get(node)});
    }
    result.sort(RANGES_COMPARATOR);
    return result;
}
/**
 * Converts the digest to a sorted list of range triples.
 *
 * @return {rangeLeft, rangeRight, count} for each node, sorted by
 *         {@code RANGES_COMPARATOR}
 */
public List<long[]> toAscRanges() {
    List<long[]> triples = new ArrayList<long[]>();
    for (long id : node2count.keySet()) {
        long left = rangeLeft(id);
        long right = rangeRight(id);
        long count = node2count.get(id);
        triples.add(new long[]{left, right, count});
    }
    Collections.sort(triples, RANGES_COMPARATOR);
    return triples;
}
private void compressFully() { // Restore property 2 at each node. Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]); for (long node : allNodes) { // The root node is not compressible: it has no parent and no sibling if (!isRoot(node)) { compressDownward(node); } } }
private void compressFully() { // Restore property 2 at each node. Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]); for (long node : allNodes) { // The root node is not compressible: it has no parent and no sibling if (!isRoot(node)) { compressDownward(node); } } }
/**
 * Produces a new digest that is the union of {@code a} and {@code b};
 * neither input is modified.
 *
 * @throws IllegalArgumentException if compression factors differ
 */
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException(
                "Compression factors must be the same: left is "
                        + a.compressionFactor + ", right is " + b.compressionFactor);
    }
    // Put the smaller-capacity digest on the left.
    if (a.capacity > b.capacity) {
        return unionOf(b, a);
    }
    QDigest merged = new QDigest(a.compressionFactor);
    merged.capacity = a.capacity;
    merged.size = a.size + b.size;
    // Seed with a's counts.
    for (long node : a.node2count.keySet()) {
        merged.node2count.put(node, a.node2count.get(node));
    }
    // Grow so b's nodes fit, then accumulate b's counts.
    if (b.capacity > merged.capacity) {
        merged.rebuildToCapacity(b.capacity);
    }
    for (long node : b.node2count.keySet()) {
        merged.node2count.put(node, b.get(node) + merged.get(node));
    }
    merged.compressFully();
    return merged;
}
/**
 * Unions two digests into a freshly allocated result.
 *
 * @throws IllegalArgumentException when the compression factors differ
 */
public static QDigest unionOf(QDigest a, QDigest b) {
    if (a.compressionFactor != b.compressionFactor) {
        throw new IllegalArgumentException("Compression factors must be the same: left is "
                + a.compressionFactor + ", right is " + b.compressionFactor);
    }
    if (a.capacity > b.capacity) {
        // Recurse with arguments swapped so the smaller tree is copied first.
        return unionOf(b, a);
    }
    QDigest u = new QDigest(a.compressionFactor);
    u.capacity = a.capacity;
    u.size = a.size + b.size;
    for (long id : a.node2count.keySet()) {
        u.node2count.put(id, a.node2count.get(id));
    }
    if (b.capacity > u.capacity) {
        u.rebuildToCapacity(b.capacity);
    }
    for (long id : b.node2count.keySet()) {
        u.node2count.put(id, b.get(id) + u.get(id));
    }
    u.compressFully();
    return u;
}
private void rebuildToCapacity(long newCapacity) { Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR); // rebuild to newLogCapacity. // This means that our current tree becomes a leftmost subtree // of the new tree. // E.g. when rebuilding a tree with logCapacity = 2 // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31): // node 1 => 8 (+= 7 = 2^0*(2^3-1)) // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1)) // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1)) // This is easy to see if you draw it on paper. // Process the keys by "layers" in the original tree. long scaleR = newCapacity / capacity - 1; Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]); Arrays.sort(keys); long scaleL = 1; for (long k : keys) { while (scaleL <= k / 2) { scaleL <<= 1; } newNode2count.put(k + scaleL * scaleR, node2count.get(k)); } node2count = newNode2count; capacity = newCapacity; compressFully(); }
private void rebuildToCapacity(long newCapacity) { Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR); // rebuild to newLogCapacity. // This means that our current tree becomes a leftmost subtree // of the new tree. // E.g. when rebuilding a tree with logCapacity = 2 // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31): // node 1 => 8 (+= 7 = 2^0*(2^3-1)) // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1)) // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1)) // This is easy to see if you draw it on paper. // Process the keys by "layers" in the original tree. long scaleR = newCapacity / capacity - 1; Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]); Arrays.sort(keys); long scaleL = 1; for (long k : keys) { while (scaleL <= k / 2) { scaleL <<= 1; } newNode2count.put(k + scaleL * scaleR, node2count.get(k)); } node2count = newNode2count; capacity = newCapacity; compressFully(); }
// Snapshot the keys into a primitive array (m is presumably counts.size()
// — TODO confirm at the declaration of m), then sort descending by count.
// The comparator reads 'counts', so the map must not change mid-sort.
final long[] keysArray = counts.keySet().toLongArray(new long[m]);
LongArrays.quickSort(keysArray, 0, keysArray.length,
        (a, b) -> Long.compare(counts.get(b), counts.get(a)));
// Snapshot the keys into a primitive array, then sort descending by count.
final long[] keysArray = counts.keySet().toArray(new long[m]);
// FIX: use Long.compare rather than Long.signum of a subtraction —
// subtracting two longs can overflow and yield the wrong sign for large
// count differences, producing an incorrectly ordered result. This also
// matches the comparator form used elsewhere in this file.
LongArrays.quickSort(keysArray, 0, keysArray.length,
        (a, b) -> Long.compare(counts.get(b), counts.get(a)));
// Copy the map's keys into a primitive array and sort them in ascending
// natural long order.
final long[] keys = counts.keySet().toLongArray();
Arrays.sort(keys);