@Override
public void seal() {
  sealed = true;
  sortedLongList = new long[rawLongSet.size()];
  rawLongSet.toArray(sortedLongList);
  Arrays.sort(sortedLongList);

  if (sortedLongList.length == 0) {
    min = null;
    max = null;
    return;
  }

  // Update min/max based on raw docs.
  min = sortedLongList[0];
  max = sortedLongList[sortedLongList.length - 1];

  // Merge the raw and aggregated docs, so stats for dictionary creation are collected correctly.
  int numAggregated = aggregatedLongSet.size();
  if (numAggregated > 0) {
    rawLongSet.addAll(aggregatedLongSet);
    sortedLongList = new long[rawLongSet.size()];
    rawLongSet.toArray(sortedLongList);
    Arrays.sort(sortedLongList);
  }
}
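The split here matters: min and max reflect raw docs only, while the final sorted list must also cover aggregated docs so that later dictionary creation sees every value. Below is a minimal, self-contained sketch of the same seal semantics, using plain java.util collections in place of the collector's primitive long sets; the class and driver are hypothetical stand-ins, not the real collector.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical stand-in for the real collector; field names mirror the method above.
final class SealSketch {
  private final Set<Long> rawLongSet = new HashSet<>();
  private final Set<Long> aggregatedLongSet = new HashSet<>();
  private long[] sortedLongList = new long[0];
  private Long min;
  private Long max;

  void collectRaw(long value) {
    rawLongSet.add(value);
  }

  void collectAggregated(long value) {
    aggregatedLongSet.add(value);
  }

  void seal() {
    sortedLongList = rawLongSet.stream().mapToLong(Long::longValue).sorted().toArray();
    if (sortedLongList.length == 0) {
      min = null;
      max = null;
      return;
    }
    // min/max come from raw docs only.
    min = sortedLongList[0];
    max = sortedLongList[sortedLongList.length - 1];
    // The sorted list, however, must cover raw plus aggregated values.
    if (!aggregatedLongSet.isEmpty()) {
      rawLongSet.addAll(aggregatedLongSet);
      sortedLongList = rawLongSet.stream().mapToLong(Long::longValue).sorted().toArray();
    }
  }

  public static void main(String[] args) {
    SealSketch s = new SealSketch();
    s.collectRaw(5);
    s.collectRaw(2);
    s.collectAggregated(9); // aggregated value outside the raw range
    s.seal();
    System.out.println("min=" + s.min + ", max=" + s.max); // min=2, max=5
    System.out.println(Arrays.toString(s.sortedLongList)); // [2, 5, 9]
  }
}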
private void compressFully() {
  // Restore property 2 at each node.
  Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]);
  for (long node : allNodes) {
    // The root node is not compressible: it has no parent and no sibling.
    if (!isRoot(node)) {
      compressDownward(node);
    }
  }
}
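For orientation: q-digest nodes are conventionally numbered like a binary heap (root = 1, children of node i are 2i and 2i + 1), which is what makes isRoot and the parent/sibling lookups used during compression cheap bit arithmetic. A hedged sketch of those helpers under that numbering assumption; the real compressDownward is not reproduced here.

// Assumes heap-style numbering: root = 1, children of node i are 2 * i and 2 * i + 1.
private static boolean isRoot(long node) {
  return node == 1;
}

private static long parent(long node) {
  return node / 2;
}

private static long sibling(long node) {
  // Flipping the low bit moves between the left child (even id) and the
  // right child (odd id). Only meaningful for non-root nodes, hence the
  // isRoot guard in the loop above.
  return node ^ 1;
}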
private void rebuildToCapacity(long newCapacity) {
  Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
  // Rebuild to the new, larger capacity: the current tree becomes the
  // leftmost subtree of the new tree.
  // E.g. when rebuilding a tree with logCapacity = 2 (values in 0..3)
  // to logCapacity = 5 (values in 0..31):
  //   node  1    => 8      (+= 7  = 2^0 * (2^3 - 1))
  //   nodes 2..3 => 16..17 (+= 14 = 2^1 * (2^3 - 1))
  //   nodes 4..7 => 32..35 (+= 28 = 2^2 * (2^3 - 1))
  // This is easy to see if you draw it on paper.
  // Process the keys by "layers" in the original tree.
  long scaleR = newCapacity / capacity - 1;
  Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
  Arrays.sort(keys);
  long scaleL = 1;
  for (long k : keys) {
    while (scaleL <= k / 2) {
      scaleL <<= 1;
    }
    newNode2count.put(k + scaleL * scaleR, node2count.get(k));
  }
  node2count = newNode2count;
  capacity = newCapacity;
  compressFully();
}
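The mapping in the loop can be checked mechanically against the example in the comment: with capacity = 4 and newCapacity = 32, scaleR = 32 / 4 - 1 = 7, and because the keys are processed in sorted order, scaleL ends up as the largest power of two not exceeding the node id (its layer), i.e. Long.highestOneBit(k). A small self-contained check:

public class RemapCheck {
  public static void main(String[] args) {
    long capacity = 4;      // logCapacity = 2, values in 0..3
    long newCapacity = 32;  // logCapacity = 5, values in 0..31
    long scaleR = newCapacity / capacity - 1; // 7
    for (long k = 1; k <= 7; k++) {
      long scaleL = Long.highestOneBit(k); // largest power of two <= k, i.e. the node's layer
      System.out.println(k + " => " + (k + scaleL * scaleR));
    }
    // Prints 1 => 8, 2 => 16, 3 => 17, 4 => 32, 5 => 33, 6 => 34, 7 => 35,
    // matching the worked example in the comment above.
  }
}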
public long[] toArray(long[] a) {
  return _set.toArray(a);
}
public Object[] toArray() {
  return _set.toArray();
}
public <T> T[] toArray(T[] a) {
  return _set.toArray(a);
}
final long[] keysArray = counts.keySet().toArray(new long[m]);
// Sort keys by count, descending. Long.compare avoids the overflow risk of
// comparing via subtraction.
LongArrays.quickSort(keysArray, 0, keysArray.length,
    (a, b) -> Long.compare(counts.get(b), counts.get(a)));
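A usage sketch for the descending sort, assuming counts is a fastutil Long2LongOpenHashMap and m is the number of distinct keys (both assumptions; only the two lines above appear in the source):

import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;
import it.unimi.dsi.fastutil.longs.LongArrays;

public class TopKeysSketch {
  public static void main(String[] args) {
    Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    counts.put(10L, 3L);
    counts.put(20L, 7L);
    counts.put(30L, 5L);

    final int m = counts.size();
    final long[] keysArray = counts.keySet().toArray(new long[m]);
    // Highest-count key first.
    LongArrays.quickSort(keysArray, 0, keysArray.length,
        (a, b) -> Long.compare(counts.get(b), counts.get(a)));
    for (long k : keysArray) {
      System.out.println(k + " -> " + counts.get(k)); // 20 -> 7, 30 -> 5, 10 -> 3
    }
  }
}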
if (size <= 1) {
  return new Coder(new long[0], new int[0], new long[0], Long2IntMaps.EMPTY_MAP);
}
final long[] symbol = new long[size];
frequencies.keySet().toArray(symbol);
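The guard exists because a prefix/Huffman-style coder is meaningless for fewer than two distinct symbols: there is nothing to distinguish, so all codeword tables stay empty. A minimal sketch of the symbol-extraction step, assuming frequencies is a fastutil Long2IntOpenHashMap (an assumption; the Coder class itself is not reproduced here):

import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap;

public class SymbolExtractionSketch {
  public static void main(String[] args) {
    Long2IntOpenHashMap frequencies = new Long2IntOpenHashMap();
    frequencies.put(42L, 3);
    frequencies.put(7L, 1);

    final int size = frequencies.size();
    if (size <= 1) {
      // Zero or one distinct symbol: nothing to encode, so the coder
      // would degenerate to empty codeword tables.
      System.out.println("degenerate case");
      return;
    }
    final long[] symbol = new long[size];
    frequencies.keySet().toArray(symbol);
    System.out.println(symbol.length + " distinct symbols collected");
  }
}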