@Override public void offer(long value) { if (value < 0 || value > Long.MAX_VALUE / 2) { throw new IllegalArgumentException("Can only accept values in the range 0.." + Long.MAX_VALUE / 2 + ", got " + value); } // Rebuild if the value is too large for the current tree height if (value >= capacity) { rebuildToCapacity(Long.highestOneBit(value) << 1); } long leaf = value2leaf(value); node2count.addTo(leaf, 1); size++; // Always compress at the inserted node, and recompress fully // if the tree becomes too large. // This is one sensible strategy which both is fast and keeps // the tree reasonably small (within the theoretical bound of 3k nodes) compressUpward(leaf); if (node2count.size() > 3 * compressionFactor) { compressFully(); } }
/**
 * Restore P2 at node and upward the spine. Note that P2 can vanish
 * at some nodes sideways as a result of this. We'll fix that later
 * in compressFully when needed.
 *
 * <p>Walks from {@code node} toward the root, folding this node and its
 * sibling into their parent as long as the combined count stays within
 * the threshold; stops at the first level where folding would exceed it.
 *
 * @param node id of the node where counts just changed (typically the
 *             freshly incremented leaf)
 */
private void compressUpward(long node) {
    // Folding budget per node: floor(size / compressionFactor), exactly as
    // computed here. NOTE(review): if both operands are integral the division
    // already truncates before Math.floor — confirm intended types.
    double threshold = Math.floor(size / compressionFactor);
    long atNode = get(node);
    while (!isRoot(node)) {
        // Three successively wider checks; each early break avoids the
        // remaining map lookups when the budget is already exceeded.
        if (atNode > threshold) {
            break;
        }
        long atSibling = get(sibling(node));
        if (atNode + atSibling > threshold) {
            break;
        }
        long atParent = get(parent(node));
        if (atNode + atSibling + atParent > threshold) {
            break;
        }
        // Fold node and sibling into the parent, then drop their entries.
        node2count.addTo(parent(node), atNode + atSibling);
        node2count.remove(node);
        if (atSibling > 0) {
            // Only remove the sibling if it actually had an entry.
            node2count.remove(sibling(node));
        }
        // Continue one level up, carrying the parent's new total.
        node = parent(node);
        atNode = atParent + atNode + atSibling;
    }
}
/**
 * Restore P2 at seedNode and guarantee that no new violations of P2 appeared.
 *
 * <p>Breadth-first pass over the subtree rooted near {@code seedNode}: each
 * dequeued node is folded into its parent when the combined count fits the
 * threshold, and since that shrinks the counts below, both children's
 * subtrees are re-queued for checking.
 *
 * @param seedNode node at which counts just decreased
 */
private void compressDownward(long seedNode) {
    double threshold = Math.floor(size / compressionFactor);
    // P2 check same as above but shorter and slower (and invoked rarely)
    LongArrayFIFOQueue q = new LongArrayFIFOQueue();
    q.enqueue(seedNode);
    while (!q.isEmpty()) {
        long node = q.dequeueLong();
        long atNode = get(node);
        long atSibling = get(sibling(node));
        // Both empty: nothing to fold here and nothing below changed.
        if (atNode == 0 && atSibling == 0) {
            continue;
        }
        long atParent = get(parent(node));
        // Folding would exceed the budget; this pair stays as is.
        if (atParent + atNode + atSibling > threshold) {
            continue;
        }
        node2count.addTo(parent(node), atNode + atSibling);
        node2count.remove(node);
        node2count.remove(sibling(node));
        // Now P2 could have vanished at the node's and sibling's subtrees since they decreased.
        if (!isLeaf(node)) {
            q.enqueue(leftChild(node));
            q.enqueue(leftChild(sibling(node)));
        }
    }
}
@Override public void offer(long value) { if (value < 0 || value > Long.MAX_VALUE / 2) { throw new IllegalArgumentException("Can only accept values in the range 0.." + Long.MAX_VALUE / 2 + ", got " + value); } // Rebuild if the value is too large for the current tree height if (value >= capacity) { rebuildToCapacity(Long.highestOneBit(value) << 1); } long leaf = value2leaf(value); node2count.addTo(leaf, 1); size++; // Always compress at the inserted node, and recompress fully // if the tree becomes too large. // This is one sensible strategy which both is fast and keeps // the tree reasonably small (within the theoretical bound of 3k nodes) compressUpward(leaf); if (node2count.size() > 3 * compressionFactor) { compressFully(); } }
@Override public void offer(Long value) { if (value < 0 || value > Long.MAX_VALUE/2) { throw new IllegalArgumentException("Can only accept values in the range 0.." + Long.MAX_VALUE/2 + ", got " + value); } // Rebuild if the value is too large for the current tree height if (value >= capacity) { rebuildToCapacity(Long.highestOneBit(value) << 1); } long leaf = value2leaf(value); node2count.addTo(leaf, 1); size++; // Always compress at the inserted node, and recompress fully // if the tree becomes too large. // This is one sensible strategy which both is fast and keeps // the tree reasonably small (within the theoretical bound of 3k nodes) compressUpward(leaf); if (node2count.size() > 3 * compressionFactor) { compressFully(); } }
/** Adds a triple to this store.
 *
 * @param triple the triple to be added.
 * @param value the associated value.
 */
private void add(final long[] triple, final long value) throws IOException {
    // The top bits of the first component select the on-disk chunk.
    final int chunk = (int)(triple[0] >>> DISK_CHUNKS_SHIFT);
    count[chunk]++;
    checkedForDuplicates = false;
    if (DEBUG) System.err.println("Adding " + Arrays.toString(triple));
    // All three components go to the same chunk's buffer and channel.
    final var buffer = byteBuffer[chunk];
    final var channel = writableByteChannel[chunk];
    for (int i = 0; i < 3; i++) writeLong(triple[i], buffer, channel);
    // Values are stored explicitly only when no hashing mask is in effect.
    if (hashMask == 0) writeLong(value, buffer, channel);
    // Track how many triples pass the (optional) filter.
    if (filteredSize != -1 && (filter == null || filter.evaluate(triple))) filteredSize++;
    if (value2FrequencyMap != null) value2FrequencyMap.addTo(value, 1);
    size++;
}
if (indirect) { frequencies = new Long2LongOpenHashMap(); for(final long v : values) frequencies.addTo(v, 1);
if (indirect) { frequencies = new Long2LongOpenHashMap(); for(final long v : values) frequencies.addTo(v, 1);
/**
 * Restore P2 at node and upward the spine. Note that P2 can vanish
 * at some nodes sideways as a result of this. We'll fix that later
 * in compressFully when needed.
 *
 * <p>Climbs from {@code node} toward the root, merging the node and its
 * sibling into the parent whenever the three counts together stay within
 * the threshold; the climb stops at the first level where merging would
 * overflow the budget.
 *
 * @param node id of the node whose count just changed
 */
private void compressUpward(long node) {
    // Per-node folding budget, exactly as written: floor(size / compressionFactor).
    // NOTE(review): with integral operands the division truncates before
    // Math.floor — confirm the operand types match the intent.
    double threshold = Math.floor(size / compressionFactor);
    long atNode = get(node);
    while (!isRoot(node)) {
        // Widening checks with early breaks to skip needless map lookups.
        if (atNode > threshold) {
            break;
        }
        long atSibling = get(sibling(node));
        if (atNode + atSibling > threshold) {
            break;
        }
        long atParent = get(parent(node));
        if (atNode + atSibling + atParent > threshold) {
            break;
        }
        // Merge node and sibling into the parent and drop their entries.
        node2count.addTo(parent(node), atNode + atSibling);
        node2count.remove(node);
        if (atSibling > 0) {
            // Sibling only has a map entry when its count is non-zero.
            node2count.remove(sibling(node));
        }
        // Move up one level, carrying the parent's updated total.
        node = parent(node);
        atNode = atParent + atNode + atSibling;
    }
}
node2count.addTo(parent(node), atNode + atSibling); node2count.remove(node); if (atSibling > 0) {
/**
 * Restore P2 at seedNode and guarantee that no new violations of P2 appeared.
 *
 * <p>BFS from {@code seedNode}: each visited node is folded into its parent
 * when the combined count fits the threshold; because folding lowers the
 * counts in the subtrees below, both children's subtrees are re-enqueued.
 *
 * @param seedNode node whose count just decreased
 */
private void compressDownward(long seedNode) {
    double threshold = Math.floor(size / compressionFactor);
    // P2 check same as above but shorter and slower (and invoked rarely)
    LongArrayFIFOQueue q = new LongArrayFIFOQueue();
    q.enqueue(seedNode);
    while (!q.isEmpty()) {
        long node = q.dequeueLong();
        long atNode = get(node);
        long atSibling = get(sibling(node));
        // Nothing stored at this pair: no folding possible, subtree unchanged.
        if (atNode == 0 && atSibling == 0) {
            continue;
        }
        long atParent = get(parent(node));
        // Merging would break the budget; leave this pair in place.
        if (atParent + atNode + atSibling > threshold) {
            continue;
        }
        node2count.addTo(parent(node), atNode + atSibling);
        node2count.remove(node);
        node2count.remove(sibling(node));
        // Now P2 could have vanished at the node's and sibling's subtrees since they decreased.
        if (!isLeaf(node)) {
            q.enqueue(leftChild(node));
            q.enqueue(leftChild(sibling(node)));
        }
    }
}
node2count.addTo(parent(node), atNode + atSibling); node2count.remove(node); node2count.remove(sibling(node));