public static byte[] serialize(QDigest d) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream s = new DataOutputStream(bos);
    try {
        s.writeLong(d.size);
        s.writeDouble(d.compressionFactor);
        s.writeLong(d.capacity);
        s.writeInt(d.node2count.size());
        for (long k : d.node2count.keySet()) {
            s.writeLong(k);
            s.writeLong(d.node2count.get(k));
        }
        return bos.toByteArray();
    } catch (IOException e) {
        // Should never happen
        throw new RuntimeException(e);
    }
}
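// Hedged sketch of the matching reader, reconstructed purely from the write order in
// serialize() above. It assumes a QDigest(double compressionFactor) constructor and
// that size, capacity and node2count are reachable from this static context (as they
// are for serialize); the library's actual deserialize method may differ in details.
public static QDigest deserialize(byte[] b) {
    ByteArrayInputStream bis = new ByteArrayInputStream(b);
    DataInputStream s = new DataInputStream(bis);
    try {
        long size = s.readLong();
        double compressionFactor = s.readDouble();
        long capacity = s.readLong();
        int count = s.readInt();
        QDigest d = new QDigest(compressionFactor);
        d.size = size;
        d.capacity = capacity;
        for (int i = 0; i < count; i++) {
            long k = s.readLong();
            long n = s.readLong();
            d.node2count.put(k, n);
        }
        return d;
    } catch (IOException e) {
        // Should never happen with an in-memory stream
        throw new RuntimeException(e);
    }
}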
private void compressFully() {
    // Restore property 2 at each node.
    Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]);
    for (long node : allNodes) {
        // The root node is not compressible: it has no parent and no sibling
        if (!isRoot(node)) {
            compressDownward(node);
        }
    }
}
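// Illustrative sketch, not taken from the source: the node numbering used in these
// snippets (root = 1, children of node i at 2*i and 2*i + 1, as in the rebuild
// example further below) suggests navigation helpers along these lines. The actual
// names and bodies in the class may differ.
private boolean isRoot(long node) {
    return node == 1;
}

private long parent(long node) {
    return node / 2;
}

private long sibling(long node) {
    return (node % 2 == 0) ? (node + 1) : (node - 1);
}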
@Override
public void offer(long value) {
    if (value < 0 || value > Long.MAX_VALUE / 2) {
        throw new IllegalArgumentException(
                "Can only accept values in the range 0.." + Long.MAX_VALUE / 2 + ", got " + value);
    }
    // Rebuild if the value is too large for the current tree height
    if (value >= capacity) {
        rebuildToCapacity(Long.highestOneBit(value) << 1);
    }
    long leaf = value2leaf(value);
    node2count.addTo(leaf, 1);
    size++;
    // Always compress at the inserted node, and recompress fully
    // if the tree becomes too large.
    // This is one sensible strategy which is both fast and keeps
    // the tree reasonably small (within the theoretical bound of 3k nodes)
    compressUpward(leaf);
    if (node2count.size() > 3 * compressionFactor) {
        compressFully();
    }
}
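// Hedged usage sketch: assuming the surrounding class exposes a QDigest(double
// compressionFactor) constructor and a getQuantile(double fraction) query (as in the
// stream-lib QDigest this code resembles), inserting values and reading a quantile
// would look like this.
QDigest digest = new QDigest(100);        // compression factor k
for (long v = 0; v < 10_000; v++) {
    digest.offer(v);                      // accepted range is 0..Long.MAX_VALUE/2
}
long median = digest.getQuantile(0.5);    // approximate 50th percentile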
private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // Rebuild to newLogCapacity.
    // This means that our current tree becomes a leftmost subtree
    // of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    // node 1 => 8 (+= 7 = 2^0*(2^3-1))
    // nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1))
    // nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
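// Standalone verification of the remapping example in the comment above (capacity 4
// rebuilt to 32). The class name is hypothetical and the snippet is not part of the
// library; it just reproduces the arithmetic and prints
// 1 -> 8, 2 -> 16, 3 -> 17, 4 -> 32, 7 -> 35, matching the comment.
public final class RebuildRemapDemo {
    public static void main(String[] args) {
        long capacity = 4;
        long newCapacity = 32;
        long scaleR = newCapacity / capacity - 1;     // 7 = 2^3 - 1
        long scaleL = 1;                              // 2^depth of the current node's layer
        for (long k : new long[] {1, 2, 3, 4, 7}) {   // keys visited in sorted order, as in rebuildToCapacity
            while (scaleL <= k / 2) {
                scaleL <<= 1;
            }
            System.out.println(k + " -> " + (k + scaleL * scaleR));
        }
    }
}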
@Override
public void putAll(Map<? extends Long, ? extends Long> m) {
    if (f <= .5) {
        // The resulting map will be sized for m.size() elements
        ensureCapacity(m.size());
    } else {
        // The resulting map will be tentatively sized for size() + m.size() elements
        tryCapacity(size() + m.size());
    }
    super.putAll(m);
}
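// Usage sketch for the pre-sizing path, assuming a fastutil-style
// Long2LongOpenHashMap(int expected, float loadFactor) constructor: with a load
// factor of at most 0.5, the override resizes once up front for m.size() keys, so
// the bulk copy below does not rehash repeatedly while inserting.
Long2LongOpenHashMap src = new Long2LongOpenHashMap(1 << 16, 0.5f);
for (long i = 0; i < 50_000; i++) {
    src.put(i, i * i);
}
Long2LongOpenHashMap dst = new Long2LongOpenHashMap(16, 0.5f);
dst.putAll(src);                          // single up-front ensureCapacity, then copy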
final int m = counts.size();