/** Orders two nodes by comparing their computed {@link NodeId}s using the node ordering. */
@Override
public int compare(Node left, Node right) {
    NodeId leftId = computeId(left);
    NodeId rightId = computeId(right);
    return getNodeOrdering().compare(leftId, rightId);
}
/**
 * Looks up a single node by id; a convenience wrapper over the batch {@code getNodes}
 * method, exposed for tests.
 */
@VisibleForTesting
Node getNode(NodeId nodeId) {
    return getNodes(Collections.singleton(nodeId)).get(nodeId);
}
/**
 * Returns the leaf bucket index the given node falls into at {@code depth}, derived from
 * the node's computed id.
 */
@Override
public int bucket(Node node, int depth) {
    final NodeId nodeId = computeId(node);
    final TreeId bucketId = computeBucketId(nodeId, depth);
    return bucketId.leafBucket();
}
/**
 * Filling a fresh canonical strategy up to (but not beyond) the root's normalized size
 * limit must produce a single leaf DAG: direct children, no buckets, depth zero.
 */
@Test
public void buildSimpleDAGFromScratch() {
    strategy = canonical.build();
    final int limit = strategy.normalizedSizeLimit(0);
    for (int i = 0; i < limit; i++) {
        strategy.put(featureNode("f", i));
    }
    DAG root = strategy.buildRoot();
    // A leaf DAG holds its nodes directly and has no buckets.
    assertTrue(buckets(root).isEmpty());
    assertFalse(children(root).isEmpty());
    assertEquals(limit, children(root).size());
    assertEquals(0, strategy.depth());
}
// NOTE(review): incomplete fragment — this span starts and ends mid-method (braces are
// unbalanced; `childIds`, `deltaSize`, `changed` and the enclosing put(...) signature are
// not visible here), so the code is left byte-identical. It appears to be the bucket-split
// path of put(): route the node into a child bucket DAG at dagDepth + 1, promote existing
// children into buckets, then update the total child count and shrink on underflow — but
// confirm against the complete source before editing.
mergeRoot(dag); final int normalizedSizeLimit = normalizedSizeLimit(dagDepth); final @Nullable TreeId bucketId = computeBucketId(nodeId, dagDepth + 1); if (bucketId != null) { final DAG bucketDAG = getOrCreateDAG(bucketId); dag.addBucket(bucketId); deltaSize = put(bucketDAG, nodeId, remove); changed = bucketDAG.getState() == STATE.CHANGED; if (bucketDAG.getTotalChildCount() == 0) { ListMultimap<TreeId, NodeId> promotions = ArrayListMultimap.create(); dag.forEachChild((childId) -> { TreeId bucketId = computeBucketId(childId, dagDepth + 1); checkNotNull(bucketId); promotions.put(bucketId, childId); DAG bucketDAG = getOrCreateDAG(bucketId); dag.addBucket(bucketId); for (NodeId childId : childIds) { put(bucketDAG, childId, remove); changed = true; dag.setTotalChildCount(dag.getTotalChildCount() + deltaSize); shrinkIfUnderflow(dag);
// NOTE(review): incomplete test fragment — statements start and end mid-method (the
// enclosing @Test signature and the `nodes`, `initial`, `ref`, `result` locals are not
// visible), so the code is left byte-identical. It appears to verify that re-putting a
// node with the same computed id replaces the stored node while the flattened DAG size
// stays unchanged — confirm against the complete source before editing.
strategy.put(n); Lists.transform(flatten(strategy.buildRoot()), nid -> strategy.getNode(nid))); assertEquals(nodes.size(), initial.size()); NodeId nodeId = strategy.computeId(ref); Node currNode = strategy.getNode(nodeId); strategy.put(ref); Node newNode = strategy.getNode(nodeId); assertFalse(currNode.equals(newNode)); Lists.transform(flatten(strategy.buildRoot()), nid -> strategy.getNode(nid))); assertEquals(nodes.size(), result.size());
// NOTE(review): incomplete test fragment — the loop headers and enclosing @Test method
// signature are missing (`n`, `originalResult`, and the node sources are not visible),
// so the code is left byte-identical. It appears to flatten the DAG before and after a
// second round of puts so the two result sets can be compared — confirm against the
// complete source before editing.
strategy = canonical.build(); final int numNodes = 2 * strategy.normalizedSizeLimit(0); strategy.put(n); Set<Node> edittedResult = new HashSet<>(); DAG root = strategy.buildRoot(); originalResult.addAll(toNode(flatten(root))); strategy.put(n); root = strategy.buildRoot(); edittedResult.addAll(toNode(flatten(root)));
@Test public void bucketDAGShrinksOnRemoveBellowThreshold() { final List<Node> nodes = featureNodes(0, 513, false); final List<Node> removeNodes = nodes.subList(100, 500); final RevTree original; { LegacyTreeBuilder legacyBuilder = new LegacyTreeBuilder(store); for (Node n : nodes) { legacyBuilder.put(n); } original = legacyBuilder.build(); } // original = manuallyCreateBucketsTree(); store.put(original); strategy = canonical.original(original).build(); Stopwatch sw = Stopwatch.createStarted(); for (Node node : removeNodes) { strategy.remove(node); } System.err.printf("Removed %,d nodes in %s\n", removeNodes.size(), sw.stop()); DAG root = strategy.buildRoot(); assertFalse(children(root).isEmpty()); assertTrue(buckets(root).isEmpty()); // assertEquals(1, strategy.depth()); List<NodeId> flattenedNodes = flatten(root); assertEquals(nodes.size() - removeNodes.size(), flattenedNodes.size()); assertTrue(buckets(root).isEmpty()); assertFalse(children(root).isEmpty()); assertEquals(nodes.size() - removeNodes.size(), children(root).size()); }
/**
 * Collects, depth-first, all child node ids reachable from {@code dag} and resets every
 * traversed bucket DAG to the empty tree, so the caller can re-attach the collected nodes
 * as direct children.
 * <p>
 * To be called by {@link #shrinkIfUnderflow(DAG)}. NOTE(review): the previous link named a
 * {@code shrinkIfUnderflow(DAG, NodeId, int)} overload that does not match the visible
 * single-argument signature; corrected here.
 */
private Set<NodeId> getChildrenRecursiveAndClearBuckets(final DAG dag) {
    Set<NodeId> children = new HashSet<>();
    dag.forEachChild((id) -> children.add(id));
    if (!children.isEmpty()) {
        // Leaf DAG: its direct children are the whole answer.
        return children;
    }
    final List<TreeId> bucketIds = dag.bucketList();
    for (TreeId bucketId : bucketIds) {
        DAG bucket = getOrCreateDAG(bucketId);
        if (bucket.getState() == STATE.INITIALIZED) {
            // Pull in persisted state before traversing this bucket.
            mergeRoot(bucket);
        }
        Set<NodeId> bucketChildren = getChildrenRecursiveAndClearBuckets(bucket);
        int pre = children.size();
        children.addAll(bucketChildren);
        int post = children.size();
        // Buckets must not share node ids; an overlap here would silently lose nodes.
        Preconditions.checkState(pre + bucketChildren.size() == post);
        bucket.reset(RevTree.EMPTY_TREE_ID);
    }
    return children;
}
// NOTE(review): incomplete fragment — the `if` block opened here is never closed within
// this span and the enclosing method signature is not visible, so the code is left
// byte-identical. It appears to mirror the original RevTree's buckets into child DAGs of
// `root` (preloading buckets first) when the original tree has nodes — confirm against
// the complete source before editing.
final RevTree original = getOriginalTree(root.originalTreeId()); final Map<NodeId, DAGNode> origNodes = lazyNodes(original); if (!origNodes.isEmpty()) { preloadBuckets(original); original.forEachBucket(bucket -> { TreeId dagBucketId = root.getId().newChild(bucket.getIndex()); ObjectId bucketId = bucket.getObjectId(); getOrCreateDAG(dagBucketId, bucketId); root.addBucket(dagBucketId); });
// NOTE(review): incomplete fragment — `buckets` and `dagBucketId` are not defined within
// this span and the braces are unbalanced, so the code is left byte-identical. It appears
// to be a variant of bucket-mirroring logic iterating a bucket map by index rather than
// via forEachBucket — confirm against the complete source before editing.
final RevTree original = getOriginalTree(root.originalTreeId()); final Map<NodeId, DAGNode> origNodes = lazyNodes(original); if (!origNodes.isEmpty()) { preload(buckets.values()); for (Entry<Integer, Bucket> e : buckets.entrySet()) { Integer bucketIndex = e.getKey(); ObjectId bucketId = e.getValue().getObjectId(); DAG dag = getOrCreateDAG(dagBucketId, bucketId); root.addBucket(dagBucketId);
int depth(DAG root) { if (0 == root.numBuckets()) { return 0; } final AtomicInteger maxDepth = new AtomicInteger();// cause an int can't be used from inside // the lambda root.forEachBucket((bucketId) -> { DAG bucket = getOrCreateDAG(bucketId); int bucketDepth = depth(bucket); maxDepth.set(Math.max(maxDepth.get(), bucketDepth)); }); return 1 + maxDepth.get(); }
/**
 * Replaces {@code oldNode} by {@code newNode}
 * <p>
 * This default implementation just calls {@link #remove(Node) remove(oldNode)} and then
 * {@link #put(Node) put(newNode)}. Subclasses are encouraged to override with optimized
 * versions whenever possible.
 *
 * @param oldNode the node to replace; must have the same name as {@code newNode}
 * @param newNode the replacement node
 * @return {@code 0} if the operation resulted in no change, {@code 1} if the node was
 *         inserted/updated, {@code -1} if the node was deleted
 */
public int update(Node oldNode, Node newNode) {
    // An update only makes sense between nodes addressing the same name.
    Preconditions.checkArgument(oldNode.getName().equals(newNode.getName()));
    if (remove(oldNode)) {
        // Re-insert only when the old node was actually removed.
        return put(newNode);
    }
    return 0;
}
/**
 * Builds the bucket path ({@link TreeId}) a node falls into, {@code childDepth} levels
 * deep.
 * <p>
 * Walks depth by depth asking {@link #bucket} for the bucket index at each level. A
 * {@code -1} answer means the node cannot be promoted past that depth; in that case the
 * remainder of the path is filled with the "unpromotable" marker bucket followed by
 * canonical bucket indices. NOTE(review): the canonical fill loop restarts its depth
 * argument at {@code 0} rather than continuing from the absolute depth — presumably the
 * canonical sub-tree is rooted at the unpromotable bucket; confirm this is intended.
 */
TreeId computeBucketId(final NodeId nodeId, final int childDepth) {
    byte[] treeId = new byte[childDepth];
    int unpromotableDepthIndex = -1;
    for (int depthIndex = 0; depthIndex < childDepth; depthIndex++) {
        int bucketIndex = bucket(nodeId, depthIndex);
        if (bucketIndex == -1) {
            // Node can't be routed at this depth; switch to the unpromotable scheme below.
            unpromotableDepthIndex = depthIndex;
            break;
        }
        treeId[depthIndex] = (byte) bucketIndex;
    }
    if (unpromotableDepthIndex > -1) {
        final int extraBucketIndex = unpromotableBucketIndex(unpromotableDepthIndex);
        treeId[unpromotableDepthIndex] = (byte) extraBucketIndex;
        unpromotableDepthIndex++;
        final int missingDepthCount = childDepth - unpromotableDepthIndex;
        for (int i = 0; i < missingDepthCount; i++, unpromotableDepthIndex++) {
            int bucketIndex = canonicalBucket(nodeId, i);
            treeId[unpromotableDepthIndex] = (byte) bucketIndex;
        }
    }
    return new TreeId(treeId);
}
/**
 * Inserts, updates, or removes a node from the tree being built; a node whose object id
 * is null is treated as a removal.
 *
 * @param node the node to insert/update, or to remove when its object id is null
 *        (NOTE(review): the original javadoc had an empty {@code @param} tag)
 * @return {@code 0} if the operation resulted in no change, {@code 1} if the node was
 *         inserted/updated, {@code -1} if the node was deleted
 */
public int put(final Node node) {
    @Nullable
    final NodeId nodeId = computeId(node);
    if (null == nodeId) {
        return 0;
    }
    // nodeId can be null if it's not to be added to the tree at all (e.g. a non spatial
    // feature in a spatial index)
    boolean remove = node.getObjectId().isNull();
    int delta;
    writeLock.lock();
    try {
        delta = put(root, nodeId, remove);
        // Trim the DAG cache after each mutation to bound memory use.
        dagCache.prune();
    } finally {
        writeLock.unlock();
    }
    if (!remove) {
        storageProvider.saveNode(nodeId, node);
    }
    return delta;
}
/**
 * Editing every feature of a pre-existing bucketed RevTree (changing only the object ids)
 * must yield exactly the edited node set when the resulting DAG is flattened.
 */
@Test
public void nodeReplacedOnEditsWithBaseRevTree() {
    final RevTree origTree = manuallyCreateBucketsTree();
    store.put(origTree);

    final Set<Node> original = new HashSet<>();
    final Set<Node> edited = new HashSet<>();
    {
        // Collect every feature node of the original tree.
        Iterator<NodeRef> it = new DepthTreeIterator("", ObjectId.NULL, origTree, store,
                Strategy.RECURSIVE_FEATURES_ONLY);
        while (it.hasNext()) {
            original.add(it.next().getNode());
        }
        // Derive an "edited" twin of each node, differing only in its object id.
        for (Node n : original) {
            ObjectId oid = RevObjectTestSupport.hashString(n.toString());
            Node edit = Node.create(n.getName(), oid, ObjectId.NULL, TYPE.FEATURE,
                    n.bounds().orNull());
            edited.add(edit);
        }
        assertFalse(original.equals(edited));
    }

    strategy = canonical.original(origTree).build();
    for (Node n : edited) {
        strategy.put(n);
    }

    DAG root = strategy.buildRoot();
    Set<Node> edittedResult = new HashSet<>(toNode(flatten(root)));
    assertEquals(edited.size(), edittedResult.size());
    assertEquals(edited, edittedResult);
}
/**
 * Inserts {@code node}, rejecting anything that is not a feature node, then delegates to
 * the superclass.
 *
 * @throws IllegalArgumentException if {@code node} is not of {@code TYPE.FEATURE}
 */
@Override
public int put(final Node node) {
    final boolean isFeature = TYPE.FEATURE == node.getType();
    Preconditions.checkArgument(isFeature, "Can't add non feature nodes to quad-tree: %s",
            node);
    return super.put(node);
}
/**
 * Shorthand for {@link #getOrCreateDAG(TreeId, ObjectId)} with an empty original tree id.
 *
 * @see #getOrCreateDAG(TreeId, ObjectId)
 */
DAG getOrCreateDAG(TreeId treeId) {
    // A DAG with no persisted counterpart starts from the empty tree.
    return getOrCreateDAG(treeId, RevTree.EMPTY_TREE_ID);
}
private void shrinkIfUnderflow(final DAG dag) { if (dag.numBuckets() == 0) { return; } final long childCount = dag.getTotalChildCount(); // TODO: in the case of quadtrees would need to check if it's an unpromotables bucket and // use canonical's normalized size limit instead? final int depth = dag.getId().depthLength(); final int normalizedSizeLimit = normalizedSizeLimit(depth); if (childCount > normalizedSizeLimit) { return; } Set<NodeId> childrenRecursive = getChildrenRecursiveAndClearBuckets(dag); checkState(childrenRecursive.size() == childCount, "expected %s, got %s, at: %s", childCount, childrenRecursive.size(), dag); dag.clearBuckets(); childrenRecursive.forEach((id) -> dag.addChild(id)); }