private WeightedChoice<N> getWeightedChoiceForDistance( N source, Graph<N> graph, Distance<N> distance) { Map<N, Double> nodeWeights = new HashMap<>(); Set<N> successors = graph.successors(source); for (N node : graph.nodes()) { // don't include the source or its successors if (!node.equals(source) && !successors.contains(node)) { nodeWeights.put( node, Math.pow(distance.getDistance(source, node).doubleValue(), -clusteringExponent)); } } Preconditions.checkState( nodeWeights.size() >= connectionCount, "number of possible targets (%s) must be greater than connection count (%s)", nodeWeights.size(), connectionCount); WeightedChoice<N> weightedChoice = new WeightedChoice<>(nodeWeights, random); return weightedChoice; } }
// NOTE(review): clipped fragment -- two enqueueItem calls, apparently from WeightedChoice's
// bucket-splitting setup; the enclosing loop and the locals (value, heavy, new_weight) are
// not visible here, so this cannot be verified in isolation. TODO: review the full method.
enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights);
private List<N> generateAdjacentNodes(int edgesToAdd) { Preconditions.checkArgument(edgesToAdd >= 1); WeightedChoice<N> nodeChooser = buildNodeProbabilities(); List<N> adjacentNodes = new ArrayList<N>(edgesToAdd); while (adjacentNodes.size() < edgesToAdd) { N attach_point = nodeChooser.nextItem(); // if parallel edges are not allowed, skip this node if already present if (!graph.allowsParallelEdges() && adjacentNodes.contains(attach_point)) { continue; } adjacentNodes.add(attach_point); } return adjacentNodes; }
// NOTE(review): clipped fragment -- draws a weighted-random target and adds an edge from
// 'source' to it. Uses edge_factory.get() where sibling fragments use edge_factory.create();
// presumably a different factory interface (e.g. a Supplier) -- TODO confirm it's intentional.
weighted_choice = new WeightedChoice<V>(vertex_weights, random); V target = weighted_choice.nextItem(); graph.addEdge(edge_factory.get(), source, target);
// NOTE(review): clipped fragment -- draws a weighted-random target and adds a
// factory-created edge from 'source' to it; the enclosing method and the declarations of
// weighted_choice/vertex_weights/source are outside this view. TODO: review full method.
weighted_choice = new WeightedChoice<V>(vertex_weights, random); V target = weighted_choice.nextItem(); graph.addEdge(edge_factory.create(), source, target);
// NOTE(review): clipped fragment, identical to the one above -- draws a weighted-random
// target and adds a factory-created edge from 'source' to it; surrounding context is not
// visible, so intent is inferred. TODO: check whether this duplication is deliberate.
weighted_choice = new WeightedChoice<V>(vertex_weights, random); V target = weighted_choice.nextItem(); graph.addEdge(edge_factory.create(), source, target);
// NOTE(review): clipped fragment -- normalizes each item's weight by 'sum' and enqueues it
// into light/heavy buckets; the second enqueueItem call references locals (heavy, new_weight)
// declared outside this view, so the loop body is incomplete here. TODO: review full method.
for (Map.Entry<T, ? extends Number> entry : item_weights.entrySet()) { double value = entry.getValue().doubleValue() / sum; enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights);
public void addSmallWorldConnections( MutableNetwork<N, E> graph, Distance<N> distance, Supplier<E> edgeFactory) { // verify that it's actually possible to give each node 'connectionCount' new incident edges // without creating parallel edges or self-loops (both are disallowed) Preconditions.checkArgument(graph.nodes().size() - 5 >= connectionCount); // TODO: For toroidal graphs, we can make this more clever by pre-creating the WeightedChoice object // and using the output as an offset to the current node location. for (N node : graph.nodes()) { // TODO: come up with a better random selection mechanism. // in this case we want selection without replacement, which is not what WeightedChoice does; // otherwise we can keep selecting the same target over and over again, which is inefficient. WeightedChoice<N> weightedChoice = getWeightedChoiceForDistance(node, graph.asGraph(), distance); Set<N> targets = new HashSet<>(); while (targets.size() < connectionCount) { // the item returned is guaranteed by getWeightedChoiceForDistance() to not be equal to node // or any of its successors; we may try to add the same node to targets more than once // (see the note above re: selection w/o replacement) but the Set semantics disallows duplicates targets.add(weightedChoice.nextItem()); } for (N target : targets) { graph.addEdge(node, target, edgeFactory.get()); } } }
private WeightedChoice<N> buildNodeProbabilities() { Map<N, Double> item_weights = new HashMap<N, Double>(); for (N v : graph.nodes()) { double degree; double denominator; // Attachment probability is dependent on whether the graph is // directed or undirected. if (graph.isDirected()) { degree = graph.inDegree(v); denominator = graph.edges().size() + graph.nodes().size(); } else { degree = graph.degree(v); denominator = (2 * graph.edges().size()) + graph.nodes().size(); } double prob = (degree + 1) / denominator; item_weights.put(v, prob); } WeightedChoice<N> nodeProbabilities = new WeightedChoice<N>(item_weights, mRandom); return nodeProbabilities; }
// NOTE(review): clipped and apparently garbled fragment -- the bare
// "- (bucket_weight - light_weight);" is not valid Java on its own; it looks like the right-hand
// side of a new_weight assignment lost during extraction. TODO: restore from the original source.
for (Map.Entry<T, ? extends Number> entry : item_weights.entrySet()) { double value = entry.getValue().doubleValue() / sum; enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); - (bucket_weight - light_weight); if (new_weight > threshold) { enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights);
/**
 * Selects a weighted-random attachment point for {@code newVertex} and records the
 * resulting endpoint pair in {@code added_pairs}. When the graph is not a MultiGraph,
 * attachment points whose pair (in either orientation, for undirected graphs) was already
 * recorded are rejected and redrawn. For undirected graphs the reversed pair is recorded too.
 *
 * @param preexistingNodes the nodes that existed before {@code newVertex} was added
 * @param newVertex the vertex being attached
 * @param added_pairs the endpoint pairs already chosen for this vertex (mutated in place)
 * @param weightedProbabilities the attachment-point selector
 */
private void createRandomEdge(
    Collection<V> preexistingNodes,
    V newVertex,
    Set<Pair<V>> added_pairs,
    WeightedChoice<V> weightedProbabilities) {
  boolean undirected = mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED;
  boolean rejectDuplicates = !(mGraph instanceof MultiGraph);
  V attachPoint;
  Pair<V> candidate;
  while (true) {
    attachPoint = weightedProbabilities.nextItem();
    candidate = new Pair<V>(newVertex, attachPoint);
    /*
     * If parallel edges are not allowed, skip this attachment point when
     * <newVertex, attachPoint> already exists; because of the way the new node's
     * edges are added, only the candidate-pair set needs to be checked.
     */
    if (rejectDuplicates) {
      if (added_pairs.contains(candidate)) {
        continue;
      }
      if (undirected && added_pairs.contains(new Pair<V>(attachPoint, newVertex))) {
        continue;
      }
    }
    break;
  }
  added_pairs.add(candidate);
  if (undirected) {
    added_pairs.add(new Pair<V>(attachPoint, newVertex));
  }
}
// NOTE(review): clipped fragment -- the tail of a preferential-attachment weight builder
// (cf. buildNodeProbabilities elsewhere in this file); 'v', 'prob', and 'item_weights' are
// declared outside this view, so the fragment cannot be verified in isolation.
item_weights.put(v, prob); WeightedChoice<V> nodeProbabilities = new WeightedChoice<V>(item_weights, mRandom);
// NOTE(review): clipped fragment, identical to an earlier one -- two enqueueItem calls from
// WeightedChoice's bucket setup; the enclosing loop and the locals (value, heavy, new_weight)
// are not visible here. TODO: check whether this duplication is an extraction artifact.
enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights);