/**
 * Performs a difference on the vertex and edge sets of the input graphs:
 * common vertices and edges are removed. If a source/target vertex is
 * removed, its incident edges are removed as well.
 *
 * @param graph the graph to perform difference with
 * @return a new graph where the common vertices and edges have been removed
 */
public Graph<K, VV, EV> difference(Graph<K, VV, EV> graph) {
    // Removing the other graph's vertices also removes their incident edges.
    return this.removeVertices(graph.getVertices());
}
/**
 * Reverses the direction of this Edge.
 *
 * @return a new Edge whose source is the original Edge's target and whose
 *         target is the original Edge's source; the edge value is unchanged.
 */
public Edge<K, V> reverse() {
    K reversedSource = this.f1;
    K reversedTarget = this.f0;
    return new Edge<>(reversedSource, reversedTarget, this.f2);
}
// Emits the vertex id in the reusable output tuple (object reuse:
// outTuple is mutated in place rather than reallocated per record).
public Tuple2<K, Either<NullValue, Message>> map(Vertex<K, VV> vertex) {
    K vertexId = vertex.getId();
    outTuple.f0 = vertexId;
    return outTuple;
}
}
@Override public void sendMessages(Vertex<K, Double> vertex) { if (getSuperstepNumber() == 1) { // initialize vertex ranks vertex.setValue(1.0 / this.getNumberOfVertices()); } for (Edge<K, Double> edge : getEdges()) { sendMessageTo(edge.getTarget(), vertex.getValue() * edge.getValue()); } } }
@Override
public ChecksumHashCode<K, VV, EV> run(Graph<K, VV, EV> input) throws Exception {
    super.run(input);

    // Checksum the vertex and edge data sets with two independent instances.
    vertexChecksum = new org.apache.flink.graph.asm.dataset.ChecksumHashCode<>();
    edgeChecksum = new org.apache.flink.graph.asm.dataset.ChecksumHashCode<>();

    vertexChecksum.run(input.getVertices());
    edgeChecksum.run(input.getEdges());

    return this;
}
// Populates the projection tuple: the connecting vertex contributes the
// id (f0) and its value (f1); the remaining fields carry the values of the
// two endpoint vertices (f2, f3) and the two joining edges (f4, f5).
public Projection(
    Vertex<KC, VVC> connectingVertex,
    VV sourceVertexValue,
    VV targetVertexValue,
    EV sourceEdgeValue,
    EV targetEdgeValue) {
    this.f0 = connectingVertex.getId();
    this.f1 = connectingVertex.getValue();
    this.f2 = sourceVertexValue;
    this.f3 = targetVertexValue;
    this.f4 = sourceEdgeValue;
    this.f5 = targetEdgeValue;
}
// Builds the input graph either from the CSV files given on the command
// line or from the bundled IncrementalSSSPData example data set.
private static Graph<Long, Double, Double> getGraph(ExecutionEnvironment env) {
    if (!fileOutput) {
        // No files supplied: fall back to the default example data.
        return Graph.fromDataSet(
                IncrementalSSSPData.getDefaultVertexDataSet(env),
                IncrementalSSSPData.getDefaultEdgeDataSet(env),
                env);
    }
    return Graph.fromCsvReader(verticesInputPath, edgesInputPath, env)
            .lineDelimiterEdges("\n")
            .types(Long.class, Double.class, Double.class);
}
// Builds the SSSP graph either from the CSV files given on the command
// line or from the bundled IncrementalSSSPData example data set.
private static Graph<Long, Double, Double> getSSSPGraph(ExecutionEnvironment env) {
    if (!fileOutput) {
        // No files supplied: fall back to the default example data.
        return Graph.fromDataSet(
                IncrementalSSSPData.getDefaultVertexDataSet(env),
                IncrementalSSSPData.getDefaultEdgesInSSSP(env),
                env);
    }
    return Graph.fromCsvReader(verticesInputPath, edgesInSSSPInputPath, env)
            .lineDelimiterEdges("\n")
            .types(Long.class, Double.class, Double.class);
}
@Override
public void join(
        Tuple4<K, K, VV, EV> tripletWithSrcValSet,
        Vertex<K, VV> vertex,
        Collector<Triplet<K, VV, EV>> collector) throws Exception {
    // Complete the triplet by filling in the joined vertex's value as the
    // target-vertex value; all other fields come from the partial tuple.
    Triplet<K, VV, EV> triplet = new Triplet<>(
            tripletWithSrcValSet.f0,
            tripletWithSrcValSet.f1,
            tripletWithSrcValSet.f2,
            vertex.getValue(),
            tripletWithSrcValSet.f3);
    collector.collect(triplet);
}
}
@Override
public Long map(Vertex<K, Tuple2<Long, Double>> vertex) throws Exception {
    // Extract the first (Long) field of the vertex's tuple value.
    Tuple2<Long, Double> vertexValue = vertex.getValue();
    return vertexValue.f0;
}
}
@Override
public GraphAnalytic<K, VV, EV, T> run(Graph<K, VV, EV> input) throws Exception {
    // Capture the graph's execution environment for later use; the actual
    // analytic is presumably attached by subclasses overriding this method.
    env = input.getContext();
    return this;
}
/**
 * Runs the given {@link GraphAlgorithm} on this graph.
 *
 * @param algorithm the algorithm to run on the Graph
 * @param <T> the return type of the algorithm
 * @return the result of the graph algorithm
 * @throws Exception if the algorithm fails while running on this graph
 */
public <T> T run(GraphAlgorithm<K, VV, EV, T> algorithm) throws Exception {
    return algorithm.run(this);
}
@Override
public Tuple2<K, EV> reduce(Tuple2<K, EV> first, Tuple2<K, EV> second) throws Exception {
    // Combine the two edge values with the user-supplied reduce function,
    // reusing the first tuple as the output object.
    EV combined = function.reduceEdges(first.f1, second.f1);
    first.f1 = combined;
    return first;
}
}
/**
 * Creates a graph from a DataSet of vertices and a DataSet of edges.
 *
 * @param vertices a DataSet of vertices.
 * @param edges a DataSet of edges.
 * @param context the flink execution environment.
 * @return the newly created graph.
 */
public static <K, VV, EV> Graph<K, VV, EV> fromDataSet(
        DataSet<Vertex<K, VV>> vertices,
        DataSet<Edge<K, EV>> edges,
        ExecutionEnvironment context) {
    Graph<K, VV, EV> graph = new Graph<>(vertices, edges, context);
    return graph;
}
// Reverses the edge direction into the reusable output instance
// (object reuse avoids a per-record allocation).
public Edge<K, EV> map(Edge<K, EV> edge) {
    output.f0 = edge.f1;
    output.f1 = edge.f0;
    output.f2 = edge.f2;
    return output;
}
}
@Override
public Tuple2<K, VV> reduce(Tuple2<K, VV> first, Tuple2<K, VV> second) throws Exception {
    // Combine the two neighbor values with the user-supplied reduce
    // function, reusing the first tuple as the output object.
    VV combined = function.reduceNeighbors(first.f1, second.f1);
    first.f1 = combined;
    return first;
}
}
@Override
public boolean filter(Edge<Long, Double> edge) throws Exception {
    // Keep only edges equal to the one marked for removal; the enclosing
    // count() > 0 then tells whether that edge exists in the data set.
    boolean isEdgeToRemove = edge.equals(edgeToBeRemoved);
    return isEdgeToRemove;
}
}).count() > 0;
@Override
public Edge<Long, Double> map(Tuple2<Long, Long> tuple2) throws Exception {
    // Promote the (source, target) id pair to an edge with weight 0.0.
    Long source = tuple2.f0;
    Long target = tuple2.f1;
    return new Edge<>(source, target, 0.0);
}
});