@Override
public GraphCollection execute(GraphCollection collection) {
  // Rebuild the collection with graph heads deduplicated by id; the vertex
  // and edge data sets are passed through unchanged.
  return collection.getConfig().getGraphCollectionFactory().fromDataSets(
    collection.getGraphHeads().distinct(new Id<>()),
    collection.getVertices(),
    collection.getEdges());
}
/**
 * {@inheritDoc}
 * <p>
 * Graph heads will be disposed at the moment. The following issue attempts to provide
 * alternatives to keep graph heads: https://github.com/dbs-leipzig/gradoop/issues/974
 */
@Override
public LogicalGraph getLogicalGraph() {
  // Combine all graphs of the collection into a single logical graph.
  GraphCollection collection = getGraphCollection();
  return collection.reduce(new ReduceCombination());
}
@Override
public GraphCollection execute(GraphCollection collection) {
  config = collection.getConfig();

  // Round-trip through the transactional representation: run the
  // transaction-based implementation and rebuild a collection from its output.
  DataSet<GraphTransaction> transformed = execute(collection.getGraphTransactions());

  return config.getGraphCollectionFactory().fromTransactions(transformed);
}
/**
 * Combines all graphs of the given collection into a single logical graph by
 * taking the union of their vertex and edge sets.
 *
 * @param collection input collection
 * @return logical graph containing all vertices and edges of the collection
 */
@Override
public LogicalGraph execute(GraphCollection collection) {
  return collection
    .getConfig()
    .getLogicalGraphFactory()
    .fromDataSets(collection.getVertices(), collection.getEdges());
}
/**
 * Creates the meta data tuples for all elements of the given graph collection.
 *
 * @param graphs graph collection
 * @return meta data tuples for its vertices, edges and graph heads
 */
default DataSet<Tuple3<String, String, String>> tuplesFromCollection(GraphCollection graphs) {
  DataSet<Tuple3<String, String, String>> vertexTuples = tuplesFromElements(graphs.getVertices());
  DataSet<Tuple3<String, String, String>> edgeTuples = tuplesFromElements(graphs.getEdges());
  DataSet<Tuple3<String, String, String>> headTuples = tuplesFromElements(graphs.getGraphHeads());
  return vertexTuples.union(edgeTuples).union(headTuples);
}
List<GradoopId> expectedEdgeIds = Lists.newArrayList(); inputCollection.getGraphHeads().map(new Id<>()).output( new LocalCollectionOutputFormat<>(expectedGraphHeadIds)); inputCollection.getVertices().map(new Id<>()).output( new LocalCollectionOutputFormat<>(expectedVertexIds)); inputCollection.getEdges().map(new Id<>()).output( new LocalCollectionOutputFormat<>(expectedEdgeIds)); .apply(new ApplyTransformation( TransformationTest::transformGraphHead, TransformationTest::transformVertex, List<GradoopId> resultEdgeIds = Lists.newArrayList(); outputCollection.getGraphHeads().map(new Id<>()).output( new LocalCollectionOutputFormat<>(resultGraphHeadIds)); outputCollection.getVertices().map(new Id<>()).output( new LocalCollectionOutputFormat<>(resultVertexIds)); outputCollection.getEdges().map(new Id<>()).output( new LocalCollectionOutputFormat<>(resultEdgeIds));
@Override
protected DataSet<Vertex> computeNewVertices(DataSet<GraphHead> newSubgraphs) {
  // ids of all graphs contained in the second collection
  DataSet<GradoopId> graphIds = secondCollection.getGraphHeads()
    .map(new Id<GraphHead>());

  // keep only those vertices of the first collection that are contained
  // in at least one graph of the second collection
  return firstCollection.getVertices()
    .filter(new InAnyGraphBroadcast<Vertex>())
    .withBroadcastSet(graphIds, GraphsContainmentFilterBroadcast.GRAPH_IDS);
}
}
/**
 * Test SumProperty, SumVertexProperty and SumEdgeProperty with a graph collection
 * and unsupported property types.
 */
@Test
public void testCollectionUnsupportedPropertyValueType() {
  FlinkAsciiGraphLoader loader = getLoaderFromString(
    "g[({p : 0})-[{p : 0.0}]->({p : true})-[{p : \"\"}]->({})]");

  GraphCollection collection = loader.getGraphCollectionByVariables("g");

  try {
    collection
      .apply(new ApplyAggregation(new SumVertexProperty(PROPERTY, VERTEX_AGGREGATE_PROPERTY)))
      .apply(new ApplyAggregation(new SumEdgeProperty(PROPERTY, EDGE_AGGREGATE_PROPERTY)))
      .apply(new ApplyAggregation(new SumProperty(PROPERTY, ELEMENT_AGGREGATE_PROPERTY)))
      .getGraphHeads().print();
    // Fix: without this the test silently passed when no exception was thrown.
    // AssertionError is an Error, so it is not swallowed by the catch below.
    org.junit.Assert.fail("Expected an UnsupportedTypeException to be thrown.");
  } catch (Exception e) {
    assertTrue(
      e instanceof JobExecutionException && e.getCause() instanceof UnsupportedTypeException);
  }
}
/**
 * {@inheritDoc}
 */
@Override
public DataSet<Boolean> isEmpty() {
  // Decides emptiness without a count():
  // - map every graph head to "true" and deduplicate, yielding at most one
  //   "true" element (present iff there is at least one graph head);
  // - union with a constant "false" so the following reduce always has input,
  //   even when the collection contains no graph heads at all;
  // - OR-reduce: the result is "true" iff at least one graph head exists;
  // - negate, since this method reports emptiness, not non-emptiness.
  return getGraphHeads()
    .map(new True<>())
    .distinct()
    .union(getConfig().getExecutionEnvironment().fromElements(false))
    .reduce(new Or())
    .map(new Not());
}
/**
 * {@inheritDoc}
 */
@Override
protected DataSet<GraphHead> computeNewGraphHeads() {
  DataSet<GraphHead> allHeads = firstCollection.getGraphHeads()
    .union(secondCollection.getGraphHeads());
  // the same graph may be contained in both collections; keep each head once
  return allHeads.distinct(new Id<GraphHead>());
}
@Test
public void testInBound() throws Exception {
  int limit = 2;
  FlinkAsciiGraphLoader loader = getSocialNetworkLoader();

  GraphCollection input = loader.getGraphCollectionByVariables("g0", "g1", "g2", "g3");
  GraphCollection output = input.limit(limit);

  // the limited collection must contain exactly "limit" graphs
  assertEquals(limit, output.getGraphHeads().count());
}
/**
 * Tests that applying {@code ApplySubgraph} with an edge predicate does not
 * keep vertices that are no longer incident to any retained edge. Only the
 * edges with {@code difference == 0} survive (a-b, a-c, b-c), so exactly the
 * three vertices a, b and c must remain.
 *
 * @throws Exception if the Flink execution fails
 */
@Test
public void testKeepOnlyRelevantVertices() throws Exception {
  FlinkAsciiGraphLoader loader = getLoaderFromString("source:G {source : \"graph\"}[" +
    " (a:Patent {author : \"asdf\", year: 2000, title: \"P1\"})-[:cite {difference : 0}]->(b:Patent {author : \"asdf\", year: 2000, title: \"P2\"})" +
    " (a)-[:cite {difference : 0}]->(c:Patent {author : \"asdf\", year: 2000, title: \"P3\"})" +
    " (b)-[:cite {difference : 0}]->(c)\n" +
    " (a)-[:cite {difference : 5}]->(d:Patent {author : \"zxcv\", year: 1995, title: \"Earlier...\"})" +
    " (b)-[:cite {difference : 5}]->(d)" +
    " (e:Patent {author : \"kdkdkd\", year: 1997, title: \"Once upon a time\"})-[e_d:cite {difference : 2}]->(d)" +
    "]");
  GraphCollection sourceGraph = loader.getGraphCollectionByVariables("source");
  // Caution: We can't use result.equalsByGraphElementIds because it internally uses a cross join
  // with equality of elements, which means, it ignores elements that are not within the other dataset
  // This means, the test would succeed even though we have too many vertices as a result of the
  // subgraph operator.
  org.junit.Assert.assertEquals(3, sourceGraph
    .apply(new ApplySubgraph(null, edge -> edge.getPropertyValue("difference").getInt() == 0))
    .getVertices()
    .collect().size());
}
}
@Test
public void testTransformation() throws Exception {
  GraphCollection original = getSocialNetworkLoader().getGraphCollection();

  // serialize the collection into transactions and rebuild it from them
  DataSet<GraphTransaction> transactions = original.getGraphTransactions();
  GraphCollection restored = getConfig().getGraphCollectionFactory()
    .fromTransactions(transactions);

  // the round trip must preserve graph ids, element ids and all data
  collectAndAssertTrue(original.equalsByGraphIds(restored));
  collectAndAssertTrue(original.equalsByGraphElementIds(restored));
  collectAndAssertTrue(original.equalsByGraphData(restored));
}
/**
 * {@inheritDoc}
 */
@Override
protected DataSet<Vertex> computeNewVertices(DataSet<GraphHead> newGraphHeads) {
  DataSet<Vertex> combined = firstCollection.getVertices()
    .union(secondCollection.getVertices());
  // vertices contained in graphs of both collections appear twice in the union
  return combined.distinct(new Id<Vertex>());
}
@Override
public GraphCollection executeForTxLayout(GraphCollection collection) {
  GradoopFlinkConfig config = collection.getConfig();

  // apply the graph head, vertex and edge transformation functions per transaction
  DataSet<GraphTransaction> transformed = collection.getGraphTransactions()
    .map(new TransformGraphTransaction(
      collection.getFactory().getGraphHeadFactory(), graphHeadTransFunc,
      collection.getFactory().getVertexFactory(), vertexTransFunc,
      collection.getFactory().getEdgeFactory(), edgeTransFunc));

  return config.getGraphCollectionFactory().fromTransactions(transformed);
}
}
/**
 * Test using multiple element aggregation functions on a graph collection
 *
 * @throws Exception if the execution or IO fails.
 */
@Test
public void testCollectionWithMultipleElementAggregationFunctions() throws Exception {
  GraphCollection collection = getSocialNetworkLoader()
    .getGraphCollectionByVariables("g0", "g1", "g2", "g3");

  SumProperty sum = new SumProperty("since");
  MaxProperty max = new MaxProperty("since");

  // applying both functions in a single operator call must be equivalent
  // to chaining two separate applications
  GraphCollection expected = collection
    .apply(new ApplyAggregation(sum))
    .apply(new ApplyAggregation(max));
  GraphCollection output = collection.apply(new ApplyAggregation(sum, max));

  collectAndAssertTrue(expected.equalsByGraphData(output));
}
/**
 * {@inheritDoc}
 */
@Override
public GraphCollection apply(ApplicableUnaryGraphToGraphOperator op) {
  // delegate to the generic collection operator call
  return callForCollection(op);
}
"]"); GraphCollection sourceGraph = loader.getGraphCollectionByVariables("source"); LogicalGraph searchGraph = sourceGraph.reduce(new ReduceCombination()); GraphCollection patternGraph = sourceGraph .apply(new ApplySubgraph(null, edge -> edge.getPropertyValue("difference").getInt() == 0)) .apply(new ApplyTransformation((gh, plain) -> { gh.setLabel("Combined"); return gh;
/** * Executes a rollUp on edges using a single grouping key and checks if the result * is correct. * * @throws Exception If the execution fails. */ @Test public void testEdgeRollUpWithSingleGroupingKey() throws Exception { FlinkAsciiGraphLoader loader = getSocialNetworkLoader(); LogicalGraph input = loader.getGraphCollectionByVariables("g0", "g1", "g2", "g3") .reduce(new ReduceCombination()); //expected loader.initDatabaseFromString("g0 {edgeRollUpGroupingKeys:\":label\"}[" + "(v0)" + "(v0)-[e_0:knows{count:10L}]->(v0)" + "(v0)-[e_1:hasModerator{count:1L}]->(v0)" + "(v0)-[e_2:hasMember{count:2L}]->(v0)" + "]"); GraphCollection expected = loader.getGraphCollection(); List<String> vertexGK = Collections.emptyList(); List<AggregateFunction> vertexAGG = Collections.emptyList(); List<String> edgeGK = Collections.singletonList(Grouping.LABEL_SYMBOL); List<AggregateFunction> edgeAGG = Collections.singletonList(new Count("count")); GraphCollection output = input.groupEdgesByRollUp(vertexGK, vertexAGG, edgeGK, edgeAGG); collectAndAssertTrue(output.equalsByGraphData(expected)); }