/**
 * Returns every edge of the graph: all statements in the underlying model
 * whose predicate is the confinement property.
 */
@Override
public Set<Statement> edgeSet() {
    // Wildcard subject and object; the predicate alone defines edge membership.
    return listStatements(null, confinementProperty, null).toSet();
}
/**
 * Returns the outgoing edges of {@code vertex}: all confinement-property
 * statements having the vertex as subject.
 */
@Override
public Set<Statement> outgoingEdgesOf(RDFNode vertex) {
    return listStatements(vertex, confinementProperty, null).toSet();
}
/**
 * Returns the incoming edges of {@code vertex}: all confinement-property
 * statements having the vertex as object.
 */
@Override
public Set<Statement> incomingEdgesOf(RDFNode vertex) {
    return listStatements(null, confinementProperty, vertex).toSet();
}
/**
 * Returns all edges between the two given vertices: the confinement-property
 * statements with {@code sourceVertex} as subject and {@code targetVertex}
 * as object.
 */
@Override
public Set<Statement> getAllEdges(RDFNode sourceVertex, RDFNode targetVertex) {
    return listStatements(sourceVertex, confinementProperty, targetVertex).toSet();
}
/**
 * Removes all edges between the two given vertices from the backing model.
 *
 * @return the set of statements that were removed (possibly empty)
 */
@Override
public Set<Statement> removeAllEdges(RDFNode sourceVertex, RDFNode targetVertex) {
    // Snapshot first so we are not removing while iterating a live iterator.
    Set<Statement> removed = listStatements(sourceVertex, confinementProperty, targetVertex).toSet();
    for (Statement stmt : removed) {
        model.remove(stmt);
    }
    return removed;
}
/**
 * Determines the Java classes applicable to the given subject by mapping the
 * objects of its type-property statements through the node-to-class registry.
 *
 * @param subject the resource whose type statements are inspected
 * @return the (possibly empty) set of mapped classes; type nodes with no
 *         registered class are silently skipped
 */
@Override
public Collection<Class<?>> getApplicableTypes(Resource subject) {
    // toList() avoids building a redundant intermediate Set: the terminal
    // collector already deduplicates the result.
    return subject.listProperties(typeProperty).toList().stream()
            .map(stmt -> stmt.getObject().asNode())
            .map(nodeToClass::get)
            .filter(c -> c != null) // drop type nodes without a registered class
            .collect(Collectors.toSet());
}
/**
 * Determines the Java classes applicable to the given subject by mapping the
 * objects of its type-property statements through the node-to-class registry.
 *
 * @param subject the resource whose type statements are inspected
 * @return the (possibly empty) set of mapped classes; type nodes with no
 *         registered class are silently skipped
 */
@Override
public Collection<Class<?>> getApplicableTypes(Resource subject) {
    // toList() avoids building a redundant intermediate Set: the terminal
    // collector already deduplicates the result.
    return subject.listProperties(typeProperty).toList().stream()
            .map(stmt -> stmt.getObject().asNode())
            .map(nodeToClass::get)
            .filter(c -> c != null) // drop type nodes without a registered class
            .collect(Collectors.toSet());
}
/**
 * Forwards added statements to the target model, logging the count at debug
 * level when enabled.
 */
@Override
public void addedStatements(StmtIterator statements) {
    if (!log.isDebugEnabled()) {
        // Fast path: stream the iterator straight through.
        m.add(new StmtIteratorImpl(statements));
        return;
    }
    // Materialize so the count can be logged before forwarding.
    Set<Statement> stmts = statements.toSet();
    log.debug(hash + " addedStatements: " + stmts.size());
    m.add(new StmtIteratorImpl(stmts.iterator()));
}
/**
 * Forwards removed statements to the target model, logging the count at
 * debug level when enabled.
 */
@Override
public void removedStatements(StmtIterator statements) {
    if (!log.isDebugEnabled()) {
        // Fast path: stream the iterator straight through.
        m.remove(new StmtIteratorImpl(statements));
        return;
    }
    // Materialize so the count can be logged before forwarding.
    Set<Statement> stmts = statements.toSet();
    log.debug(hash + " removedStatements: " + stmts.size());
    m.remove(new StmtIteratorImpl(stmts.iterator()));
}
/**
 * Adds an rdfs:label to every ContainmentTest resource in the model. The
 * label is the last '/'-delimited segment of the resource URI, with any '#'
 * characters replaced by '_'.
 *
 * @param model the model to enrich in place
 */
public static void enrichTestCasesWithLabels(Model model) {
    // Snapshot the matches so adding labels does not disturb a live iterator.
    Set<Statement> stmts = model.listStatements(null, RDF.type, SparqlQcVocab.ContainmentTest).toSet();
    for (Statement stmt : stmts) {
        Resource s = stmt.getSubject();
        // Guard: anonymous (blank-node) subjects have no URI — getURI()
        // would return null and the substring computation would NPE.
        if (!s.isURIResource()) {
            continue;
        }
        String uri = s.getURI();
        String id = uri.substring(uri.lastIndexOf('/') + 1).replace('#', '_');
        model.add(s, RDFS.label, id);
    }
}
/**
 * Adds an rdfs:label to every ContainmentTest resource in the model. The
 * label is the last '/'-delimited segment of the resource URI, with any '#'
 * characters replaced by '_'.
 *
 * @param model the model to enrich in place
 */
public static void enrichTestCasesWithLabels(Model model) {
    // Snapshot the matches so adding labels does not disturb a live iterator.
    Set<Statement> stmts = model.listStatements(null, RDF.type, SparqlQcVocab.ContainmentTest).toSet();
    for (Statement stmt : stmts) {
        Resource s = stmt.getSubject();
        // Guard: anonymous (blank-node) subjects have no URI — getURI()
        // would return null and the substring computation would NPE.
        if (!s.isURIResource()) {
            continue;
        }
        String uri = s.getURI();
        String id = uri.substring(uri.lastIndexOf('/') + 1).replace('#', '_');
        model.add(s, RDFS.label, id);
    }
}
public static void main(String[] args) throws Exception{ org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG); SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); // endpoint = SparqlEndpoint.getEndpointDBpedia(); ConciseBoundedDescriptionGenerator cbdGen = new SymmetricConciseBoundedDescriptionGeneratorImpl(endpoint); Resource res = ResourceFactory.createResource("http://dbpedia.org/resource/Santa_Clara,_California"); Model cbd = cbdGen.getConciseBoundedDescription(res.getURI(), 2); System.out.println("#triples =\t" + cbd.size()); System.out.println("#triples_out =\t" + cbd.listStatements(res, null, (RDFNode) null).toSet().size()); cbd.listStatements(res, null, (RDFNode) null).toList().forEach(System.out::println); System.out.println("#triples_in =\t" + cbd.listStatements(null, null, res).toSet().size()); cbd.listStatements(null, null, res).toList().forEach(System.out::println); }
/**
 * Convert OWL axioms from OWL API into JENA API statements.
 *
 * @param axioms the OWL API axioms
 * @return the JENA statements
 * @throws RuntimeException if the intermediate ontology cannot be created
 */
public static Set<Statement> asStatements(Set<OWLAxiom> axioms) {
    // Each conversion gets a fresh ontology IRI via the global counter.
    IRI ontologyIri = IRI.create("http://dllearner.org/converter" + ONT_COUNTER++);
    try {
        Model model = getModel(man.createOntology(axioms, ontologyIri));
        return model.listStatements().toSet();
    } catch (OWLOntologyCreationException e) {
        throw new RuntimeException("Conversion of axioms failed.", e);
    }
}
/**
 * Gathers the properties of every RDF list element that passes the security
 * filter for the given actions.
 *
 * @param actions the actions the filter checks against each list element
 * @return the union of the filtered elements' statements
 */
private Set<Statement> collectStatements(final Set<Action> actions) {
    final ExtendedIterator<RDFList> lists = WrappedIterator.create(
            new RDFListIterator(holder.getBaseItem()))
            .filterKeep(new RDFListSecFilter<>(this, actions));
    try {
        final Set<Statement> result = new HashSet<>();
        while (lists.hasNext()) {
            result.addAll(lists.next().listProperties().toSet());
        }
        return result;
    } finally {
        // Always release the underlying iterator, even on failure.
        lists.close();
    }
}
/**
 * Asserts that the model identified by {@code gms.uri} contains exactly the
 * expected statements.
 */
private void assertModelContents(GraphModelStructure gms, String... expected) {
    // Snapshot the model's statements and delegate the comparison.
    Set<Statement> actual = mmmm.openModel(gms.uri).listStatements().toSet();
    assertStatements(actual, expected);
}
@Test public void test_API1() { try(QueryExecution qExec = makeQExec("SELECT * {?s ?p ?o}")) { ResultSet rs = qExec.execSelect() ; assertTrue("No results", rs.hasNext()) ; QuerySolution qs = rs.nextSolution() ; Resource qr = qs.getResource("s") ; //assertSame("Not the same model as queried", qr.getModel(), m) ; Set<Statement> s1 = qr.getModel().listStatements().toSet() ; Set<Statement> s2 = m.listStatements().toSet() ; assertEquals(s1,s2) ; } }
@Test public void testRemoveProperties() { final Set<Action> perms = SecurityEvaluator.Util.asSet(new Action[] { Action.Update, Action.Delete }); final int count = baseModel.listStatements(getBaseRDFNode().asResource(), SecuredRDFNodeTest.p, (RDFNode) null) .toSet().size(); try { getSecuredResource().removeProperties(); // only throw on delete if count > 0 if (!securityEvaluator.evaluate(Action.Update) || ((count > 0) && !securityEvaluator.evaluate(Action.Delete))) { Assert.fail("Should have thrown AccessDeniedException"); } } catch (final AccessDeniedException e) { if (securityEvaluator.evaluate(perms)) { Assert.fail(String.format("Should not have thrown AccessDeniedException: %s - %s", e, e.getTriple())); } } } }
@Test public void testRemoveAll() { final Set<Action> perms = SecurityEvaluator.Util.asSet(new Action[] { Action.Update, Action.Delete }); final int count = baseModel.listStatements(getBaseRDFNode().asResource(), SecuredRDFNodeTest.p, (RDFNode) null) .toSet().size(); try { getSecuredResource().removeAll(SecuredRDFNodeTest.p); // only throw on delete if count > 0 if (!securityEvaluator.evaluate(Action.Update) || ((count > 0) && !securityEvaluator.evaluate(Action.Delete))) { Assert.fail("Should have thrown AccessDeniedException"); } } catch (final AccessDeniedException e) { if (securityEvaluator.evaluate(perms)) { Assert.fail(String.format("Should not have thrown AccessDeniedException: %s - %s", e, e.getTriple())); } } }
/**
 * explainInconsistency() must report exactly the disjointness axiom and the
 * two conflicting type assertions, omitting the unrelated type on C.
 */
@Test
public void testJena() {
    final Resource clsA = ResourceFactory.createResource("A");
    final Resource clsB = ResourceFactory.createResource("B");
    final Resource clsC = ResourceFactory.createResource("C");
    final Resource ind = ResourceFactory.createResource("i");

    // The statements we expect in the inconsistency explanation.
    final Model expected = ModelFactory.createDefaultModel();
    expected.add(clsA, org.apache.jena.vocabulary.OWL.disjointWith, clsB);
    expected.add(ind, RDF.type, clsA);
    expected.add(ind, RDF.type, clsB);

    final OntModel model = ModelFactory.createOntologyModel(openllet.jena.PelletReasonerFactory.THE_SPEC);
    model.add(expected);
    model.add(ind, RDF.type, clsC); // irrelevant assertion — must not be explained
    model.prepare();

    final Model actual = ((PelletInfGraph) model.getGraph()).explainInconsistency();
    assertEquals(expected.listStatements().toSet(), actual.listStatements().toSet());
}