/**
 * Finds a property whose rdfs:range is the given resource, by querying the
 * flattened internal model.
 *
 * @param uri the resource expected as the rdfs:range of some property
 * @return the last matching property resource, or {@code null} if none match
 */
public static Resource getPropertyResource(Resource uri) {
    // FIX: the original concatenated the predicate and object with no
    // separating space, producing an invalid triple pattern such as
    // "?prop <...range><...uri> . ".
    String whereClause = "?prop " + SPARQLHelper.toSPARQL(RDFS.range) + " "
            + SPARQLHelper.toSPARQL(uri) + " . ";
    Model m = InternalModelConf.getFlatModel();
    String query = SPARQLHelper.SELECT_STATEMENT
            .replace("[variables]", "?prop")
            .replace("[whereClauses]", whereClause);
    Resource r = null;
    Query qry = QueryFactory.create(query);
    // FIX: close the QueryExecution to release its resources; the result
    // set must be fully consumed before the execution is closed.
    try (QueryExecution qe = QueryExecutionFactory.create(qry, m)) {
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()) {
            // If several properties match, the last one wins (original behavior).
            r = rs.next().get("prop").asResource();
        }
    }
    return r;
}
/**
 * Loads data/data.ttl, then runs every "construct-*.sparql" query found in
 * src/main/resources/data/queries (in sorted filename order), adding each
 * CONSTRUCT result back into the model, and prints the model before and after.
 *
 * @param args unused
 */
public static void main(String[] args) {
    FileManager.get().addLocatorClassLoader(ExampleARQ_01.class.getClassLoader());
    Model model = FileManager.get().loadModel("data/data.ttl");
    System.out.println("Input data:");
    model.write(System.out, "TURTLE");
    File path = new File("src/main/resources/data/queries");
    File[] files = path.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith("construct-") && name.endsWith(".sparql");
        }
    });
    // FIX: listFiles() returns null when the directory is missing or
    // unreadable; the original then threw a NullPointerException in
    // Arrays.sort(). Fail with a clear message instead.
    if (files == null) {
        System.err.println("Query directory not found or not readable: " + path);
        return;
    }
    // Sort so queries run in a deterministic (lexicographic) order.
    Arrays.sort(files);
    for (File file : files) {
        System.out.println("Executing " + file.getName() + " ...");
        Query query = QueryFactory.read(file.getAbsolutePath());
        QueryExecution qexec = QueryExecutionFactory.create(query, model);
        try {
            Model result = qexec.execConstruct();
            // Feed each CONSTRUCT result back into the model so later
            // queries can build on earlier results.
            model.add(result);
        } finally {
            qexec.close();
        }
    }
    System.out.println("Output data:");
    model.write(System.out, "TURTLE");
}
/**
 * Parses and executes a SELECT query against the configured SPARQL service,
 * fully consuming the results, and prints the elapsed time in milliseconds.
 *
 * @param queryString the SPARQL SELECT text to parse (with this runner's
 *                    prefix mapping and syntax)
 * @return this runner, for chaining
 */
public QueryRunner trySelect(String queryString) {
    Stopwatch sw = Stopwatch.createStarted();
    Query query = new Query();
    query.setPrefixMapping(prefixMapping);
    QueryFactory.parse(query, queryString, "http://example.org/", syntax);
    // FIX: the original never closed the QueryExecution, leaking the
    // underlying connection/resources on every call.
    try (QueryExecution qe = sparqlService.createQueryExecution(query)) {
        ResultSetFormatter.consume(qe.execSelect());
    }
    System.out.println("Time taken: " + sw.elapsed(TimeUnit.MILLISECONDS));
    return this;
} }
// Build an empty query carrying the caller's prefix mapping so prefixed
// names in sparqlQuery resolve, then parse and return its SPARQL text.
Query q1 = QueryFactory.create();
q1.setPrefixMapping(pmap);
q1 = QueryFactory.parse(q1, sparqlQuery, null, null);
// FIX: removed the dead statement "q1.toString();" — its result was
// discarded immediately before the identical call in the return below.
return q1.toString();
/**
 * Executes an ARQ-syntax SELECT query against the given model.
 *
 * @param str   the SPARQL SELECT text (ARQ extensions allowed)
 * @param model the model to query
 * @return the number of rows in the result set
 */
private int query(String str, Model model) {
    Query parsed = QueryFactory.create(str, Syntax.syntaxARQ);
    try (QueryExecution execution = QueryExecutionFactory.create(parsed, model)) {
        ResultSet rows = execution.execSelect();
        return ResultSetFormatter.consume(rows);
    }
}
/**
 * CONSTRUCT over GRAPH &lt;defaultGraph&gt; must reproduce the default model:
 * the constructed model is isomorphic to the loaded one.
 */
@Test
public void special1() {
    Dataset ds = dataset();
    Model m = ds.getDefaultModel();
    load1(m);
    String qs = "CONSTRUCT {?s ?p ?o } WHERE { GRAPH <" + defaultGraph + "> {?s ?p ?o}}";
    Query q = QueryFactory.create(qs);
    // FIX: close the QueryExecution (it is AutoCloseable); the original
    // leaked it.
    try (QueryExecution qExec = QueryExecutionFactory.create(q, ds)) {
        Model m2 = qExec.execConstruct();
        assertTrue(m.isIsomorphicWith(m2));
    }
}
/**
 * Runs a default-graph/named-graph UNION query inside a transaction of the
 * given mode and checks that exactly two rows are returned.
 *
 * @param mode the transaction mode (READ or WRITE) to exercise
 */
private void test(ReadWrite mode) {
    ds.begin(mode);
    Query q = QueryFactory.create("SELECT * { { ?s ?p ?o } UNION { GRAPH ?g { ?s ?p ?o }}}");
    // FIX: close the QueryExecution; the original leaked it. It is closed
    // before commit/end, while the transaction is still active.
    long count;
    try (QueryExecution qExec = QueryExecutionFactory.create(q, ds)) {
        count = ResultSetFormatter.consume(qExec.execSelect());
    }
    ds.commit();
    ds.end();
    assertEquals(2, count);
}
/**
 * CONSTRUCT over a named graph that does not exist in the dataset must
 * produce an empty model.
 */
@Test
public void query2() {
    Dataset ds = dataset();
    Model m = ds.getDefaultModel();
    load1(m);
    String qs = "CONSTRUCT {?s ?p ?o } WHERE { GRAPH <http://example/graph/> {?s ?p ?o}}";
    Query q = QueryFactory.create(qs);
    // FIX: close the QueryExecution; the original leaked it.
    try (QueryExecution qExec = QueryExecutionFactory.create(q, ds)) {
        Model m2 = qExec.execConstruct();
        assertTrue(m2.isEmpty());
    }
}
/**
 * DAWG optional-filter test: expr-5.rq over data-1.ttl must yield exactly
 * three rows under the Pellet-backed SPARQL-DL engine.
 */
@Test
public void testDAWG4() {
    final Model model = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
    model.read("file:test/data/sparql-dawg-tests/data-r2/optional-filter/data-1.ttl", "N3");
    final Query query = QueryFactory.read("file:test/data/sparql-dawg-tests/data-r2/optional-filter/expr-5.rq");
    // FIX: close the QueryExecution; the original leaked it. Also assert on
    // consume()'s return value (the row count) directly instead of reading
    // cursor state from the result set after it has been exhausted.
    try (final QueryExecution qe = SparqlDLExecutionFactory.create(query, model)) {
        final ResultSet rs = qe.execSelect();
        assertEquals(3, ResultSetFormatter.consume(rs));
    }
}
/**
 * Retrieves all named graphs that describe the given won:Need and collects
 * them into a dataset.
 *
 * @param uri the URI of the need resource (injected safely as an IRI)
 * @return a dataset containing one named model per matching graph
 */
public Dataset retrieveNeedDataset(String uri) {
    String queryString = "prefix won: <http://purl.org/webofneeds/model#> select distinct ?g where { "
            + "GRAPH ?g { ?uri a won:Need. ?a ?b ?c. } }";
    // ParameterizedSparqlString injects the URI as an IRI term, avoiding
    // SPARQL-injection via string concatenation.
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setCommandText(queryString);
    pps.setIri("uri", uri);
    Query query = QueryFactory.create(pps.toString());
    Dataset ds = DatasetFactory.createGeneral();
    // FIX: close the remote QueryExecution; the original leaked the HTTP
    // connection. Results are consumed before the execution closes.
    try (QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, query)) {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution qs = results.next();
            String graphUri = qs.getResource("g").getURI();
            Model model = retrieveModel(graphUri);
            ds.addNamedModel(graphUri, model);
        }
    }
    return ds;
}
/**
 * A CONSTRUCT template whose bindings would produce illegal triples (e.g. a
 * literal in subject position, via the swapped ?o ?p ?s pattern) must
 * silently drop them, yielding an empty model.
 */
@Test
public void testConstructRejectsBadTriples1() {
    String queryString = "CONSTRUCT { ?s ?p ?o } WHERE { ?o ?p ?s }";
    Query q = QueryFactory.create(queryString);
    // FIX: close the QueryExecution; the original leaked it.
    try (QueryExecution qExec = QueryExecutionFactory.create(q, m)) {
        Model resultModel = qExec.execConstruct();
        assertEquals(0, resultModel.size());
    }
}
/**
 * Short-form CONSTRUCT WHERE mixing a default-graph and a GRAPH pattern
 * must reproduce the whole input dataset (checked by isomorphism).
 */
@Test
public void testARQConstructQuad_ShortForm_3() {
    String queryString = "CONSTRUCT WHERE { ?s ?p ?o. GRAPH ?g1 {?s1 ?p1 ?o1} }";
    Query q = QueryFactory.create(queryString, Syntax.syntaxARQ);
    // FIX: close the QueryExecution; the original leaked it.
    try (QueryExecution qExec = QueryExecutionFactory.create(q, d)) {
        Dataset result = qExec.execConstructDataset();
        assertTrue(IsoMatcher.isomorphic(d.asDatasetGraph(), result.asDatasetGraph()));
    }
}
@Test public void sparql5() { Dataset dataset = create() ; String graphName = "http://example/" ; Triple triple = SSE.parseTriple("(<x> <y> 123)") ; Graph g2 = dataset.asDatasetGraph().getGraph(NodeFactory.createURI(graphName)) ; // Graphs only exists if they have a triple in them g2.add(triple) ; Query query = QueryFactory.create("ASK { GRAPH <"+graphName+"> {} }") ; boolean b = QueryExecutionFactory.create(query, dataset).execAsk() ; assertEquals(true, b) ; }
/**
 * Runs the given SELECT query and collects each ?reference binding,
 * wrapped in angle brackets, into a collection.
 *
 * @param model the factory used to create the query execution
 * @param query the SPARQL SELECT text; must bind a ?reference variable
 * @return the angle-bracketed reference strings, in result order
 */
private Collection<String> getReferenceSet(QueryExecutionFactory model, String query) {
    Collection<String> references = new ArrayList<>();
    Query parsed = QueryFactory.create(query);
    try (QueryExecution execution = model.createQueryExecution(parsed)) {
        ResultSet rows = execution.execSelect();
        while (rows.hasNext()) {
            references.add("<" + rows.next().get("reference").toString() + ">");
        }
    }
    return references;
}
/**
 * Executes a SPARQL SELECT against the model inside a critical section and
 * returns a rewindable copy of the results (safe to use after the section
 * and the execution are released).
 *
 * @param model  the model to query
 * @param sparql the SELECT query text
 * @param lock   the lock mode passed to {@code enterCriticalSection}
 * @return a rewindable snapshot of the result set
 */
private ResultSetRewindable executeSparqlSelect(Model model, String sparql, boolean lock) {
    Query parsed = QueryFactory.create(sparql);
    try (QueryExecution execution = QueryExecutionFactory.create(parsed, model)) {
        // The critical section is entered only for the duration of the
        // actual query evaluation, and always left via finally.
        model.enterCriticalSection(lock);
        try {
            ResultSet raw = execution.execSelect();
            return ResultSetFactory.makeRewindable(raw);
        } finally {
            model.leaveCriticalSection();
        }
    }
}
/**
 * Executes a SPARQL CONSTRUCT against the model inside a critical section
 * and returns the constructed model.
 *
 * @param model  the model to query
 * @param sparql the CONSTRUCT query text
 * @param lock   the lock mode passed to {@code enterCriticalSection}
 * @return the model built by the CONSTRUCT template
 */
private Model executeSparqlConstruct(Model model, String sparql, boolean lock) {
    Query parsed = QueryFactory.create(sparql);
    try (QueryExecution execution = QueryExecutionFactory.create(parsed, model)) {
        // Enter the critical section only around evaluation; always leave
        // it via finally.
        model.enterCriticalSection(lock);
        try {
            return execution.execConstruct();
        } finally {
            model.leaveCriticalSection();
        }
    }
}
@Test public void testCombinedQueryEngine() { // This tests annotations using the SPARQL-DL combined query engine final OntModel model = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC); model.read(DATA1_RDF); final Query query = QueryFactory.read(QUERY1_RQ); try (final QueryExecution qe = SparqlDLExecutionFactory.create(query, model)) { assertTrue("qe must not be null", qe != null); final ResultSet rs = qe.execSelect(); while (rs.hasNext()) { final QuerySolution qs = rs.nextSolution(); final Resource s = qs.getResource("s"); final Literal o = qs.getLiteral("o"); assertEquals("http://example.org#i", s.getURI()); assertEquals("o2", o.getLexicalForm()); } } }
/**
 * Compiles the given SPARQL pattern (with the example prefix) to algebra
 * and delegates to {@code test} with the expected join form.
 *
 * @param pattern  the WHERE-clause pattern text
 * @param joinForm the expected algebra form(s)
 */
private static void test3(String pattern, String... joinForm) {
    String queryString = "PREFIX : <http://example/> SELECT * " + pattern;
    Query compiledQuery = QueryFactory.create(queryString);
    Op op = Algebra.compile(compiledQuery.getQueryPattern());
    test(op, joinForm);
}
/**
 * Execute a query, expecting the result to be one row, one column. Return
 * that one RDFNode
 */
public static RDFNode getExactlyOne(String qs, Dataset ds) {
    Query parsed = QueryFactory.create(qs);
    // A single projected variable is required; anything else is a caller error.
    if ( parsed.getResultVars().size() != 1 )
        throw new ARQException("getExactlyOne: Must have exactly one result columns") ;
    String variable = parsed.getResultVars().get(0);
    try ( QueryExecution qExec = QueryExecutionFactory.create(parsed, ds) ) {
        // Delegate row extraction to the overload taking an open execution.
        return getExactlyOne(qExec, variable);
    }
}
public void run() { // register the Terp _parser ARQTerpParser.registerFactory(); for (final String query : queries) { // First create a Jena ontology model backed by the Pellet reasoner // (note, the Pellet reasoner is required) final OntModel m = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC); // Then read the _data from the file into the ontology model m.read(ontology); // Now read the query file into a query object // Important: specifying that the query is in Terp syntax final Query q = QueryFactory.read(query, TerpSyntax.getInstance()); // Create a SPARQL-DL query execution for the given query and // ontology model final QueryExecution qe = SparqlDLExecutionFactory.create(q, m); // We want to execute a SELECT query, do it, and return the result set final ResultSet rs = qe.execSelect(); // There are different things we can do with the result set, for // instance iterate over it and process the query solutions or, what we // do here, just print out the results ResultSetFormatter.out(rs); // And an empty line to make it pretty System.out.println(); } }