/**
 * Lists all distinct dataset URIs that appear as objects of daq:computedOn.
 *
 * @return a JSON array string of dataset URIs, e.g. {@code ["http://a","http://b"]};
 *         {@code []} when no dataset is found.
 */
public static String getAllDatasets() {
    String selectQuery = "SELECT DISTINCT ?dataset { ?x " + SPARQLHelper.toSPARQL(DAQ.computedOn) + " ?dataset . }";
    QueryExecution exec = QueryExecutionFactory.create(QueryFactory.create(selectQuery), getFlatModel());
    try {
        // Build the JSON array with a StringBuilder instead of repeated String
        // concatenation; track "first" so no trailing comma is ever produced.
        // (The previous substring(0, length-1) trick returned the invalid
        // string "]" when the result set was empty.)
        StringBuilder json = new StringBuilder("[");
        ResultSet set = exec.execSelect();
        boolean first = true;
        while (set.hasNext()) {
            QuerySolution sol = set.next();
            if (!first) {
                json.append(",");
            }
            json.append("\"").append(sol.get("dataset").asResource().toString()).append("\"");
            first = false;
        }
        json.append("]");
        return json.toString();
    } finally {
        // Release the query execution even if result iteration fails.
        exec.close();
    }
}
/**
 * Counts, for each class in the model, how many rdf:type assertions point at it.
 *
 * @param model the model to scan for {@code ?i rdf:type ?c} statements
 * @return map from class URI to the number of typed instances (as a Double)
 */
public Map<String, Double> getClassCounts(Model model) {
    QueryExecution execution = QueryExecutionFactory
            .create("select ?c ?i where { ?i <" + RDF.type + "> ?c }", model);
    try {
        ResultSet result = execution.execSelect();
        Map<String, Double> stats = new HashMap<String, Double>();
        while (result.hasNext()) {
            QuerySolution solution = result.next();
            String className = solution.getResource("c").getURI();
            if (stats.containsKey(className)) {
                stats.put(className, stats.get(className) + 1);
            } else {
                stats.put(className, 1d);
            }
        }
        return stats;
    } finally {
        // The original leaked the QueryExecution; always close it.
        execution.close();
    }
}
/**
 * Counts, per object property, how many links connect instances of
 * {@code cls1} to instances of {@code cls2} at the configured endpoint.
 *
 * @param cls1 class of the link subjects
 * @param cls2 class of the link objects
 * @return map from property to the number of links using it
 */
public Map<ObjectProperty, Integer> getMostFrequentProperties(NamedClass cls1, NamedClass cls2){
    String query = String.format(
            "SELECT ?p (COUNT(*) AS ?cnt) WHERE {?x1 a <%s>. ?x2 a <%s>. ?x1 ?p ?x2} GROUP BY ?p",
            cls1, cls2);
    // Query goes through the cache; the JSON payload is converted back
    // into a Jena ResultSet for iteration.
    ResultSet rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query));
    Map<ObjectProperty, Integer> frequencies = new HashMap<ObjectProperty, Integer>();
    while (rs.hasNext()) {
        QuerySolution row = rs.next();
        ObjectProperty property = new ObjectProperty(row.getResource("p").getURI());
        frequencies.put(property, row.getLiteral("cnt").getInt());
    }
    return frequencies;
}
/** This operation faithfully walks the results but does nothing with them. * @return The count of the number of solutions. */ public static int consume(ResultSet resultSet) { int count = 0 ; for ( ; resultSet.hasNext() ; ) { // Force nodes to be materialized. QuerySolution result = resultSet.nextSolution() ; for ( Iterator<String> iter = result.varNames() ; iter.hasNext() ; ) { String vn = iter.next(); RDFNode n = result.get(vn) ; } count++ ; } return count ; }
/**
 * Loads the model from the given file and extracts (label, description-URI)
 * pairs using the class-level {@code queryString} SELECT query.
 *
 * @param fn filename of the RDF data to load
 * @return one pair per solution: the ?label lexical form and the ?desc URI
 */
private static List<Pair<String, String>> storesByQuery(String fn) {
    Model model = FileManager.get().loadModel(fn);
    List<Pair<String, String>> data = new ArrayList<Pair<String, String>>();
    Query query = QueryFactory.create(queryString);
    QueryExecution qExec = QueryExecutionFactory.create(query, model);
    try {
        ResultSet rs = qExec.execSelect();
        while (rs.hasNext()) {
            QuerySolution row = rs.nextSolution();
            String label = row.getLiteral("label").getLexicalForm();
            String desc = row.getResource("desc").getURI();
            data.add(new Pair<String, String>(label, desc));
        }
    } finally {
        qExec.close();
    }
    return data;
} }
// NOTE(review): interior fragment — the enclosing method signature and several
// closing braces are not visible here. As written, the early `return null;`
// makes everything after it unreachable, the for-loop and if bodies are
// missing their braces, and `finally` appears without a visible `try`.
// Confirm against the full source before changing anything.
QueryExecution qexec = QueryExecutionFactory.create(query, jenaModel);
ResultSet results = qexec.execSelect() ;
if (!results.hasNext()) {
    // No solutions: log and bail out (caller presumably treats null as "no answer").
    logger.info("query does not return any answer.");
    return null;
// BUG(review): unreachable — the `return null;` above always exits first.
for ( ; results.hasNext() ; )
    QuerySolution soln = results.nextSolution() ;
Map<String, String> attValues = new HashMap<>();
RDFNode argNode = soln.get(arg) ;
if (argNode != null) {
    String value = argNode.toString();
    attValues.put(arg, value);
return null;
} finally {
    qexec.close() ;
/**
 * Runs the given SELECT query (with standard prefixes prepended) against the
 * model and accumulates each (?x, ?y) solution into the multimap.
 *
 * @param qs       SELECT query body expected to bind ?x and ?y
 * @param model    model to query
 * @param multimap out-parameter: ?y values are appended under their ?x key
 */
private static void exec(String qs, Model model, Map<Node, List<Node>> multimap) {
    String preamble = StrUtils.strjoinNL("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>",
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
            "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>",
            "PREFIX owl: <http://www.w3.org/2002/07/owl#>",
            "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>") ;
    Query query = QueryFactory.create(preamble + "\n" + qs, Syntax.syntaxARQ) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, model) ;
    try {
        ResultSet rs = qexec.execSelect() ;
        while (rs.hasNext()) {
            QuerySolution soln = rs.next() ;
            Node x = soln.get("x").asNode() ;
            Node y = soln.get("y").asNode() ;
            List<Node> values = multimap.get(x) ;
            if (values == null) {
                values = new ArrayList<Node>() ;
                multimap.put(x, values) ;
            }
            values.add(y) ;
        }
    } finally {
        // The original never closed the QueryExecution (resource leak).
        qexec.close() ;
    }
} }
// NOTE(review): interior fragment — the enclosing method and several braces
// are missing from this view. As written, the `while` has no braces (only the
// declaration of `soln` is in the loop, which is itself illegal as a braceless
// body), the `if` body is not closed before `qexec.close()`, and close() is
// never reached on the error path. Confirm against the full source.
try {
    final QueryExecution qexec = QueryExecutionFactory.sparqlService(
            sparqlEndpointURI,
            QueryFactory.create(query),
            auth(sparqlEndpointURI, user, password));
    // 2s connect / 5s execution timeout for the remote endpoint.
    qexec.setTimeout(2000, 5000);
    final ResultSet results = qexec.execSelect() ;
    while (results.hasNext())
    final QuerySolution soln = results.nextSolution() ;
    final Resource res = soln.getResource("res");
    String name = "";
    if(soln.contains("name")) {
        name = soln.getLiteral("name").getString();
    final RetrievedResource retrievedRes = new RetrievedResource(res.getURI(), name);
    resList.add(retrievedRes);
    qexec.close() ;
} catch (Exception exception) {
    LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
/**
 * Runs the given SPARQL query against the configured endpoint and wraps each
 * URI-valued ?s binding in a free-text match result.
 *
 * @param textquery   the original text query, recorded in each match's URI
 * @param sparqlQuery the SELECT query to execute; expected to bind ?s
 * @return insertion-ordered map from matched resource URI to its match result;
 *         solutions whose ?s is not a URI resource are skipped
 */
private Map<URI, MatchResult> search(String textquery, Query sparqlQuery) {
    logger.debug("Executing SPARQL query: {}", sparqlQuery);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(queryEndpoint.toString(), sparqlQuery);
    try {
        ResultSet resultSet = qexec.execSelect();
        Map<URI, MatchResult> r = Maps.newLinkedHashMap();
        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.next();
            RDFNode s = solution.get("s");
            if (s.isURIResource()) {
                try {
                    String resource = s.asResource().getURI();
                    FreeTextMatchResult result = new FreeTextMatchResult(
                            new URI(searchProperty + "?q=" + textquery),
                            new URI(resource));
                    r.put(new URI(resource), result);
                } catch (URISyntaxException e) {
                    // Log (instead of printStackTrace) and skip the malformed URI.
                    logger.error("Skipping result with malformed URI", e);
                }
            }
        }
        return r;
    } finally {
        // The original leaked the remote QueryExecution; always close it.
        qexec.close();
    }
}
/**
 * Lists the URIs of all named graphs stored at the configured SPARQL endpoint,
 * authenticating when both SPARQL login and password properties are configured.
 *
 * @return list of graph URIs (thread-safe list; empty when no graphs exist)
 */
@Override
public List<String> getAllStoredGraphs() {
    String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
    QueryExecution qexec;
    if (configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY)
            && configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY)) {
        // BUGFIX: the password must come from the same SPARQL property family
        // that was checked above — the original read
        // STORAGE_GRAPHSTORE_PASSWORD_KEY, which may be unset/different.
        HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY),
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY).toCharArray());
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString, httpAuthenticator);
    } else {
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString);
    }
    try {
        ResultSet rs = qexec.execSelect();
        List<String> graphs = Collections.synchronizedList(new ArrayList<String>());
        while (rs.hasNext()) {
            QuerySolution solution = rs.next();
            if (solution.contains("g")) {
                graphs.add(solution.get("g").asResource().getURI());
            }
        }
        return graphs;
    } finally {
        // Close even when result iteration throws (original closed only on success).
        qexec.close();
    }
}
/**
 * Collects every predicate found by {@code SELECT_PREDICATES_QUERY}, wrapped
 * as arity-2 {@link Predicate}s, inside a read transaction on the dataset.
 *
 * @return sorted set of predicates (TreeSet ordering)
 */
@Override
public Set<Predicate> getPredicates() {
    Set<Predicate> predicates = new TreeSet<Predicate>();
    dataset.begin(ReadWrite.READ);
    QueryExecution qExec = null;
    try {
        qExec = QueryExecutionFactory.create(SELECT_PREDICATES_QUERY, dataset);
        ResultSet rs = qExec.execSelect();
        while (rs.hasNext()) {
            QuerySolution solution = rs.next();
            predicates.add(new Predicate(solution.get("?p").toString(), 2));
        }
    } finally {
        // Close the execution (if created) before ending the transaction.
        if (qExec != null) {
            qExec.close();
        }
        dataset.end();
    }
    return predicates;
}
/**
 * Executes a two-column SELECT query and returns its rows as a map: the first
 * result variable becomes the key, the second the value.
 *
 * @param m        dataset to query
 * @param query    SELECT query with at least two result variables
 * @param bindings initial bindings (currently unused by this implementation)
 * @return map from first-column node to second-column node
 * @throws Exception propagated from query execution
 */
public static Map<RDFNode, RDFNode> fetchMap(Dataset m, Query query, QuerySolution bindings) throws Exception {
    QueryExecution qe = QueryExecutionFactory.create(query, m);
    try {
        Map<RDFNode, RDFNode> map = Maps.newHashMap();
        ResultSet results = qe.execSelect();
        List<String> vars = results.getResultVars();
        while (results.hasNext()) {
            QuerySolution row = results.nextSolution();
            // Column 0 keys the map; column 1 supplies the value.
            map.put(row.get(vars.get(0)), row.get(vars.get(1)));
        }
        return map;
    } finally {
        qe.close();
    }
}
// NOTE(review): interior fragment — `object`, `objNode`, `qExec`, and `logger`
// are declared outside this view, the `catch` below has no visible `try`, and
// the `while` / `if` bodies are not closed here. Confirm against the full
// source before editing.
Model model = ModelFactory.createDefaultModel();
Graph graph = model.getGraph();
ResultSet results = qExec.execSelect();
while (results.hasNext()) {
    QuerySolution sol = results.next();
    String subject;
    String predicate;
    // Bindings are expected to be (?s ?p ?o).
    subject = sol.getResource("s").toString();
    predicate = sol.getResource("p").toString();
    object = sol.get("o");
} catch (NoSuchElementException e) {
    logger.error("SELECT query does not return a (?s ?p ?o) Triple");
// Convert the ?o binding into a graph node: keep the literal's datatype when
// present, otherwise fall back to a plain literal of its string form.
if (object.isLiteral()) {
    Literal obj = object.asLiteral();
    objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
} else {
    objNode = NodeFactory.createLiteral(object.toString());
// NOTE(review): truncated fragment — the try blocks, the while loop, and the
// if are never closed in this view, `i` is never incremented here, and the
// `getLiteral("ir")` result is discarded (presumably consumed by code past the
// truncation). Confirm against the full source.
try {
    QueryExecution qExec = QueryExecutionFactory.create(
            query.toString(), txnGraph.getUnderlyingDataset());
    ResultSet rs = qExec.execSelect();
    try {
        int i = 0;
        while (rs.hasNext()) {
            QuerySolution solution = rs.nextSolution();
            if (i == 0) {
                // First row: capture the subscriber callback URL.
                subscriberURL = solution.get("subscriberURL").asNode().getURI();
                solution.getLiteral("ir").getLexicalForm();
                qExec.close();
// NOTE(review): interior fragment — `csvFile`, `selectQuery`, `d`, `ds`, and
// `datasets_metadata` are defined outside this view, and the while loop is not
// closed here. The QueryExecution is also not closed in the visible span.
// Appends one ?value-per-row CSV cell sequence for the dataset's named model.
csvFile.append(",");
QueryExecution exec = QueryExecutionFactory.create(QueryFactory.create(selectQuery),
        d.getNamedModel(datasets_metadata.get(ds)));
ResultSet set = exec.execSelect();
while(set.hasNext()){
    QuerySolution qs = set.next();
    // ?value is assumed numeric — read it as a double literal.
    double value = qs.get("value").asLiteral().getDouble();
    csvFile.append(value);
    csvFile.append(",");
/**
 * Converts each SPARQL solution into a property map and forwards it to an
 * interaction listener as a new link.
 *
 * @param results solutions to convert; URI bindings become their URI string,
 *                other bindings their literal string value
 * @throws StudyImporterException propagated from the listener
 */
public void toInteractions(ResultSet results) throws StudyImporterException {
    final InteractionListener listener = new InteractionListenerImpl(nodeFactory, getGeoNamesService(), getLogger());
    while (results.hasNext()) {
        QuerySolution solution = results.next();
        Map<String, String> props = new TreeMap<>();
        for (Iterator<String> names = solution.varNames(); names.hasNext(); ) {
            String var = names.next();
            RDFNode node = solution.get(var);
            String value = node.isURIResource()
                    ? solution.getResource(var).getURI()
                    : solution.getLiteral(var).getString();
            props.put(var, value);
        }
        // Every link carries the source citation of the current dataset.
        props.put(StudyImporterForTSV.STUDY_SOURCE_CITATION, getDataset().getCitation());
        listener.newLink(props);
    }
}
public void describe(Resource r) { // Default model. DB2Closure.closure(otherModel(r, dataset.getDefaultModel()), false, acc, resources); String query = "SELECT ?g { GRAPH ?g { <" + r.getURI() + "> ?p ?o } }"; QueryExecution qExec = RdfStoreQueryExecutionFactory.create(query, dataset); ResultSet rs = qExec.execSelect(); for (; rs.hasNext();) { QuerySolution qs = rs.next(); String gName = qs.getResource("g").getURI(); // mdb for DB2 Model model = dataset.getNamedModel(gName); Resource r2 = otherModel(r, model); DB2Closure.closure(r2, false, acc, resources); } qExec.close(); DB2Closure.closure(r, false, acc, resources); }
/**
 * Execute, expecting the result to be one row, one column.
 * Return that one RDFNode or throw an exception.
 *
 * @param qExec   execution to run; always closed before returning
 * @param varname result variable to extract
 * @return the single binding of {@code varname}
 * @throws ARQException when there are zero solutions or more than one
 */
public static RDFNode getExactlyOne(QueryExecution qExec, String varname) {
    try {
        ResultSet rs = qExec.execSelect();
        if (!rs.hasNext()) {
            throw new ARQException("Not found: var ?" + varname);
        }
        RDFNode node = rs.nextSolution().get(varname);
        if (rs.hasNext()) {
            throw new ARQException("More than one: var ?" + varname);
        }
        return node;
    } finally {
        qExec.close();
    }
}