/**
 * Counts, per class URI, how many instances in the model are typed with it.
 *
 * @param model the model queried for {@code ?i rdf:type ?c} statements
 * @return map from class URI to the number of instances of that class
 */
public Map<String, Double> getClassCounts(Model model) {
    QueryExecution execution = QueryExecutionFactory
            .create("select ?c ?i where { ?i <" + RDF.type + "> ?c }", model);
    try {
        ResultSet result = execution.execSelect();
        Map<String, Double> stats = new HashMap<String, Double>();
        while (result.hasNext()) {
            QuerySolution solution = result.next();
            String className = solution.getResource("c").getURI();
            if (stats.containsKey(className)) {
                stats.put(className, stats.get(className) + 1);
            } else {
                stats.put(className, 1d);
            }
        }
        return stats;
    } finally {
        // fix: the QueryExecution was never closed, leaking its resources
        execution.close();
    }
}
/**
 * Runs a two-variable SELECT query and builds a map from the first projected
 * variable's binding to the second's, one entry per result row.
 *
 * @return map keyed by the first result variable, valued by the second
 */
public static Map<RDFNode,RDFNode> fetchMap(Dataset m,Query query,QuerySolution bindings) throws Exception {
    QueryExecution execution = QueryExecutionFactory.create(query, m);
    try {
        ResultSet rows = execution.execSelect();
        List<String> projected = rows.getResultVars();
        String keyVar = projected.get(0);
        String valueVar = projected.get(1);
        Map<RDFNode,RDFNode> result = Maps.newHashMap();
        for ( ; rows.hasNext() ; ) {
            QuerySolution row = rows.nextSolution();
            result.put(row.get(keyVar), row.get(valueVar));
        }
        return result;
    } finally {
        execution.close();
    }
}
private SortedResultSet(ResultSet rs, Comparator<Binding> comparator) { model = rs.getResourceModel() ; // Put straight into a sorted structure SortedSet<Binding> sorted = new TreeSet<Binding>(comparator) ; for ( ; rs.hasNext() ; ) { Binding b = rs.nextBinding() ; sorted.add(b) ; } qIter = new QueryIterPlainWrapper(sorted.iterator()) ; resultVars = rs.getResultVars() ; //resultSet = new ResultSetStream(rs.getResultVars(), null, qIter) ; }
/**
 * Snapshots every solution of the wrapped Jena result set so it can be
 * iterated independently of the underlying query execution.
 */
public ResultSetWrapper(final ResultSet jenaResultSet) {
    final List<QuerySolution> copied = new ArrayList<QuerySolution>();
    for ( ; jenaResultSet.hasNext() ; ) {
        copied.add(jenaResultSet.nextSolution());
    }
    solutionsIter = copied.iterator();
    resultVars = jenaResultSet.getResultVars();
}
/**
 * Looks up a property whose rdfs:range is the given resource in the flat
 * internal model.
 *
 * @param uri the range resource to match
 * @return the last matching property resource, or null when none matches
 */
public static Resource getPropertyResource(Resource uri){
    // NOTE(review): there is no separator between the range predicate and the object
    // term below — assumes SPARQLHelper.toSPARQL(...) emits whitespace or <>-wrapped
    // terms that keep the query parseable; confirm against SPARQLHelper.
    String whereClause = "?prop " + " " + SPARQLHelper.toSPARQL(RDFS.range) + SPARQLHelper.toSPARQL(uri) + " . ";
    Model m = InternalModelConf.getFlatModel();
    String query = SPARQLHelper.SELECT_STATEMENT.replace("[variables]", "?prop").replace("[whereClauses]", whereClause);
    Resource r = null;
    Query qry = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(qry, m);
    try {
        ResultSet rs = qe.execSelect();
        // Walks all rows; the last binding wins, matching the original behavior.
        while (rs.hasNext()){
            r = rs.next().get("prop").asResource();
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qe.close();
    }
    return r;
}
// NOTE(review): this fragment appears garbled/incomplete as captured here: braces are
// missing around the hasNext() guard and the for-loop body, the early `return null;`
// makes the loop that follows unreachable as written, the loop returns null even when a
// value is found, and the `try` matching the trailing `finally { qexec.close(); }` is
// not visible. Reconstruct the method from version control before editing; do not
// modify this text in place.
QueryExecution qexec = QueryExecutionFactory.create(query, jenaModel); ResultSet results = qexec.execSelect() ; if (!results.hasNext()) { logger.info("query does not return any answer."); return null; for ( ; results.hasNext() ; ) QuerySolution soln = results.nextSolution() ; Map<String, String> attValues = new HashMap<>(); RDFNode argNode = soln.get(arg) ; if (argNode != null) { String value = argNode.toString(); attValues.put(arg, value); return null; } finally { qexec.close() ;
/**
 * Runs the given SELECT (expected to project ?x and ?y) against the model,
 * prefixed with the standard rdf/rdfs/xsd/owl/skos prologue, and appends each
 * ?y under its ?x key in the supplied multimap.
 *
 * @param qs       query body, ARQ syntax, without prefixes
 * @param model    model to query
 * @param multimap accumulator: x-node -> list of y-nodes (mutated in place)
 */
private static void exec(String qs, Model model, Map<Node, List<Node>> multimap) {
    String preamble = StrUtils.strjoinNL("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>",
                                         "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
                                         "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>",
                                         "PREFIX owl: <http://www.w3.org/2002/07/owl#>",
                                         "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>") ;
    Query query = QueryFactory.create(preamble+"\n"+qs, Syntax.syntaxARQ) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, model) ;
    try {
        ResultSet rs = qexec.execSelect() ;
        while ( rs.hasNext() ) {
            QuerySolution soln = rs.next() ;
            Node x = soln.get("x").asNode() ;
            Node y = soln.get("y").asNode() ;
            if ( ! multimap.containsKey(x) )
                multimap.put(x, new ArrayList<Node>()) ;
            multimap.get(x).add(y) ;
        }
    } finally {
        // fix: the QueryExecution was never closed (resource leak)
        qexec.close() ;
    }
}
}
/**
 * Collects all distinct predicates from the dataset inside a read
 * transaction, wrapping each as a Predicate with arity 2.
 *
 * @return sorted set of predicates found by SELECT_PREDICATES_QUERY
 */
@Override public Set<Predicate> getPredicates() {
    Set<Predicate> found = new TreeSet<Predicate>();
    dataset.begin(ReadWrite.READ);
    QueryExecution execution = null;
    try {
        execution = QueryExecutionFactory.create(SELECT_PREDICATES_QUERY, dataset);
        ResultSet rows = execution.execSelect();
        for ( ; rows.hasNext() ; ) {
            // NOTE(review): the variable is looked up as "?p" including the prefix —
            // presumably the Jena version in use accepts that form; confirm.
            found.add(new Predicate(rows.next().get("?p").toString(), 2));
        }
    } finally {
        if (execution != null) {
            execution.close();
        }
        // Always end the read transaction, even on failure.
        dataset.end();
    }
    return found;
}
/**
 * Executes the SELECT, expecting exactly one row and one column.
 *
 * @param qExec   execution to run; always closed before returning
 * @param varname result variable to extract
 * @return the single RDFNode, or null when the result set is empty
 * @throws ARQException when more than one row is returned
 */
public static RDFNode getOne(QueryExecution qExec, String varname) {
    try {
        ResultSet rs = qExec.execSelect() ;
        if ( ! rs.hasNext() )
            return null ;
        RDFNode node = rs.nextSolution().get(varname) ;
        // A second row violates the one-row contract.
        if ( rs.hasNext() )
            throw new ARQException("More than one: var ?"+varname) ;
        return node ;
    } finally {
        qExec.close() ;
    }
}
public void debug(){ String entityVar = "s"; String fieldVar = "p"; String valueVar = "o"; StringBuilder qb = new StringBuilder(); qb.append(String.format("SELECT ?%s ?%s ?%s \n", entityVar,fieldVar,valueVar)); //for the select qb.append("{ \n"); qb.append(String.format(" ?%s ?%s ?%s . \n", entityVar,fieldVar,valueVar)); //for the where qb.append("} \n"); log.debug("EntityDataIterator Query: \n"+qb.toString()); Query q = QueryFactory.create(qb.toString(), Syntax.syntaxARQ); ResultSet rs = QueryExecutionFactory.create(q, indexingDataset.toDataset()).execSelect(); Var s = Var.alloc(entityVar); Var p = Var.alloc(fieldVar); Var o = Var.alloc(valueVar); while (rs.hasNext()){ Binding b = rs.nextBinding(); log.debug("{} {} {}",new Object[]{b.get(s),b.get(p),b.get(o)}); } }
/**
 * Lists the URIs of all named graphs stored at the configured SPARQL
 * endpoint, authenticating when login/password properties are present.
 *
 * @return synchronized list of graph URIs (empty when none exist)
 */
@Override public List<String> getAllStoredGraphs() {
    String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
    QueryExecution qexec;
    if (configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY)
            && configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY)) {
        HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY),
                // fix: previously read STORAGE_GRAPHSTORE_PASSWORD_KEY although the guard
                // above checks STORAGE_SPARQL_PASSWORD_KEY — a mismatch that could NPE
                // when only the SPARQL password is configured.
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY).toCharArray());
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString, httpAuthenticator);
    } else {
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString);
    }
    List<String> graphs = Collections.synchronizedList(new ArrayList<String>());
    try {
        ResultSet rs = qexec.execSelect();
        while (rs.hasNext()) {
            QuerySolution solution = rs.next();
            if (solution.contains("g")) {
                graphs.add(solution.get("g").asResource().getURI());
            }
        }
    } finally {
        // fix: close even when execSelect or result handling throws
        qexec.close();
    }
    return graphs;
}
/**
 * Runs the given SPARQL query against the configured remote endpoint and
 * wraps every URI-resource binding of ?s in a free-text match result.
 *
 * @param textquery   the original text query, embedded in the match URI
 * @param sparqlQuery the query to execute remotely
 * @return insertion-ordered map from matched resource URI to its result
 */
private Map<URI, MatchResult> search(String textquery, Query sparqlQuery) {
    logger.debug("Executing SPARQL query: {}", sparqlQuery);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(queryEndpoint.toString(), sparqlQuery);
    Map<URI, MatchResult> r = Maps.newLinkedHashMap();
    try {
        ResultSet resultSet = qexec.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.next();
            RDFNode s = solution.get("s");
            if (s.isURIResource()) {
                try {
                    String resource = s.asResource().getURI();
                    FreeTextMatchResult result = new FreeTextMatchResult(new URI(searchProperty + "?q=" + textquery), new URI(resource));
                    r.put(new URI(resource), result);
                } catch (URISyntaxException e) {
                    // fix: report through the class logger instead of printStackTrace();
                    // the malformed entry is skipped, matching previous behavior.
                    logger.warn("Skipping search result with malformed URI", e);
                }
            }
        }
    } finally {
        // fix: the remote QueryExecution was never closed (connection leak)
        qexec.close();
    }
    return r;
}
/**
 * Loads the model from the given file and extracts (label, description-URI)
 * pairs using the class-level query string.
 *
 * @param fn filename of the model to load
 * @return one pair per query solution
 */
private static List<Pair<String, String>> storesByQuery(String fn) {
    Model model = FileManager.get().loadModel(fn) ;
    List<Pair<String, String>> data = new ArrayList<Pair<String, String>>();
    Query query = QueryFactory.create(queryString) ;
    QueryExecution qExec = QueryExecutionFactory.create(query, model) ;
    try {
        ResultSet rows = qExec.execSelect() ;
        while ( rows.hasNext() ) {
            QuerySolution row = rows.nextSolution() ;
            String label = row.getLiteral("label").getLexicalForm() ;
            String desc = row.getResource("desc").getURI() ;
            data.add(new Pair<String, String>(label, desc)) ;
        }
    } finally {
        qExec.close() ;
    }
    return data ;
}
}
public void apply() { proc.start(rs) ; for ( ; rs.hasNext() ; ) { QuerySolution qs = rs.next() ; proc.start(qs) ; for ( String varName : rs.getResultVars() ) { RDFNode node = qs.get(varName) ; // node may be null proc.binding(varName, node) ; } proc.finish(qs) ; } proc.finish(rs) ; }
/**
 * Converts the next solution of the result set into a DataEntry, adding one
 * value per projected variable (keyed by column name or 1-based number).
 *
 * @param rs result set positioned before the row to convert
 * @return the populated data entry
 */
private DataEntry getDataEntryFromRS(ResultSet rs) {
    DataEntry dataEntry = new DataEntry();
    QuerySolution soln = rs.nextSolution();
    String colName, value;
    boolean useColumnNumbers = this.isUsingColumnNumbers();
    /* for each column get the colName and colValue and add to the data entry */
    for (int i = 0; i < rs.getResultVars().size(); i++) {
        colName = rs.getResultVars().get(i);
        RDFNode node = soln.get(colName);
        if (node == null) {
            // fix: an unbound (e.g. OPTIONAL) variable previously caused a
            // NullPointerException on node.isLiteral(); skip unbound columns.
            continue;
        }
        if (node.isLiteral()) {
            value = convertRSToString(soln, colName);
        } else {
            value = soln.getResource(colName).getURI();
        }
        dataEntry.addValue(useColumnNumbers ? Integer.toString(i + 1) : colName, new ParamValue(value));
    }
    return dataEntry;
}
/** This operation faithfully walks the results but does nothing with them. * @return The count of the number of solutions. */ public static int consume(ResultSet resultSet) { int count = 0 ; for ( ; resultSet.hasNext() ; ) { // Force nodes to be materialized. QuerySolution result = resultSet.nextSolution() ; for ( Iterator<String> iter = result.varNames() ; iter.hasNext() ; ) { String vn = iter.next(); RDFNode n = result.get(vn) ; } count++ ; } return count ; }
// NOTE(review): this fragment appears garbled/incomplete as captured here: the
// QueryExecution built by QueryExecutionFactory.create(...) is never assigned to the
// `qExec` used below (the `QueryExecution qExec =` target is missing), and the braces
// closing the try/finally are not visible. Also, rs.nextBinding() is used without an
// rs.hasNext() guard — presumably a COUNT query always yields one row; confirm.
// Reconstruct the method from version control before editing.
QueryExecutionFactory.create( "SELECT (COUNT(*) as ?count) { GRAPH ?g { ?s ?p ?o } } ", txnGraph.getUnderlyingDataset()); ResultSet rs = qExec.execSelect(); try { result = (Integer) rs.nextBinding() .get(Var.alloc("count")) .getLiteralValue(); } finally { qExec.close();
/** * Extracts a List filled with the binding of selectElement variable for each * query solution as RDFNodes (Resources or Literals). * Exhausts the result set. Create a rewindable one to use multiple times. * * @see com.hp.hpl.jena.query.ResultSetFactory */ public static List<RDFNode> resultSetToList(ResultSet rs, String selectElement) { // feature suggested by James Howison List<RDFNode> items = new ArrayList<RDFNode>() ; while (rs.hasNext()) { QuerySolution qs = rs.nextSolution() ; RDFNode n = qs.get(selectElement) ; items.add(n) ; } return items ; }