Refine search
/**
 * Runs a SELECT query against the dataset and builds a map from the first
 * projected variable to the second.
 *
 * @param m        dataset to query
 * @param query    SELECT query projecting at least two variables
 * @param bindings optional initial variable bindings (may be null)
 * @return map keyed by the first projected variable, valued by the second
 * @throws Exception propagated from query execution
 */
public static Map<RDFNode,RDFNode> fetchMap(Dataset m, Query query, QuerySolution bindings) throws Exception {
    QueryExecution qe = QueryExecutionFactory.create(query, m);
    try {
        // FIX: bindings was previously accepted but never applied to the
        // execution (cf. appendConstruct, which does apply its bindings).
        if (bindings != null) {
            qe.setInitialBinding(bindings);
        }
        ResultSet results = qe.execSelect();
        Map<RDFNode,RDFNode> map = Maps.newHashMap();
        List<String> vars = results.getResultVars();
        while (results.hasNext()) {
            QuerySolution row = results.nextSolution();
            map.put(row.get(vars.get(0)), row.get(vars.get(1)));
        }
        return map;
    } finally {
        qe.close();
    }
}
/**
 * Create a QueryExecution that will access a SPARQL service over HTTP,
 * using no HTTP authentication.
 *
 * @param service URL of the remote service
 * @param query   Query string to execute
 * @return QueryExecution
 */
static public QueryExecution sparqlService(String service, String query) {
    // Delegate to the authenticating overload with no credentials.
    final HttpAuthenticator noAuth = null;
    return sparqlService(service, query, noAuth);
}
/**
 * Create a QueryExecution given some initial values of variables.
 *
 * @param queryStr       query string
 * @param syntax         query language syntax
 * @param initialBinding any initial binding of variables
 * @return QueryExecution
 */
static public QueryExecution create(String queryStr, Syntax syntax, QuerySolution initialBinding) {
    checkArg(queryStr);
    Query parsed = makeQuery(queryStr, syntax);
    return create(parsed, initialBinding);
}
/** Create a QueryExecution to execute over the Dataset. * * @param queryStr Query string * @param dataset Target of the query * @return QueryExecution */ static public QueryExecution create(String queryStr, Dataset dataset) { checkArg(queryStr) ; //checkArg(dataset) ; // Allow null return make(makeQuery(queryStr), dataset) ; }
/**
 * Counts the number of typed instances per class (object of rdf:type) in
 * the given model.
 *
 * @param model model to inspect
 * @return map from class URI to instance count
 */
public Map<String, Double> getClassCounts(Model model) {
    QueryExecution execution = QueryExecutionFactory
            .create("select ?c ?i where { ?i <" + RDF.type + "> ?c }", model);
    try {
        ResultSet result = execution.execSelect();
        Map<String, Double> stats = new HashMap<String, Double>();
        while (result.hasNext()) {
            QuerySolution solution = result.next();
            String className = solution.getResource("c").getURI();
            // Replaces the containsKey/get/put dance with a single lookup.
            Double current = stats.get(className);
            stats.put(className, current == null ? 1d : current + 1);
        }
        return stats;
    } finally {
        // FIX: the QueryExecution was previously never closed (resource leak).
        execution.close();
    }
}
/**
 * Loads the model from the given file and extracts (label, description URI)
 * pairs using the class-level query string.
 *
 * @param fn filename of the model to load
 * @return list of (label lexical form, desc resource URI) pairs
 */
private static List<Pair<String, String>> storesByQuery(String fn) {
    Model model = FileManager.get().loadModel(fn);
    List<Pair<String, String>> data = new ArrayList<Pair<String, String>>();
    Query query = QueryFactory.create(queryString);
    QueryExecution qExec = QueryExecutionFactory.create(query, model);
    try {
        ResultSet rs = qExec.execSelect();
        while (rs.hasNext()) {
            QuerySolution qs = rs.nextSolution();
            data.add(new Pair<String, String>(
                    qs.getLiteral("label").getLexicalForm(),
                    qs.getResource("desc").getURI()));
        }
    } finally {
        qExec.close();
    }
    return data;
}
}
/**
 * Lists the URIs of all named graphs stored at the configured SPARQL
 * endpoint, authenticating when login/password properties are configured.
 *
 * @return thread-safe list of named-graph URIs
 */
@Override
public List<String> getAllStoredGraphs() {
    String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
    QueryExecution qexec;
    if (configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY)
            && configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY)) {
        // FIX: the password was previously read from STORAGE_GRAPHSTORE_PASSWORD_KEY
        // although the guard above checks STORAGE_SPARQL_PASSWORD_KEY (copy-paste bug).
        HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY),
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY).toCharArray());
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString, httpAuthenticator);
    } else {
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(), queryString);
    }
    try {
        ResultSet rs = qexec.execSelect();
        List<String> graphs = Collections.synchronizedList(new ArrayList<String>());
        while (rs.hasNext()) {
            QuerySolution solution = rs.next();
            if (solution.contains("g")) {
                graphs.add(solution.get("g").asResource().getURI());
            }
        }
        return graphs;
    } finally {
        // FIX: close even when execSelect or iteration throws (was leaked on error).
        qexec.close();
    }
}
/**
 * Runs the given SPARQL query against the remote endpoint and wraps every
 * URI-resource binding of ?s in a FreeTextMatchResult.
 *
 * @param textquery   the free-text query (embedded in the result URI)
 * @param sparqlQuery the SELECT query to execute; must project ?s
 * @return ordered map from matched resource URI to its match result
 */
private Map<URI, MatchResult> search(String textquery, Query sparqlQuery) {
    logger.debug("Executing SPARQL query: {}", sparqlQuery);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(queryEndpoint.toString(), sparqlQuery);
    Map<URI, MatchResult> r = Maps.newLinkedHashMap();
    try {
        ResultSet resultSet = qexec.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.next();
            RDFNode s = solution.get("s");
            if (s.isURIResource()) {
                String resource = s.asResource().getURI();
                try {
                    FreeTextMatchResult result = new FreeTextMatchResult(
                            new URI(searchProperty + "?q=" + textquery), new URI(resource));
                    r.put(new URI(resource), result);
                } catch (URISyntaxException e) {
                    // FIX: was e.printStackTrace() — log properly and skip the bad URI.
                    logger.warn("Skipping result with malformed URI: {}", resource, e);
                }
            }
        }
    } finally {
        // FIX: the QueryExecution was previously never closed (resource leak).
        qexec.close();
    }
    return r;
}
/**
 * Runs a SELECT query over the ontology model {@code om} and returns its ResultSet.
 * NOTE(review): the QueryExecution is never closed, and the returned ResultSet is
 * only valid while the execution remains open, so callers effectively leak it.
 * Consider materializing the results (e.g. ResultSetFactory.copyResults) and
 * closing the execution — verify callers before changing.
 */
public ResultSet runSPARQLQuery(String queryString) { Query query = QueryFactory.create(queryString); QueryExecution qe = QueryExecutionFactory.create(query, om); return qe.execSelect(); }
// NOTE(review): this fragment does not compile as written — the while loop has no
// braces, so `final QuerySolution soln = ...` is a local-variable declaration used
// as a bare loop body (illegal in Java), and `soln` would be out of scope in the
// following statement anyway. The loop body needs wrapping in { }. The fragment
// also appears truncated here (the catch block is not closed) — verify upstream.
try { final QueryExecution qexec = QueryExecutionFactory.sparqlService( sparqlEndpointURI, QueryFactory.create(query), auth(sparqlEndpointURI, user, password)); qexec.setTimeout(2000, 5000); final ResultSet results = qexec.execSelect() ; while (results.hasNext()) final QuerySolution soln = results.nextSolution() ; numRes = soln.getLiteral("count").getInt(); qexec.close() ; } catch (Exception exception) { LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
/**
 * Regression test: a COUNT over a property-path query must yield the same result
 * whether executed against the TDB dataset with union-default-graph enabled
 * (symUnionDefaultGraph) or against a plain in-memory model holding the union of
 * the two named graphs' contents (load2 + load3).
 */
@Test public void special4() { Dataset ds = create() ; load1(ds.getDefaultModel()) ; load2(ds.getNamedModel("http://example/graph1")) ; load3(ds.getNamedModel("http://example/graph2")) ; Model m = ModelFactory.createDefaultModel() ; load2(m) ; load3(m) ; TDB.sync(ds) ; String qs = "PREFIX : <"+baseNS+"> SELECT (COUNT(?x) as ?c) WHERE { ?x (:p1|:p2) 'x1' }" ; Query q = QueryFactory.create(qs, Syntax.syntaxARQ) ; QueryExecution qExec = QueryExecutionFactory.create(q, ds) ; qExec.getContext().set(TDB.symUnionDefaultGraph, true) ; long c1 = qExec.execSelect().next().getLiteral("c").getLong() ; qExec.close() ; qExec = QueryExecutionFactory.create(q, m) ; long c2 = qExec.execSelect().next().getLiteral("c").getLong() ; assertEquals(c1, c2) ; qExec.close() ; }
/**
 * Executes the configured query with the target variable bound to this
 * resource, and converts the result set into a list via processResults.
 *
 * @return the processed collection
 */
final List<T> parseCollection() {
    QuerySolutionMap bindings = new QuerySolutionMap();
    bindings.add(this.targetVariable, this.resource);
    QueryExecution execution = QueryExecutionFactory.create(this.query, this.model);
    execution.setInitialBinding(bindings);
    List<T> collected = null;
    try {
        ResultSet resultSet = execution.execSelect();
        collected = processResults(resultSet);
    } finally {
        execution.close();
    }
    return collected;
}
/**
 * Runs a CONSTRUCT query over inModel and appends the constructed triples
 * to outModel, optionally applying initial variable bindings.
 *
 * @param outModel    model receiving the constructed triples
 * @param queryString CONSTRUCT query text
 * @param inModel     model the query is evaluated against
 * @param bindings    initial bindings, or null for none
 */
public static void appendConstruct(Model outModel, String queryString, Model inModel, QuerySolution bindings) {
    Query parsed = QueryFactory.create(queryString);
    QueryExecution execution = QueryExecutionFactory.create(parsed, inModel);
    try {
        if (bindings != null) {
            execution.setInitialBinding(bindings);
        }
        execution.execConstruct(outModel);
    } finally {
        execution.close();
    }
}
// Fetches one page of triples from the endpoint (ORDER BY subject, fixed LIMIT,
// sliding OFFSET) and buffers the rows into sparqlIterator.
// NOTE(review): the QueryEngineHTTP is never closed; also `throw e` rethrows the
// caught Exception from run(), which only compiles via Java 7+ precise rethrow if
// everything thrown inside the try is unchecked — verify against the full class.
@Override public void run() { try{ System.out.println("Offset: "+ currentOffset); logger.info("next offset {}, size {}", currentOffset, endpointSize); String query = "SELECT DISTINCT * { ?s ?p ?o . } ORDER BY ASC(?s) LIMIT 10000 OFFSET " + currentOffset; QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint, query); qe.addParam("timeout","20000"); ResultSet rs = qe.execSelect(); while(rs.hasNext()){ sparqlIterator.add(rs.next()); } } catch (Exception e){ logger.error("Error parsing SPARQL Endpoint {}. Error message {}", sparqlEndPoint, e.getMessage()); throw e; } } };
/**
 * Asserts that the label-fetching query built by View.buildFetchLabelsQuery
 * CONSTRUCTs exactly the expected model when run against dataModel.
 *
 * @param properties label properties handed to the query builder
 * @param expected   model the CONSTRUCT result must be isomorphic to
 */
private void testBuildFetchLabelQuery(List<String> properties, Model expected) {
    Model m = modelFromTurtle(":it :p :x, :y, :z");
    View.State s = new View.State(null, m, null, null, null);
    Query q = QueryFactory.create(View.buildFetchLabelsQuery(s, properties));
    QueryExecution qx = QueryExecutionFactory.create(q, dataModel);
    Model resultModel = qx.execConstruct();
    ModelTestBase.assertIsoModels(expected, resultModel);
}
public boolean buildModelFromSparql(Model modelToQuery, String queryString) { boolean success = false; Query query = QueryFactory.create(queryString); //Model model = loadModelFromTurtle(loader, configPath); // Not sure if we want this to build its own model or not... //if (model != null) { QueryExecution qexec = QueryExecutionFactory.create(query, modelToQuery); Model resultModel = qexec.execDescribe(); qexec.close(); buildModelFromJena(resultModel, true); success = true; //} return success; }
private MatchResult queryForMatchResult(URI origin, URI destination, String queryStr) { MatchType type = LogicConceptMatchType.Fail; // Query the engine Query query = QueryFactory.create(queryStr); QueryExecution qe = QueryExecutionFactory.sparqlService(this.sparqlEndpoint.toASCIIString(), query); MonitoredQueryExecution qexec = new MonitoredQueryExecution(qe); try { Stopwatch stopwatch = new Stopwatch().start(); ResultSet qResults = qexec.execSelect(); stopwatch.stop(); log.debug("Time taken for querying the registry: {}", stopwatch); if (qResults.hasNext()) { QuerySolution soln = qResults.nextSolution(); type = getMatchType(soln); } log.debug("Concept {} was matched to {} with type {}", origin, destination, type); } finally { qexec.close(); } return new AtomicMatchResult(origin, destination, type, this); }
/**
 * Counts distinct subjects across the default graph and all named graphs
 * of the endpoint.
 *
 * @return number of distinct subjects reported by the endpoint
 * @throws Exception propagated from query execution
 */
@Override public Integer call() throws Exception {
    String query = "SELECT DISTINCT (count(?s) AS ?count) { { ?s ?p ?o . } UNION { GRAPH ?g { ?s ?p ?o .} } }";
    QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint,query);
    try {
        // FIX: read the count, then close the engine (it was never closed before).
        return qe.execSelect().next().get("count").asLiteral().getInt();
    } finally {
        qe.close();
    }
} });