// Parse the query string with ARQ extended syntax and execute it as a remote
// SELECT against this instance's configured endpoint.
Query query = QueryFactory.create(queryString, Syntax.syntaxARQ); QueryEngineHTTP qeh = new QueryEngineHTTP(this.endPoint, query);
// NOTE(review): qeh is never closed in this fragment — confirm the caller
// closes it after consuming resultSet, otherwise the HTTP connection leaks.
ResultSet resultSet = qeh.execSelect();
// Run the prepared query (pss) against the Bio2RDF DrugBank SPARQL endpoint
// and print the result table as text.
QueryEngineHTTP qe = new QueryEngineHTTP("http://drugbank.bio2rdf.org/sparql", pss.toString());
try {
    System.out.println(ResultSetFormatter.asText(qe.execSelect()));
} finally {
    // Fix: close in finally so the HTTP connection is released even when
    // execSelect() or result formatting throws (previously leaked on error).
    qe.close();
}
// Query DBpedia with a 5-second server-side timeout parameter.
// Fix: QueryExecutionFactory.sparqlService() is declared to return
// QueryExecution, so the result must be cast to QueryEngineHTTP before the
// HTTP-specific addParam() is available — the original did not compile.
QueryEngineHTTP objectToExec = (QueryEngineHTTP) QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", YOUR_QUERY);
objectToExec.addParam("timeout", "5000"); // 5 sec
resultset = objectToExec.execSelect();
// Fix: the endpoint URL was misspelled ("/sparq"), which yields an HTTP 404
// instead of query results — DBpedia's SPARQL endpoint is "/sparql".
QueryEngineHTTP qe = new QueryEngineHTTP("http://dbpedia.org/sparql", "select ...");
ResultSet rs = qe.execSelect();
void std_query() { String query = "select distinct ?super where {?super (^skos:broader){0,3} category:Nationalist_parties, category:New_Delhi}"; QueryEngineHTTP qe = new QueryEngineHTTP("http://dbpedia.org/sparql", query); try { com.hp.hpl.jena.query.ResultSet results = qe.execSelect(); while(results.hasNext()) { QuerySolution s=results.nextSolution(); String strg=s.getResource("?super").toString(); System.out.println("########### Standard Sparql Result #########"); System.out.println(strg); } //ResultSetFormatter.out(System.out, results, query); } finally { qe.close(); } }
/**
 * Executes a SELECT query against the read endpoint and hands the streamed
 * ResultSet to the supplied consumer; the HTTP engine is always closed,
 * whether or not the consumer throws.
 */
@Override
public void sparqlSelectQuery(String queryStr, ResultSetConsumer consumer) throws RDFServiceException {
    QueryEngineHTTP engine = new QueryEngineHTTP(readEndpointURI, queryStr);
    try {
        consumer.processResultSet(engine.execSelect());
    } finally {
        engine.close();
    }
}
// Restrict the query to the given named graph, execute it, and materialise
// the (normally forward-only) result set so it can be serialised to JSON.
queryExecution.addNamedGraph(ngu); org.apache.jena.query.ResultSet tmp = queryExecution.execSelect();
// makeRewindable copies all rows into memory, allowing re-iteration.
ResultSetRewindable rs2 = ResultSetFactory.makeRewindable(tmp); String json = SparqlQuery.convertResultSetToJSON(rs2);
/**
 * Executes the translated SELECT query against the configured SPARQL endpoint
 * (with HTTP basic authentication) and collects, for each marker variable,
 * the set of bound resource URIs, plus the union model built from all rows.
 *
 * @return the marker-to-URI map together with the merged result model
 */
@Override
public SPARQLExecutionResult call() {
    Map<String, Set<String>> resultSet = new HashMap<>();
    markers.forEach(marker -> resultSet.put(marker, new HashSet<>()));
    Model unionModel = ModelFactory.createDefaultModel();
    SPARQLServiceConverter converter = new SPARQLServiceConverter(schema);
    String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType);
    logger.info(sparqlQuery);
    // Basic-auth credentials for the endpoint, applied to any auth scope.
    CredentialsProvider credsProvider = new BasicCredentialsProvider();
    Credentials credentials = new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword());
    credsProvider.setCredentials(AuthScope.ANY, credentials);
    HttpClient httpclient = HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    // NOTE(review): this mutates process-wide Jena HTTP state; concurrent
    // tasks using different credentials would race — confirm single-endpoint use.
    HttpOp.setDefaultHttpClient(httpclient);
    Query jenaQuery = QueryFactory.create(sparqlQuery);
    QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery);
    qEngine.setClient(httpclient);
    try {
        ResultSet results = qEngine.execSelect();
        results.forEachRemaining(solution -> {
            markers.stream().filter(solution::contains).forEach(marker ->
                    resultSet.get(marker).add(solution.get(marker).asResource().getURI()));
            unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema));
        });
    } finally {
        // Fix: the engine was never closed, leaking the underlying HTTP
        // connection on every call (and on any exception while streaming).
        qEngine.close();
    }
    SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel);
    logger.info(sparqlExecutionResult);
    return sparqlExecutionResult;
}
/**
 * Executes the translated SELECT query against the configured SPARQL endpoint
 * (with HTTP basic authentication) and collects, for each marker variable,
 * the set of bound resource URIs, plus the union model built from all rows.
 *
 * @return the marker-to-URI map together with the merged result model
 */
@Override
public SPARQLExecutionResult call() {
    Map<String, Set<String>> resultSet = new HashMap<>();
    markers.forEach(marker -> resultSet.put(marker, new HashSet<>()));
    Model unionModel = ModelFactory.createDefaultModel();
    SPARQLServiceConverter converter = new SPARQLServiceConverter(schema);
    String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType);
    logger.info(sparqlQuery);
    // Basic-auth credentials for the endpoint, applied to any auth scope.
    CredentialsProvider credsProvider = new BasicCredentialsProvider();
    Credentials credentials = new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword());
    credsProvider.setCredentials(AuthScope.ANY, credentials);
    HttpClient httpclient = HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    // NOTE(review): this mutates process-wide Jena HTTP state; concurrent
    // tasks using different credentials would race — confirm single-endpoint use.
    HttpOp.setDefaultHttpClient(httpclient);
    Query jenaQuery = QueryFactory.create(sparqlQuery);
    QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery);
    qEngine.setClient(httpclient);
    try {
        ResultSet results = qEngine.execSelect();
        results.forEachRemaining(solution -> {
            markers.stream().filter(solution::contains).forEach(marker ->
                    resultSet.get(marker).add(solution.get(marker).asResource().getURI()));
            unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema));
        });
    } finally {
        // Fix: the engine was never closed, leaking the underlying HTTP
        // connection on every call (and on any exception while streaming).
        qEngine.close();
    }
    SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel);
    logger.info(sparqlExecutionResult);
    return sparqlExecutionResult;
}
/**
 * Counts distinct dbo:birthDate values on DBpedia, grouped by literal
 * datatype, and prints each result row.
 */
public static void main(String[] args) {
    String query = "SELECT ?dt (count(distinct ?o) AS ?cnt)\n" +
            " WHERE\n" +
            " { ?s <http://dbpedia.org/ontology/birthDate> ?o }\n" +
            " GROUP BY (datatype(?o) AS ?dt)";
    QueryEngineHTTP qe = new QueryEngineHTTP("http://dbpedia.org/sparql", query);
    try {
        qe.setDefaultGraphURIs(Collections.singletonList("http://dbpedia.org"));
        ResultSet rs = qe.execSelect();
        // Fix: the original printed only rs.next(), which throws
        // NoSuchElementException on an empty result set and silently drops all
        // remaining GROUP BY rows. Iterate instead.
        while (rs.hasNext()) {
            System.out.println(rs.next());
        }
    } finally {
        // Fix: close the engine; it was previously leaked.
        qe.close();
    }
}
/**
 * Counts all triples in the endpoint — default graph plus all named graphs —
 * and returns the total.
 *
 * @return the aggregate triple count reported by the endpoint
 */
@Override
public Integer call() throws Exception {
    String query = "SELECT DISTINCT (count(?s) AS ?count) { { ?s ?p ?o . } UNION { GRAPH ?g { ?s ?p ?o .} } }";
    QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint, query);
    try {
        // Single aggregate row: read the ?count binding as an int.
        return qe.execSelect().next().get("count").asLiteral().getInt();
    } finally {
        // Fix: close the engine; it was previously leaked on every invocation.
        qe.close();
    }
} });
/**
 * Pages through the whole endpoint (default graph union named graphs) in
 * 10000-row chunks ordered by subject, appending every solution to
 * sparqlIterator until the known endpoint size has been covered.
 */
public void run() {
    try {
        boolean start = true;
        do {
            // Last page reached: finish after this iteration.
            if (nextOffset >= endpointSize) start = false;
            logger.debug("[SPARQL Endpoint Processor - {}] Endpoint: {} => Next offset {}, Size {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint, nextOffset, endpointSize);
            String query = "SELECT * WHERE { { SELECT DISTINCT * { { ?s ?p ?o . } UNION { GRAPH ?g { ?s ?p ?o .} } } ORDER BY ASC(?s) } } LIMIT 10000 OFFSET " + nextOffset;
            QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint, query);
            try {
                qe.addParam("timeout", "10000");
                ResultSet rs = qe.execSelect();
                while (rs.hasNext()) {
                    sparqlIterator.add(rs.next());
                }
            } finally {
                // Fix: close the per-page engine — previously one HTTP
                // connection was leaked for every 10000-triple page fetched.
                qe.close();
            }
            // Advance by a full page, or by the remainder on the final page.
            nextOffset = ((endpointSize - nextOffset) > 10000) ? nextOffset + 10000 : nextOffset + (endpointSize - nextOffset);
        } while (start);
        logger.info("[SPARQL Endpoint Processor - {}] - Done Parsing Endpoint {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint);
    } catch (Exception e) {
        logger.error("[SPARQL Endpoint Processor - {}] - Error parsing SPARQL Endpoint {}. Error message {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint, e.getMessage());
        ExceptionOutput.output(e, "Exception thrown whilst fetching triples from SPARQL Endpoint: " + datasetLocation, logger);
        throw e;
    }
} };
// Execute the remote SELECT, then prepare an in-memory buffer — presumably
// for serialising the result set; verify against the surrounding code.
ResultSet resultSet = qeh.execSelect(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
// Execute the remote SELECT and copy the streamed rows into a rewindable,
// in-memory result set so they can be re-iterated after the HTTP response
// is consumed. HTTP-level failures are handled by the catch below.
ResultSet tmp = queryExecution.execSelect(); rs = ResultSetFactory.makeRewindable(tmp); } catch (HTTPException e) {
/**
 * Demo: queries the Kasabi "italy" dataset for every italy:Region resource
 * and prints each region's URI. The API key is read from the KASABI_API_KEY
 * environment variable and sent as an extra HTTP request parameter.
 */
public static void main(String[] args) {
    FileManager.get().addLocatorClassLoader(ExampleARQ_01.class.getClassLoader());
    String apikey = System.getenv("KASABI_API_KEY");
    String queryString =
            "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>"
            + "PREFIX italy: <http://data.kasabi.com/dataset/italy/schema/>"
            + "SELECT ?region WHERE { "
            + " ?region rdf:type italy:Region"
            + "}";
    Query query = QueryFactory.create(queryString);
    QueryEngineHTTP engine = (QueryEngineHTTP) QueryExecutionFactory.createServiceRequest("http://api.kasabi.com/dataset/italy/apis/sparql", query);
    engine.addParam("apikey", apikey);
    try {
        ResultSet rows = engine.execSelect();
        while (rows.hasNext()) {
            QuerySolution row = rows.nextSolution();
            Resource region = row.getResource("region");
            System.out.println(region.getURI());
        }
    } finally {
        engine.close();
    }
}
/**
 * Runs a SELECT query either against the local in-memory model (when one is
 * set) or remotely against the configured endpoint, applying the execution
 * timeout and the endpoint's default/named graph restrictions.
 *
 * @param query the SPARQL SELECT query string
 * @return the (lazily streamed) result set
 */
private ResultSet executeSelectQuery(String query) {
    if (model != null) {
        // Local evaluation — no HTTP involved.
        return QueryExecutionFactory.create(query, model).execSelect();
    }
    QueryEngineHTTP engine = new QueryEngineHTTP(endpoint.getURL().toString(), query);
    engine.setTimeout(maxExecutionTimeInSeconds * 1000);
    for (String graphUri : endpoint.getDefaultGraphURIs()) {
        engine.addDefaultGraph(graphUri);
    }
    for (String graphUri : endpoint.getNamedGraphURIs()) {
        engine.addNamedGraph(graphUri);
    }
    return engine.execSelect();
}
public String sparql(String subject) { // First query takes the most specific class from a given resource. String ontology_service = endpoint; String endpointsSparql = "select ?label where {<" + subject + "> <http://www.w3.org/2000/01/rdf-schema#label> ?label FILTER (lang(?label) = 'en')} LIMIT 100"; Query sparqlQuery = QueryFactory.create(endpointsSparql, Syntax.syntaxARQ); QueryEngineHTTP qexec = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(ontology_service, sparqlQuery); qexec.setModelContentType(WebContent.contentTypeRDFXML); ResultSet results = qexec.execSelect(); String property = null; while (results.hasNext()) { QuerySolution qs = results.next(); property = qs.getLiteral("?label").getLexicalForm(); } return property; }