// Collected usage examples of Apache Jena's QueryEngineHTTP / QueryExecutionFactory (one snippet per line).
/**
 * Runs a SPARQL SELECT query, either in-memory (when a local model is set)
 * or remotely over HTTP against the configured endpoint.
 *
 * @param query the SPARQL SELECT query string
 * @return the result set produced by the execution
 */
private ResultSet executeSelectQuery(String query) {
    if (model != null) {
        // A local model is available: evaluate the query in-memory.
        return QueryExecutionFactory.create(query, model).execSelect();
    }
    // Remote execution: configure the HTTP engine with the endpoint's graphs.
    QueryEngineHTTP httpExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query);
    httpExecution.setTimeout(maxExecutionTimeInSeconds * 1000);
    for (String defaultGraph : endpoint.getDefaultGraphURIs()) {
        httpExecution.addDefaultGraph(defaultGraph);
    }
    for (String namedGraph : endpoint.getNamedGraphURIs()) {
        httpExecution.addNamedGraph(namedGraph);
    }
    return httpExecution.execSelect();
}
/** Create a QueryExecution that will access a SPARQL service over HTTP * @param service URL of the remote service * @param query Query to execute * @param defaultGraphURIs List of URIs to make up the default graph * @param namedGraphURIs List of URIs to make up the named graphs * @param client HTTP client * @param httpContext HTTP Context * @return QueryExecution */ static public QueryExecution sparqlService(String service, Query query, List<String> defaultGraphURIs, List<String> namedGraphURIs, HttpClient client, HttpContext httpContext) { checkNotNull(service, "URL for service is null") ; // checkNotNull(defaultGraphURIs, "List of default graph URIs is null") ; // checkNotNull(namedGraphURIs, "List of named graph URIs is null") ; checkArg(query) ; QueryEngineHTTP qe = createServiceRequest(service, query, client) ; if ( defaultGraphURIs != null ) qe.setDefaultGraphURIs(defaultGraphURIs) ; if ( namedGraphURIs != null ) qe.setNamedGraphURIs(namedGraphURIs) ; return qe ; }
/**
 * Builds a SPARQL query execution against the configured {@code ENDPOINT},
 * passing the API timeout to the service as an HTTP request parameter.
 *
 * @param queryString the SPARQL query text
 * @return a ready-to-run QueryExecution (caller is responsible for closing it)
 */
public QueryExecution executeQuery(String queryString) {
    // Fix: parameterized logging instead of eager string concatenation, so the
    // message is only built when INFO is enabled.
    // NOTE(review): assumes an SLF4J/Log4j2-style logger supporting "{}" — confirm.
    logger.info("SPARQL Query is:\n{}", queryString);
    Query query = QueryFactory.create(queryString);
    QueryEngineHTTP queryEngine = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(ENDPOINT, query);
    // "timeout" is forwarded to the endpoint as a plain query parameter.
    queryEngine.addParam("timeout", String.valueOf(Constants.API_TIMEOUT));
    return queryEngine;
}
/**
 * Executes a SPARQL CONSTRUCT query against the read endpoint, adding the
 * constructed triples to the supplied model.
 *
 * @param queryStr the CONSTRUCT query text
 * @param model    target model receiving the constructed statements
 */
@Override
public void sparqlConstructQuery(String queryStr, Model model) throws RDFServiceException {
    QueryEngineHTTP engine = new QueryEngineHTTP(readEndpointURI, queryStr);
    try {
        engine.execConstruct(model);
    } finally {
        // Always release the underlying HTTP resources.
        engine.close();
    }
}
// Fix: QueryExecutionFactory.sparqlService(...) is declared to return
// QueryExecution, so assigning to QueryEngineHTTP requires an explicit cast —
// the original snippet did not compile (every other example here casts).
QueryEngineHTTP objectToExec =
        (QueryEngineHTTP) QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", YOUR_QUERY);
objectToExec.addParam("timeout", "5000"); //5 sec
resultset = objectToExec.execSelect();
// Fix: the endpoint URL was "http://dbpedia.org/sparq" (missing the trailing
// 'l'), which points at a non-existent service; corrected to /sparql.
QueryEngineHTTP qe = new QueryEngineHTTP("http://dbpedia.org/sparql", "select ...");
ResultSet rs = qe.execSelect();
@Override protected QueryExecution createQueryExecution(Query q) throws SQLException { if (this.remoteConn.getQueryEndpoint() == null) throw new SQLException("This statement is backed by a write-only connection, read operations are not supported"); // Create basic execution QueryEngineHTTP exec = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(this.remoteConn.getQueryEndpoint(), q); // Apply HTTP settings if (this.client != null) { exec.setClient(client); } // Apply default and named graphs if appropriate if (this.remoteConn.getDefaultGraphURIs() != null) { exec.setDefaultGraphURIs(this.remoteConn.getDefaultGraphURIs()); } if (this.remoteConn.getNamedGraphURIs() != null) { exec.setNamedGraphURIs(this.remoteConn.getNamedGraphURIs()); } // Set result types if (this.remoteConn.getSelectResultsType() != null) { exec.setSelectContentType(this.remoteConn.getSelectResultsType()); } if (this.remoteConn.getModelResultsType() != null) { exec.setModelContentType(this.remoteConn.getModelResultsType()); } // Return execution return exec; }
public String sparql(String subject) { // First query takes the most specific class from a given resource. String ontology_service = endpoint; String endpointsSparql = "select ?label where {<" + subject + "> <http://www.w3.org/2000/01/rdf-schema#label> ?label FILTER (lang(?label) = 'en')} LIMIT 100"; Query sparqlQuery = QueryFactory.create(endpointsSparql, Syntax.syntaxARQ); QueryEngineHTTP qexec = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(ontology_service, sparqlQuery); qexec.setModelContentType(WebContent.contentTypeRDFXML); ResultSet results = qexec.execSelect(); String property = null; while (results.hasNext()) { QuerySolution qs = results.next(); property = qs.getLiteral("?label").getLexicalForm(); } return property; }
// Pages through the SPARQL endpoint 10000 solutions at a time, buffering every
// row into sparqlIterator.  The paged query unions the default graph with all
// named graphs and orders by ?s so OFFSET-based paging is stable.  Each request
// carries a per-query "timeout" parameter of 10000 ms.  The final iteration
// clamps the step so nextOffset lands exactly on endpointSize.
// NOTE(review): the "start" flag is cleared at the TOP of the loop but only
// tested at the bottom, so one more page is fetched after nextOffset reaches
// endpointSize — presumably to flush the last (possibly empty) page; confirm.
// NOTE(review): each QueryEngineHTTP is never closed — consider try/finally.
// Errors are logged, reported via ExceptionOutput, and rethrown.
public void run(){ try{ boolean start = true; do{ if (nextOffset >= endpointSize) start = false; logger.debug("[SPARQL Endpoint Processor - {}] Endpoint: {} => Next offset {}, Size {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint, nextOffset, endpointSize); String query = "SELECT * WHERE { { SELECT DISTINCT * { { ?s ?p ?o . } UNION { GRAPH ?g { ?s ?p ?o .} } } ORDER BY ASC(?s) } } LIMIT 10000 OFFSET " + nextOffset; QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint, query); qe.addParam("timeout","10000"); ResultSet rs = qe.execSelect(); while(rs.hasNext()){ sparqlIterator.add(rs.next()); } nextOffset = ((endpointSize - nextOffset) > 10000) ? nextOffset + 10000 : nextOffset + (endpointSize - nextOffset); } while(start); logger.info("[SPARQL Endpoint Processor - {}] - Done Parsing Endpoint {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint); } catch (Exception e){ logger.error("[SPARQL Endpoint Processor - {}] - Error parsing SPARQL Endpoint {}. Error message {}", (new io.github.luzzu.operations.lowlevel.Date()).getDate(), sparqlEndPoint, e.getMessage()); ExceptionOutput.output(e, "Exception thrown whilst fetching triples from SPARQL Endpoint: "+ datasetLocation, logger); throw e; } } };
/**
 * Executes a SPARQL SELECT query against the read endpoint and hands the
 * streaming result set to the supplied consumer.  The consumer must fully
 * process the results before this method returns, because the underlying
 * HTTP execution is closed afterwards.
 *
 * @param queryStr the SELECT query text
 * @param consumer callback that processes the result set
 */
@Override
public void sparqlSelectQuery(String queryStr, ResultSetConsumer consumer) throws RDFServiceException {
    QueryEngineHTTP engine = new QueryEngineHTTP(readEndpointURI, queryStr);
    try {
        consumer.processResultSet(engine.execSelect());
    } finally {
        engine.close();
    }
}
/**
 * Counts the distinct subjects available at the endpoint, across the default
 * graph unioned with all named graphs.
 *
 * @return the subject count reported by the endpoint
 */
@Override
public Integer call() throws Exception {
    String query = "SELECT DISTINCT (count(?s) AS ?count) { { ?s ?p ?o . } UNION { GRAPH ?g { ?s ?p ?o .} } }";
    QueryEngineHTTP qe = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(sparqlEndPoint, query);
    try {
        // An aggregate query always yields exactly one solution row.
        return qe.execSelect().next().get("count").asLiteral().getInt();
    } finally {
        // Fix: the HTTP execution was never closed, leaking the connection.
        qe.close();
    }
} });
/**
 * Demo: queries the Kasabi "italy" dataset for every resource typed as
 * italy:Region and prints each region's URI.  The API key is read from the
 * KASABI_API_KEY environment variable and sent as an "apikey" request param.
 */
public static void main(String[] args) {
    FileManager.get().addLocatorClassLoader(ExampleARQ_01.class.getClassLoader());
    String apikey = System.getenv("KASABI_API_KEY");
    String queryString = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>"
            + "PREFIX italy: <http://data.kasabi.com/dataset/italy/schema/>"
            + "SELECT ?region WHERE { "
            + " ?region rdf:type italy:Region"
            + "}";
    Query query = QueryFactory.create(queryString);
    QueryEngineHTTP execution = (QueryEngineHTTP) QueryExecutionFactory
            .createServiceRequest("http://api.kasabi.com/dataset/italy/apis/sparql", query);
    execution.addParam("apikey", apikey);
    try {
        ResultSet results = execution.execSelect();
        while (results.hasNext()) {
            QuerySolution solution = results.nextSolution();
            Resource region = solution.getResource("region");
            System.out.println(region.getURI());
        }
    } finally {
        execution.close();
    }
}
/**
 * Demo: counts distinct dbo:birthDate values on DBpedia grouped by datatype
 * and prints the first solution row, if any.
 */
public static void main(String[] args) {
    String query = "SELECT ?dt (count(distinct ?o) AS ?cnt)\n" +
            "  WHERE\n" +
            "    { ?s  <http://dbpedia.org/ontology/birthDate>  ?o }\n" +
            "GROUP BY (datatype(?o) AS ?dt)";
    QueryEngineHTTP qe = new QueryEngineHTTP("http://dbpedia.org/sparql", query);
    qe.setDefaultGraphURIs(Collections.singletonList("http://dbpedia.org"));
    try {
        ResultSet rs = qe.execSelect();
        // Fix: guard the read so an empty result set no longer makes next()
        // throw NoSuchElementException.
        if (rs.hasNext()) {
            System.out.println(rs.next());
        }
    } finally {
        // Fix: close the HTTP execution to release the connection.
        qe.close();
    }
}
// Fragment (truncated — the enclosing if-block's closing brace lies beyond this
// excerpt).  Looks up the per-service Context registered in the engine's
// context under Service.serviceContext and, when an entry exists for this
// serviceURI, applies its settings to the engine: compression is enabled
// unless explicitly disabled via Service.queryCompression, service-specific
// timeouts are applied via applyServiceTimeouts, and a per-service HttpClient
// (possibly null) is installed with setClient.
Map<String, Context> serviceContextMap = (Map<String, Context>) engine.context.get(Service.serviceContext); if (serviceContextMap != null && serviceContextMap.containsKey(serviceURI)) { Context serviceContext = serviceContextMap.get(serviceURI); engine.setAllowCompression(serviceContext.isTrueOrUndef(Service.queryCompression)); applyServiceTimeouts(engine, serviceContext); HttpClient client = serviceContext.get(Service.queryClient); engine.setClient(client);
// Executes the converter-generated SELECT query against a password-protected
// SPARQL endpoint.  For every solution it records, per "marker" variable, the
// set of resource URIs bound to that variable, and merges the per-solution
// model fragments into a single union model; both are returned wrapped in a
// SPARQLExecutionResult.
// NOTE(review): HttpOp.setDefaultHttpClient(...) replaces the process-wide
// default client with one carrying these credentials — this leaks auth state
// into every other Jena HTTP call in the JVM; confirm this is intended rather
// than passing the client to the engine only.
// NOTE(review): qEngine is never closed — consider try/finally around the
// result iteration.
@Override public SPARQLExecutionResult call() { Map<String, Set<String>> resultSet = new HashMap<>(); markers.forEach(marker -> resultSet.put(marker, new HashSet<>())); Model unionModel = ModelFactory.createDefaultModel(); SPARQLServiceConverter converter = new SPARQLServiceConverter(schema); String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType); logger.info(sparqlQuery); CredentialsProvider credsProvider = new BasicCredentialsProvider(); Credentials credentials = new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword()); credsProvider.setCredentials(AuthScope.ANY, credentials); HttpClient httpclient = HttpClients.custom() .setDefaultCredentialsProvider(credsProvider) .build(); HttpOp.setDefaultHttpClient(httpclient); Query jenaQuery = QueryFactory.create(sparqlQuery); QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery); qEngine.setClient(httpclient); ResultSet results = qEngine.execSelect(); results.forEachRemaining(solution -> { markers.stream().filter(solution::contains).forEach(marker -> resultSet.get(marker).add(solution.get(marker).asResource().getURI())); unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema)); }); SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel); logger.info(sparqlExecutionResult); return sparqlExecutionResult; }
// Fragment of a fluent builder chain (its head and receiver lie outside this
// excerpt): targets the DBpedia endpoint with default graph
// http://dbpedia.org, installs a post-processor that unwraps the
// QueryExecutionHttpWrapper to reach the underlying QueryEngineHTTP and forces
// RDF/XML as the model (CONSTRUCT/DESCRIBE) response content type, and
// attaches the given cache frontend before ending the config section.
.http("http://dbpedia.org/sparql", Lists.newArrayList("http://dbpedia.org")) .config().withPostProcessor(qe -> ((QueryEngineHTTP) ((QueryExecutionHttpWrapper) qe).getDecoratee()) .setModelContentType(WebContent.contentTypeRDFXML)) .withCache(cacheFrontend) .end()
/**
 * Applies the dataset description's default and named graph URIs to the given
 * HTTP query engine, then wraps it for use as a plain QueryExecution.
 * NOTE(review): the method name carries a triple 's' ("postProcesss"); kept
 * as-is to avoid breaking callers — consider a deprecating rename.
 *
 * @param qe the engine to configure
 * @return the configured engine wrapped in a QueryExecutionHttpWrapper
 */
public QueryExecution postProcesss(QueryEngineHTTP qe) {
    qe.setDefaultGraphURIs(datasetDescription.getDefaultGraphURIs());
    qe.setNamedGraphURIs(datasetDescription.getNamedGraphURIs());
    return new QueryExecutionHttpWrapper(qe);
}
/**
 * Performs a SPARQL ASK query against the knowledge base. The query may have
 * an embedded graph identifier.
 *
 * @param queryStr the SPARQL query to be executed against the RDF store
 * @return the boolean result of the ASK query
 */
@Override
public boolean sparqlAskQuery(String queryStr) throws RDFServiceException {
    QueryEngineHTTP engine = new QueryEngineHTTP(readEndpointURI, queryStr);
    try {
        return engine.execAsk();
    } finally {
        // Always release the underlying HTTP resources.
        engine.close();
    }
}
// Works around malformed character entities in an endpoint's RDF/XML CONSTRUCT
// response: rebuilds the GET URL from the QueryEngineHTTP's toString() output
// (a hack — relies on its "GET <url>" debug format), issues the request
// manually requesting RDF/XML, strips every "&#" sequence line by line, and
// parses the cleaned document into a fresh in-memory model.
// NOTE(review): deleting "&#" also mangles *valid* numeric references (e.g.
// "&#233;" becomes "233;"), and a reference split across a readLine boundary
// cannot occur here but line-internal ones are all hit — acceptable only as a
// last-resort cleanup; confirm the target endpoint really emits bad entities.
private Model constructWithReplacement(SparqlEndpoint endpoint, String query) throws Exception{ QueryEngineHTTP qe = new QueryEngineHTTP(endpoint.getURL().toString(), query); qe.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); String request = qe.toString().replace("GET ", ""); URL url = new URL(request); java.net.HttpURLConnection conn = (java.net.HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); conn.addRequestProperty(HttpNames.hAccept, WebContent.contentTypeRDFXML); try(BufferedReader rdr = new BufferedReader(new InputStreamReader(conn.getInputStream()))) { Model model = ModelFactory.createDefaultModel(); String buf = null; StringBuilder doc = new StringBuilder(); while ((buf = rdr.readLine()) != null) { // Apply regex on buf if(buf.contains("&#")) { buf = buf.replace("&#", ""); } // build output doc.append(buf); } try(InputStream is = new ByteArrayInputStream(doc.toString().getBytes(StandardCharsets.UTF_8))) { model.read(is, null); } return model; } }
// Core private constructor shared by the public constructors/factories.
// Order matters here:
//   1. the global ARQ context is copied so per-execution changes stay local;
//   2. applyServiceConfig(...) pulls in any per-endpoint service settings
//      registered in the context (see the service-context lookup elsewhere in
//      this class);
//   3. an explicitly supplied HttpClient / HttpContext is applied LAST so it
//      overrides whatever the service configuration set — and each is skipped
//      when null so service-context values are preserved.
private QueryEngineHTTP(String serviceURI, Query query, String queryString, HttpClient client, HttpContext httpContext) { this.query = query; this.queryString = queryString; this.service = serviceURI; this.context = ARQ.getContext().copy(); // Apply service configuration if relevant applyServiceConfig(serviceURI, this); // Don't want to overwrite client config we may have picked up from // service context in the parent constructor if the specified // client is null if (client != null) setClient(client); if (httpContext != null) setHttpContext(httpContext); }