/**
 * Prepares a tuple query against the wrapped connection.
 * Delegates preparation to the underlying connection, then decorates the
 * resulting {@link TupleQuery} via {@code wrap} before returning it.
 *
 * @param ql the query language of {@code query}
 * @param query the query string to prepare
 * @param baseURI the base URI used to resolve relative IRIs in the query
 * @return the wrapped, prepared tuple query
 * @throws MalformedQueryException if the query cannot be parsed
 * @throws RepositoryException if the delegate connection fails
 */
@Override
public TupleQuery prepareTupleQuery(QueryLanguage ql, String query, String baseURI)
        throws MalformedQueryException, RepositoryException {
    final TupleQuery prepared = delegate.prepareTupleQuery(ql, query, baseURI);
    return wrap(prepared);
}
// Delegate tuple-query preparation to the wrapped connection and decorate the result.
@Override
public TupleQuery prepareTupleQuery(final QueryLanguage ql, final String query, final String baseURI)
        throws MalformedQueryException, RepositoryException {
    return wrap(delegate.prepareTupleQuery(ql, query, baseURI));
}
/**
 * Executes the given SPARQL query against {@code conn} and asserts that the
 * number of results equals {@code expected}.
 *
 * @param query    the SPARQL query to run
 * @param expected the exact result count required
 * @throws Exception if preparation, evaluation, or the count check fails
 */
public void query(String query, int expected) throws Exception {
    // Log the query and its plan before executing, for debuggability.
    prettyPrintQuery(query);
    prettyPrintQueryPlan(query);

    final CountingResultHandler handler = new CountingResultHandler();
    final TupleQuery preparedQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    preparedQuery.evaluate(handler);

    Validate.isTrue(handler.getCount() == expected);
}
/**
 * Loads the LUBM sample data file, runs LUBM query 14 and verifies it returns
 * at least one result, then deletes every triple and verifies the store is empty.
 *
 * @param conn the repository connection to load and query through
 */
public static void testLubmFile(final SailRepositoryConnection conn)
        throws MalformedQueryException, RepositoryException, UpdateExecutionException,
        QueryEvaluationException, TupleQueryResultHandlerException, RDFParseException, IOException {
    final String query = LubmQuery.LUBM_QUERY_14.getSparqlQuery();
    log.info("Query to be Performed on LUBM Data :\n\n" + query + "\n");

    log.info("Adding LUBM Data from: " + LUBM_FILE.toAbsolutePath());
    addTriples(conn, LUBM_FILE.toFile(), RDFFormat.NTRIPLES);

    log.info("Executing LUBM Query");
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() > 0);
    resultHandler.resetCount();

    // Remove every triple so the follow-up check can confirm an empty store.
    final String deleteQuery = "DELETE WHERE { ?s ?p ?o }";
    log.info("Deleting LUBM Data");
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, deleteQuery);
    update.execute();

    final String selectAllQuery = "SELECT * WHERE { ?s ?p ?o }";
    log.info("Confirming LUBM Data Cleared");
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, selectAllQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
}
// DEFECT FIXED: the collapsed one-line form placed the executable statements
// after a `//` marker, commenting them out. Line structure restored below.
// (Two identical occurrences were present on the original line; both kept.)
+ "}";// trailing comment keeps auto-formatters from joining the string lines

// Prepare and evaluate the query built above, counting its results.
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);

+ "}";//

tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);
@Override public TupleQueryResult executeSparqlQuery(final String ryaInstanceName, final String sparqlQuery) throws InstanceDoesNotExistException, RyaClientException { requireNonNull(ryaInstanceName); requireNonNull(sparqlQuery); // Ensure the Rya Instance exists. if (!instanceExists.exists(ryaInstanceName)) { throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName)); } try { // Get a Sail object that is connected to the Rya instance. final MongoDBRdfConfiguration ryaConf = connectionDetails.build(ryaInstanceName); sail = RyaSailFactory.getInstance(ryaConf); final SailRepository sailRepo = new SailRepository(sail); sailRepoConn = sailRepo.getConnection(); // Execute the query. final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery); return tupleQuery.evaluate(); } catch (SailException | RyaDAOException | InferenceEngineException | AccumuloException | AccumuloSecurityException e) { throw new RyaClientException("Could not create the Sail object used to query the RYA instance.", e); } catch (final MalformedQueryException | QueryEvaluationException | RepositoryException e) { throw new RyaClientException("Could not execute the SPARQL query.", e); } }
// Baseline: run the query that only needs explicitly stored triples.
log.info("Running Explicit Query");
final CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Now the query that depends on the inference engine; counter reset first.
log.info("Running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Re-run the inference query.
// NOTE(review): presumably exercises repeated evaluation of the same
// inference-backed query — confirm the intent against the enclosing test.
log.info("Re-running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Baseline: run the query that only needs explicitly stored triples.
log.info("Running Explicit Query");
CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Query that depends on the inference engine; counter reset first.
log.info("Running Inference-dependant Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Second run of the inference-dependent query.
// NOTE(review): the resetCount() here is redundant — the handler is
// immediately replaced by a fresh instance on the next line.
resultHandler.resetCount();
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Evaluate the query once and log the running count.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Evaluate the same query a second time.
// NOTE(review): resultHandler is not reset between evaluations, so this
// second logged count accumulates both runs — confirm that is intended.
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Evaluate the query once and log the running count.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Evaluate the same query a second time.
// NOTE(review): resultHandler is not reset between evaluations, so this
// second logged count accumulates both runs — confirm that is intended.
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Evaluate the previously-built query and log its result count.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Follow-up: look up urn:greatMother objects in the named graph, using a
// fresh handler so the count starts from zero.
query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Evaluate the previously-built query and log its result count.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Follow-up: look up urn:greatMother objects in the named graph, using a
// fresh handler so the count starts from zero.
query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Continuation of a SPARQL query string built above this fragment.
+ " ?x <http://acme.com/actions/likes> \"Avocados\" }}";

// Evaluate the query once and log the count.
final CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Evaluate the same query a second time.
// NOTE(review): the handler is not reset, so this count accumulates both
// runs — confirm that is intended.
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException { // Add data String query = "INSERT DATA\n"// + "{ \n"// + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. " + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }"; log.info("Performing Query"); final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); update.execute(); // refresh the graph for inferencing (otherwise there is a five minute wait) ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph(); query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }"; final CountingResultHandler resultHandler = new CountingResultHandler(); final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); tupleQuery.evaluate(resultHandler); log.info("Result count : " + resultHandler.getCount()); Validate.isTrue(resultHandler.getCount() == 1); resultHandler.resetCount(); }
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException { // Add data String query = "INSERT DATA\n"// + "{ \n"// + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. " + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }"; log.info("Performing Query"); final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); update.execute(); // refresh the graph for inferencing (otherwise there is a five minute wait) ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph(); query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }"; final CountingResultHandler resultHandler = new CountingResultHandler(); final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); tupleQuery.evaluate(resultHandler); log.info("Result count : " + resultHandler.getCount()); Validate.isTrue(resultHandler.getCount() == 1); resultHandler.resetCount(); }
// Evaluate the previously-built query and log its result count.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Look up urn:greatGrandfather objects in the named graph (fresh handler).
query = "select ?p { GRAPH <http://updated/test> {<urn:paulGreatGrandfather> <urn:greatGrandfather> ?p}}";
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());

// Look up all urn:grandfather pairs in the named graph (fresh handler).
query = "select ?s ?p { GRAPH <http://updated/test> {?s <urn:grandfather> ?p}}";
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
// Evaluate the query, then evaluate it again before logging.
// NOTE(review): the handler is not reset between the two evaluations, so the
// single logged count covers both runs — confirm that is intended.
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
private static void testDeleteTemporalData( final SailRepositoryConnection conn) throws Exception { // Delete all stored dates final String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n" + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"// + "DELETE {\n" // + " ?event time:inXSDDateTime ?time . \n" + "}\n" + "WHERE { \n" + " ?event time:inXSDDateTime ?time . \n"// + "}";// final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete); deleteUpdate.execute(); // Find all stored dates. final String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"// + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"// + "SELECT ?event ?time \n" // + "WHERE { \n" + " ?event time:inXSDDateTime ?time . \n"// + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after // 3 // seconds + "}";// final CountingResultHandler tupleHandler = new CountingResultHandler(); final TupleQuery tupleQuery = conn.prepareTupleQuery( QueryLanguage.SPARQL, queryString); tupleQuery.evaluate(tupleHandler); log.info("Result count : " + tupleHandler.getCount()); Validate.isTrue(tupleHandler.getCount() == 0); }
@Override public TupleQueryResult executeSparqlQuery(final String ryaInstanceName, final String sparqlQuery) throws InstanceDoesNotExistException, RyaClientException { requireNonNull(ryaInstanceName); requireNonNull(sparqlQuery); // Ensure the Rya Instance exists. if(!instanceExists.exists(ryaInstanceName)) { throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName)); } try { // Get a Sail object that is connected to the Rya instance. final AccumuloRdfConfiguration ryaConf = getAccumuloConnectionDetails().buildAccumuloRdfConfiguration(ryaInstanceName); sail = RyaSailFactory.getInstance(ryaConf); sailRepo = new SailRepository(sail); sailRepoConn = sailRepo.getConnection(); // Execute the query. final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery); return tupleQuery.evaluate(); } catch (final SailException | AccumuloException | AccumuloSecurityException | RyaDAOException | InferenceEngineException e) { throw new RyaClientException("A problem connecting to the Rya instance named '" + ryaInstanceName + "' has caused the query to fail.", e); } catch (final MalformedQueryException e) { throw new RyaClientException("There was a problem parsing the supplied query.", e); } catch (final QueryEvaluationException e) { throw new RyaClientException("There was a problem evaluating the supplied query.", e); } catch (final RepositoryException e) { throw new RyaClientException("There was a problem executing the query against the Rya instance named " + ryaInstanceName + ".", e); } }
// DEFECT FIXED: the collapsed one-line form placed the executable statements
// after a `//` marker, commenting them out. Line structure restored below.
// Continuation of a SPARQL query string built above this fragment.
+ " ?person a <http://example.org/ontology/Person> . "//
+ "}";//

// Prepare and evaluate the query, counting its results.
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);