@Override public void handle(String baseIRI, HttpResponse response) throws IOException { String ct = contentType(response); ResultsFormat fmt = contentTypeToResultsFormat(ct); InputStream in = response.getEntity().getContent(); rs = ResultSetFactory.load(in, fmt); // Force reading rs = ResultSetFactory.copyResults(rs); }
/**
 * Loads result set from an RDF dataset using a SPARQL query.
 * Only <code>SELECT</code> queries can be used with this method.
 *
 * @param dataset the RDF dataset to be queried
 * @param query query object
 * @return result set
 * @throws IllegalArgumentException if {@code dataset} or {@code query} is null
 * @throws QueryExecException if the query is not a SELECT or execution fails
 * @see <a href="http://www.w3.org/TR/2013/REC-sparql11-query-20130321/#select">SELECT</a>
 */
public ResultSetRewindable loadResultSet(Dataset dataset, Query query) {
    if (log.isDebugEnabled()) log.debug("Local Dataset Query: {}", query);
    if (dataset == null) throw new IllegalArgumentException("Dataset must be not null");
    if (query == null) throw new IllegalArgumentException("Query must be not null");
    try (QueryExecution qex = QueryExecutionFactory.create(query, dataset)) {
        if (query.isSelectType())
            // Copy so the results outlive the (auto-closed) execution.
            return ResultSetFactory.copyResults(qex.execSelect());
        throw new QueryExecException("Query to load ResultSet must be SELECT");
    } catch (QueryExecException ex) {
        // Pass the throwable as the last SLF4J argument so the stack trace is
        // logged; the original "{}" placeholder logged only ex.toString().
        if (log.isDebugEnabled()) log.debug("Local query execution exception", ex);
        throw ex;
    }
}
/**
 * Executes a SELECT query against the configured endpoint with a timeout,
 * materialising the results before the execution is closed.
 *
 * @param queryString  the SPARQL SELECT query text
 * @param timeout      timeout value
 * @param timeoutUnits unit of the timeout value
 * @return a fully materialised copy of the results
 * @throws QueryExceptionHTTP wrapping HTTP-level failures, with endpoint context
 * @throws RuntimeException   wrapping any other failure, with endpoint context
 */
protected ResultSet executeSelectQuery(String queryString, long timeout, TimeUnit timeoutUnits){
    if (logger.isTraceEnabled()) {
        logger.trace("Sending query \n {}", queryString);
    }
    try (QueryExecution execution = qef.createQueryExecution(queryString)) {
        execution.setTimeout(timeout, timeoutUnits);
        // Copy eagerly: the streaming ResultSet dies with the execution.
        return ResultSetFactory.copyResults(execution.execSelect());
    } catch (QueryExceptionHTTP e) {
        throw new QueryExceptionHTTP("Error sending query \"" + queryString + "\" to endpoint " + qef.getId(), e);
    } catch (Exception e) {
        throw new RuntimeException("Error sending query \"" + queryString + "\" to endpoint " + qef.getId(), e);
    }
}
/**
 * Runs a SELECT query (with standard prefixes prepended) over a model,
 * optionally pre-binding a single variable, and returns materialised results.
 *
 * @param string  query body, without the standard prefixes
 * @param m       model to query
 * @param varName variable to pre-bind, or null for none
 * @param value   value for the pre-bound variable, or null for none
 * @return a rewindable copy of the SELECT results
 */
public static ResultSet query(String string, Model m, String varName, RDFNode value) {
    Query query = QueryFactory.create(FusekiConst.PREFIXES + string);
    // Pre-bind only when both a name and a value were supplied.
    QuerySolutionMap initValues =
        (varName == null || value == null) ? null : querySolution(varName, value);
    try (QueryExecution qExec = QueryExecutionFactory.create(query, m, initValues)) {
        ResultSet selected = qExec.execSelect();
        return ResultSetFactory.copyResults(selected);
    }
}
/**
 * Runs a SELECT query (with standard prefixes prepended) over a dataset,
 * optionally pre-binding a single variable, and returns materialised results.
 *
 * @param string  query body, without the standard prefixes
 * @param ds      dataset to query
 * @param varName variable to pre-bind, or null for none
 * @param value   value for the pre-bound variable, or null for none
 * @return a rewindable copy of the SELECT results
 */
public static ResultSet query(String string, Dataset ds, String varName, RDFNode value) {
    Query query = QueryFactory.create(FusekiConst.PREFIXES + string);
    // Pre-bind only when both a name and a value were supplied.
    QuerySolutionMap initValues =
        (varName == null || value == null) ? null : querySolution(varName, value);
    try (QueryExecution qExec = QueryExecutionFactory.create(query, ds, initValues)) {
        ResultSet selected = qExec.execSelect();
        return ResultSetFactory.copyResults(selected);
    }
}
/**
 * Given a dataset to query, a rule name, and the query string, execute the query over the
 * dataset.
 *
 * @param dataset Dataset to query
 * @param ruleName name of rule to verify
 * @param query the SPARQL query string
 * @return true if there are results, false otherwise
 * @throws IOException on query parse error
 */
public static boolean execVerify(Dataset dataset, String ruleName, String query) throws IOException {
    ResultSetRewindable results = ResultSetFactory.copyResults(execQuery(dataset, query));
    int violations = results.size();
    // Always report the raw count first, then the PASS/FAIL verdict.
    System.out.println("Rule " + ruleName + ": " + violations + " violation(s)");
    if (violations != 0) {
        // Violation rows go to stderr as CSV; the verdict stays on stdout.
        ResultSetMgr.write(System.err, results, Lang.CSV);
        System.out.println("FAIL Rule " + ruleName + ": " + violations + " violation(s)");
        return true;
    }
    System.out.println("PASS Rule " + ruleName + ": 0 violation(s)");
    return false;
}
/**
 * Executes a prefixed SELECT query against a dataset and returns a copy of
 * the results; a single variable may optionally be pre-bound.
 *
 * @param string  query text to which the standard prefixes are prepended
 * @param ds      target dataset
 * @param varName name of the variable to bind, or null
 * @param value   binding value, or null
 * @return materialised (rewindable) results
 */
public static ResultSet query(String string, Dataset ds, String varName, RDFNode value) {
    Query query = QueryFactory.create(FusekiConst.PREFIXES + string);
    QuerySolutionMap initValues = null;
    if (varName != null && value != null) {
        initValues = querySolution(varName, value);
    }
    try (QueryExecution qExec = QueryExecutionFactory.create(query, ds, initValues)) {
        // Copy before the execution auto-closes and invalidates the stream.
        return ResultSetFactory.copyResults(qExec.execSelect());
    }
}
/**
 * Executes a prefixed SELECT query against a model and returns a copy of
 * the results; a single variable may optionally be pre-bound.
 *
 * @param string  query text to which the standard prefixes are prepended
 * @param m       target model
 * @param varName name of the variable to bind, or null
 * @param value   binding value, or null
 * @return materialised (rewindable) results
 */
public static ResultSet query(String string, Model m, String varName, RDFNode value) {
    Query query = QueryFactory.create(FusekiConst.PREFIXES + string);
    QuerySolutionMap initValues = null;
    if (varName != null && value != null) {
        initValues = querySolution(varName, value);
    }
    try (QueryExecution qExec = QueryExecutionFactory.create(query, m, initValues)) {
        // Copy before the execution auto-closes and invalidates the stream.
        return ResultSetFactory.copyResults(qExec.execSelect());
    }
}
// Report violations as CSV.
outputFormat = Lang.CSV;
// Materialise the SELECT results so they can be counted and re-read.
ResultSetRewindable results = ResultSetFactory.copyResults(execQuery(dataset, query));
// No rows means the rule found no violations.
if (results.size() == 0) {
    System.out.println("PASS Rule " + ruleName + ": 0 violation(s)");
// Parses a SPARQL-JSON results file and runs it through the shared test helper.
@Test
public void resultSet_10() {
    try (InputStream in = IO.openFile(DIR+"/results-1.srj")) {
        ResultSet rs = ResultSetFactory.fromJSON(in) ;
        // Copy to a rewindable result set before handing to the checker.
        test(ResultSetFactory.copyResults(rs)) ;
    } catch (IOException ex) {
        IO.exception(ex) ;
    }
}
// Closes the enclosing class (declared before this chunk).
}
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// NOTE(review): unlike the size/reset variants elsewhere in this file, no
// reset() before reuse — confirm size() leaves the cursor at the start.
rs = rsrw;
/**
 * Round-trips the fixture result set {@code rs1$} through the given results
 * language under the given context, then asserts that the reread results are
 * exactly equal (when {@code same}) or not exactly equal (when {@code !same})
 * to the original.
 */
private static void preserve_bnodes(Lang sparqlresultlang, Context cxt, boolean same) {
    // Build the reference result set from the SSE fixture.
    ResultSetRewindable original =
        ResultSetFactory.makeRewindable(BuilderResultSet.build(SSE.parseItem(StrUtils.strjoinNL(rs1$))));
    // Serialise with the context under test...
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    ResultsWriter.create().context(cxt).lang(sparqlresultlang).write(bytesOut, original);
    // ...then read it back with the same context.
    ByteArrayInputStream bytesIn = new ByteArrayInputStream(bytesOut.toByteArray());
    ResultSetRewindable roundTripped =
        ResultSetFactory.copyResults(ResultsReader.create().context(cxt).lang(sparqlresultlang).read(bytesIn));
    // Rewind both before comparing.
    original.reset();
    roundTripped.reset();
    boolean identical = ResultSetCompare.equalsExact(original, roundTripped);
    if (same)
        assertTrue(identical);
    else
        assertFalse(identical);
}
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// Rewind so later consumers iterate from the first row.
rsrw.reset();
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// Rewind so later consumers iterate from the first row.
rsrw.reset();
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// Rewind so later consumers iterate from the first row.
rsrw.reset();
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// NOTE(review): no reset() before the copy replaces rs — confirm size()
// leaves the cursor at the start for this result-set implementation.
rs = rsrw;
// Materialise the streaming results into an in-memory, rewindable copy.
ResultSetRewindable rsrw = ResultSetFactory.copyResults(rs);
int size = rsrw.size();
// NOTE(review): no reset() before the copy replaces rs — confirm size()
// leaves the cursor at the start for this result-set implementation.
rs = rsrw;