/**
 * Already have the dataset, so process the query and return the results.
 * A checked RDFServiceException is rethrown unchecked.
 */
private static ResultSet getQueryResults(String queryStr, RDFService rdfService) {
    try {
        InputStream jsonStream =
                rdfService.sparqlSelectQuery(queryStr, RDFService.ResultFormat.JSON);
        return ResultSetFactory.fromJSON(jsonStream);
    } catch (RDFServiceException e) {
        throw new RuntimeException(e);
    }
}
/** * Converts from JSON to internal Jena format. * * @param json * A JSON representation if a SPARQL query result. * @return A Jena ResultSet. */ public static ResultSetRewindable convertJSONtoResultSet(String json) { ByteArrayInputStream bais = new ByteArrayInputStream(json .getBytes(Charset.forName("UTF-8"))); // System.out.println("JSON " + json); return ResultSetFactory.makeRewindable(ResultSetFactory.fromJSON(bais)); }
/**
 * Executes a SPARQL SELECT query against the given RDFService and parses the
 * JSON response into a Jena ResultSet.
 *
 * @param query a SPARQL SELECT query string.
 * @param rdfService the service to run the query against.
 * @return the parsed result set, or null if the query failed (the error is logged).
 */
public static ResultSet sparqlSelectQuery(String query, RDFService rdfService) {
    try {
        InputStream resultStream = rdfService.sparqlSelectQuery(query,
                RDFService.ResultFormat.JSON);
        return ResultSetFactory.fromJSON(resultStream);
    } catch (RDFServiceException e) {
        // Pass the exception itself so the stack trace is logged, not just the message.
        log.error("error executing sparql select query: " + e.getMessage(), e);
    }
    // Callers depend on the null-on-failure contract, so it is preserved.
    return null;
}
}
// NOTE(review): fragment of a larger method — the enclosing method signature, the
// declaration of `results`, and the catch block's closing brace are outside this view.
// Executes the SELECT query, asking the RDFService for a JSON response, parses the
// stream into `results`, and rethrows the checked RDFServiceException unchecked.
try {
    InputStream in = rdfService.sparqlSelectQuery(queryStr, RDFService.ResultFormat.JSON);
    results = ResultSetFactory.fromJSON(in);
} catch (RDFServiceException e) {
    throw new RuntimeException(e);
/**
 * Runs the given SELECT query and serializes the results to the output stream:
 * as quads when the result binds a graph variable "g", otherwise as triples.
 *
 * @param outputStream destination for the serialized data.
 * @param query a SPARQL SELECT query.
 * @throws RDFServiceException if the query cannot be executed.
 */
private void serialize(OutputStream outputStream, String query) throws RDFServiceException {
    // try-with-resources so the result stream is always closed (previously leaked).
    try (InputStream resultStream = sparqlSelectQuery(query, RDFService.ResultFormat.JSON)) {
        ResultSet resultSet = ResultSetFactory.fromJSON(resultStream);
        if (resultSet.getResultVars().contains("g")) {
            Iterator<Quad> quads = new ResultSetQuadsIterator(resultSet);
            RDFDataMgr.writeQuads(outputStream, quads);
        } else {
            Iterator<Triple> triples = new ResultSetTriplesIterator(resultSet);
            RDFDataMgr.writeTriples(outputStream, triples);
        }
    } catch (IOException e) {
        // Only reachable if closing the stream fails; wrap to keep the signature stable.
        throw new RuntimeException(e);
    }
}
// Round-trip test: serialize the result set to SPARQL-JSON in memory, re-parse it,
// and check that the reread result set is isomorphic to the original.
// NOTE(review): `areIsomorphic(rs, rs2)` presumably performs the assertion internally —
// confirm it is not a no-op helper; `make($rs)` builds the fixture from a shared member.
@Test
public void resultset_03() {
    ResultSet rs = make($rs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(out, rs);
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    ResultSet rs2 = ResultSetFactory.fromJSON(in);
    areIsomorphic(rs, rs2);
}
@Override public void executeAndFormat(OutputStream out) throws RDFServiceException, IOException { InputStream rawResult = getRawResultStream(); if (mediaType.isNativeFormat()) { IOUtils.copy(rawResult, out); } else if (mediaType == TSV) { // ARQ doesn't support TSV, so we will do the translation. pipeWithReplacement(rawResult, out); } else { ResultSet rs = ResultSetFactory.fromJSON(rawResult); ResultsFormat format = ResultsFormat.lookup(mediaType .getJenaResponseFormat()); ResultSetFormatter.output(out, rs, format); } }
// NOTE(review): fragment — `factory` and `in` are declared outside this view; presumably
// `factory` is a Jena result-set factory and `in` a SPARQL-JSON input stream. TODO confirm.
ResultSet result = factory.fromJSON(in);
// The variable names bound by the SELECT projection, in order.
List<String> var = result.getResultVars();
// Accumulator for values extracted below (continuation not visible here).
List<String> list = new ArrayList<>();
/**
 * Parses a SPARQL-JSON result string and drains every row binding,
 * exercising the full parse without retaining any results.
 */
private void parseJSON(String input) {
    ResultSet results = ResultSetFactory.fromJSON(
            new ByteArrayInputStream(StrUtils.asUTF8bytes(input)));
    while (results.hasNext()) {
        results.nextBinding();
    }
}
@Override public ResultSetRewindable readFrom(Class<ResultSetRewindable> type, Type type1, Annotation[] antns, javax.ws.rs.core.MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream in) throws IOException { if (log.isTraceEnabled()) log.trace("Reading ResultSet with HTTP headers: {} MediaType: {}", httpHeaders, mediaType); // result set needs to be rewindable because results might be processed multiple times, e.g. to calculate hash and write response // TO-DO: construct Jena's ResultFormat and then pass to ResultSet.load(in, format) if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_XML_TYPE)) return ResultSetFactory.makeRewindable(ResultSetFactory.fromXML(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_JSON_TYPE)) return ResultSetFactory.makeRewindable(ResultSetFactory.fromJSON(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_CSV_TYPE)) return ResultSetFactory.makeRewindable(CSVInput.fromCSV(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_CSV_TYPE)) return ResultSetFactory.makeRewindable(TSVInput.fromTSV(in)); throw new IllegalStateException("ResultSet MediaType should be readable but no Jena reader matched"); }
// NOTE(review): fragment — the start of the method signature is above this view;
// only the trailing `resultFormat` parameter is visible here.
        ResultFormat resultFormat) throws RDFServiceException {
    // Collapse runs of whitespace so the query logs on a single line.
    log.debug("sparqlSelectQuery: " + query.replaceAll("\\s+", " "));
    // NOTE(review): the `resultFormat` parameter is ignored here — the service is
    // always asked for JSON. Confirm whether the parameter should be honored.
    ResultSet resultSet = ResultSetFactory.fromJSON(
            s.sparqlSelectQuery(query, RDFService.ResultFormat.JSON));
    List<QuerySolution> solnList = getSolutionList(resultSet);
/** Round-trips an empty result set through SPARQL-JSON and compares by term. */
@Test
public void test_RS_5() {
    ResultSetRewindable original = new ResultSetMem();
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(buffer, original);
    original.reset();
    ResultSet reread = ResultSetFactory.fromJSON(
            new ByteArrayInputStream(buffer.toByteArray()));
    assertTrue(ResultSetCompare.equalsByTerm(original, reread));
}
@Test public void test_RS_6() { ResultSetRewindable rs1 = make2Rewindable("x", org.apache.jena.graph.NodeFactory.createURI("tag:local")) ; ByteArrayOutputStream arr = new ByteArrayOutputStream() ; ResultSetFormatter.outputAsJSON(arr, rs1) ; rs1.reset() ; ByteArrayInputStream ins = new ByteArrayInputStream(arr.toByteArray()) ; ResultSet rs2 = ResultSetFactory.fromJSON(ins) ; // Test using the DAWG examples assertTrue(ResultSetCompare.equalsByTerm(rs1, rs2)) ; }
/** Reads a stored SPARQL-JSON results file and runs the shared checks on a copy. */
@Test
public void resultSet_10() {
    try (InputStream in = IO.openFile(DIR + "/results-1.srj")) {
        ResultSet parsed = ResultSetFactory.fromJSON(in);
        test(ResultSetFactory.copyResults(parsed));
    } catch (IOException ex) {
        IO.exception(ex);
    }
}
}