// Releases the wrapped QueryExecution and its resources.
// NOTE(review): assumes 'execution' is always non-null by the time close() is called — confirm
// it is initialized unconditionally in the enclosing class (declared outside this view; the
// trailing brace below closes that class).
public void close() { execution.close(); } }
/**
 * Closes the query execution (if one was created) and ends the dataset transaction.
 * Fix: the original called {@code dataset.end()} outside a finally block, so a failure in
 * {@code qExec.close()} would leave the read transaction open. The end() call is now
 * guaranteed to run.
 */
@Override
public void close() {
    try {
        if (this.qExec != null) {
            this.qExec.close();
        }
    } finally {
        // Always end the transaction, even if closing the execution throws.
        this.dataset.end();
    }
}
/**
 * Closes the query execution (if one was created) and ends the dataset transaction.
 * Fix: the original called {@code dataset.end()} outside a finally block, so a failure in
 * {@code qExec.close()} would leave the read transaction open. The end() call is now
 * guaranteed to run.
 */
@Override
public void close() {
    try {
        if (this.qExec != null) {
            this.qExec.close();
        }
    } finally {
        // Always end the transaction, even if closing the execution throws.
        this.dataset.end();
    }
}
/** Records the close operation in the call history, then closes the underlying execution. */
@Override
public void close() {
    // Log before delegating so the history reflects the order in which calls were made.
    history.add( "CLOSE" );
    qe.close();
}
/**
 * Parses and runs an ASK query against the remote SPARQL endpoint.
 *
 * @param queryStr the SPARQL ASK query text.
 * @return the boolean result of the ASK query.
 */
private boolean execAsk(String queryStr) {
    Query parsed = QueryFactory.create(queryStr);
    QueryExecution execution = QueryExecutionFactory.sparqlService(endpointURI, parsed);
    try {
        return execution.execAsk();
    } finally {
        // Release the HTTP/connection resources held by the remote execution.
        execution.close();
    }
}
/**
 * Runs a two-column SELECT against the dataset and returns the rows as a map
 * (first result variable -> second result variable).
 * Later rows with a duplicate key overwrite earlier ones.
 *
 * @param m the dataset to query.
 * @param query a SELECT query projecting at least two variables.
 * @param bindings initial bindings — NOTE(review): currently unused; kept for interface
 *        compatibility, confirm whether it should be applied to the execution.
 * @return the key/value pairs produced by the first two projected variables.
 * @throws Exception on query-execution failure.
 */
public static Map<RDFNode,RDFNode> fetchMap(Dataset m, Query query, QuerySolution bindings) throws Exception {
    QueryExecution execution = QueryExecutionFactory.create(query, m);
    try {
        ResultSet resultSet = execution.execSelect();
        List<String> columns = resultSet.getResultVars();
        Map<RDFNode,RDFNode> mapping = Maps.newHashMap();
        while (resultSet.hasNext()) {
            QuerySolution row = resultSet.nextSolution();
            mapping.put(row.get(columns.get(0)), row.get(columns.get(1)));
        }
        return mapping;
    } finally {
        execution.close();
    }
}
/**
 * Executes a SPARQL CONSTRUCT.
 *
 * @param constructQuery the SPARQL CONSTRUCT Query.
 * @return the {@link Model} that includes the CONSTRUCT result.
 * @throws UnableToExecuteQueryException in case of failure before, during or after the query execution.
 */
public Model construct(final String constructQuery) throws UnableToExecuteQueryException {
    QueryExecution execution = null;
    try {
        execution = execution(constructQuery);
        return execution.execConstruct();
    } catch (final Exception exception) {
        throw new UnableToExecuteQueryException(exception);
    } finally {
        // Fix: guard against null — if execution(constructQuery) threw, the original
        // unconditionally called execution.close() and the resulting NullPointerException
        // masked the UnableToExecuteQueryException thrown from the catch block.
        if (execution != null) {
            execution.close();
        }
    }
}
/**
 * Executes a SPARQL ASK.
 *
 * @param askQuery the SPARQL ASK Query.
 * @return true if the ASK query matches, false otherwise.
 * @throws UnableToExecuteQueryException in case of failure before, during or after the query execution.
 */
public boolean ask(final String askQuery) throws UnableToExecuteQueryException {
    QueryExecution execution = null;
    try {
        execution = execution(askQuery);
        return execution.execAsk();
    } catch (final Exception exception) {
        throw new UnableToExecuteQueryException(exception);
    } finally {
        // Fix: guard against null — if execution(askQuery) threw, the original
        // unconditionally called execution.close() and the resulting NullPointerException
        // masked the UnableToExecuteQueryException thrown from the catch block.
        if (execution != null) {
            execution.close();
        }
    }
}
/**
 * Executes a SPARQL DESCRIBE.
 *
 * @param describeQuery the SPARQL DESCRIBE Query.
 * @return the {@link Model} that includes the DESCRIBE result.
 * @throws UnableToExecuteQueryException in case of failure before, during or after the query execution.
 */
public Model describe(final String describeQuery) throws UnableToExecuteQueryException {
    QueryExecution execution = null;
    try {
        execution = execution(describeQuery);
        return execution.execDescribe();
    } catch (final Exception exception) {
        throw new UnableToExecuteQueryException(exception);
    } finally {
        // Fix: guard against null — if execution(describeQuery) threw, the original
        // unconditionally called execution.close() and the resulting NullPointerException
        // masked the UnableToExecuteQueryException thrown from the catch block.
        if (execution != null) {
            execution.close();
        }
    }
}
public boolean buildModelFromSparql(Model modelToQuery, String queryString) { boolean success = false; Query query = QueryFactory.create(queryString); //Model model = loadModelFromTurtle(loader, configPath); // Not sure if we want this to build its own model or not... //if (model != null) { QueryExecution qexec = QueryExecutionFactory.create(query, modelToQuery); Model resultModel = qexec.execDescribe(); qexec.close(); buildModelFromJena(resultModel, true); success = true; //} return success; }
/**
 * Runs a CONSTRUCT query over {@code inModel} and appends the constructed triples
 * to {@code outModel}.
 *
 * @param outModel the model that receives the constructed triples.
 * @param queryString the SPARQL CONSTRUCT query text.
 * @param inModel the model the query is evaluated against.
 * @param bindings optional initial bindings; may be null.
 */
public static void appendConstruct(Model outModel, String queryString, Model inModel, QuerySolution bindings) {
    Query parsed = QueryFactory.create(queryString);
    QueryExecution execution = QueryExecutionFactory.create(parsed, inModel);
    try {
        if (bindings != null) {
            execution.setInitialBinding(bindings);
        }
        // execConstruct(Model) accumulates directly into the supplied output model.
        execution.execConstruct(outModel);
    } finally {
        execution.close();
    }
}
/**
 * <code>query</code> must be a DESCRIBE query. Answer the model which is the description.
 * Fix: the original called {@code execute(query)} after entering the critical section but
 * outside any try block, so an exception there left the read lock permanently held. The
 * lock release is now guaranteed.
 */
public Model executeDescribe( Query query ) {
    Lock l = getLock();
    l.enterCriticalSection( Lock.READ );
    try {
        QueryExecution qe = execute( query );
        try {
            return qe.execDescribe();
        } finally {
            qe.close();
        }
    } finally {
        l.leaveCriticalSection();
    }
}
/**
 * <code>query</code> must be a CONSTRUCT query. Answer the model which is constructed.
 * Fix: the original called {@code execute(query)} after entering the critical section but
 * outside any try block, so an exception there left the read lock permanently held. The
 * lock release is now guaranteed.
 */
public Model executeConstruct( Query query ) {
    Lock l = getLock();
    l.enterCriticalSection( Lock.READ );
    try {
        QueryExecution qe = execute( query );
        try {
            return qe.execConstruct();
        } finally {
            qe.close();
        }
    } finally {
        l.leaveCriticalSection();
    }
}
/**
 * Evaluates this instance's SELECT query against its model, binding the target
 * variable to this resource, and converts the result set via {@code processResults}.
 *
 * @return the parsed collection produced by {@code processResults}.
 */
final List<T> parseCollection() {
    // Pre-bind the target variable so the query is evaluated relative to this resource.
    QuerySolutionMap initialBindings = new QuerySolutionMap();
    initialBindings.add(this.targetVariable, this.resource);
    QueryExecution execution = QueryExecutionFactory.create(this.query, this.model);
    execution.setInitialBinding(initialBindings);
    try {
        return processResults(execution.execSelect());
    } finally {
        execution.close();
    }
}
/**
 * Execute, expecting the result to be one row, one column.
 * Return that one RDFNode, or null when there are no rows.
 * Throw an exception if more than one row is found.
 * The execution is always closed before returning.
 */
public static RDFNode getOne(QueryExecution qExec, String varname) {
    try {
        ResultSet resultSet = qExec.execSelect() ;
        if ( !resultSet.hasNext() ) {
            return null ;
        }
        RDFNode node = resultSet.nextSolution().get(varname) ;
        if ( resultSet.hasNext() ) {
            throw new ARQException("More than one: var ?"+varname) ;
        }
        return node ;
    } finally {
        qExec.close() ;
    }
}
/**
 * Execute, expecting the result to be exactly one row, one column.
 * Return that one RDFNode; throw an exception on zero rows or more than one row.
 * The execution is always closed before returning.
 */
public static RDFNode getExactlyOne(QueryExecution qExec, String varname) {
    try {
        ResultSet resultSet = qExec.execSelect() ;
        if ( !resultSet.hasNext() ) {
            throw new ARQException("Not found: var ?"+varname) ;
        }
        RDFNode node = resultSet.nextSolution().get(varname) ;
        if ( resultSet.hasNext() ) {
            throw new ARQException("More than one: var ?"+varname) ;
        }
        return node ;
    } finally {
        qExec.close() ;
    }
}
/**
 * Execute, expecting the result to be exactly one row, one column.
 * Return that one RDFNode; throw an exception on zero rows or more than one row.
 * The execution is always closed before returning.
 */
public static RDFNode getExactlyOne(QueryExecution qExec, String varname) {
    try {
        ResultSet resultSet = qExec.execSelect() ;
        if ( !resultSet.hasNext() ) {
            throw new ARQException("Not found: var ?"+varname) ;
        }
        RDFNode node = resultSet.nextSolution().get(varname) ;
        if ( resultSet.hasNext() ) {
            throw new ARQException("More than one: var ?"+varname) ;
        }
        return node ;
    } finally {
        qExec.close() ;
    }
}
/**
 * Dispatches the query to the handler matching its type (SELECT, DESCRIBE, CONSTRUCT, ASK)
 * and writes the result in the given output format.
 * Fix: the original closed the execution only after all dispatch calls succeeded, leaking
 * it whenever a handler threw; close() now runs in a finally block.
 *
 * @param query the parsed query (determines which handler runs).
 * @param queryExecution the execution to run; always closed before returning.
 * @param outputFormat the format the handler renders results in.
 */
public static void executeQuery(Query query, QueryExecution queryExecution, ResultsFormat outputFormat) {
    try {
        if ( query.isSelectType() )
            doSelectQuery(query, queryExecution, outputFormat) ;
        if ( query.isDescribeType() )
            doDescribeQuery(query, queryExecution, outputFormat) ;
        if ( query.isConstructType() )
            doConstructQuery(query, queryExecution, outputFormat) ;
        if ( query.isAskType() )
            doAskQuery(query, queryExecution, outputFormat) ;
    } finally {
        queryExecution.close() ;
    }
}
/**
 * Dispatches the query to the handler matching its type (SELECT, DESCRIBE, CONSTRUCT, ASK)
 * and writes the result in the given output format.
 * Fix: the original closed the execution only after all dispatch calls succeeded, leaking
 * it whenever a handler threw; close() now runs in a finally block.
 *
 * @param query the parsed query (determines which handler runs).
 * @param queryExecution the execution to run; always closed before returning.
 * @param outputFormat the format the handler renders results in.
 */
public static void executeQuery(Query query, QueryExecution queryExecution, ResultsFormat outputFormat) {
    try {
        if ( query.isSelectType() )
            doSelectQuery(query, queryExecution, outputFormat) ;
        if ( query.isDescribeType() )
            doDescribeQuery(query, queryExecution, outputFormat) ;
        if ( query.isConstructType() )
            doConstructQuery(query, queryExecution, outputFormat) ;
        if ( query.isAskType() )
            doAskQuery(query, queryExecution, outputFormat) ;
    } finally {
        queryExecution.close() ;
    }
}
/** Exercises a SPARQL read followed by a direct dataset-graph write and iteration. */
@Test
public void mrswSPARQL1() {
    // Phase 1: drain a SELECT over the whole dataset.
    Dataset dataset = create();
    Query selectAll = QueryFactory.create("SELECT * { ?s ?p ?o}") ;
    QueryExecution execution = QueryExecutionFactory.create(selectAll, dataset) ;
    ResultSet resultSet = execution.execSelect() ;
    while ( resultSet.hasNext() ) {
        resultSet.next();
    }
    execution.close() ;

    // Phase 2: add a quad directly and start iterating the dataset graph.
    DatasetGraph datasetGraph = dataset.asDatasetGraph() ;
    Quad quad = SSE.parseQuad("(<g> <y> <p> 99)") ;
    datasetGraph.add(quad) ;
    Iterator<Quad> quads = datasetGraph.find() ;
    quads.hasNext() ;
    quads.next() ;
}