/**
 * Wraps a RIOT parse failure in a {@link QueryException} and rethrows it,
 * keeping the original exception as the cause.
 *
 * @param ex the underlying RIOT parse exception
 */
protected final void raiseException(RiotParseException ex) {
    // Fixed typo in the user-facing message: "passing" -> "parsing".
    throw new QueryException("Error parsing SPARQL JSON results", ex);
}
/**
 * Location of the source for the data. If the model is not set,
 * then the QueryEngine will attempt to load the data from these URIs
 * as named graphs in the dataset.
 *
 * @param uri named-graph URI to register
 * @throws QueryException if the URI is already registered
 */
public void addNamedGraphURI(String uri) {
    if ( namedGraphURIs == null )
        namedGraphURIs = new ArrayList<>();
    // Duplicates are an error, not a no-op.
    if ( namedGraphURIs.contains(uri) )
        throw new QueryException("URI already in named graph set: " + uri);
    namedGraphURIs.add(uri);
}
private boolean parseNextBinding() { String line; try { line = this.reader.readLine(); //Once EOF has been reached we'll see null for this call so we can return false because there are no further bindings if (line == null) return false; this.lineNum++; } catch (IOException e) { throw new QueryException("Error parsing CSV results - " + e.getMessage()); } if ( line.isEmpty() ) { // Empty input line - no bindings. // Only valid when we expect zero/one values as otherwise we should get a sequence of tab characters // which means a non-empty string which we handle normally if (expectedItems > 1) throw new QueryException(String.format("Error Parsing CSV results at Line %d - The result row had 0/1 values when %d were expected", this.lineNum, expectedItems)); binding = BindingFactory.create() ; if ( expectedItems == 1 ) binding.add(vars.get(0), NodeConst.emptyString) ; return true ; } binding = parseLine(vars, line) ; return true ; }
/**
 * Adds a node to the DESCRIBE target set: variables become result variables,
 * URIs and blank nodes are collected (deduplicated), literals are rejected.
 *
 * @param node the node to describe
 * @throws QueryException if the node is a literal or of an unknown kind
 */
public void addDescribeNode(Node node) {
    if ( node.isVariable() ) {
        addResultVar(node);
        return;
    }
    if ( node.isLiteral() )
        throw new QueryException("Result node is a literal: " + FmtUtils.stringForNode(node));
    if ( !(node.isURI() || node.isBlank()) )
        throw new QueryException("Result node not recognized: " + node);
    // Concrete URI / blank node: record it once.
    if ( !resultNodes.contains(node) )
        resultNodes.add(node);
}
@Override public Iterator<Triple> execConstructTriples() { try (GraphQueryResult result = getConstructResult()) { List<Triple> tripleArrayList = new ArrayList<>(); while (result.hasNext()) { tripleArrayList.add(AGNodeFactory.asTriple(result.next())); } //Getting Iterator from List return tripleArrayList.iterator(); } catch (QueryEvaluationException e) { throw new QueryException(e); } }
@Override public Iterator<Quad> execConstructQuads() { try (GraphQueryResult result = getConstructResult()) { List<Quad> quadArrayList = new ArrayList<>(); while (result.hasNext()) { quadArrayList.add(AGNodeFactory.asQuad(result.next())); } //Getting Iterator from List return quadArrayList.iterator(); } catch (QueryEvaluationException e) { throw new QueryException(e); } }
/** Add a collection of projection variables to a SELECT query */
public void addProjectVars(Collection<?> vars) {
    for ( Object obj : vars ) {
        if ( obj instanceof String )
            this.addResultVar((String) obj);
        else if ( obj instanceof Var )
            this.addResultVar((Var) obj);
        else
            throw new QueryException("Not a variable or variable name: " + obj);
    }
    resultVarsSet = true;
}
/**
 * Adds an ORDER BY condition on a variable.
 *
 * @param var       must be a variable node
 * @param direction sort direction
 * @throws QueryException if {@code var} is not a variable
 */
public void addOrderBy(Node var, int direction) {
    if ( !var.isVariable() )
        throw new QueryException("Not a variable: " + var);
    addOrderBy(new SortCondition(var, direction));
}
/**
 * Adds a result (projection) variable.
 *
 * @param v must be a variable node
 * @throws QueryException if {@code v} is not a variable
 */
public void addResultVar(Node v) {
    if ( v.isVariable() )
        _addResultVar(v.getName());
    else
        throw new QueryException("Not a variable: " + v);
}
/**
 * Adds an ORDER BY condition on a variable.
 *
 * @param var       must be a variable node
 * @param direction sort direction
 * @return this instance, for chaining
 * @throws QueryException if {@code var} is not a variable
 */
public SparqlSelectFunction<T> addOrderBy(Node var, int direction) {
    if ( !var.isVariable() )
        throw new QueryException("Not a variable: " + var);
    addOrderBy(new SortCondition(var, direction));
    return this;
}
/**
 * Adds a result variable bound to an expression. A null node allocates an
 * internal variable instead.
 *
 * @param v    variable node, or null to allocate an internal variable
 * @param expr the expression producing the variable's value
 * @throws QueryException if {@code v} is non-null and not a variable
 */
public void addResultVar(Node v, Expr expr) {
    final Var var;
    if ( v == null ) {
        var = allocInternVar();
    } else {
        if ( !v.isVariable() )
            throw new QueryException("Not a variable: " + v);
        var = Var.alloc(v);
    }
    _addVarExpr(projectVars, var, expr);
}
/**
 * Creates a SPARQL-DL execution over a Pellet-backed dataset.
 *
 * @param query             the query to execute
 * @param source            dataset whose default model must be Pellet-backed
 * @param handleVariableSPO whether variable-SPO patterns are handled
 * @throws QueryException if the default model's graph is not a PelletInfGraph
 */
public SparqlDLExecution(final Query query, final Dataset source, final boolean handleVariableSPO) {
    _query = query;
    _source = source;
    _handleVariableSPO = handleVariableSPO;
    final Graph graph = source.getDefaultModel().getGraph();
    if (!(graph instanceof PelletInfGraph))
        throw new QueryException("PelletQueryExecution can only be used with Pellet-backed models");
    // Optionally pre-compute all size estimates up front.
    if (OpenlletOptions.FULL_SIZE_ESTIMATE)
        ((PelletInfGraph) graph).getKB().getSizeEstimate().computeAll();
}
/**
 * Creates a SPARQL-DL execution over a Pellet-backed dataset.
 *
 * @param query             the query to execute
 * @param source            dataset whose default model must be Pellet-backed
 * @param handleVariableSPO whether variable-SPO patterns are handled
 * @throws QueryException if the default model's graph is not a PelletInfGraph
 */
public SparqlDLExecution(final Query query, final Dataset source, final boolean handleVariableSPO) {
    _query = query;
    _source = source;
    _handleVariableSPO = handleVariableSPO;
    final Graph graph = source.getDefaultModel().getGraph();
    if (!(graph instanceof PelletInfGraph))
        throw new QueryException("PelletQueryExecution can only be used with Pellet-backed models");
    // When configured, warm up the KB's size estimates eagerly.
    if (OpenlletOptions.FULL_SIZE_ESTIMATE)
        ((PelletInfGraph) graph).getKB().getSizeEstimate().computeAll();
}
/**
 * Creates a SPARQL-DL execution over a Pellet-backed dataset.
 *
 * @param query             the query to execute
 * @param source            dataset whose default model must be Pellet-backed
 * @param handleVariableSPO whether variable-SPO patterns are handled
 * @throws QueryException if the default model's graph is not a PelletInfGraph
 */
public SparqlDLExecution(final Query query, final Dataset source, final boolean handleVariableSPO) {
    _query = query;
    _source = source;
    _handleVariableSPO = handleVariableSPO;
    final Graph graph = source.getDefaultModel().getGraph();
    if (!(graph instanceof PelletInfGraph))
        throw new QueryException("PelletQueryExecution can only be used with Pellet-backed models");
    // Eagerly compute KB size estimates if the global option requests it.
    if (OpenlletOptions.FULL_SIZE_ESTIMATE)
        ((PelletInfGraph) graph).getKB().getSizeEstimate().computeAll();
}
private Pair<InputStream, Lang> execConstructWorker(String contentType) { checkNotClosed() ; HttpQuery httpQuery = makeHttpQuery(); httpQuery.setAccept(chooseAcceptHeader(acceptHeader, contentType)); InputStream in = httpQuery.exec(); // Don't assume the endpoint actually gives back the content type we // asked for String actualContentType = httpQuery.getContentType(); httpResponseContentType = actualContentType; // If the server fails to return a Content-Type then we will assume // the server returned the type we asked for if (actualContentType == null || actualContentType.equals("")) { actualContentType = WebContent.defaultDatasetAcceptHeader; } Lang lang = RDFLanguages.contentTypeToLang(actualContentType); if ( ! RDFLanguages.isQuads(lang) && ! RDFLanguages.isTriples(lang) ) throw new QueryException("Endpoint returned Content Type: " + actualContentType + " which is not a valid RDF syntax"); return Pair.create(in, lang) ; }
/** Convert a {@link Node} (graph SPI) to an RDFNode (model API), anchored to the model if possible. * * @param node * @param model (may be null) * @return RDFNode */ public static RDFNode convertGraphNodeToRDFNode(Node node, Model model) { if ( node.isVariable() ) throw new QueryException("Variable: "+node) ; // Best way. if ( model != null ) return model.asRDFNode(node) ; if ( node.isLiteral() ) return new LiteralImpl(node, null) ; if ( node.isURI() || node.isBlank() ) return new ResourceImpl(node, null) ; throw new ARQInternalErrorException("Unknown node type for node: "+node) ; }
/**
 * Executes the CONSTRUCT query and adds the resulting statements (and the
 * result's namespace prefixes) to the given model, creating a fresh default
 * model when {@code m} is null.
 *
 * @param m target model, or null to create one
 * @return the populated model
 * @throws QueryException on evaluation failure
 */
@Override
public Model execConstruct(Model m) {
    GraphQueryResult result = getConstructResult();
    if (m == null) {
        m = ModelFactory.createDefaultModel();
    }
    // try-with-resources so the result is always closed; previously it was
    // never closed, leaking the underlying resources on every call (unlike
    // execConstructTriples/execConstructQuads, which already close theirs).
    try (GraphQueryResult res = result) {
        m.setNsPrefixes(res.getNamespaces());
        while (res.hasNext()) {
            m.add(model.asStatement(AGNodeFactory.asTriple(res.next())));
        }
    } catch (QueryEvaluationException e) {
        throw new QueryException(e);
    }
    return m;
}
/**
 * Counts the triples produced by the CONSTRUCT query without materializing
 * them on the client.
 *
 * @return the number of constructed triples
 * @throws UnsupportedOperationException for non-SPARQL query languages
 * @throws QueryException on evaluation failure
 */
public long countConstruct() {
    if (query.getLanguage() != QueryLanguage.SPARQL) {
        throw new UnsupportedOperationException(query.getLanguage().getName() + " language does not support CONSTRUCT queries.");
    }
    AGGraphQuery gq = model.getGraph().getConnection().prepareGraphQuery(query.getLanguage(), query.getQueryString());
    gq.setIncludeInferred(model.getGraph() instanceof AGInfGraph);
    gq.setEntailmentRegime(model.getGraph().getEntailmentRegime());
    gq.setCheckVariables(query.isCheckVariables());
    gq.setLimit(query.getLimit());
    gq.setOffset(query.getOffset());
    gq.setDataset(model.getGraph().getDataset());
    // Forward any pre-set variable bindings to the prepared query.
    if (binding != null) {
        Iterator<String> names = binding.varNames();
        while (names.hasNext()) {
            String name = names.next();
            gq.setBinding(name, model.getGraph().vf.asValue(binding.get(name).asNode()));
        }
    }
    try {
        return gq.count();
    } catch (QueryEvaluationException e) {
        throw new QueryException(e);
    }
}
public static void executeQuery(Prologue prologue, QueryExecution queryExecution, ResultsFormat outputFormat) { Query query = queryExecution.getQuery() ; if ( prologue == null ) prologue = query.getPrologue() ; if ( prologue == null ) prologue = dftPrologue ; if ( query.isSelectType() ) doSelectQuery(prologue, queryExecution, outputFormat) ; else if ( query.isDescribeType() ) doDescribeQuery(prologue, queryExecution, outputFormat) ; else if ( query.isConstructQuad() ) // Before isConstructType. doConstructQuadsQuery(prologue, queryExecution, outputFormat) ; else if ( query.isConstructType() ) doConstructQuery(prologue, queryExecution, outputFormat) ; else if ( query.isAskType() ) doAskQuery(prologue, queryExecution, outputFormat) ; else if ( query.isJsonType() ) doJsonQuery(prologue, queryExecution, outputFormat) ; else throw new QueryException("Unrecognized query form"); }
/**
 * Counts the rows produced by the SELECT query without materializing them on
 * the client.
 *
 * @return the number of result rows
 * @throws QueryException on evaluation failure
 */
public long countSelect() {
    AGTupleQuery tq = model.getGraph().getConnection().prepareTupleQuery(query.getLanguage(), query.getQueryString());
    tq.setIncludeInferred(model.getGraph() instanceof AGInfGraph);
    tq.setEntailmentRegime(model.getGraph().getEntailmentRegime());
    tq.setCheckVariables(query.isCheckVariables());
    tq.setLimit(query.getLimit());
    tq.setOffset(query.getOffset());
    tq.setDataset(model.getGraph().getDataset());
    // Forward any pre-set variable bindings to the prepared query.
    if (binding != null) {
        Iterator<String> names = binding.varNames();
        while (names.hasNext()) {
            String name = names.next();
            tq.setBinding(name, model.getGraph().vf.asValue(binding.get(name).asNode()));
        }
    }
    try {
        return tq.count();
    } catch (QueryEvaluationException e) {
        throw new QueryException(e);
    }
}