/**
 * Set the offset for the results to return.
 * Setting the offset to zero (0) or less removes the offset.
 * @param offset The offset to set.
 */
public void setOffset(int offset) { query.setOffset(offset < 1 ? Query.NOLIMIT : offset); } // values below 1 map to the NOLIMIT sentinel, i.e. "no offset"
/**
 * Set the offset for the results to return.
 * Setting the offset to zero (0) or less removes the offset.
 * @param offset The offset to set.
 */
public void setOffset(int offset) { query.setOffset(offset < 1 ? Query.NOLIMIT : offset); } // values below 1 map to the NOLIMIT sentinel, i.e. "no offset"
/** Copies the visited query's OFFSET onto the query being built. */
@Override
public void visitOffset(Query query) {
    long copiedOffset = query.getOffset();
    newQuery.setOffset(copiedOffset);
}
/** Transfers the OFFSET value from the visited query to the new query. */
@Override
public void visitOffset(Query query) {
    newQuery.setOffset(query.getOffset());
}
/**
 * Applies the given range to the query as OFFSET and LIMIT clauses.
 *
 * @param query the query to modify in place
 * @param range the range to translate into an offset/limit pair
 */
public static void applyRange(Query query, Range<Long> range) {
    final long newOffset = rangeToOffset(range);
    final long newLimit = rangeToLimit(range);
    query.setOffset(newOffset);
    query.setLimit(newLimit);
}
// Generated by JavaCC: parses an "OFFSET <integer>" clause and records the
// offset on the query being built.
final public void OffsetClause() throws ParseException {
    Token t ;
    // Consume the OFFSET keyword, then the integer literal that follows it.
    jj_consume_token(OFFSET);
    t = jj_consume_token(INTEGER);
    // t.image is the raw lexeme text; integerValue converts it to a number.
    getQuery().setOffset(integerValue(t.image)) ;
}
// Generated by JavaCC: parses an "OFFSET <integer>" clause and records the
// offset on the query being built.
final public void OffsetClause() throws ParseException {
    Token t ;
    // Consume the OFFSET keyword, then the integer literal that follows it.
    jj_consume_token(OFFSET);
    t = jj_consume_token(INTEGER);
    // t.image is the raw lexeme text; integerValue converts it to a number.
    getQuery().setOffset(integerValue(t.image)) ;
}
// Generated by JavaCC: parses an "OFFSET <integer>" clause and records the
// offset on the query being built.
final public void OffsetClause() throws ParseException {
    Token t ;
    // Consume the OFFSET keyword, then the integer literal that follows it.
    jj_consume_token(OFFSET);
    t = jj_consume_token(INTEGER);
    // t.image is the raw lexeme text; integerValue converts it to a number.
    getQuery().setOffset(integerValue(t.image)) ;
}
/**
 * Extracts a fragment of the knowledge base around the given class by paging
 * through a CONSTRUCT query until the result set is exhausted or the runtime
 * budget (see getRemainingRuntime) is used up.
 *
 * Fixes over the previous version: the stray debug println is removed, the
 * offset is a long (the old int offset silently narrowed the long page size
 * in the compound assignment and could overflow), and the loop now stops on
 * an empty page instead of re-querying with ever-larger offsets until the
 * runtime budget expired.
 *
 * @param cls the class whose fragment is extracted
 * @param maxFragmentDepth the maximum depth of the extracted fragment
 * @return the accumulated fragment model
 */
@Override
public Model extractFragment(OWLClass cls, int maxFragmentDepth) {
    startTime = System.currentTimeMillis();
    Model fragment = ModelFactory.createDefaultModel();
    Query query = buildConstructQuery(cls, maxFragmentDepth);
    long pageSize = PaginationUtils.adjustPageSize(qef, 10000);
    query.setLimit(pageSize);
    long offset = 0;
    while (getRemainingRuntime() > 0) {
        query.setOffset(offset);
        Model page = qef.createQueryExecution(query).execConstruct();
        // An empty page means the result set is exhausted - stop paging.
        if (page.isEmpty()) {
            break;
        }
        fragment.add(page);
        offset += pageSize;
    }
    return fragment;
}
@Override public QueryExecutionCompare createQueryExecution(Query query) { if(removeSlices) { query = (Query)query.clone(); query.setLimit(Query.NOLIMIT); query.setOffset(Query.NOLIMIT); } //boolean isOrdered = !query.getOrderBy().isEmpty(); QueryExecution qea = a.createQueryExecution(query); QueryExecution qeb = b.createQueryExecution(query); QueryExecutionCompare result = new QueryExecutionCompare(query, qea, qeb, false); //QueryExecution result = QueryExecutionWrapper.wrap(tmp); return result; }
/**
 * Creates a paginated iterator over the prototype query, shifted by the
 * requested offset and capped by the requested limit on top of any
 * OFFSET/LIMIT the prototype already carries. Null arguments mean
 * "no extra offset" / "no limit" respectively.
 *
 * @param offset additional offset on top of the prototype's own offset, or null
 * @param limit maximum number of items to return, or null for unlimited
 * @return an iterator that pages through the combined slice
 */
public PaginationQueryIterator createQueryIterator(Long offset, Long limit) {
    // Normalize nulls: no offset -> 0, no limit -> "infinite".
    long o = offset == null ? 0 : offset;
    long l = limit == null ? Long.MAX_VALUE : limit;
    // The prototype may already have its own OFFSET; NOLIMIT means none.
    long queryOffset = proto.getOffset() == Query.NOLIMIT ? 0 : proto.getOffset();
    long itemOffset = queryOffset + o;
    // Remaining budget of the prototype's own LIMIT after skipping o items.
    // NOTE(review): if o exceeds proto.getLimit() this goes negative - confirm
    // callers never request an offset past the prototype's limit.
    long queryLimit = proto.getLimit() == Query.NOLIMIT ? Long.MAX_VALUE : proto.getLimit() - o;
    long itemLimit = Math.min(queryLimit, l);
    // Map "infinite" back to the Query API's NOLIMIT sentinel.
    itemLimit = itemLimit == Long.MAX_VALUE ? Query.NOLIMIT : itemLimit;
    Query clone = proto.cloneQuery();
    clone.setOffset(itemOffset);
    clone.setLimit(itemLimit);
    PaginationQueryIterator result = new PaginationQueryIterator(clone, pageSize);
    return result;
} } // trailing brace closes the enclosing class (it was part of this line)
/** * Sets up a new query execution. * * @param source The input CSV file * @param options Configuration options for the CSV file * @param query The input query */ public TarqlQueryExecution(InputStreamSource source, CSVOptions options, TarqlQuery query) { if (options == null) { options = new CSVOptions(); } if (options.hasColumnNamesInFirstRow() == null) { // Presence or absence of header row was not specified on command line or FROM clause. // So we fall back to the convention where OFFSET 1 in the query // indicates that a header is present. To make that work, we // set the OFFSET to 0 and tell the parser to gobble up the first // row for column names. options = new CSVOptions(options); Query firstQuery = query.getQueries().get(0); if (firstQuery.getOffset() == 1) { options.setColumnNamesInFirstRow(true); firstQuery.setOffset(0); } } table = new CSVTable(source, options); tq = query; }
/**
 * Transfers a slice operator's start/length onto the query as OFFSET and
 * LIMIT (skipping unset values), then continues visiting the sub-operator.
 */
@Override
public void visit(OpSlice opSlice) {
    long start = opSlice.getStart();
    long length = opSlice.getLength();
    if (start != Query.NOLIMIT) {
        query.setOffset(start);
    }
    if (length != Query.NOLIMIT) {
        query.setLimit(length);
    }
    opSlice.getSubOp().visit(this);
}
/**
 * Copy all the modifications from the Solution Modifier argument
 * @param solutionModifier The solution modifier to copy from.
 */
public void addAll(SolutionModifierHandler solutionModifier) {
    List<SortCondition> conditions = solutionModifier.query.getOrderBy();
    if (conditions != null) {
        conditions.forEach(query::addOrderBy);
    }
    query.getGroupBy().addAll(solutionModifier.query.getGroupBy());
    query.getHavingExprs().addAll(solutionModifier.query.getHavingExprs());
    query.setLimit(solutionModifier.query.getLimit());
    query.setOffset(solutionModifier.query.getOffset());
}
/**
 * Copy all the modifications from the Solution Modifier argument
 * @param solutionModifier The solution modifier to copy from.
 */
public void addAll(SolutionModifierHandler solutionModifier) {
    List<SortCondition> orderConditions = solutionModifier.query.getOrderBy();
    if (orderConditions != null) {
        for (SortCondition condition : orderConditions) {
            query.addOrderBy(condition);
        }
    }
    // Copy grouping, having, and slice modifiers verbatim.
    query.getGroupBy().addAll(solutionModifier.query.getGroupBy());
    query.getHavingExprs().addAll(solutionModifier.query.getHavingExprs());
    query.setLimit(solutionModifier.query.getLimit());
    query.setOffset(solutionModifier.query.getOffset());
}
/**
 * Creates a query execution whose OFFSET/LIMIT are widened to align with
 * pageExpandSize boundaries - presumably so page-aligned result ranges can
 * be reused/cached; TODO confirm the intent against the surrounding class.
 *
 * NOTE(review): this method currently always returns null - the wrapping of
 * the aligned execution (QueryExecutionRange) is commented out and looks
 * unfinished. Also check the sign of 'start': since o &lt;= offset here,
 * o - offset is never positive; 'offset - o' may have been intended.
 */
@Override
public QueryExecution createQueryExecution(Query query) {
    Query q = query.cloneQuery();
    // Normalize the NOLIMIT sentinel to a concrete 0 offset.
    long offset = q.getOffset() == Query.NOLIMIT ? 0 : q.getOffset();
    long limit = q.getLimit();
    // Round the offset down to the nearest page boundary.
    long o = (offset / pageExpandSize) * pageExpandSize;
    long l;
    if(limit != Query.NOLIMIT) {
        // Round the end of the requested slice up to the next page boundary.
        long target = offset + limit;
        long t = ((target / pageExpandSize) + 1) * pageExpandSize;
        l = t - o;
    } else {
        l = Query.NOLIMIT;
    }
    long start = o - offset; // Align offset and target to pageExpandSize boundaries
    q.setOffset(o);
    q.setLimit(l);
    QueryExecution qe = qef.createQueryExecution(q);
    //QueryExecutionRange result = new QueryExecutionRange(qe, start, l);
    QueryExecution result = null;
    return result;
}
// Fragment of a paging loop (enclosing method not visible here):
// advance the OFFSET by one page per iteration (i is the page counter)
// and materialize that page with a CONSTRUCT query.
query.setOffset(i++ * pageSize);
QueryExecution qe = ksQef.createQueryExecution(query);
Model tmp = qe.execConstruct();
// NOTE(review): qe is not closed in this fragment - confirm the enclosing
// code closes it after the model is consumed.
public static Query createQueryList(UnaryRelation concept, Long limit, Long offset) { Query result = new Query(); result.setQuerySelectType(); result.setDistinct(true); result.setLimit(limit == null ? Query.NOLIMIT : limit); result.setOffset(offset == null ? Query.NOLIMIT : offset); result.getProject().add(concept.getVar()); Element e = concept.getElement(); if(e instanceof ElementSubQuery) { e = ElementUtils.createElementGroup(e); } result.setQueryPattern(e); // String str = result.toString(); // System.out.println(str); return result; }
/**
 * Retrieves triples matching the given subject/predicate/object pattern,
 * sliced by the given offset and limit. A null pattern node acts as a
 * wildcard for that position.
 *
 * @throws RDFServiceException on failure of the underlying RDF service
 */
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
    Query query = QueryFactory.create("CONSTRUCT WHERE { ?s ?p ?o }", Syntax.syntaxSPARQL_11);
    // Bind only the pattern positions that were actually supplied.
    QuerySolutionMap bindings = new QuerySolutionMap();
    if (subject != null) {
        bindings.add("s", subject);
    }
    if (predicate != null) {
        bindings.add("p", predicate);
    }
    if (object != null) {
        bindings.add("o", object);
    }
    query.setOffset(offset);
    query.setLimit(limit);
    Model triples = ModelFactory.createDefaultModel();
    DatasetWrapper dw = getDatasetWrapper();
    try {
        Dataset d = dw.getDataset();
        try (QueryExecution qexec = QueryExecutionFactory.create(query, d, bindings)) {
            qexec.execConstruct(triples);
        }
        return triples;
    } finally {
        dw.close();
    }
}