/**
 * Creates a generator that works against a local in-memory model.
 *
 * @param model the model to generate descriptions from
 */
public SymmetricConciseBoundedDescriptionGeneratorImpl(Model model) {
    // Wrap the model so the query-based constructor can be reused.
    this(new QueryExecutionFactoryModel(model));
}
/**
 * Creates a generator backed by a local in-memory model.
 *
 * @param model the model to generate descriptions from
 */
public ConciseBoundedDescriptionGeneratorImpl(Model model) {
    // Delegate to the factory-based constructor.
    this(new QueryExecutionFactoryModel(model));
}
/**
 * Creates a reasoner that evaluates its queries on a local model.
 *
 * @param model the model the reasoner operates on
 */
public SPARQLReasoner(Model model) {
    // Reuse the factory-based constructor via an in-memory factory.
    this(new QueryExecutionFactoryModel(model));
}
/**
 * Creates an NBR instance operating on the given local model.
 *
 * @param model the underlying model; also backs the query execution factory
 */
public NBR(Model model) {
    this.model = model;
    this.qef = new QueryExecutionFactoryModel(model);
    this.noSequences = new ArrayList<>();
}
/**
 * Creates a fluent query execution factory backed by the given model.
 *
 * @param model the model to query
 * @return a fluent factory over the model
 * @deprecated Use {@link #from(Model)} instead.
 */
@Deprecated
public static FluentQueryExecutionFactory<?> model(Model model) {
    // Diamond operator: the file already uses Java 7+ generics inference.
    return new FluentQueryExecutionFactory<>(new QueryExecutionFactoryModel(model));
}
/**
 * Creates a fluent query execution factory over the given model.
 *
 * @param model the model to query
 * @return a fluent factory wrapping an in-memory execution factory
 */
public static FluentQueryExecutionFactory<?> from(Model model) {
    QueryExecutionFactoryModel backing = new QueryExecutionFactoryModel(model);
    return new FluentQueryExecutionFactory<Object>(backing);
}
@Override protected QueryExecutionFactory buildQueryExecutionFactory() { QueryExecutionFactory qef = new QueryExecutionFactoryModel(model); // we are working on an in-memory model, but still should enable caching by default qef = CacheUtilsH2.createQueryExecutionFactory(qef, cacheDir, true, cacheTTL); return qef; }
/**
 * Writes a model into a destination. This function delegates to
 * {@code write(QueryExecutionFactory qef)}.
 *
 * @param model the model
 * @throws RdfWriterException the triple writer exception
 */
default void write(Model model) throws RdfWriterException {
    QueryExecutionFactoryModel qef = new QueryExecutionFactoryModel(model);
    write(qef);
}
/**
 * Writes a model into a destination by delegating to
 * {@code write(QueryExecutionFactory qef)}.
 *
 * @param model the model
 * @throws RdfWriterException the triple writer exception
 */
default void write(Model model) throws RdfWriterException {
    // Wrap the model in an in-memory execution factory and reuse the factory overload.
    write(new QueryExecutionFactoryModel(model));
}
/**
 * Generates test cases for the given schema source by running all
 * configured test generators in parallel over the source's model.
 *
 * @param source the schema source to generate tests for
 * @return the set of generated test cases
 */
@Override
public Collection<TestCase> generate(SchemaSource source) {
    try (QueryExecutionFactoryModel queryFactory = new QueryExecutionFactoryModel(source.getModel())) {
        Set<TestCase> result = testGenerators.parallelStream()
                .flatMap(generator -> generate(queryFactory, source, generator).stream())
                .collect(Collectors.toSet());
        log.info("{} generated {} tests using {} TAGs",
                source.getUri(), result.size(), testGenerators.size());
        return result;
    }
}
/**
 * Generates test cases for the given schema source, running every
 * registered test generator in parallel against the source model.
 *
 * @param source the schema source to generate tests for
 * @return the set of generated test cases
 */
@Override
public Collection<TestCase> generate(SchemaSource source) {
    Model schemaModel = source.getModel();
    try (QueryExecutionFactoryModel queryFactory = new QueryExecutionFactoryModel(schemaModel)) {
        Set<TestCase> tests = testGenerators.parallelStream()
                .flatMap(tg -> generate(queryFactory, source, tg).stream())
                .collect(Collectors.toSet());
        log.info("{} generated {} tests using {} TAGs",
                source.getUri(), tests.size(), testGenerators.size());
        return tests;
    }
}
/**
 * Creates a CBD generator whose backing model is extended with typing
 * triples that make blank nodes resolvable.
 *
 * @param model the input model; a derived, extended copy backs the generator
 */
public BlanknodeResolvingCBDGenerator(Model model) {
    String query = "prefix : <http://dl-learner.org/ontology/> " +
            "construct { ?s ?p ?o ; ?type ?s .} " +
            "where { ?s ?p ?o . bind( if(isIRI(?s),:sameIri,:sameBlank) as ?type )}";
    qef = new QueryExecutionFactoryModel(model);
    // try-with-resources: the original closed the execution only on the happy
    // path, leaking it if execConstruct() threw.
    try (QueryExecution qe = qef.createQueryExecution(query)) {
        Model extendedModel = qe.execConstruct();
        // Replace the factory so all later queries see the extended model.
        qef = new QueryExecutionFactoryModel(extendedModel);
    }
}
// Executes a SELECT query against the given local model and returns the raw result set.
//
// NOTE(review): the QueryExecution created here is never closed — closing it would
// invalidate the lazily-evaluated ResultSet handed back to the caller. Consider
// materializing the results (e.g. ResultSetFactory.copyResults) inside a
// try-with-resources so the execution can be released; TODO confirm callers do not
// rely on streaming evaluation.
//
// @param query the SPARQL SELECT query string
// @param model the local model to query
// @return the (lazy) result set of the query
protected ResultSet executeSelectQuery(String query, Model model) { logger.trace("Sending query on local model\n{} ...", query); QueryExecutionFactory qef = new QueryExecutionFactoryModel(model); QueryExecution qexec = qef.createQueryExecution(query); return qexec.execSelect(); }
/**
 * Enriches the input model in place: resources matching the concept are
 * looked up and every fetched model is merged into the input.
 *
 * @param input the model to enrich; mutated by this call
 */
@Override
public void apply(Model input) {
    QueryExecutionFactory qef = new QueryExecutionFactoryModel(input);
    List<Resource> resources = ServiceUtils.fetchListResources(qef, concept);
    Map<Resource, Model> extra = lookupService.fetchMap(resources);
    // Merge all fetched models into the input.
    extra.values().forEach(input::add);
}
}
/**
 * Creates a schema generator whose queries run against a reasoning-enabled
 * view of the given model.
 *
 * @param model the base model; wrapped in an ontology model using the
 *              configured {@code reasoningProfile}
 */
public AbstractSchemaGenerator(Model model) {
    // Enable reasoning on the model so inferred triples are visible to queries.
    this.model = ModelFactory.createOntologyModel(reasoningProfile, model);
    this.qef = new QueryExecutionFactoryModel(this.model);
    this.reasoner = new SPARQLReasoner(this.qef);
}
/**
 * Collects all prefix declarations found in the given resource's model.
 *
 * @param resource the resource whose model is queried
 * @return an immutable set of the discovered prefix declarations
 */
public Set<PrefixDeclaration> getPrefixDeclarations(Resource resource) {
    ImmutableSet.Builder<PrefixDeclaration> prefixes = ImmutableSet.builder();
    try (QueryExecutionFactory qef = new QueryExecutionFactoryModel(resource.getModel());
         QueryExecution qe = qef.createQueryExecution(sparqlQuery)) {
        qe.execSelect().forEachRemaining(row -> {
            PrefixDeclaration declaration =
                    PrefixDeclarationReader.create().read(row.getResource("declare"));
            prefixes.add(declaration);
        });
    } catch (Exception e) {
        // NOTE(review): failures are printed and swallowed, returning whatever was
        // collected so far — consider logging via the project logger or rethrowing.
        e.printStackTrace();
    }
    return prefixes.build();
}
@Override public SparqlService createInstance() throws Exception { Graph graph = new GraphResource(this.fileNameOrUrl); QueryExecutionFactory qef = new QueryExecutionFactoryModel(graph); // Test whether the resource works QueryExecution qe = qef.createQueryExecution("Ask { ?s ?p ?o }"); boolean ask = qe.execAsk(); SparqlService result = new SparqlServiceImpl(qef, null); return result; } }
@Override public void init() throws ComponentInitException { if(ks.isRemote()){ ksQef = ks.getQueryExecutionFactory(); } else { ksQef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel()); } if(ksReasoner == null){ ksReasoner = new SPARQLReasoner(ksQef); } // ksReasoner.supportsSPARQL1_1(); reasoner = ksReasoner; initialized = true; }
private QueryExecutionFactoryModel generateExecutionFactory(){ Model model = ModelFactory.createDefaultModel(); // Set the defined prefixes PrefixNSService.setNSPrefixesInModel(model); try { getPatternsReader(baseDirectories).read(model); autoGeneratorReaders.read(model); } catch (RdfReaderException e) { throw new IllegalArgumentException(e.getMessage(), e); } return new QueryExecutionFactoryModel(model); // Update pattern service }
private QueryExecutionFactoryModel generateExecutionFactory(){ Model model = ModelFactory.createDefaultModel(); // Set the defined prefixes PrefixNSService.setNSPrefixesInModel(model); try { getPatternsReader(baseDirectories).read(model); autoGeneratorReaders.read(model); } catch (RdfReaderException e) { throw new IllegalArgumentException(e.getMessage(), e); } return new QueryExecutionFactoryModel(model); // Update pattern service }