/**
 * Convenience constructor: wraps the raw {@code SparqlEndpoint} into a
 * {@code SparqlEndpointKS} and delegates to the main constructor with all
 * other arguments unchanged.
 *
 * @param se the raw SPARQL endpoint to learn from
 * @param resource the entity (class or property) whose axioms are enriched
 * @param threshold minimum confidence for accepting a learned axiom
 * @param nrOfAxiomsToLearn maximum number of axioms to learn per axiom type
 * @param useInference whether to use the inferred schema during learning
 * @param verbose whether to print verbose progress output
 * @param chunksize page size for SPARQL result retrieval
 * @param maxExecutionTimeInSeconds per-algorithm time budget
 * @param omitExistingAxioms whether to skip axioms already present in the KB
 */
public Enrichment(SparqlEndpoint se, OWLEntity resource, double threshold, int nrOfAxiomsToLearn, boolean useInference, boolean verbose, int chunksize, int maxExecutionTimeInSeconds, boolean omitExistingAxioms) {
    this(new SparqlEndpointKS(se), resource, threshold, nrOfAxiomsToLearn, useInference, verbose, chunksize, maxExecutionTimeInSeconds, omitExistingAxioms);
}
/**
 * Creates a reasoner backed by the given knowledge source, reusing the
 * source's query execution factory for all SPARQL requests.
 *
 * @param ks the SPARQL endpoint knowledge source to reason over
 */
public SPARQLReasoner(SparqlEndpointKS ks) {
    super(ks);
    this.qef = ks.getQueryExecutionFactory();
}
/**
 * Sets up the generator: configures a cached, throttled knowledge source for
 * the given endpoint, creates the local data directory and initializes the
 * SPARQL reasoner. Statement order matters — {@code ks} must be fully
 * configured before {@code ks.init()} and before the reasoner is built.
 *
 * @param endpoint the remote SPARQL endpoint (DBpedia in this benchmark)
 * @param benchmarkDirectory root directory for cache and generated data
 * @param threadCount number of worker threads used downstream
 * @throws ComponentInitException if the knowledge source or reasoner fails to initialize
 */
public SPARQLLearningProblemsGenerator(SparqlEndpoint endpoint, File benchmarkDirectory, int threadCount) throws ComponentInitException {
    this.benchmarkDirectory = benchmarkDirectory;
    this.threadCount = threadCount;
    // setup the endpoint
    ks = new SparqlEndpointKS(endpoint);
    // H2 cache file under the benchmark dir; mv_store=false selects the legacy page store
    ks.setCacheDir(new File(benchmarkDirectory, "cache").getPath() + ";mv_store=false");
    ks.setPageSize(50000);
    ks.setUseCache(true);
    // 100 ms delay between queries to be polite to the public endpoint
    ks.setQueryDelay(100);
    ks.init();
    // the directory where instance data is stored
    dataDir = new File(benchmarkDirectory, "data/dbpedia/");
    dataDir.mkdirs();
    schema = ModelFactory.createDefaultModel();
    // initialize the reasoner
    reasoner = new SPARQLReasoner(ks);
    reasoner.init();
}
/**
 * Initializes the knowledge source. For remote sources this resolves the
 * endpoint (building it from url/graph URIs if not set) and probes once
 * whether the endpoint supports SPARQL 1.1; in all cases it ensures a query
 * execution factory exists. Idempotent: repeated calls are no-ops apart
 * from the log line.
 *
 * @throws ComponentInitException if building the query execution factory fails
 */
@Override
public void init() throws ComponentInitException {
    if (!initialized) {
        if (isRemote()) {
            if (endpoint == null) {
                endpoint = new SparqlEndpoint(url, defaultGraphURIs, namedGraphURIs);
            }
            // probe the endpoint's SPARQL 1.1 capability once, at init time
            supportsSPARQL_1_1 = new SPARQLTasks(endpoint).supportsSPARQL_1_1();
        }
        if (qef == null) {
            qef = buildQueryExecutionFactory();
        }
        initialized = true;
    }
    // fix: removed the redundant unconditional 'initialized = true;' that followed
    // the guard — it duplicated the assignment inside the if-block.
    logger.info("SPARQL KB setup:\n" + toString());
}
// NOTE(review): fragment — the enclosing try block and method start outside this view;
// braces here are unbalanced by design of the excerpt. Do not restructure without the full context.
// NOTE(review): the ComponentInitException is only printed and execution continues —
// confirm this best-effort behavior is intended; prefer a logger over printStackTrace().
// The cache directory is keyed by the URL-encoded endpoint URL (UTF-8).
ks.init(); } catch (ComponentInitException e1) { e1.printStackTrace(); if(ks.isRemote()){ try { cacheDir = "cache" + File.separator + URLEncoder.encode(ks.getEndpoint().getURL().toString(), "UTF-8"); } catch (UnsupportedEncodingException e) {
/**
 * Initializes this component: selects a query execution factory matching the
 * knowledge source (the source's own factory for remote endpoints, a
 * model-backed factory for local sources), creates a SPARQL reasoner over it
 * unless one was injected, and marks the component initialized.
 *
 * @throws ComponentInitException declared by the component contract; not thrown directly here
 */
@Override
public void init() throws ComponentInitException {
    if(ks.isRemote()){
        ksQef = ks.getQueryExecutionFactory();
    } else {
        // local source: query the in-memory Jena model directly
        ksQef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel());
    }
    // respect an externally injected reasoner; only build a default one if absent
    if(ksReasoner == null){
        ksReasoner = new SPARQLReasoner(ksQef);
    }
    // ksReasoner.supportsSPARQL1_1();
    reasoner = ksReasoner;
    initialized = true;
}
/**
 * Demo driver: computes informativeness measures (inverse triple frequency
 * and in/out predicate frequency) for two DBpedia properties against a
 * sample individual, printing each result to stdout.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(
            new URL("http://dbpedia.org/sparql"), "http://dbpedia.org"));
    ks.init();

    OWLProperty p1 = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/birthPlace"));
    OWLProperty p2 = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/genre"));
    OWLIndividual ind1 = new OWLNamedIndividualImpl(IRI.create("http://dbpedia.org/resource/Kid_Canaveral"));

    InformativenessMeasures measures = new InformativenessMeasures(ks.getQueryExecutionFactory());

    // inverse triple frequency per property
    for (OWLProperty p : new OWLProperty[] { p1, p2 }) {
        System.out.println("itf(" + p + ") = " + measures.getInverseTripleFrequency(p));
    }

    // outgoing and incoming predicate frequency for the sample individual
    for (OWLProperty p : new OWLProperty[] { p1, p2 }) {
        double pfOut = measures.getPredicateFrequency(ind1, p, true);
        double pfIn = measures.getPredicateFrequency(ind1, p, false);
        System.out.println("pf_out(" + ind1 + "," + p + ") = " + pfOut);
        System.out.println("pf_in(" + ind1 + "," + p + ") = " + pfIn);
    }
}
/**
 * Entry point: runs the biomedical predicate-existence analysis against the
 * SAKE Virtuoso endpoint using the "http://biomedical.org" graph.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(
            SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql",
                    "http://biomedical.org"));
    ks.init();
    new PredicateExistenceFilterBiomedical().analyze(ks);
}
// NOTE(review): fragment — surrounding method is outside this view.
// NOTE(review): the first assignment (getEndpointDBpedia) is dead code — it is
// immediately overwritten by the SAKE mirror endpoint; remove one or the other.
// Cache/delay/retry are all disabled here, presumably for benchmark timing — confirm.
SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.setQueryDelay(0); ks.setUseCache(false); ks.setRetryCount(0); ks.init(); TreeBasedConciseBoundedDescriptionGenerator cbdGen = new TreeBasedConciseBoundedDescriptionGenerator(ks.getQueryExecutionFactory()); Model cbd = cbdGen.getConciseBoundedDescription("http://dbpedia.org/resource/Dan_Gauthier", cbdTree); System.out.println(cbd.size());
// NOTE(review): fragment — enclosing method and the try block's closing brace are outside this view.
// Wraps the factory in an H2-backed cache (7-day TTL, legacy page store via mv_store=false),
// then hands both the cache dir and the wrapped factory to the knowledge source.
// NOTE(review): ComponentInitException is swallowed after printStackTrace — confirm the
// caller can cope with a partially initialized 'ks'.
qef = CacheUtilsH2.createQueryExecutionFactory(qef, cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI-cache;mv_store=false", false, TimeUnit.DAYS.toMillis(7) ); try { ks = new SparqlEndpointKS(endpoint); ks.setCacheDir(cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI-cache;mv_store=false"); ks.setQueryExecutionFactory(qef); ks.init(); } catch (ComponentInitException e) { e.printStackTrace();
// NOTE(review): fragment — braces unbalanced; the enclosing try and method are outside this view.
// Guesses whether the CLI-supplied resource URI denotes a class, object property or data
// property by probing the endpoint, and fails fast with an IllegalArgumentException if
// the type cannot be determined.
// NOTE(review): URISyntaxException is only printed — confirm continuing afterwards is safe.
ks = new SparqlEndpointKS(se, cacheDir); ks.init(); } catch (URISyntaxException e2) { e2.printStackTrace(); if(ks.isRemote()){ SparqlQuery sq = new SparqlQuery(query, ks.getEndpoint()); try { ResultSet q = sq.send(); resource = new SPARQLTasks(ks.getEndpoint()).guessResourceType(resourceURI.toString(), true); if(resource == null) { throw new IllegalArgumentException("Could not determine the type (class, object property or data property) of input resource " + options.valueOf("resource")
/**
 * Sets up the biomedical benchmark: loads the DrugBank instance dump and its
 * RDF/XML schema from fixed local paths, merges schema into the data model,
 * wraps everything in a local knowledge source with a SPARQL reasoner, and
 * creates the output data directory.
 *
 * @param benchmarkDirectory root directory for generated benchmark data
 * @param threadCount number of worker threads used downstream
 * @throws Exception if the dump/schema files cannot be read or a component fails to init
 */
public BiomedicalLearningProblemsGenerator(File benchmarkDirectory, int threadCount) throws Exception {
    this.benchmarkDirectory = benchmarkDirectory;
    this.threadCount = threadCount;

    // instance data: DrugBank N-Triples dump (hard-coded experiment path)
    Model model = RDFDataMgr.loadModel("file:/home/user/work/experiments/qtl/data/biomedical/drugbank_dump.nt", Lang.NTRIPLES);

    // schema: RDF/XML ontology, merged into the instance data below
    schema = ModelFactory.createDefaultModel();
    // fix: close the input stream — it was previously leaked
    try (FileInputStream is = new FileInputStream(
            new File("/home/user/work/experiments/qtl/data/biomedical/", "drugbank.schema.owl"))) {
        schema.read(is, null, "RDF/XML");
    }
    schema.write(System.out, "TURTLE");
    model.add(schema);

    ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.setUseCache(true);
    ks.setQueryDelay(100);
    ks.init();

    reasoner = new SPARQLReasoner(ks);
    reasoner.init();

    cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());

    dataDir = new File(benchmarkDirectory, "data/biomedical/");
    dataDir.mkdirs();
}
// NOTE(review): fragment — enclosing try/method are outside this view.
// Builds a knowledge source over the endpoint with an H2 cache (legacy page store).
// NOTE(review): ComponentInitException is swallowed after printStackTrace — confirm
// downstream code tolerates an uninitialized 'ks'.
ks = new SparqlEndpointKS(endpoint); ks.setCacheDir(cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI-cache;mv_store=false"); ks.init(); } catch (ComponentInitException e) { e.printStackTrace();
// NOTE(review): fragment — surrounding method is outside this view.
// Uncached, no-retry knowledge source (presumably for a one-shot export) plus a
// fresh OWL API ontology manager for whatever follows — confirm against the full method.
SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.setUseCache(false); ks.setRetryCount(0); ks.init(); OWLOntologyManager man = OWLManager.createOWLOntologyManager();
// NOTE(review): fragment — braces unbalanced; the catch block appears to run on past
// printStackTrace into the SPARQL 1.1 flag and reasoner setup. Verify against the full
// source whether those statements belong inside or after the catch.
// NOTE(review): init failure is only printed — confirm best-effort continuation is intended.
final SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); try { ks.init(); } catch (ComponentInitException e) { e.printStackTrace(); ks.setSupportsSPARQL_1_1(supportsSPARQL_1_1); final SPARQLReasoner reasoner = new SPARQLReasoner(new SparqlEndpointKS(endpoint)); if (useInference && !reasoner.isPrepared()) { System.out.print("Precomputing subsumption hierarchy ... ");
/**
 * Builds the test fixture: a small in-memory Turtle KB exercising rdfs:domain,
 * rdfs:range and rdfs:subClassOf, plus a local knowledge source, an RDFS-aware
 * SPARQL reasoner with a precomputed class hierarchy, a CBD generator, a query
 * tree factory and an RDFS LGG generator.
 */
@Before
public void setUp() throws Exception {
    // NOTE(review): the triple ':B rdfs:subClassOf :C .' appears twice in the KB string;
    // duplicates are harmless in RDF but one occurrence could be dropped. Left untouched
    // here because the literal is runtime data.
    String kb = "" + "@prefix : <http://dl-learner.org/test/> ." + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ." + "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> ." + ":x1 :r :y1 ." + ":x2 rdf:type :A ." + ":r rdfs:domain :A ." + ":x3 :s :y2 ." + ":x4 rdf:type :B ." + ":s rdfs:domain :C ." + ":B rdfs:subClassOf :C ." + ":x5 :t :y3 ." + ":y3 rdf:type :B ." + ":x6 :t :y4 ." + ":t rdfs:range :C ." + ":B rdfs:subClassOf :C .";
    Model model = ModelFactory.createDefaultModel();
    RDFDataMgr.read(model, new StringReader(kb), null, Lang.TURTLE);
    SparqlEndpointKS ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.init();
    // hierarchy must be precomputed before the RDFS LGG generator uses the reasoner
    AbstractReasonerComponent reasoner = new SPARQLReasoner(ks);
    reasoner.setPrecomputeClassHierarchy(true);
    reasoner.init();
    cbdGenerator = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
    treeFactory = new QueryTreeFactoryBase();
    lggGen = new LGGGeneratorRDFS(reasoner);
}
// NOTE(review): fragment — surrounding method is outside this view.
// DBpedia knowledge source with a 1-day H2 cache frontend under ./cache;
// 'op' (dbo:birthPlace) is presumably used by code that follows — confirm.
OWLObjectProperty op = df.getOWLObjectProperty(IRI.create("http://dbpedia.org/ontology/birthPlace")); SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); ks.setCache(CacheUtilsH2.createCacheFrontend("cache", true, TimeUnit.DAYS.toMillis(1)));
/**
 * Returns every named class of the knowledge base.
 *
 * <p>Remote sources are queried via {@code SPARQLTasks}; for local sources
 * the Jena model's classes are enumerated, with built-in OWL/RDFS/RDF
 * vocabulary and anonymous class expressions filtered out. The result is a
 * sorted {@code TreeSet} in both cases for the local branch.
 *
 * @return the set of all named OWL classes in the knowledge base
 */
protected Set<OWLClass> getAllClasses() {
    if (!ks.isRemote()) {
        return ((LocalModelBasedSparqlEndpointKS) ks).getModel().listClasses()
                .filterDrop(new OWLFilter())
                .filterDrop(new RDFSFilter())
                .filterDrop(new RDFFilter())
                .toList().stream()
                .filter(c -> !c.isAnon())
                .map(c -> df.getOWLClass(IRI.create(c.getURI())))
                .collect(Collectors.toCollection(TreeSet::new));
    }
    return new SPARQLTasks(ks.getEndpoint()).getAllClasses();
}
private void keepMostGeneralClasses(Set<OWLClass> classes) { if (ks.isRemote()) { if (reasoner.isPrepared()) { ClassHierarchy h = reasoner.getClassHierarchy(); for (OWLClass nc : new HashSet<>(classes)) { classes.removeAll(h.getSubClasses(nc)); } } } else { OntModel model = ((LocalModelBasedSparqlEndpointKS) ks).getModel(); // Set<OWLClass> topClasses = new HashSet<OWLClass>(); // for(OntClass cls : model.listOWLClasses().toSet()){ // Set<OntClass> superClasses = cls.listSuperClasses().toSet(); // if(superClasses.isEmpty() || // (superClasses.size() == 1 && superClasses.contains(model.getOntClass(org.apache.jena.vocabulary.OWL.Thing.getURI())))){ // topClasses.add(df.getOWLClass(IRI.create(cls.getURI())); // } // // } // classes.retainAll(topClasses); for (OWLClass nc : new HashSet<>(classes)) {//System.out.print(nc + "::"); for (OntClass cls : model.getOntClass(nc.toStringID()).listSubClasses().toSet()) {//System.out.print(cls + "|"); classes.remove(df.getOWLClass(IRI.create(cls.getURI()))); } // System.out.println(); } } }
// NOTE(review): fragment — the enclosing method and the closing braces are outside this view.
// Describes the endpoint itself as an individual of the enrichment vocabulary's
// SPARQLEndpoint class and, if present, links it to its first default graph URI.
// NOTE(review): 'knowldegeBaseInd' is misspelled ("knowldege"); rename to
// 'knowledgeBaseInd' once the whole method is in view.
OWLNamedIndividual knowldegeBaseInd = f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getURL())); ax = f.getOWLClassAssertionAxiom(EnrichmentVocabulary.SPARQLEndpoint, knowldegeBaseInd); axioms.add(ax); if(!ks.getEndpoint().getDefaultGraphURIs().isEmpty()) { ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.defaultGraph, knowldegeBaseInd, f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getDefaultGraphURIs().iterator().next()))); axioms.add(ax);