ks.init(); } catch (ComponentInitException e1) { e1.printStackTrace();
/**
 * Demo: computes informativeness measures — inverse triple frequency (itf)
 * per property, and outgoing/incoming predicate frequency (pf) for a sample
 * individual — against the public DBpedia SPARQL endpoint, printing each value.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(
            new SparqlEndpoint(new URL("http://dbpedia.org/sparql"), "http://dbpedia.org"));
    ks.init();

    OWLProperty birthPlace = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/birthPlace"));
    OWLProperty genre = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/genre"));
    OWLIndividual sampleIndividual = new OWLNamedIndividualImpl(IRI.create("http://dbpedia.org/resource/Kid_Canaveral"));

    InformativenessMeasures measures = new InformativenessMeasures(ks.getQueryExecutionFactory());

    // inverse triple frequency for each property
    for (OWLProperty p : new OWLProperty[]{birthPlace, genre}) {
        System.out.println("itf(" + p + ") = " + measures.getInverseTripleFrequency(p));
    }

    // outgoing (true) and incoming (false) predicate frequency per property
    for (OWLProperty p : new OWLProperty[]{birthPlace, genre}) {
        double pfOut = measures.getPredicateFrequency(sampleIndividual, p, true);
        double pfIn = measures.getPredicateFrequency(sampleIndividual, p, false);
        System.out.println("pf_out(" + sampleIndividual + "," + p + ") = " + pfOut);
        System.out.println("pf_in(" + sampleIndividual + "," + p + ") = " + pfIn);
    }
}
/**
 * Demo: learns object property domain axioms for dbo:author against the public
 * DBpedia endpoint and prints each best evaluated axiom together with up to
 * five positive examples.
 */
public static void main(String[] args) throws Exception {
    // render OWL objects in DL syntax
    ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer());

    SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
    ks.init();

    ObjectPropertyDomainAxiomLearner learner = new ObjectPropertyDomainAxiomLearner(ks);
    learner.setPropertyToDescribe(new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/author")));
    learner.setUseSampling(false);
    learner.setBatchMode(true);
    learner.setProgressMonitor(new ConsoleAxiomLearningProgressMonitor());
    learner.init();

    learner.start();

    learner.getCurrentlyBestEvaluatedAxioms().forEach(axiom -> {
        System.out.println("---------------\n" + axiom);
        learner.getPositiveExamples(axiom).stream().limit(5).forEach(System.out::println);
    });
}
/**
 * Demo: learns object property range axioms for dbo:author against the public
 * DBpedia endpoint and prints each best evaluated axiom together with up to
 * five positive examples.
 */
public static void main(String[] args) throws Exception {
    // render OWL objects in DL syntax
    ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer());

    SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
    ks.init();

    ObjectPropertyRangeAxiomLearner learner = new ObjectPropertyRangeAxiomLearner(ks);
    learner.setPropertyToDescribe(new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/author")));
    learner.setUseSampling(false);
    learner.setBatchMode(true);
    learner.setProgressMonitor(new ConsoleAxiomLearningProgressMonitor());
    learner.init();

    learner.start();

    learner.getCurrentlyBestEvaluatedAxioms().forEach(axiom -> {
        System.out.println("---------------\n" + axiom);
        learner.getPositiveExamples(axiom).stream().limit(5).forEach(System.out::println);
    });
}
}
/**
 * Demo: learns sub-object-property axioms for dbo:author against the public
 * DBpedia endpoint and prints each best evaluated axiom together with up to
 * five positive examples.
 */
public static void main(String[] args) throws Exception {
    // render OWL objects in DL syntax
    ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer());

    SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
    ks.init();

    SubObjectPropertyOfAxiomLearner learner = new SubObjectPropertyOfAxiomLearner(ks);
    learner.setEntityToDescribe(new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/author")));
    learner.setUseSampling(false);
    learner.setBatchMode(true);
    learner.setProgressMonitor(new ConsoleAxiomLearningProgressMonitor());
    learner.init();

    learner.start();

    learner.getCurrentlyBestEvaluatedAxioms().forEach(axiom -> {
        System.out.println("---------------\n" + axiom);
        learner.getPositiveExamples(axiom).stream().limit(5).forEach(System.out::println);
    });
}
}
/**
 * Demo: learns data property domain axioms for dbo:birthDate against the
 * public DBpedia endpoint and prints each axiom scoring at least 0.3 together
 * with up to five positive examples.
 */
public static void main(String[] args) throws Exception {
    // render OWL objects in DL syntax
    ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer());

    SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
    ks.init();

    DataPropertyDomainAxiomLearner learner = new DataPropertyDomainAxiomLearner(ks);
    learner.setEntityToDescribe(new OWLDataPropertyImpl(IRI.create("http://dbpedia.org/ontology/birthDate")));
    learner.setUseSampling(false);
    learner.setBatchMode(true);
    learner.setUsePrecisionOnly(false);
    learner.setProgressMonitor(new ConsoleAxiomLearningProgressMonitor());
    learner.init();

    learner.start();

    // 0.3 = minimum accuracy threshold for reported axioms
    learner.getCurrentlyBestEvaluatedAxioms(0.3).forEach(axiom -> {
        System.out.println("---------------\n" + axiom);
        learner.getPositiveExamples(axiom).stream().limit(5).forEach(System.out::println);
    });
}
/**
 * Demo: learns subclass axioms for dbo:Book against the public DBpedia
 * endpoint (strict OWL mode, new axioms only) and prints each axiom scoring
 * at least 0.3 together with up to five positive examples.
 */
public static void main(String[] args) throws Exception {
    // render OWL objects in DL syntax
    ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer());

    SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
    ks.init();

    SimpleSubclassLearner learner = new SimpleSubclassLearner(ks);
    learner.setEntityToDescribe(new OWLClassImpl(IRI.create("http://dbpedia.org/ontology/Book")));
    learner.setUseSampling(false);
    learner.setBatchMode(true);
    learner.setStrictOWLMode(true);
    learner.setReturnOnlyNewAxioms(true);
    learner.setProgressMonitor(new ConsoleAxiomLearningProgressMonitor());
    learner.init();

    learner.start();

    // 0.3 = minimum accuracy threshold for reported axioms
    learner.getCurrentlyBestEvaluatedAxioms(0.3).forEach(axiom -> {
        System.out.println("---------------\n" + axiom);
        learner.getPositiveExamples(axiom).stream().limit(5).forEach(System.out::println);
    });
}
}
public static void main(String[] args) throws Exception{ ErrorHandlerFactory.setDefaultErrorHandler(ErrorHandlerFactory.errorHandlerStrictNoLogging); SparqlEndpoint endpoint = new SparqlEndpoint(new URL("http://dbpedia.org/sparql"), "http://dbpedia.org"); // endpoint = SparqlEndpoint.getEndpointDBpedia(); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init(); OWLEntity entity = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/author")); MultiPropertyAxiomLearner la = new MultiPropertyAxiomLearner(ks); la.setEntityToDescribe(entity); la.setUseSampling(true); la.setMaxSampleGenerationTime(10, TimeUnit.SECONDS); la.setMaxNrOfThreads(1); la.setAxiomTypes(Sets.newHashSet( AxiomType.OBJECT_PROPERTY_DOMAIN, AxiomType.OBJECT_PROPERTY_RANGE, AxiomType.SUB_OBJECT_PROPERTY, AxiomType.EQUIVALENT_OBJECT_PROPERTIES, AxiomType.FUNCTIONAL_OBJECT_PROPERTY, AxiomType.ASYMMETRIC_OBJECT_PROPERTY, AxiomType.IRREFLEXIVE_OBJECT_PROPERTY, AxiomType.TRANSITIVE_OBJECT_PROPERTY ) ); la.setMaxExecutionTime(1, TimeUnit.MINUTES); la.start(); }
/**
 * Demo: runs the biomedical predicate-existence filter analysis against the
 * Leipzig biomedical SPARQL endpoint.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(
            SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://biomedical.org"));
    ks.init();

    new PredicateExistenceFilterBiomedical().analyze(ks);
}
/**
 * Sets up the endpoint-backed knowledge source (with an on-disk query cache,
 * paging and a query delay), the local data directory for instance data, an
 * empty schema model, and a SPARQL reasoner over the knowledge source.
 *
 * @param endpoint           SPARQL endpoint to generate learning problems from
 * @param benchmarkDirectory root directory for cache and data files
 * @param threadCount        number of worker threads
 * @throws ComponentInitException if the knowledge source or reasoner fails to initialize
 */
public SPARQLLearningProblemsGenerator(SparqlEndpoint endpoint, File benchmarkDirectory, int threadCount) throws ComponentInitException {
    this.benchmarkDirectory = benchmarkDirectory;
    this.threadCount = threadCount;

    // knowledge source with a persistent H2 cache; paging + delay keep the
    // endpoint load moderate
    ks = new SparqlEndpointKS(endpoint);
    ks.setCacheDir(new File(benchmarkDirectory, "cache").getPath() + ";mv_store=false");
    ks.setPageSize(50000);
    ks.setUseCache(true);
    ks.setQueryDelay(100);
    ks.init();

    // directory where instance data is stored
    dataDir = new File(benchmarkDirectory, "data/dbpedia/");
    dataDir.mkdirs();

    schema = ModelFactory.createDefaultModel();

    // reasoner over the SPARQL knowledge source
    reasoner = new SPARQLReasoner(ks);
    reasoner.init();
}
/**
 * Demo: learns disjoint-classes axioms for dbo:Actor against the Leipzig
 * DBpedia mirror and prints the ten best candidate axioms.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(
            SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"));
    ks.init();

    DisjointClassesLearner la = new DisjointClassesLearner(ks);
    la.setEntityToDescribe(new OWLClassImpl(IRI.create("http://dbpedia.org/ontology/Actor")));
    la.setUseSampling(false);
    la.init();

    la.start();

    // FIX: the learned axioms were computed but silently discarded before;
    // print them, consistent with the other learner demos/tests.
    System.out.println(la.getCurrentlyBestAxioms(10));
}
}
public static void main(String[] args) throws ComponentInitException { SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init();
/**
 * Loads the DrugBank instance dump and its schema, merges them into one
 * in-memory model, and wires up the local knowledge source, SPARQL reasoner
 * and concise-bounded-description generator used for generating biomedical
 * learning problems. The schema is also echoed to stdout in Turtle.
 *
 * @param benchmarkDirectory root directory for benchmark data output
 * @param threadCount        number of worker threads
 * @throws Exception if loading the data or initializing a component fails
 */
public BiomedicalLearningProblemsGenerator(File benchmarkDirectory, int threadCount) throws Exception {
    this.benchmarkDirectory = benchmarkDirectory;
    this.threadCount = threadCount;

    // instance data (N-Triples dump) — NOTE(review): hard-coded local path
    Model model = RDFDataMgr.loadModel("file:/home/user/work/experiments/qtl/data/biomedical/drugbank_dump.nt", Lang.NTRIPLES);

    // schema (RDF/XML)
    schema = ModelFactory.createDefaultModel();
    // FIX: close the schema input stream — it was opened and never closed before
    try (FileInputStream schemaIn = new FileInputStream(
            new File("/home/user/work/experiments/qtl/data/biomedical/", "drugbank.schema.owl"))) {
        schema.read(schemaIn, null, "RDF/XML");
    }
    schema.write(System.out, "TURTLE");
    model.add(schema);

    // local (in-memory) SPARQL knowledge source over data + schema
    ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.setUseCache(true);
    ks.setQueryDelay(100);
    ks.init();

    reasoner = new SPARQLReasoner(ks);
    reasoner.init();

    cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());

    // directory where instance data is stored
    dataDir = new File(benchmarkDirectory, "data/biomedical/");
    dataDir.mkdirs();
}
ks = new SparqlEndpointKS(endpoint); ks.setCacheDir(cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI-cache;mv_store=false"); ks.init(); } catch (ComponentInitException e) { e.printStackTrace();
/**
 * Learns disjoint-classes axioms for dbo:Book against DBpedia Live and prints
 * the five best axioms.
 */
public void testLearnSingleClass() throws ComponentInitException {
    ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW());
    ks.init();

    reasoner = new SPARQLReasoner(ks);
    reasoner.init();

    DisjointClassesLearner learner = new DisjointClassesLearner(ks);
    learner.setReasoner(reasoner);
    learner.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds);
    learner.setEntityToDescribe(new OWLClassImpl(IRI.create("http://dbpedia.org/ontology/Book")));
    learner.init();

    learner.start();
    System.out.println(learner.getCurrentlyBestAxioms(5));
}
ks = new SparqlEndpointKS(endpoint); ks.setCacheDir("./cache-qtl/qtl-qald-iswc2015-cache;mv_store=false"); ks.init();
ks.setCacheDir(cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI2017-cache;mv_store=false"); ks.setQueryExecutionFactory(qef); ks.init(); } catch (ComponentInitException e) { e.printStackTrace();
/**
 * Learns disjoint-classes axioms for every most-general class in the DBpedia
 * Live class hierarchy, printing the five best axioms per class.
 */
public void testLearnForMostGeneralClasses() throws ComponentInitException {
    ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW());
    ks.init();

    reasoner = new SPARQLReasoner(ks);
    reasoner.init();

    DisjointClassesLearner learner = new DisjointClassesLearner(ks);
    learner.setReasoner(reasoner);
    learner.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds);
    learner.init();

    // one learning run per top-level class
    for (OWLClassExpression cls : reasoner.getClassHierarchy().getMostGeneralClasses()) {
        learner.setEntityToDescribe(cls.asOWLClass());
        learner.start();
        System.out.println(learner.getCurrentlyBestAxioms(5));
    }
}
/**
 * Builds a small in-memory RDFS knowledge base (properties with declared
 * domains/ranges plus a subclass axiom), a local SPARQL-backed reasoner with
 * a precomputed class hierarchy, and the CBD generator, query-tree factory
 * and RDFS-aware LGG generator under test.
 */
@Before
public void setUp() throws Exception {
    // Turtle fixture; each statement ends with " ." so plain concatenation parses
    String kb = ""
            + "@prefix : <http://dl-learner.org/test/> ."
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
            + "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> ."
            + ":x1 :r :y1 ."
            + ":x2 rdf:type :A ."
            + ":r rdfs:domain :A ."
            + ":x3 :s :y2 ."
            + ":x4 rdf:type :B ."
            + ":s rdfs:domain :C ."
            + ":B rdfs:subClassOf :C ."
            + ":x5 :t :y3 ."
            + ":y3 rdf:type :B ."
            + ":x6 :t :y4 ."
            + ":t rdfs:range :C ."
            + ":B rdfs:subClassOf :C .";

    Model model = ModelFactory.createDefaultModel();
    RDFDataMgr.read(model, new StringReader(kb), null, Lang.TURTLE);

    SparqlEndpointKS ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.init();

    AbstractReasonerComponent reasoner = new SPARQLReasoner(ks);
    reasoner.setPrecomputeClassHierarchy(true);
    reasoner.init();

    cbdGenerator = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
    treeFactory = new QueryTreeFactoryBase();
    lggGen = new LGGGeneratorRDFS(reasoner);
}
ks.setCacheDir(cacheDir.getAbsolutePath() + "/sparql/qtl-AAAI2017-cache;mv_store=false"); ks.setQueryExecutionFactory(qef); ks.init(); } catch (ComponentInitException e) { e.printStackTrace();