/**
 * Creates a reasoner backed by the given SPARQL endpoint knowledge source.
 *
 * @param ks the knowledge source whose query execution factory is reused for all queries
 */
public SPARQLReasoner(SparqlEndpointKS ks) {
    super(ks);
    // share the KS-provided query execution factory instead of creating our own
    this.qef = ks.getQueryExecutionFactory();
}
/**
 * Creates a learner operating on the given SPARQL endpoint knowledge source.
 *
 * @param ks the knowledge source to learn from
 */
public MultiPropertyAxiomLearner(SparqlEndpointKS ks) {
    // delegate to the factory-based constructor, then keep a handle on the KS itself
    this(ks.getQueryExecutionFactory());
    this.ks = ks;
}
/**
 * Creates a sample generator for the given SPARQL endpoint knowledge source.
 *
 * @param ks the knowledge source whose query execution factory backs the sampling queries
 */
public AbstractSampleGenerator(SparqlEndpointKS ks) {
    // pure delegation: all work happens in the factory-based constructor
    this(ks.getQueryExecutionFactory());
}
/**
 * Creates a QTL2Disjunctive learner for the given learning problem and endpoint.
 *
 * @param lp the positive/negative examples learning problem
 * @param ks the knowledge source whose query execution factory is used
 */
public QTL2Disjunctive(PosNegLP lp, SparqlEndpointKS ks) {
    // pure delegation to the factory-based constructor
    this(lp, ks.getQueryExecutionFactory());
}
/**
 * Creates a multi-threaded QTL2Disjunctive learner for the given learning problem and endpoint.
 *
 * @param lp the positive/negative examples learning problem
 * @param ks the knowledge source whose query execution factory is used
 */
public QTL2DisjunctiveMultiThreaded(PosNegLP lp, SparqlEndpointKS ks) {
    // pure delegation to the factory-based constructor
    this(lp, ks.getQueryExecutionFactory());
}
/**
 * Creates an extractor that limits fragment extraction to a maximum execution time.
 *
 * @param ks               the SPARQL endpoint knowledge source to query
 * @param maxExecutionTime the maximum execution time, expressed in {@code timeUnit}
 *                         (the previous parameter name wrongly suggested milliseconds;
 *                         the value has always been interpreted in {@code timeUnit} units)
 * @param timeUnit         the unit of {@code maxExecutionTime}
 */
public TimeBasedFragmentExtractor(SparqlEndpointKS ks, int maxExecutionTime, TimeUnit timeUnit) {
    this.ks = ks;
    // normalize to milliseconds once up front
    this.maxExecutionTimeInMilliseconds = timeUnit.toMillis(maxExecutionTime);
    qef = ks.getQueryExecutionFactory();
}
/**
 * Creates an extractor that builds fragments from a bounded number of individuals.
 *
 * @param ks                 the SPARQL endpoint knowledge source to query
 * @param maxNrOfIndividuals the maximum number of individuals to use
 */
public IndividualBasedFragmentExtractor(SparqlEndpointKS ks, int maxNrOfIndividuals) {
    this.maxNrOfIndividuals = maxNrOfIndividuals;
    // CBD generator backed by the knowledge source's query execution factory
    cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
}
/**
 * Extracts a sample module for a single resource by materializing its concise
 * bounded description (CBD) and converting the resulting model to an ontology.
 *
 * NOTE(review): the CBD depth is hard-coded to 3 here, while the Set-based
 * overload uses the {@code depth} field — confirm whether this divergence is
 * intentional.
 */
private OWLOntology extractSampleModule(String resource){ logger.info("Extracting sample module..."); long startTime = System.currentTimeMillis(); ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory()); Model model = cbdGen.getConciseBoundedDescription(resource, 3); OWLOntology data = convert(model); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return data; }
/**
 * Extracts a sample module covering all given resources: the concise bounded
 * description (CBD) of each resource, at the configured {@code depth}, is merged
 * into one model which is then converted to an ontology.
 *
 * @param resources the resource IRIs to describe
 * @return the merged sample as an ontology
 */
private OWLOntology extractSampleModule(Set<String> resources){
    logger.info("Extracting sample module...");
    long start = System.currentTimeMillis();
    ConciseBoundedDescriptionGenerator generator =
            new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
    Model union = ModelFactory.createDefaultModel();
    for (String resource : resources) {
        union.add(generator.getConciseBoundedDescription(resource, depth));
    }
    logger.info("...done in " + (System.currentTimeMillis() - start) + "ms.");
    return convert(union);
}
// Task body: computes the concise bounded description (depth 2) of the current
// individual via the KS's query execution factory. The trailing "}));" closes an
// enclosing anonymous class / submit call that is not visible in this view.
@Override public Model call() throws Exception { ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory()); return cbdGen.getConciseBoundedDescription(ind.toStringID(), 2); } }));
@Override public void init() throws ComponentInitException { if(ks.isRemote()){ ksQef = ks.getQueryExecutionFactory(); } else { ksQef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel()); } if(ksReasoner == null){ ksReasoner = new SPARQLReasoner(ksQef); } // ksReasoner.supportsSPARQL1_1(); reasoner = ksReasoner; initialized = true; }
/**
 * Builds a fragment model by merging the concise bounded description (depth 2)
 * of every given individual.
 *
 * @param ks          the SPARQL endpoint knowledge source to query
 * @param individuals the individuals to describe
 * @return the merged fragment model
 */
private Model getFragment(SparqlEndpointKS ks, Set<OWLIndividual> individuals){
    ConciseBoundedDescriptionGenerator generator =
            new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
    Model fragment = ModelFactory.createDefaultModel();
    for (OWLIndividual individual : individuals) {
        fragment.add(generator.getConciseBoundedDescription(individual.toStringID(), 2));
    }
    return fragment;
}
/**
 * Demo: prints inverse triple frequency and predicate frequency measures for
 * two DBpedia properties and one individual.
 */
public static void main(String[] args) throws Exception {
    SparqlEndpointKS ks = new SparqlEndpointKS(
            new SparqlEndpoint(new URL("http://dbpedia.org/sparql"), "http://dbpedia.org"));
    ks.init();

    OWLProperty p1 = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/birthPlace"));
    OWLProperty p2 = new OWLObjectPropertyImpl(IRI.create("http://dbpedia.org/ontology/genre"));
    OWLIndividual ind1 = new OWLNamedIndividualImpl(IRI.create("http://dbpedia.org/resource/Kid_Canaveral"));

    InformativenessMeasures measures = new InformativenessMeasures(ks.getQueryExecutionFactory());

    // inverse triple frequency, per property (same print order as before)
    for (OWLProperty p : new OWLProperty[]{p1, p2}) {
        System.out.println("itf(" + p + ") = " + measures.getInverseTripleFrequency(p));
    }

    // predicate frequency, outgoing then incoming, per property
    for (OWLProperty p : new OWLProperty[]{p1, p2}) {
        double pfOut = measures.getPredicateFrequency(ind1, p, true);
        double pfIn = measures.getPredicateFrequency(ind1, p, false);
        System.out.println("pf_out(" + ind1 + "," + p + ") = " + pfOut);
        System.out.println("pf_in(" + ind1 + "," + p + ") = " + pfIn);
    }
}
// Wrap the KS's query execution factory with a 50 ms delay between queries,
// presumably to throttle requests against the endpoint — confirm against caller.
qef = ks.getQueryExecutionFactory(); qef = new QueryExecutionFactoryDelay(qef, 50);
/**
 * Demo: loads the LUBM evaluation dataset from a local GraphDB repository and,
 * for each benchmark SPARQL query (limited to one row), prints whether it has
 * results and the returned bindings.
 */
public static void main(String[] args) throws Exception{
    SparqlEndpoint endpoint = SparqlEndpoint.create(
            "http://localhost:7200/repositories/lubm-inferred-owlhorst", Lists.newArrayList());
    LUBMEvaluationDataset dataset = new LUBMEvaluationDataset(new File("/tmp/test"), endpoint);
    QueryExecutionFactory factory = dataset.getKS().getQueryExecutionFactory();

    Map<String, Query> queries = dataset.getSparqlQueries();
    System.out.println(queries.size());

    queries.forEach((id, query) -> {
        System.out.println(query);
        // one row is enough to check whether the query has any results
        query.setLimit(1);
        try (QueryExecution execution = factory.createQueryExecution(query)) {
            ResultSet results = execution.execSelect();
            System.out.println(results.hasNext());
            while (results.hasNext()) {
                System.out.println(results.next());
            }
        }
    });
}
// Fragment: the closing argument of an ignored-properties set built on a line
// outside this view. The CBD generator is then restricted to skip those
// properties and to only follow predicates in the DBpedia ontology namespace.
"http://dbpedia.org/ontology/wikiPageExternalLink"); ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory()); cbdGen.setIgnoredProperties(ignoredProperties); cbdGen.setAllowedPropertyNamespaces(Sets.newHashSet("http://dbpedia.org/ontology/"));
/**
 * Sets up the biomedical benchmark: loads the DrugBank instance data and schema
 * from fixed local paths, merges them into a local-model knowledge source,
 * initializes a SPARQL reasoner and a CBD generator, and prepares the output
 * directory under the benchmark directory.
 *
 * @param benchmarkDirectory root directory of the benchmark output
 * @param threadCount        number of worker threads to use
 * @throws Exception if loading the data or initializing the components fails
 */
public BiomedicalLearningProblemsGenerator(File benchmarkDirectory, int threadCount) throws Exception {
    this.benchmarkDirectory = benchmarkDirectory;
    this.threadCount = threadCount;

    // instance data (NOTE: hard-coded local path)
    Model model = RDFDataMgr.loadModel("file:/home/user/work/experiments/qtl/data/biomedical/drugbank_dump.nt", Lang.NTRIPLES);

    // schema; try-with-resources closes the stream (previously leaked)
    schema = ModelFactory.createDefaultModel();
    File schemaFile = new File("/home/user/work/experiments/qtl/data/biomedical/", "drugbank.schema.owl");
    try (FileInputStream in = new FileInputStream(schemaFile)) {
        schema.read(in, null, "RDF/XML");
    }
    schema.write(System.out, "TURTLE");
    model.add(schema);

    ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.setUseCache(true);
    ks.setQueryDelay(100);
    ks.init();

    reasoner = new SPARQLReasoner(ks);
    reasoner.init();

    cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());

    dataDir = new File(benchmarkDirectory, "data/biomedical/");
    dataDir.mkdirs();
}
public QTLEvaluation(EvaluationDataset dataset, File benchmarkDirectory, boolean write2DB, boolean override, int maxQTLRuntime, boolean useEmailNotification, int nrOfThreads) { this.dataset = dataset; this.benchmarkDirectory = benchmarkDirectory; this.write2DB = write2DB; this.override = override; this.maxExecutionTimeInSeconds = maxQTLRuntime; this.useEmailNotification = useEmailNotification; this.nrOfThreads = nrOfThreads; queryTreeFactory = new QueryTreeFactoryBaseInv(); queryTreeFactory.setMaxDepth(maxTreeDepth); // add some filters to avoid resources with namespaces like http://dbpedia.org/property/ List<Predicate<Statement>> var = dataset.getQueryTreeFilters(); queryTreeFactory.addDropFilters((Predicate<Statement>[]) var.toArray(new Predicate[var.size()])); qef = dataset.getKS().getQueryExecutionFactory(); cbdGen = new SymmetricConciseBoundedDescriptionGeneratorImpl(qef); rnd.reSeed(123); kbSize = getKBSize(); timeStamp = System.currentTimeMillis(); if(write2DB) { setupDatabase(); } cacheDirectory = new File(benchmarkDirectory, "cache"); }
/**
 * Sets up a QTL tuples experiment over the given dataset's endpoint.
 * NOTE(review): this constructor continues beyond the visible text (no closing
 * brace here); {@code cbdGen} and {@code tf} are presumably initialized
 * elsewhere before being passed to the QTL instance — verify.
 */
public QTLTuplesExperiment(EvaluationDataset dataset) throws Exception{ this.dataset = dataset; qef = dataset.ks.getQueryExecutionFactory(); qtl = new QTLTuples(dataset.ks.getQueryExecutionFactory()); qtl.setCBDGenerator(cbdGen); qtl.setTreeFactory(tf);
/**
 * Builds a small in-memory knowledge base exercising rdfs:domain, rdfs:range
 * and rdfs:subClassOf, then initializes the SPARQL reasoner (with precomputed
 * class hierarchy), the CBD generator, the tree factory and the RDFS LGG
 * generator used by the tests.
 */
@Before
public void setUp() throws Exception {
    String kb = ""
            + "@prefix : <http://dl-learner.org/test/> ."
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
            + "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> ."
            + ":x1 :r :y1 ."
            + ":x2 rdf:type :A ."
            + ":r rdfs:domain :A ."
            + ":x3 :s :y2 ."
            + ":x4 rdf:type :B ."
            + ":s rdfs:domain :C ."
            + ":B rdfs:subClassOf :C ."
            + ":x5 :t :y3 ."
            + ":y3 rdf:type :B ."
            + ":x6 :t :y4 ."
            + ":t rdfs:range :C ."
            + ":B rdfs:subClassOf :C .";

    Model model = ModelFactory.createDefaultModel();
    RDFDataMgr.read(model, new StringReader(kb), null, Lang.TURTLE);

    SparqlEndpointKS ks = new LocalModelBasedSparqlEndpointKS(model);
    ks.init();

    AbstractReasonerComponent reasoner = new SPARQLReasoner(ks);
    reasoner.setPrecomputeClassHierarchy(true);
    reasoner.init();

    cbdGenerator = new ConciseBoundedDescriptionGeneratorImpl(ks.getQueryExecutionFactory());
    treeFactory = new QueryTreeFactoryBase();
    lggGen = new LGGGeneratorRDFS(reasoner);
}