/**
 * Creates a SPARQL endpoint for the given URL with the given default graphs
 * and an empty list of named graphs.
 *
 * @param url the endpoint URL
 * @param defaultGraphURIs the default graph URIs
 * @return the SPARQL endpoint
 * @throws MalformedURLException if {@code url} is not a valid URL
 */
public static SparqlEndpoint create(String url, List<String> defaultGraphURIs) throws MalformedURLException {
	// delegate to the full factory method with no named graphs
	return create(url, defaultGraphURIs, Collections.<String>emptyList());
}
/**
 * Creates a SPARQL endpoint for the given URL with a single default graph.
 * A {@code null} graph URI is interpreted as "no default graph".
 *
 * @param url the endpoint URL
 * @param defaultGraphURI the default graph URI, may be {@code null}
 * @return the SPARQL endpoint
 * @throws MalformedURLException if {@code url} is not a valid URL
 */
public static SparqlEndpoint create(String url, String defaultGraphURI) throws MalformedURLException {
	final List<String> defaultGraphs;
	if (defaultGraphURI == null) {
		defaultGraphs = Collections.emptyList();
	} else {
		defaultGraphs = Lists.newArrayList(defaultGraphURI);
	}
	return create(url, defaultGraphs);
}
public static void main(String[] args) throws Exception{ org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG); SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); // endpoint = SparqlEndpoint.getEndpointDBpedia(); ConciseBoundedDescriptionGenerator cbdGen = new SymmetricConciseBoundedDescriptionGeneratorImpl(endpoint); Resource res = ResourceFactory.createResource("http://dbpedia.org/resource/Santa_Clara,_California"); Model cbd = cbdGen.getConciseBoundedDescription(res.getURI(), 2); System.out.println("#triples =\t" + cbd.size()); System.out.println("#triples_out =\t" + cbd.listStatements(res, null, (RDFNode) null).toSet().size()); cbd.listStatements(res, null, (RDFNode) null).toList().forEach(System.out::println); System.out.println("#triples_in =\t" + cbd.listStatements(null, null, res).toSet().size()); cbd.listStatements(null, null, res).toList().forEach(System.out::println); }
public static void main(String[] args) throws Exception { File benchmarkBaseDirectory = new File(args[0]); int threadCount = Integer.parseInt(args[1]); int nrOfSPARQLQueries = Integer.parseInt(args[2]); int minDepth = Integer.parseInt(args[3]); int maxDepth = Integer.parseInt(args[4]); int minNrOfExamples = Integer.parseInt(args[5]); // SparqlEndpoint endpoint = SparqlEndpoint.create("http://dbpedia.org/sparql", "http://dbpedia.org"); SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://uobm.org"); UOBMLearningProblemsGenerator generator = new UOBMLearningProblemsGenerator(endpoint, benchmarkBaseDirectory, threadCount); generator.generateBenchmark(nrOfSPARQLQueries, minDepth, maxDepth, minNrOfExamples); }
public static void main(String[] args) throws Exception { File benchmarkBaseDirectory = new File(args[0]); int threadCount = Integer.parseInt(args[1]); int nrOfSPARQLQueries = Integer.parseInt(args[2]); int minDepth = Integer.parseInt(args[3]); int maxDepth = Integer.parseInt(args[4]); int minNrOfExamples = Integer.parseInt(args[5]); // SparqlEndpoint endpoint = SparqlEndpoint.create("http://dbpedia.org/sparql", "http://dbpedia.org"); SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); DBpediaLearningProblemsGenerator generator = new DBpediaLearningProblemsGenerator(endpoint, benchmarkBaseDirectory, threadCount); generator.generateBenchmark(nrOfSPARQLQueries, minDepth, maxDepth, minNrOfExamples); }
public static void main(String[] args) throws Exception{ if(args.length == 0){ System.out.println("Usage: QALD6DBpediaEvaluationDataset <queriesTargetFile"); System.exit(0); } SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); QALD6DBpediaEvaluationDataset ds = new QALD6DBpediaEvaluationDataset(new File("/tmp/test"), endpoint); ds.saveToDisk(new File(args[0])); Map<String, Query> queries = ds.getSparqlQueries(); System.out.println("#queries:" + queries.size()); File graphsDir = new File("/home/user/work/experiments/qtl/QALD6/graphs/"); graphsDir.mkdirs(); // queries.forEach((id, query) -> QueryToGraphExporter.exportYedGraph(query, new File(graphsDir, id + ".png"))); }
/**
 * Ad-hoc demo: runs the QTL tuples experiment against a local GraphDB
 * repository containing the LUBM dataset with inferred statements.
 */
public static void main(String[] args) throws Exception {
	// local GraphDB repository, no default graph
	SparqlEndpoint endpoint = SparqlEndpoint.create("http://localhost:7200/repositories/lubm-inferred", Lists.newArrayList());
	LUBMEvaluationDataset dataset = new LUBMEvaluationDataset(new File("/tmp/test"), endpoint);
	new QTLTuplesExperiment(dataset).run();
}
}
/**
 * Ad-hoc demo: sanity-checks the LUBM evaluation dataset by executing each of
 * its SPARQL queries (limited to one result) against a local GraphDB
 * repository and printing whether each query returns a solution.
 */
public static void main(String[] args) throws Exception {
	// local GraphDB repository with OWL-Horst inference, no default graph
	SparqlEndpoint endpoint = SparqlEndpoint.create("http://localhost:7200/repositories/lubm-inferred-owlhorst", Lists.newArrayList());
	LUBMEvaluationDataset dataset = new LUBMEvaluationDataset(new File("/tmp/test"), endpoint);
	QueryExecutionFactory queryFactory = dataset.getKS().getQueryExecutionFactory();

	Map<String, Query> queries = dataset.getSparqlQueries();
	System.out.println(queries.size());

	for (Map.Entry<String, Query> entry : queries.entrySet()) {
		Query query = entry.getValue();
		System.out.println(query);
		// one solution is enough to confirm the query matches
		query.setLimit(1);
		try (QueryExecution execution = queryFactory.createQueryExecution(query)) {
			ResultSet results = execution.execSelect();
			System.out.println(results.hasNext());
			while (results.hasNext()) {
				QuerySolution solution = results.next();
				System.out.println(solution);
			}
		}
	}
}
/**
 * Ad-hoc demo: runs the biomedical predicate-existence filter analysis
 * against the remote biomedical SPARQL endpoint.
 */
public static void main(String[] args) throws Exception {
	SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://biomedical.org");

	SparqlEndpointKS knowledgeSource = new SparqlEndpointKS(endpoint);
	knowledgeSource.init();

	new PredicateExistenceFilterBiomedical().analyze(knowledgeSource);
}
/**
 * Ad-hoc demo: learns disjoint-classes axioms for {@code dbo:Actor} from a
 * remote DBpedia endpoint (without sampling) and prints the best candidates.
 */
public static void main(String[] args) throws Exception {
	SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"));
	ks.init();

	DisjointClassesLearner la = new DisjointClassesLearner(ks);
	la.setEntityToDescribe(new OWLClassImpl(IRI.create("http://dbpedia.org/ontology/Actor")));
	la.setUseSampling(false);
	la.init();
	la.start();

	// fixed: the learned axioms were computed but silently discarded —
	// print the 10 best candidates so the demo actually shows its result
	la.getCurrentlyBestAxioms(10).forEach(System.out::println);
}
}
endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); QueryExecutionFactory qef = FluentQueryExecutionFactory .http(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()).config()
endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.setQueryDelay(0);
SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org");
public static void main(String[] args) throws Exception{ SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); endpoint = SparqlEndpoint.create("http://localhost:7200/repositories/repo-dbpedia", Collections.emptyList()); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init();
public static void main(String[] args) throws Exception{ StringRenderer.setRenderer(StringRenderer.Rendering.DL_SYNTAX); SparqlEndpoint endpoint = SparqlEndpoint.create("http://dbpedia.org/sparql", "http://dbpedia.org"); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.setUseCache(false);
URL endpointURL = options.valueOf(endpointURLSpec); String defaultGraph = options.has(defaultGraphSpec) ? options.valueOf(defaultGraphSpec) : null; SparqlEndpoint endpoint = SparqlEndpoint.create(endpointURL.toString(), defaultGraph); int maxNrOfQueries = options.valueOf(maxNrOfQueriesSpec); int maxTreeDepth = options.valueOf(maxTreeDepthSpec);
qtf.setMaxDepth(maxTreeDepth); SparqlEndpoint endpoint = SparqlEndpoint.create("http://sake.informatik.uni-leipzig.de:8890/sparql", "http://dbpedia.org"); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init();
URL endpointURL = options.valueOf(endpointURLSpec); String defaultGraph = options.has(defaultGraphSpec) ? options.valueOf(defaultGraphSpec) : null; SparqlEndpoint endpoint = SparqlEndpoint.create(endpointURL.toString(), defaultGraph); int maxNrOfQueries = options.valueOf(maxNrOfQueriesSpec); int maxTreeDepth = options.valueOf(maxTreeDepthSpec);
SparqlEndpoint endpoint = SparqlEndpoint.create(endpointURL.toString(), defaultGraphs);
SparqlEndpoint endpoint = SparqlEndpoint.create(endpointURL.toString(), defaultGraphs);