/**
 * Creates a context-aware wrapper around the given connection.
 * Delegates to the two-argument constructor, deriving the repository
 * from the wrapped connection itself.
 *
 * @param connection the connection to wrap
 * @throws RepositoryException if the delegate constructor fails
 */
public ContextAwareConnection(RepositoryConnection connection) throws RepositoryException { this(connection.getRepository(), connection); }
/** Exposes the repository backing the wrapped connection. */
@Override
public Object getRepository() {
    // Simple pass-through to the delegate connection.
    Object repository = conn.getRepository();
    return repository;
}
/**
 * Creates a federation connection and selects a writable member to receive
 * added statements. The scan starts at a random offset so write load is
 * distributed across members (round-robin style).
 *
 * @param federation the owning federation
 * @param members    the member connections to scan
 * @throws SailException if querying a member's writability fails
 */
public WritableConnection(Federation federation, List<RepositoryConnection> members) throws SailException {
    super(federation, members);
    int size = members.size();
    if (size > 0) { // an empty federation has nothing to scan (previously: ArithmeticException)
        // nextInt(bound) is uniform; the old nextInt() % size was biased and
        // needed manual normalization of negative values.
        int rnd = new Random().nextInt(size);
        for (int i = rnd, n = rnd + size; i < n; i++) {
            try {
                // NOTE(review): this keeps the LAST writable member in the rotation,
                // not the first one after rnd — preserved from the original; confirm intent.
                if (members.get(i % size).getRepository().isWritable()) {
                    addIndex = i % size;
                }
            } catch (RepositoryException e) {
                throw new SailException(e);
            }
        }
    }
}
/**
 * Closes the scratch connections holding unflushed added/removed statements
 * and shuts their repositories down. Nested try/finally guarantees every
 * resource is released even if an earlier close/shutdown throws; the first
 * exception propagates to the caller.
 *
 * @throws RepositoryException if closing a connection or shutting down a repository fails
 */
private void destroyUnflushedStatements() throws RepositoryException {
    Repository add = added.getRepository();
    Repository remove = removed.getRepository();
    try {
        added.close();
    } finally {
        try {
            removed.close();
        } finally {
            try {
                add.shutDown();
            } finally {
                remove.shutDown();
            }
        }
    }
} }
/**
 * Creates a federation connection and selects a writable member to receive
 * added statements. The scan starts at a random offset so write load is
 * distributed across members (round-robin style).
 *
 * @param federation the owning federation
 * @param members    the member connections to scan
 * @throws SailException if querying a member's writability fails
 */
public WritableConnection(Federation federation, List<RepositoryConnection> members) throws SailException {
    super(federation, members);
    int size = members.size();
    if (size > 0) { // an empty federation has nothing to scan (previously: ArithmeticException)
        // nextInt(bound) is uniform; the old nextInt() % size was biased and
        // needed manual normalization of negative values.
        int rnd = new Random().nextInt(size);
        for (int i = rnd, n = rnd + size; i < n; i++) {
            try {
                // NOTE(review): this keeps the LAST writable member in the rotation,
                // not the first one after rnd — preserved from the original; confirm intent.
                if (members.get(i % size).getRepository().isWritable()) {
                    addIndex = i % size;
                }
            } catch (RepositoryException e) {
                throw new SailException(e);
            }
        }
    }
}
/**
 * Registers this notifier for the given entity, sharing one bean map per
 * repository across all notifier instances.
 *
 * @param elmo the entity to observe; must be a SesameEntity
 */
public PropertyChangeNotifierSupport(Entity elmo) {
    this.bean = (SesameEntity) elmo;
    this.resource = bean.getSesameResource();
    SesameManager manager = bean.getSesameManager();
    RepositoryConnection conn = manager.getConnection();
    // Fire immediately only when no broadcaster is attached to this connection.
    immediate = findBroadcaster(conn) == null;
    Repository repository = conn.getRepository();
    // Lazily create the per-repository bean map; guarded so two threads
    // registering against the same repository share one map.
    synchronized (managers) {
        beans = managers.get(repository);
        if (beans == null) {
            beans = new ConcurrentHashMap();
            managers.put(repository, beans);
        }
    }
}
/** Resolves the value factory via the manager's current connection. */
ValueFactory getValueFactory() {
    return manager.getConnection().getRepository().getValueFactory();
}
public void setSesameStatus(RepositoryConnection repositoryConnection) throws RepositoryException { final Repository repository = repositoryConnection.getRepository(); tripleStore.clear(); switch (detailLevel) { case 2: tripleStore.put("statements", String.valueOf(repositoryConnection.size())); tripleStore.put("namespaces", String.valueOf(sizeOf(repositoryConnection.getNamespaces()))); tripleStore.put("contexts", String.valueOf(sizeOf(repositoryConnection.getContextIDs()))); case 1: tripleStore.put("writeable", String.valueOf(repository.isWritable())); break; case 0: default: //nop; } }
public void setSesameStatus(RepositoryConnection repositoryConnection) throws RepositoryException { final Repository repository = repositoryConnection.getRepository(); tripleStore.clear(); switch (detailLevel) { case 2: tripleStore.put("statements", String.valueOf(repositoryConnection.size())); tripleStore.put("namespaces", String.valueOf(sizeOf(repositoryConnection.getNamespaces()))); tripleStore.put("contexts", String.valueOf(sizeOf(repositoryConnection.getContextIDs()))); case 1: tripleStore.put("writeable", String.valueOf(repository.isWritable())); break; case 0: default: //nop; } }
/**
 * Closes a connection if it is open.
 *
 * This method is intended to close a connection after an exception
 * occurred. Any exceptions raised while closing are added as
 * suppressed exceptions to the given exception.
 *
 * @param conn the connection that should be closed (may be null)
 * @param ex an exception that caused the closing; receives suppressed exceptions
 */
protected void close(RepositoryConnection conn, Exception ex) {
    if (conn != null) {
        try {
            if (conn.isOpen()) {
                conn.close();
            }
            // dead store "Repository rep = conn.getRepository();" removed — it was never used
        } catch (RepositoryException ex2) {
            ex.addSuppressed(ex2);
        }
    }
}
/**
 * Maps a source blank node to a fresh blank node in the target repository,
 * reusing the same replacement for repeated occurrences of one ID.
 *
 * @param bNode the blank node to map
 * @return the (possibly newly created) replacement blank node
 */
private BNode mapBNode(BNode bNode) {
    String id = bNode.getID();
    BNode mapped = bNodesMap.get(id);
    if (mapped != null) {
        return mapped;
    }
    BNode fresh = con.getRepository().getValueFactory().createBNode();
    bNodesMap.put(id, fresh);
    return fresh;
}
/**
 * Closes both subject caches (connection plus backing repository) and then
 * the parent connection. Nested try/finally ensures that a failure while
 * tearing down the first cache no longer prevents the second cache and
 * {@code super.close()} from running; the first exception propagates.
 *
 * @throws RepositoryException if any close or shutdown fails
 */
@Override
public void close() throws RepositoryException {
    try {
        if (_subjCacheInfr != null) {
            Repository cacheRepo = _subjCacheInfr.getRepository();
            _subjCacheInfr.close();
            cacheRepo.shutDown();
        }
    } finally {
        try {
            if (_subjCacheExpl != null) {
                Repository cacheRepo = _subjCacheExpl.getRepository();
                _subjCacheExpl.close();
                cacheRepo.shutDown();
            }
        } finally {
            super.close();
        }
    }
}
/**
 * Builds the RDF container membership predicate for the given zero-based
 * index: rdf:_1, rdf:_2, ... (membership predicates are 1-based).
 *
 * @param index zero-based member position
 * @return the rdf:_N predicate URI
 */
private URI getMemberPredicate(int index) {
    ValueFactory vf = manager.getConnection().getRepository().getValueFactory();
    return vf.createURI(RDF.NAMESPACE + '_' + (index + 1));
}
/**
 * Binds a string parameter, optionally tagged with a language derived from
 * the given locale (e.g. en_US becomes "en-us").
 *
 * @param name   the binding name
 * @param label  the literal label; null clears the binding
 * @param locale the language of the label, or null for a plain literal
 * @return this query, for chaining
 */
public ElmoQuery setParameter(String name, String label, Locale locale) {
    ValueFactory vf = manager.getConnection().getRepository().getValueFactory();
    if (label == null) {
        setBinding(name, null);
        return this;
    }
    if (locale == null) {
        setBinding(name, vf.createLiteral(label));
        return this;
    }
    // Locale strings use '_' (en_US); language tags use '-' and lowercase.
    String lang = locale.toString().toLowerCase().replace('_', '-');
    setBinding(name, vf.createLiteral(label, lang));
    return this;
}
// Use the connection's own value factory so the parsed values share the
// store's Value implementation (avoids cross-implementation conversions).
final ValueFactory vf = con.getRepository().getValueFactory(); RDFParser rdfParser = new TurtleParser(); rdfParser.setValueFactory(vf);
// Use the connection's own value factory so the parsed values share the
// store's Value implementation (avoids cross-implementation conversions).
final ValueFactory vf = con.getRepository().getValueFactory(); RDFParser rdfParser = new TurtleParser(); rdfParser.setValueFactory(vf);
/**
 * Prints the membership statements of an RDF container (rdf:_1, rdf:_2, ...)
 * for the given subject and returns the set of membership predicates used,
 * so the caller can skip them when serializing the remaining statements.
 *
 * Iteration stops at the first index N for which no rdf:_N statement exists,
 * i.e. only a contiguous run starting at rdf:_1 is emitted.
 *
 * NOTE(review): only the FIRST statement per predicate is rewritten to use
 * rdf:li; any further statements with the same rdf:_N predicate are printed
 * verbatim — presumably to keep duplicate members distinguishable; confirm intent.
 *
 * @param subj the container resource
 * @return the set of rdf:_N predicates that were printed
 * @throws RDFHandlerException if the underlying repository access fails
 */
private Set<URI> printContainer(Resource subj) throws RDFHandlerException { CloseableIteration<? extends Statement, RepositoryException> stIter; Set<URI> set = new HashSet<URI>(); try { ValueFactory vf = con.getRepository().getValueFactory(); int idx = 1; URI pred = vf.createURI(RDF.NAMESPACE, "_" + idx++); while (con.hasStatement(subj, pred, null, false)) { stIter = con.getStatements(subj, pred, null, false); try { if (stIter.hasNext()) { Statement st = stIter.next(); print(vf.createStatement(st.getSubject(), RDF.LI, st .getObject())); } while (stIter.hasNext()) { print(stIter.next()); } } finally { stIter.close(); } set.add(pred); pred = vf.createURI(RDF.NAMESPACE, "_" + idx++); } return set; } catch (RepositoryException e) { throw new RDFHandlerException(e); } } }
protected void testResource(String uri, String sparqlFile) throws Exception { Assume.assumeTrue(ldcache.getClient().ping(uri)); Model model = ldcache.get(valueFactory.createURI(uri)); Assert.assertTrue(model.size() > 0); RepositoryConnection connection = ModelCommons.asRepository(model).getConnection(); connection.begin(); // run a SPARQL test to see if the returned data is correct InputStream sparql = BaseLDCacheTest.class.getResourceAsStream(sparqlFile); final String query = IOUtils.toString(sparql); BooleanQuery testLabel = connection.prepareBooleanQuery(QueryLanguage.SPARQL, query); final boolean testResult = testLabel.evaluate(); if(!testResult && log.isDebugEnabled()) { log.debug("QUERY: {}", query); StringWriter out = new StringWriter(); connection.export(Rio.createWriter(RDFFormat.TURTLE, out)); log.debug("DATA: {}", out.toString()); } Assert.assertTrue("SPARQL test query failed", testResult); connection.commit(); connection.close(); connection.getRepository().shutDown(); }
// Use the connection's own value factory so the parsed values share the
// store's Value implementation (avoids cross-implementation conversions).
final ValueFactory vf = con.getRepository().getValueFactory(); RDFParser rdfParser = new TurtleParser(); rdfParser.setValueFactory(vf);
// Use the connection's own value factory so the parsed values share the
// store's Value implementation (avoids cross-implementation conversions).
final ValueFactory vf = con.getRepository().getValueFactory(); RDFParser rdfParser = new TurtleParser(); rdfParser.setValueFactory(vf);