// Add a handful of example statements inside one explicit transaction.
// The connection is always released, even if the transaction fails.
RepositoryConnection conn = rep.getConnection();
try {
    conn.begin(); // start a transaction
    URI turin = f.createURI("http://example.org/", "Turin");
    URI timon = f.createURI("http://example.org/", "Timon");
    // The Turin -> Timon statement is deliberately asserted twice, exactly
    // as in the original code (a repository stores it only once).
    conn.add(turin, RDF.PREDICATE, timon);
    conn.add(turin, RDF.PREDICATE, timon);
    conn.add(timon, RDF.PREDICATE, f.createURI("http://example.org/", "eddy"));
    conn.add(f.createURI("http://example.org/", "Pumba"), RDF.PREDICATE, timon);
    conn.commit();
} finally {
    conn.close();
}
@Test public void testAddMalformedLiteralsDefaultConfig() throws Exception { try { testCon.add( RepositoryConnectionTest.class.getResourceAsStream(TEST_DIR_PREFIX + "malformed-literals.ttl"), "", RDFFormat.TURTLE); fail("upload of malformed literals should fail with error in default configuration"); } catch (RDFParseException e) { // ignore, as expected } }
/**
 * The same statement added to the default context plus two named contexts
 * must be reported exactly once when the duplicate filter is enabled on
 * the result iteration.
 */
@Test
public void testDuplicateFilter()
    throws Exception
{
    testCon.begin();
    testCon.add(bob, name, nameBob);
    testCon.add(bob, name, nameBob, context1);
    testCon.add(bob, name, nameBob, context2);
    testCon.commit();

    RepositoryResult<Statement> result = testCon.getStatements(bob, name, null, true);
    result.enableDuplicateFilter();

    int matches = 0;
    for (; result.hasNext(); matches++) {
        result.next();
    }
    assertThat(matches, is(equalTo(1)));
}
@Test public void testContextHandling() throws Exception { connection.add(SUBJECT_4, PREDICATE_1, new LiteralImpl("sfourponecone"), CONTEXT_1); connection.add(SUBJECT_4, PREDICATE_2, new LiteralImpl("sfourptwocone"), CONTEXT_1); connection.add(SUBJECT_5, PREDICATE_1, new LiteralImpl("sfiveponecone"), CONTEXT_1); connection.add(SUBJECT_5, PREDICATE_1, new LiteralImpl("sfiveponectwo"), CONTEXT_2); connection.add(SUBJECT_5, PREDICATE_2, new LiteralImpl("sfiveptwoctwo"), CONTEXT_2); connection.commit(); // connection.close(); // connection = repository.getConnection(); // connection.setAutoCommit(false); // test querying assertQueryResult("sfourponecone", PREDICATE_1, SUBJECT_4); assertQueryResult("sfourptwocone", PREDICATE_2, SUBJECT_4); assertQueryResult("sfiveponecone", PREDICATE_1, SUBJECT_5); assertQueryResult("sfiveponectwo", PREDICATE_1, SUBJECT_5); assertQueryResult("sfiveptwoctwo", PREDICATE_2, SUBJECT_5); // blind test to see if this method works: assertNoQueryResult("johannesgrenzfurthner"); // remove a context connection.clear(CONTEXT_1); connection.commit(); assertNoQueryResult("sfourponecone"); assertNoQueryResult("sfourptwocone"); assertNoQueryResult("sfiveponecone"); assertQueryResult("sfiveponectwo", PREDICATE_1, SUBJECT_5); assertQueryResult("sfiveptwoctwo", PREDICATE_2, SUBJECT_5); }
/**
 * An uncommitted statement is visible to the adding connection, and a
 * rollback makes it disappear again.
 */
@Test
public void testRollback()
    throws Exception
{
    testCon.begin();
    testCon.add(alice, name, nameAlice);

    assertTrue("Uncommitted updates should be visible to own connection",
            testCon.hasStatement(alice, name, nameAlice, false));

    testCon.rollback();

    assertFalse("Repository should not contain statement after rollback",
            testCon.hasStatement(alice, name, nameAlice, false));
}
/**
 * A statement added and removed within the same transaction must not show
 * up when the repository is exported afterwards.
 */
@Test
public void testAddRemove()
    throws Exception
{
    URI foafPerson = vf.createURI("http://xmlns.com/foaf/0.1/Person");
    final Statement stmt = vf.createStatement(bob, name, nameBob);

    testCon.add(bob, RDF.TYPE, foafPerson);

    testCon.begin();
    testCon.add(stmt);
    testCon.remove(stmt);
    testCon.commit();

    // the add/remove pair cancelled out, so stmt must not be exported
    testCon.exportStatements(null, null, null, false, new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st)
            throws RDFHandlerException
        {
            assertThat(st, is(not(equalTo(stmt))));
        }
    });
}
/**
 * A statement pattern that uses the same variable in subject and object
 * position must only match statements whose subject equals their object —
 * here, no such statement exists, so no solution may be produced.
 */
@Test
public void testIdenticalVariablesInStatementPattern()
    throws Exception
{
    conn.add(alice, f.createURI("http://purl.org/dc/elements/1.1/publisher"), bob);

    String query = "SELECT ?publisher "
            + "{ ?publisher <http://purl.org/dc/elements/1.1/publisher> ?publisher }";

    conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(
            new TupleQueryResultHandlerBase() {

                @Override
                public void handleSolution(BindingSet bindingSet) {
                    fail("nobody is self published");
                }
            });
}
@Test public void testNullContextHandling() throws Exception connection.add(SUBJECT_4, PREDICATE_1, new LiteralImpl("sfourponecone")); connection.add(SUBJECT_4, PREDICATE_2, new LiteralImpl("sfourptwocone")); connection.add(SUBJECT_5, PREDICATE_1, new LiteralImpl("sfiveponecone")); connection.add(SUBJECT_5, PREDICATE_1, new LiteralImpl("sfiveponectwo"), CONTEXT_2); connection.add(SUBJECT_5, PREDICATE_2, new LiteralImpl("sfiveptwoctwo"), CONTEXT_2); connection.commit(); connection.commit(); assertNoQueryResult("sfourponecone"); assertNoQueryResult("sfourptwocone");
/**
 * An uncommitted add is only visible to its own connection; after a
 * rollback both connections see an empty repository again.
 */
@Test
public void testEmptyRollback()
    throws Exception
{
    assertThat(testCon.isEmpty(), is(equalTo(true)));
    assertThat(testCon2.isEmpty(), is(equalTo(true)));

    testCon.begin();
    testCon.add(vf.createBNode(), vf.createURI(URN_PRED), vf.createBNode());

    // visible to the writing connection only
    assertThat(testCon.isEmpty(), is(equalTo(false)));
    assertThat(testCon2.isEmpty(), is(equalTo(true)));

    testCon.rollback();

    // rollback undoes the uncommitted add everywhere
    assertThat(testCon.isEmpty(), is(equalTo(true)));
    assertThat(testCon2.isEmpty(), is(equalTo(true)));
}
/** * Note: Overridden to turn off autocommit and commit after the data are * loaded. */ protected void loadDataset(String datasetFile) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset..."); InputStream dataset = SPARQLUpdateTest.class.getResourceAsStream(datasetFile); try { // con.setAutoCommit(false); con.add(dataset, "", RDFFormat.forFileName(datasetFile));//RDFFormat.TRIG); con.commit(); } finally { dataset.close(); } logger.debug("dataset loaded."); }
/**
 * An update made inside a transaction is visible to the writing
 * connection before commit, and still present after the commit.
 */
@Test
public void testAutoCommit()
    throws Exception
{
    testCon.begin();
    testCon.add(alice, name, nameAlice);

    assertTrue("Uncommitted update should be visible to own connection",
            testCon.hasStatement(alice, name, nameAlice, false));

    testCon.commit();

    assertTrue("Repository should contain statement after commit",
            testCon.hasStatement(alice, name, nameAlice, false));
}
@Test public void testConcurrentReadingAndWriting() throws Exception connection.add(SUBJECT_1, PREDICATE_1, new LiteralImpl("sfourponecone"), CONTEXT_1); connection.add(SUBJECT_2, PREDICATE_1, new LiteralImpl("sfourponecone"), CONTEXT_1); connection.commit(); BindingSet bindings = result.next(); connection.add(SUBJECT_3, PREDICATE_1, new LiteralImpl("sfourponecone"), CONTEXT_1); connection.commit(); BindingSet bindings = result.next(); connection.add(SUBJECT_3, PREDICATE_1, new LiteralImpl("blubbb"), CONTEXT_1); connection.commit();
/**
 * size() reflects uncommitted adds on the writing connection only, and a
 * rollback brings it back to zero on both connections.
 */
@Test
public void testSizeRollback()
    throws Exception
{
    assertThat(testCon.size(), is(equalTo(0L)));
    assertThat(testCon2.size(), is(equalTo(0L)));

    testCon.begin();

    testCon.add(vf.createBNode(), vf.createURI(URN_PRED), vf.createBNode());
    assertThat(testCon.size(), is(equalTo(1L)));
    assertThat(testCon2.size(), is(equalTo(0L)));

    testCon.add(vf.createBNode(), vf.createURI(URN_PRED), vf.createBNode());
    assertThat(testCon.size(), is(equalTo(2L)));
    assertThat(testCon2.size(), is(equalTo(0L)));

    testCon.rollback();

    assertThat(testCon.size(), is(equalTo(0L)));
    assertThat(testCon2.size(), is(equalTo(0L)));
}
public static void loadDataFromResources(final Repository repo, final String resource, final String baseURL) throws OpenRDFException, IOException { final RepositoryConnection cxn = repo.getConnection(); try { cxn.begin(); try { final InputStream is = SampleBlazegraphSesameEmbedded.class.getResourceAsStream(resource); if (is == null) { throw new IOException("Could not locate resource: " + resource); } final Reader reader = new InputStreamReader(new BufferedInputStream(is)); try { cxn.add(reader, baseURL, RDFFormat.N3); } finally { reader.close(); } cxn.commit(); } catch (OpenRDFException ex) { cxn.rollback(); throw ex; } } finally { // close the repository connection cxn.close(); } }
/**
 * Overridden to turn off auto commit and to commit after the data set is
 * loaded.
 *
 * @param dataFile classpath resource name of the dataset to load; its
 *            file extension determines the RDF format
 * @param contexts optional contexts to load the data into
 * @throws IOException if the resource cannot be found or read
 */
@Override
protected void loadTestData(String dataFile, Resource... contexts)
    throws RDFParseException, RepositoryException, IOException
{
    logger.debug("loading dataset " + dataFile);
    InputStream dataset = ComplexSPARQLQueryTest.class.getResourceAsStream(dataFile);
    if (dataset == null) {
        // fail fast instead of an NPE in conn.add(...) / dataset.close()
        throw new IOException("Could not locate dataset resource: " + dataFile);
    }
    try {
        // begin() replaces the deprecated setAutoCommit(false), matching
        // the explicit-transaction style used elsewhere in this file
        conn.begin();
        conn.add(dataset, "", RDFFormat.forFileName(dataFile), contexts);
        conn.commit();
    }
    finally {
        dataset.close();
    }
    logger.debug("dataset loaded.");
}
/**
 * remove(Collection) must delete every statement in the collection; after
 * removing a snapshot of all statements the repository holds neither of
 * the two that were committed.
 */
@Test
public void testRemoveStatementCollection()
    throws Exception
{
    testCon.begin();
    testCon.add(alice, name, nameAlice);
    testCon.add(bob, name, nameBob);
    testCon.commit();

    assertThat(testCon.hasStatement(bob, name, nameBob, false), is(equalTo(true)));
    assertThat(testCon.hasStatement(alice, name, nameAlice, false), is(equalTo(true)));

    // snapshot every statement, then remove the whole collection at once
    Collection<Statement> snapshot = Iterations.addAll(
            testCon.getStatements(null, null, null, false), new ArrayList<Statement>());
    testCon.remove(snapshot);

    assertThat(testCon.hasStatement(bob, name, nameBob, false), is(equalTo(false)));
    assertThat(testCon.hasStatement(alice, name, nameAlice, false), is(equalTo(false)));
}