/**
 * Produce the finisher that converts the accumulated quads into a dataset.
 *
 * @return a function copying every collected quad into a new dataset
 */
@Override
public Function<Set<Quad>, Dataset> finisher() {
    return quads -> {
        final Dataset result = rdf.createDataset();
        for (final Quad quad : quads) {
            result.add(quad);
        }
        return result;
    };
}
/**
 * Finishing step of the collector: materialize the accumulated quad set
 * as a Dataset.
 *
 * @return a function that loads each quad into a freshly created dataset
 */
@Override
public Function<Set<Quad>, Dataset> finisher() {
    return accumulated -> {
        final Dataset ds = rdf.createDataset();
        for (final Quad q : accumulated) {
            ds.add(q);
        }
        return ds;
    };
}
/**
 * Create a new, empty dataset wrapped in the Trellis dataset type.
 *
 * @return a dataset
 */
public static TrellisDataset createDataset() {
    final Dataset underlying = getInstance().createDataset();
    return new TrellisDataset(underlying);
}
/**
 * Delete the resource identified by the given metadata.
 *
 * <p>The deletion is recorded by storing a tombstone (an ldp:Resource
 * interaction model) with a DELETE operation marker.
 *
 * @param metadata the metadata describing the resource to delete
 * @return a completion stage that finishes when the deletion has been stored
 */
@Override
public CompletionStage<Void> delete(final Metadata metadata) {
    LOGGER.debug("Deleting: {}", metadata.getIdentifier());
    return runAsync(() -> {
        try (final Dataset dataset = rdf.createDataset()) {
            final Instant time = now();
            // Reduce the resource to a bare ldp:Resource when recording the delete.
            final Metadata md = Metadata.builder(metadata.getIdentifier())
                    .interactionModel(LDP.Resource).build();
            storeResource(md, dataset, time, OperationType.DELETE);
        } catch (final Exception ex) {
            // Fixed typo in the error message ("resoruce" -> "resource"); cause is preserved.
            throw new RuntimeTrellisException("Error deleting resource: " + metadata.getIdentifier(), ex);
        }
    });
}
/**
 * Build a notification message describing membership changes caused by
 * activity on {@code resource} within its {@code parent} container.
 *
 * <p>For a Direct container, membership quads are generated from the parent's
 * ldp:hasMemberRelation / ldp:isMemberOfRelation. For an Indirect container,
 * the member objects are taken from the user-managed quads of {@code dataset}
 * whose predicate matches the parent's ldp:insertedContentRelation.
 *
 * @param topic the message topic
 * @param resource the resource that changed
 * @param parent the parent (container) resource
 * @param dataset the dataset for the current operation; audit quads from it
 *                are copied into the message when membership quads exist
 * @return a producer record keyed by the membership subject IRI, or empty
 *         when no membership quads were produced
 * @throws Exception if the scratch dataset cannot be created or closed
 */
private Optional<ProducerRecord<String, String>> buildMembershipMessage(final String topic, final IRI resource,
        final Resource parent, final Dataset dataset) throws Exception {
    // Scratch dataset accumulating the membership (and audit) quads to serialize.
    try (final Dataset data = rdf.createDataset()) {
        if (DirectContainer.equals(parent.getInteractionModel())) {
            parent.getMembershipResource().ifPresent(member -> {
                // member --relation--> resource (ldp:hasMemberRelation)
                parent.getMemberRelation().ifPresent(relation ->
                    data.add(rdf.createQuad(PreferMembership, member, relation, resource)));
                // resource --relation--> member (ldp:isMemberOfRelation)
                parent.getMemberOfRelation().ifPresent(relation ->
                    data.add(rdf.createQuad(PreferMembership, resource, relation, member)));
            });
        } else if (IndirectContainer.equals(parent.getInteractionModel())) {
            // All three of membership resource, member relation and inserted-content
            // relation must be present; the member objects come from the operation's
            // user-managed quads matching the inserted-content predicate.
            parent.getMembershipResource().ifPresent(member ->
                parent.getMemberRelation().ifPresent(relation ->
                    parent.getInsertedContentRelation().ifPresent(inserted ->
                        dataset.stream(of(PreferUserManaged), null, inserted, null).sequential()
                            .forEachOrdered(q ->
                                data.add(rdf.createQuad(PreferMembership, member, relation,
                                        q.getObject()))))));
        }
        // Use the first IRI membership subject as the message key, if any.
        final Optional<String> key = data.stream(of(PreferMembership), null, null, null).map(Quad::getSubject)
            .filter(x -> x instanceof IRI).map(x -> (IRI) x).map(IRI::getIRIString).findFirst();
        if (key.isPresent()) {
            // Carry the audit quads along with the membership quads in the message body.
            dataset.stream(of(PreferAudit), null, null, null).map(auditTypeMapper).forEachOrdered(data::add);
            return of(new ProducerRecord<>(topic, key.get(), serialize(data)));
        }
        // No membership change: nothing to publish.
        return empty();
    }
}
/**
 * Purge a resource completely, under a distributed (ZooKeeper) lock.
 *
 * <p>An audit Delete quad is written before the purge so the deletion is
 * recorded. The lock-wait time is read from the {@code zk.lock.wait.ms}
 * system property (default 100 ms).
 *
 * @param identifier the resource identifier
 * @return a stream of identifiers removed by the purge
 * @throws RuntimeTrellisException if the lock cannot be acquired or released
 */
@Override
public Stream<IRI> purge(final IRI identifier) {
    final InterProcessLock lock = getLock(identifier);
    try {
        lock.acquire(Long.parseLong(System.getProperty("zk.lock.wait.ms", "100")), MILLISECONDS);
    } catch (final Exception ex) {
        // Deliberately only logged: acquisition success is verified just below.
        LOGGER.error("Error acquiring lock: {}", ex.getMessage());
    }
    if (!lock.isAcquiredInThisProcess()) {
        throw new RuntimeTrellisException("Could not acquire resource lock for " + identifier);
    }
    // If the resource exists, record an audit Delete event before purging.
    get(identifier, MAX).ifPresent(res -> {
        try (final Dataset dataset = rdf.createDataset()) {
            dataset.add(rdf.createQuad(PreferAudit, rdf.createBlankNode(), type, Delete));
            tryWrite(identifier, dataset);
        } catch (final Exception ex) {
            // Best-effort audit write; failure to close does not abort the purge.
            LOGGER.error("Error closing dataset: {}", ex.getMessage());
        }
    });
    final Stream<IRI> stream = tryPurge(identifier);
    try {
        lock.release();
    } catch (final Exception ex) {
        // A release failure is fatal: the lock would otherwise stay held.
        LOGGER.error("Error releasing resource lock: {}", ex.getMessage());
        throw new RuntimeTrellisException("Error releasing resource lock", ex);
    }
    return stream;
}
/**
 * Verify that the default create implementation delegates to replace.
 */
@Test
public void testDefaultCreate() {
    final Dataset quads = rdf.createDataset();
    final IRI rootContainer = rdf.createIRI("trellis:data/");
    final Metadata meta = Metadata.builder(existing)
            .container(rootContainer)
            .interactionModel(LDP.RDFSource)
            .build();
    when(mockResourceService.replace(eq(meta), eq(quads))).thenReturn(completedFuture(null));
    assertDoesNotThrow(() -> mockResourceService.create(meta, quads).toCompletableFuture().join());
    verify(mockResourceService).replace(eq(meta), eq(quads));
}
// Build two single-activity audit datasets and merge them into one.
// First audit event: audit1 generated the identified resource.
final Dataset dataset1 = rdf.createDataset(); dataset1.add(Trellis.PreferAudit, identifier, PROV.wasGeneratedBy, audit1); dataset1.add(Trellis.PreferAudit, audit1, type, PROV.Activity);
// Second audit event: audit2 generated the same resource.
final Dataset dataset2 = rdf.createDataset(); dataset2.add(Trellis.PreferAudit, identifier, PROV.wasGeneratedBy, audit2); dataset2.add(Trellis.PreferAudit, audit2, type, PROV.Activity);
// Union of both datasets (quad-by-quad copy into a third dataset).
final Dataset combined = rdf.createDataset(); dataset1.stream().forEach(combined::add); dataset2.stream().forEach(combined::add);
/**
 * Build a dataset describing a SKOS concept with a title and subject.
 *
 * @param resource the resource IRI
 * @param title a title
 * @param subject a subject
 * @return a new dataset
 */
default Dataset buildDataset(final IRI resource, final String title, final String subject) {
    final Dataset ds = getInstance().createDataset();
    ds.add(Trellis.PreferUserManaged, resource, type, SKOS.Concept);
    ds.add(Trellis.PreferUserManaged, resource, DC.title, getInstance().createLiteral(title));
    ds.add(Trellis.PreferUserManaged, resource, DC.subject, getInstance().createIRI(subject));
    return ds;
}
}
/**
 * Assemble a dataset of user-managed quads for a SKOS concept: its type,
 * a dc:title literal and a dc:subject IRI.
 *
 * @param resource the resource IRI
 * @param title a title
 * @param subject a subject
 * @return a new dataset
 */
default Dataset buildDataset(final IRI resource, final String title, final String subject) {
    final Dataset quads = getInstance().createDataset();
    quads.add(Trellis.PreferUserManaged, resource, type, SKOS.Concept);
    quads.add(Trellis.PreferUserManaged, resource, DC.subject, getInstance().createIRI(subject));
    quads.add(Trellis.PreferUserManaged, resource, DC.title, getInstance().createLiteral(title));
    return quads;
}
}
/**
 * Verify that creation events are written to the audit graph with the
 * as:Create type and the standard event properties.
 */
@Test
public void testAuditCreation() {
    final Dataset auditData = rdf.createDataset();
    final AuditService svc = new DefaultAuditService() {};
    svc.creation(subject, mockSession).forEach(auditData::add);
    // Every generated quad must live in the PreferAudit graph.
    assertTrue(auditData.getGraph(Trellis.PreferAudit)
            .filter(graph -> graph.size() == auditData.size()).isPresent(),
            "Graph and dataset sizes don't match for creation event!");
    assertTrue(auditData.contains(null, null, type, AS.Create), "as:Create type not in create dataset!");
    assertAll("Event property check", checkEventProperties(auditData));
}
/**
 * Verify that update events are written to the audit graph with the
 * as:Update type and the standard event properties.
 */
@Test
public void testAuditUpdate() {
    final Dataset dataset = rdf.createDataset();
    final AuditService svc = new DefaultAuditService() {};
    svc.update(subject, mockSession).forEach(dataset::add);
    // Assertion messages added for consistency with the creation/deletion tests.
    assertTrue(dataset.getGraph(Trellis.PreferAudit)
            .filter(graph -> graph.size() == dataset.size()).isPresent(),
            "Graph and dataset sizes don't match for update event!");
    assertTrue(dataset.contains(null, null, type, AS.Update), "as:Update type not in update dataset!");
    assertAll("Event property check", checkEventProperties(dataset));
}
/**
 * Verify that deletion events are written to the audit graph with the
 * as:Delete type and the standard event properties.
 */
@Test
public void testAuditDeletion() {
    final Dataset auditData = rdf.createDataset();
    final AuditService svc = new DefaultAuditService() {};
    svc.deletion(subject, mockSession).forEach(auditData::add);
    // Every generated quad must live in the PreferAudit graph.
    assertTrue(auditData.getGraph(Trellis.PreferAudit)
            .filter(graph -> graph.size() == auditData.size()).isPresent(),
            "Graph and dataset sizes don't match for deletion event!");
    assertTrue(auditData.contains(null, null, type, AS.Delete), "as:Delete type not in delete dataset!");
    assertAll("Event property check", checkEventProperties(auditData));
}
/**
 * Exercise the PersistableResource wrapper: identity, interaction model,
 * modification timestamps, quad streaming, container absence, and the
 * unsupported ACL accessor.
 */
@Test
public void testPersistableResource() {
    final Instant before = now();
    final IRI id = createIRI("trellis:identifier");
    final Quad expected = createQuad(testResourceId2, testResourceId2, testResourceId1, badId);
    final Dataset quads = TrellisUtils.getInstance().createDataset();
    quads.add(expected);
    final Resource res = new JoiningResourceService.PersistableResource(id, LDP.Container, null, quads);
    assertEquals(id, res.getIdentifier(), "Resource has wrong ID!");
    assertEquals(LDP.Container, res.getInteractionModel(), "Resource has wrong LDP type!");
    // The modification time must fall between test start and now.
    assertFalse(res.getModified().isBefore(before), "Resource modification date predates its creation!");
    assertFalse(res.getModified().isAfter(now()), "Resource modification date is too late!");
    assertTrue(res.stream().anyMatch(expected::equals), "Expected quad not present in resource stream");
    assertFalse(res.getContainer().isPresent(), "Expected no parent container");
    assertThrows(UnsupportedOperationException.class, res::hasAcl, "ACL retrieval should throw an exception!");
}
}
/**
 * Verify the N-Quads style toString() output of a JenaDataset, and that
 * the dataset contains exactly the single quad that was added.
 */
@Test
public void datasetImplToStringTest() {
    final RDF rdf = createFactory();
    final JenaDataset jena = (JenaDataset) rdf.createDataset();
    final IRI graph = rdf.createIRI("http://example.com/");
    final IRI s = rdf.createIRI("http://example.com/s");
    final IRI p = rdf.createIRI("http://example.com/p");
    final Literal literal123 = rdf.createLiteral("123", Types.XSD_INTEGER);
    jena.add(graph, s, p, literal123);
    final String out = jena.toString();
    assertEquals("<http://example.com/s> <http://example.com/p> \"123\"^^<http://www"
            + ".w3.org/2001/XMLSchema#integer> <http://example.com/> .\n", out);
    // Fixed: original referenced an undeclared `dataset` variable and expected
    // size 10; exactly one quad was added to `jena` above.
    assertEquals(1L, jena.size());
}
}
private Dataset createDataset2() { final RDF factory2 = createFactory(); final IRI name = factory2.createIRI("http://xmlns.com/foaf/0.1/name"); final Dataset g2 = factory2.createDataset(); final BlankNode b1 = createOwnBlankNode("b1", "bc8d3e45-a08f-421d-85b3-c25b373abf87"); g2.add(b1, b1, name, factory2.createLiteral("Charlie")); final BlankNode b2 = createOwnBlankNode("b2", "2209097a-5078-4b03-801a-6a2d2f50d739"); g2.add(b2, b2, name, factory2.createLiteral("Dave")); final IRI hasChild = factory2.createIRI("http://example.com/hasChild"); // NOTE: Opposite direction of loadDataset1 g2.add(b2, b2, hasChild, b1); return g2; }
/**
 * Make a new dataset with two BlankNodes - each with a different
 * uniqueReference - named Alice and Bob, with a hasChild link from
 * Alice to Bob stored in the default graph.
 */
private Dataset createDataset1() {
    final RDF factory1 = createFactory();
    final Dataset g1 = factory1.createDataset();
    final IRI name = factory1.createIRI("http://xmlns.com/foaf/0.1/name");
    final IRI hasChild = factory1.createIRI("http://example.com/hasChild");
    final BlankNode alice = createOwnBlankNode("b1", "0240eaaa-d33e-4fc0-a4f1-169d6ced3680");
    final BlankNode bob = createOwnBlankNode("b2", "9de7db45-0ce7-4b0f-a1ce-c9680ffcfd9f");
    g1.add(alice, alice, name, factory1.createLiteral("Alice"));
    g1.add(bob, bob, name, factory1.createLiteral("Bob"));
    // The child link goes in the default graph (null graph name).
    g1.add(null, alice, hasChild, bob);
    return g1;
}
/**
 * Verify that a dataset created by the Jena RDF factory passes through
 * asJenaDataset without conversion: repeated calls yield the same graph.
 */
@Test
public void testDatasetNoConversion() {
    final Dataset ds = jenaRdf.createDataset();
    ds.add(jenaRdf.createQuad(PreferUserManaged, subject, SKOS.prefLabel, literal));
    ds.add(jenaRdf.createQuad(PreferUserManaged, subject, type, SKOS.Concept));
    ds.add(jenaRdf.createQuad(PreferUserManaged, subject, DC.subject, AS.Activity));
    assertEquals(3L, ds.size(), "Confirm dataset size");
    assertTrue(TriplestoreUtils.asJenaDataset(ds).containsNamedModel(PreferUserManaged.getIRIString()),
            "Confirm presence of trellis:PreferUserManaged named graph");
    assertEquals(TriplestoreUtils.asJenaDataset(ds).asDatasetGraph(),
            TriplestoreUtils.asJenaDataset(ds).asDatasetGraph(),
            "Confirm datasets are equal");
}
/**
 * Verify that a non-Jena (simple) dataset is converted by asJenaDataset:
 * each call produces a distinct underlying graph.
 */
@Test
public void testDatasetConversion() {
    final Dataset ds = simpleRdf.createDataset();
    ds.add(simpleRdf.createQuad(PreferUserManaged, subject, SKOS.prefLabel, literal));
    ds.add(simpleRdf.createQuad(PreferUserManaged, subject, type, SKOS.Concept));
    ds.add(simpleRdf.createQuad(PreferUserManaged, subject, DC.subject, AS.Activity));
    assertEquals(3L, ds.size(), "Confirm dataset size");
    assertTrue(TriplestoreUtils.asJenaDataset(ds).containsNamedModel(PreferUserManaged.getIRIString()),
            "Confirm presence of trellis:PreferUserManaged named graph");
    assertNotEquals(TriplestoreUtils.asJenaDataset(ds).asDatasetGraph(),
            TriplestoreUtils.asJenaDataset(ds).asDatasetGraph(),
            "Confirm dataset has been converted");
}
// Test fixture setup: create a fresh factory and an empty dataset before each test.
// NOTE(review): method continues beyond this view; only the visible portion is documented.
@Before public void createDatasetAndAdd() { factory = createFactory(); dataset = factory.createDataset();
// A newly created dataset must start empty.
assertEquals(0, dataset.size());