/** {@inheritDoc} */
@Override
public long size() {
    // Delegate to the wrapped model held by the superclass.
    final long statementCount = super.object.size();
    return statementCount;
}
/**
 * Answer the size of this result-set's model.
 *
 * @return the number of statements in the merged model
 */
public long modelSize() {
    return model.merged.size();
}
/** Returns the number of statements in the backing model. */
public long size() {
    return model.size();
}
/**
 * Counts the triples in the given model.
 *
 * @param model the model to measure
 * @return the statement count; the {@code long} from {@code Model.size()} is
 *         widened implicitly to match this method's {@code double} return type
 */
public double getNumberOfTriples(Model model) {
    final long triples = model.size();
    return triples;
}
/**
 * Returns the number of statements in the underlying Jena model.
 *
 * @return the model size as reported by Jena
 * @throws ModelRuntimeException if the model is not in a usable state
 */
@Override
public long size() throws ModelRuntimeException {
    assertModel();
    // Bug fix: the previous "(int)" cast truncated sizes above Integer.MAX_VALUE
    // before widening back to long. Jena's Model.size() already returns a long,
    // so return it unmodified.
    return this.jenaModel.size();
}
@Override public long size() throws ModelRuntimeException { // Start with the size of the default graph long size = this.dataset.getDefaultModel().size(); // Loop and add the sizes of all contained graphs Iterator<String> it = this.dataset.listNames(); while (it.hasNext()) { size += this.dataset.getNamedModel(it.next()).size(); } return size; }
/** * Q: Under what conditions are we allowed to do this directly through Dataset.getNamedModel() actions? A: Not sure * - the clean-est way is to generate SPARQL-UPDATE and apply. If we are allowed to modify the model directly using * Jena API, then it will be sufficient (for immediate practical purposes) to delete all triples with actionIdent as * SUBJECT. * * @param tas */ @Deprecated private void old_deleteThingAction(RepoClient rc, Ident graphID, ThingActionSpec tas) { Ident actionID = tas.getActionSpecID(); Resource actionRes = rc.getDefaultRdfNodeTranslator().makeResourceForIdent(actionID); //Repo.WithDirectory repo = rc.getRepo(); theLogger.error("FIXME: About to fetch a readonly model and then try to write to it, which will have no effect - FIXME!!!"); Model gm = rc.getNamedModelReadonly(graphID); // repo.getNamedModel(graphID); FIXME theLogger.info("Prior to removal from {}, graph size is {}", graphID, gm.size()); gm.removeAll(actionRes, null, null); theLogger.info("After remova from {}, graph size is {}", graphID, gm.size()); }
/**
 * Deletes all triples having the given thing-action's ident as SUBJECT by
 * writing directly to the named graph model obtained from the repo.
 *
 * Q: Under what conditions are we allowed to do this directly through
 * Dataset.getNamedModel() actions?
 * A: Not sure - the clean-est way is to generate SPARQL-UPDATE and apply.
 * If we are allowed to modify the model directly using Jena API, then it will
 * be sufficient (for immediate practical purposes) to delete all triples with
 * actionIdent as SUBJECT.
 *
 * @param rc      repo client used to resolve idents and reach the repo
 * @param graphID ident naming the graph to delete from
 * @param tas     the thing-action whose triples should be removed
 */
private void deleteThingAction(RepoClient rc, Ident graphID, ThingActionSpec tas) {
    Ident actionID = tas.getActionSpecID();
    Resource actionRes = rc.makeResourceForIdent(actionID);
    Repo.WithDirectory repo = rc.getRepo();
    Model gm = repo.getNamedModel(graphID);
    theLogger.info("Prior to removal from {}, graph size is {}", graphID, gm.size());
    // Remove every statement whose subject is the action's resource.
    gm.removeAll(actionRes, null, null);
    // Bug fix: this log message previously read "After remova".
    theLogger.info("After removal from {}, graph size is {}", graphID, gm.size());
}
/**
 * Builds a TDB-backed source for the given endpoint resource.
 *
 * The endpoint URI is expected to start with {@code TDBManager.PREFIX}; the
 * remainder of the URI names a specific model inside the TDB dataset. An empty
 * remainder means "use the whole dataset" and leaves {@code source} null.
 *
 * @param endpoint the configuration resource whose URI identifies the TDB model
 */
public TDBSource( Resource endpoint ) {
    super( endpoint );
    String endpointString = endpoint.getURI();
    // Everything after the TDB prefix is the (possibly empty) model name.
    String name = endpointString.substring( TDBManager.PREFIX.length() );
    this.endpoint = endpointString;
    this.sourceSet = TDBManager.getDataset();
    // NOTE(review): substring() never returns null, so the null check looks
    // redundant — confirm before simplifying.
    if (name != null && !name.isEmpty()) {
        this.source = TDBManager.getTDBModelNamed(name);
        log.debug( "TDB with endpoint '{}' has model with {} triples" , endpointString, this.source.size() );
        // An empty named model is treated as a configuration error.
        if (this.source.isEmpty()) EldaException.EmptyTDB( name );
    } else {
        source = null;
        log.info("using TDB whole dataset");
    }
}
/**
 * Intersect this with another model. As an attempt at optimisation, we try
 * and ensure we iterate over the smaller model first. Nowadays it's not clear
 * that this is a good idea, since <code>size()</code> can be expensive on
 * database and inference models.
 *
 * @see com.hp.hpl.jena.rdf.model.Model#intersection(com.hp.hpl.jena.rdf.model.Model)
 */
@Override
public Model intersection( Model other ) {
    // Iterate over whichever model reports the smaller size.
    if (this.size() < other.size()) {
        return intersect( this, other );
    }
    return intersect( other, this );
}
/**
 * Round-trips the captured JSON-LD response through a fresh model and dumps
 * the result as Turtle for manual inspection (currently ignored).
 */
@Test
@Ignore
public void testReadsJSON_LD() throws UnsupportedEncodingException {
    Model reconstituted = ModelFactory.createDefaultModel();
    byte[] payload = response.getBytes("UTF-8");
    reconstituted.read(new ByteArrayInputStream(payload), "", "JSON-LD");
    System.err.println(">> reconstituted "+ reconstituted.size() + " triples.");
    reconstituted.write(System.err, "TURTLE");
    System.err.println(">> done.");
}
/**
 * Inserts every statement of {@code rdf} into the named graph, one
 * SPARQL INSERT per statement, reporting progress as it goes.
 *
 * @param rdf             the model whose statements are inserted
 * @param graphUri        URI of the target graph
 * @param progressMonitor optional; receives (done, total) after each insert
 * @throws DaoException wrapping any failure, with the offending update string
 *                      as its message and the original exception as cause
 */
public void insert(Model rdf, String graphUri, ProgressMonitor progressMonitor) throws DaoException {
    // NOTE(review): the (int) cast truncates for models with more than
    // Integer.MAX_VALUE statements — presumably acceptable for progress display.
    int total = (int) rdf.size();
    String sparql = null;
    try {
        int counter = 0;
        for (StmtIterator i = rdf.listStatements(); i.hasNext();) {
            Statement stmt = i.nextStatement();
            // NOTE(review): graphUri is concatenated into the update string
            // unescaped; if it can come from untrusted input this is an
            // injection risk — confirm its callers.
            sparql = "INSERT INTO GRAPH <" + graphUri + "> { " + NTripleStamentWriter.writeStament(stmt) + " }";
            if (LOG.isDebugEnabled()) {
                LOG.debug(sparql);
            }
            VirtuosoUpdateRequest vqe = VirtuosoUpdateFactory.create(sparql, virtGraph);
            vqe.exec();
            if (progressMonitor != null) {
                progressMonitor.setProgress(++counter, total);
            }
        }
    } catch (Exception e) {
        // The failing update string is preserved in the message; cause is chained.
        throw new DaoException(sparql, e);
    } finally {
        // NOTE(review): the graph is closed even on success, so this DAO
        // appears to be single-use after insert() — confirm that is intended.
        virtGraph.close();
    }
}
/**
 * Creates quality metadata
 *
 * @return Dataset with quality metadata which needs to be attached to the assessed dataset.
 * @throws MetadataException if there is no observation data calculated.
 */
public Dataset createQualityMetadata() throws MetadataException {
    // Refuse to build metadata when no observations were recorded.
    if (this.metadata.size() == 0) {
        throw new MetadataException("No Metric Observations Recorded");
    }
    // Default graph describes the quality graph itself (type + cube structure).
    Model header = ModelFactory.createDefaultModel();
    header.add(qualityGraph, RDF.type, DAQ.QualityGraph);
    header.add(qualityGraph, CUBE.structure, DAQ.dsd);
    // The observations live in a named graph keyed by the quality graph's URI.
    Dataset result = new DatasetImpl(header);
    result.addNamedModel(this.qualityGraph.getURI(), this.metadata);
    return result;
}
/** * Get RDF metadata from manifest.rdf and those rdf files registered in the * manifest.xml as "application/rdf+xml" through GRDDL XSLT * http://docs.oasis-open.org/office/v1.2/os/OpenDocument-v1.2-os-part1.html#__RefHeading__1415072_253892949 */ public Model getManifestRDFMetadata() throws Exception { Model m = ModelFactory.createDefaultModel(); for (String internalPath : this.getPackage().getFilePaths()) { if (Util.isSubPathOf(internalPath, this.getDocumentPath()) && this.getPackage().getMediaTypeString(internalPath).endsWith("application/rdf+xml")) { Model m1 = ModelFactory.createDefaultModel(); String RDFBaseUri = Util.getRDFBaseUri(this.getPackage().getBaseURI(), internalPath); m1.read(new InputStreamReader(this.getPackage().getInputStream(internalPath), "utf-8"), RDFBaseUri); // remove the last SLASH at the end of the RDFBaseUri: // test_rdfmeta.odt/ --> test_rdfmeta.odt ResourceUtils.renameResource(m1.getResource(RDFBaseUri), RDFBaseUri.substring(0, RDFBaseUri.length() - 1)); if (m1.size() > 0) { m = m.union(m1); } } } return m; }
/**
 * Asserts that {@code model} contains exactly one statement:
 * &lt;http://example.com/test&gt; &lt;http://example.com/value&gt; "Test".
 */
private void checkRelative(Model model) {
    assertEquals(1, model.size());
    Statement only = model.listStatements().next();
    assertEquals("http://example.com/value", only.getPredicate().toString());
    assertEquals("Test", only.getString());
    assertEquals("http://example.com/test", only.getSubject().toString());
}
/**
 * Exercises abort(): an aborted add leaves the model empty, and an aborted
 * remove leaves a previously-committed triple intact.
 */
@Test
public void rollback() {
    Model m = SDBFactory.connectDefaultModel(store);
    assertTrue("Initially empty", m.isEmpty());
    // Add inside a transaction, then abort: the triple must vanish.
    m.begin();
    m.add(RDF.type, RDF.type, RDF.type);
    assertTrue("Uncommited triple can be seen", m.contains(RDF.type, RDF.type, RDF.type));
    m.abort();
    assertTrue("Nothing was added, the add aborted", m.isEmpty());
    // Add outside a transaction (autocommit), then abort a remove: survives.
    m.add(RDF.type, RDF.type, RDF.type);
    assertEquals("Model contains 1 triple", 1l, m.size());
    m.begin();
    m.remove(RDF.type, RDF.type, RDF.type);
    m.abort();
    assertEquals("Model still contains 1 triple", 1l, m.size());
}
/**
 * Assembles a default-graph model and a named-graph model from the same store
 * description and checks they are independent: a triple added to one must not
 * appear in the other.
 */
@Test
public void model_4() {
    Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
    Resource xDft = assem.getResource("http://example/test#graphDft") ;
    Resource xNamed = assem.getResource("http://example/test#graphNamed") ;
    Store store = create(assem) ;
    Model model1 = (Model)Assembler.general.open(xDft) ;
    Model model2 = (Model)Assembler.general.open(xNamed) ;
    // Check they are not connected to the same place in the store
    Resource s = model1.createResource() ;
    Property p = model1.createProperty("http://example/p") ;
    Literal o = model1.createLiteral("foo") ;
    model1.add(s,p,o) ;
    assertTrue(model1.contains(s, p, o)) ;
    // Only the default-graph model received the statement.
    assertTrue(model1.size() == 1 ) ;
    assertTrue(model2.size() == 0 ) ;
    assertFalse(model1.isIsomorphicWith(model2)) ;
}
/**
 * Opens two independent SDB connections/stores over the same underlying JDBC
 * connection and checks that models from each see the combined contents.
 */
@Test
public void connection_1() {
    SDBConnection conn1 = SDBFactory.createConnection(conn) ;
    Store store1 = StoreFactory.create(storeDesc, conn1) ;
    // Reset
    store1.getTableFormatter().format();
    SDBConnection conn2 = SDBFactory.createConnection(conn) ;
    Store store2 = StoreFactory.create(storeDesc, conn2) ;
    Model model1 = SDBFactory.connectDefaultModel(store1) ;
    Model model2 = SDBFactory.connectDefaultModel(store2) ;
    Resource s = model1.createResource() ;
    Property p = model1.createProperty("http://example/p") ;
    // These are autocommit so two stores should be OK (but not a good design paradigm)
    model1.add(s, p, "model1") ;
    model2.add(s, p, "model2") ;
    // Both models view the same underlying database, so each sees both triples.
    assertEquals(2, model1.size()) ;
    assertEquals(2, model2.size()) ;
    assertTrue(model1.isIsomorphicWith(model2)) ;
}
} // NOTE(review): this brace closes the enclosing test class (it appears on the same source line)
@Test public void update3() { Dataset ds = TDBFactory.createDataset() ; ds.asDatasetGraph().getDefaultGraph().add(t1) ; ds.getNamedModel(graphName).getGraph().add(t1) ; Model m = ds.getDefaultModel() ; m.removeAll() ; assertEquals(0, m.size()) ; // But still in the other graph assertTrue(ds.getNamedModel(graphName).getGraph().contains(t1)) ; }
@Test public void update4() { Dataset ds = TDBFactory.createDataset() ; ds.asDatasetGraph().getDefaultGraph().add(t1) ; ds.getNamedModel(graphName).getGraph().add(t1) ; Model m = ds.getNamedModel(graphName) ; m.removeAll() ; assertEquals(0, m.size()) ; // But still in the other graph assertTrue(ds.getDefaultModel().getGraph().contains(t1)) ; }