/**
 * Retrieves the TDB dataset with the quality problems for a metric.
 * Any triples still buffered in memory are flushed via {@link #commit()}
 * before the dataset is handed out, so callers see a consistent view.
 *
 * @return TDB Dataset backing this problem collection
 */
public Dataset getDataset() {
    commit();
    return dataset;
}
/**
 * Releases the TDB-backed problem collection of every registered metric
 * instance by invoking its {@code cleanup()}.
 * Metrics without a problem collection are skipped.
 */
protected void clearTDBFiles() {
    // Iterate the map's values directly instead of the original
    // keySet() + get(key) pattern, which performed a redundant lookup per entry.
    for (QualityMetric<?> metric : this.metricInstances.values()) {
        ProblemCollection<?> problemCollection = metric.getProblemCollection();
        if (problemCollection != null) {
            problemCollection.cleanup();
        }
    }
}
/**
 * Creates instance triples attaching a problem collection to this
 * Quality Report, then releases the collection's resources.
 *
 * @param problemCollection - the collection of quality problems to attach;
 *        its named graph is merged into the report model and the collection
 *        is cleaned up afterwards
 */
public void addToQualityProblemReport(ProblemCollection<?> problemCollection){
    // Link the report resource to the collection's problem URI
    this.m.add(new StatementImpl(this.reportURI, QPRO.hasProblem, problemCollection.getProblemURI()));
    // Copy the collection's named graph out of its dataset into the report model
    this.m.add(problemCollection.getDataset().getNamedModel(problemCollection.getNamedGraph()));
    // The triples have been copied; free the TDB-backed storage
    problemCollection.cleanup();
}
/**
 * Micro-benchmark: writes {@code size} randomly generated quads into a fresh
 * ProblemCollectionQuad, {@code itr} times, printing the elapsed write time
 * and throughput for each run.
 */
private static void execute(int size, Resource metric, int itr) {
    for (int run = 0; run < itr; run++) {
        ProblemCollection<Quad> problems = new ProblemCollectionQuad(metric);
        long start = System.nanoTime();
        for (int i = 0; i < size; i++) {
            // Random subject/predicate/object URIs; default (null) graph
            Quad problem = new Quad(null, new Triple(
                    ResourceCommons.generateURI().asNode(),
                    ResourceCommons.generateURI().asNode(),
                    ResourceCommons.generateURI().asNode()));
            problems.addProblem(problem);
        }
        // commit() is declared on the concrete type, hence the cast
        ((ProblemCollectionQuad) problems).commit();
        long write = System.nanoTime() - start;
        problems.cleanup();
        System.gc();
        System.out.println(String.format("[%s] %s Write took %s ms, Write/tp :%s",
                run,
                problems.getClass().getName(),
                TimeUnit.NANOSECONDS.toMillis(write),
                TimeUnit.SECONDS.toNanos(size) / write));
    }
}
@Override public synchronized void commit() { if (super.isHPCEnabled) { // nothing to do here } else { if (this.totalTriples > 0) { logger.info("Commiting in-mem Model Resource to Datagraph. Metric: "+super.metricURI+". Number of triples added: "+this.totalTriples+" dataset location: "+super.location); this.totalTriples = 0; dataset.begin(ReadWrite.WRITE) ; try { super.getReentrantLock().lock(); dataset.addNamedModel(getNamedGraph(), this._m); } finally { dataset.commit(); dataset.end(); super.getReentrantLock().unlock(); } this._m.removeAll(); this._m = null; System.gc(); this._m = ModelFactory.createDefaultModel(); } } } }
@Override public synchronized void commit() { if (super.isHPCEnabled) { // nothing to do here } else { if (this.totalTriples > 0) { logger.info("Commiting in-mem Model to Datagraph. Metric: "+super.metricURI+". Number of triples added: "+this.totalTriples+" dataset location: "+super.location); this.totalTriples = 0; dataset.begin(ReadWrite.WRITE) ; try { super.getReentrantLock().lock(); dataset.addNamedModel(getNamedGraph(), this._m); } finally { dataset.commit(); dataset.end(); super.getReentrantLock().unlock(); } this._m.removeAll(); this._m = null; System.gc(); this._m = ModelFactory.createDefaultModel(); } } } }
/**
 * Closes and releases the TDB dataset backing this problem collection.
 * Calling it on an already-closed dataset is tolerated: the close failure
 * is caught and logged, and the release still proceeds.
 */
public void cleanup() {
    try {
        dataset.close();
    } catch (Exception e) {
        // Expected when cleanup() is invoked more than once
        logger.info("Dataset {} is already closed. Nothing to do here", TDB_DIRECTORY);
    }
    if (!isHPCEnabled) {
        try {
            // Serialise the release against concurrent commit()/read access
            getReentrantLock().lock();
            TDBFactory.release(dataset);
            //TODO: Clean up temp files if done
            // File f = new File(TDB_DIRECTORY);
            // FileUtils.deleteDirectory(f);
            // } catch (IOException e) {
            // e.printStackTrace();
        } finally {
            getReentrantLock().unlock();
        }
    }
    dataset = null;
    // NOTE(review): explicit GC hint matches the rest of this file; the
    // dereferenced dataset would be collected without it.
    System.gc();
}
/**
 * Records a problematic element described by a full model, linking the
 * supplied problematic-thing URI to this problem via qpro:problematicThing.
 * Triggers a commit once the buffered triple count reaches MAX_TRIPLES.
 *
 * @param problematicElement model describing the problematic element
 * @param problematicThingURI URI used as the problematic thing in the report
 */
public void addProblem(Model problematicElement, Resource problematicThingURI) {
    isEmpty = false;
    // One link statement plus everything in the supplied model
    long added = problematicElement.size() + 1;
    _m.add(new StatementImpl(problemURI, QPRO.problematicThing, problematicThingURI));
    _m.add(problematicElement);
    totalTriples += (int) added;
    if (totalTriples >= MAX_TRIPLES) {
        commit();
        totalTriples = 0;
    }
}
public synchronized void commit() { if (super.isHPCEnabled) { // nothing to do here } else { if (this.totalTriples > 0) { logger.info("Commiting in-mem Model Quads to Datagraph. Metric: "+super.metricURI+". Number of triples added: "+this.totalTriples+" dataset location: "+super.location); this.totalTriples = 0; dataset.begin(ReadWrite.WRITE) ; try { dataset.addNamedModel(getNamedGraph(), this._m); dataset.commit(); } finally { dataset.end(); } this._m = null; System.gc(); this._m = ModelFactory.createDefaultModel(); } } }
/**
 * Appends a problematic resource to the problem list as the next rdf:_N
 * sequence member. Triggers a commit once MAX_TRIPLES triples are buffered.
 *
 * @param problematicElement the resource to record as problematic
 */
@Override
public void addProblem(Resource problematicElement) {
    isEmpty = false;
    // rdf:li(seqCounter) is evaluated before the post-increment takes effect
    _m.add(new StatementImpl(problemList, RDF.li(seqCounter++),
            ResourceCommons.asRDFNode(problematicElement.asNode())));
    totalTriples++;
    if (totalTriples >= MAX_TRIPLES) {
        commit();
    }
}
if (!problemCollection.isEmpty()) { report.addToQualityProblemReport(problemCollection);
/**
 * Records a problematic quad by reifying it (rdf:Statement with subject,
 * predicate, object and, when present, the source graph) under a fresh
 * blank node, and appending that node to the problem list.
 * Triggers a commit once the buffered triple count reaches MAX_TRIPLES.
 *
 * @param problematicElement the quad to record as problematic
 */
@Override
public void addProblem(Quad problematicElement) {
    this.isEmpty = false;
    Resource bNode = ResourceCommons.generateRDFBlankNode().asResource();
    Quad q = problematicElement;
    this._m.add(new StatementImpl(bNode, RDF.type, RDF.Statement));
    this._m.add(new StatementImpl(bNode, RDF.subject, ResourceCommons.asRDFNode(q.getSubject())));
    this._m.add(new StatementImpl(bNode, RDF.predicate, ResourceCommons.asRDFNode(q.getPredicate())));
    this._m.add(new StatementImpl(bNode, RDF.object, ResourceCommons.asRDFNode(q.getObject())));
    // type + subject + predicate + object + list-entry statements
    int added = 5;
    if (q.getGraph() != null) {
        _m.add(new StatementImpl(bNode, QPRO.inGraph, ResourceCommons.asRDFNode(q.getGraph())));
        // BUG FIX: the original always added 5 to totalTriples, undercounting
        // by one whenever the inGraph statement was written; that skewed the
        // MAX_TRIPLES flush threshold.
        added++;
    }
    _m.add(new StatementImpl(this.problemList, RDF.li(seqCounter), ResourceCommons.asRDFNode(bNode.asNode())));
    seqCounter++;
    totalTriples += added;
    if (this.totalTriples >= MAX_TRIPLES) {
        this.commit();
    }
}
/**
 * Serialises the given problem collection's named graph into the report
 * output stream (Turtle), links the collection to the report resource,
 * and releases the collection afterwards.
 *
 * @param problemCollection - the collection of quality problems to
 *        serialise and attach to this report
 */
public void addToQualityProblemReport(ProblemCollection<?> problemCollection){
    Resource problemURI = problemCollection.getProblemURI();
    this.m.add(new StatementImpl(this.reportURI, QPRO.hasProblem, problemURI));
    Dataset d = problemCollection.getDataset();
    try {
        // Read transaction plus the collection's lock while streaming out the graph
        d.begin(ReadWrite.READ);
        problemCollection.getReentrantLock().lock();
        Model _m = d.getNamedModel(problemCollection.getNamedGraph());
        RDFDataMgr.write(this.serialisationOutput, _m, RDFFormat.TURTLE_PRETTY);
    } finally {
        problemCollection.getReentrantLock().unlock();
        d.end();
        d.close();
    }
    // NOTE(review): cleanup() closes the dataset again; its close() is
    // wrapped in a catch, so the double-close above is tolerated - consider
    // dropping the explicit d.close() here.
    problemCollection.cleanup();
}
/**
 * Returns the URI identifying this problem collection, flushing any
 * buffered in-memory triples to the dataset first.
 *
 * @return the problem collection's resource URI
 */
public Resource getProblemURI() {
    // Ensure pending triples are persisted before the URI is handed out
    commit();
    return this.problemURI;
}