/**
 * Test the consistency of the bound data. This normally tests
 * the validity of the bound instance data against the bound
 * schema data.
 *
 * @return a ValidityReport structure; this implementation performs no
 *         checks, so the report it returns contains no entries
 */
@Override
public ValidityReport validate() {
    checkOpen();
    // No validation rules apply here: an empty report signals "valid".
    ValidityReport emptyReport = new StandardValidityReport();
    return emptyReport;
}
/** * Add a new error report * @param error true if the report is an error, false if it is just a warning * @param type a string giving a reasoner-dependent classification for the report * @param description a textual description of the problem */ public void add(boolean error, String type, String description) { add(error, type, description, null); }
/**
 * <p>
 * Test the consistency of the model. This looks for overall inconsistency,
 * and for any unsatisfiable classes.
 * </p>
 *
 * @return a ValidityReport structure; contains an error entry if the KB is
 *         inconsistent, otherwise one warning entry per unsatisfiable class
 */
@Override
public ValidityReport validate() {
    checkOpen();
    prepare();

    final StandardValidityReport result = new StandardValidityReport();

    // Explanation generation must be enabled around the consistency check
    // so that getExplanation() below has something to return.
    kb.setDoExplanation( true );
    final boolean kbConsistent = kb.isConsistent();
    kb.setDoExplanation( false );

    if( !kbConsistent ) {
        // A globally inconsistent KB is a hard error; report it and stop.
        result.add( true, "KB is inconsistent!", kb.getExplanation() );
        return result;
    }

    // Consistent KB: flag each unsatisfiable class as a (non-error) problem.
    for( final ATermAppl unsatClass : kb.getUnsatisfiableClasses() ) {
        final String className = JenaUtils.makeGraphNode( unsatClass ).toString();
        result.add( false, "Unsatisfiable class", className );
    }

    return result;
}
public ValidityReport validate() { checkOpen(); StandardValidityReport report = new StandardValidityReport(); report.add(nature.equalsIgnoreCase("error"), type, description.toString(), culprit);
/**
 * Run a datatype range check on all literal values of all properties
 * with a range declaration.
 *
 * @param report the report to which each literal check result is added
 */
protected void performDatatypeRangeValidation(StandardValidityReport report) {
    // Every property that has a declared datatype range.
    HashMap<Node, List<RDFDatatype>> rangeByProperty = getDTRange();
    for (Node property : rangeByProperty.keySet()) {
        // Check each asserted value of the property against its range.
        Iterator<Triple> matches = find(null, property, null);
        while (matches.hasNext()) {
            Triple assertion = matches.next();
            report.add(checkLiteral(property, assertion));
        }
    }
}