private void incrementalClassify() {
    if( log.isLoggable( Level.FINE ) ) {
        log.fine( "Incremental classification starting" );
    }

    Timer timer = timers.startTimer( "incrementalClassify" );

    incClassifyAllModStrategy();

    timer.stop();

    if( log.isLoggable( Level.FINE ) ) {
        log.fine( "Incremental classification done" );
    }
}

public Collection<PartialBinding> applyRete() {
    Timer t;
    if( PelletOptions.ALWAYS_REBUILD_RETE ) {
        t = timers.startTimer( "rule-rebuildRete" );

        partialBindings.clear();
        partialBindings.addAll( unsafeRules );
        interpreter.reset();

        t.stop();
    }

    t = timers.startTimer( "rule-reteRun" );
    interpreter.run();
    t.stop();

    return interpreter.getBindings();
}

public void classify() {
    ensureConsistency();

    if( isClassified() )
        return;

    if( log.isLoggable( Level.FINE ) )
        log.fine( "Classifying..." );

    Timer timer = timers.startTimer( "classify" );

    builder = getTaxonomyBuilder();

    // This is false if the progress monitor is canceled
    boolean isClassified = builder.classify();

    timer.stop();

    if( !isClassified )
        return;

    state.add( ReasoningState.CLASSIFY );

    estimate.computKBCosts();
}

reset();

Timer t = timers.startTimer( "createConcepts" );
logger.fine( "Creating structures" );
createConcepts();
t.stop();

t = timers.startTimer( "processQueue" );
processQueue();
t.stop();

t = timers.startTimer( "buildHierarchy" );

public boolean isDirectlyBlocked(Individual blocked) {
    Timer t = blocked.getABox().getKB().timers.startTimer( "dBlocking" );
    try {
        return isDirectlyBlockedInt( blocked );
    }
    finally {
        t.stop();
    }
}

/**
 * Extract modules from scratch.
 *
 * @return a map from each class to the entities in its module
 */
public MultiValueMap<OWLEntity, OWLEntity> extractModules() {
    Timer timer = timers.startTimer( "extractModules" );

    // cache the axiom signatures
    processAdditions();
    additions.clear();

    // no need to consider deletions for initial module extraction
    deletions.clear();
    changes.clear();

    nonLocalAxioms = false;

    modules = new MultiValueMap<OWLEntity, OWLEntity>();
    extractModuleSignatures( allClasses );

    timer.stop();

    return modules;
}

@Override
public void run() {
    // classify ontology
    Timer timer = timers.startTimer( "reasonerClassify" );
    reasoner.flush();
    reasoner.getKB().classify();
    timer.stop();

    if( log.isLoggable( Level.FINE ) ) {
        log.fine( "Regular taxonomy:" );
        new TreeTaxonomyPrinter<ATermAppl>().print( reasoner.getKB().getTaxonomy(), new PrintWriter( System.err ) );
    }

    // copy the classification results into an OWLClass taxonomy
    timer = timers.startTimer( "buildClassHierarchy" );
    taxonomy = buildClassHierarchy( reasoner );
    timer.stop();

    if( log.isLoggable( Level.FINE ) ) {
        log.fine( "Copied taxonomy:" );
        new TreeTaxonomyPrinter<OWLClass>().print( taxonomy, new PrintWriter( System.err ) );
    }
}

public void load(Set<OWLOntology> ontologies) {
    Timer timer = kb.timers.startTimer( "load" );

    int axiomCount = 0;
    Collection<OWLOntology> toBeLoaded = new LinkedHashSet<OWLOntology>();
    for( OWLOntology ontology : ontologies )
        axiomCount += load( ontology, false, toBeLoaded );

    visitor.reset();
    visitor.setAddAxiom( true );

    for( OWLOntology ontology : toBeLoaded )
        ontology.accept( visitor );

    visitor.verify();

    timer.stop();
}

Timer t = timers.startTimer( "createConcepts" );
processAxioms();
t.stop();

t = timers.startTimer( "buildHierarchy" );
buildTaxonomy( subsumers );
t.stop();

public void realize() {
    if( isRealized() )
        return;

    classify();

    if( !isClassified() )
        return;

    Timer timer = timers.startTimer( "realize" );

    // This is false if the progress monitor is canceled
    boolean isRealized = builder.realize();

    timer.stop();

    if( !isRealized )
        return;

    state.add( ReasoningState.REALIZE );

    estimate.computKBCosts();
}

public boolean isBlocked(Individual blocked) {
    Timer t = blocked.getABox().getKB().timers.startTimer( "blocking" );
    try {
        return !blocked.isRoot()
            && (isIndirectlyBlocked( blocked ) || isDirectlyBlockedInt( blocked ));
    }
    finally {
        t.stop();
    }
}

public boolean isSubClassOf(ATermAppl c1, ATermAppl c2) {
    if( !doExplanation ) {
        Bool isKnownSubClass = isKnownSubClassOf( c1, c2 );
        if( isKnownSubClass.isKnown() ) {
            return isKnownSubClass.isTrue();
        }
    }

    if( log.isLoggable( Level.FINE ) ) {
        long count = kb.timers.getTimer( "subClassSat" ) == null
            ? 0
            : kb.timers.getTimer( "subClassSat" ).getCount();
        log.fine( count + ") Checking subclass [" + ATermUtils.toString( c1 ) + " "
            + ATermUtils.toString( c2 ) + "]" );
    }

    // c1 is subsumed by c2 iff (c1 and not c2) is unsatisfiable
    ATermAppl notC2 = ATermUtils.negate( c2 );
    ATermAppl c = ATermUtils.makeAnd( c1, notC2 );

    Timer t = kb.timers.startTimer( "subClassSat" );
    boolean sub = !isSatisfiable( c, false );
    t.stop();

    if( log.isLoggable( Level.FINE ) ) {
        log.fine( " Result: " + sub + " (" + t.getLast() + "ms)" );
    }

    return sub;
}

/**
 * {@inheritDoc}
 */
public void load(Iterable<Graph> graphs) throws UnsupportedFeatureException {
    Timer timer = kb.timers.startTimer( "load" );

    monitor.setProgressTitle( "Loading" );
    monitor.taskStarted();

    graph = EMPTY_GRAPH;
    preprocess();

    for( Graph g : graphs ) {
        graph = g;
        processTypes();
    }

    for( Graph g : graphs ) {
        graph = g;
        processTriples();
    }

    processUntypedResources();

    monitor.taskFinished();
    timer.stop();
}
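
A recurring difference across these snippets: isBlocked() and isDirectlyBlocked() stop their timers in a finally block, while the other methods call stop() inline, so an exception thrown by the timed call leaves that timer running. A small helper could make the safe idiom the default. The sketch below is a hypothetical addition, not part of Pellet: the timed() name and the Supplier parameter are assumptions, it presumes Java 8+, and it relies only on the Timers.startTimer()/Timer.stop() calls shown above.

// Hypothetical helper (a sketch, not Pellet API): runs a computation
// between startTimer() and stop(), using try/finally so the timer is
// stopped even if the body throws.
public static <T> T timed(Timers timers, String name, java.util.function.Supplier<T> body) {
    Timer t = timers.startTimer( name );
    try {
        return body.get();
    }
    finally {
        t.stop();
    }
}

// Possible use, mirroring the "subClassSat" timing in isSubClassOf():
//     boolean sub = timed( kb.timers, "subClassSat", () -> !isSatisfiable( c, false ) );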