/**
 * Builds the expressive TBox for the given knowledge base, wiring up the
 * unfoldable partition (Tu) and the general partition (Tg) that absorption
 * maintains.
 *
 * @param kb the knowledge base this TBox belongs to (stored by reference)
 */
public TBoxExpImpl(final KnowledgeBase kb)
{
	_kb = kb; // fixed: the original assigned _kb = kb twice (redundant duplicate removed)
	_Tu = new TuBox(this);
	_Tg = new TgBox(this);
}
// Preprocesses the TBox for reasoning. The three steps are order-sensitive:
// (1) absorb general axioms out of Tg into the unfoldable part, (2) internalize
// whatever remains of Tg, and (3) normalize the unfoldable box Tu. Reordering
// these calls would change the result — do not restructure casually.
@Override public void prepare() { _Tg.absorb(); _Tg.internalize(); _Tu.normalize(); }
// NOTE(review): fragment of an absorption driver (likely absorbTerm) — the
// enclosing method signature is not visible in this chunk, so the leading
// "return true;" belongs to an earlier branch of that method. The code below
// tries absorption rules in sequence (nominal, II, III, V, VI, role/VII); the
// nested-if chain implies each later rule runs only when the previous one
// returned true, which is unusual for a rule cascade — TODO confirm the
// intended return conventions of absorbII..absorbVII against the full method
// before modifying this.
return true; _subLogger.finer("Absorb nominal"); if (!OpenlletOptions.USE_PSEUDO_NOMINALS && (OpenlletOptions.USE_NOMINAL_ABSORPTION || OpenlletOptions.USE_HASVALUE_ABSORPTION) && absorbNominal(set)) return true; _subLogger.finer("Absorb II"); if (absorbII(set)) if (absorbIII(set)) if (absorbV(set)) if (absorbVI(set)) if (OpenlletOptions.USE_ROLE_ABSORPTION && absorbRole(set)) absorbVII(set); _subLogger.finer("Finished absorbTerm"); return false;
/**
 * Absorbs an enumeration ({@code oneOf}) term: unwraps the nominal list that
 * is the first argument of the term and delegates to the list-based overload.
 *
 * @param oneOf   term whose first argument is an {@code ATermList} of nominals
 * @param c       the concept the enumeration is related to
 * @param explain axioms explaining this absorption step
 */
public void absorbOneOf(final ATermAppl oneOf, final ATermAppl c, final Set<ATermAppl> explain)
{
	final ATermList nominals = (ATermList) oneOf.getArgument(0);
	absorbOneOf(nominals, c, explain);
}
private void absorbSubClass(final ATermAppl sub, final ATermAppl sup, final Set<ATermAppl> axiomExplanation) { _subLogger.fine(() -> "Absorb: subClassOf(" + ATermUtils.toString(sub) + ", " + ATermUtils.toString(sup) + ")"); final HashSet<ATermAppl> set = new HashSet<>(); set.add(ATermUtils.nnf(sub)); set.add(ATermUtils.nnf(ATermUtils.makeNot(sup))); // *********************************** // Explanation-related axiom tracking: // This is used in absorbII() where actual absorption takes place // with primitive definition _explanation = new HashSet<>(); _explanation.addAll(axiomExplanation); // *********************************** absorbTerm(set); }
/**
 * Runs absorption over every term definition currently held by this Tg box:
 * each subclass axiom, and each direction of every equivalence axiom, is
 * passed to absorbSubClass, which moves what it can into the unfoldable box.
 */
public void absorb()
{
	_subLogger.fine("Absorption started");
	_subLogger.fine(() -> "Tg.size was " + _termhash.size() + " _Tu.size was " + _tbox._Tu.size());

	final Collection<TermDefinition> terms = _termhash.values();
	// _termhash.clear(); FIXME : why is this doesn't work ?
	// _termhash = new HashMap<>();
	// NOTE: `terms` above is a live view of the current map, so calling
	// _termhash.clear() here would empty `terms` before the loop runs — that
	// is why clear() "doesn't work". Reassigning the field leaves the old map
	// (and its view) intact while absorption repopulates the fresh one.
	_termhash = CollectionUtils.makeIdentityMap();

	for (final TermDefinition def : terms)
	{
		// Abort if the preprocessing time budget has been exceeded.
		_kb.getTimers().checkTimer("preprocessing");

		for (final ATermAppl subClassAxiom : def.getSubClassAxioms())
		{
			final ATermAppl c1 = (ATermAppl) subClassAxiom.getArgument(0);
			final ATermAppl c2 = (ATermAppl) subClassAxiom.getArgument(1);

			absorbSubClass(c1, c2, _tbox.getAxiomExplanation(subClassAxiom));
		}

		// An equivalence c1 ≡ c2 is absorbed as both inclusions c1 ⊑ c2 and c2 ⊑ c1.
		for (final ATermAppl eqClassAxiom : def.getEqClassAxioms())
		{
			final ATermAppl c1 = (ATermAppl) eqClassAxiom.getArgument(0);
			final ATermAppl c2 = (ATermAppl) eqClassAxiom.getArgument(1);

			absorbSubClass(c1, c2, _tbox.getAxiomExplanation(eqClassAxiom));
			absorbSubClass(c2, c1, _tbox.getAxiomExplanation(eqClassAxiom));
		}
	}

	_subLogger.fine(() -> "Tg.size is " + _termhash.size() + " _Tu.size is " + _tbox._Tu.size());
	_subLogger.fine("Absorption finished");
}
/**
 * Absorbs an enumeration ({@code oneOf}) term by extracting its nominal list
 * (argument 0 of the term) and forwarding to the {@code ATermList} overload.
 *
 * @param oneOf   the oneOf term carrying the nominal list
 * @param c       the related concept
 * @param explain explanation axioms for this absorption
 */
public void absorbOneOf(final ATermAppl oneOf, final ATermAppl c, final Set<ATermAppl> explain)
{
	absorbOneOf((ATermList) oneOf.getArgument(0), c, explain);
}
private void absorbSubClass(final ATermAppl sub, final ATermAppl sup, final Set<ATermAppl> axiomExplanation) { _subLogger.fine(() -> "Absorb: subClassOf(" + ATermUtils.toString(sub) + ", " + ATermUtils.toString(sup) + ")"); final HashSet<ATermAppl> set = new HashSet<>(); set.add(ATermUtils.nnf(sub)); set.add(ATermUtils.nnf(ATermUtils.makeNot(sup))); // *********************************** // Explanation-related axiom tracking: // This is used in absorbII() where actual absorption takes place // with primitive definition _explanation = new HashSet<>(); _explanation.addAll(axiomExplanation); // *********************************** absorbTerm(set); }
/**
 * Drives absorption across all term definitions in this Tg box: every
 * subclass axiom, and both directions of every equivalence axiom, is fed to
 * absorbSubClass to be moved into the unfoldable box where possible.
 */
public void absorb()
{
	_subLogger.fine("Absorption started");
	_subLogger.fine(() -> "Tg.size was " + _termhash.size() + " _Tu.size was " + _tbox._Tu.size());

	final Collection<TermDefinition> terms = _termhash.values();
	// _termhash.clear(); FIXME : why is this doesn't work ?
	// _termhash = new HashMap<>();
	// NOTE: `terms` is a live view backed by the current map; clear() would
	// therefore empty the very collection the loop below iterates. Replacing
	// the field with a fresh map keeps the old map (and this view) alive.
	_termhash = CollectionUtils.makeIdentityMap();

	for (final TermDefinition def : terms)
	{
		// Enforce the preprocessing time budget on every definition.
		_kb.getTimers().checkTimer("preprocessing");

		for (final ATermAppl subClassAxiom : def.getSubClassAxioms())
		{
			final ATermAppl c1 = (ATermAppl) subClassAxiom.getArgument(0);
			final ATermAppl c2 = (ATermAppl) subClassAxiom.getArgument(1);

			absorbSubClass(c1, c2, _tbox.getAxiomExplanation(subClassAxiom));
		}

		// c1 ≡ c2 is handled as the pair of inclusions c1 ⊑ c2 and c2 ⊑ c1.
		for (final ATermAppl eqClassAxiom : def.getEqClassAxioms())
		{
			final ATermAppl c1 = (ATermAppl) eqClassAxiom.getArgument(0);
			final ATermAppl c2 = (ATermAppl) eqClassAxiom.getArgument(1);

			absorbSubClass(c1, c2, _tbox.getAxiomExplanation(eqClassAxiom));
			absorbSubClass(c2, c1, _tbox.getAxiomExplanation(eqClassAxiom));
		}
	}

	_subLogger.fine(() -> "Tg.size is " + _termhash.size() + " _Tu.size is " + _tbox._Tu.size());
	_subLogger.fine("Absorption finished");
}
// NOTE(review): fragment of an absorption driver (likely absorbTerm); the
// enclosing method header is outside this chunk, so the leading
// "return true;" closes an earlier branch. Rules are tried in order
// (nominal, II, III, V, VI, role/VII). The nested ifs make each later rule
// conditional on the previous rule's return value — TODO verify the return
// convention of absorbII..absorbVII in the full source before touching this.
return true; _subLogger.finer("Absorb nominal"); if (!OpenlletOptions.USE_PSEUDO_NOMINALS && (OpenlletOptions.USE_NOMINAL_ABSORPTION || OpenlletOptions.USE_HASVALUE_ABSORPTION) && absorbNominal(set)) return true; _subLogger.finer("Absorb II"); if (absorbII(set)) if (absorbIII(set)) if (absorbV(set)) if (absorbVI(set)) if (OpenlletOptions.USE_ROLE_ABSORPTION && absorbRole(set)) absorbVII(set); _subLogger.finer("Finished absorbTerm"); return false;
// Preprocesses the TBox: absorb Tg axioms into the unfoldable part, then
// internalize what remains of Tg, then normalize Tu. The sequence is
// order-dependent — absorption must precede internalization, which must
// precede normalization.
@Override public void prepare() { _Tg.absorb(); _Tg.internalize(); _Tu.normalize(); }
// NOTE(review): fragment of a larger method — the enclosing signature and
// surrounding control flow are not visible in this chunk. (c2, c1) is
// absorbed twice on some paths: once unconditionally and once guarded by
// ATermUtils.isOneOf(c2). This may be statements collapsed together from
// different branches of the full method, or a genuine duplicate — TODO verify
// against the complete source before changing anything here.
_Tg.absorbOneOf(c1, c2, explain); _Tg.absorbOneOf(c2, c1, explain); if (ATermUtils.isOneOf(c2)) _Tg.absorbOneOf(c2, c1, explain); _Tg.absorbOneOf(sub, sup, explain); return true;
/**
 * Absorption rule VI (disjunction splitting): if the clause contains a term
 * whose NNF is a disjunction, remove that term from the clause and recursively
 * absorb one new clause per disjunct (the remaining clause plus the disjunct).
 *
 * @param set the clause being absorbed; mutated (disjunction removed) when the
 *            rule applies
 * @return true if a disjunction was found and split, false otherwise
 */
private boolean absorbVI(final Set<ATermAppl> set)
{
	for (final Iterator<ATermAppl> it = set.iterator(); it.hasNext();)
	{
		final ATermAppl candidate = it.next();
		final ATermAppl nnf = ATermUtils.nnf(candidate);

		if (!nnf.getAFun().equals(ATermUtils.ORFUN))
			continue;

		// Drop the disjunction itself, then branch on each of its disjuncts.
		it.remove();

		ATermList disjuncts = (ATermList) nnf.getArgument(0);
		while (!disjuncts.isEmpty())
		{
			final Set<ATermAppl> branch = new HashSet<>(set);
			branch.add((ATermAppl) disjuncts.getFirst());
			absorbTerm(branch);
			disjuncts = disjuncts.getNext();
		}

		return true;
	}

	return false;
}
/**
 * Creates the expressive TBox for the given knowledge base, initializing the
 * unfoldable (Tu) and general (Tg) partitions used by absorption.
 *
 * @param kb the owning knowledge base (stored by reference)
 */
public TBoxExpImpl(final KnowledgeBase kb)
{
	_kb = kb; // fixed: removed the redundant second `_kb = kb;` assignment
	_Tu = new TuBox(this);
	_Tg = new TgBox(this);
}
// NOTE(review): fragment from a larger method; enclosing signature and
// branch structure are not visible here. Note that (c2, c1) is absorbed both
// unconditionally and again under the ATermUtils.isOneOf(c2) guard — likely
// statements from separate branches collapsed together, but possibly a real
// duplicate — TODO confirm against the complete method before editing.
_Tg.absorbOneOf(c1, c2, explain); _Tg.absorbOneOf(c2, c1, explain); if (ATermUtils.isOneOf(c2)) _Tg.absorbOneOf(c2, c1, explain); _Tg.absorbOneOf(sub, sup, explain); return true;
/**
 * Absorption rule VI: splits a disjunction out of the clause. When some term's
 * NNF is an OR, that term is removed and absorption recurses once per
 * disjunct, each time on a copy of the remaining clause extended with the
 * disjunct.
 *
 * @param set the clause under absorption; the matched disjunction is removed
 * @return true when the rule fired, false when no disjunction was present
 */
private boolean absorbVI(final Set<ATermAppl> set)
{
	for (final Iterator<ATermAppl> it = set.iterator(); it.hasNext();)
	{
		final ATermAppl candidate = it.next();
		final ATermAppl nnf = ATermUtils.nnf(candidate);

		if (!nnf.getAFun().equals(ATermUtils.ORFUN))
			continue;

		// Remove the disjunction, then fan out over its disjuncts.
		it.remove();

		ATermList disjuncts = (ATermList) nnf.getArgument(0);
		while (!disjuncts.isEmpty())
		{
			final Set<ATermAppl> branch = new HashSet<>(set);
			branch.add((ATermAppl) disjuncts.getFirst());
			absorbTerm(branch);
			disjuncts = disjuncts.getNext();
		}

		return true;
	}

	return false;
}
// NOTE(review): lone statement from a larger method (enclosing signature not
// visible in this chunk). Delegates to absorbOneOf using the _explanation
// field populated earlier by absorbSubClass — TODO confirm how `list` and `c`
// are bound in the full method.
absorbOneOf(list, c, _explanation);
// NOTE(review): isolated statement; the enclosing method is outside this
// view. Forwards to absorbOneOf with the explanation set captured in the
// _explanation field — TODO verify the origin of `list` and `c` in the full
// source.
absorbOneOf(list, c, _explanation);