/**
 * Collects the "obvious" (cheaply decidable) instances of the given class among the
 * supplied individuals, without triggering a full consistency test per individual.
 *
 * @param cParam      the class expression to test (normalized internally)
 * @param individuals the candidate individuals to check
 * @return a candidate set mapping each individual to a known/unknown/false verdict
 */
@Override
public CandidateSet<ATermAppl> getObviousInstances(final ATermAppl cParam, final Collection<ATermAppl> individuals)
{
	final ATermAppl normalized = ATermUtils.normalize(cParam);

	// When the KB is already classified and the taxonomy knows this class, every
	// member of a subclass is trivially an instance; otherwise no subs are known.
	final Set<ATermAppl> knownSubs;
	if (_kb.isClassified() && _kb.getTaxonomy().contains(normalized))
		knownSubs = _kb.getTaxonomy().getFlattenedSubs(normalized, false);
	else
		knownSubs = Collections.<ATermAppl> emptySet();
	// owl:Nothing is a subclass of everything but has no instances — drop it.
	// (Removing an absent element from Collections.emptySet() is a no-op, not an error.)
	knownSubs.remove(ATermUtils.BOTTOM);

	final CandidateSet<ATermAppl> candidates = new CandidateSet<>();
	for (final ATermAppl individual : individuals)
		candidates.add(individual, isKnownType(individual, normalized, knownSubs));

	return candidates;
}
/**
 * Collects the "obvious" (cheaply decidable) instances of the given class among the
 * supplied individuals, without triggering a full consistency test per individual.
 *
 * @param cParam      the class expression to test (normalized internally)
 * @param individuals the candidate individuals to check
 * @return a candidate set mapping each individual to a known/unknown/false verdict
 */
@Override
public CandidateSet<ATermAppl> getObviousInstances(final ATermAppl cParam, final Collection<ATermAppl> individuals)
{
	final ATermAppl normalized = ATermUtils.normalize(cParam);

	// When the KB is already classified and the taxonomy knows this class, every
	// member of a subclass is trivially an instance; otherwise no subs are known.
	final Set<ATermAppl> knownSubs;
	if (_kb.isClassified() && _kb.getTaxonomy().contains(normalized))
		knownSubs = _kb.getTaxonomy().getFlattenedSubs(normalized, false);
	else
		knownSubs = Collections.<ATermAppl> emptySet();
	// owl:Nothing is a subclass of everything but has no instances — drop it.
	// (Removing an absent element from Collections.emptySet() is a no-op, not an error.)
	knownSubs.remove(ATermUtils.BOTTOM);

	final CandidateSet<ATermAppl> candidates = new CandidateSet<>();
	for (final ATermAppl individual : individuals)
		candidates.add(individual, isKnownType(individual, normalized, knownSubs));

	return candidates;
}
/**
 * Tries to answer an unsatisfiability explanation from the classified taxonomy's
 * cached explanation sets, avoiding a fresh reasoning call.
 *
 * @param unsatClass the class expression whose unsatisfiability is being explained
 * @return the cached explanation axioms, or {@code null} when nothing cached applies
 */
private Set<OWLAxiom> getCachedExplanation(final OWLClassExpression unsatClass)
{
	final OpenlletReasoner reasoner = getReasoner();

	// Cached explanations only exist once classification has run.
	if (!reasoner.getKB().isClassified())
		return null;

	// Only simple subclass questions are answerable from the taxonomy cache.
	final Pair<OWLClass, OWLClass> subClassPair = getSubClassAxiom(unsatClass);
	if (subClassPair == null)
		return null;

	final Set<Set<ATermAppl>> explanations = TaxonomyUtils.getSuperExplanations(reasoner.getKB().getTaxonomy(), reasoner.term(subClassPair.first), reasoner.term(subClassPair.second));
	if (explanations == null)
		return null;

	// Any single cached explanation suffices; take the first.
	final Set<OWLAxiom> axioms = convertExplanation(explanations.iterator().next());
	if (_logger.isLoggable(Level.FINE))
		_logger.fine("Cached explanation: " + axioms);
	return axioms;
}
/**
 * Tries to answer an unsatisfiability explanation from the classified taxonomy's
 * cached explanation sets, avoiding a fresh reasoning call.
 *
 * @param unsatClass the class expression whose unsatisfiability is being explained
 * @return the cached explanation axioms, or {@code null} when nothing cached applies
 */
private Set<OWLAxiom> getCachedExplanation(final OWLClassExpression unsatClass)
{
	final OpenlletReasoner reasoner = getReasoner();

	// Cached explanations only exist once classification has run.
	if (!reasoner.getKB().isClassified())
		return null;

	// Only simple subclass questions are answerable from the taxonomy cache.
	final Pair<OWLClass, OWLClass> subClassPair = getSubClassAxiom(unsatClass);
	if (subClassPair == null)
		return null;

	final Set<Set<ATermAppl>> explanations = TaxonomyUtils.getSuperExplanations(reasoner.getKB().getTaxonomy(), reasoner.term(subClassPair.first), reasoner.term(subClassPair.second));
	if (explanations == null)
		return null;

	// Any single cached explanation suffices; take the first.
	final Set<OWLAxiom> axioms = convertExplanation(explanations.iterator().next());
	if (_logger.isLoggable(Level.FINE))
		_logger.fine("Cached explanation: " + axioms);
	return axioms;
}
/**
 * Tries to answer an unsatisfiability explanation from the classified taxonomy's
 * cached explanation sets, avoiding a fresh reasoning call.
 *
 * @param unsatClass the class expression whose unsatisfiability is being explained
 * @return the cached explanation axioms, or {@code null} when nothing cached applies
 */
private Set<OWLAxiom> getCachedExplanation(final OWLClassExpression unsatClass)
{
	final OpenlletReasoner reasoner = getReasoner();

	// Cached explanations only exist once classification has run.
	if (!reasoner.getKB().isClassified())
		return null;

	// Only simple subclass questions are answerable from the taxonomy cache.
	final Pair<OWLClass, OWLClass> subClassPair = getSubClassAxiom(unsatClass);
	if (subClassPair == null)
		return null;

	final Set<Set<ATermAppl>> explanations = TaxonomyUtils.getSuperExplanations(reasoner.getKB().getTaxonomy(), reasoner.term(subClassPair.first), reasoner.term(subClassPair.second));
	if (explanations == null)
		return null;

	// Any single cached explanation suffices; take the first.
	final Set<OWLAxiom> axioms = convertExplanation(explanations.iterator().next());
	if (_logger.isLoggable(Level.FINE))
		_logger.fine("Cached explanation: " + axioms);
	return axioms;
}
// Classifies the ontology under the "reasonerClassify" timer, then copies the resulting
// ATermAppl taxonomy into an OWLClass taxonomy under the "buildClassHierarchy" timer.
// The trailing "};" closes an enclosing anonymous class whose declaration is outside this view.
@Override public void run() {
	_timers.execute("reasonerClassify", x -> { // classify ontology
		// Flush pending ontology changes into the KB before classifying.
		_reasoner.flush();
		_reasoner.getKB().classify();
	});
	if (_logger.isLoggable(Level.FINE)) {
		// Debug dump of the raw (ATermAppl-keyed) taxonomy to stderr.
		_logger.fine("Regular taxonomy:");
		new TreeTaxonomyPrinter<ATermAppl>().print(_reasoner.getKB().getTaxonomy(), new PrintWriter(System.err));
	}
	// Rebuild the OWL-API-facing class hierarchy from the freshly classified KB.
	_taxonomyImpl = _timers.execute("buildClassHierarchy", () -> buildClassHierarchy(_reasoner));
	if (_logger.isLoggable(Level.FINE)) {
		// Debug dump of the copied (OWLClass-keyed) taxonomy to stderr.
		_logger.fine("Copied taxonomy:");
		new TreeTaxonomyPrinter<OWLClass>().print(_taxonomyImpl, new PrintWriter(System.err));
	}
} };
// Classifies the ontology under the "reasonerClassify" timer, then copies the resulting
// ATermAppl taxonomy into an OWLClass taxonomy under the "buildClassHierarchy" timer.
// The trailing "};" closes an enclosing anonymous class whose declaration is outside this view.
@Override public void run() {
	_timers.execute("reasonerClassify", x -> { // classify ontology
		// Flush pending ontology changes into the KB before classifying.
		_reasoner.flush();
		_reasoner.getKB().classify();
	});
	if (_logger.isLoggable(Level.FINE)) {
		// Debug dump of the raw (ATermAppl-keyed) taxonomy to stderr.
		_logger.fine("Regular taxonomy:");
		new TreeTaxonomyPrinter<ATermAppl>().print(_reasoner.getKB().getTaxonomy(), new PrintWriter(System.err));
	}
	// Rebuild the OWL-API-facing class hierarchy from the freshly classified KB.
	_taxonomyImpl = _timers.execute("buildClassHierarchy", () -> buildClassHierarchy(_reasoner));
	if (_logger.isLoggable(Level.FINE)) {
		// Debug dump of the copied (OWLClass-keyed) taxonomy to stderr.
		_logger.fine("Copied taxonomy:");
		new TreeTaxonomyPrinter<OWLClass>().print(_taxonomyImpl, new PrintWriter(System.err));
	}
} };
// Dump the module reasoner's classified class hierarchy to stderr.
// NOTE(review): the PrintWriter wrapping System.err is created without autoflush and never
// closed here — confirm the printer flushes internally, or output may be lost.
new ClassTreePrinter().print(moduleReasoner.getKB().getTaxonomy(), new PrintWriter(System.err));
// Dump the module reasoner's classified class hierarchy to stderr.
// NOTE(review): the PrintWriter wrapping System.err is created without autoflush and never
// closed here — confirm the printer flushes internally, or output may be lost.
new ClassTreePrinter().print(moduleReasoner.getKB().getTaxonomy(), new PrintWriter(System.err));
// Replace 'subs' (declared above, outside this view) with the classified subclasses of c
// when the taxonomy already contains c; otherwise the prior value of 'subs' is kept.
if (_kb.isClassified() && _kb.getTaxonomy().contains(c))
	subs = _kb.getTaxonomy().getFlattenedSubs(c, false);
// owl:Nothing (BOTTOM) is a subclass of every class but can have no instances — exclude it.
// NOTE(review): this mutates the set returned by getFlattenedSubs — confirm it is not a
// shared/unmodifiable view of the taxonomy's internal state.
subs.remove(ATermUtils.BOTTOM);
// NOTE(review): as written this cannot compile — a local variable declaration may not be the
// sole statement of an un-braced 'if' in Java, and 'subs' would be out of scope in the final
// condition anyway. This chunk appears cut from a larger edit; confirm braces/scoping in the
// full file (likely intent: declare subs/eqs only when classified, then test intersections).
if (kb.isClassified())
	final Set<ATermAppl> subs = kb.getTaxonomy().getFlattenedSubs(clazz, false);
final Set<ATermAppl> eqs = kb.getAllEquivalentClasses(clazz);
// True when any inferred class is a (strict) subclass of, or equivalent to, 'clazz'.
if (SetUtils.intersects(inferred, subs) || SetUtils.intersects(inferred, eqs))
// NOTE(review): as written this cannot compile — a local variable declaration may not be the
// sole statement of an un-braced 'if' in Java, and 'subs' would be out of scope in the final
// condition anyway. This chunk appears cut from a larger edit; confirm braces/scoping in the
// full file (likely intent: declare subs/eqs only when classified, then test intersections).
if (kb.isClassified())
	final Set<ATermAppl> subs = kb.getTaxonomy().getFlattenedSubs(clazz, false);
final Set<ATermAppl> eqs = kb.getAllEquivalentClasses(clazz);
// True when any inferred class is a (strict) subclass of, or equivalent to, 'clazz'.
if (SetUtils.intersects(inferred, subs) || SetUtils.intersects(inferred, eqs))
// NOTE(review): as written this cannot compile — a local variable declaration may not be the
// sole statement of an un-braced 'if' in Java, and 'subs' would be out of scope in the final
// condition anyway. This chunk appears cut from a larger edit; confirm braces/scoping in the
// full file (likely intent: declare subs/eqs only when classified, then test intersections).
if (kb.isClassified())
	final Set<ATermAppl> subs = kb.getTaxonomy().getFlattenedSubs(clazz, false);
final Set<ATermAppl> eqs = kb.getAllEquivalentClasses(clazz);
// True when any inferred class is a (strict) subclass of, or equivalent to, 'clazz'.
if (SetUtils.intersects(inferred, subs) || SetUtils.intersects(inferred, eqs))
// Replace 'subs' (declared above, outside this view) with the classified subclasses of c
// when the taxonomy already contains c; otherwise the prior value of 'subs' is kept.
if (_kb.isClassified() && _kb.getTaxonomy().contains(c))
	subs = _kb.getTaxonomy().getFlattenedSubs(c, false);
// owl:Nothing (BOTTOM) is a subclass of every class but can have no instances — exclude it.
// NOTE(review): this mutates the set returned by getFlattenedSubs — confirm it is not a
// shared/unmodifiable view of the taxonomy's internal state.
subs.remove(ATermUtils.BOTTOM);
// Tail of an if/else whose condition precedes this view: use the classified (inferred)
// taxonomy when available, otherwise fall back to the told (asserted-only) hierarchy.
taxonomy = _kb.getTaxonomy();
else
	taxonomy = _kb.getToldTaxonomy();
// Tail of an if/else whose condition precedes this view: use the classified (inferred)
// taxonomy when available, otherwise fall back to the told (asserted-only) hierarchy.
taxonomy = _kb.getTaxonomy();
else
	taxonomy = _kb.getToldTaxonomy();
/**
 * Performs classification using the non-incremental (classic) classifier.
 * <p>
 * Checks consistency first (classification of an inconsistent ontology is meaningless),
 * then classifies the KB and prints the resulting class tree to standard output.
 *
 * @throws OpenlletCmdException if the ontology is inconsistent
 */
private void runClassicClassify()
{
	final KnowledgeBase knowledgeBase = getKB();

	startTask("consistency check");
	final boolean consistent = knowledgeBase.isConsistent();
	finishTask("consistency check");

	if (!consistent)
		throw new OpenlletCmdException("Ontology is inconsistent, run \"openllet explain\" to get the reason");

	startTask("classification");
	knowledgeBase.classify();
	finishTask("classification");

	final TaxonomyPrinter<ATermAppl> treePrinter = new ClassTreePrinter();
	treePrinter.print(knowledgeBase.getTaxonomy());
}
// Branch of an if/else whose condition precedes this view: evaluate the down-monotonic
// variable over the classified class taxonomy; the alternative branch follows below.
downMonotonic(_kb.getTaxonomy(), _kb.getClasses(), lhsDM, scLHS, scRHS, binding, direct, strict);
else
// NOTE(review): these four assertions on overlapping expressions contradict one another if run
// back-to-back (_a's types are asserted both empty and non-empty); presumably realization or
// type additions happen between them in the full test — confirm against the surrounding code.
// 'autoRealize' gates whether _b is expected to have been realized automatically.
assertTrue(TaxonomyUtils.getTypes(_kb.getTaxonomy(), _a, false).isEmpty());
assertFalse(TaxonomyUtils.getTypes(_kb.getTaxonomy(), _a, false).isEmpty());
assertEquals(autoRealize, !TaxonomyUtils.getTypes(_kb.getTaxonomy(), _b, false).isEmpty());
assertFalse(TaxonomyUtils.getTypes(_kb.getTaxonomy(), _a, false).isEmpty());
/**
 * Runs the full realization pipeline: consistency check, classification, realization,
 * then prints the realized class tree to standard output.
 *
 * @throws OpenlletCmdException if the ontology is inconsistent
 */
@Override
public void run()
{
	final KnowledgeBase knowledgeBase = getKB();

	startTask("consistency check");
	final boolean consistent = knowledgeBase.isConsistent();
	finishTask("consistency check");

	if (!consistent)
		throw new OpenlletCmdException("Ontology is inconsistent, run \"openllet explain\" to get the reason");

	startTask("classification");
	knowledgeBase.classify();
	finishTask("classification");

	// Realization (computing each individual's most specific types) requires a classified KB.
	startTask("realization");
	knowledgeBase.realize();
	finishTask("realization");

	final TaxonomyPrinter<ATermAppl> treePrinter = new ClassTreePrinter();
	treePrinter.print(knowledgeBase.getTaxonomy());
}