/**
 * Give access to the number of paths leading to each vertex. The result is
 * cached by the engine.
 *
 * @return a map associating to each vertex the number of paths leading to it
 * @throws SLIB_Ex_Critic
 */
public Map<URI, Integer> getnbPathLeadingToAllVertex() throws SLIB_Ex_Critic {

    if (cache.nbPathLeadingToAllVertices == null) {
        cache.nbPathLeadingToAllVertices = bottomNodeAccessor.computeNbPathLeadingToAllVertices();
    }
    return Collections.unmodifiableMap(cache.nbPathLeadingToAllVertices);
}
/**
 * Compute the set of reachable vertices for each vertex contained in the
 * graph according to the constraint associated to the instance in use.
 * Inclusive process, i.e. the process considers that vertex v is contained
 * in the set of vertices reachable from v.
 *
 * Optimized through a topological ordering.
 *
 * @return a map associating to each vertex (key) the set of vertices
 * reachable from it (value)
 * @throws SLIB_Ex_Critic
 */
public Map<URI, Set<URI>> getAllRVInc() throws SLIB_Ex_Critic {

    Map<URI, Set<URI>> allRVEx = getAllRV();

    for (URI v : allRVEx.keySet()) {
        allRVEx.get(v).add(v);
    }
    return allRVEx;
}
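// Usage sketch (hypothetical): rvf is assumed to be an RVF_DAG instance built on the
// graph, e.g. new RVF_DAG(g, wc). The inclusive variant guarantees that every vertex
// appears in its own set of reachable vertices.
Map<URI, Set<URI>> reachable = rvf.getAllRVInc();
for (Map.Entry<URI, Set<URI>> entry : reachable.entrySet()) {
    assert entry.getValue().contains(entry.getKey()); // inclusive: v is reachable from v
    System.out.println(entry.getKey() + " reaches " + entry.getValue().size() + " vertices");
}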
/**
 * Compute the inclusive descendants for all classes.
 *
 * @throws SLIB_Ex_Critic
 */
private synchronized void computeAllclassesDescendants() throws SLIB_Ex_Critic {
    cache.descendantsInc = bottomNodeAccessor.getAllRVInc();
}
/**
 * Compute the set of classes which are leaves, i.e. classes without
 * descendants according to the walk constraint of the bottom node accessor.
 */
private void computeLeaves() {

    classesLeaves = new HashSet<URI>();

    WalkConstraint wc = bottomNodeAccessor.getWalkConstraint();

    for (URI v : classes) {
        if (graph.getV(v, wc).isEmpty()) {
            classesLeaves.add(v);
        }
    }
}
/**
 * Count the number of paths leading to each vertex of the graph.
 *
 * @return a map associating to each vertex the number of paths leading to it
 * @throws SLIB_Ex_Critic
 */
public Map<URI, Integer> computeNbPathLeadingToAllVertices() throws SLIB_Ex_Critic {

    Map<URI, Integer> allVertices = new HashMap<URI, Integer>();

    for (URI v : g.getV()) {
        allVertices.put(v, 1);
    }
    return propagateNbOccurences(allVertices);
}
public SM_Engine(final G g, Set<URI> toTop, Set<URI> toBottom, InstanceAccessor iAccessor) throws SLIB_Ex_Critic {

    this.graph = g;

    WalkConstraint toTopWC = new WalkConstraintGeneric();
    WalkConstraint toBottomWC = new WalkConstraintGeneric();

    if (toTop != null) {
        toTopWC.addAcceptedTraversal(toTop, Direction.OUT);
        toBottomWC.addAcceptedTraversal(toTop, Direction.IN);
    } else {
        toTop = new HashSet<URI>(); // for the union below
    }

    if (toBottom != null) {
        toTopWC.addAcceptedTraversal(toBottom, Direction.IN);
        toBottomWC.addAcceptedTraversal(toBottom, Direction.OUT);
    } else {
        toBottom = new HashSet<URI>(); // for the union below
    }

    topNodeAccessor = new RVF_DAG(g, toTopWC);
    bottomNodeAccessor = new RVF_DAG(g, toBottomWC);

    // The classes are defined as the vertices which are linked by relationships
    // of the types specified to iterate over the DAG.
    Set<URI> c = new HashSet<URI>();
    for (E e : g.getE(SetUtils.union(toTop, toBottom))) {
        c.add(e.getSource());
        c.add(e.getTarget());
    }
    classes = c;

    this.instanceAccessor = iAccessor;

    initEngine();
}
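// Construction sketch (hypothetical): the graph g and the InstanceAccessor iAccessor are
// assumed to be already loaded; exact import locations may vary between SLIB versions.
// Traversing rdfs:subClassOf edges in the OUT direction leads towards the top of the taxonomy.
Set<URI> toTop = new HashSet<URI>();
toTop.add(RDFS.SUBCLASSOF); // org.openrdf.model.vocabulary.RDFS
SM_Engine engine = new SM_Engine(g, toTop, null, iAccessor); // no dedicated "toBottom" predicates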
/**
 * Compute for each class x the leaf classes which are subsumed by x.
 * Inclusive, i.e. a leaf contains itself in its set of reachable leaves.
 * The result is cached for fast access.
 *
 * @return the subsumed leaves for each class
 */
public synchronized Map<URI, Set<URI>> getReachableLeaves() {

    if (cache.reachableLeaves.isEmpty()) {

        Map<URI, Set<URI>> leaves = bottomNodeAccessor.getTerminalVertices();

        /* According to the documentation of the method used above, if there are
         * classes which are isolated (which do not establish rdfs:subClassOf
         * relationships in this case), the algorithm will not process them and
         * they will not be associated to an entry in the returned map. We
         * therefore add these classes to the result map.
         */
        for (URI c : classes) {
            if (!leaves.containsKey(c)) {
                Set<URI> s = new HashSet<URI>();
                s.add(c);
                leaves.put(c, s);
            }
        }
        cache.reachableLeaves = leaves;
    }
    return Collections.unmodifiableMap(cache.reachableLeaves);
}
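// Query sketch (hypothetical, engine assumed to be built as above): number of leaves
// subsumed by each class, e.g. as required by leaf-based intrinsic IC measures.
Map<URI, Set<URI>> reachableLeaves = engine.getReachableLeaves();
for (URI c : reachableLeaves.keySet()) {
    int nbLeaves = reachableLeaves.get(c).size(); // a leaf subsumes at least itself
    System.out.println(c + "\t" + nbLeaves);
}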
/**
 * Get the parents of a class, that is to say its direct ancestors.
 *
 * <br><b>Important</b>:<br>
 *
 * The direct parents of a class c are all the classes x linked to c by an
 * edge x RDFS.SUBCLASSOF c. The result is not cached by the engine. To
 * ensure result coherency the underlying taxonomic graph is required to be
 * transitively reduced, refer to the class documentation for more
 * information.
 *
 * @param v the focus vertex
 * @return the set of parents of the given vertex
 */
public Set<URI> getParents(URI v) {
    throwErrorIfNotClass(v);
    return topNodeAccessor.getNeighbors(v);
}
/**
 * Compute the set of descendants of the classes which are not shared
 * between the ancestors of the given concepts:
 *
 * ancDiff = { anc(a) union anc(b) } diff { anc(a) inter anc(b) }
 * result = Union (for c in ancDiff) : desc(c)
 *
 * NOT_CACHED
 *
 * @param a
 * @param b
 * @return all descendants of the ancestors of the given classes which are
 * not shared
 */
public Set<URI> getHypoOfAncDiff(URI a, URI b) {

    Set<URI> anc_a = getAncestorsInc(a);
    Set<URI> anc_b = getAncestorsInc(b);

    Set<URI> unionAncestors = SetUtils.union(anc_a, anc_b);
    Set<URI> interAncestors = SetUtils.intersection(anc_a, anc_b);

    // keep only the ancestors which are not shared
    Set<URI> ancsEx = unionAncestors;
    ancsEx.removeAll(interAncestors);

    Set<URI> hypoAncsEx = new HashSet<URI>();

    for (URI v : ancsEx) {
        Set<URI> descCurAnc = bottomNodeAccessor.getRV(v);
        hypoAncsEx = SetUtils.union(hypoAncsEx, descCurAnc);
    }
    return hypoAncsEx;
}
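// Illustration sketch (hypothetical): the same set algebra expressed through the public
// accessors, assuming engine.getDescendantsInc(URI) is available for desc(c); a and b
// are class URIs. Note that the method above relies on the internal bottom accessor.
Set<URI> ancA = engine.getAncestorsInc(a);
Set<URI> ancB = engine.getAncestorsInc(b);
Set<URI> ancDiff = new HashSet<URI>(ancA);
ancDiff.addAll(ancB);                        // anc(a) union anc(b)
Set<URI> shared = new HashSet<URI>(ancA);
shared.retainAll(ancB);                      // anc(a) inter anc(b)
ancDiff.removeAll(shared);                   // ancestors which are not shared
Set<URI> hypoOfAncDiff = new HashSet<URI>();
for (URI c : ancDiff) {
    hypoOfAncDiff.addAll(engine.getDescendantsInc(c)); // union of desc(c)
}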
/**
 * Give access to a view of the maximal depth of all classes. The result is
 * stored by the engine.
 *
 * @return a map containing the maximal depth of each class
 * @throws SLIB_Ex_Critic
 */
public Map<URI, Integer> getMaxDepths() throws SLIB_Ex_Critic {

    if (cache.maxDepths == null) {
        DepthAnalyserAG depthAnalyser = new DepthAnalyserAG(graph, bottomNodeAccessor.getWalkConstraint());
        cache.maxDepths = depthAnalyser.getVMaxDepths();
    }
    return Collections.unmodifiableMap(cache.maxDepths);
}
/**
 * Give access to a view of the minimal depth of all classes. The result is
 * stored by the engine.
 *
 * @return a map containing the minimal depth of each class
 * @throws SLIB_Ex_Critic
 */
public Map<URI, Integer> getMinDepths() throws SLIB_Ex_Critic {

    if (cache.minDepths == null) {
        DepthAnalyserAG depthAnalyser = new DepthAnalyserAG(graph, bottomNodeAccessor.getWalkConstraint());
        cache.minDepths = depthAnalyser.getVMinDepths();
    }
    return Collections.unmodifiableMap(cache.minDepths);
}
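// Query sketch (hypothetical, engine assumed to be built): minimal and maximal depths of a
// class, e.g. to normalize depth-based measures; cURI is a hypothetical class URI.
Map<URI, Integer> maxDepths = engine.getMaxDepths();
Map<URI, Integer> minDepths = engine.getMinDepths();
System.out.println("max depth of " + cURI + ": " + maxDepths.get(cURI));
System.out.println("min depth of " + cURI + ": " + minDepths.get(cURI));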
/**
 * Compute the inclusive ancestors for all classes.
 *
 * @throws SLIB_Ex_Critic
 */
private synchronized void computeAllclassesAncestors() throws SLIB_Ex_Critic {
    cache.ancestorsInc = topNodeAccessor.getAllRVInc();
}
/**
 * CACHED! Be careful: modifying the relation types considered requires
 * clearing the cache.
 *
 * @param a
 * @param b
 * @param weightingScheme
 * @return the weight of the shortest path between the two classes
 * considering the given weighting scheme
 * @throws SLIB_Ex_Critic
 */
public double getShortestPath(URI a, URI b, GWS weightingScheme) throws SLIB_Ex_Critic {

    if (cache.shortestPath.get(a) == null || cache.shortestPath.get(a).get(b) == null) {

        if (cache.shortestPath.get(a) == null) {
            cache.shortestPath.put(a, new ConcurrentHashMap<URI, Double>());
        }

        WalkConstraint wc = WalkConstraintUtils.copy(topNodeAccessor.getWalkConstraint());
        wc.addWalkconstraints(bottomNodeAccessor.getWalkConstraint());

        Dijkstra dijkstra = new Dijkstra(graph, wc, weightingScheme);
        double sp = dijkstra.shortestPath(a, b);
        cache.shortestPath.get(a).put(b, sp);
    }
    return cache.shortestPath.get(a).get(b);
}
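// Query sketch (hypothetical): a and b are class URIs and ws is any GWS implementation
// provided by the SLIB version in use (e.g. a uniform edge weighting scheme).
static double taxonomicDistance(SM_Engine engine, URI a, URI b, GWS ws) throws SLIB_Ex_Critic {
    // shortest path weights are cached per source vertex by the engine
    return engine.getShortestPath(a, b, ws);
}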
/**
 * CACHED
 *
 * @param a
 * @param weightingScheme
 * @return a map containing the weight of the shortest path linking the
 * given vertex to each reachable vertex
 *
 * @throws SLIB_Ex_Critic
 */
public synchronized Map<URI, Double> getAllShortestPath(URI a, GWS weightingScheme) throws SLIB_Ex_Critic {

    if (cache.shortestPath.get(a) == null) {

        WalkConstraint wc = WalkConstraintUtils.copy(topNodeAccessor.getWalkConstraint());
        wc.addWalkconstraints(bottomNodeAccessor.getWalkConstraint());

        Dijkstra dijkstra = new Dijkstra(graph, wc, weightingScheme);
        ConcurrentHashMap<URI, Double> minDists_cA = dijkstra.shortestPath(a);
        cache.shortestPath.put(a, minDists_cA);
    }
    return cache.shortestPath.get(a);
}
/**
 * Get the root of the taxonomic graph contained in the graph associated to
 * the engine. An exception will be thrown if the taxonomic graph contains
 * multiple roots. The result is cached.
 *
 * @return the class corresponding to the root
 * @throws SLIB_Ex_Critic
 */
public synchronized URI getRoot() throws SLIB_Ex_Critic {

    if (root == null) {
        Set<URI> roots = new ValidatorDAG().getDAGRoots(graph, topNodeAccessor.getWalkConstraint());

        if (roots.size() != 1) {
            throw new SLIB_Ex_Critic("Multiple roots detected in the underlying taxonomic graph of graph " + graph.getURI());
        }
        root = roots.iterator().next();
    }
    return root;
}
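// Sketch (hypothetical): the call fails with SLIB_Ex_Critic when the taxonomy has several
// roots; in that case a virtual root is usually added to the graph before building the engine.
try {
    URI root = engine.getRoot();
    System.out.println("unique root: " + root);
} catch (SLIB_Ex_Critic ex) {
    System.err.println("the taxonomic graph is not rooted by a single class: " + ex.getMessage());
}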
// Retrieve the roots of the taxonomic graph and compute a topological ordering using
// a DFS started from the roots (the traversal is performed on the inverse of the
// walk constraint used to reach the top).
Set<URI> roots = new ValidatorDAG().getDAGRoots(graph, topNodeAccessor.getWalkConstraint());

DFS dfs = new DFS(graph, roots, WalkConstraintUtils.getInverse(topNodeAccessor.getWalkConstraint(), false));
List<URI> topoOrdering = dfs.getTraversalOrder();

// Iterate over the edges of class c which are admitted by the walk constraint used to reach the top
for (E e : graph.getE(c, topNodeAccessor.getWalkConstraint())) {
    // ...
}
/**
 * NOT_CACHED
 *
 * @param a
 * @param b
 * @param weightingScheme
 * @return the URI associated to the Most Specific Ancestor of the two given
 * classes
 * @throws SLIB_Ex_Critic
 */
public URI getMSA(URI a, URI b, GWS weightingScheme) throws SLIB_Ex_Critic {

    Dijkstra dijkstra = new Dijkstra(graph, topNodeAccessor.getWalkConstraint(), weightingScheme);

    URI msa_pk = SimDagEdgeUtils.getMSA_pekar_staab(getRoot(),
            getAllShortestPath(a, weightingScheme),
            getAllShortestPath(b, weightingScheme),
            getAncestorsInc(a),
            getAncestorsInc(b),
            dijkstra);
    return msa_pk;
}
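// Sketch (hypothetical): retrieving the Most Specific Ancestor of two classes following
// Pekar & Staab; a and b are class URIs and ws is a GWS weighting scheme, as above.
static URI mostSpecificAncestor(SM_Engine engine, URI a, URI b, GWS ws) throws SLIB_Ex_Critic {
    URI msa = engine.getMSA(a, b, ws);
    // the MSA is necessarily a shared (inclusive) ancestor of both classes
    assert engine.getAncestorsInc(a).contains(msa) && engine.getAncestorsInc(b).contains(msa);
    return msa;
}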