public static void getFeatures(SemanticGraph graph, IndexedWord vertex, boolean isHead, Collection<String> features, GrammaticalRelation reln){ if(isHead){ List<Pair<GrammaticalRelation, IndexedWord>> pt = graph.parentPairs(vertex); for(Pair<GrammaticalRelation, IndexedWord> en: pt) { features.add("PARENTREL-" + en.first()); } } else{ //find the relation to the parent if(reln == null){ List<SemanticGraphEdge> parents = graph.getOutEdgesSorted(vertex); if(parents.size() > 0) reln = parents.get(0).getRelation(); } if(reln != null) features.add("REL-" + reln.getShortName()); } //System.out.println("For graph " + graph.toFormattedString() + " and vertex " + vertex + " the features are " + features); }
private void formatSGNodeOnelineHelper(SemanticGraph sg, IndexedWord node, StringBuilder sb, Set<IndexedWord> usedOneline) { usedOneline.add(node); boolean isntLeaf = (sg.outDegree(node) > 0); if (isntLeaf) { sb.append(LPAREN); } sb.append(formatLabel(node)); for (SemanticGraphEdge depcy : sg.getOutEdgesSorted(node)) { IndexedWord dep = depcy.getDependent(); sb.append(SPACE); if (showRelns) { sb.append(depcy.getRelation()); sb.append(COLON); } if (!usedOneline.contains(dep) && !used.contains(dep)) { // avoid infinite loop formatSGNodeOnelineHelper(sg, dep, sb, usedOneline); } else { sb.append(formatLabel(dep)); } } if (isntLeaf) { sb.append(RPAREN); } }
// Fragment (incomplete in this view — starts and ends mid-method): appends the node's
// POS tag, then for each sorted outgoing edge appends " <relation>>"; the loop body
// continues beyond this snippet.
sb.append(node.tag()); for (SemanticGraphEdge edge : getOutEdgesSorted(node)) { IndexedWord target = edge.getTarget(); sb.append(" ").append(edge.getRelation()).append(">");
// Fragment (incomplete in this view): widens the indentation level, then begins
// emitting each sorted dependent of `node` on its own line; the loop body continues
// beyond this snippet.
spaces += indent; for (SemanticGraphEdge depcy : sg.getOutEdgesSorted(node)) { IndexedWord dep = depcy.getDependent(); out.append("\n");
/** A helper method for * {@link NaturalLogicAnnotator#getModifierSubtreeSpan(edu.stanford.nlp.semgraph.SemanticGraph, edu.stanford.nlp.ling.IndexedWord)} and * {@link NaturalLogicAnnotator#getSubtreeSpan(edu.stanford.nlp.semgraph.SemanticGraph, edu.stanford.nlp.ling.IndexedWord)}. */ private static Pair<Integer, Integer> getGeneralizedSubtreeSpan(SemanticGraph tree, IndexedWord root, Set<String> validArcs) { int min = root.index(); int max = root.index(); Queue<IndexedWord> fringe = new LinkedList<>(); for (SemanticGraphEdge edge : tree.outgoingEdgeIterable(root)) { String edgeLabel = edge.getRelation().getShortName(); if ((validArcs == null || validArcs.contains(edgeLabel)) && !"punct".equals(edgeLabel)) { fringe.add(edge.getDependent()); } } while (!fringe.isEmpty()) { IndexedWord node = fringe.poll(); min = Math.min(node.index(), min); max = Math.max(node.index(), max); // ignore punctuation fringe.addAll(tree.getOutEdgesSorted(node).stream().filter(edge -> edge.getGovernor().equals(node) && !(edge.getGovernor().equals(edge.getDependent())) && !"punct".equals(edge.getRelation().getShortName())).map(SemanticGraphEdge::getDependent).collect(Collectors.toList())); } return Pair.makePair(min, max + 1); }
// Fragment (incomplete in this view): iterates the root's sorted outgoing edges,
// capturing both the short relation name and the full relation string; the loop body
// continues beyond this snippet.
for (SemanticGraphEdge edge : parse.getOutEdgesSorted(root)) { String shortName = edge.getRelation().getShortName(); String name = edge.getRelation().toString();
/**
 * Serializes the vertices of {@code graph} as a writer over a "nodes" array.
 * For each token in sorted vertex order: an "id" (via getNodeIndex), character
 * "start"/"end" offsets for original tokens, or "id" plus a "source" token index for
 * copy nodes (copyCount() > 0); the surface "form"; a "top" flag when the token is a
 * graph root; "xpos"/"upos"/"lemma" properties; and one "edges" entry per sorted
 * outgoing dependency, carrying the target node id and the relation label.
 * Returns null when {@code graph} is null.
 * NOTE(review): vertexList.get(vertexList.size() - 1) throws IndexOutOfBoundsException
 * when the graph is non-null but empty — confirm callers never pass an empty graph.
 */
private static Object getNodes(SemanticGraph graph) { if(graph != null) { List<IndexedWord> vertexList = graph.vertexListSorted(); int maxIndex = vertexList.get(vertexList.size() - 1).index(); return vertexList.stream().map( (IndexedWord token) -> (Consumer<Writer>) node -> { if (token.copyCount() == 0) { node.set("id", getNodeIndex(token, maxIndex)); node.set("start", token.get(CoreAnnotations.CharacterOffsetBeginAnnotation.class)); node.set("end", token.get(CoreAnnotations.CharacterOffsetEndAnnotation.class)); } else { node.set("id", getNodeIndex(token, maxIndex)); node.set("source", token.index()); } node.set("form", token.word()); if (graph.getRoots().contains(token)) node.set("top", true); node.set("properties", (Consumer<Writer>) propertiesWriter -> { propertiesWriter.set("xpos", token.tag()); propertiesWriter.set("upos", token.get(CoreAnnotations.CoarseTagAnnotation.class)); propertiesWriter.set("lemma", token.lemma()); }); node.set("edges", graph.getOutEdgesSorted(token).stream().map( (SemanticGraphEdge dep) -> (Consumer<Writer>) edge -> { edge.set("target", getNodeIndex(dep.getDependent(), maxIndex)); edge.set("label", dep.getRelation().toString()); })); } ); } else { return null; } }
// Fragment (incomplete in this view): re-parents every outgoing edge of w1 under gov2,
// preserving dependent, relation, weight, and the "extra" flag.
// NOTE(review): removes edges while iterating — presumably getOutEdgesSorted returns a
// snapshot list rather than a live view; verify no ConcurrentModificationException.
for (SemanticGraphEdge edge2 : sg.getOutEdgesSorted(w1)) { sg.removeEdge(edge2); sg.addEdge(gov2, edge2.getDependent(), edge2.getRelation(), edge2.getWeight(), edge2.isExtra());
// Fragment (incomplete in this view): re-parents every outgoing edge of w2 under gov2,
// preserving dependent, relation, weight, and the "extra" flag.
// NOTE(review): removes edges while iterating — presumably getOutEdgesSorted returns a
// snapshot list rather than a live view; verify no ConcurrentModificationException.
for (SemanticGraphEdge edge2 : sg.getOutEdgesSorted(w2)) { sg.removeEdge(edge2); sg.addEdge(gov2, edge2.getDependent(), edge2.getRelation(), edge2.getWeight(), edge2.isExtra());
/**
 * Returns the set of direct dependents (children) of {@code node} in {@code semanticGraph}.
 *
 * @param semanticGraph the graph to inspect
 * @param node          the governor whose dependents are wanted
 * @return a new mutable set of the node's dependents (empty when it has none)
 */
public static Set<IndexedWord> getChildren(SemanticGraph semanticGraph, IndexedWord node) {
  Set<IndexedWord> children = new HashSet<>();
  for (SemanticGraphEdge outEdge : semanticGraph.getOutEdgesSorted(node)) {
    children.add(outEdge.getDependent());
  }
  return children;
}
/**
 * Collects into {@code list} every node reachable from {@code node} along outgoing
 * (governor-to-dependent) edges, in depth-first, sorted-edge order. The set semantics
 * of {@code list} de-duplicate nodes reached by more than one path.
 *
 * @param semanticGraph the graph to traverse
 * @param node          the starting governor (not itself added)
 * @param list          accumulator for the descendants found (mutated)
 * @param iter          current recursion depth; pass 0 at the top-level call
 * @throws Exception when the depth exceeds MAX_ITER (guards against cyclic graphs)
 */
public static void getChildrenRecursive(SemanticGraph semanticGraph, IndexedWord node, LinkedHashSet<IndexedWord> list, int iter) throws Exception {
  if (iter > MAX_ITER) {
    throw new Exception("Too many iterations");
  }
  for (SemanticGraphEdge outEdge : semanticGraph.getOutEdgesSorted(node)) {
    IndexedWord dependent = outEdge.getDependent();
    list.add(dependent);
    getChildrenRecursive(semanticGraph, dependent, list, iter + 1);
  }
}
public static void getFeatures(SemanticGraph graph, IndexedWord vertex, boolean isHead, Collection<String> features, GrammaticalRelation reln){ if(isHead){ List<Pair<GrammaticalRelation, IndexedWord>> pt = graph.parentPairs(vertex); for(Pair<GrammaticalRelation, IndexedWord> en: pt) { features.add("PARENTREL-" + en.first()); } } else{ //find the relation to the parent if(reln == null){ List<SemanticGraphEdge> parents = graph.getOutEdgesSorted(vertex); if(parents.size() > 0) reln = parents.get(0).getRelation(); } if(reln != null) features.add("REL-" + reln.getShortName()); } //System.out.println("For graph " + graph.toFormattedString() + " and vertex " + vertex + " the features are " + features); }
/**
 * Extracts the person+number combinations of the verbal morphological analysis
 * attached to {@code word} (or, when "VA"-tagged auxiliary dependents exist, to the
 * leftmost such dependent reachable from it).
 * <p>
 * The chosen token's MorphoAnnotation is whitespace-split into alternative analyses;
 * each '+'-separated analysis whose second field is "v" contributes
 * {@code field5 + "+" + field6} (person and number) to the result.
 *
 * @param semanticGraph dependency graph of the sentence
 * @param word          the verb whose persons are wanted
 * @param sentence      the sentence, used to look tokens up by index
 * @return sorted set of "person+number" strings; empty when no analysis applies
 */
static TreeSet<String> getPersons(SemanticGraph semanticGraph, IndexedWord word, CoreMap sentence) {
  Stack<IndexedWord> wordsToCheck = new Stack<>();
  wordsToCheck.add(word);
  int index = word.index();
  // Walk down through "VA" (auxiliary) dependents, keeping the smallest token index seen.
  while (!wordsToCheck.isEmpty()) {
    IndexedWord thisWord = wordsToCheck.pop();
    for (SemanticGraphEdge edge : semanticGraph.getOutEdgesSorted(thisWord)) {
      IndexedWord dependent = edge.getDependent();
      String pos = dependent.get(CoreAnnotations.PartOfSpeechAnnotation.class);
      // ROBUSTNESS: constant-first equals — the original pos.equals("VA") threw NPE
      // on tokens with no POS annotation.
      if ("VA".equals(pos)) {
        index = Math.min(index, dependent.index());
        wordsToCheck.push(dependent);
      }
    }
  }
  CoreLabel token = sentence.get(CoreAnnotations.TokensAnnotation.class).get(index - 1);
  String morpho = token.get(DigiMorphAnnotations.MorphoAnnotation.class);
  TreeSet<String> persons = new TreeSet<>();
  if (morpho == null) {
    // ROBUSTNESS: no morphological analysis for this token — the original threw NPE here.
    return persons;
  }
  String[] parts = morpho.split("\\s+");
  for (int i = 1; i < parts.length; i++) {
    String[] vParts = parts[i].split("\\+");
    // ROBUSTNESS: guard malformed analyses — fields 1 (coarse tag), 5 and 6
    // (person, number) were read unguarded and could throw ArrayIndexOutOfBoundsException.
    if (vParts.length < 7 || !vParts[1].equals("v")) {
      continue;
    }
    persons.add(vParts[5] + "+" + vParts[6]);
  }
  return persons;
}
protected static void addChildren(HashMultimap<Integer, Integer> children, Set<Integer> stack, IndexedWord current, SemanticGraph semanticGraph, Set<IndexedWord> used) { List<SemanticGraphEdge> edges = semanticGraph.getOutEdgesSorted(current); used.add(current); int index = current.index(); for (Integer integer : stack) { children.put(integer, index); } Set<Integer> newStack = new HashSet<>(stack); newStack.add(index); for (SemanticGraphEdge edge : edges) { IndexedWord target = edge.getTarget(); // String relation = edge.getRelation().toString(); // if (relation.equals("punct")) { // continue; // } if (!used.contains(target)) { addChildren(children, newStack, target, semanticGraph, used); } } }
private void formatSGNodeOnelineHelper(SemanticGraph sg, IndexedWord node, StringBuilder sb, Set<IndexedWord> usedOneline) { usedOneline.add(node); boolean isntLeaf = (sg.outDegree(node) > 0); if (isntLeaf) { sb.append(LPAREN); } sb.append(formatLabel(node)); for (SemanticGraphEdge depcy : sg.getOutEdgesSorted(node)) { IndexedWord dep = depcy.getDependent(); sb.append(SPACE); if (showRelns) { sb.append(depcy.getRelation()); sb.append(COLON); } if (!usedOneline.contains(dep) && !used.contains(dep)) { // avoid infinite loop formatSGNodeOnelineHelper(sg, dep, sb, usedOneline); } else { sb.append(formatLabel(dep)); } } if (isntLeaf) { sb.append(RPAREN); } }
private void formatSGNodeOnelineHelper(SemanticGraph sg, IndexedWord node, StringBuilder sb, Set<IndexedWord> usedOneline) { usedOneline.add(node); boolean isntLeaf = (sg.outDegree(node) > 0); if (isntLeaf) { sb.append(LPAREN); } sb.append(formatLabel(node)); for (SemanticGraphEdge depcy : sg.getOutEdgesSorted(node)) { IndexedWord dep = depcy.getDependent(); sb.append(SPACE); if (showRelns) { sb.append(depcy.getRelation()); sb.append(COLON); } if (!usedOneline.contains(dep) && !used.contains(dep)) { // avoid infinite loop formatSGNodeOnelineHelper(sg, dep, sb, usedOneline); } else { sb.append(formatLabel(dep)); } } if (isntLeaf) { sb.append(RPAREN); } }
private void formatSGNodeOnelineHelper(SemanticGraph sg, IndexedWord node, StringBuilder sb, Set<IndexedWord> usedOneline) { usedOneline.add(node); boolean isntLeaf = (sg.outDegree(node) > 0); if (isntLeaf) { sb.append(LPAREN); } sb.append(formatLabel(node)); for (SemanticGraphEdge depcy : sg.getOutEdgesSorted(node)) { IndexedWord dep = depcy.getDependent(); sb.append(SPACE); if (showRelns) { sb.append(depcy.getRelation()); sb.append(COLON); } if (!usedOneline.contains(dep) && !used.contains(dep)) { // avoid infinite loop formatSGNodeOnelineHelper(sg, dep, sb, usedOneline); } else { sb.append(formatLabel(dep)); } } if (isntLeaf) { sb.append(RPAREN); } }
// Fragment (incomplete in this view — starts and ends mid-method): appends the node's
// POS tag, then for each sorted outgoing edge appends " <relation>:"; the loop body
// continues beyond this snippet.
sb.append(node.tag()); for (SemanticGraphEdge edge : getOutEdgesSorted(node)) { IndexedWord target = edge.getTarget(); sb.append(" ").append(edge.getRelation()).append(":");
// Fragment (incomplete in this view — starts and ends mid-method): appends the node's
// POS tag, then for each sorted outgoing edge appends " <relation>>"; the loop body
// continues beyond this snippet.
sb.append(node.tag()); for (SemanticGraphEdge edge : getOutEdgesSorted(node)) { IndexedWord target = edge.getTarget(); sb.append(" ").append(edge.getRelation()).append(">");
/** A helper method for * {@link NaturalLogicAnnotator#getModifierSubtreeSpan(edu.stanford.nlp.semgraph.SemanticGraph, edu.stanford.nlp.ling.IndexedWord)} and * {@link NaturalLogicAnnotator#getSubtreeSpan(edu.stanford.nlp.semgraph.SemanticGraph, edu.stanford.nlp.ling.IndexedWord)}. */ private static Pair<Integer, Integer> getGeneralizedSubtreeSpan(SemanticGraph tree, IndexedWord root, Set<String> validArcs) { int min = root.index(); int max = root.index(); Queue<IndexedWord> fringe = new LinkedList<>(); for (SemanticGraphEdge edge : tree.outgoingEdgeIterable(root)) { String edgeLabel = edge.getRelation().getShortName(); if ((validArcs == null || validArcs.contains(edgeLabel)) && !"punct".equals(edgeLabel)) { fringe.add(edge.getDependent()); } } while (!fringe.isEmpty()) { IndexedWord node = fringe.poll(); min = Math.min(node.index(), min); max = Math.max(node.index(), max); // ignore punctuation fringe.addAll(tree.getOutEdgesSorted(node).stream().filter(edge -> edge.getGovernor().equals(node) && !(edge.getGovernor().equals(edge.getDependent())) && !"punct".equals(edge.getRelation().getShortName())).map(SemanticGraphEdge::getDependent).collect(Collectors.toList())); } return Pair.makePair(min, max + 1); }