/**
 * Recursively accumulates {@code curr} and every node reachable from it via
 * child edges into {@code descendantSet}. The set doubles as the visited
 * marker, so cycles in the graph cannot cause infinite recursion.
 */
private void descendantsHelper(IndexedWord curr, Set<IndexedWord> descendantSet) {
  if (!descendantSet.add(curr)) {
    return; // already visited
  }
  for (IndexedWord child : getChildren(curr)) {
    descendantsHelper(child, descendantSet);
  }
}
/**
 * Returns the children of the given vertex as a sorted list.
 *
 * @param vertex the vertex whose children are requested
 * @return a new, independently mutable list of the children of
 *     {@code vertex}, sorted in their natural order
 * @throws IllegalArgumentException if {@code vertex} is not in this graph
 */
public List<IndexedWord> getChildList(IndexedWord vertex) {
  if (!containsVertex(vertex)) {
    // Fix: the original threw a message-less exception; include the
    // offending vertex so callers can diagnose the failure.
    throw new IllegalArgumentException("Vertex " + vertex + " is not in this graph");
  }
  List<IndexedWord> result = new ArrayList<>(getChildren(vertex));
  Collections.sort(result);
  return result;
}
/**
 * DFS step of the acyclicity check. Returns true iff a cycle is reachable
 * from {@code current}.
 *
 * @param current node being expanded
 * @param unused  nodes not yet visited by any DFS tree
 * @param trail   nodes on the active DFS path; a back edge into this set
 *                means a cycle
 */
private boolean isDagHelper(IndexedWord current, Set<IndexedWord> unused, Set<IndexedWord> trail) {
  if (trail.contains(current)) {
    // Back edge onto the active path: cycle found.
    return true;
  }
  if (!unused.contains(current)) {
    // Already fully explored from another branch; nothing new here.
    return false;
  }
  unused.remove(current);
  trail.add(current);
  for (IndexedWord child : getChildren(current)) {
    if (isDagHelper(child, unused, trail)) {
      // Propagate immediately; trail is deliberately left as-is, matching
      // the early-return behavior of the original traversal.
      return true;
    }
  }
  trail.remove(current);
  return false;
}
/**
 * Returns all nodes reachable from {@code root} by following child edges,
 * including {@code root} itself (breadth-first traversal).
 *
 * @param root the root node of the subgraph
 * @return all nodes in the subgraph rooted at {@code root}
 */
public Set<IndexedWord> getSubgraphVertices(IndexedWord root) {
  Set<IndexedWord> reachable = wordMapFactory.newSet();
  reachable.add(root);
  List<IndexedWord> frontier = Generics.newLinkedList();
  frontier.add(root);
  while (!frontier.isEmpty()) {
    IndexedWord node = frontier.remove(0);
    for (IndexedWord child : this.getChildren(node)) {
      // add() returns false for already-seen nodes, so each node is
      // enqueued at most once.
      if (reachable.add(child)) {
        frontier.add(child);
      }
    }
  }
  return reachable;
}
/** * Method for getting the siblings of a particular node. Siblings are the * other children of your parent, where parent is determined as the parent * returned by getParent * * @return collection of sibling nodes (does not include vertex) * the collection is empty if your parent is null */ public Collection<IndexedWord> getSiblings(IndexedWord vertex) { IndexedWord parent = this.getParent(vertex); if (parent != null) { Set<IndexedWord> result = wordMapFactory.newSet(); result.addAll(this.getChildren(parent)); result.remove(vertex);//remove this vertex - you're not your own sibling return result; } else { return Collections.emptySet(); } }
/**
 * Collects into {@code descendantSet} every node reachable from {@code w}
 * whose incoming edge matches {@code relation} by string comparison.
 * Traversal continues through non-matching edges too; {@code seenNodes}
 * guards against revisiting nodes.
 */
private void descendantsWithReln(SemanticGraph g, IndexedWord w, String relation,
                                 List<IndexedWord> seenNodes, List<IndexedWord> descendantSet) {
  if (seenNodes.contains(w)) {
    return;
  }
  seenNodes.add(w);
  if (descendantSet.contains(w)) {
    return;
  }
  // Optionally prune nodes whose POS tag is on the ignore list.
  if (ignoreCommonTags && ignoreTags.contains(w.tag().trim())) {
    return;
  }
  for (IndexedWord child : g.getChildren(w)) {
    // A child is collected once per matching edge, then expanded
    // regardless of whether any of its edges matched.
    for (SemanticGraphEdge edge : g.getAllEdges(w, child)) {
      if (edge.getRelation().toString().equals(relation)) {
        descendantSet.add(child);
      }
    }
    descendantsWithReln(g, child, relation, seenNodes, descendantSet);
  }
}
/**
 * Performs a cull for the descendants of the given node in the graph,
 * subject to the tabu nodes to avoid, relations to avoid crawling over,
 * and child nodes to avoid traversing to based upon a predicate test.
 * Accepted nodes accumulate in {@code descendantSet}.
 */
private static void tabuDescendantsHelper(SemanticGraph sg, IndexedWord curr,
                                          Set<IndexedWord> descendantSet,
                                          Collection<IndexedWord> tabu,
                                          Collection<GrammaticalRelation> relnsToAvoid,
                                          Predicate<IndexedWord> tabuTest) {
  // Never enter tabu territory, and never expand a node twice.
  if (tabu.contains(curr) || descendantSet.contains(curr)) {
    return;
  }
  descendantSet.add(curr);
  for (IndexedWord child : sg.getChildren(curr)) {
    for (SemanticGraphEdge edge : sg.getAllEdges(curr, child)) {
      if (relnsToAvoid != null && relnsToAvoid.contains(edge.getRelation())) {
        continue; // forbidden relation; predicate deliberately not consulted
      }
      if (tabuTest != null && tabuTest.test(edge.getDependent())) {
        continue; // predicate vetoes this dependent
      }
      // Recurse once per acceptable edge; the visited check above keeps
      // parallel edges from causing duplicate expansion.
      tabuDescendantsHelper(sg, child, descendantSet, tabu, relnsToAvoid, tabuTest);
    }
  }
}
/**
 * Performs a cull for the descendants of the given node in the graph,
 * subject to the tabu nodes to avoid, relations to avoid crawling over,
 * and child nodes to avoid traversing to based upon a predicate test
 * (this overload's predicate also receives the graph itself).
 * Accepted nodes accumulate in {@code descendantSet}.
 */
private static void tabuDescendantsHelper(SemanticGraph sg, IndexedWord curr,
                                          Set<IndexedWord> descendantSet,
                                          Collection<IndexedWord> tabu,
                                          Collection<GrammaticalRelation> relnsToAvoid,
                                          BiPredicate<IndexedWord, SemanticGraph> tabuTest) {
  // Never enter tabu territory, and never expand a node twice.
  if (tabu.contains(curr) || descendantSet.contains(curr)) {
    return;
  }
  descendantSet.add(curr);
  for (IndexedWord child : sg.getChildren(curr)) {
    for (SemanticGraphEdge edge : sg.getAllEdges(curr, child)) {
      if (relnsToAvoid != null && relnsToAvoid.contains(edge.getRelation())) {
        continue; // forbidden relation; predicate deliberately not consulted
      }
      if (tabuTest != null && tabuTest.test(edge.getDependent(), sg)) {
        continue; // predicate vetoes this dependent
      }
      // Recurse once per acceptable edge; the visited check above keeps
      // parallel edges from causing duplicate expansion.
      tabuDescendantsHelper(sg, child, descendantSet, tabu, relnsToAvoid, tabuTest);
    }
  }
}
// Advances this iterator to the next candidate sibling of `node` (a child
// of one of node's parents) that satisfies the ordering test, storing it
// in `next`; stores null once the candidates are exhausted.
@Override public void advance() {
  // Sentinel word: there is nothing to iterate over.
  if (node.equals(IndexedWord.NO_WORD)) { next = null; return; }
  // Lazily build the candidate set on the first call: all children of all
  // of node's parents. An identity set is used, so distinct-but-equal
  // IndexedWord instances are kept separate.
  if (iterator == null) {
    Set<IndexedWord> parents = sg.getParents(node);
    Set<IndexedWord> neighbors = Generics.newIdentityHashSet();
    for (IndexedWord parent : parents) {
      neighbors.addAll(sg.getChildren(parent));
    }
    iterator = neighbors.iterator();
  }
  // Scan forward to the first candidate accepted by satisfiesOrder.
  while (iterator.hasNext()) {
    IndexedWord word = iterator.next();
    if ( ! satisfiesOrder(node, word)) { continue; }
    this.next = word;
    return;
  }
  this.next = null; // exhausted
} };
for (IndexedWord child : getChildren(vertex)) { String lemma = child.get(CoreAnnotations.LemmaAnnotation.class); if (lemma == null || lemma.isEmpty()) {
public int getNegation(Dictionaries dict) { if(headIndexedWord == null) return 0; // direct negation in a child Collection<IndexedWord> children = enhancedDependency.getChildren(headIndexedWord); for(IndexedWord child : children) { if(dict.negations.contains(child.lemma())) return 1; } // or has a sibling for(IndexedWord sibling : getHeadSiblings()) { if(dict.negations.contains(sibling.lemma()) && !enhancedDependency.hasParentWithReln(headIndexedWord, UniversalEnglishGrammaticalRelations.NOMINAL_SUBJECT)) return 1; } // check the parent List<Pair<GrammaticalRelation,IndexedWord>> parentPairs = enhancedDependency.parentPairs(headIndexedWord); if (!parentPairs.isEmpty()) { Pair<GrammaticalRelation,IndexedWord> parentPair = parentPairs.get(0); GrammaticalRelation gr = parentPair.first; // check negative prepositions if(dict.neg_relations.contains(gr.toString())) return 1; } return 0; }
public int getNegation(Dictionaries dict) { if(headIndexedWord == null) return 0; // direct negation in a child Collection<IndexedWord> children = dependency.getChildren(headIndexedWord); for(IndexedWord child : children) { if(dict.negations.contains(child.lemma())) return 1; } // or has a sibling for(IndexedWord sibling : dependency.getSiblings(headIndexedWord)) { if(dict.negations.contains(sibling.lemma()) && !dependency.hasParentWithReln(headIndexedWord, UniversalEnglishGrammaticalRelations.NOMINAL_SUBJECT)) return 1; } // check the parent List<Pair<GrammaticalRelation,IndexedWord>> parentPairs = dependency.parentPairs(headIndexedWord); if (!parentPairs.isEmpty()) { Pair<GrammaticalRelation,IndexedWord> parentPair = parentPairs.get(0); GrammaticalRelation gr = parentPair.first; // check negative prepositions if(dict.neg_relations.contains(gr.toString())) return 1; } return 0; }
for (IndexedWord child : g.getChildren(curr)) { boolean dontuse = false; if (doNotAddThese!=null &&doNotAddThese.contains(child))
public int getModal(Dictionaries dict) { if(headIndexedWord == null) return 0; // direct modal in a child Collection<IndexedWord> children = enhancedDependency.getChildren(headIndexedWord); for(IndexedWord child : children) { if(dict.modals.contains(child.lemma())) return 1; } // check the parent IndexedWord parent = getHeadParent(); if (parent != null) { if(dict.modals.contains(parent.lemma())) return 1; // check the children of the parent (that is needed for modal auxiliaries) IndexedWord child = enhancedDependency.getChildWithReln(parent,UniversalEnglishGrammaticalRelations.AUX_MODIFIER); if(!enhancedDependency.hasParentWithReln(headIndexedWord, UniversalEnglishGrammaticalRelations.NOMINAL_SUBJECT) && child != null && dict.modals.contains(child.lemma())) return 1; } // look at the path to root List<IndexedWord> path = getHeadPathToRoot(); if(path == null) return 0; for(IndexedWord word : path) { if(dict.modals.contains(word.lemma())) return 1; } return 0; }
public int getModal(Dictionaries dict) { if(headIndexedWord == null) return 0; // direct modal in a child Collection<IndexedWord> children = dependency.getChildren(headIndexedWord); for(IndexedWord child : children) { if(dict.modals.contains(child.lemma())) return 1; } // check the parent IndexedWord parent = dependency.getParent(headIndexedWord); if (parent != null) { if(dict.modals.contains(parent.lemma())) return 1; // check the children of the parent (that is needed for modal auxiliaries) IndexedWord child = dependency.getChildWithReln(parent, UniversalEnglishGrammaticalRelations.AUX_MODIFIER); if(!dependency.hasParentWithReln(headIndexedWord, UniversalEnglishGrammaticalRelations.NOMINAL_SUBJECT) && child != null && dict.modals.contains(child.lemma())) return 1; } // look at the path to root List<IndexedWord> path = dependency.getPathToRoot(headIndexedWord); if(path == null) return 0; for(IndexedWord word : path) { if(dict.modals.contains(word.lemma())) return 1; } return 0; }
/**
 * Tests whether {@code child} is reachable from {@code parent} within the
 * depth bound carried in {@code depths}.
 *
 * <p>{@code depths.get(0)} is the maximum allowed depth
 * ({@code Integer.MAX_VALUE} means unbounded, which short-circuits to
 * true); {@code depths.get(1)} is the running counter, incremented once
 * per recursive expansion.
 *
 * <p>NOTE(review): the counter lives in the shared mutable {@code depths}
 * pair and is never decremented, so it accumulates across sibling branches
 * rather than tracking per-path depth — confirm that is the intended bound
 * before changing it.
 *
 * @return true if {@code child} is found within the bound
 */
private boolean checkIfSatisfiedMaxDepth(SemanticGraph g, IndexedWord parent,
                                         IndexedWord child, IntPair depths) {
  if (depths.get(0) == Integer.MAX_VALUE) {
    return true; // unbounded: trivially satisfied
  }
  if (parent.equals(child)) {
    return true;
  }
  // Direct children are matched before spending any depth budget.
  for (IndexedWord c : g.getChildren(parent)) {
    if (c.equals(child)) {
      return true;
    }
  }
  depths.set(1, depths.get(1) + 1);
  if (depths.get(1) >= depths.get(0)) {
    return false; // budget exhausted
  }
  // Fix: the original carried a redundant `foundInMaxDepth` flag and
  // compared it with `== true`; an early return is equivalent and clearer.
  for (IndexedWord c : g.getChildren(parent)) {
    if (checkIfSatisfiedMaxDepth(g, c, child, depths)) {
      return true;
    }
  }
  return false;
}
for (IndexedWord child : sg.getChildren(rootNode)) { Set<IndexedWord> reachableSet = sg.getSubgraphVertices(child); if (reachableSet.contains(rootNode)) {
Set<IndexedWord> children = new HashSet<>(sg.getChildren(oldHead)); for (IndexedWord child : children) { SemanticGraphEdge oldEdge = sg.getEdge(oldHead, child); Set<IndexedWord> children = new HashSet<>(sg.getChildren(oldHead));
for (IndexedWord iw : vbs) { Set<IndexedWord> children = graph.getChildren(iw); List<IndexedWord> deps = Generics.newArrayList(); IndexedWord nsubj = null;
/**
 * Depth-first collection of the subtree rooted at {@code curr}: adds the
 * node itself and then everything reachable through child edges to
 * {@code descendantSet}, which also serves as the cycle guard.
 */
private void descendantsHelper(IndexedWord curr, Set<IndexedWord> descendantSet) {
  // Stop as soon as we revisit a node; the set records everything seen.
  if (descendantSet.contains(curr)) {
    return;
  }
  descendantSet.add(curr);
  getChildren(curr).forEach(child -> descendantsHelper(child, descendantSet));
}