public String toCompactString(boolean showTags) { StringBuilder sb = new StringBuilder(); Set<IndexedWord> used = wordMapFactory.newSet(); Collection<IndexedWord> roots = getRoots(); if (roots.isEmpty()) { if (size() == 0) { return "[EMPTY_SEMANTIC_GRAPH]"; } else { return "[UNROOTED_SEMANTIC_GRAPH]"; } // return toString("readable"); } for (IndexedWord root : roots) { toCompactStringHelper(root, sb, used, showTags); } return sb.toString(); }
// NOTE(review): this line appears to be TWO copies of the same cache-lookup
// snippet pasted together (first keyed on `sg`, then on `sg_aligned`), with
// unbalanced braces and a redeclaration of `topoSort` inside the nested
// synchronized block — it cannot compile as written; confirm against the
// original file before use.
// Apparent intent: under the cache lock, look up a memoized topological sort
// keyed by the graph's identity hash; the size comparison guards against a
// stale entry for a graph mutated after caching; on miss/stale, recompute
// via topologicalSort() and re-cache.
synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg)); if (topoSort == null || topoSort.size() != sg.size()) { // size check to mitigate a stale cache topoSort = sg.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg), topoSort); synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg_aligned)); if (topoSort == null || topoSort.size() != sg_aligned.size()) { // size check to mitigate a stale cache topoSort = sg_aligned.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg_aligned), topoSort);
// Validate the alignment index array before building the node-to-node map:
// it must be non-null and supply exactly one entry per node of hypGraph.
// NOTE(review): fragment — `indexes` and `hypGraph` are parameters/fields of
// the enclosing method, which is not visible here.
if (indexes == null) throw new IllegalArgumentException("Null index array"); if (indexes.length != hypGraph.size()) throw new IllegalArgumentException("Index array length " + indexes.length + " does not match hypGraph size " + hypGraph.size()); Map<IndexedWord, IndexedWord> map = Generics.newHashMap();
// True iff the traversal reached every node of the graph, i.e. the visited
// set covers the whole graph (presumably a connectivity check — the traversal
// that fills `visitedNodes` is not visible in this excerpt).
return visitedNodes.size() == sg.size();
// Take the next tree and guard against a missing tree or one whose yield
// (leaf count) disagrees with the graph's node count — tree and graph must
// describe the same sentence.
// NOTE(review): fragment — the `if` body and surrounding loop are not
// visible in this excerpt.
Tree t = treeIt.next(); if (t == null || t.yield().size() != sg.size()) {
public String toCompactString(boolean showTags) { StringBuilder sb = new StringBuilder(); Set<IndexedWord> used = Generics.newHashSet(); Collection<IndexedWord> roots = getRoots(); if (roots.isEmpty()) { if (size() == 0) { return "[EMPTY_SEMANTIC_GRAPH]"; } else { return "[UNROOTED_SEMANTIC_GRAPH]"; } // return toString("readable"); } for (IndexedWord root : roots) { toCompactStringHelper(root, sb, used, showTags); } return sb.toString(); }
public String toCompactString(boolean showTags) { StringBuilder sb = new StringBuilder(); Set<IndexedWord> used = wordMapFactory.newSet(); Collection<IndexedWord> roots = getRoots(); if (roots.isEmpty()) { if (size() == 0) { return "[EMPTY_SEMANTIC_GRAPH]"; } else { return "[UNROOTED_SEMANTIC_GRAPH]"; } // return toString("readable"); } for (IndexedWord root : roots) { toCompactStringHelper(root, sb, used, showTags); } return sb.toString(); }
// NOTE(review): the two lines below are identical copies of the same broken
// fragment — each is itself two pasted copies of a cache-lookup snippet
// (keyed first on `sg`, then on `sg_aligned`) with unbalanced braces and a
// `topoSort` redeclaration in the nested synchronized block; neither line
// compiles as written. Confirm against the original file.
// Apparent intent: under the cache lock, fetch the memoized topological sort
// for the graph (identity-hash keyed); the size check mitigates a stale
// entry; on miss/stale, recompute and store.
synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg)); if (topoSort == null || topoSort.size() != sg.size()) { // size check to mitigate a stale cache topoSort = sg.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg), topoSort); synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg_aligned)); if (topoSort == null || topoSort.size() != sg_aligned.size()) { // size check to mitigate a stale cache topoSort = sg_aligned.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg_aligned), topoSort);
synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg)); if (topoSort == null || topoSort.size() != sg.size()) { // size check to mitigate a stale cache topoSort = sg.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg), topoSort); synchronized (topologicalSortCache) { List<IndexedWord> topoSort = topologicalSortCache.get(System.identityHashCode(sg_aligned)); if (topoSort == null || topoSort.size() != sg_aligned.size()) { // size check to mitigate a stale cache topoSort = sg_aligned.topologicalSort(); topologicalSortCache.put(System.identityHashCode(sg_aligned), topoSort);
public String toCompactString(boolean showTags) { StringBuilder sb = new StringBuilder(); Set<IndexedWord> used = wordMapFactory.newSet(); Collection<IndexedWord> roots = getRoots(); if (roots.isEmpty()) { if (size() == 0) { return "[EMPTY_SEMANTIC_GRAPH]"; } else { return "[UNROOTED_SEMANTIC_GRAPH]"; } // return toString("readable"); } for (IndexedWord root : roots) { toCompactStringHelper(root, sb, used, showTags); } return sb.toString(); }
// Duplicate copies of the alignment-index validation: the index array must
// be non-null and exactly one entry long per node of hypGraph, after which a
// fresh node-to-node map is created.
// NOTE(review): fragments — `indexes` and `hypGraph` belong to the enclosing
// method, which is not visible in this excerpt.
if (indexes == null) throw new IllegalArgumentException("Null index array"); if (indexes.length != hypGraph.size()) throw new IllegalArgumentException("Index array length " + indexes.length + " does not match hypGraph size " + hypGraph.size()); Map<IndexedWord, IndexedWord> map = Generics.newHashMap();
if (indexes == null) throw new IllegalArgumentException("Null index array"); if (indexes.length != hypGraph.size()) throw new IllegalArgumentException("Index array length " + indexes.length + " does not match hypGraph size " + hypGraph.size()); Map<IndexedWord, IndexedWord> map = Generics.newHashMap();
// Duplicate copies: true iff the traversal reached every node of the graph
// (the visited set covers the whole graph). The traversal populating
// `visitedNodes` is outside this excerpt.
return visitedNodes.size() == sg.size();
return visitedNodes.size() == sg.size();
// Debug dump of the current sentence and its dependency graph to stdout.
// NOTE(review): fragment — `sentence` and `dependencies1` come from the
// enclosing scope; `edge_set1` and the counter `j` are presumably consumed
// by code that follows this excerpt. Consider a logger over System.out if
// this survives into production code.
System.out.println("Sentence: "+sentence.toString()); System.out.println("DEPENDENCIES: "+dependencies1.toList()); System.out.println("DEPENDENCIES SIZE: "+dependencies1.size()); Set<SemanticGraphEdge> edge_set1 = dependencies1.getEdgeSet(); int j=0;
// Take the next tree and guard against a missing tree or a yield-length
// mismatch with the graph's node count (tree and graph must cover the same
// sentence).
// NOTE(review): fragment — the `if` body and enclosing loop are not visible
// in this excerpt.
Tree t = treeIt.next(); if (t == null || t.yield().size() != sg.size()) {