// Refine search
/**
 * Renders an ordered pair of sieve indices as a readable ordering constraint,
 * e.g. {@code "SieveA &lt; SieveB"}. A negative index is shown as the wildcard
 * {@code "*"} (meaning "any sieve").
 *
 * @param orderedSieveIndices pair (i, j) meaning sieve i must run before sieve j
 * @param sieveNames sieve names indexed by sieve position
 * @return the constraint string in the form {@code first + " < " + second}
 */
private static String toSieveOrderConstraintString(Pair<Integer,Integer> orderedSieveIndices, String[] sieveNames) {
  int firstIndex = orderedSieveIndices.first();
  int secondIndex = orderedSieveIndices.second();
  String first = firstIndex < 0 ? "*" : sieveNames[firstIndex];
  String second = secondIndex < 0 ? "*" : sieveNames[secondIndex];
  return first + " < " + second;
}
/**
 * Constructs a ColumnDataClassifier from a pre-built configuration.
 *
 * @param flagsClassifierPair pair of (per-column Flags array controlling
 *        featurization and behavior, classifier to use); element 0 of the
 *        Flags array holds the global settings
 */
public ColumnDataClassifier(Pair<Flags[],Classifier<String,String>> flagsClassifierPair) {
  this.flags = flagsClassifierPair.first();
  this.globalFlags = this.flags[0];
  this.classifier = flagsClassifierPair.second();
}
/** * Prints the collocations found in this <code>Tree</code> as strings. * Each is followed by its boundary constituent indices in the original tree. * <br>Example: <code> throw_up (2,3) </code> * <br> <code> came_up_with (7,9) </code> */ public void printCollocationStrings(PrintWriter pw){ //ArrayList<String> strs = new ArrayList<String>(); for (Collocation c: collocationCollector) { String cs = c.collocationString; pw.println(cs+" ("+(c.span.first()+1)+","+(c.span.second()+1)+")"); } }
public String expandStringRegex(String regex) { // Replace all variables in regex String expanded = regex; for (Map.Entry<String, Pair<Pattern, String>> stringPairEntry : stringRegexVariables.entrySet()) { Pair<Pattern,String> p = stringPairEntry.getValue(); expanded = p.first().matcher(expanded).replaceAll(p.second()); } return expanded; }
/**
 * Builds a chained conditional expression from (condition, value) pairs plus a
 * final else expression. Pairs are folded right-to-left into nested
 * IfExpressions, so earlier conditions take precedence at evaluation time.
 *
 * @param conds non-empty list of (condition, result) pairs, in priority order
 * @param elseExpr expression used when no condition matches
 * @throws IllegalArgumentException if {@code conds} is empty
 */
public CaseExpression(List<Pair<Expression,Expression>> conds, Expression elseExpr) {
  // isEmpty() is the idiomatic emptiness check (was conds.size() == 0).
  if (conds.isEmpty()) {
    throw new IllegalArgumentException("No conditions!");
  }
  expr = elseExpr;
  // Fold from the last condition backwards so the first pair ends up outermost.
  for (int i = conds.size() - 1; i >= 0; i--) {
    Pair<Expression,Expression> p = conds.get(i);
    expr = new IfExpression(p.first(), p.second(), expr);
  }
}
}
public double getEditDistanceScoresOtherClass(String label, String g) { double editDist; String editDistPh; // if (editDistanceFromOtherSemanticClasses.containsKey(g)) { // editDist = editDistanceFromOtherSemanticClasses.get(g); // editDistPh = editDistanceFromOtherSemanticClassesMatches.get(g); // } else { Pair<String, Double> editMatch = getEditDistanceFromOtherClasses(label, g, 4); editDist = editMatch.second(); editDistPh = editMatch.first(); // } assert (!editDistPh.isEmpty()); return (editDist == editDistMax ? 1.0 : (editDist / (double) Math.max(g.length(), editDistPh.length()))); }
/**
 * Scans the reader line by line and collects macro definitions.
 * Only trimmed lines beginning with {@code "macro "} are parsed (via
 * {@code extractMacro}); all other lines are ignored.
 *
 * @param reader source of definition lines
 * @return map from macro name to macro expansion
 * @throws IOException if reading from {@code reader} fails
 */
private static Map<String, String> preprocess(BufferedReader reader) throws IOException {
  Map<String, String> macros = Generics.newHashMap();
  String line;
  while ((line = reader.readLine()) != null) {
    line = line.trim();
    if (!line.startsWith("macro ")) {
      continue;
    }
    Pair<String, String> macro = extractMacro(line);
    macros.put(macro.first(), macro.second());
  }
  return macros;
}
@Override public String toString(){ if(relations.size() > 1) throw new UnsupportedOperationException(); Pair<Token, GrammaticalRelation> rel = relations.get(0); //String pattern = "({" + wordType + ":/" + parent + "/}=parent >>" + rel + "=reln {}=node)"; String p = "(" + rel.first().toString() + "=parent >"+rel.second().toString() + "=reln {}=node)"; return p; }
/**
 * Depth-first search helper: returns true if {@code l2} is reachable from
 * {@code node} in {@code sg} via a final edge whose relation matches this
 * pattern's {@code type} predicate. {@code usedNodes} tracks visited nodes to
 * avoid cycles; it is mutated as the search proceeds.
 */
private boolean satisfyHelper(IndexedWord node, IndexedWord l2, SemanticGraph sg, Set<IndexedWord> usedNodes) {
  List<Pair<GrammaticalRelation, IndexedWord>> neighbors = getNeighborPairs(sg, node);
  // First look for a direct edge to l2 whose relation matches the type test.
  for (Pair<GrammaticalRelation, IndexedWord> neighbor : neighbors) {
    if (this.type.test(neighbor.first().toString()) && neighbor.second().equals(l2)) {
      return true;
    }
  }
  // Mark the current node visited before recursing so cycles terminate.
  usedNodes.add(node);
  for (Pair<GrammaticalRelation, IndexedWord> neighbor : neighbors) {
    IndexedWord next = neighbor.second();
    if (!usedNodes.contains(next) && satisfyHelper(next, l2, sg, usedNodes)) {
      return true;
    }
  }
  return false;
}
/**
 * Logs, at FINE level, the per-label feature weights of the given classifier.
 * Labels are reported in sorted order; within each label, features are listed
 * in the order produced by {@code Counters.toSortedListWithCounts}.
 *
 * @param classifier classifier whose weights are reported
 * @param classLabel optional label used only to tag the log output; may be null
 */
protected static void reportWeights(LinearClassifier<String, String> classifier, String classLabel) {
  if (classLabel != null) {
    logger.fine("CLASSIFIER WEIGHTS FOR LABEL " + classLabel);
  }
  Map<String, Counter<String>> labelsToFeatureWeights = classifier.weightsAsMapOfCounters();
  List<String> labels = new ArrayList<>(labelsToFeatureWeights.keySet());
  Collections.sort(labels);
  for (String label : labels) {
    Counter<String> featWeights = labelsToFeatureWeights.get(label);
    List<Pair<String, Double>> sorted = Counters.toSortedListWithCounts(featWeights);
    StringBuilder bos = new StringBuilder();
    bos.append("WEIGHTS FOR LABEL ").append(label).append(':');
    for (Pair<String, Double> feat : sorted) {
      // Chain appends instead of concatenating feat.second()+"\n" inside
      // append(), which built a throwaway String per feature.
      bos.append(' ').append(feat.first()).append(':').append(feat.second()).append('\n');
    }
    logger.fine(bos.toString());
  }
}
/**
 * Logs (at FINE level) the scoring summary for one deterministic coref sieve
 * pass: MUC, B-cubed, and pairwise F1; cluster count; counts of additional and
 * additional-correct links for this pass; and the running per-pass totals from
 * {@code linksCountInPass} (second = total links, first = correct links).
 *
 * NOTE(review): the precision divisions use integer counts promoted via
 * {@code 1.0*}; when {@code additionalLinksCount} or the pass total is 0 the
 * logged precision is NaN or Infinity — confirm this is acceptable for logs.
 *
 * @param document the document whose cluster count is reported
 * @param sieve the sieve whose flags are echoed in the pass header
 */
private void printSieveScore(Document document, DeterministicCorefSieve sieve) { logger.fine("==========================================="); logger.fine("pass"+currentSieve+": "+ sieve.flagsToString()); scoreMUC.get(currentSieve).printF1(logger); scoreBcubed.get(currentSieve).printF1(logger); scorePairwise.get(currentSieve).printF1(logger); logger.fine("# of Clusters: "+document.corefClusters.size() + ",\t# of additional links: "+additionalLinksCount +",\t# of additional correct links: "+additionalCorrectLinksCount +",\tprecision of new links: "+1.0*additionalCorrectLinksCount/additionalLinksCount); logger.fine("# of total additional links: "+linksCountInPass.get(currentSieve).second() +",\t# of total additional correct links: "+linksCountInPass.get(currentSieve).first() +",\taccumulated precision of this pass: "+1.0*linksCountInPass.get(currentSieve).first()/linksCountInPass.get(currentSieve).second()); logger.fine("--------------------------------------"); } /** Print coref link info */