/**
 * Returns the gloss (definition text) of the wrapped synset.
 *
 * @return the gloss of the underlying {@code realSynset}
 * @throws WordNetException if the underlying resource fails to provide the gloss
 */
public String getGloss() throws WordNetException {
    return realSynset.getGloss();
}
/**
 * Returns the underlying synset's gloss rendered as a plain string.
 */
public String getGloss() {
    return synset.getGloss().toString();
}
/**
 * Returns the concatenated glosses of all synsets mapped to the given entity,
 * separated by single spaces.
 *
 * @param entity the entity whose glosses are collected
 * @return the space-joined glosses, with surrounding whitespace trimmed
 * @throws LexicalSemanticResourceException if the entity lookup fails
 */
@Override
public String getGloss(Entity entity) throws LexicalSemanticResourceException {
    final Set<Synset> synsets = WordNetUtils.entityToSynsets(dict, entity, isCaseSensitive);
    final StringBuilder glosses = new StringBuilder();
    for (final Synset synset : synsets) {
        glosses.append(synset.getGloss()).append(' ');
    }
    return glosses.toString().trim();
}
/**
 * Renders this synset as {@code [Synset: [Offset: o] pos Words: lemma1, lemma2 -- (gloss)]}.
 * The gloss suffix is only appended when a gloss is present.
 */
@Override
public String toString() {
    final StringBuilder lemmas = new StringBuilder();
    for (int i = 0; i < words.size(); i++) {
        if (i > 0) {
            lemmas.append(", ");
        }
        lemmas.append(words.get(i).getLemma());
    }
    if (getGloss() != null) {
        lemmas.append(" -- (").append(getGloss()).append(")");
    }
    return ResourceBundleSet.insertParams(
            "[Synset: [Offset: {0}] {1} Words: {2}]",
            new Object[] { getOffset(), getPOS(), lemmas.toString() });
}
/**
 * Renders this synset as {@code [Synset: [Offset: o] pos Words: lemma1, lemma2 -- (gloss)]}.
 * The gloss suffix is only appended when a gloss is present.
 */
@Override
public String toString() {
    final StringBuilder lemmaList = new StringBuilder();
    String separator = "";
    for (int i = 0; i < words.size(); i++) {
        lemmaList.append(separator).append(words.get(i).getLemma());
        separator = ", ";
    }
    if (getGloss() != null) {
        lemmaList.append(" -- (").append(getGloss()).append(")");
    }
    return ResourceBundleSet.insertParams(
            "[Synset: [Offset: {0}] {1} Words: {2}]",
            new Object[] { getOffset(), getPOS(), lemmaList.toString() });
}
/**
 * Produces a human-readable description of the given sense.
 *
 * If no description format is configured, the raw synset gloss is returned.
 * Otherwise the placeholders {@code %d} (definition), {@code %e} (examples)
 * and {@code %w} (synonyms) in the template are substituted, in that order.
 *
 * @param senseId the identifier of the sense to describe
 * @return the formatted sense description
 * @throws SenseInventoryException if the sense cannot be resolved
 */
@Override
public String getSenseDescription(String senseId) throws SenseInventoryException {
    final CachedWordNetSense sense = getSense(senseId);
    if (senseDescriptionFormat == null) {
        // No template configured: fall back to the plain gloss.
        return sense.synset.getGloss();
    }
    return senseDescriptionFormat
            .replace("%d", sense.getDefinition())
            .replace("%e", sense.getExamples().toString())
            .replace("%w", sense.getSynonyms().toString());
}
// Echo the gloss in WordNet's conventional " -- (gloss)" form; no trailing newline.
System.out.print(" -- (" + synset.getGloss() + ")");
/**
 * Recursively scores the cause chain of {@code child}. At each level the
 * overlap between the causes and the context words is raised to
 * {@code intersectionExponent} and damped by {@code depth} raised to
 * {@code depthScoreExponent}. Recursion stops when {@code depth} reaches 0;
 * {@code maxDepth} is only forwarded, not used directly here.
 */
private void fathomCausesExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setCauses();

  double overlap = assessFeature(node.getCauses(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset cause : node.getCauses()) {
    fathomCausesExponential(wordSense, cause, glossRelevantWords, depth - 1,
        maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the coordinate-term tree of {@code child}. At each
 * level the overlap between the coordinate terms and the context words is
 * raised to {@code intersectionExponent} and damped by {@code depth} raised
 * to {@code depthScoreExponent}. Recursion stops when {@code depth} reaches
 * 0; {@code maxDepth} is only forwarded, not used directly here.
 */
private void fathomCoordinateTermsExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setCoordinateTerms();

  double overlap = assessFeature(node.getCoordinateTerms(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset coordinate : node.getCoordinateTerms()) {
    fathomCoordinateTermsExponential(wordSense, coordinate, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the pertainym tree of {@code child}. At each level the
 * overlap between the pertainyms and the context words is raised to
 * {@code intersectionExponent} and damped by {@code depth} raised to
 * {@code depthScoreExponent}. Recursion stops when {@code depth} reaches 0;
 * {@code maxDepth} is only forwarded, not used directly here.
 */
private void fathomPertainymsExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setPertainyms();

  double overlap = assessFeature(node.getPertainyms(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset pertainym : node.getPertainyms()) {
    fathomPertainymsExponential(wordSense, pertainym, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the entailment chain of {@code child}. At each level
 * the overlap between the entailments and the context words is raised to
 * {@code intersectionExponent} and damped by {@code depth} raised to
 * {@code depthScoreExponent}. Recursion stops when {@code depth} reaches 0;
 * {@code maxDepth} is only forwarded, not used directly here.
 */
private void fathomEntailmentsExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setEntailements(); // NOTE: misspelled setter name comes from SynNode's API

  double overlap = assessFeature(node.getEntailments(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset entailment : node.getEntailments()) {
    fathomEntailmentsExponential(wordSense, entailment, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the attribute tree of {@code child}. At each level the
 * overlap between the attributes and the context words is raised to
 * {@code intersectionExponent} and damped by {@code depth} raised to
 * {@code depthScoreExponent}. Recursion stops when {@code depth} reaches 0;
 * {@code maxDepth} is only forwarded, not used directly here.
 */
private void fathomAttributesExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setAttributes();

  double overlap = assessFeature(node.getAttributes(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset attribute : node.getAttributes()) {
    fathomAttributesExponential(wordSense, attribute, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the attribute tree of {@code child} linearly: each
 * level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}. Recursion stops when
 * {@code depth} reaches 0.
 */
private void fathomAttributes(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setAttributes();

  // Deeper levels receive exponentially smaller weight.
  double levelWeight = Math.pow(depthScoreWeight, maxDepth - depth + 1);
  wordSense.setScore(wordSense.getScore()
      + levelWeight * assessFeature(node.getAttributes(), relvWords));

  for (Synset attribute : node.getAttributes()) {
    fathomAttributes(wordSense, attribute, glossRelevantWords, depth - 1,
        maxDepth, depthScoreWeight);
  }
}
/**
 * Recursively scores the hypernym tree of {@code child} exponentially: at
 * each level the overlap between the hypernyms and the context words is
 * raised to {@code intersectionExponent} and damped by {@code depth} raised
 * to {@code depthScoreExponent}.
 *
 * @param wordSense the sense whose score is accumulated
 * @param child the synset whose hypernyms are explored
 * @param relvWords the context words matched at this level
 * @param depth remaining recursion depth; 0 stops the recursion
 * @param maxDepth forwarded unchanged, not used directly here
 * @param intersectionExponent exponent applied to the overlap score
 * @param depthScoreExponent exponent applied to the depth damping
 */
private void fathomHypernymsExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setHypernyms();

  double overlap = assessFeature(node.getHypernyms(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset hypernym : node.getHypernyms()) {
    fathomHypernymsExponential(wordSense, hypernym, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the hyponym tree of {@code child} exponentially: at
 * each level the overlap between the hyponyms and the context words is
 * raised to {@code intersectionExponent} and damped by {@code depth} raised
 * to {@code depthScoreExponent}.
 *
 * @param wordSense the sense whose score is accumulated
 * @param child the synset whose hyponyms are explored
 * @param relvWords the context words matched at this level
 * @param depth remaining recursion depth; 0 stops the recursion
 * @param maxDepth forwarded unchanged, not used directly here
 * @param intersectionExponent exponent applied to the overlap score
 * @param depthScoreExponent exponent applied to the depth damping
 */
private void fathomHyponymsExponential(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double intersectionExponent, double depthScoreExponent) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setHyponyms();

  double overlap = assessFeature(node.getHyponyms(), relvWords);
  double contribution = Math.pow(overlap, intersectionExponent)
      / Math.pow(depth, depthScoreExponent);
  wordSense.setScore(wordSense.getScore() + contribution);

  for (Synset hyponym : node.getHyponyms()) {
    fathomHyponymsExponential(wordSense, hyponym, glossRelevantWords,
        depth - 1, maxDepth, intersectionExponent, depthScoreExponent);
  }
}
/**
 * Recursively scores the coordinate-term tree of {@code child} linearly:
 * each level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}. Recursion stops when
 * {@code depth} reaches 0.
 */
private void fathomCoordinateTerms(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setCoordinateTerms();

  // Deeper levels receive exponentially smaller weight.
  double levelWeight = Math.pow(depthScoreWeight, maxDepth - depth + 1);
  wordSense.setScore(wordSense.getScore()
      + levelWeight * assessFeature(node.getCoordinateTerms(), relvWords));

  for (Synset coordinate : node.getCoordinateTerms()) {
    fathomCoordinateTerms(wordSense, coordinate, glossRelevantWords,
        depth - 1, maxDepth, depthScoreWeight);
  }
}
/**
 * Recursively scores the entailment chain of {@code child} linearly: each
 * level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}. Recursion stops when
 * {@code depth} reaches 0.
 */
private void fathomEntailments(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setEntailements(); // NOTE: misspelled setter name comes from SynNode's API

  // Deeper levels receive exponentially smaller weight.
  double levelWeight = Math.pow(depthScoreWeight, maxDepth - depth + 1);
  wordSense.setScore(wordSense.getScore()
      + levelWeight * assessFeature(node.getEntailments(), relvWords));

  for (Synset entailment : node.getEntailments()) {
    fathomEntailments(wordSense, entailment, glossRelevantWords, depth - 1,
        maxDepth, depthScoreWeight);
  }
}
/**
 * Recursively scores the pertainym tree of {@code child} linearly: each
 * level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}. Recursion stops when
 * {@code depth} reaches 0.
 */
private void fathomPertainyms(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setPertainyms();

  // Deeper levels receive exponentially smaller weight.
  double levelWeight = Math.pow(depthScoreWeight, maxDepth - depth + 1);
  wordSense.setScore(wordSense.getScore()
      + levelWeight * assessFeature(node.getPertainyms(), relvWords));

  for (Synset pertainym : node.getPertainyms()) {
    fathomPertainyms(wordSense, pertainym, glossRelevantWords, depth - 1,
        maxDepth, depthScoreWeight);
  }
}
/**
 * Recursively scores the cause chain of {@code child} linearly: each
 * level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}. Recursion stops when
 * {@code depth} reaches 0.
 *
 * @param wordSense the sense whose score is accumulated
 * @param child the synset whose causes are explored
 * @param relvWords the context words matched at this level
 * @param depth remaining recursion depth; 0 stops the recursion
 * @param maxDepth total depth, used to weight shallow levels more heavily
 * @param depthScoreWeight base of the per-level weight
 */
private void fathomCauses(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0)
    return;
  String[] tokenizedGloss = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> relvGlossWords = WSDHelper
      .getAllRelevantWords(tokenizedGloss);
  SynNode childNode = new SynNode(child, relvGlossWords);
  childNode.setCauses();
  wordSense.setScore(
      wordSense.getScore() + Math.pow(depthScoreWeight, maxDepth - depth + 1)
          * assessFeature(childNode.getCauses(), relvWords));
  for (Synset cause : childNode.getCauses()) {
    // Fix: recurse on the cause chain itself. This previously called
    // fathomEntailments, so deeper levels scored entailments instead of
    // causes — a copy-paste slip relative to every sibling fathom* method.
    fathomCauses(wordSense, cause, relvGlossWords, depth - 1, maxDepth,
        depthScoreWeight);
  }
}
/**
 * Recursively scores the meronym tree of {@code child} linearly: each
 * level's overlap with the context words is weighted by
 * {@code depthScoreWeight^(maxDepth - depth + 1)}.
 *
 * @param wordSense the sense whose score is accumulated
 * @param child the synset whose meronyms are explored
 * @param relvWords the context words matched at this level
 * @param depth remaining recursion depth; 0 stops the recursion
 * @param maxDepth total depth, used to weight shallow levels more heavily
 * @param depthScoreWeight base of the per-level weight
 */
private void fathomMeronyms(WordSense wordSense, Synset child,
    ArrayList<WordPOS> relvWords, int depth, int maxDepth,
    double depthScoreWeight) {
  if (depth == 0) {
    return;
  }
  // Relevant words from this synset's gloss become the context for the
  // next recursion level.
  String[] glossTokens = WSDHelper.getTokenizer()
      .tokenize(child.getGloss().toString());
  ArrayList<WordPOS> glossRelevantWords = WSDHelper
      .getAllRelevantWords(glossTokens);

  SynNode node = new SynNode(child, glossRelevantWords);
  node.setMeronyms();

  // Deeper levels receive exponentially smaller weight.
  double levelWeight = Math.pow(depthScoreWeight, maxDepth - depth + 1);
  wordSense.setScore(wordSense.getScore()
      + levelWeight * assessFeature(node.getMeronyms(), relvWords));

  for (Synset meronym : node.getMeronyms()) {
    fathomMeronyms(wordSense, meronym, glossRelevantWords, depth - 1,
        maxDepth, depthScoreWeight);
  }
}