/**
 * Normalizes the stream via the per-field wrapped analyzer, then applies this
 * analyzer's normalization wrapping on top of the delegate's result.
 */
@Override
protected final TokenStream normalize(String fieldName, TokenStream in) {
    TokenStream delegated = getWrappedAnalyzer(fieldName).normalize(fieldName, in);
    return wrapTokenStreamForNormalization(fieldName, delegated);
}
// NOTE(review): fragment of a larger normalization helper — the enclosing method
// is not fully visible here. The try-with-resources guarantees the TokenStream
// is closed once consumed; the term attribute exposes the normalized bytes.
try (TokenStream ts = normalize(fieldName, new StringTokenStream(attributeFactory, filteredText, text.length()))) { final TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
/**
 * Builds a fuzzy query over every weighted field and combines the per-field
 * queries with a disjunction-max (tie-breaker 1.0). Unmapped fields contribute
 * a placeholder query; runtime failures are rethrown unless lenient.
 */
@Override
public Query newFuzzyQuery(String text, int fuzziness) {
    final List<Query> perField = new ArrayList<>();
    for (Map.Entry<String, Float> weighted : weights.entrySet()) {
        final String field = weighted.getKey();
        final MappedFieldType fieldType = context.fieldMapper(field);
        if (fieldType == null) {
            perField.add(newUnmappedFieldQuery(field));
            continue;
        }
        try {
            final BytesRef normalized = getAnalyzer(fieldType).normalize(field, text);
            final Query fuzzy = fieldType.fuzzyQuery(normalized, Fuzziness.fromEdits(fuzziness),
                settings.fuzzyPrefixLength, settings.fuzzyMaxExpansions, settings.fuzzyTranspositions);
            perField.add(wrapWithBoost(fuzzy, weighted.getValue()));
        } catch (RuntimeException e) {
            // Lenient mode converts the failure into a placeholder clause.
            perField.add(rethrowUnlessLenient(e));
        }
    }
    return perField.size() == 1 ? perField.get(0) : new DisjunctionMaxQuery(perField, 1.0f);
}
/**
 * Builds a range query for one field, normalizing both endpoints with the
 * forced analyzer if set, otherwise the field's search analyzer. Unmapped
 * fields yield an unmapped-field query; lenient mode wraps runtime failures
 * in a lenient field query instead of rethrowing.
 */
private Query getRangeQuerySingle(String field, String part1, String part2,
                                  boolean startInclusive, boolean endInclusive,
                                  QueryShardContext context) {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        final Analyzer normalizer = forceAnalyzer != null
            ? forceAnalyzer
            : queryBuilder.context.getSearchAnalyzer(currentFieldType);
        final BytesRef lower = part1 != null ? normalizer.normalize(field, part1) : null;
        final BytesRef upper = part2 != null ? normalizer.normalize(field, part2) : null;
        return currentFieldType.rangeQuery(lower, upper, startInclusive, endInclusive, null, timeZone, null, context);
    } catch (RuntimeException e) {
        if (lenient) {
            return newLenientFieldQuery(field, e);
        }
        throw e;
    }
}
/**
 * Builds a prefix query over every weighted field, combined via
 * disjunction-max (tie-breaker 1.0). When wildcard analysis is enabled the
 * text is analyzed per field; otherwise the raw text is normalized and used
 * as the prefix directly. Unmapped fields get a placeholder query; runtime
 * failures are rethrown unless lenient.
 */
@Override
public Query newPrefixQuery(String text) {
    final List<Query> perField = new ArrayList<>();
    for (Map.Entry<String, Float> weighted : weights.entrySet()) {
        final String field = weighted.getKey();
        final MappedFieldType fieldType = context.fieldMapper(field);
        if (fieldType == null) {
            perField.add(newUnmappedFieldQuery(field));
            continue;
        }
        try {
            if (settings.analyzeWildcard()) {
                // Analyzed path may legitimately yield no query for this field.
                Query analyzed = newPossiblyAnalyzedQuery(field, text, getAnalyzer(fieldType));
                if (analyzed != null) {
                    perField.add(wrapWithBoost(analyzed, weighted.getValue()));
                }
            } else {
                BytesRef normalized = getAnalyzer(fieldType).normalize(field, text);
                Query prefix = fieldType.prefixQuery(normalized.utf8ToString(), null, context);
                perField.add(wrapWithBoost(prefix, weighted.getValue()));
            }
        } catch (RuntimeException e) {
            perField.add(rethrowUnlessLenient(e));
        }
    }
    return perField.size() == 1 ? perField.get(0) : new DisjunctionMaxQuery(perField, 1.0f);
}
// NOTE(review): fragment of a token-consuming loop — the enclosing method is
// not fully visible here. Starts a new position bucket, stores the normalized
// current term, then advances the underlying stream.
currentPos = new ArrayList<>(); final BytesRef term = analyzer.normalize(field, termAtt.toString()); currentPos.add(term); hasMoreTokens = source.incrementToken();
/**
 * Builds a fuzzy query for one field, normalizing the term with the forced
 * analyzer if set, otherwise the field's search analyzer. Unmapped fields
 * yield an unmapped-field query; lenient mode wraps runtime failures in a
 * lenient field query instead of rethrowing.
 */
private Query getFuzzyQuerySingle(String field, String termStr, float minSimilarity) throws ParseException {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        final Analyzer normalizer = forceAnalyzer != null
            ? forceAnalyzer
            : queryBuilder.context.getSearchAnalyzer(currentFieldType);
        final BytesRef normalized = termStr != null ? normalizer.normalize(field, termStr) : null;
        return currentFieldType.fuzzyQuery(normalized, Fuzziness.fromEdits((int) minSimilarity),
            getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions);
    } catch (RuntimeException e) {
        if (lenient) {
            return newLenientFieldQuery(field, e);
        }
        throw e;
    }
}
/**
 * Applies the wrapped analyzer's normalization first, then this analyzer's
 * field-specific normalization wrapping around the delegate's output stream.
 */
@Override
protected final TokenStream normalize(String fieldName, TokenStream in) {
    final TokenStream normalizedByDelegate = getWrappedAnalyzer(fieldName).normalize(fieldName, in);
    return wrapTokenStreamForNormalization(fieldName, normalizedByDelegate);
}
/**
 * Range query for a single field. Endpoints are normalized with the forced
 * analyzer when present, else the field's search analyzer; null endpoints
 * express open-ended bounds. Unmapped fields return an unmapped-field query,
 * and lenient mode downgrades runtime failures to a lenient field query.
 */
private Query getRangeQuerySingle(String field, String part1, String part2,
                                  boolean startInclusive, boolean endInclusive,
                                  QueryShardContext context) {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        Analyzer termNormalizer;
        if (forceAnalyzer == null) {
            termNormalizer = queryBuilder.context.getSearchAnalyzer(currentFieldType);
        } else {
            termNormalizer = forceAnalyzer;
        }
        BytesRef lowerBound = (part1 == null) ? null : termNormalizer.normalize(field, part1);
        BytesRef upperBound = (part2 == null) ? null : termNormalizer.normalize(field, part2);
        return currentFieldType.rangeQuery(lowerBound, upperBound, startInclusive, endInclusive,
            null, timeZone, null, context);
    } catch (RuntimeException e) {
        if (!lenient) {
            throw e;
        }
        return newLenientFieldQuery(field, e);
    }
}
/**
 * Single-field range query. Both bounds are run through the normalizer
 * (forced analyzer if configured, otherwise the mapped field's search
 * analyzer). A missing mapping yields an unmapped-field query; with leniency
 * enabled, runtime failures become a lenient field query.
 */
private Query getRangeQuerySingle(String field, String part1, String part2,
                                  boolean startInclusive, boolean endInclusive,
                                  QueryShardContext context) {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        final Analyzer normalizer = (forceAnalyzer == null)
            ? queryBuilder.context.getSearchAnalyzer(currentFieldType)
            : forceAnalyzer;
        BytesRef from = null;
        if (part1 != null) {
            from = normalizer.normalize(field, part1);
        }
        BytesRef to = null;
        if (part2 != null) {
            to = normalizer.normalize(field, part2);
        }
        return currentFieldType.rangeQuery(from, to, startInclusive, endInclusive, null, timeZone, null, context);
    } catch (RuntimeException e) {
        if (lenient) {
            return newLenientFieldQuery(field, e);
        }
        throw e;
    }
}
/**
 * Fuzzy query across all weighted fields, combined with a disjunction-max
 * query (tie-breaker 1.0). Each term is normalized per field before building
 * the fuzzy clause; unmapped fields contribute a placeholder and runtime
 * failures are rethrown unless leniency applies.
 */
@Override
public Query newFuzzyQuery(String text, int fuzziness) {
    final List<Query> clauses = new ArrayList<>();
    for (Map.Entry<String, Float> fieldWeight : weights.entrySet()) {
        final String name = fieldWeight.getKey();
        final MappedFieldType mapped = context.fieldMapper(name);
        if (mapped == null) {
            clauses.add(newUnmappedFieldQuery(name));
        } else {
            try {
                final BytesRef termBytes = getAnalyzer(mapped).normalize(name, text);
                final Query q = mapped.fuzzyQuery(termBytes, Fuzziness.fromEdits(fuzziness),
                    settings.fuzzyPrefixLength, settings.fuzzyMaxExpansions, settings.fuzzyTranspositions);
                clauses.add(wrapWithBoost(q, fieldWeight.getValue()));
            } catch (RuntimeException e) {
                clauses.add(rethrowUnlessLenient(e));
            }
        }
    }
    if (clauses.size() == 1) {
        return clauses.get(0);
    }
    return new DisjunctionMaxQuery(clauses, 1.0f);
}
/**
 * Creates one fuzzy clause per weighted field and merges them into a
 * disjunction-max query with tie-breaker 1.0. Field terms are normalized
 * before querying; unmapped fields produce a placeholder, and exceptions are
 * propagated unless lenient mode turns them into placeholder clauses.
 */
@Override
public Query newFuzzyQuery(String text, int fuzziness) {
    final List<Query> disjunctionClauses = new ArrayList<>();
    for (Map.Entry<String, Float> entry : weights.entrySet()) {
        final String fieldName = entry.getKey();
        final MappedFieldType mappedType = context.fieldMapper(fieldName);
        if (mappedType == null) {
            disjunctionClauses.add(newUnmappedFieldQuery(fieldName));
            continue;
        }
        try {
            BytesRef normalizedTerm = getAnalyzer(mappedType).normalize(fieldName, text);
            Query fieldQuery = mappedType.fuzzyQuery(normalizedTerm,
                Fuzziness.fromEdits(fuzziness),
                settings.fuzzyPrefixLength,
                settings.fuzzyMaxExpansions,
                settings.fuzzyTranspositions);
            disjunctionClauses.add(wrapWithBoost(fieldQuery, entry.getValue()));
        } catch (RuntimeException e) {
            disjunctionClauses.add(rethrowUnlessLenient(e));
        }
    }
    return disjunctionClauses.size() == 1
        ? disjunctionClauses.get(0)
        : new DisjunctionMaxQuery(disjunctionClauses, 1.0f);
}
/**
 * Dispatches to Lucene's SimpleQueryParser's newFuzzyQuery, optionally
 * lowercasing the term first. Builds one SHOULD clause per weighted field;
 * runtime failures are swallowed when lenient (the field is skipped).
 */
@Override
public Query newFuzzyQuery(String text, int fuzziness) {
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.setDisableCoord(true);
    for (Map.Entry<String, Float> weighted : weights.entrySet()) {
        final String field = weighted.getKey();
        try {
            final BytesRef normalized = getAnalyzer().normalize(field, text);
            Query fuzzy = new FuzzyQuery(new Term(field, normalized), fuzziness);
            builder.add(wrapWithBoost(fuzzy, weighted.getValue()), BooleanClause.Occur.SHOULD);
        } catch (RuntimeException e) {
            rethrowUnlessLenient(e); // lenient: drop this field's clause
        }
    }
    return super.simplify(builder.build());
}
/**
 * Prefix query across all weighted fields, merged with a disjunction-max
 * query (tie-breaker 1.0). With wildcard analysis on, the text is analyzed
 * per field (possibly yielding no clause); otherwise the normalized text is
 * used directly as a prefix. Unmapped fields get a placeholder; exceptions
 * propagate unless lenient.
 */
@Override
public Query newPrefixQuery(String text) {
    final List<Query> clauses = new ArrayList<>();
    for (Map.Entry<String, Float> fieldWeight : weights.entrySet()) {
        final String name = fieldWeight.getKey();
        final MappedFieldType mapped = context.fieldMapper(name);
        if (mapped == null) {
            clauses.add(newUnmappedFieldQuery(name));
            continue;
        }
        try {
            if (!settings.analyzeWildcard()) {
                BytesRef normalized = getAnalyzer(mapped).normalize(name, text);
                Query prefix = mapped.prefixQuery(normalized.utf8ToString(), null, context);
                clauses.add(wrapWithBoost(prefix, fieldWeight.getValue()));
            } else {
                Query analyzed = newPossiblyAnalyzedQuery(name, text, getAnalyzer(mapped));
                if (analyzed != null) {
                    clauses.add(wrapWithBoost(analyzed, fieldWeight.getValue()));
                }
            }
        } catch (RuntimeException e) {
            clauses.add(rethrowUnlessLenient(e));
        }
    }
    if (clauses.size() == 1) {
        return clauses.get(0);
    }
    return new DisjunctionMaxQuery(clauses, 1.0f);
}
/**
 * Dispatches to Lucene's SimpleQueryParser's newFuzzyQuery, optionally
 * lowercasing the term first. Emits a SHOULD clause per weighted field into a
 * coord-disabled boolean query; lenient mode skips fields that fail.
 */
@Override
public Query newFuzzyQuery(String text, int fuzziness) {
    final BooleanQuery.Builder bool = new BooleanQuery.Builder();
    bool.setDisableCoord(true);
    for (Map.Entry<String, Float> entry : weights.entrySet()) {
        final String fieldName = entry.getKey();
        try {
            BytesRef termBytes = getAnalyzer().normalize(fieldName, text);
            FuzzyQuery fieldQuery = new FuzzyQuery(new Term(fieldName, termBytes), fuzziness);
            bool.add(wrapWithBoost(fieldQuery, entry.getValue()), BooleanClause.Occur.SHOULD);
        } catch (RuntimeException e) {
            // When lenient, the failing field simply contributes no clause.
            rethrowUnlessLenient(e);
        }
    }
    return super.simplify(bool.build());
}
/**
 * Dispatches to Lucene's SimpleQueryParser's newPrefixQuery, optionally
 * lowercasing the term first or trying to analyze terms. One SHOULD clause per
 * weighted field; on a runtime failure the method returns early with the
 * lenient-or-rethrow result (matching the original short-circuit behavior).
 */
@Override
public Query newPrefixQuery(String text) {
    final BooleanQuery.Builder bool = new BooleanQuery.Builder();
    bool.setDisableCoord(true);
    for (Map.Entry<String, Float> entry : weights.entrySet()) {
        final String fieldName = entry.getKey();
        try {
            if (!settings.analyzeWildcard()) {
                BytesRef normalized = getAnalyzer().normalize(fieldName, text);
                Query prefix = new PrefixQuery(new Term(fieldName, normalized));
                bool.add(wrapWithBoost(prefix, entry.getValue()), BooleanClause.Occur.SHOULD);
            } else {
                Query analyzed = newPossiblyAnalyzedQuery(fieldName, text);
                if (analyzed != null) {
                    bool.add(wrapWithBoost(analyzed, entry.getValue()), BooleanClause.Occur.SHOULD);
                }
            }
        } catch (RuntimeException e) {
            // Note: short-circuits the whole loop, unlike the fuzzy variant.
            return rethrowUnlessLenient(e);
        }
    }
    return super.simplify(bool.build());
}
/**
 * Fuzzy query for one field with a string-valued similarity. Mapped fields
 * normalize the term and use {@code Fuzziness.build}; unmapped fields fall
 * back to the default parser's fuzzy query. Lenient mode returns null on
 * runtime failure instead of rethrowing.
 */
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        // No mapping: defer to the superclass implementation.
        return super.getFuzzyQuery(field, termStr, Float.parseFloat(minSimilarity));
    }
    try {
        final BytesRef normalized = termStr != null ? getAnalyzer().normalize(field, termStr) : null;
        return currentFieldType.fuzzyQuery(normalized, Fuzziness.build(minSimilarity),
            getFuzzyPrefixLength(), settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
    } catch (RuntimeException e) {
        if (settings.lenient()) {
            return null;
        }
        throw e;
    }
}
/**
 * Single-field fuzzy query. The term is normalized with the forced analyzer
 * when configured, otherwise the mapped field's search analyzer. Missing
 * mappings yield an unmapped-field query; lenient mode converts runtime
 * failures into a lenient field query.
 */
private Query getFuzzyQuerySingle(String field, String termStr, float minSimilarity) throws ParseException {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        Analyzer normalizer;
        if (forceAnalyzer == null) {
            normalizer = queryBuilder.context.getSearchAnalyzer(currentFieldType);
        } else {
            normalizer = forceAnalyzer;
        }
        BytesRef termBytes = null;
        if (termStr != null) {
            termBytes = normalizer.normalize(field, termStr);
        }
        return currentFieldType.fuzzyQuery(termBytes, Fuzziness.fromEdits((int) minSimilarity),
            getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions);
    } catch (RuntimeException e) {
        if (!lenient) {
            throw e;
        }
        return newLenientFieldQuery(field, e);
    }
}
/**
 * Range query for one field with date-aware time-zone handling. Legacy and
 * current date field types honor the configured time zone; all other mapped
 * fields use the plain range query. Unmapped fields fall back to the default
 * parser's range query, and lenient mode returns null on runtime failure.
 */
private Query getRangeQuerySingle(String field, String part1, String part2,
                                  boolean startInclusive, boolean endInclusive,
                                  QueryShardContext context) {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newRangeQuery(field, part1, part2, startInclusive, endInclusive);
    }
    try {
        final BytesRef lower = part1 == null ? null : getAnalyzer().normalize(field, part1);
        final BytesRef upper = part2 == null ? null : getAnalyzer().normalize(field, part2);
        final Query rangeQuery;
        // Legacy date type check must come first to preserve dispatch order.
        if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
            LegacyDateFieldMapper.DateFieldType dateType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
            rangeQuery = dateType.rangeQuery(lower, upper, startInclusive, endInclusive, settings.timeZone(), null, context);
        } else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
            DateFieldMapper.DateFieldType dateType = (DateFieldMapper.DateFieldType) this.currentFieldType;
            rangeQuery = dateType.rangeQuery(lower, upper, startInclusive, endInclusive, settings.timeZone(), null, context);
        } else {
            rangeQuery = currentFieldType.rangeQuery(lower, upper, startInclusive, endInclusive, context);
        }
        return rangeQuery;
    } catch (RuntimeException e) {
        if (settings.lenient()) {
            return null;
        }
        throw e;
    }
}
/**
 * Builds a fuzzy query against a single mapped field. Term normalization uses
 * the forced analyzer if present, else the field's search analyzer; a null
 * term stays null. Unmapped fields produce an unmapped-field query, and
 * lenient mode downgrades runtime failures to a lenient field query.
 */
private Query getFuzzyQuerySingle(String field, String termStr, float minSimilarity) throws ParseException {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType == null) {
        return newUnmappedFieldQuery(field);
    }
    try {
        final Analyzer normalizer = (forceAnalyzer != null)
            ? forceAnalyzer
            : queryBuilder.context.getSearchAnalyzer(currentFieldType);
        final BytesRef normalized = (termStr == null) ? null : normalizer.normalize(field, termStr);
        final int edits = (int) minSimilarity;
        return currentFieldType.fuzzyQuery(normalized, Fuzziness.fromEdits(edits),
            getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions);
    } catch (RuntimeException e) {
        if (lenient) {
            return newLenientFieldQuery(field, e);
        }
        throw e;
    }
}