/**
 * Builds the paged-bytes field data for the given field, carrying over the
 * builder's frequency filter settings.
 */
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
        IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
    final String fieldName = fieldType.name();
    return new PagedBytesIndexFieldData(
            indexSettings, fieldName, cache, breakerService, minFrequency, maxFrequency, minSegmentSize);
}
}
// Apply the configured frequency/regex filter to the raw terms iterator.
// NOTE(review): `filter` may return the same instance when no filtering is
// needed — `filtered` records whether wrapping actually happened.
TermsEnum filteredIterator = filter(terms, iterator, reader);
final boolean filtered = iterator != filteredIterator;
iterator = filteredIterator;
/**
 * Creates a {@link SortField} that sorts on this field's bytes values.
 */
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested, boolean reverse) {
    // Comparator source resolving BytesRef values from this field data.
    final XFieldComparatorSource comparatorSource =
            new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested);
    return new SortField(getFieldName(), comparatorSource, reverse);
}
/**
 * Estimates the memory required to load this field's entire term set into
 * field data.
 *
 * @return the estimated size in bytes, or 0 if the estimate is unavailable
 */
public long estimateStringFieldData() {
    try {
        LeafReader reader = context.reader();
        // A single lookup suffices: LeafReader.terms(field) resolves through
        // reader.fields(), so the original second fetch via fields().terms(...)
        // returned the very same Terms instance.
        Terms terms = reader.terms(getFieldNames().indexName());
        // Only the FST-backed FieldReader exposes exact on-disk term statistics.
        if (terms instanceof FieldReader) {
            final Stats stats = ((FieldReader) terms).getStats();
            long totalTermBytes = stats.totalTermBytes;
            if (logger.isTraceEnabled()) {
                logger.trace("totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}",
                        totalTermBytes, terms.size(), terms.getSumDocFreq());
            }
            // Heuristic: raw term bytes plus per-term (2B) and per-posting (4B) overhead.
            long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq());
            return totalBytes;
        }
    } catch (Exception e) {
        // Best effort: an estimation failure must not fail the load itself.
        logger.warn("Unable to estimate memory overhead", e);
    }
    return 0;
}
// Advances to the next segment: reloads the per-segment key ordinals and the
// distinct-value bytes views consumed by the histogram processor.
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
    keyValues = keyIndexFieldData.load(context).getLongValues();
    histoProc.valueValues = distinctIndexFieldData.load(context).getBytesValues();
}
// Field data for this segment; replaced by the shared empty instance when the
// field has no terms here.
AtomicOrdinalsFieldData data = null;
// Estimator charges the fielddata circuit breaker while terms are consumed.
PagedBytesEstimator estimator = new PagedBytesEstimator(context,
        breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldNames().fullName());
Terms terms = reader.terms(getFieldNames().indexName());
if (terms == null) {
    data = AbstractAtomicOrdinalsFieldData.empty();
/**
 * Estimates the memory required to load this field's entire term set into
 * field data.
 *
 * @return the estimated size in bytes, or 0 if the estimate is unavailable
 */
public long estimateStringFieldData() {
    try {
        LeafReader reader = context.reader();
        // Look the field's terms up once — the original fetched the identical
        // Terms instance twice via two reader.terms(getFieldName()) calls.
        final Terms terms = reader.terms(getFieldName());
        // Only the FST-backed FieldReader exposes exact on-disk term statistics.
        if (terms instanceof FieldReader) {
            final Stats stats = ((FieldReader) terms).getStats();
            long totalTermBytes = stats.totalTermBytes;
            if (logger.isTraceEnabled()) {
                logger.trace("totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}",
                        totalTermBytes, terms.size(), terms.getSumDocFreq());
            }
            // Heuristic: raw term bytes plus per-term (2B) and per-posting (4B) overhead.
            long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq());
            return totalBytes;
        }
    } catch (Exception e) {
        // Best effort: an estimation failure must not fail the load itself.
        logger.warn("Unable to estimate memory overhead", e);
    }
    return 0;
}
/**
 * Creates the paged-bytes implementation for this field, forwarding the
 * builder's frequency filter configuration.
 */
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
        IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
    return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), cache, breakerService,
            minFrequency, maxFrequency, minSegmentSize);
}
}
// Wrap the terms iterator with the configured filter; when `filter` returns
// the same instance nothing was filtered.
TermsEnum filteredIterator = filter(terms, iterator, reader);
final boolean filtered = iterator != filteredIterator;
iterator = filteredIterator;
// Estimator charges the fielddata circuit breaker while terms are consumed.
new PagedBytesEstimator(context,
        breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldName());
Terms terms = reader.terms(getFieldName());
if (terms == null) {
    // Field has no terms in this segment — use the shared empty instance.
    data = AbstractAtomicOrdinalsFieldData.empty();
/**
 * Builds a {@link PagedBytesIndexFieldData} for the mapped field using the
 * configured frequency bounds and minimum segment size.
 */
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
        IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
    final String name = fieldType.name();
    return new PagedBytesIndexFieldData(indexSettings, name, cache, breakerService,
            minFrequency, maxFrequency, minSegmentSize);
}
}
// Apply the frequency filter; identity comparison detects whether the
// returned enum actually wraps the original.
TermsEnum filteredIterator = filter(terms, iterator, reader);
final boolean filtered = iterator != filteredIterator;
iterator = filteredIterator;
/**
 * Builds a {@link SortField} backed by a BytesRef comparator over this field.
 */
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested, boolean reverse) {
    XFieldComparatorSource source =
            new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested);
    // `reverse` flips the sort order; missing values are handled by the source.
    return new SortField(getFieldName(), source, reverse);
}
/**
 * Instantiates the paged-bytes field data, passing through the frequency
 * filter thresholds captured by this builder.
 */
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
        IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
    return new PagedBytesIndexFieldData(
            indexSettings,
            fieldType.name(),
            cache,
            breakerService,
            minFrequency,
            maxFrequency,
            minSegmentSize);
}
}
// Run the iterator through the configured filter and remember whether it
// was actually replaced.
TermsEnum filteredIterator = filter(terms, iterator, reader);
final boolean filtered = iterator != filteredIterator;
iterator = filteredIterator;
/**
 * Returns a {@link SortField} that orders documents by this field's bytes
 * values, honoring the given missing value, sort mode, and nesting.
 */
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested, boolean reverse) {
    final XFieldComparatorSource src =
            new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested);
    return new SortField(getFieldName(), src, reverse);
}
/**
 * Legacy builder entry point: creates the paged-bytes field data from the
 * index-level settings and the field's names/fielddata type.
 */
@Override
public IndexOrdinalsFieldData build(Index index, Settings indexSettings, MappedFieldType fieldType,
        IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
    return new PagedBytesIndexFieldData(index, indexSettings, fieldType.names(),
            fieldType.fieldDataType(), cache, breakerService);
}
}
// With a filter present the exact size can't be pre-estimated, so the enum
// accounts RAM against the breaker as terms are consumed.
logger.trace("Filter exists, can't circuit break normally, using RamAccountingTermsEnum");
return new RamAccountingTermsEnum(filter(terms, reader), breaker, this, this.fieldName); } else {
    // No filter: pre-charge the breaker with a whole-field estimate first.
    estimatedBytes = this.estimateStringFieldData();
    return new RamAccountingTermsEnum(filter(terms, reader), breaker, this, this.fieldName);
    // NOTE(review): the statement below is unreachable dead code (it follows a
    // return in the same branch) — likely a leftover from an earlier revision;
    // confirm and remove.
    return filter(terms, reader);
/**
 * Produces the Lucene {@link SortField} used to sort on this field.
 */
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested, boolean reverse) {
    return new SortField(
            getFieldName(),
            new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested),
            reverse);
}
/**
 * Estimates the memory needed to load the entire term set of this field into
 * field data.
 *
 * @return the estimated size in bytes, or 0 if the estimate is unavailable
 */
public long estimateStringFieldData() {
    try {
        LeafReader reader = context.reader();
        // Single lookup — the original called reader.terms(getFieldName())
        // twice and received the same Terms instance both times.
        final Terms terms = reader.terms(getFieldName());
        // Exact term statistics are only available from the FST-backed FieldReader.
        if (terms instanceof FieldReader) {
            final Stats stats = ((FieldReader) terms).getStats();
            long totalTermBytes = stats.totalTermBytes;
            if (logger.isTraceEnabled()) {
                logger.trace("totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}",
                        totalTermBytes, terms.size(), terms.getSumDocFreq());
            }
            // Heuristic: raw term bytes plus per-term (2B) and per-posting (4B) overhead.
            long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq());
            return totalBytes;
        }
    } catch (Exception e) {
        // Best effort: a failed estimate must not abort the field-data load.
        logger.warn("Unable to estimate memory overhead", e);
    }
    return 0;
}