Refine search
/**
 * Ensure there are at least <code>maxBucketOrd</code> buckets available.
 */
public final void grow(long maxBucketOrd) {
    // Delegates over-allocation policy to BigArrays; a no-op when docCounts is already large enough.
    docCounts = bigArrays.grow(docCounts, maxBucketOrd);
}
/**
 * Shared constructor for t-digest based percentile aggregators.
 * Allocates the per-bucket digest array lazily sized (one slot) via the
 * context's {@code BigArrays}; it grows as bucket ordinals appear.
 */
public AbstractTDigestPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, SearchContext context,
        Aggregator parent, double[] keys, double compression, boolean keyed, DocValueFormat formatter,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    // One digest state per bucket ordinal; start with room for a single bucket.
    this.states = context.bigArrays().newObjectArray(1);
    this.valuesSource = valuesSource;
    this.keys = keys;
    this.compression = compression;
    this.keyed = keyed;
    this.formatter = formatter;
}
/**
 * Creates a hash of {@code BytesRef} keys backed by {@link BigArrays}.
 * Key bytes are stored contiguously in {@code bytes}; {@code startOffsets[i]}
 * marks where key {@code i} begins (one extra slot so the last key's end is known).
 */
public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    spare = new BytesRef();
    hashes = bigArrays.newIntArray(capacity, false);
    startOffsets = bigArrays.newLongArray(capacity + 1, false);
    startOffsets.set(0, 0);
    // Initial guess of 3 bytes of key data per entry; presumably grown on demand — confirm in insert path.
    bytes = bigArrays.newByteArray(capacity * 3, false);
}
/**
 * Metric aggregator computing the average of a numeric values source.
 * Per-bucket state (counts, sums, compensation terms — presumably for
 * compensated summation, confirm in collect()) is only allocated when a
 * values source is present; otherwise the aggregator is empty.
 */
public AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext context,
        Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
        throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.format = formatter;
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        final BigArrays arrays = context.bigArrays();
        // Zero-filled (clearOnResize = true) so untouched buckets read as empty.
        counts = arrays.newLongArray(1, true);
        sums = arrays.newDoubleArray(1, true);
        compensations = arrays.newDoubleArray(1, true);
    }
}
/**
 * Grow an array to a size that is larger than <code>minSize</code>,
 * preserving content, and potentially reusing part of the provided array.
 */
public LongArray grow(LongArray array, long minSize) {
    if (array.size() >= minSize) {
        // Already big enough — nothing to do.
        return array;
    }
    // Over-allocate so a sequence of grow() calls is amortized O(1) per element.
    return resize(array, overSize(minSize, PageCacheRecycler.LONG_PAGE_SIZE, Long.BYTES));
}
/**
 * Metric aggregator computing a weighted average over a pair of numeric
 * values sources (value and weight). Per-bucket running state — weights,
 * sums, and their compensation terms — is allocated only when the values
 * sources are present.
 */
public WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources,
        DocValueFormat format, SearchContext context, Aggregator parent,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.format = format;
    this.valuesSources = valuesSources;
    if (valuesSources != null) {
        final BigArrays arrays = context.bigArrays();
        // Zero-filled so untouched bucket ordinals read as empty.
        weights = arrays.newDoubleArray(1, true);
        weightCompensations = arrays.newDoubleArray(1, true);
        sums = arrays.newDoubleArray(1, true);
        sumCompensations = arrays.newDoubleArray(1, true);
    }
}
/**
 * Base class for aggregators that assign documents to buckets.
 * Maintains a per-bucket-ordinal doc count and a consumer used to enforce
 * the multi-bucket limit; the consumer is a no-op when the context carries
 * no aggregations (e.g. in some test setups — confirm).
 */
public BucketsAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, factories, context, parent, pipelineAggregators, metaData);
    bigArrays = context.bigArrays();
    // Zero-filled; grown as bucket ordinals are created.
    docCounts = bigArrays.newIntArray(1, true);
    if (context.aggregations() != null) {
        multiBucketConsumer = context.aggregations().multiBucketConsumer();
    } else {
        // No aggregation context: accept any number of buckets.
        multiBucketConsumer = (count) -> {};
    }
}
/**
 * Metric aggregator computing the centroid of geo-point values.
 * Per-bucket state (encoded centroids and point counts) is allocated only
 * when a values source exists.
 */
GeoCentroidAggregator(String name, SearchContext context, Aggregator parent, ValuesSource.GeoPoint valuesSource,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        final BigArrays arrays = context.bigArrays();
        // Zero-filled long arrays; centroids presumably hold lat/lon packed into a long — confirm in collect().
        counts = arrays.newLongArray(1, true);
        centroids = arrays.newLongArray(1, true);
    }
}
/**
 * Terms-aggregator variant optimized for fields with few distinct ordinals:
 * doc counts are first accumulated per segment ordinal and later mapped to
 * global ordinals. Sub-aggregations are not supported in this mode.
 */
LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
        BucketOrder order, DocValueFormat format, BucketCountThresholds bucketCountThresholds, SearchContext context,
        Aggregator parent, boolean forceDenseMode, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, factories, valuesSource, order, format, bucketCountThresholds, null, context, parent, forceDenseMode,
            collectionMode, showTermDocCountError, pipelineAggregators, metaData);
    // This fast path only works without sub-aggregators.
    assert factories == null || factories.countAggregators() == 0;
    // Zero-filled, grown per segment to the segment's ordinal count.
    this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
}
/**
 * Open-addressed map from {@code long} keys to objects, backed by paged
 * {@link BigArrays} storage. Parallel key/value arrays are sized to the
 * table capacity computed by the superclass.
 */
public LongObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // capacity() reflects the actual table size chosen by the base class, not the requested capacity.
    values = bigArrays.newObjectArray(capacity());
    keys = bigArrays.newLongArray(capacity(), false);
}
/**
 * Wraps a per-bucket aggregator so that one delegate instance exists per
 * owning bucket ordinal. Registers itself with the search context so the
 * paged arrays are released at the end of the phase.
 */
MultiBucketAggregatorWrapper(BigArrays bigArrays, SearchContext context, Aggregator parent,
        AggregatorFactory<?> factory, Aggregator first) {
    this.bigArrays = bigArrays;
    this.first = first;
    this.factory = factory;
    this.parent = parent;
    // Tie our lifetime to the phase before allocating, so the arrays are freed even on failure.
    context.addReleasable(this, Lifetime.PHASE);
    aggregators = bigArrays.newObjectArray(1);
    aggregators.set(0, first);
    collectors = bigArrays.newObjectArray(1);
}
/**
 * Collects parent-document global ordinals together with their scores.
 * Parent ordinals are deduplicated through a {@code LongHash}; the score
 * array is indexed by the hash slot of the corresponding ordinal.
 */
ParentOrdAndScoreCollector(SearchContext searchContext, IndexParentChildFieldData globalIfd, String parentType) {
    this.globalIfd = globalIfd;
    this.parentType = parentType;
    this.bigArrays = searchContext.bigArrays();
    // 512 is just an initial capacity; both structures grow on demand.
    this.parentIdxs = new LongHash(512, bigArrays);
    this.scores = bigArrays.newFloatArray(512, false);
}
/** Resize the array to the exact provided size. */
public FloatArray resize(FloatArray array, long size) {
    if (array instanceof BigFloatArray) {
        // Multi-page arrays can be resized page-by-page without a full copy.
        return resizeInPlace((BigFloatArray) array, size);
    }
    // Single-page implementation: allocate a replacement (inheriting the
    // clear-on-resize flag), copy the overlapping prefix, release the old one.
    final AbstractArray abstractArray = (AbstractArray) array;
    final FloatArray resized = newFloatArray(size, abstractArray.clearOnResize);
    final long copyLen = Math.min(size, array.size());
    for (long i = 0; i < copyLen; i++) {
        resized.set(i, array.get(i));
    }
    abstractArray.close();
    return resized;
}
/** Resize the array to the exact provided size. */
public <T> ObjectArray<T> resize(ObjectArray<T> array, long size) {
    if (array instanceof BigObjectArray) {
        // Multi-page arrays are resized in place, page by page.
        return resizeInPlace((BigObjectArray<T>) array, size);
    }
    // Single-page implementation: copy the overlapping prefix into a fresh
    // array and release the old one.
    final ObjectArray<T> resized = newObjectArray(size);
    final long copyLen = Math.min(size, array.size());
    for (long i = 0; i < copyLen; i++) {
        resized.set(i, array.get(i));
    }
    array.close();
    return resized;
}
/**
 * Terms enum that caches per-term doc frequencies and/or total term
 * frequencies, looked up by term ordinal in a {@code BytesRefHash}.
 * Frequency arrays are only allocated for the statistics actually requested.
 */
public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq,
        @Nullable Query filter, BigArrays bigArrays) throws IOException {
    // Only ask the postings for frequencies when total term freq is needed.
    super(reader, field, needTotalTermFreq ? PostingsEnum.FREQS : PostingsEnum.NONE, filter);
    this.bigArrays = bigArrays;
    this.needDocFreqs = needDocFreq;
    this.needTotalTermFreqs = needTotalTermFreq;
    termDocFreqs = needDocFreq ? bigArrays.newIntArray(INITIAL_NUM_TERM_FREQS_CACHED, false) : null;
    termsTotalFreqs = needTotalTermFreq ? bigArrays.newLongArray(INITIAL_NUM_TERM_FREQS_CACHED, false) : null;
    cachedTermOrds = new BytesRefHash(INITIAL_NUM_TERM_FREQS_CACHED, bigArrays);
}
// Fragment (block continues beyond this view): lazily grow the six per-bucket
// extent arrays when a new bucket ordinal appears. `tops` is grown first and
// the other five are resized to match it, so all arrays stay the same length.
// New slots are seeded with identity values — NEGATIVE_INFINITY where a
// maximum will be accumulated, POSITIVE_INFINITY where a minimum will be —
// presumably for geo bounding-box tracking (confirm against the collect loop).
if (bucket >= tops.size()) { long from = tops.size(); tops = bigArrays.grow(tops, bucket + 1); tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY); bottoms = bigArrays.resize(bottoms, tops.size()); bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY); posLefts = bigArrays.resize(posLefts, tops.size()); posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY); posRights = bigArrays.resize(posRights, tops.size()); posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY); negLefts = bigArrays.resize(negLefts, tops.size()); negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY); negRights = bigArrays.resize(negRights, tops.size()); negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
/**
 * Allocate a new {@link LongArray}.
 * @param size the initial length of the array
 */
public LongArray newLongArray(long size) {
    // Delegates with the second argument set to true — presumably clearOnResize
    // (zero-fill), matching the "initialized with zeros" newByteArray overload.
    return newLongArray(size, true);
}
/**
 * Sole constructor.
 *
 * @param shardSize the number of top-scoring docs to collect for each bucket
 */
BestDocsDeferringCollector(int shardSize, BigArrays bigArrays) {
    this.bigArrays = bigArrays;
    this.shardSize = shardSize;
    // One sample collector per bucket ordinal; starts with a single slot and grows on demand.
    perBucketSamples = bigArrays.newObjectArray(1);
}
/**
 * Allocate a new {@link ByteArray} initialized with zeros.
 * @param size the initial length of the array
 */
public ByteArray newByteArray(long size) {
    // true = clearOnResize: the array (and any future growth) is zero-filled.
    return newByteArray(size, true);
}
/** Resize the array to the exact provided size. */
public ByteArray resize(ByteArray array, long size) {
    if (array instanceof BigByteArray) {
        // Multi-page arrays are resized page-by-page without a full copy.
        return resizeInPlace((BigByteArray) array, size);
    }
    // Single-page arrays are backed by a plain byte[] (ByteArrayWrapper):
    // allocate a replacement (inheriting the clear-on-resize flag), bulk-copy
    // the overlapping prefix, then release the old array.
    final AbstractArray abstractArray = (AbstractArray) array;
    final ByteArray resized = newByteArray(size, abstractArray.clearOnResize);
    final byte[] raw = ((ByteArrayWrapper) array).array;
    resized.set(0, raw, 0, (int) Math.min(raw.length, resized.size()));
    abstractArray.close();
    return resized;
}