/**
 * Allocate a new {@link LongArray}.
 * @param size the initial length of the array
 */
public LongArray newLongArray(long size) {
    return newLongArray(size, true);
}
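// A minimal usage sketch for newLongArray (not part of the listing above; it
// assumes BigArrays.NON_RECYCLING_INSTANCE, which skips circuit-breaker
// accounting and is handy for examples and tests).
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;

class NewLongArrayExample {
    public static void main(String[] args) {
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
        // the one-argument overload delegates to newLongArray(size, true),
        // so the array starts zero-filled
        try (LongArray array = bigArrays.newLongArray(16)) {
            array.set(0, 42L);
            long value = array.get(0); // 42
        }
    }
}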
public LongHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // keys are always written before they are read, so skip zero-clearing
    keys = bigArrays.newLongArray(capacity, false);
}
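// A hedged sketch of LongHash in use: add() returns a fresh ordinal (>= 0)
// for a new key and -1 - ord for a duplicate; find() looks a key up without
// inserting. Assumes the non-recycling BigArrays instance.
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongHash;

class LongHashExample {
    public static void main(String[] args) {
        try (LongHash hash = new LongHash(16, 0.6f, BigArrays.NON_RECYCLING_INSTANCE)) {
            long ord = hash.add(42L);    // new key: ord >= 0
            long dup = hash.add(42L);    // duplicate: -1 - ord
            long found = hash.find(42L); // ord, or -1 if absent
        }
    }
}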
public BitArray(int initialSize, BigArrays bigArrays) {
    this.bigArrays = bigArrays;
    // the backing words must start cleared: bits may be read before being set
    this.bits = bigArrays.newLongArray(initialSize, true);
}
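// A sketch of the bit set in use. The per-index set/get/clear methods are an
// assumption based on the composite aggregation's BitArray, and the class may
// not be visible outside its own package; treat this as illustrative only.
import org.elasticsearch.common.util.BigArrays;

class BitArrayExample {
    public static void main(String[] args) {
        try (BitArray bits = new BitArray(8, BigArrays.NON_RECYCLING_INSTANCE)) {
            bits.set(3);                 // mark slot 3
            boolean isSet = bits.get(3); // true
            bits.clear(3);               // unmark it
        }
    }
}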
GlobalOrdinalValuesSource(BigArrays bigArrays, MappedFieldType type,
        CheckedFunction<LeafReaderContext, SortedSetDocValues, IOException> docValuesFunc,
        DocValueFormat format, boolean missingBucket, Object missing, int size, int reverseMul) {
    super(bigArrays, format, type, missingBucket, missing, size, reverseMul);
    this.docValuesFunc = docValuesFunc;
    // start small and grow on demand rather than allocating `size` slots up front
    this.values = bigArrays.newLongArray(Math.min(size, 100), false);
}
GeoCentroidAggregator(String name, SearchContext context, Aggregator parent,
        ValuesSource.GeoPoint valuesSource, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        final BigArrays bigArrays = context.bigArrays();
        // one encoded centroid and one count per bucket, zero-initialized
        centroids = bigArrays.newLongArray(1, true);
        counts = bigArrays.newLongArray(1, true);
    }
}
public ValueCountAggregator(String name, ValuesSource valuesSource,
        SearchContext aggregationContext, Aggregator parent,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, aggregationContext, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        counts = context.bigArrays().newLongArray(1, true);
    }
}
LongValuesSource(BigArrays bigArrays, MappedFieldType fieldType,
        CheckedFunction<LeafReaderContext, SortedNumericDocValues, IOException> docValuesFunc,
        LongUnaryOperator rounding, DocValueFormat format, boolean missingBucket,
        Object missing, int size, int reverseMul) {
    super(bigArrays, format, fieldType, missingBucket, missing, size, reverseMul);
    this.bigArrays = bigArrays;
    this.docValuesFunc = docValuesFunc;
    this.rounding = rounding;
    // the bit array is only needed when missing buckets are tracked
    this.bits = missingBucket ? new BitArray(Math.min(size, 100), bigArrays) : null;
    this.values = bigArrays.newLongArray(Math.min(size, 100), false);
}
AbstractHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // ids are read before they are written, so the array must start cleared
    ids = bigArrays.newLongArray(capacity(), true);
}
public LongObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // keys need not be cleared: a slot is only read once its value is non-null
    keys = bigArrays.newLongArray(capacity(), false);
    values = bigArrays.newObjectArray(capacity());
}
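// A hedged sketch of LongObjectPagedHashMap: put() maps a primitive long key
// to an object value and returns any previous mapping; get() looks it up.
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongObjectPagedHashMap;

class LongObjectMapExample {
    public static void main(String[] args) {
        try (LongObjectPagedHashMap<String> map =
                 new LongObjectPagedHashMap<>(16, 0.6f, BigArrays.NON_RECYCLING_INSTANCE)) {
            map.put(7L, "seven");
            String value = map.get(7L); // "seven"
        }
    }
}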
public AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
        SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    this.format = formatter;
    if (valuesSource != null) {
        final BigArrays bigArrays = context.bigArrays();
        counts = bigArrays.newLongArray(1, true);
        sums = bigArrays.newDoubleArray(1, true);
        // per-bucket compensation terms for Kahan summation
        compensations = bigArrays.newDoubleArray(1, true);
    }
}
public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq,
        @Nullable Query filter, BigArrays bigArrays) throws IOException {
    super(reader, field, needTotalTermFreq ? PostingsEnum.FREQS : PostingsEnum.NONE, filter);
    this.bigArrays = bigArrays;
    this.needDocFreqs = needDocFreq;
    this.needTotalTermFreqs = needTotalTermFreq;
    if (needDocFreq) {
        termDocFreqs = bigArrays.newIntArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termDocFreqs = null;
    }
    if (needTotalTermFreq) {
        termsTotalFreqs = bigArrays.newLongArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termsTotalFreqs = null;
    }
    cachedTermOrds = new BytesRefHash(INITIAL_NUM_TERM_FREQS_CACHED, bigArrays);
}
/** Resize the array to the exact provided size. */
public LongArray resize(LongArray array, long size) {
    if (array instanceof BigLongArray) {
        return resizeInPlace((BigLongArray) array, size);
    } else {
        AbstractArray arr = (AbstractArray) array;
        final LongArray newArray = newLongArray(size, arr.clearOnResize);
        for (long i = 0, end = Math.min(size, array.size()); i < end; ++i) {
            newArray.set(i, array.get(i));
        }
        array.close();
        return newArray;
    }
}
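// A minimal sketch of resize() from the caller's side. resize() closes the
// old array, so only the returned reference remains valid afterwards.
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;

class ResizeExample {
    public static void main(String[] args) {
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
        LongArray array = bigArrays.newLongArray(4);
        array.set(3, 99L);
        array = bigArrays.resize(array, 8); // existing values are copied over
        long kept = array.get(3);           // still 99
        array.close();
    }
}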
public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    // startOffsets[i] is where the i-th key begins in `bytes`; the extra slot
    // holds the end offset of the last key, and slot 0 starts at 0
    startOffsets = bigArrays.newLongArray(capacity + 1, false);
    startOffsets.set(0, 0);
    bytes = bigArrays.newByteArray(capacity * 3, false);
    hashes = bigArrays.newIntArray(capacity, false);
    spare = new BytesRef();
}
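// A hedged sketch of BytesRefHash: add() interns a key and returns its
// ordinal (or -1 - ord when already present); get(ord, spare) reads it back.
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;

class BytesRefHashExample {
    public static void main(String[] args) {
        try (BytesRefHash hash = new BytesRefHash(16, BigArrays.NON_RECYCLING_INSTANCE)) {
            long ord = hash.add(new BytesRef("foo")); // new key: ord >= 0
            BytesRef spare = new BytesRef();
            hash.get(ord, spare);                     // spare now holds "foo"
        }
    }
}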
public StatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat format,
        SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        final BigArrays bigArrays = context.bigArrays();
        counts = bigArrays.newLongArray(1, true);
        sums = bigArrays.newDoubleArray(1, true);
        compensations = bigArrays.newDoubleArray(1, true);
        // mins/maxes are filled with sentinels explicitly, so skip zero-clearing
        mins = bigArrays.newDoubleArray(1, false);
        mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
        maxes = bigArrays.newDoubleArray(1, false);
        maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
    }
    this.format = format;
}
public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
        SearchContext context, Aggregator parent, double sigma,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    this.format = formatter;
    this.sigma = sigma;
    if (valuesSource != null) {
        final BigArrays bigArrays = context.bigArrays();
        counts = bigArrays.newLongArray(1, true);
        sums = bigArrays.newDoubleArray(1, true);
        compensations = bigArrays.newDoubleArray(1, true);
        mins = bigArrays.newDoubleArray(1, false);
        mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
        maxes = bigArrays.newDoubleArray(1, false);
        maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
        sumOfSqrs = bigArrays.newDoubleArray(1, true);
        compensationOfSqrs = bigArrays.newDoubleArray(1, true);
    }
}
@Override
public void postCollect() throws IOException {
    // union of the ordinals seen by any bucket
    final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
    for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
        final FixedBitSet bits = visitedOrds.get(bucket);
        if (bits != null) {
            allVisitedOrds.or(bits);
        }
    }

    // hash each visited term once, then replay the per-bucket ordinals
    final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash =
            new org.elasticsearch.common.hash.MurmurHash3.Hash128();
    try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
        for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS;
                ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
            final BytesRef value = values.lookupOrd(ord);
            org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
            hashes.set(ord, hash.h1);
        }
        for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
            final FixedBitSet bits = visitedOrds.get(bucket);
            if (bits != null) {
                for (int ord = bits.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS;
                        ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
                    counts.collect(bucket, hashes.get(ord));
                }
            }
        }
    }
}
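// A minimal sketch of the hashing primitive used above: MurmurHash3.hash128
// fills a reusable Hash128 with a 128-bit hash of a byte range; postCollect
// keeps only the lower 64 bits (h1) per ordinal.
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.hash.MurmurHash3;

class Hash128Example {
    public static void main(String[] args) {
        BytesRef value = new BytesRef("example-term");
        MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
        MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
        long h1 = hash.h1; // the 64-bit slice stored in the hashes LongArray
    }
}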