/**
 * Allocate a new {@link IntArray}.
 * @param size the initial length of the array
 */
public IntArray newIntArray(long size) {
    return newIntArray(size, true);
}
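The one-argument overload above simply delegates to newIntArray(size, true), so the returned slots start cleared. Below is a minimal usage sketch, not taken from the source above; it assumes BigArrays.NON_RECYCLING_INSTANCE as the allocator and relies only on IntArray operations that appear in the snippets in this section (set, increment, get, and try-with-resources release).

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.IntArray;

public class NewIntArrayExample {
    public static void main(String[] args) {
        // assumption: the non-recycling BigArrays singleton is a suitable stand-in for an injected instance
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
        // the single-argument overload clears content, mirroring newIntArray(size, true)
        try (IntArray counts = bigArrays.newIntArray(16)) {
            counts.set(3, 42);
            counts.increment(3, 1);
            System.out.println(counts.get(3)); // 43
        } // IntArray is Releasable, so try-with-resources frees the underlying storage
    }
}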
Hashset(long initialBucketCount) {
    capacity = m / 4; // because ints take 4 bytes
    threshold = (int) (capacity * MAX_LOAD_FACTOR);
    mask = capacity - 1;
    sizes = bigArrays.newIntArray(initialBucketCount);
    readSpare = new BytesRef();
    writeSpare = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
}
LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
        BucketOrder order, DocValueFormat format, BucketCountThresholds bucketCountThresholds,
        SearchContext context, Aggregator parent, boolean forceDenseMode, SubAggCollectionMode collectionMode,
        boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    super(name, factories, valuesSource, order, format, bucketCountThresholds, null, context, parent,
            forceDenseMode, collectionMode, showTermDocCountError, pipelineAggregators, metaData);
    assert factories == null || factories.countAggregators() == 0;
    this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
}
public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq,
        @Nullable Query filter, BigArrays bigArrays) throws IOException {
    super(reader, field, needTotalTermFreq ? PostingsEnum.FREQS : PostingsEnum.NONE, filter);
    this.bigArrays = bigArrays;
    this.needDocFreqs = needDocFreq;
    this.needTotalTermFreqs = needTotalTermFreq;
    if (needDocFreq) {
        termDocFreqs = bigArrays.newIntArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termDocFreqs = null;
    }
    if (needTotalTermFreq) {
        termsTotalFreqs = bigArrays.newLongArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termsTotalFreqs = null;
    }
    cachedTermOrds = new BytesRefHash(INITIAL_NUM_TERM_FREQS_CACHED, bigArrays);
}
public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) {
    super(capacity, maxLoadFactor, bigArrays);
    startOffsets = bigArrays.newLongArray(capacity + 1, false);
    startOffsets.set(0, 0);
    bytes = bigArrays.newByteArray(capacity * 3, false);
    hashes = bigArrays.newIntArray(capacity, false);
    spare = new BytesRef();
}
/** Resize the array to the exact provided size. */
public IntArray resize(IntArray array, long size) {
    if (array instanceof BigIntArray) {
        return resizeInPlace((BigIntArray) array, size);
    } else {
        AbstractArray arr = (AbstractArray) array;
        final IntArray newArray = newIntArray(size, arr.clearOnResize);
        for (long i = 0, end = Math.min(size, array.size()); i < end; ++i) {
            newArray.set(i, array.get(i));
        }
        array.close();
        return newArray;
    }
}
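An illustrative sketch of the resize contract documented above: surviving indices keep their values, and whether newly exposed slots are zeroed follows the clearOnResize flag chosen at allocation. BigArrays.NON_RECYCLING_INSTANCE is assumed here as the allocator; resize takes ownership of the old array, so the caller only keeps the returned handle.

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.IntArray;

public class ResizeExample {
    public static void main(String[] args) {
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE; // assumption: non-recycling allocator
        IntArray a = bigArrays.newIntArray(4, true);            // clearOnResize = true
        for (long i = 0; i < a.size(); i++) {
            a.set(i, (int) i + 1);                              // a = [1, 2, 3, 4]
        }
        a = bigArrays.resize(a, 8);   // grow: indices 0..3 are copied, 4..7 are zeroed
        a = bigArrays.resize(a, 2);   // shrink: only indices 0..1 survive
        System.out.println(a.get(1)); // 2
        a.close();
    }
}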
/**
 * Constructs a composite queue with the specified size and sources.
 *
 * @param bigArrays the {@link BigArrays} instance used to allocate the per-bucket doc counts
 * @param sources the per-dimension value sources used to build the composite buckets
 * @param size the number of composite buckets to keep
 * @param afterKey the composite key to resume after, or null if none was provided
 */
CompositeValuesCollectorQueue(BigArrays bigArrays, SingleDimensionValuesSource<?>[] sources, int size, CompositeKey afterKey) {
    this.bigArrays = bigArrays;
    this.maxSize = size;
    this.arrays = sources;
    this.keys = new TreeMap<>(this::compare);
    if (afterKey != null) {
        assert afterKey.size() == sources.length;
        afterKeyIsSet = true;
        for (int i = 0; i < afterKey.size(); i++) {
            sources[i].setAfter(afterKey.get(i));
        }
    }
    this.docCounts = bigArrays.newIntArray(1, false);
}
public BucketsAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, factories, context, parent, pipelineAggregators, metaData);
    bigArrays = context.bigArrays();
    docCounts = bigArrays.newIntArray(1, true);
    if (context.aggregations() != null) {
        multiBucketConsumer = context.aggregations().multiBucketConsumer();
    } else {
        multiBucketConsumer = (count) -> {};
    }
}
public final void mergeBuckets(long[] mergeMap, long newNumBuckets) {
    try (IntArray oldDocCounts = docCounts) {
        docCounts = bigArrays.newIntArray(newNumBuckets, true);
        docCounts.fill(0, newNumBuckets, 0);
        for (int i = 0; i < oldDocCounts.size(); i++) {
            int docCount = oldDocCounts.get(i);
            if (docCount != 0) {
                // fold the old bucket ordinal i into its new ordinal mergeMap[i]
                docCounts.increment(mergeMap[i], docCount);
            }
        }
    }
}
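For illustration only, here is the same merge-map idea rendered with plain Java arrays and hypothetical values: each old bucket ordinal i is folded into mergeMap[i], and counts of buckets that map to the same new ordinal add up.

public class MergeMapExample {
    public static void main(String[] args) {
        int[] oldDocCounts = {5, 0, 7, 3};  // hypothetical per-bucket doc counts
        long[] mergeMap = {0, 0, 1, 1};     // old ordinal -> new ordinal
        int newNumBuckets = 2;

        int[] newDocCounts = new int[newNumBuckets];
        for (int i = 0; i < oldDocCounts.length; i++) {
            if (oldDocCounts[i] != 0) {     // empty buckets are skipped, exactly as mergeBuckets does
                newDocCounts[(int) mergeMap[i]] += oldDocCounts[i];
            }
        }
        System.out.println(java.util.Arrays.toString(newDocCounts)); // [5, 10]
    }
}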
public IntArray values(final long bucket) {
    final int size = size(bucket);
    final IntArray values = bigArrays.newIntArray(size);
    if (size == 0) {
        return values;
    }
    int i = 0;
    // copy the non-empty slots of this bucket (0 marks an empty slot) into a dense array
    for (int j = 0; j < capacity; ++j) {
        final int k = get(bucket, j);
        if (k != 0) {
            values.set(i++, k);
        }
    }
    assert i == values.size();
    return values;
}
@Override
public IntArray newIntArray(long size, boolean clearOnResize) {
    final IntArrayWrapper array = new IntArrayWrapper(super.newIntArray(size, clearOnResize), clearOnResize);
    if (!clearOnResize) {
        // when callers opt out of clearing, fill with random content so tests catch reads of uninitialized slots
        array.randomizeContent(0, size);
    }
    return array;
}