@Override
public void ensureCapacity(long size)
{
    // Grow monotonically: never shrink below the number of groups already tracked.
    long target = Math.max(numberOfGroups, size);
    numberOfGroups = target;
    headPointers.ensureCapacity(target);
    valueAndGroupHashes.ensureCapacity(target);
}
@Override
public void ensureCapacity(long size)
{
    // Capacity only ever grows; keep both per-group arrays in sync with it.
    long target = Math.max(numberOfGroups, size);
    numberOfGroups = target;
    headPointers.ensureCapacity(target);
    valueAndGroupHashes.ensureCapacity(target);
}
private void resizeNodeArrays(int newBucketCount) { // every per-bucket array needs to be updated counts.ensureCapacity(newBucketCount); valuePositions.ensureCapacity(newBucketCount); nextPointers.ensureCapacity(newBucketCount); valueAndGroupHashes.ensureCapacity(newBucketCount); groupIds.ensureCapacity(newBucketCount); }
private void resizeNodeArrays(int newBucketCount) { // every per-bucket array needs to be updated counts.ensureCapacity(newBucketCount); valuePositions.ensureCapacity(newBucketCount); nextPointers.ensureCapacity(newBucketCount); valueAndGroupHashes.ensureCapacity(newBucketCount); groupIds.ensureCapacity(newBucketCount); }
@Override
public void ensureCapacity(long size)
{
    // The overflow flags and the unscaled decimal storage share one capacity.
    overflows.ensureCapacity(size);
    unscaledDecimals.ensureCapacity(size);
}
@Override
public void ensureCapacity(long size)
{
    // Grow this class's extra long column, then delegate to the parent's arrays.
    longs.ensureCapacity(size);
    super.ensureCapacity(size);
}
@Override
public void ensureCapacity(long size)
{
    // Both backing arrays are kept at the same size.
    overflows.ensureCapacity(size);
    unscaledDecimals.ensureCapacity(size);
}
@Override
public void ensureCapacity(long size)
{
    // Extend the subclass-owned storage before the superclass's state.
    longs.ensureCapacity(size);
    super.ensureCapacity(size);
}
public BigintGroupByHash(int hashChannel, boolean outputRawHash, int expectedSize, UpdateMemory updateMemory)
{
    checkArgument(hashChannel >= 0, "hashChannel must be at least zero");
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.hashChannel = hashChannel;
    this.outputRawHash = outputRawHash;

    // Capacity is a power of two, so (capacity - 1) doubles as the probe mask.
    hashCapacity = arraySize(expectedSize, FILL_RATIO);
    mask = hashCapacity - 1;
    maxFill = calculateMaxFill(hashCapacity);

    values = new LongBigArray();
    values.ensureCapacity(hashCapacity);
    groupIds = new IntBigArray(-1); // -1 marks an empty slot
    groupIds.ensureCapacity(hashCapacity);
    valuesByGroupId = new LongBigArray();
    valuesByGroupId.ensureCapacity(hashCapacity);

    // This interface is used for actively reserving memory (push model) for rehash.
    // The caller can also query memory usage on this object (pull model)
    this.updateMemory = requireNonNull(updateMemory, "updateMemory is null");
}
public BigintGroupByHash(int hashChannel, boolean outputRawHash, int expectedSize, UpdateMemory updateMemory)
{
    checkArgument(hashChannel >= 0, "hashChannel must be at least zero");
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.hashChannel = hashChannel;
    this.outputRawHash = outputRawHash;

    // Power-of-two capacity lets the mask replace a modulus in probing.
    hashCapacity = arraySize(expectedSize, FILL_RATIO);
    mask = hashCapacity - 1;
    maxFill = calculateMaxFill(hashCapacity);

    values = new LongBigArray();
    values.ensureCapacity(hashCapacity);
    groupIds = new IntBigArray(-1); // empty slots are -1
    groupIds.ensureCapacity(hashCapacity);
    valuesByGroupId = new LongBigArray();
    valuesByGroupId.ensureCapacity(hashCapacity);

    // This interface is used for actively reserving memory (push model) for rehash.
    // The caller can also query memory usage on this object (pull model)
    this.updateMemory = requireNonNull(updateMemory, "updateMemory is null");
}
@Override
public void ensureCapacity(long size)
{
    // All four parallel per-group arrays share the same capacity.
    partitionCounts.ensureCapacity(size);
    counts.ensureCapacity(size);
    envelopes.ensureCapacity(size);
    samples.ensureCapacity(size);
}
}
private SingleTypedHistogram(Type type, int expectedSize, int hashCapacity, BlockBuilder values)
{
    // Fail fast: validate arguments before touching any state.
    // (Originally the check ran after the field assignments.)
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.type = type;
    this.expectedSize = expectedSize;
    this.hashCapacity = hashCapacity;
    this.values = values;

    maxFill = calculateMaxFill(hashCapacity);
    mask = hashCapacity - 1; // assumes hashCapacity is a power of two — TODO confirm at call sites
    hashPositions = new IntBigArray(-1); // -1 marks an empty hash slot
    hashPositions.ensureCapacity(hashCapacity);
    counts = new LongBigArray();
    counts.ensureCapacity(hashCapacity);
}
private SingleTypedHistogram(Type type, int expectedSize, int hashCapacity, BlockBuilder values)
{
    // Validate preconditions first, before any fields are assigned
    // (the check previously ran mid-constructor, after assignments).
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.type = type;
    this.expectedSize = expectedSize;
    this.hashCapacity = hashCapacity;
    this.values = values;

    maxFill = calculateMaxFill(hashCapacity);
    mask = hashCapacity - 1; // mask math assumes a power-of-two capacity — verify callers
    hashPositions = new IntBigArray(-1); // empty slots hold -1
    hashPositions.ensureCapacity(hashCapacity);
    counts = new LongBigArray();
    counts.ensureCapacity(hashCapacity);
}
@VisibleForTesting
public ValueStore(int expectedSize, BlockBuilder values)
{
    // Bucket count is derived from the expected size and fill ratio;
    // power-of-two sizing lets (count - 1) act as the probe mask.
    bucketCount = computeBucketCount(expectedSize, MAX_FILL_RATIO);
    mask = bucketCount - 1;
    maxFill = calculateMaxFill(bucketCount, MAX_FILL_RATIO);
    this.values = values;
    buckets = new IntBigArray(-1); // -1 marks an unused bucket
    buckets.ensureCapacity(bucketCount);
    valueHashes = new LongBigArray(-1);
    valueHashes.ensureCapacity(bucketCount);
}
@VisibleForTesting
public ValueStore(int expectedSize, BlockBuilder values)
{
    // Size the open-addressed table from the expected element count.
    bucketCount = computeBucketCount(expectedSize, MAX_FILL_RATIO);
    mask = bucketCount - 1;
    maxFill = calculateMaxFill(bucketCount, MAX_FILL_RATIO);
    this.values = values;
    buckets = new IntBigArray(-1); // empty buckets hold -1
    buckets.ensureCapacity(bucketCount);
    valueHashes = new LongBigArray(-1);
    valueHashes.ensureCapacity(bucketCount);
}
private boolean processUnfinishedWork()
{
    verify(unfinishedWork != null);
    if (unfinishedWork.process()) {
        // Work completed: publish the result and clear the pending marker.
        partitionIds = unfinishedWork.getResult();
        partitionRowCount.ensureCapacity(partitionIds.getGroupCount());
        unfinishedWork = null;
        return true;
    }
    // Not done yet; the caller will drive this again later.
    return false;
}
private boolean processUnfinishedWork()
{
    verify(unfinishedWork != null);
    boolean finished = unfinishedWork.process();
    if (finished) {
        // Completed: capture the partition ids and size the row counts to match.
        partitionIds = unfinishedWork.getResult();
        partitionRowCount.ensureCapacity(partitionIds.getGroupCount());
        unfinishedWork = null;
    }
    return finished;
}
private void rehash() { long newCapacityLong = hashCapacity * 2L; if (newCapacityLong > Integer.MAX_VALUE) { throw new PrestoException(GENERIC_INSUFFICIENT_RESOURCES, "Size of hash table cannot exceed 1 billion entries"); } int newCapacity = (int) newCapacityLong; int newMask = newCapacity - 1; IntBigArray newHashPositions = new IntBigArray(-1); newHashPositions.ensureCapacity(newCapacity); for (int i = 0; i < values.getPositionCount(); i++) { // find an empty slot for the address int hashPosition = getBucketId(TypeUtils.hashPosition(type, values, i), newMask); while (newHashPositions.get(hashPosition) != -1) { hashPosition = (hashPosition + 1) & newMask; } // record the mapping newHashPositions.set(hashPosition, i); } hashCapacity = newCapacity; mask = newMask; maxFill = calculateMaxFill(newCapacity); hashPositions = newHashPositions; this.counts.ensureCapacity(maxFill); }
@Benchmark
@OperationsPerInvocation(POSITIONS)
public long baselineBigArray(BaselinePagesData data)
{
    int hashSize = arraySize(GROUP_COUNT, 0.9f);
    int mask = hashSize - 1;
    LongBigArray table = new LongBigArray(-1);
    table.ensureCapacity(hashSize);

    long groupIds = 0;
    for (Page page : data.getPages()) {
        Block block = page.getBlock(0);
        int positionCount = block.getPositionCount();
        for (int position = 0; position < positionCount; position++) {
            long value = BIGINT.getLong(block, position);
            int tablePosition = (int) XxHash64.hash(value) & mask;
            // Linear probing. BUG FIX: the original advanced with `tablePosition++`
            // and never wrapped, so a probe chain starting near the end of the table
            // could index past the ensured capacity of `table`. Masking keeps the
            // probe inside [0, hashSize).
            while (table.get(tablePosition) != -1 && table.get(tablePosition) != value) {
                tablePosition = (tablePosition + 1) & mask;
            }
            if (table.get(tablePosition) == -1) {
                table.set(tablePosition, value);
                groupIds++;
            }
        }
    }
    return groupIds;
}
@Benchmark
@OperationsPerInvocation(POSITIONS)
public long baselineBigArray(BaselinePagesData data)
{
    int hashSize = arraySize(GROUP_COUNT, 0.9f);
    int mask = hashSize - 1;
    LongBigArray table = new LongBigArray(-1);
    table.ensureCapacity(hashSize);

    long groupIds = 0;
    for (Page page : data.getPages()) {
        Block block = page.getBlock(0);
        int positionCount = block.getPositionCount();
        for (int position = 0; position < positionCount; position++) {
            long value = BIGINT.getLong(block, position);
            int tablePosition = (int) XxHash64.hash(value) & mask;
            // Linear probing. FIX: wrap the probe with the mask — the original
            // `tablePosition++` could walk past the end of the ensured table
            // capacity when collisions cluster near the last slots.
            while (table.get(tablePosition) != -1 && table.get(tablePosition) != value) {
                tablePosition = (tablePosition + 1) & mask;
            }
            if (table.get(tablePosition) == -1) {
                table.set(tablePosition, value);
                groupIds++;
            }
        }
    }
    return groupIds;
}