@Override
public void ensureCapacity(long size)
{
    // Never shrink: keep whichever is larger, the current group count or the request.
    long requested = Math.max(numberOfGroups, size);
    numberOfGroups = requested;
    // Per-group arrays must be able to address every group id.
    headPointers.ensureCapacity(requested);
    valueAndGroupHashes.ensureCapacity(requested);
}
private int getBucketIdForNode(int nodePointer, int mask) { long valueAndGroupHash = valueAndGroupHashes.get(nodePointer); // without mask int bucketId = (int) (valueAndGroupHash & mask); return bucketId; }
@Override
public void setCount(long count)
{
    // Overwrite the stored count for this entry.
    // NOTE(review): `counts` and `groupId` come from the enclosing scope; this indexes
    // by `groupId`, while the sibling add() indexes by `nodePointer` — confirm the
    // enclosing class uses group-indexed counts here intentionally.
    counts.set(groupId, count);
}
// NOTE(review): torn fragment of a rehash routine (likely tryRehash) — the enclosing
// method signature and the loop that owns the stray `continue;` and `return true;`
// are outside this view, so the fragment is not compilable in isolation.
// Allocate replacement tables at the new capacity; -1 marks an empty group slot.
LongBigArray newValues = new LongBigArray();
newValues.ensureCapacity(newCapacity);
IntBigArray newGroupIds = new IntBigArray(-1);
newGroupIds.ensureCapacity(newCapacity);
continue;
// Re-insert the group's value at its freshly probed hash position.
long value = valuesByGroupId.get(groupId);
newValues.set(hashPosition, value);
newGroupIds.set(hashPosition, groupId);
// Swap in the rebuilt table and make room for future groups up to the fill limit.
groupIds = newGroupIds;
this.valuesByGroupId.ensureCapacity(maxFill);
return true;
// Inserts a brand-new (group, value) node into an empty bucket.
// NOTE(review): `bucketId`, `valueHash`, `nodePointerToUse` and `valueAndGroupHash`
// are captured from the enclosing scope (not parameters) — confirm against the full class.
private void addNewGroup(long groupId, Block block, int position, long count)
{
    checkState(isEmpty(), "bucket %s not empty, points to %s", bucketId, buckets.get(bucketId));
    // we've already computed the value hash for only the value only; ValueStore will save it for future use
    int nextValuePosition = valueStore.addAndGetPosition(type, block, position, valueHash);
    // set value pointer to hash map of values
    valuePositions.set(nodePointerToUse, nextValuePosition);
    // save hashes for future rehashing
    valueAndGroupHashes.set(nodePointerToUse, valueAndGroupHash);
    // set pointer to node for this bucket
    buckets.set(bucketId, nodePointerToUse);
    // save data for this node
    counts.set(nodePointerToUse, count);
    // used for doing value comparisons on hash collisions
    groupIds.set(nodePointerToUse, groupId);
    // we only ever store ints as values; we need long as an index
    int currentHead = (int) headPointers.get(groupId);
    // maintain linked list of nodes in this group (insert at head)
    headPointers.set(groupId, nodePointerToUse);
    nextPointers.set(nodePointerToUse, currentHead);
}
// NOTE(review): this extra brace closes an enclosing scope that was cut onto this line.
}
/**
 * Builds a group-by hash over a single bigint channel, sized for {@code expectedSize} entries.
 */
public BigintGroupByHash(int hashChannel, boolean outputRawHash, int expectedSize, UpdateMemory updateMemory)
{
    checkArgument(hashChannel >= 0, "hashChannel must be at least zero");
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.hashChannel = hashChannel;
    this.outputRawHash = outputRawHash;

    // Size the open-addressed table from the expected entry count and fill ratio.
    hashCapacity = arraySize(expectedSize, FILL_RATIO);
    mask = hashCapacity - 1;
    maxFill = calculateMaxFill(hashCapacity);

    // Per-slot arrays; -1 in groupIds marks an empty slot.
    values = new LongBigArray();
    groupIds = new IntBigArray(-1);
    valuesByGroupId = new LongBigArray();
    values.ensureCapacity(hashCapacity);
    groupIds.ensureCapacity(hashCapacity);
    valuesByGroupId.ensureCapacity(hashCapacity);

    // This interface is used for actively reserving memory (push model) for rehash.
    // The caller can also query memory usage on this object (pull model)
    this.updateMemory = requireNonNull(updateMemory, "updateMemory is null");
}
// NOTE(review): torn fragment of a rehash loop — the enclosing for-loop, the probe
// sequence that uses `probeCount`, and the method signature are outside this view.
long valueHash = valueHashes.get(i);
int bucketId = getBucketId(valueHash, newMask);
int probeCount = 1;
// Commit the enlarged table parameters.
valueHashes.ensureCapacity(newBucketCount);
bucketCount = newBucketCount;
maxFill = calculateMaxFill(newBucketCount, MAX_FILL_RATIO);
// NOTE(review): torn constructor fragment — the if-block is unbalanced here and the
// else-branch (which presumably builds a real group-by hash) is outside this view.
this.partitionRowCount = new LongBigArray(0);
// No partition channels means everything is a single partition — no hash needed.
if (partitionChannels.isEmpty()) {
    this.groupByHash = Optional.empty();
/**
 * Creates a new big array containing one initial segment filled with the specified default value.
 *
 * @param initialValue default value used to fill segments, retained for later segment allocations
 */
public LongBigArray(long initialValue)
{
    this.initialValue = initialValue;
    array = new long[INITIAL_SEGMENTS][];
    allocateNewSegment();
}
// Accumulates `count` into the tally stored at this node.
// NOTE(review): `counts` and `nodePointer` are fields of the enclosing scope.
void add(long count)
{
    counts.add(nodePointer, count);
}
/**
 * Ensures this big array is at least the specified length. If the array is smaller, segments
 * are added until the array is larger than the specified length.
 *
 * @param length the minimum number of addressable elements required
 */
public void ensureCapacity(long length)
{
    // A capacity exactly equal to the requested length already suffices (valid
    // indices are 0..length-1), so return early. The previous strict comparison
    // (capacity > length) fell through to grow() when capacity == length,
    // allocating a redundant segment.
    if (capacity >= length) {
        return;
    }
    grow(length);
}
// NOTE(review): torn fragment of a rehash routine (likely tryRehash) — the enclosing
// method signature and the loop that owns the stray `continue;` and `return true;`
// are outside this view, so the fragment is not compilable in isolation.
// Allocate replacement tables at the new capacity; -1 marks an empty group slot.
LongBigArray newValues = new LongBigArray();
newValues.ensureCapacity(newCapacity);
IntBigArray newGroupIds = new IntBigArray(-1);
newGroupIds.ensureCapacity(newCapacity);
continue;
// Re-insert the group's value at its freshly probed hash position.
long value = valuesByGroupId.get(groupId);
newValues.set(hashPosition, value);
newGroupIds.set(hashPosition, groupId);
// Swap in the rebuilt table and make room for future groups up to the fill limit.
groupIds = newGroupIds;
this.valuesByGroupId.ensureCapacity(maxFill);
return true;
// Inserts a brand-new (group, value) node into an empty bucket.
// NOTE(review): `bucketId`, `valueHash`, `nodePointerToUse` and `valueAndGroupHash`
// are captured from the enclosing scope (not parameters) — confirm against the full class.
private void addNewGroup(long groupId, Block block, int position, long count)
{
    checkState(isEmpty(), "bucket %s not empty, points to %s", bucketId, buckets.get(bucketId));
    // we've already computed the value hash for only the value only; ValueStore will save it for future use
    int nextValuePosition = valueStore.addAndGetPosition(type, block, position, valueHash);
    // set value pointer to hash map of values
    valuePositions.set(nodePointerToUse, nextValuePosition);
    // save hashes for future rehashing
    valueAndGroupHashes.set(nodePointerToUse, valueAndGroupHash);
    // set pointer to node for this bucket
    buckets.set(bucketId, nodePointerToUse);
    // save data for this node
    counts.set(nodePointerToUse, count);
    // used for doing value comparisons on hash collisions
    groupIds.set(nodePointerToUse, groupId);
    // we only ever store ints as values; we need long as an index
    int currentHead = (int) headPointers.get(groupId);
    // maintain linked list of nodes in this group (insert at head)
    headPointers.set(groupId, nodePointerToUse);
    nextPointers.set(nodePointerToUse, currentHead);
}
// NOTE(review): this extra brace closes an enclosing scope that was cut onto this line.
}
/**
 * Builds a group-by hash over a single bigint channel, sized for {@code expectedSize} entries.
 */
public BigintGroupByHash(int hashChannel, boolean outputRawHash, int expectedSize, UpdateMemory updateMemory)
{
    checkArgument(hashChannel >= 0, "hashChannel must be at least zero");
    checkArgument(expectedSize > 0, "expectedSize must be greater than zero");

    this.hashChannel = hashChannel;
    this.outputRawHash = outputRawHash;

    // Size the open-addressed table from the expected entry count and fill ratio.
    hashCapacity = arraySize(expectedSize, FILL_RATIO);
    mask = hashCapacity - 1;
    maxFill = calculateMaxFill(hashCapacity);

    // Per-slot arrays; -1 in groupIds marks an empty slot.
    values = new LongBigArray();
    groupIds = new IntBigArray(-1);
    valuesByGroupId = new LongBigArray();
    values.ensureCapacity(hashCapacity);
    groupIds.ensureCapacity(hashCapacity);
    valuesByGroupId.ensureCapacity(hashCapacity);

    // This interface is used for actively reserving memory (push model) for rehash.
    // The caller can also query memory usage on this object (pull model)
    this.updateMemory = requireNonNull(updateMemory, "updateMemory is null");
}
// NOTE(review): torn fragment of a rehash loop — the enclosing for-loop, the probe
// sequence that uses `probeCount`, and the method signature are outside this view.
long valueHash = valueHashes.get(i);
int bucketId = getBucketId(valueHash, newMask);
int probeCount = 1;
// Commit the enlarged table parameters.
valueHashes.ensureCapacity(newBucketCount);
bucketCount = newBucketCount;
maxFill = calculateMaxFill(newBucketCount, MAX_FILL_RATIO);
// Builds a grouped histogram using open-address hashing: one bucket maps to one node,
// and per-group linked lists (headPointers/nextPointers) chain that group's nodes.
public GroupedTypedHistogram(Type type, int expectedCount)
{
    checkArgument(expectedCount > 0, "expectedSize must be greater than zero");
    this.type = type;
    // NOTE(review): storing the expected size into a field named `bucketId` looks like a
    // typo for `this.expectedCount` — confirm against the class's field declarations.
    this.bucketId = expectedCount;
    this.bucketCount = computeBucketCount(expectedCount, MAX_FILL_RATIO);
    this.mask = bucketCount - 1;
    this.maxFill = calculateMaxFill(bucketCount, MAX_FILL_RATIO);
    this.values = type.createBlockBuilder(null, computeBucketCount(expectedCount, GroupedTypedHistogram.MAX_FILL_RATIO));
    // buckets and node-arrays (bucket "points" to a node, so 1:1 relationship)
    buckets = new IntBigArray(-1);
    buckets.ensureCapacity(bucketCount);
    counts = new LongBigArray();
    valuePositions = new IntBigArray();
    valueAndGroupHashes = new LongBigArray();
    nextPointers = new IntBigArray(NULL);
    groupIds = new LongBigArray(-1);
    // here, one bucket is one node in the hash structure (vs a bucket may be a chain of nodes in closed-hashing with linked list hashing)
    // ie, this is open-address hashing
    resizeNodeArrays(bucketCount);
    // end bucket/node based arrays
    // per-group arrays: size will be set by external call, same as groups since the number will be the same
    headPointers = new LongBigArray(NULL); // index into counts/valuePositions
    nextNodePointer = 0;
    bucketNodeFactory = this.new BucketNodeFactory();
    valueStore = new ValueStore(expectedCount, values);
}
/**
 * Creates a new big array containing one initial segment filled with the specified default value.
 *
 * @param initialValue default value used to fill segments, retained for later segment allocations
 */
public LongBigArray(long initialValue)
{
    this.initialValue = initialValue;
    array = new long[INITIAL_SEGMENTS][];
    allocateNewSegment();
}
// Accumulates `count` into the tally stored at this node.
// NOTE(review): `counts` and `nodePointer` are fields of the enclosing scope.
void add(long count)
{
    counts.add(nodePointer, count);
}