/**
 * Creates a set split into {@code concurrencyLevel} independently locked sections.
 *
 * @param expectedItems    expected total item count (must be > 0)
 * @param concurrencyLevel number of sections, i.e. expected concurrent writers
 *                         (must be > 0 and not exceed {@code expectedItems})
 */
public ConcurrentOpenHashSet(int expectedItems, int concurrencyLevel) {
    checkArgument(expectedItems > 0);
    checkArgument(concurrencyLevel > 0);
    checkArgument(expectedItems >= concurrencyLevel);

    final int sectionCount = concurrencyLevel;
    final int itemsPerSection = expectedItems / sectionCount;
    // Over-allocate per section so the initial load stays below the fill factor
    final int sectionCapacity = (int) (itemsPerSection / MapFillFactor);

    this.sections = (Section<V>[]) new Section[sectionCount];
    for (int idx = 0; idx < sectionCount; idx++) {
        this.sections[idx] = new Section<>(sectionCapacity);
    }
}
/**
 * Adds {@code value} to the set.
 *
 * @param value the non-null value to insert
 * @return {@code true} if the set did not already contain the value
 */
public boolean add(V value) {
    checkNotNull(value);
    long hashValue = hash(value);
    // Route the insert to the section owning this hash
    Section<V> section = getSection(hashValue);
    return section.add(value, (int) hashValue);
}
int removeIf(Predicate<V> filter) { long stamp = writeLock(); int removedCount = 0; try { // Go through all the buckets for this section for (int bucket = capacity - 1; bucket >= 0; bucket--) { V storedValue = values[bucket]; if (storedValue != DeletedValue && storedValue != EmptyValue) { if (filter.test(storedValue)) { // Removing item --size; ++removedCount; cleanBucket(bucket); } } } return removedCount; } finally { unlockWrite(stamp); } }
boolean add(V value, int keyHash) { int bucket = keyHash; long stamp = writeLock(); int capacity = this.capacity; if (usedBuckets > resizeThreshold) { try { rehash(); } finally { unlockWrite(stamp); unlockWrite(stamp);
boolean contains(V value, int keyHash) { int bucket = keyHash; long stamp = tryOptimisticRead(); boolean acquiredLock = false; if (!acquiredLock && validate(stamp)) { stamp = readLock(); acquiredLock = true; unlockRead(stamp);
public void forEach(Consumer<? super V> processor) { long stamp = tryOptimisticRead(); if (!validate(stamp)) { stamp = readLock(); acquiredReadLock = true; V storedValue = values[bucket]; if (!acquiredReadLock && !validate(stamp)) { stamp = readLock(); acquiredReadLock = true; unlockRead(stamp);
private boolean remove(V value, int keyHash) { int bucket = keyHash; long stamp = writeLock(); try { while (true) { int capacity = this.capacity; bucket = signSafeMod(bucket, capacity); V storedValue = values[bucket]; if (value.equals(storedValue)) { --size; cleanBucket(bucket); return true; } else if (storedValue == EmptyValue) { // Value wasn't found return false; } ++bucket; } } finally { unlockWrite(stamp); } }
int removeIf(Predicate<V> filter) { long stamp = writeLock(); int removedCount = 0; try { // Go through all the buckets for this section for (int bucket = capacity - 1; bucket >= 0; bucket--) { V storedValue = values[bucket]; if (storedValue != DeletedValue && storedValue != EmptyValue) { if (filter.test(storedValue)) { // Removing item --size; ++removedCount; cleanBucket(bucket); } } } return removedCount; } finally { unlockWrite(stamp); } }
private void rehash() { // Expand the hashmap int newCapacity = capacity * 2; V[] newValues = (V[]) new Object[newCapacity]; // Re-hash table for (int i = 0; i < values.length; i++) { V storedValue = values[i]; if (storedValue != EmptyValue && storedValue != DeletedValue) { insertValueNoLock(newValues, storedValue); } } values = newValues; capacity = newCapacity; usedBuckets = size; resizeThreshold = (int) (capacity * MapFillFactor); }
/**
 * Inserts {@code value}, dispatching to the section that owns its hash.
 *
 * @param value the non-null value to insert
 * @return {@code true} if the value was not already present
 */
public boolean add(V value) {
    checkNotNull(value);
    final long h = hash(value);
    return getSection(h).add(value, (int) h);
}
/** Empties the set by clearing each section in turn. */
public void clear() {
    for (int i = 0; i < sections.length; i++) {
        sections[i].clear();
    }
}
/**
 * Removes every value matching {@code filter} from all sections.
 *
 * @param filter non-null predicate selecting values to remove
 * @return total number of values removed across all sections
 */
public int removeIf(Predicate<V> filter) {
    checkNotNull(filter);
    int total = 0;
    for (Section<V> section : sections) {
        total += section.removeIf(filter);
    }
    return total;
}
/**
 * Builds the section array: one Section per concurrency level, each sized so
 * the expected per-section load stays below the fill factor.
 *
 * @param expectedItems    expected total item count (> 0)
 * @param concurrencyLevel number of sections (> 0, at most expectedItems)
 */
public ConcurrentOpenHashSet(int expectedItems, int concurrencyLevel) {
    checkArgument(expectedItems > 0);
    checkArgument(concurrencyLevel > 0);
    checkArgument(expectedItems >= concurrencyLevel);

    int numSections = concurrencyLevel;
    int perSectionExpectedItems = expectedItems / numSections;
    int perSectionCapacity = (int) (perSectionExpectedItems / MapFillFactor);
    this.sections = (Section<V>[]) new Section[numSections];

    int i = 0;
    while (i < numSections) {
        sections[i] = new Section<>(perSectionCapacity);
        i++;
    }
}
/**
 * Visits every value in the set, section by section.
 *
 * @param processor callback invoked once per value
 */
public void forEach(Consumer<? super V> processor) {
    for (int i = 0; i < sections.length; i++) {
        sections[i].forEach(processor);
    }
}
/**
 * Checks membership of {@code value} in the set.
 *
 * @param value the non-null value to look up
 * @return {@code true} if the value is present
 */
public boolean contains(V value) {
    checkNotNull(value);
    long hashValue = hash(value);
    // Only the section owning this hash needs to be consulted
    Section<V> section = getSection(hashValue);
    return section.contains(value, (int) hashValue);
}
boolean contains(V value, int keyHash) { int bucket = keyHash; long stamp = tryOptimisticRead(); boolean acquiredLock = false; if (!acquiredLock && validate(stamp)) { stamp = readLock(); acquiredLock = true; unlockRead(stamp);
boolean add(V value, int keyHash) { int bucket = keyHash; long stamp = writeLock(); int capacity = this.capacity; if (usedBuckets > resizeThreshold) { try { rehash(); } finally { unlockWrite(stamp); unlockWrite(stamp);
public void forEach(Consumer<? super V> processor) { long stamp = tryOptimisticRead(); if (!validate(stamp)) { stamp = readLock(); acquiredReadLock = true; V storedValue = values[bucket]; if (!acquiredReadLock && !validate(stamp)) { stamp = readLock(); acquiredReadLock = true; unlockRead(stamp);
private boolean remove(V value, int keyHash) { int bucket = keyHash; long stamp = writeLock(); try { while (true) { int capacity = this.capacity; bucket = signSafeMod(bucket, capacity); V storedValue = values[bucket]; if (value.equals(storedValue)) { --size; cleanBucket(bucket); return true; } else if (storedValue == EmptyValue) { // Value wasn't found return false; } ++bucket; } } finally { unlockWrite(stamp); } }
private void rehash() { // Expand the hashmap int newCapacity = capacity * 2; V[] newValues = (V[]) new Object[newCapacity]; // Re-hash table for (int i = 0; i < values.length; i++) { V storedValue = values[i]; if (storedValue != EmptyValue && storedValue != DeletedValue) { insertValueNoLock(newValues, storedValue); } } values = newValues; capacity = newCapacity; usedBuckets = size; resizeThreshold = (int) (capacity * MapFillFactor); }