/**
 * Creates the map with {@code concurrencyLevel} independently locked sections,
 * sizing each section so the combined capacity covers {@code expectedItems}
 * at the configured fill factor.
 *
 * @param expectedItems    expected total number of entries; must be > 0
 * @param concurrencyLevel number of lock-striped sections; must be > 0 and
 *                         not exceed {@code expectedItems}
 */
public ConcurrentOpenHashMap(int expectedItems, int concurrencyLevel) {
    checkArgument(expectedItems > 0);
    checkArgument(concurrencyLevel > 0);
    checkArgument(expectedItems >= concurrencyLevel);

    int sectionCount = concurrencyLevel;
    int itemsPerSection = expectedItems / sectionCount;
    // Provision each section above its expected load so the open-addressing
    // table stays below the fill factor.
    int capacityPerSection = (int) (itemsPerSection / MapFillFactor);

    this.sections = (Section<K, V>[]) new Section[sectionCount];
    for (int idx = 0; idx < sectionCount; idx++) {
        sections[idx] = new Section<>(capacityPerSection);
    }
}
/**
 * Returns the value mapped to {@code key}, or {@code null} if absent.
 *
 * @param key lookup key; must not be null
 */
public V get(K key) {
    checkNotNull(key);
    long keyHash = hash(key);
    // Route to the section that owns this hash, passing the truncated
    // hash down for in-section bucket selection.
    Section<K, V> section = getSection(keyHash);
    return section.get(key, (int) keyHash);
}
/**
 * Associates {@code value} with {@code key}, replacing any existing mapping.
 *
 * @return the previous value, or {@code null} if there was none
 */
public V put(K key, V value) {
    checkNotNull(key);
    checkNotNull(value);
    long keyHash = hash(key);
    // onlyIfAbsent=false: always overwrite; no value provider needed.
    return getSection(keyHash).put(key, value, (int) keyHash, false, null);
}
// NOTE(review): this fragment of Section.put() is truncated mid-method — the
// probe loop, the insertion path, and the closing braces are missing, so it
// cannot compile as written. It also releases the write stamp twice in a row
// (`unlockWrite(stamp); unlockWrite(stamp);`), which looks like a paste error:
// releasing the same StampedLock write stamp a second time is invalid — verify
// against the upstream implementation, which unlocks exactly once.
// TODO: restore the full method body.
V put(K key, V value, int keyHash, boolean onlyIfAbsent, Function<K, V> valueProvider) { long stamp = writeLock(); int bucket = signSafeMod(keyHash, capacity); if (usedBuckets > resizeThreshold) { try { rehash(); } finally { unlockWrite(stamp); unlockWrite(stamp);
// NOTE(review): truncated fragment of Section.get() — the optimistic-read retry
// loop and the return path are missing. As visible here, the value is read from
// table[bucket + 1] BEFORE the stamp is validated, and when validation succeeds
// (`!acquiredLock && validate(stamp)`) the code immediately takes and releases
// the read lock without re-reading — the opposite of the usual StampedLock
// pattern (fall back to readLock() only when validate FAILS). Presumably a
// garbled paste; TODO: restore the full method body and confirm the lock
// protocol against upstream.
V get(K key, int keyHash) { long stamp = tryOptimisticRead(); boolean acquiredLock = false; int bucket = signSafeMod(keyHash, capacity); V storedValue = (V) table[bucket + 1]; if (!acquiredLock && validate(stamp)) { stamp = readLock(); acquiredLock = true; unlockRead(stamp);
// NOTE(review): truncated fragment of Section.forEach() — the iteration loop,
// the call to `processor`, and the closing braces are missing. It references
// locals that are never declared in the visible text (`acquiredReadLock` is
// assigned before any declaration; `bucket` is read but never defined), and it
// acquires the read lock and releases it in the very next statement. This
// cannot compile as written; TODO: restore the full method body from upstream.
public void forEach(BiConsumer<? super K, ? super V> processor) { long stamp = tryOptimisticRead(); if (!validate(stamp)) { stamp = readLock(); acquiredReadLock = true; table = this.table; V storedValue = (V) table[bucket + 1]; if (!acquiredReadLock && !validate(stamp)) { stamp = readLock(); acquiredReadLock = true; unlockRead(stamp);
private void rehash() { // Expand the hashmap int newCapacity = capacity * 2; Object[] newTable = new Object[2 * newCapacity]; // Re-hash table for (int i = 0; i < table.length; i += 2) { K storedKey = (K) table[i]; V storedValue = (V) table[i + 1]; if (storedKey != EmptyKey && storedKey != DeletedKey) { insertKeyValueNoLock(newTable, newCapacity, storedKey, storedValue); } } table = newTable; capacity = newCapacity; usedBuckets = size; resizeThreshold = (int) (capacity * MapFillFactor); }
/**
 * Maps {@code key} to {@code value}, overwriting any prior mapping.
 *
 * @param key   non-null key
 * @param value non-null value
 * @return the value previously mapped to {@code key}, or {@code null}
 */
public V put(K key, V value) {
    checkNotNull(key);
    checkNotNull(value);
    long h = hash(key);
    // Unconditional write: onlyIfAbsent is false and no provider is used.
    Section<K, V> owner = getSection(h);
    return owner.put(key, value, (int) h, false, null);
}
/**
 * Fetches the value for {@code key}; {@code null} when no mapping exists.
 *
 * @param key non-null lookup key
 */
public V get(K key) {
    checkNotNull(key);
    final long h = hash(key);
    return getSection(h).get(key, (int) h);
}
/**
 * Invokes {@code processor} for every entry, visiting the sections in order.
 *
 * @param processor callback receiving each key/value pair
 */
public void forEach(BiConsumer<? super K, ? super V> processor) {
    for (int i = 0; i < sections.length; i++) {
        sections[i].forEach(processor);
    }
}
/**
 * Removes every entry by clearing each section in turn.
 */
public void clear() {
    for (int i = 0; i < sections.length; i++) {
        sections[i].clear();
    }
}
/**
 * Returns the value mapped to {@code key}, computing and storing one via
 * {@code provider} when the key is absent.
 *
 * @param key      non-null key
 * @param provider non-null function producing the value if missing
 */
public V computeIfAbsent(K key, Function<K, V> provider) {
    checkNotNull(key);
    checkNotNull(provider);
    long keyHash = hash(key);
    // onlyIfAbsent=true with a provider: an existing mapping wins, otherwise
    // the provider supplies the stored value.
    Section<K, V> section = getSection(keyHash);
    return section.put(key, null, (int) keyHash, true, provider);
}
/**
 * Builds a map striped into {@code concurrencyLevel} sections, each sized for
 * its share of {@code expectedItems} divided by the map fill factor.
 *
 * @param expectedItems    anticipated number of entries (> 0)
 * @param concurrencyLevel section count (> 0, and at most expectedItems)
 */
public ConcurrentOpenHashMap(int expectedItems, int concurrencyLevel) {
    checkArgument(expectedItems > 0);
    checkArgument(concurrencyLevel > 0);
    checkArgument(expectedItems >= concurrencyLevel);

    final int numSections = concurrencyLevel;
    final int perSectionExpectedItems = expectedItems / numSections;
    final int perSectionCapacity = (int) (perSectionExpectedItems / MapFillFactor);

    this.sections = (Section<K, V>[]) new Section[numSections];
    int i = 0;
    while (i < numSections) {
        sections[i] = new Section<>(perSectionCapacity);
        i++;
    }
}
/**
 * Stores {@code value} only when {@code key} has no current mapping.
 *
 * @return the existing value if the key was already present, else {@code null}
 */
public V putIfAbsent(K key, V value) {
    checkNotNull(key);
    checkNotNull(value);
    final long h = hash(key);
    // onlyIfAbsent=true and no provider: never overwrites an existing entry.
    return getSection(h).put(key, value, (int) h, true, null);
}
// NOTE(review): duplicated, truncated fragment of Section.put() — the probe
// loop, insertion logic, and closing braces are absent, so this does not
// compile. The visible tail unlocks the write stamp twice back-to-back
// (`unlockWrite(stamp); unlockWrite(stamp);`); releasing the same write stamp
// twice is not valid usage — presumably a copy/paste corruption. TODO: restore
// the complete method body and the single unlock.
V put(K key, V value, int keyHash, boolean onlyIfAbsent, Function<K, V> valueProvider) { long stamp = writeLock(); int bucket = signSafeMod(keyHash, capacity); if (usedBuckets > resizeThreshold) { try { rehash(); } finally { unlockWrite(stamp); unlockWrite(stamp);
// NOTE(review): duplicated, truncated fragment of Section.get(). Missing the
// retry loop and return statement; unbalanced braces. The visible logic reads
// the table slot before validating the optimistic stamp and then locks/unlocks
// on validation SUCCESS rather than failure — inverted relative to the
// conventional StampedLock optimistic-read pattern. Looks like extraction
// garbling; TODO: restore the full method and verify the lock protocol.
V get(K key, int keyHash) { long stamp = tryOptimisticRead(); boolean acquiredLock = false; int bucket = signSafeMod(keyHash, capacity); V storedValue = (V) table[bucket + 1]; if (!acquiredLock && validate(stamp)) { stamp = readLock(); acquiredLock = true; unlockRead(stamp);
// NOTE(review): duplicated, truncated fragment of Section.forEach(). The
// bucket-scanning loop and the processor.accept(...) call are missing; locals
// `acquiredReadLock` and `bucket` are used without any visible declaration, and
// the read lock is released immediately after being acquired. Not compilable
// as written — TODO: restore the complete method body.
public void forEach(BiConsumer<? super K, ? super V> processor) { long stamp = tryOptimisticRead(); if (!validate(stamp)) { stamp = readLock(); acquiredReadLock = true; table = this.table; V storedValue = (V) table[bucket + 1]; if (!acquiredReadLock && !validate(stamp)) { stamp = readLock(); acquiredReadLock = true; unlockRead(stamp);
private void rehash() { // Expand the hashmap int newCapacity = capacity * 2; Object[] newTable = new Object[2 * newCapacity]; // Re-hash table for (int i = 0; i < table.length; i += 2) { K storedKey = (K) table[i]; V storedValue = (V) table[i + 1]; if (storedKey != EmptyKey && storedKey != DeletedKey) { insertKeyValueNoLock(newTable, newCapacity, storedKey, storedValue); } } table = newTable; capacity = newCapacity; usedBuckets = size; resizeThreshold = (int) (capacity * MapFillFactor); }
/**
 * Atomically gets the value for {@code key}, creating it with {@code provider}
 * on first access.
 *
 * @param key      non-null key
 * @param provider non-null value factory, invoked only when the key is absent
 */
public V computeIfAbsent(K key, Function<K, V> provider) {
    checkNotNull(key);
    checkNotNull(provider);
    final long h = hash(key);
    // Delegates to the section-level put with onlyIfAbsent semantics and a
    // lazy value provider in place of a concrete value.
    return getSection(h).put(key, null, (int) h, true, provider);
}
/**
 * Applies {@code processor} to each key/value pair across all sections.
 *
 * @param processor consumer called once per entry
 */
public void forEach(BiConsumer<? super K, ? super V> processor) {
    for (Section<K, V> section : sections) {
        section.forEach(processor);
    }
}