/** * Constructor for the class. * Populates the value <-> mappings. * * @param dataBuffer Pinot data buffer * @param length Length of the dictionary */ public OnHeapIntDictionary(PinotDataBuffer dataBuffer, int length) { super(dataBuffer, length, Integer.BYTES, (byte) 0); _valToDictId = new Int2IntOpenHashMap(length); _valToDictId.defaultReturnValue(-1); _dictIdToVal = new int[length]; for (int dictId = 0; dictId < length; dictId++) { int value = getInt(dictId); _dictIdToVal[dictId] = value; _valToDictId.put(value, dictId); } }
@Override
public int lookupId(String name) {
  // Resolve the name in the base dictionary first, then translate the base id
  // through the filtered forward mapping.
  final int baseId = baseIdLookup.lookupId(name);
  return forwardMapping.get(baseId);
}
/**
 * Returns the key+value pair with the max priority (min for minHeap mode)
 * <ul>
 *   <li> key+value pair is removed from the priority queue. </li>
 *   <li> Throws runtime exception if the priority queue is empty. </li>
 *   <li> NOTE(review): the original doc claimed O(1), but this calls siftDown,
 *        which for a binary heap is typically O(log n) — confirm. </li>
 * </ul>
 *
 * @return Key+Value pair
 */
public IntObjectPair<T> poll() {
  if (isEmpty()) {
    throw new RuntimeException("Empty collection, nothing to remove");
  } else {
    // Capture the root before it is displaced.
    IntObjectPair<T> poll = peek();
    int lastIndex = _values.size() - 1;
    // Move the last element to the root; the old root is now at lastIndex.
    swapValues(0, lastIndex);
    _values.remove(lastIndex);
    // Drop both index bookkeeping entries for the removed (old root) element.
    // Order matters: _indexToKeyMap.get(lastIndex) must run before the remove.
    _keyToIndexMap.remove(_indexToKeyMap.get(lastIndex));
    _indexToKeyMap.remove(lastIndex);
    // Restore the heap property from the root, unless the queue emptied out.
    if (!_values.isEmpty()) {
      siftDown(0);
    }
    return poll;
  }
}
// NOTE(review): fragment — braces do not balance; the enclosing method and the
// loop/if closers lie outside this view. Reads as: build a dense re-numbering of
// the dictionary ids whose values match a regex, then invert it.
final Int2IntOpenHashMap forwardMapping = new Int2IntOpenHashMap();
// Absent base ids resolve to -1 (i.e. "filtered out").
forwardMapping.defaultReturnValue(-1);
for (int i = 0; i < selectorCardinality; i++) {
  String val = NullHandling.nullToEmptyIfNeeded(selector.lookupName(i));
  if (val != null && compiledRegex.matcher(val).matches()) {
    // Assign matching ids consecutive new ids (count presumably starts at 0 — confirm).
    forwardMapping.put(i, count++);
// Invert forwardMapping: reverseMapping[newId] = originalId.
final int[] reverseMapping = new int[forwardMapping.size()];
for (Int2IntMap.Entry e : forwardMapping.int2IntEntrySet()) {
  reverseMapping[e.getIntValue()] = e.getIntKey();
// NOTE(review): fragment — fused excerpts; the while-loop body and surrounding
// method are truncated. Appears to be embedding-training bookkeeping:
// inputIndex maps a node id to a slot, inputUpdateCounter counts updates per node.
Int2IntOpenHashMap inputIndex = new Int2IntOpenHashMap();
Int2IntOpenHashMap inputUpdateCounter = new Int2IntOpenHashMap();
// Seeded RNG, presumably for negative sampling — confirm against caller.
Random negativeSeed = new Random(seed);
int batchSize = dataBuf.readInt();
// Count one update each for the target and source nodes.
inputUpdateCounter.addTo(target, 1);
inputUpdateCounter.addTo(src, 1);
// fastIterator() reuses the Entry object; safe only because each entry is fully
// consumed before the next hasNext()/next().
ObjectIterator<Int2IntMap.Entry> it = inputIndex.int2IntEntrySet().fastIterator();
while (it.hasNext()) {
  Int2IntMap.Entry entry = it.next();
  int node = entry.getIntKey();
  // Each slot occupies `dim` consecutive floats.
  int offset = entry.getIntValue() * dim;
  int divider = inputUpdateCounter.get(node);
  // Locate the node's column/row within its layer matrix.
  int col = (node % numNodeOneRow) * dim;
  float[] values = layers[node / numNodeOneRow];
/**
 * Creates a hash-map-backed integer store.
 *
 * @param size Expected number of entries (initial map capacity)
 * @param def Value reported for keys that have not been stored
 */
public MapIntegerDBIDIntegerStore(int size, int def) {
  map = new Int2IntOpenHashMap(size);
  // Unset keys read back as the configured default.
  map.defaultReturnValue(def);
}
// NOTE(review): fragment — braces do not balance; the guard's condition and the
// loop closers are outside this view.
throw new AbortException("External clusterings can only be used with static DBIDs.");
// First pass: histogram of cluster sizes, so each DBID array below can be presized.
Int2IntOpenHashMap sizes = new Int2IntOpenHashMap();
for(IntListIterator it = assignment.iterator(); it.hasNext();) {
  sizes.addTo(it.nextInt(), 1);
// Second pass: allocate one modifiable DBID array per cluster id, presized to its count.
Int2ObjectOpenHashMap<ArrayModifiableDBIDs> cids = new Int2ObjectOpenHashMap<>(sizes.size());
// fastIterator() reuses the Entry object; each entry is consumed before advancing.
for(ObjectIterator<Int2IntMap.Entry> it = sizes.int2IntEntrySet().fastIterator(); it.hasNext();) {
  Int2IntMap.Entry entry = it.next();
  cids.put(entry.getIntKey(), DBIDUtil.newArray(entry.getIntValue()));
// NOTE(review): fragment — the for-loop is not closed in this view.
// Builds a value -> sorted-index map while writing values out in sorted order.
int numValues = sortedInts.length;
// Dictionary must be non-empty.
Preconditions.checkState(numValues > 0);
_intValueToIndexMap = new Int2IntOpenHashMap(numValues);
for (int i = 0; i < numValues; i++) {
  int value = sortedInts[i];
  // i is the value's position in sorted order (assumes sortedInts has no
  // duplicates, else later entries overwrite — confirm).
  _intValueToIndexMap.put(value, i);
  writer.writeInt(i, value);
/**
 * Creates an empty indexed priority queue.
 *
 * @param initialCapacity Initial capacity hint for the internal index maps
 * @param minHeap True for min order (smallest element on top), false for max order
 */
public BaseIndexedPriorityQueue(int initialCapacity, boolean minHeap) {
  // Bidirectional bookkeeping between caller keys and heap positions.
  _keyToIndexMap = new Int2IntOpenHashMap(initialCapacity);
  _indexToKeyMap = new Int2IntOpenHashMap(initialCapacity);
  _minHeap = minHeap;
}
/**
 * Counts, for every other user, how many items they share with user {@code idx1}.
 * The user itself is excluded from the result.
 *
 * @param idx1 user index whose co-rated-item counts are computed
 * @return map from user index to number of common items
 */
private Int2IntMap getIntersectionMap(int idx1) {
  final Int2IntOpenHashMap commonCounts = new Int2IntOpenHashMap();
  commonCounts.defaultReturnValue(0);
  // For each item idx1 has rated, bump the counter of every user who rated it.
  data.getUidxPreferences(idx1)
      .forEach(itemPref -> data.getIidxPreferences(itemPref.v1)
          .forEach(userPref -> commonCounts.addTo(userPref.v1, 1)));
  // Drop the self-intersection.
  commonCounts.remove(idx1);
  return commonCounts;
}
/**
 * Predict node indexes with one hot encoding, one binary var for each node.
 * Each row gets a 1 in the column of the tree node it falls into.
 */
public Frame predictNodeIndexOHE(Frame df, String varPrefix) {
  // Map each tree node's index to a dense column position.
  Int2IntOpenHashMap nodeToColumn = new Int2IntOpenHashMap();
  buildIndexMap(root, nodeToColumn);

  // One zero-filled binary variable per mapped node.
  int columnCount = nodeToColumn.size();
  List<Var> vars = new ArrayList<>(columnCount);
  for (int col = 0; col < columnCount; col++) {
    vars.add(VarBinary.fill(df.rowCount(), 0).withName(varPrefix + col));
  }

  // Set the indicator for the node each row lands in.
  int rowCount = df.rowCount();
  for (int row = 0; row < rowCount; row++) {
    int node = predictPointNodeIndex(root, df, row);
    vars.get(nodeToColumn.get(node)).setInt(row, 1);
  }
  return SolidFrame.byVars(vars);
}
// NOTE(review): fragment — severely fused; loop bodies are interleaved and no
// braces close in this view. Also suspicious: the second inner loop iterates
// tempRowVector but indexes with columnIndex, and the last loop iterates
// tempRowVector where tempColumnVector would be expected — verify against the
// full source before trusting this excerpt.
for (int rowIndex = 0; rowIndex < rowSize(); rowIndex++) {
  SequentialSparseVector tempRowVector = row(rowIndex);
  // index -> storage position for this row's entries
  Int2IntOpenHashMap tempPositionMap = new Int2IntOpenHashMap();
  for (Vector.VectorEntry vectorEntry : tempRowVector) {
    tempPositionMap.put(vectorEntry.index(), vectorEntry.position());
for (Vector.VectorEntry vectorEntry : tempRowVector) {
  columnToRowPositionMap[columnIndex][vectorEntry.position()] = rowPositionMap.get(vectorEntry.index()).get(columnIndex);
for (int columnIndex = 0; columnIndex < columnSize(); columnIndex++) {
  SequentialSparseVector tempColumnVector = column(columnIndex);
  // index -> storage position for this column's entries
  Int2IntOpenHashMap tempPositionMap = new Int2IntOpenHashMap();
  for (Vector.VectorEntry vectorEntry : tempColumnVector) {
    tempPositionMap.put(vectorEntry.index(), vectorEntry.position());
for (Vector.VectorEntry vectorEntry : tempRowVector) {
  rowToColumnPositionMap[rowIndex][vectorEntry.position()] = columnPositionMap.get(vectorEntry.index()).get(rowIndex);
/**
 * Benchmark pass: for every two insertions, remove the oldest key,
 * keeping the map at roughly half of m_keys.length live entries.
 */
@Override
public int test() {
  final Int2IntOpenHashMap m_map =
      new Int2IntOpenHashMap(m_keys.length / 2 + 1, m_fillFactor);
  int insertPos = 0;
  int removePos = 0;
  while (insertPos < m_keys.length) {
    // Two inserts (key mapped to itself)...
    m_map.put(m_keys[insertPos], m_keys[insertPos]);
    ++insertPos;
    m_map.put(m_keys[insertPos], m_keys[insertPos]);
    ++insertPos;
    // ...then one removal of the oldest inserted key.
    m_map.remove(m_keys[removePos++]);
  }
  return m_map.size();
}
}
/**
 * Accumulates {@code g * update[idx .. idx + dim)} into the slot of {@code inputs}
 * assigned to {@code node}. A node seen for the first time is assigned the next
 * free slot ({@code inputIndex.size()}) and registered in {@code inputIndex}.
 *
 * @param inputs     flat buffer of per-node accumulators, {@code dim} floats per slot
 * @param inputIndex node id -> slot index; mutated when a new node is registered
 * @param node       node id being updated
 * @param update     source gradient buffer
 * @param g          scalar multiplier applied to the update
 * @param idx        starting offset of this node's values within {@code update}
 */
protected void merge(float[] inputs, Int2IntOpenHashMap inputIndex, int node, float[] update, float g, int idx) {
  // Decide membership first instead of calling get() and discarding the result
  // when the key is absent (the original did get() before containsKey(), a
  // double-lookup that also depended on the overwrite to mask the default value).
  final int slot;
  if (inputIndex.containsKey(node)) {
    slot = inputIndex.get(node);
  } else {
    slot = inputIndex.size();
    inputIndex.put(node, slot);
  }
  final int offset = slot * dim;
  for (int c = 0; c < dim; c++) {
    inputs[offset + c] += g * update[idx + c];
  }
}
/**
 * Multiplies the value stored at {@code idx} by {@code i}, keeping the map sparse:
 * entries whose value equals {@code defaultValue} are not stored explicitly
 * (an absent key implicitly holds {@code defaultValue}).
 *
 * <p>BUG FIX: the original absent-key branch guarded the write with
 * {@code i != defaultValue}, which (a) silently dropped the update whenever the
 * multiplier happened to equal the default (e.g. default 3, i 3 should store 9 but
 * stored nothing), and (b) wrote a redundant explicit default when {@code i == 1}.
 * The correct test is whether the resulting product differs from the default.
 *
 * @param idx key whose value is scaled
 * @param i   multiplier (result is truncated to int)
 */
@Override
public void multiply(int idx, double i) {
  if (this.containsKey(idx)) {
    final int val = super.get(idx);
    if (val * i == defaultValue) {
      // Product collapses back to the default: drop the explicit entry.
      super.remove(idx);
    } else {
      super.put(idx, (int) (val * i));
    }
  } else {
    // Absent key implicitly holds defaultValue; store only if scaling changes it.
    final int newVal = (int) (defaultValue * i);
    if (newVal != defaultValue) {
      super.put(idx, newVal);
    }
  }
}
/**
 * Benchmark pass: insert every key mapped to itself, then overwrite every
 * entry with the identical mapping, and report the resulting map size.
 */
@Override
public int test() {
  final Int2IntOpenHashMap m_map = new Int2IntOpenHashMap(m_keys.length, m_fillFactor);
  // First pass: fresh inserts.
  for (final int key : m_keys) {
    m_map.put(key, key);
  }
  // Second pass: identical puts exercising the overwrite path.
  for (final int key : m_keys) {
    m_map.put(key, key);
  }
  return m_map.size();
}
}
// NOTE(review): fragment — fused excerpts; braces do not balance and `c` appears
// with two different types (a cluster key in the first part, a float[] centroid
// in the last part), so these lines come from different spots of the method.
// Counts words per cluster while summing their vectors into per-cluster centroids.
Int2IntOpenHashMap wordsPerCluster = new Int2IntOpenHashMap();
for( String word : clusters.keySet() ) {
  float[] vectorOf = map.getVectorOf( word );
  // Accumulate this word's vector into the running partial sum.
  for( int i = 0; i < map.N; i++ ) partial[ i ] += vectorOf[ i ];
  centroids.put( c, partial );
  wordsPerCluster.addTo( c, 1 );
// Normalization phase: divide each centroid by its cluster's word count.
int nw = wordsPerCluster.get( i );
for( int j = 0; j < map.N; j++ ) {
  c[ j ] /= nw;
/**
 * Registers a new symbol at the end of the frequency queue with frequency one.
 * If the queue is full, it is first pruned: frequencies are rescaled so that
 * frequency-one entries exist at the tail, and the tail half of those entries
 * (and their code2Pos registrations) is discarded to make room.
 */
private final void newSymbol( final int symbol ) {
  if ( queueSize == MAX_QUEUE_SIZE ) {
    // Queue filled up. First, we guarantee that there are elements with frequency one
    // by integer-dividing every frequency by the smallest (tail) frequency.
    if ( freq[ MAX_QUEUE_SIZE -1 ] != 1 )
      for( int j = MAX_QUEUE_SIZE; j-- != 0; ) freq[ j ] /= freq[ MAX_QUEUE_SIZE - 1 ];
    // Then, we remove half of them: scan back to the last entry with freq > 1...
    int j = MAX_QUEUE_SIZE;
    while( j-- != 0 ) if ( freq[ j ] > 1 ) break;
    // ...and evict the second half of the frequency-one run [j+1, MAX_QUEUE_SIZE).
    for( int k = j + ( MAX_QUEUE_SIZE - j ) / 2; k < MAX_QUEUE_SIZE; k++ ) {
      if ( ASSERTS ) assert freq[ k ] == 1;
      code2Pos.remove( queue[ k ] );
    }
    queueSize = j + ( MAX_QUEUE_SIZE - j ) / 2;
  }
  // Now we know that we have space: append the symbol with frequency one.
  if ( ASSERTS ) assert queueSize < MAX_QUEUE_SIZE;
  code2Pos.put( symbol, queueSize );
  queue[ queueSize ] = symbol;
  freq[ queueSize ] = 1;
  queueSize++;
}