@Override
public void init(int capacity) {
    // Idempotent: once the backing map exists, later calls do nothing.
    if (data != null) {
        return;
    }
    this.capacity = capacity;
    // NOTE(review): capacity is recorded but not handed to the hash map's
    // constructor — confirm that default sizing is intended here.
    data = new OpenIntDoubleHashMap();
}
/**
 * Constructs a matrix with a given number of cells, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param size            the number of cells the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>size<0</tt>.
 */
public SparseDoubleMatrix1D(int size, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(size);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/**
 * Constructs a matrix with a given number of rows and columns, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param rows            the number of rows the matrix shall have.
 * @param columns         the number of columns the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>rows<0 || columns<0 || (double)columns*rows > Integer.MAX_VALUE</tt>.
 */
public SparseDoubleMatrix2D(int rows, int columns, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(rows, columns);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/**
 * Constructs a matrix with a given number of cells, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param size            the number of cells the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>size<0</tt>.
 */
public SparseDoubleMatrix1D(int size, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(size);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/**
 * Constructs a matrix with a given number of rows and columns, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param rows            the number of rows the matrix shall have.
 * @param columns         the number of columns the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>rows<0 || columns<0 || (double)columns*rows > Integer.MAX_VALUE</tt>.
 */
public SparseDoubleMatrix2D(int rows, int columns, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(rows, columns);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/**
 * Constructs a matrix with a given number of slices, rows and columns, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param slices          the number of slices the matrix shall have.
 * @param rows            the number of rows the matrix shall have.
 * @param columns         the number of columns the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>(double)columns*rows > Integer.MAX_VALUE</tt>.
 * @throws IllegalArgumentException if <tt>slices<0 || rows<0 || columns<0</tt>.
 */
public SparseDoubleMatrix3D(int slices, int rows, int columns, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(slices, rows, columns);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/**
 * Constructs a matrix with a given number of slices, rows and columns, using memory as specified.
 * All entries are initially <tt>0</tt>.
 * For details related to memory usage see {@link cern.colt.map.OpenIntDoubleHashMap}.
 *
 * @param slices          the number of slices the matrix shall have.
 * @param rows            the number of rows the matrix shall have.
 * @param columns         the number of columns the matrix shall have.
 * @param initialCapacity the initial capacity of the hash map;
 *                        if not known, set <tt>initialCapacity=0</tt> or small.
 * @param minLoadFactor   the minimum load factor of the hash map.
 * @param maxLoadFactor   the maximum load factor of the hash map.
 * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= maxLoadFactor)</tt>.
 * @throws IllegalArgumentException if <tt>(double)columns*rows > Integer.MAX_VALUE</tt>.
 * @throws IllegalArgumentException if <tt>slices<0 || rows<0 || columns<0</tt>.
 */
public SparseDoubleMatrix3D(int slices, int rows, int columns, int initialCapacity, double minLoadFactor, double maxLoadFactor) {
    setUp(slices, rows, columns);
    this.elements = new OpenIntDoubleHashMap(initialCapacity, minLoadFactor, maxLoadFactor);
}
/**
/** */ public static void doubleTest2() { // using a map int[] keys = {0 , 3 , 100000, 9 }; double[] values = {100.0, 1000.0, 70.0 , 71.0}; int size = keys.length; AbstractIntDoubleMap map = new OpenIntDoubleHashMap(size*2, 0.2, 0.5); for (int i=0; i<keys.length; i++) { map.put(keys[i], (int)values[i]); } System.out.println(map.containsKey(3)); System.out.println(map.get(3)); System.out.println(map.containsKey(4)); System.out.println(map.get(4)); System.out.println(map.containsValue((int)71.0)); System.out.println(map.keyOf((int)71.0)); System.out.println(map); } /**
/** */ public static void doubleTest2() { // using a map int[] keys = {0 , 3 , 100000, 9 }; double[] values = {100.0, 1000.0, 70.0 , 71.0}; int size = keys.length; AbstractIntDoubleMap map = new OpenIntDoubleHashMap(size*2, 0.2, 0.5); for (int i=0; i<keys.length; i++) { map.put(keys[i], (int)values[i]); } System.out.println(map.containsKey(3)); System.out.println(map.get(3)); System.out.println(map.containsKey(4)); System.out.println(map.get(4)); System.out.println(map.containsValue((int)71.0)); System.out.println(map.keyOf((int)71.0)); System.out.println(map); } /**
System.out.println(" [Before aggregation] : "); OpenIntDoubleHashMap categoryDistribution = new OpenIntDoubleHashMap(); int sumCount = 0; OpenIntIntHashMap[] topicWordCountArray = rootNode.getTopicWordCountArray(); OpenIntDoubleHashMap[] topicWordDistributionArray = new OpenIntDoubleHashMap[topicWordCountArray.length]; for (int i = 0; i < topicWordCountArray.length; ++i) { topicWordDistributionArray[i] = new OpenIntDoubleHashMap(); IntArrayList indexList = topicWordCountArray[i].keys(); int sumLocalCount = 0;
OpenIntDoubleHashMap double_map = new OpenIntDoubleHashMap(); a2v.put(id, double_map); } else if (attribute_type == STRING_TYPE) {