/**
 * Registers {@code field} under {@code key} for later retrieval via
 * {@link #getByKey(Object)}, and adds it to the document.
 *
 * @throws IllegalStateException if a field was already stored under {@code key}
 */
public void addWithKey(Object key, IndexableField field) {
    // Reject duplicates before touching anything else.
    if (keyedFields != null && keyedFields.containsKey(key)) {
        throw new IllegalStateException("Only one field can be stored per key");
    }
    // Lazily create the map on first keyed add.
    if (keyedFields == null) {
        keyedFields = new ObjectObjectHashMap<>();
    }
    keyedFields.put(key, field);
    add(field);
}
/** Benchmark: looks up every prepared element; a missing key is a hard failure. */
@Benchmark
public void get() {
    final ObjectObjectMap<String, String> map = this.hppcMap;
    final String[] keys = this.elements;
    final int n = this.size;
    // Hoisted field reads keep the hot loop free of this-dereferences.
    for (int i = 0; i < n; i++) {
        if (map.get(keys[i]) == null) {
            throw new AssertionError(i);
        }
    }
}
}
// Lazily create the forward (name -> int) and reverse (int -> name) sparse
// indices for this feature, using a double-checked pattern so the common
// already-initialized case avoids the lock.
// NOTE(review): the unsynchronized fast-path get() is only safe if these maps
// are thread-safe or the race is benign — confirm the map implementations.
// (Snippet is truncated in this chunk; closing braces are outside this view.)
ObjectIntMap<String> sparseIndex = sparseFeatureIndex.get(featureName);
IntObjectMap<String> reverseSparseIndex = reverseSparseFeatureIndex.get(featureName);
if (sparseIndex == null || reverseSparseIndex == null) {
    synchronized (sparseFeatureIndex) {
        // Re-check under the lock: another thread may have initialized both
        // maps between our unsynchronized read and acquiring the monitor.
        sparseIndex = sparseFeatureIndex.get(featureName);
        reverseSparseIndex = reverseSparseFeatureIndex.get(featureName);
        if (sparseIndex == null || reverseSparseIndex == null) {
            sparseIndex = new ObjectIntHashMap<>();
            reverseSparseIndex = new IntObjectHashMap<>();
            sparseFeatureIndex.put(featureName, sparseIndex);
            reverseSparseFeatureIndex.put(featureName, reverseSparseIndex);
/**
 * Generates {@code size} random numeric strings from a fixed seed (so runs are
 * reproducible) and pre-populates the HPPC map with them; values are dummies.
 */
@Setup
public void setUp() {
    final Random rnd = new Random(123456789012345L);
    final int n = this.size;
    this.elements = new String[n];
    this.hppcMap = new ObjectObjectHashMap<>(n);
    for (int i = 0; i < n; i++) {
        final String element =
            RandomStringUtils.random(RANDOM_COUNT, 0, 0, false, true, null, rnd);
        this.elements[i] = element;
        this.hppcMap.put(element, "dummy");
    }
}
/**
 * This writes a feature's individual value, using the human readable name if possible, to a BufferedWriter.
 *
 * @param feature the namespace feature whose value is being printed
 * @param index   the dense index within that feature's component
 * @param vector  the vector to read the value from
 * @param bw      the destination writer
 * @throws IOException if the underlying writer fails
 */
private void debugFeatureValue(String feature, int index, ConcatVector vector, BufferedWriter bw) throws IOException {
    bw.write("\t");
    // Only indices recorded in the sparse index have a human-readable name;
    // the reverse index maps that int back to the original string.
    if (sparseFeatureIndex.containsKey(feature) && sparseFeatureIndex.get(feature).values().contains(index)) {
        // we can map this index to an interpretable string, so we do
        bw.write("SPARSE VALUE \"");
        bw.write(reverseSparseFeatureIndex.get(feature).get(index));
        bw.write("\"");
    } else {
        // we can't map this to a useful string, so we default to the number
        bw.write(Integer.toString(index));
    }
    bw.write(": ");
    // -1 is the sentinel for "feature not registered" passed to getValueAt.
    bw.write(Double.toString(vector.getValueAt(featureToIndex.getOrDefault(feature, -1), index)));
    bw.write("\n");
}
}
/**
 * Interleaves two identity-map puts with one remove (of an earlier key) until
 * the key array is exhausted, then reports the resulting map size.
 */
@Override
public int test() {
    final ObjectObjectMap<Integer, Integer> map =
        new ObjectObjectIdentityHashMap<>(m_keys.length / 2 + 1, m_fillFactor);
    int added = 0;
    int removed = 0;
    while (added < m_keys.length) {
        map.put(m_keys[added], m_keys[added]);
        added++;
        map.put(m_keys[added], m_keys[added]);
        added++;
        // Removal trails insertion, so the map keeps growing on balance.
        map.remove(m_keys[removed]);
        removed++;
    }
    return map.size();
}
}
/**
 * Inserts both key sets (each key mapped to itself) into a hash map sized for
 * the first set, then returns the final element count.
 */
@Override
public int test() {
    final ObjectObjectMap<Integer, Integer> map =
        new ObjectObjectHashMap<>(m_keys.length, m_fillFactor);
    for (int idx = 0; idx < m_keys.length; idx++) {
        map.put(m_keys[idx], m_keys[idx]);
    }
    for (int idx = 0; idx < m_keys2.length; idx++) {
        map.put(m_keys2[idx], m_keys2[idx]);
    }
    return map.size();
}
}
/**
 * This constructs a fresh vector that is sized correctly to accommodate all the known sparse values for vectors
 * that are possibly sparse.
 *
 * @param presize a flag for whether or not to create all the dense double arrays for our sparse features
 *
 * @return a new, internally correctly sized ConcatVector that will work correctly as weights for features from
 * this namespace;
 */
public ConcatVector newWeightsVector(boolean presize) {
    ConcatVector vector = new ConcatVector(featureToIndex.size());
    if (presize) {
        // Allocate a dense array per sparse feature, sized to the number of
        // distinct sparse values seen so far for that feature.
        for (ObjectCursor<String> s : sparseFeatureIndex.keys()) {
            int size = sparseFeatureIndex.get(s.value).size();
            // ensureFeature resolves (and may register) the component index.
            vector.setDenseComponent(ensureFeature(s.value), new double[size]);
        }
    }
    // The always-one bias feature is pinned to value 1 in every weights vector.
    setAlwaysOneFeature(vector, 1);
    return vector;
}
/**
 * Routes the file to the output port registered for its extension, falling
 * back to the unknown-extension port when no mapping exists.
 */
@Override
protected void execute(final File file) {
    final String extension = Files.getFileExtension(file.getAbsolutePath());
    this.fileExtensions
        .getOrDefault(extension, unknownFileExtensionOutputPort)
        .send(file);
}
/**
 * Benchmark: fills an HPPC map with the prepared elements, either presized to
 * the element count or started at the library's default expected capacity.
 *
 * @return the populated map, so the JIT cannot discard the work
 */
@Benchmark
public ObjectObjectMap<String, String> hppc() {
    final int n = this.size;
    final float lf = this.loadFactor;
    final String[] keys = this.elements;
    final ObjectObjectMap<String, String> map = this.isPresized
        ? new ObjectObjectHashMap<>(n, lf)
        : new ObjectObjectHashMap<>(Containers.DEFAULT_EXPECTED_ELEMENTS, lf);
    for (int i = 0; i < n; i++) {
        map.put(keys[i], "dummy");
    }
    return map;
}
/**
 * Interleaves two puts with one remove — the removal candidates come from a
 * second key array, so most removes are misses — then reports the map size.
 */
@Override
public int test() {
    final ObjectObjectMap<Integer, Integer> map =
        new ObjectObjectHashMap<>(m_keys.length / 2 + 1, m_fillFactor);
    int added = 0;
    int removed = 0;
    while (added < m_keys.length) {
        map.put(m_keys[added], m_keys[added]);
        added++;
        map.put(m_keys[added], m_keys[added]);
        added++;
        map.remove(m_keys2[removed]);
        removed++;
    }
    return map.size();
}
}
@Override public int test() { final ObjectObjectMap<Integer, Integer> m_map = new ObjectObjectIdentityHashMap<>( m_keys.length, m_fillFactor ); for ( int i = 0; i < m_keys.length; ++i ) m_map.put( m_keys[ i ], m_keys[ i ] ); for ( int i = 0; i < m_keys.length; ++i ) //same keys are use for identity test m_map.put( m_keys[ i ], m_keys[ i ] ); return m_map.size(); } }
/**
 * @return a Builder for proto serialization
 */
public ConcatVectorNamespaceProto.ConcatVectorNamespace.Builder getProtoBuilder() {
    ConcatVectorNamespaceProto.ConcatVectorNamespace.Builder m = ConcatVectorNamespaceProto.ConcatVectorNamespace.newBuilder();
    // Add the outer layer features
    for (ObjectCursor<String> feature : featureToIndex.keys()) {
        ConcatVectorNamespaceProto.ConcatVectorNamespace.FeatureToIndexComponent.Builder component = ConcatVectorNamespaceProto.ConcatVectorNamespace.FeatureToIndexComponent.newBuilder();
        component.setKey(feature.value);
        // -1 default should never fire here, since the key came from the map itself.
        component.setData(featureToIndex.getOrDefault(feature.value, -1));
        m.addFeatureToIndex(component);
    }
    // Serialize each per-feature sparse index as a nested key -> int mapping.
    for (ObjectCursor<String> feature : sparseFeatureIndex.keys()) {
        ConcatVectorNamespaceProto.ConcatVectorNamespace.SparseFeatureIndex.Builder sparseFeature = ConcatVectorNamespaceProto.ConcatVectorNamespace.SparseFeatureIndex.newBuilder();
        sparseFeature.setKey(feature.value);
        for (ObjectCursor<String> sparseFeatureName : sparseFeatureIndex.get(feature.value).keys()) {
            ConcatVectorNamespaceProto.ConcatVectorNamespace.FeatureToIndexComponent.Builder component = ConcatVectorNamespaceProto.ConcatVectorNamespace.FeatureToIndexComponent.newBuilder();
            component.setKey(sparseFeatureName.value);
            component.setData(sparseFeatureIndex.get(feature.value).getOrDefault(sparseFeatureName.value, -1));
            sparseFeature.addFeatureToIndex(component);
        }
        m.addSparseFeatureIndex(sparseFeature);
    }
    return m;
}
/**
 * Registers {@code field} under {@code key} for later retrieval via
 * {@link #getByKey(Object)}, and adds it to the document.
 *
 * @throws IllegalStateException if a field was already stored under {@code key}
 */
public void addWithKey(Object key, IndexableField field) {
    // Reject duplicates before touching anything else.
    if (keyedFields != null && keyedFields.containsKey(key)) {
        throw new IllegalStateException("Only one field can be stored per key");
    }
    // Lazily create the map on first keyed add.
    if (keyedFields == null) {
        keyedFields = new ObjectObjectHashMap<>();
    }
    keyedFields.put(key, field);
    add(field);
}
/** Get back fields that have been previously added with {@link #addWithKey(Object, IndexableField)}. */
public IndexableField getByKey(Object key) {
    // The map is created lazily; absent means no keyed field was ever added.
    if (keyedFields == null) {
        return null;
    }
    return keyedFields.get(key);
}
/**
 * Registers a new output port for the given file extension. A leading dot
 * (".txt" vs "txt") is stripped so both spellings map to the same key.
 *
 * @param fileExtension the extension, with or without a leading dot
 * @return the freshly created output port for that extension
 */
public OutputPort<File> addFileExtension(String fileExtension) {
    // Normalize into a local instead of reassigning the parameter.
    final String key = fileExtension.startsWith(".")
        ? fileExtension.substring(1)
        : fileExtension;
    final OutputPort<File> outputPort = this.createOutputPort();
    this.fileExtensions.put(key, outputPort);
    return outputPort;
}
/**
 * Registers {@code field} under {@code key} for later retrieval via
 * {@link #getByKey(Object)}, and adds it to the document.
 *
 * @throws IllegalStateException if a field was already stored under {@code key}
 */
public void addWithKey(Object key, IndexableField field) {
    // Reject duplicates before touching anything else.
    if (keyedFields != null && keyedFields.containsKey(key)) {
        throw new IllegalStateException("Only one field can be stored per key");
    }
    // Lazily create the map on first keyed add.
    if (keyedFields == null) {
        keyedFields = new ObjectObjectHashMap<>();
    }
    keyedFields.put(key, field);
    add(field);
}
/**
 * Looks up every key, flipping an accumulator bit per hit so the JIT cannot
 * eliminate the map lookups as dead code.
 */
@Override
public int test() {
    int acc = 0;
    for (int i = 0; i < m_keys.length; i++) {
        if (m_map.get(m_keys[i]) != null) {
            acc ^= 1;
        }
    }
    return acc;
}
}
// Populates the map so that roughly 1-in-oneFailureOutOf lookups will miss:
// keys divisible by oneFailureOutOf are stored shifted by one, so a later
// get() on the original key fails.
// NOTE(review): `new Integer(...)` is deprecated, but here it looks deliberate —
// it guarantees a distinct boxed object so lookups cannot short-circuit on
// reference identity; confirm before replacing with Integer.valueOf.
// `for (Integer key : m_keys)` implies m_keys is Integer[] — verify in the base class.
@Override
public void setup(final int[] keys, final float fillFactor, final int oneFailureOutOf ) {
    super.setup( keys, fillFactor, oneFailureOutOf );
    m_map = new ObjectObjectHashMap<>( keys.length, fillFactor );
    for (Integer key : m_keys)
        m_map.put(new Integer( key % oneFailureOutOf == 0 ? key + 1 : key ), key);
}
/**
 * Registers {@code field} under {@code key} for later retrieval via
 * {@link #getByKey(Object)}, and adds it to the document.
 *
 * @throws IllegalStateException if a field was already stored under {@code key}
 */
public void addWithKey(Object key, IndexableField field) {
    // Reject duplicates before touching anything else.
    if (keyedFields != null && keyedFields.containsKey(key)) {
        throw new IllegalStateException("Only one field can be stored per key");
    }
    // Lazily create the map on first keyed add.
    if (keyedFields == null) {
        keyedFields = new ObjectObjectHashMap<>();
    }
    keyedFields.put(key, field);
    add(field);
}