/**
 * Builds a pre-index stats collector for a FLOAT column.
 *
 * <p>Both the raw and the aggregated value sets are pre-sized to
 * {@code INITIAL_HASH_SET_SIZE} to avoid early rehashing.
 *
 * @param column the name of the column whose stats are collected
 * @param statsCollectorConfig configuration forwarded to the base collector
 */
public FloatColumnPreIndexStatsCollector(String column, StatsCollectorConfig statsCollectorConfig) {
  super(column, statsCollectorConfig);
  // The two set initializations are independent of each other.
  aggregatedFloatSet = new FloatOpenHashSet(INITIAL_HASH_SET_SIZE);
  rawFloatSet = new FloatOpenHashSet(INITIAL_HASH_SET_SIZE);
}
// NOTE(review): this fragment is truncated — the while-loop's closing brace is
// not visible in this chunk, so it is unclear whether the toFloatArray/sort
// statements execute inside or after the loop. Confirm against the full file
// before relying on this reading.
Arrays.sort(_longValues);
// Collect NUM_VALUES distinct random floats; set membership de-duplicates.
FloatOpenHashSet floatSet = new FloatOpenHashSet();
while (floatSet.size() < NUM_VALUES) { floatSet.add(RANDOM.nextFloat());
// Snapshot the unique values into a primitive array and sort ascending.
_floatValues = floatSet.toFloatArray();
Arrays.sort(_floatValues);
/**
 * Adds every element of {@code c} to this set, pre-sizing the table first.
 *
 * <p>With a load factor of at most .5 the table is grown eagerly so it can
 * hold at least {@code c.size()} elements; with a higher load factor only a
 * tentative capacity of {@code size() + c.size()} is requested.
 *
 * @param c the collection of boxed floats to add.
 * @return true if this set changed as a result of the call.
 */
@Override
public boolean addAll(Collection<? extends Float> c) {
  if (f <= .5) {
    ensureCapacity(c.size());
  } else {
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}

@Override
/** * Creates a new hash set and fills it with the elements of a given array. * * @param a * an array whose elements will be used to fill the set. * @param offset * the first element to use. * @param length * the number of elements to use. * @param f * the load factor. */ public FloatOpenHashSet(final float[] a, final int offset, final int length, final float f) { this(length < 0 ? 0 : length, f); FloatArrays.ensureOffsetLength(a, offset, length); for (int i = 0; i < length; i++) add(a[offset + i]); } /**
/**
 * Writes {@code set} to {@code out}. Known primitive open-hash-set types are
 * encoded compactly as their backing primitive arrays; any other set is
 * written as its size followed by each element serialized individually.
 *
 * @throws IOException if the underlying output fails.
 */
private void serializeSet(final DataOutput out, final Set set) throws IOException {
  final Class cls = set.getClass();
  // Class objects are VM singletons, so identity comparison is equivalent to
  // the exact-type getClass().equals(...) dispatch (subclasses fall through
  // to the generic branch, same as before).
  if (cls == IntOpenHashSet.class) {
    serialize(out, ((IntOpenHashSet) set).toIntArray());
  } else if (cls == FloatOpenHashSet.class) {
    serialize(out, ((FloatOpenHashSet) set).toFloatArray());
  } else if (cls == DoubleOpenHashSet.class) {
    serialize(out, ((DoubleOpenHashSet) set).toDoubleArray());
  } else if (cls == ShortOpenHashSet.class) {
    serialize(out, ((ShortOpenHashSet) set).toShortArray());
  } else if (cls == ByteOpenHashSet.class) {
    serialize(out, ((ByteOpenHashSet) set).toByteArray());
  } else if (cls == LongOpenHashSet.class) {
    serialize(out, ((LongOpenHashSet) set).toLongArray());
  } else if (cls == BooleanOpenHashSet.class) {
    serialize(out, ((BooleanOpenHashSet) set).toBooleanArray());
  } else if (cls == CharOpenHashSet.class) {
    serialize(out, ((CharOpenHashSet) set).toCharArray());
  } else {
    // Generic fallback: size header, then each element.
    serialize(out, set.size());
    for (final Object element : set) {
      serialize(out, element);
    }
  }
}
/**
 * Creates a new hash set containing every element of the given collection.
 *
 * @param c
 *            the {@link Collection} whose elements populate the new set.
 * @param f
 *            the load factor.
 */
public FloatOpenHashSet(final Collection<? extends Float> c, final float f) {
	// Size the table for c.size() elements up front, then bulk-add.
	this(c.size(), f);
	addAll(c);
}

/**
/**
 * Returns true if any value referenced by this buffer is contained in the
 * given {@link FloatOpenHashSet}.
 *
 * @param set expected to be a {@code FloatOpenHashSet} (unchecked cast, as before).
 */
@Override
public boolean containsAny(Object set) {
  final FloatOpenHashSet candidates = (FloatOpenHashSet) set;
  // Hoist the loop-invariant cast of the term list out of the loop.
  final TermFloatList termList = (TermFloatList) _mTermList;
  for (int idx = 0; idx < this._length; idx++) {
    if (candidates.contains(termList.getPrimitiveValue(_buf[idx]))) {
      return true;
    }
  }
  return false;
}
/**
 * Adds every element of the type-specific collection {@code c}, avoiding
 * boxing. The table is pre-sized before the bulk add: eagerly for
 * {@code c.size()} elements when the load factor is at most .5, otherwise
 * tentatively for {@code size() + c.size()} elements.
 *
 * @param c the primitive float collection to add.
 * @return true if this set changed as a result of the call.
 */
@Override
public boolean addAll(FloatCollection c) {
  if (f <= .5) {
    ensureCapacity(c.size());
  } else {
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}

@Override
/**
 * Creates a new hash set filled with the elements produced by a
 * type-specific iterator.
 *
 * @param i
 *            the iterator supplying the elements.
 * @param f
 *            the load factor.
 */
public FloatOpenHashSet(final FloatIterator i, final float f) {
	// The element count is unknown, so start from the default table size.
	this(DEFAULT_INITIAL_SIZE, f);
	for (; i.hasNext(); ) {
		add(i.nextFloat());
	}
}

/**
/**
 * Serializes a set to the given output. Each supported primitive open hash
 * set is flattened to its primitive array form; unrecognized set types are
 * written generically as a size followed by the individual elements.
 *
 * @param out destination stream.
 * @param set the set to write.
 * @throws IOException if writing fails.
 */
private void serializeSet(final DataOutput out, final Set set) throws IOException {
  final Class setClass = set.getClass();
  if (setClass.equals(IntOpenHashSet.class)) {
    serialize(out, ((IntOpenHashSet) set).toIntArray());
  } else if (setClass.equals(FloatOpenHashSet.class)) {
    serialize(out, ((FloatOpenHashSet) set).toFloatArray());
  } else if (setClass.equals(DoubleOpenHashSet.class)) {
    serialize(out, ((DoubleOpenHashSet) set).toDoubleArray());
  } else if (setClass.equals(ShortOpenHashSet.class)) {
    serialize(out, ((ShortOpenHashSet) set).toShortArray());
  } else if (setClass.equals(ByteOpenHashSet.class)) {
    serialize(out, ((ByteOpenHashSet) set).toByteArray());
  } else if (setClass.equals(LongOpenHashSet.class)) {
    serialize(out, ((LongOpenHashSet) set).toLongArray());
  } else if (setClass.equals(BooleanOpenHashSet.class)) {
    serialize(out, ((BooleanOpenHashSet) set).toBooleanArray());
  } else if (setClass.equals(CharOpenHashSet.class)) {
    serialize(out, ((CharOpenHashSet) set).toCharArray());
  } else {
    // Fallback: exact-type dispatch failed, emit size + elements one by one.
    serialize(out, set.size());
    for (final Object member : set) {
      serialize(out, member);
    }
  }
}
/**
 * Creates a new hash set holding a copy of the given type-specific
 * collection (no boxing).
 *
 * @param c
 *            the primitive float collection to copy into the new set.
 * @param f
 *            the load factor.
 */
public FloatOpenHashSet(final FloatCollection c, final float f) {
	// Pre-size for c.size() elements, then bulk-add via the primitive path.
	this(c.size(), f);
	addAll(c);
}

/**
/**
 * Evaluator for NOT_IN over raw FLOAT values.
 *
 * <p>Parses the predicate's string literals once at construction time and
 * keeps them in a primitive hash set sized via {@code HashUtil} to avoid
 * rehashing during the fill.
 *
 * @param notInPredicate the NOT_IN predicate supplying the excluded values
 */
FloatRawValueBasedNotInPredicateEvaluator(NotInPredicate notInPredicate) {
  String[] stringValues = notInPredicate.getValues();
  _nonMatchingValues = new FloatOpenHashSet(HashUtil.getMinHashSetSize(stringValues.length));
  for (String stringValue : stringValues) {
    _nonMatchingValues.add(Float.parseFloat(stringValue));
  }
}
FloatRawValueBasedInPredicateEvaluator(InPredicate inPredicate) { String[] values = inPredicate.getValues(); _matchingValues = new FloatOpenHashSet(HashUtil.getMinHashSetSize(values.length)); for (String value : values) { _matchingValues.add(Float.parseFloat(value)); } }
/**
 * Transforms the projection block to multi-value float rows, keeping only
 * values contained in the IN-clause set.
 *
 * <p>Non-FLOAT result types delegate to the default implementation. The
 * parsed float set and the reusable per-call result buffer are initialized
 * lazily on first use.
 *
 * @param projectionBlock the block of documents to transform
 * @return per-document filtered float arrays (reused buffer, as before)
 */
@Override
public float[][] transformToFloatValuesMV(@Nonnull ProjectionBlock projectionBlock) {
  if (getResultMetadata().getDataType() != FieldSpec.DataType.FLOAT) {
    return super.transformToFloatValuesMV(projectionBlock);
  }
  if (_floatValueSet == null) {
    // First call: parse the string literals into a primitive set and
    // allocate the result buffer for the maximum docs per call.
    _floatValueSet = new FloatOpenHashSet();
    for (String stringValue : _stringValueSet) {
      _floatValueSet.add(Float.parseFloat(stringValue));
    }
    _floatValues = new float[DocIdSetPlanNode.MAX_DOC_PER_CALL][];
  }
  float[][] unfiltered = _mainTransformFunction.transformToFloatValuesMV(projectionBlock);
  int numDocs = projectionBlock.getNumDocs();
  for (int docId = 0; docId < numDocs; docId++) {
    _floatValues[docId] = filterFloats(_floatValueSet, unfiltered[docId]);
  }
  return _floatValues;
}
/** Counts the distinct non-missing float values in this column. */
@Override
public int countUnique() {
  FloatSet distinct = new FloatOpenHashSet();
  for (int row = 0; row < size(); row++) {
    if (isMissing(row)) {
      continue; // skip missing cells, as before
    }
    distinct.add(getFloat(row));
  }
  return distinct.size();
}
/**
 * Returns a new column named {@code "<name> Unique values"} containing each
 * distinct non-missing value of this column (iteration order of the backing
 * hash set, as before).
 */
@Override
public FloatColumn unique() {
  final FloatSet distinct = new FloatOpenHashSet();
  for (int row = 0; row < size(); row++) {
    if (!isMissing(row)) {
      distinct.add(getFloat(row));
    }
  }
  final FloatColumn result = FloatColumn.create(name() + " Unique values");
  for (float value : distinct) {
    result.append(value);
  }
  return result;
}
// NOTE(review): fragment is truncated in this view — the test method's body
// and closing brace continue beyond this chunk; only the setup is visible.
@Test public void testFloatPredicateEvaluators() {
// Predicate literals are collected as strings; valueSet tracks the parsed floats.
List<String> stringValues = new ArrayList<>(NUM_PREDICATE_VALUES);
FloatSet valueSet = new FloatOpenHashSet();
/** Returns a new hash-set copy of {@code data} (duplicates collapsed). */
FloatSet asSet() {
  final FloatOpenHashSet copy = new FloatOpenHashSet(data);
  return copy;
}
// NOTE(review): fragment — the enclosing method's opening and closing braces
// are outside this view. The dispatch appears keyed on a boxed element class
// (Float.class, Double.class) while constructing primitive open hash sets
// from `set`; the declared type of `set` and `oCls` cannot be determined
// from this chunk — confirm against the full method before changing.
return new IntOpenHashSet(set); } else if (oCls.equals(Float.class)) { return new FloatOpenHashSet(set); } else if (oCls.equals(Double.class)) { return new DoubleOpenHashSet(set);
/**
 * Reads back a set written by the matching serializer. An {@code Integer}
 * payload is a size header for a generic {@code ObjectOpenHashSet}; a
 * primitive-array payload selects the corresponding primitive open hash set.
 *
 * @param is source stream.
 * @return the deserialized set.
 * @throws EOFException if the payload class is not a recognized encoding.
 */
private Set deserializeSet(final DataInput is) throws IOException, ClassNotFoundException {
  final Object payload = deserialize(is);
  final Class cls = payload.getClass();
  // Class objects are VM singletons, so identity comparison matches the
  // original equals-based exact-type dispatch. Early returns replace the
  // original else-if chain; each branch is mutually exclusive.
  if (cls == Integer.class) {
    final int size = (Integer) payload;
    final ObjectOpenHashSet set = new ObjectOpenHashSet(size);
    for (int i = 0; i < size; i++) {
      set.add(deserialize(is));
    }
    return set;
  }
  if (cls == int[].class) {
    return new IntOpenHashSet((int[]) payload);
  }
  if (cls == float[].class) {
    return new FloatOpenHashSet((float[]) payload);
  }
  if (cls == double[].class) {
    return new DoubleOpenHashSet((double[]) payload);
  }
  if (cls == short[].class) {
    return new ShortOpenHashSet((short[]) payload);
  }
  if (cls == byte[].class) {
    return new ByteOpenHashSet((byte[]) payload);
  }
  if (cls == long[].class) {
    return new LongOpenHashSet((long[]) payload);
  }
  if (cls == boolean[].class) {
    return new BooleanOpenHashSet((boolean[]) payload);
  }
  if (cls == char[].class) {
    return new CharOpenHashSet((char[]) payload);
  }
  throw new EOFException();
}