/**
 * Creates a stats collector for a double column, pre-sizing both value sets
 * so that early additions do not trigger immediate rehashing.
 */
public DoubleColumnPreIndexStatsCollector(String column, StatsCollectorConfig statsCollectorConfig) {
  super(column, statsCollectorConfig);
  aggregatedDoubleSet = new DoubleOpenHashSet(INITIAL_HASH_SET_SIZE);
  rawDoubleSet = new DoubleOpenHashSet(INITIAL_HASH_SET_SIZE);
}
Arrays.sort(_floatValues); DoubleOpenHashSet doubleSet = new DoubleOpenHashSet(); while (doubleSet.size() < NUM_VALUES) { doubleSet.add(RANDOM.nextDouble()); _doubleValues = doubleSet.toDoubleArray(); Arrays.sort(_doubleValues);
/**
 * Builds the unique-value index for {@code var}: collects the distinct double
 * values, optionally sorts them, assigns each a dense integer id, and records
 * which rows carry each id.
 *
 * @param var    the variable whose values are indexed
 * @param sorted whether the distinct values should be kept in sorted order
 */
private UniqueDouble(Var var, boolean sorted) {
  super(sorted);
  // Gather the distinct values of the variable into a primitive set.
  DoubleOpenHashSet keySet = new DoubleOpenHashSet();
  for (int i = 0; i < var.rowCount(); i++) {
    keySet.add(var.getDouble(i));
  }
  double[] elements = keySet.toDoubleArray();
  if (sorted) {
    // Custom comparator — presumably handles NaN/missing ordering; see UniqueDoubleComparator.
    DoubleArrays.quickSort(elements, new UniqueDoubleComparator());
  }
  // Map each distinct value to its position (dense id) in `elements`.
  Double2IntOpenHashMap uniqueKeys = new Double2IntOpenHashMap();
  values = new DoubleArrayList(elements);
  for (int i = 0; i < elements.length; i++) {
    uniqueKeys.put(elements[i], i);
  }
  // Invert the mapping: for each unique-value id, the list of row indices holding it.
  rowLists = new Int2ObjectOpenHashMap<>();
  for (int i = 0; i < var.rowCount(); i++) {
    double key = var.getDouble(i);
    int id = uniqueKeys.get(key);
    if (!rowLists.containsKey(id)) {
      rowLists.put(id, new IntArrayList());
    }
    rowLists.get(id).add(i);
  }
  updateIdsByRow(var.rowCount());
}
@Override
public boolean addAll(Collection<? extends Double> c) {
  // With a load factor of at most .5 we can size the table for c.size()
  // elements outright; otherwise only tentatively reserve room for
  // size() + c.size() elements before delegating the actual insertion.
  if (f <= .5) {
    ensureCapacity(c.size());
  } else {
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}
@Override
/** * Creates a new hash set and fills it with the elements of a given array. * * @param a * an array whose elements will be used to fill the set. * @param offset * the first element to use. * @param length * the number of elements to use. * @param f * the load factor. */ public DoubleOpenHashSet(final double[] a, final int offset, final int length, final float f) { this(length < 0 ? 0 : length, f); DoubleArrays.ensureOffsetLength(a, offset, length); for (int i = 0; i < length; i++) add(a[offset + i]); } /**
/**
 * Serializes a set, special-casing the primitive-specialized fastutil set
 * types: those are dumped as primitive arrays, avoiding per-element boxing.
 * Any other set falls back to writing its size followed by each element.
 */
private void serializeSet(final DataOutput out, final Set set) throws IOException {
  final Class cls = set.getClass();
  // Exact-class checks keep the fast path to the known fastutil implementations.
  if (cls.equals(BooleanOpenHashSet.class)) {
    serialize(out, ((BooleanOpenHashSet) set).toBooleanArray());
  } else if (cls.equals(ByteOpenHashSet.class)) {
    serialize(out, ((ByteOpenHashSet) set).toByteArray());
  } else if (cls.equals(CharOpenHashSet.class)) {
    serialize(out, ((CharOpenHashSet) set).toCharArray());
  } else if (cls.equals(ShortOpenHashSet.class)) {
    serialize(out, ((ShortOpenHashSet) set).toShortArray());
  } else if (cls.equals(IntOpenHashSet.class)) {
    serialize(out, ((IntOpenHashSet) set).toIntArray());
  } else if (cls.equals(LongOpenHashSet.class)) {
    serialize(out, ((LongOpenHashSet) set).toLongArray());
  } else if (cls.equals(FloatOpenHashSet.class)) {
    serialize(out, ((FloatOpenHashSet) set).toFloatArray());
  } else if (cls.equals(DoubleOpenHashSet.class)) {
    serialize(out, ((DoubleOpenHashSet) set).toDoubleArray());
  } else {
    serialize(out, set.size());
    for (Object obj : set) {
      serialize(out, obj);
    }
  }
}
/**
 * Creates a new hash set copying a given collection.
 *
 * @param c
 *            a {@link Collection} to be copied into the new hash set.
 * @param f
 *            the load factor.
 */
public DoubleOpenHashSet(final Collection<? extends Double> c, final float f) {
  // Pre-size for c.size() elements, then delegate the copy to addAll.
  this(c.size(), f);
  addAll(c);
}
/**
@Override
public boolean addAll(DoubleCollection c) {
  // With a load factor of at most .5 we can size the table for c.size()
  // elements outright; otherwise only tentatively reserve room for
  // size() + c.size() elements before delegating the actual insertion.
  if (f <= .5) {
    ensureCapacity(c.size());
  } else {
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}
@Override
/**
 * Creates a new hash set using elements provided by a type-specific iterator.
 *
 * @param i a type-specific iterator whose elements will fill the set.
 * @param f the load factor.
 */
public DoubleOpenHashSet(final DoubleIterator i, final float f) {
  this(DEFAULT_INITIAL_SIZE, f);
  // Drain the iterator with the type-specific call to avoid boxing.
  while (i.hasNext()) {
    add(i.nextDouble());
  }
}
/**
/**
 * Serializes a set, special-casing the primitive-specialized fastutil set
 * types: those are dumped as primitive arrays, avoiding per-element boxing.
 * Any other set falls back to writing its size followed by each element.
 */
private void serializeSet(final DataOutput out, final Set set) throws IOException {
  final Class cls = set.getClass();
  // Exact-class checks keep the fast path to the known fastutil implementations.
  if (cls.equals(BooleanOpenHashSet.class)) {
    serialize(out, ((BooleanOpenHashSet) set).toBooleanArray());
  } else if (cls.equals(ByteOpenHashSet.class)) {
    serialize(out, ((ByteOpenHashSet) set).toByteArray());
  } else if (cls.equals(CharOpenHashSet.class)) {
    serialize(out, ((CharOpenHashSet) set).toCharArray());
  } else if (cls.equals(ShortOpenHashSet.class)) {
    serialize(out, ((ShortOpenHashSet) set).toShortArray());
  } else if (cls.equals(IntOpenHashSet.class)) {
    serialize(out, ((IntOpenHashSet) set).toIntArray());
  } else if (cls.equals(LongOpenHashSet.class)) {
    serialize(out, ((LongOpenHashSet) set).toLongArray());
  } else if (cls.equals(FloatOpenHashSet.class)) {
    serialize(out, ((FloatOpenHashSet) set).toFloatArray());
  } else if (cls.equals(DoubleOpenHashSet.class)) {
    serialize(out, ((DoubleOpenHashSet) set).toDoubleArray());
  } else {
    serialize(out, set.size());
    for (Object obj : set) {
      serialize(out, obj);
    }
  }
}
/**
 * Creates a new hash set copying a given type-specific collection.
 *
 * @param c
 *            a type-specific collection to be copied into the new hash set.
 * @param f
 *            the load factor.
 */
public DoubleOpenHashSet(final DoubleCollection c, final float f) {
  // Pre-size for c.size() elements, then delegate the copy to addAll.
  this(c.size(), f);
  addAll(c);
}
/**
/**
 * Builds the evaluator for a NOT_IN predicate over raw double values:
 * every literal is parsed once up front so that evaluation is a plain
 * set-membership lookup.
 */
DoubleRawValueBasedNotInPredicateEvaluator(NotInPredicate notInPredicate) {
  String[] stringValues = notInPredicate.getValues();
  // Pre-size the set for the number of literals to avoid rehashing.
  _nonMatchingValues = new DoubleOpenHashSet(HashUtil.getMinHashSetSize(stringValues.length));
  for (String stringValue : stringValues) {
    _nonMatchingValues.add(Double.parseDouble(stringValue));
  }
}
/**
 * Builds the evaluator for an IN predicate over raw double values:
 * every literal is parsed once up front so that evaluation is a plain
 * set-membership lookup.
 */
DoubleRawValueBasedInPredicateEvaluator(InPredicate inPredicate) {
  String[] stringValues = inPredicate.getValues();
  // Pre-size the set for the number of literals to avoid rehashing.
  _matchingValues = new DoubleOpenHashSet(HashUtil.getMinHashSetSize(stringValues.length));
  for (String stringValue : stringValues) {
    _matchingValues.add(Double.parseDouble(stringValue));
  }
}
/**
 * Filters the multi-valued double output of the main transform function,
 * keeping only values contained in the IN-clause value set. Delegates to the
 * superclass when the result type is not DOUBLE.
 */
@Override
public double[][] transformToDoubleValuesMV(@Nonnull ProjectionBlock projectionBlock) {
  if (getResultMetadata().getDataType() != FieldSpec.DataType.DOUBLE) {
    return super.transformToDoubleValuesMV(projectionBlock);
  }
  // Lazily parse the string literals into a double set and allocate the
  // reusable output buffer on the first DOUBLE-typed call.
  if (_doubleValueSet == null) {
    _doubleValueSet = new DoubleOpenHashSet();
    for (String inValue : _stringValueSet) {
      _doubleValueSet.add(Double.parseDouble(inValue));
    }
    // Sized to the maximum docs per call; rows beyond the current block length
    // retain stale entries from earlier calls but are never read by callers.
    _doubleValues = new double[DocIdSetPlanNode.MAX_DOC_PER_CALL][];
  }
  double[][] unFilteredDoubleValues = _mainTransformFunction.transformToDoubleValuesMV(projectionBlock);
  int length = projectionBlock.getNumDocs();
  for (int i = 0; i < length; i++) {
    _doubleValues[i] = filterDoubles(_doubleValueSet, unFilteredDoubleValues[i]);
  }
  return _doubleValues;
}
/**
 * Counts the distinct non-missing values in this column by collecting them
 * into a primitive set and reporting its size.
 */
@Override
public int countUnique() {
  final DoubleSet distinct = new DoubleOpenHashSet();
  for (int row = 0; row < size(); row++) {
    if (isMissing(row)) {
      continue; // missing cells do not contribute a value
    }
    distinct.add(getDouble(row));
  }
  return distinct.size();
}
/**
 * Returns a new column containing each distinct non-missing value of this
 * column exactly once.
 */
@Override
public DoubleColumn unique() {
  // Deduplicate via a primitive set to avoid boxing.
  final DoubleSet distinct = new DoubleOpenHashSet();
  for (int row = 0; row < size(); row++) {
    if (!isMissing(row)) {
      distinct.add(getDouble(row));
    }
  }
  final DoubleColumn result = DoubleColumn.create(name() + " Unique values");
  distinct.forEach((DoubleConsumer) result::append);
  return result;
}
@Test public void testDoublePredicateEvaluators() { List<String> stringValues = new ArrayList<>(NUM_PREDICATE_VALUES); DoubleSet valueSet = new DoubleOpenHashSet();
/**
 * Returns the column's values as a set. The set is a fresh copy, so
 * mutating it does not affect this column's backing data.
 */
DoubleSet asSet() {
  final DoubleOpenHashSet copy = new DoubleOpenHashSet(data);
  return copy;
}
return new FloatOpenHashSet(set); } else if (oCls.equals(Double.class)) { return new DoubleOpenHashSet(set); } else if (oCls.equals(Short.class)) { return new ShortOpenHashSet(set);
return new FloatOpenHashSet(set); } else if (oCls.equals(Double.class)) { return new DoubleOpenHashSet(set); } else if (oCls.equals(Short.class)) { return new ShortOpenHashSet(set);