/**
 * Creates a sequential distribution over either the integer range
 * {@code [start, end]} or an explicit list of enumerated values.
 *
 * @param start first value of the integer range; also the initial counter
 *        value when no enumerated values are given.
 * @param end last value of the integer range.
 * @param enumeratedValues explicit values to step through, or {@code null}
 *        to use the integer range instead.
 */
public SequentialDistribution(Integer start, Integer end, List<Object> enumeratedValues) {
    // The superclass demands a probability mass function, but sequential
    // generation never consults it — hand it a trivial single-entry pmf.
    super(Collections.singletonList(new Pair<Object, Double>(null, 1.0)));
    this.start = start;
    this.end = end;
    this.enumeratedValues = enumeratedValues;
    // Range mode counts from `start`; enumerated mode indexes the list from 0.
    counter = (enumeratedValues == null) ? start : 0;
}
/**
 * Convenience factory equivalent to invoking the
 * {@link #Pair(Object, Object) constructor} directly.
 *
 * @param <K> the key type
 * @param <V> the value type
 * @param k First element of the pair.
 * @param v Second element of the pair.
 * @return a new {@code Pair} containing {@code k} and {@code v}.
 * @since 3.3
 */
public static <K, V> Pair<K, V> create(K k, V v) {
    final Pair<K, V> result = new Pair<K, V>(k, v);
    return result;
}
}
/**
 * Registers a servlet class to be served at the given endpoint.
 *
 * @param endpoint URL path the servlet will be mounted on.
 * @param servlet servlet class to serve at that path.
 * @return this builder, for call chaining.
 */
public Builder addServlet(String endpoint, Class<? extends HttpServlet> servlet) {
    final Pair<String, Class<? extends HttpServlet>> mapping =
        new Pair<String, Class<? extends HttpServlet>>(endpoint, servlet);
    servlets.add(mapping);
    return this;
}
}
/**
 * Gets the distributions that make up the mixture model.
 *
 * @return the component distributions paired with their associated weights.
 */
public List<Pair<Double, T>> getComponents() {
    final int count = weight.length;
    final List<Pair<Double, T>> components = new ArrayList<Pair<Double, T>>(count);
    for (int k = 0; k < count; k++) {
        components.add(new Pair<Double, T>(weight[k], distribution.get(k)));
    }
    return components;
}
}
/**
 * <p>Return the probability mass function as a list of <value, probability> pairs.</p>
 *
 * <p>Note that if duplicate and / or null values were provided to the constructor
 * when creating this EnumeratedDistribution, the returned list will contain these
 * values. If duplicates values exist, what is returned will not represent
 * a pmf (i.e., it is up to the caller to consolidate duplicate mass points).</p>
 *
 * @return the probability mass function.
 */
public List<Pair<T, Double>> getPmf() {
    final int size = probabilities.length;
    final List<Pair<T, Double>> pmf = new ArrayList<Pair<T, Double>>(size);
    for (int k = 0; k < size; k++) {
        pmf.add(new Pair<T, Double>(singletons.get(k), probabilities[k]));
    }
    return pmf;
}
/**
 * Resolves credentials for each of the given config keys. When no keys are
 * supplied, a single fallback lookup is performed under the empty key.
 *
 * @param provider source used to derive credential keys.
 * @param credentials raw credential configuration map.
 * @param configKeys config keys to resolve; may be empty.
 * @return the set of (config key, credentials) pairs that resolved non-null.
 */
static Set<Pair<String, Credentials>> getCredential(CredentialKeyProvider provider,
    Map<String, String> credentials, Collection<String> configKeys) {
  Set<Pair<String, Credentials>> res = new HashSet<>();
  if (!configKeys.isEmpty()) {
    for (String configKey : configKeys) {
      Credentials cred = doGetCredentials(provider, credentials, configKey);
      if (cred != null) {
        // Parameterized (was raw `new Pair(...)`) to avoid unchecked warnings.
        res.add(new Pair<>(configKey, cred));
      }
    }
  } else {
    // No explicit keys: fall back to a single lookup under the empty key.
    Credentials cred = doGetCredentials(provider, credentials, StringUtils.EMPTY);
    if (cred != null) {
      res.add(new Pair<>(StringUtils.EMPTY, cred));
    }
  }
  return res;
}
@Override
public void onError(Throwable e) {
    // Mark the callback path as taken, then surface the failure both on the
    // response queue (with no successful response) and via the callback.
    callbackFired.set(true);
    final Pair<WriteResponse, Throwable> failure =
        new Pair<WriteResponse, Throwable>(null, e);
    writeResponseQueue.add(failure);
    callback.onFailure(e);
}
/** * Fetch all the Frames so we can see if they are compatible with our Model(s). */ private Pair<Map<String, Frame>, Map<String, Set<String>>> fetchFrames() { Map<String, Frame> all_frames = null; Map<String, Set<String>> all_frames_cols = null; if (this.find_compatible_frames) { // caches for this request all_frames = Frames.fetchAll(); all_frames_cols = new TreeMap<String, Set<String>>(); for (Map.Entry<String, Frame> entry : all_frames.entrySet()) { all_frames_cols.put(entry.getKey(), new TreeSet<String>(Arrays.asList(entry.getValue()._names))); } } return new Pair<Map<String, Frame>, Map<String, Set<String>>>(all_frames, all_frames_cols); }
/** * Fetch all the Models so we can see if they are compatible with our Frame(s). */ private Pair<Map<String, Model>, Map<String, Set<String>>> fetchModels() { Map<String, Model> all_models = null; Map<String, Set<String>> all_models_cols = null; if (this.find_compatible_models) { // caches for this request all_models = (new Models()).fetchAll(); all_models_cols = new TreeMap<String, Set<String>>(); for (Map.Entry<String, Model> entry : all_models.entrySet()) { all_models_cols.put(entry.getKey(), new TreeSet<String>(Arrays.asList(entry.getValue()._names))); } } return new Pair<Map<String, Model>, Map<String, Set<String>>>(all_models, all_models_cols); }
/**
 * Create the list of Pairs representing the distribution from singletons and
 * probabilities.
 *
 * @param singletons values
 * @param probabilities probabilities
 * @return list of value/probability pairs
 * @throws DimensionMismatchException if the two arrays differ in length.
 */
private static List<Pair<Double, Double>> createDistribution(double[] singletons,
                                                             double[] probabilities) {
    final int len = singletons.length;
    if (len != probabilities.length) {
        throw new DimensionMismatchException(probabilities.length, len);
    }
    final List<Pair<Double, Double>> samples = new ArrayList<Pair<Double, Double>>(len);
    for (int k = 0; k < len; k++) {
        samples.add(new Pair<Double, Double>(singletons[k], probabilities[k]));
    }
    return samples;
}
/**
 * Create the list of Pairs representing the distribution from singletons and
 * probabilities.
 *
 * @param singletons values
 * @param probabilities probabilities
 * @return list of value/probability pairs
 * @throws DimensionMismatchException if the two arrays differ in length.
 */
private static List<Pair<Integer, Double>> createDistribution(int[] singletons,
                                                              double[] probabilities) {
    final int len = singletons.length;
    if (len != probabilities.length) {
        throw new DimensionMismatchException(probabilities.length, len);
    }
    final List<Pair<Integer, Double>> samples = new ArrayList<Pair<Integer, Double>>(len);
    for (int k = 0; k < len; k++) {
        samples.add(new Pair<Integer, Double>(singletons[k], probabilities[k]));
    }
    return samples;
}
/**
 * @param weights Weights of each component.
 * @param means Mean vector for each component.
 * @param covariances Covariance matrix for each component.
 * @return the list of components.
 */
private static List<Pair<Double, MultivariateNormalDistribution>> createComponents(
        double[] weights, double[][] means, double[][][] covariances) {
    final int count = weights.length;
    final List<Pair<Double, MultivariateNormalDistribution>> components =
        new ArrayList<Pair<Double, MultivariateNormalDistribution>>(count);
    for (int k = 0; k < count; k++) {
        // Build each component distribution from its mean and covariance.
        final MultivariateNormalDistribution dist =
            new MultivariateNormalDistribution(means[k], covariances[k]);
        components.add(new Pair<Double, MultivariateNormalDistribution>(weights[k], dist));
    }
    return components;
}
}
@Override
public void onFailure(Exception exception) {
    // Queue the failure first so consumers observe it even if a later step throws.
    writeResponseQueue.add(new Pair<WriteResponse, Throwable>(null, exception));
    if (exceptionLogger != null) {
        exceptionLogger.log(exception);
    }
    if (callback != null) {
        callback.onFailure(exception);
    }
}
};
/**
 * Converts a rule from its actual {@code Number} type to {@code double}.
 *
 * @param <T> Type of the number used to represent the points and
 * weights of the quadrature rules.
 * @param rule Points and weights.
 * @return points and weights as {@code double}s.
 */
private static <T extends Number> Pair<double[], double[]> convertToDouble(Pair<T[], T[]> rule) {
    final T[] points = rule.getFirst();
    final T[] weights = rule.getSecond();
    final int n = points.length;
    final double[] outPoints = new double[n];
    final double[] outWeights = new double[n];
    for (int k = 0; k < n; k++) {
        outPoints[k] = points[k].doubleValue();
        outWeights[k] = weights[k].doubleValue();
    }
    return new Pair<double[], double[]>(outPoints, outWeights);
}
}
/** {@inheritDoc} */ public Pair<RealVector, RealMatrix> value(final RealVector point) { //TODO get array from RealVector without copying? final double[] p = point.toArray(); // Evaluate. return new Pair<RealVector, RealMatrix>(computeValue(p), computeJacobian(p)); }
/** * Performs a change of variable so that the integration can be performed * on an arbitrary interval {@code [a, b]}. * It is assumed that the natural interval is {@code [-1, 1]}. * * @param rule Original points and weights. * @param a Lower bound of the integration interval. * @param b Lower bound of the integration interval. * @return the points and weights adapted to the new interval. */ private static Pair<double[], double[]> transform(Pair<double[], double[]> rule, double a, double b) { final double[] points = rule.getFirst(); final double[] weights = rule.getSecond(); // Scaling final double scale = (b - a) / 2; final double shift = a + scale; for (int i = 0; i < points.length; i++) { points[i] = points[i] * scale + shift; weights[i] *= scale; } return new Pair<double[], double[]>(points, weights); } }
@Override
public void onNext(D doc) {
    try {
        callbackFired.set(true);
        final WriteResponse response = new GenericWriteResponse<D>(doc);
        writeResponseQueue.add(new Pair<WriteResponse, Throwable>(response, null));
        callback.onSuccess(response);
    } finally {
        // Tuple documents carry a pooled buffer that must be released
        // regardless of whether the success path completed.
        if (doc instanceof TupleDocument) {
            ((TupleDocument) doc).content().value1().release();
        }
    }
}
});
/**
 * Builds an Elasticsearch bulk index request for the records in the given batch.
 *
 * <p>Records that fail to serialize are logged and skipped. The returned
 * callback holder logs the affected ids and applies the configured
 * malformed-document policy when the batch write fails.</p>
 *
 * @param batch records to index.
 * @param callback notified when the bulk write completes or fails.
 * @return the bulk request paired with its future-callback holder.
 */
protected Pair<BulkRequest, FutureCallbackHolder> prepareBatch(Batch<Object> batch, WriteCallback callback) {
  BulkRequest bulkRequest = new BulkRequest();
  final StringBuilder stringBuilder = new StringBuilder();
  for (Object record : batch.getRecords()) {
    try {
      byte[] serializedBytes = this.serializer.serializeToJson(record);
      log.debug("serialized record: {}", serializedBytes);
      IndexRequest indexRequest = new IndexRequest(this.indexName, this.indexType)
          .source(serializedBytes, 0, serializedBytes.length, XContentType.JSON);
      if (this.idMappingEnabled) {
        String id = this.typeMapper.getValue(this.idFieldName, record);
        indexRequest.id(id);
        stringBuilder.append(";").append(id);
      }
      bulkRequest.add(indexRequest);
    } catch (Exception e) {
      // Pass the exception as the final argument (no "{}" placeholder): SLF4J
      // treats a trailing Throwable specially and logs the full stack trace;
      // the previous "{}" was never filled.
      log.error("Encountered exception", e);
    }
  }
  FutureCallbackHolder futureCallbackHolder = new FutureCallbackHolder(callback,
      exception -> log.error("Batch: {} failed on ids; {} with exception {}",
          batch.getId(), stringBuilder.toString(), exception),
      this.malformedDocPolicy);
  // Parameterized (was raw `new Pair(...)`) to avoid an unchecked warning.
  return new Pair<>(bulkRequest, futureCallbackHolder);
}
probabilities.add(new Pair<>(enumeratedValue, 0.1)); ZipfDistribution zipf = new ZipfDistribution(cardinality, schema.getZipfExponent()); for (int i = 0; i < cardinality; i++) { probabilities.add(new Pair<>((Object) (i + startInt), zipf.probability(i))); ZipfDistribution zipf = new ZipfDistribution(enumeratedValues.size(), schema.getZipfExponent()); for (int i = 0; i < cardinality; i++) { probabilities.add(new Pair<>(enumeratedValues.get(i), zipf.probability(i))); case ENUMERATED: for (int i = 0; i < enumeratedValues.size(); i++) { probabilities.add(new Pair<>(enumeratedValues.get(i), enumeratedProbabilities.get(i)));
/** * Gets a copy of the quadrature rule with the given number of integration * points. * * @param numberOfPoints Number of integration points. * @return a copy of the integration rule. * @throws NotStrictlyPositiveException if {@code numberOfPoints < 1}. * @throws DimensionMismatchException if the elements of the rule pair do not * have the same length. */ public Pair<double[], double[]> getRule(int numberOfPoints) throws NotStrictlyPositiveException, DimensionMismatchException { if (numberOfPoints <= 0) { throw new NotStrictlyPositiveException(LocalizedFormats.NUMBER_OF_POINTS, numberOfPoints); } // Try to obtain the rule from the cache. Pair<double[], double[]> cached = pointsAndWeightsDouble.get(numberOfPoints); if (cached == null) { // Rule not computed yet. // Compute the rule. final Pair<T[], T[]> rule = getRuleInternal(numberOfPoints); cached = convertToDouble(rule); // Cache it. pointsAndWeightsDouble.put(numberOfPoints, cached); } // Return a copy. return new Pair<double[], double[]>(cached.getFirst().clone(), cached.getSecond().clone()); }