public EnumeratedTreeDistribution(final List<Pair<T, Double>> pmf) {
    super(pmf);

    // Build the interval tree: each entry maps the cumulative probability at
    // the start of an outcome's interval to that outcome's index in the pmf.
    probabilityRanges = new TreeMap<Double, Integer>();
    normalizedPmf = this.getPmf();
    double cumulativep = 0.0;
    for (int i = 0; i < normalizedPmf.size(); i++) {
        probabilityRanges.put(cumulativep, i);
        Pair<T, Double> pair = normalizedPmf.get(i);
        cumulativep += pair.getSecond();
    }
}
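// A minimal sketch (not in the source) of how the map built above would
// typically be consumed: a hypothetical sample() method maps a uniform draw
// to an outcome via TreeMap.floorEntry, which finds the interval containing
// the draw in O(log n). Assumes a random-generator field named `random`.
public T sample() {
    double u = random.nextDouble(); // uniform in [0, 1)
    // The first key inserted is 0.0, so floorEntry never returns null here.
    int index = probabilityRanges.floorEntry(u).getValue();
    return normalizedPmf.get(index).getFirst();
}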
@SuppressWarnings("unchecked") private void addCredentialToSubject(Subject subject, Map<String, String> credentials) { try { for (Pair<String, Credentials> cred : getCredentials(credentials)) { subject.getPrivateCredentials().add(cred.getSecond()); LOG.info("Credentials added to the subject."); } } catch (Exception e) { LOG.error("Failed to initialize and get UserGroupInformation.", e); } }
private WriteResponse getWriteResponseorThrow(Pair<WriteResponse, Throwable> writeResponseThrowablePair)
        throws ExecutionException {
    if (writeResponseThrowablePair.getFirst() != null) {
        return writeResponseThrowablePair.getFirst();
    } else if (writeResponseThrowablePair.getSecond() != null) {
        throw new ExecutionException(writeResponseThrowablePair.getSecond());
    } else {
        throw new ExecutionException(new RuntimeException("Could not find non-null WriteResponse pair"));
    }
}
private WriteResponse getWriteResponseorThrow(Pair<WriteResponse, Throwable> writeResponseThrowablePair)
        throws ExecutionException {
    try {
        if (writeResponseThrowablePair.getFirst() != null) {
            return writeResponseThrowablePair.getFirst();
        } else if (writeResponseThrowablePair.getSecond() != null) {
            throw new ExecutionException(writeResponseThrowablePair.getSecond());
        } else {
            throw new ExecutionException(new RuntimeException("Could not find non-null WriteResponse pair"));
        }
    } finally {
        // Mark the write as complete whether it succeeded or failed.
        done.set(true);
    }
}
/**
 * Creates an integrator from the given pair of points (first element of
 * the pair) and weights (second element of the pair).
 *
 * @param pointsAndWeights Integration points and corresponding weights.
 * @throws NonMonotonicSequenceException if the {@code points} are not
 * sorted in increasing order.
 *
 * @see #GaussIntegrator(double[], double[])
 */
public GaussIntegrator(Pair<double[], double[]> pointsAndWeights)
        throws NonMonotonicSequenceException {
    this(pointsAndWeights.getFirst(), pointsAndWeights.getSecond());
}
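// Usage sketch for the pair-based constructor above, assuming the standard
// Commons Math GaussIntegrator.integrate(UnivariateFunction) method. The
// points and weights below form the 2-point Gauss-Legendre rule on [-1, 1],
// which integrates x^2 exactly (2/3).
double[] points = { -1 / Math.sqrt(3), 1 / Math.sqrt(3) };
double[] weights = { 1, 1 };
GaussIntegrator integrator =
    new GaussIntegrator(new Pair<double[], double[]>(points, weights));
double value = integrator.integrate(x -> x * x); // ~ 0.6667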
/**
 * Creates an integrator from the given pair of points (first element of
 * the pair) and weights (second element of the pair).
 *
 * @param pointsAndWeights Integration points and corresponding weights.
 * @throws NonMonotonicSequenceException if the {@code points} are not
 * sorted in increasing order.
 *
 * @see #SymmetricGaussIntegrator(double[], double[])
 */
public SymmetricGaussIntegrator(Pair<double[], double[]> pointsAndWeights)
        throws NonMonotonicSequenceException {
    this(pointsAndWeights.getFirst(), pointsAndWeights.getSecond());
}
@Override
public Future<WriteResponse> write(final Batch<Object> batch, @Nullable WriteCallback callback) {
    Pair<BulkRequest, FutureCallbackHolder> preparedBatch = this.prepareBatch(batch, callback);
    try {
        client.bulkAsync(preparedBatch.getFirst(), preparedBatch.getSecond().getActionListener());
        return preparedBatch.getSecond().getFuture();
    } catch (Exception e) {
        throw new RuntimeException("Caught unexpected exception while calling bulkAsync API", e);
    }
}
/**
 * Stores a rule.
 *
 * @param rule Rule to be stored.
 * @throws DimensionMismatchException if the elements of the pair do not
 * have the same length.
 */
protected void addRule(Pair<T[], T[]> rule) throws DimensionMismatchException {
    if (rule.getFirst().length != rule.getSecond().length) {
        throw new DimensionMismatchException(rule.getFirst().length,
                                             rule.getSecond().length);
    }
    pointsAndWeights.put(rule.getFirst().length, rule);
}
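// Hypothetical companion lookup (not in the source) illustrating the keying
// choice above: because addRule stores each rule under its number of points,
// a cached rule is retrieved by that same key. A real implementation would
// compute and addRule a missing rule rather than return null.
protected Pair<T[], T[]> getCachedRule(int numberOfPoints) {
    return pointsAndWeights.get(numberOfPoints);
}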
@Override
public Future<WriteResponse> write(Batch<Object> batch, @Nullable WriteCallback callback) {
    Pair<BulkRequest, FutureCallbackHolder> preparedBatch = this.prepareBatch(batch, callback);
    client.bulk(preparedBatch.getFirst(), preparedBatch.getSecond().getActionListener());
    return preparedBatch.getSecond().getFuture();
}
/**
 * Converts from the actual {@code Number} type to {@code double}.
 *
 * @param <T> Type of the number used to represent the points and
 * weights of the quadrature rules.
 * @param rule Points and weights.
 * @return points and weights as {@code double}s.
 */
private static <T extends Number> Pair<double[], double[]> convertToDouble(Pair<T[], T[]> rule) {
    final T[] pT = rule.getFirst();
    final T[] wT = rule.getSecond();

    final int len = pT.length;
    final double[] pD = new double[len];
    final double[] wD = new double[len];

    for (int i = 0; i < len; i++) {
        pD[i] = pT[i].doubleValue();
        wD[i] = wT[i].doubleValue();
    }

    return new Pair<double[], double[]>(pD, wD);
}
private void drainQueue(BlockingQueue<Pair<AbstractDocument, Future>> queue, int threshold,
    long sleepTime, TimeUnit sleepUnit, List<Pair<AbstractDocument, Future>> failedFutures) {
    // Block until the queue has at least `threshold` free slots, waiting up to
    // `sleepTime` on each head element's future and collecting failed futures.
    // Note: a non-positive sleepTime makes this loop spin without draining.
    while (queue.remainingCapacity() < threshold) {
        if (sleepTime > 0) {
            Pair<AbstractDocument, Future> topElement = queue.peek();
            if (topElement != null) {
                try {
                    topElement.getSecond().get(sleepTime, sleepUnit);
                } catch (Exception te) {
                    failedFutures.add(topElement);
                }
                queue.poll();
            }
        }
    }
}
/**
 * Performs a change of variable so that the integration can be performed
 * on an arbitrary interval {@code [a, b]}.
 * It is assumed that the natural interval is {@code [-1, 1]}.
 *
 * @param rule Original points and weights.
 * @param a Lower bound of the integration interval.
 * @param b Upper bound of the integration interval.
 * @return the points and weights adapted to the new interval.
 */
private static Pair<double[], double[]> transform(Pair<double[], double[]> rule,
                                                  double a, double b) {
    // Note: the input arrays are modified in place.
    final double[] points = rule.getFirst();
    final double[] weights = rule.getSecond();

    // Scaling
    final double scale = (b - a) / 2;
    final double shift = a + scale;

    for (int i = 0; i < points.length; i++) {
        points[i] = points[i] * scale + shift;
        weights[i] *= scale;
    }

    return new Pair<double[], double[]>(points, weights);
}
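// Worked sketch of the change of variable above (callable only from within
// the same class, since transform is private): mapping the 2-point rule from
// [-1, 1] to [0, 1] gives scale = 0.5 and shift = 0.5, so each point x becomes
// 0.5 * x + 0.5 and each weight is halved; the weights then sum to b - a = 1.
Pair<double[], double[]> onUnitInterval = transform(
    new Pair<double[], double[]>(
        new double[] { -1 / Math.sqrt(3), 1 / Math.sqrt(3) },
        new double[] { 1, 1 }),
    0, 1);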
/**
 * Fetch one or more Frames from the KV store, summarize and enhance them,
 * and return a Response containing a map of them.
 */
private Response serveOneOrAll(Map<String, Frame> framesMap) {
    // Returns empty sets if !this.find_compatible_models.
    Pair<Map<String, Model>, Map<String, Set<String>>> models_info = fetchModels();
    Map<String, Model> all_models = models_info.getFirst();
    Map<String, Set<String>> all_models_cols = models_info.getSecond();

    Map<String, FrameSummary> frameSummaries =
        Frames.generateFrameSummaries(null, framesMap, find_compatible_models, all_models, all_models_cols);

    Map<String, Object> resultsMap = new LinkedHashMap<String, Object>();
    resultsMap.put("frames", frameSummaries);

    // If find_compatible_models then include a map of the Model summaries.
    // Should we put this on a separate switch?
    if (this.find_compatible_models) {
        Set<String> all_referenced_models = new TreeSet<String>();
        for (Map.Entry<String, FrameSummary> entry : frameSummaries.entrySet()) {
            FrameSummary summary = entry.getValue();
            all_referenced_models.addAll(summary.compatible_models);
        }

        Map<String, ModelSummary> modelSummaries =
            Models.generateModelSummaries(all_referenced_models, all_models, false, null, null);
        resultsMap.put("models", modelSummaries);
    }

    // TODO: temporary hack to get things going
    String json = gson.toJson(resultsMap);
    JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
    return Response.done(result);
}
/**
 * Fetch all the Models from the KV store, summarize and enhance them,
 * and return a Response containing a map of them.
 */
private Response serveOneOrAll(Map<String, Model> modelsMap) {
    // Returns empty sets if !this.find_compatible_frames.
    Pair<Map<String, Frame>, Map<String, Set<String>>> frames_info = fetchFrames();
    Map<String, Frame> all_frames = frames_info.getFirst();
    Map<String, Set<String>> all_frames_cols = frames_info.getSecond();

    Map<String, ModelSummary> modelSummaries =
        Models.generateModelSummaries(null, modelsMap, find_compatible_frames, all_frames, all_frames_cols);

    Map<String, Object> resultsMap = new LinkedHashMap<String, Object>();
    resultsMap.put("models", modelSummaries);

    // If find_compatible_frames then include a map of the Frame summaries.
    // Should we put this on a separate switch?
    if (this.find_compatible_frames) {
        Set<String> all_referenced_frames = new TreeSet<String>();
        for (Map.Entry<String, ModelSummary> entry : modelSummaries.entrySet()) {
            ModelSummary summary = entry.getValue();
            all_referenced_frames.addAll(summary.compatible_frames);
        }

        Map<String, FrameSummary> frameSummaries =
            Frames.generateFrameSummaries(all_referenced_frames, all_frames, false, null, null);
        resultsMap.put("frames", frameSummaries);
    }

    // TODO: temporary hack to get things going
    String json = gson.toJson(resultsMap);
    JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
    return Response.done(result);
}
/**
 * {@inheritDoc}
 */
@Override
public void doRenew(Map<String, String> credentials, Map<String, Object> topologyConf,
                    final String topologyOwnerPrincipal) {
    List<String> confKeys = getConfigKeys(topologyConf);
    for (Pair<String, Credentials> cred : getCredentials(credentials, confKeys)) {
        try {
            Configuration configuration = getHadoopConfiguration(topologyConf, cred.getFirst());
            Collection<Token<? extends TokenIdentifier>> tokens = cred.getSecond().getAllTokens();
            if (tokens != null && !tokens.isEmpty()) {
                for (Token<? extends TokenIdentifier> token : tokens) {
                    // We need to re-login: some other thread might have logged into Hadoop using
                    // its credentials (e.g. AutoHBase might also be part of nimbus auto creds).
                    login(configuration);
                    long expiration = token.renew(configuration);
                    LOG.info("HDFS delegation token renewed, new expiration time {}", expiration);
                }
            } else {
                LOG.debug("No tokens found for credentials, skipping renewal.");
            }
        } catch (Exception e) {
            LOG.warn("Could not renew the credentials; one possible reason is that the tokens are "
                + "beyond their renewal period, so attempting to get new tokens.", e);
            populateCredentials(credentials, topologyConf, topologyOwnerPrincipal);
        }
    }
}
@Override
public void doRenew(Map<String, String> credentials, Map<String, Object> topologyConf,
                    final String topologyOwnerPrincipal) {
    List<String> configKeys = getConfigKeys(topologyConf);
    for (Pair<String, Credentials> cred : getCredentials(credentials, configKeys)) {
        try {
            Configuration configuration = getHadoopConfiguration(topologyConf, cred.getFirst());
            String hiveMetaStoreURI = getMetaStoreURI(configuration);
            String hiveMetaStorePrincipal = getMetaStorePrincipal(configuration);
            Collection<Token<? extends TokenIdentifier>> tokens = cred.getSecond().getAllTokens();
            login(configuration);
            if (tokens != null && !tokens.isEmpty()) {
                for (Token<? extends TokenIdentifier> token : tokens) {
                    long expiration = renewToken(token, hiveMetaStoreURI, hiveMetaStorePrincipal);
                    LOG.info("Hive delegation token renewed, new expiration time {}", expiration);
                }
            } else {
                LOG.debug("No tokens found for credentials, skipping renewal.");
            }
        } catch (Exception e) {
            LOG.warn("Could not renew the credentials; one possible reason is that the tokens are "
                + "beyond their renewal period, so attempting to get new tokens.", e);
            populateCredentials(credentials, topologyConf);
        }
    }
}
@Override
protected RealVector solve(final RealMatrix jacobian, final RealVector residuals) {
    try {
        final Pair<RealMatrix, RealVector> normalEquation =
            computeNormalMatrix(jacobian, residuals);
        final RealMatrix normal = normalEquation.getFirst();
        final RealVector jTr = normalEquation.getSecond();
        return new LUDecomposition(normal, SINGULARITY_THRESHOLD)
                .getSolver()
                .solve(jTr);
    } catch (SingularMatrixException e) {
        throw new ConvergenceException(LocalizedFormats.UNABLE_TO_SOLVE_SINGULAR_PROBLEM, e);
    }
}
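// For context, a hedged sketch (the actual implementation is not shown in the
// source) of what computeNormalMatrix plausibly returns: the two sides of the
// normal equations J^T J x = J^T r used by the LU and Cholesky solvers.
private static Pair<RealMatrix, RealVector> computeNormalMatrix(
        final RealMatrix jacobian, final RealVector residuals) {
    final RealMatrix jT = jacobian.transpose();
    return new Pair<RealMatrix, RealVector>(
        jT.multiply(jacobian),   // normal matrix J^T J
        jT.operate(residuals));  // right-hand side J^T r
}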
@Override
protected RealVector solve(final RealMatrix jacobian, final RealVector residuals) {
    try {
        final Pair<RealMatrix, RealVector> normalEquation =
            computeNormalMatrix(jacobian, residuals);
        final RealMatrix normal = normalEquation.getFirst();
        final RealVector jTr = normalEquation.getSecond();
        return new CholeskyDecomposition(normal, SINGULARITY_THRESHOLD, SINGULARITY_THRESHOLD)
                .getSolver()
                .solve(jTr);
    } catch (NonPositiveDefiniteMatrixException e) {
        throw new ConvergenceException(LocalizedFormats.UNABLE_TO_SOLVE_SINGULAR_PROBLEM, e);
    }
}
/** {@inheritDoc} */
public Evaluation evaluate(final RealVector point) {
    // Copy so optimizer can change point without changing our instance.
    final RealVector p = paramValidator == null
        ? point.copy()
        : paramValidator.validate(point.copy());
    if (lazyEvaluation) {
        return new LazyUnweightedEvaluation((ValueAndJacobianFunction) model, target, p);
    } else {
        // Evaluate value and Jacobian in one function call.
        final Pair<RealVector, RealMatrix> value = model.value(p);
        return new UnweightedEvaluation(value.getFirst(), value.getSecond(), target, p);
    }
}