// NOTE(review): incomplete fragment — records the fitted coefficient for the
// leader-bytes-in predictor; the trailing `if (ignoreLeaderBytesOutRate)` body
// is cut off at this view's boundary.
double[] parameters = regression.estimateRegressionParameters(); coefficientFromAvailableData.put(ModelCoefficient.LEADER_BYTES_IN, parameters[leaderBytesInIndex]); if (ignoreLeaderBytesOutRate) {
// NOTE(review): fragment — exports the fitted parameters and the R-squared /
// adjusted R-squared diagnostics of the regression into a reporting map.
map.put("regressionParameters", list(multipleLinearRegression.estimateRegressionParameters())); map.put("RSquared", multipleLinearRegression.calculateRSquared()); map.put("adjustedRSquared", multipleLinearRegression.calculateAdjustedRSquared());
// NOTE(review): fragment — loads aggregated CPU-utilization (response) and
// bytes-rate (predictor) samples into the regression and fixes the predictor
// column order: leader-bytes-in = column 0, leader-bytes-out = column 1.
regression.newSampleData(aggregateSampleCpuUtilData(), aggregateSampleBytesRateData(ignoreLeaderBytesOut)); double[] parameters = regression.estimateRegressionParameters(); int leaderBytesInIndex = 0; int leaderBytesOutIndex = 1;
// NOTE(review): fragment — disables the library's built-in intercept (the
// xVector hook presumably supplies a constant column when the model wants
// one — confirm against the subclass) and stores the fitted coefficients.
ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs
/** * Predict using the built model * * @param regression * @param x * @return */ static double predict(OLSMultipleLinearRegression regression, double[] x) { if (regression == null) { throw new IllegalArgumentException("regression must not be null."); } double[] beta = regression.estimateRegressionParameters(); // intercept at beta[0] double prediction = beta[0]; for (int i = 1; i < beta.length; i++) { prediction += beta[i] * x[i - 1]; } // return prediction; }
// NOTE(review): incomplete fragment (the enclosing try opens outside this
// view) — a rank-deficient design matrix is surfaced as
// NotImplementedException; the original cause `e` is dropped, which loses
// the stack trace — consider chaining it (confirm with the full method).
par = reg.estimateRegressionParameters(); } catch (SingularMatrixException e) { throw new NotImplementedException("Singular matrix in input");
/**
 * Returns the first-order (slope) coefficient of an OLS fit over the
 * accumulated samples, or NaN when a slope cannot be computed.
 */
@Override
public double getResult() {
    // If there are no values or only a single
    // value then we cannot calculate the slope.
    // NOTE(review): the guard below also rejects exactly two values
    // (length <= 2), which is stricter than the comment above claims —
    // confirm whether a two-point slope is deliberately excluded.
    if (values.length <= 2)
        return Double.NaN;
    setSampleData(); // loads `values` into the OLS model (helper defined elsewhere)
    double[] regressionParams = ols.estimateRegressionParameters();
    if (regressionParams.length >= 2) {
        // [0] is the constant (zero'th order)
        // [1] is the first order , which we use as the slope.
        final double slope = regressionParams[1];
        return slope;
    }
    return Double.NaN;
}
} // closes the enclosing class (its header is outside this view)
/**
 * Fits the OLS model on the buffered observations and marks this model ready.
 *
 * @return this model, for call chaining
 */
@Override
OlsLRModel train() {
    double[] dataArray = LR.doubleListToArray(data); // flatten buffered samples
    R.newSampleData(dataArray, numObs, numVars);
    params = R.estimateRegressionParameters();
    this.state = State.ready;
    // FIX: removed dead code — a List<Double> copy of the first numVars
    // parameters was built here and immediately discarded.
    return this;
}
/**
 * Emits the first-order (slope) coefficient of an OLS fit over the
 * accumulated samples into {@code result}, keyed by the metric type;
 * nothing is emitted when a finite slope cannot be computed.
 */
@Override
public void result(JsonElement partition, JsonObject result) {
    // If there are no values or only a single
    // value then we cannot calculate the slope.
    // NOTE(review): the guard also rejects exactly two values (length <= 2),
    // stricter than the comment above — confirm this is intended.
    if (values.length <= 2)
        return;
    setSampleData(); // loads `values` into the OLS model (helper defined elsewhere)
    double[] regressionParams = ols.estimateRegressionParameters();
    if (regressionParams.length >= 2) {
        // [0] is the constant (zero'th order)
        // [1] is the first order , which we use as the slope.
        final double slope = regressionParams[1];
        if (Java7Helper.doubleIsFinite(slope))
            result.addProperty(type.name(), slope);
    }
    values = null; // release the sample buffer; this accumulator is one-shot
}
} // closes the enclosing class (its header is outside this view)
/**
 * Fits a multiple linear regression mapping the interpolated per-metric
 * scores of each example to its known similarity, and stores the fitted
 * coefficients.
 */
@Override
public void trainSimilarity(List<EnsembleSim> simList) {
    if (simList.isEmpty()) {
        throw new IllegalArgumentException("no examples to train on!");
    }
    similarityInterpolator.trainSimilarity(simList);
    final int rows = simList.size();
    double[] targets = new double[rows];
    double[][] features = new double[rows][numMetrics];
    for (int row = 0; row < rows; row++) {
        EnsembleSim example = simList.get(row);
        targets[row] = example.knownSim.similarity;
        EnsembleSim interpolated = similarityInterpolator.interpolate(example);
        for (int col = 0; col < numMetrics; col++) {
            features[row][col] = interpolated.getScores().get(col);
        }
    }
    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(targets, features);
    simlarityCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters());
    double pearson = Math.sqrt(regression.calculateRSquared());
    LOG.info("coefficients are " + simlarityCoefficients.toString());
    LOG.info("pearson for multiple regression is " + pearson);
}
/**
 * Trains the similarity model: regresses known similarity values on the
 * interpolated metric scores and keeps the resulting coefficient vector.
 */
@Override
public void trainSimilarity(List<EnsembleSim> simList) {
    if (simList.isEmpty()) {
        throw new IllegalArgumentException("no examples to train on!");
    }
    similarityInterpolator.trainSimilarity(simList);
    final int count = simList.size();
    final double[] knownSims = new double[count];
    final double[][] design = new double[count][numMetrics];
    for (int i = 0; i < count; i++) {
        knownSims[i] = simList.get(i).knownSim.similarity;
        final EnsembleSim filled = similarityInterpolator.interpolate(simList.get(i));
        for (int metric = 0; metric < numMetrics; metric++) {
            design[i][metric] = filled.getScores().get(metric);
        }
    }
    final OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(knownSims, design);
    simlarityCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters());
    final double pearson = Math.sqrt(regression.calculateRSquared());
    LOG.info("coefficients are " + simlarityCoefficients.toString());
    LOG.info("pearson for multiple regression is " + pearson);
}
@Override public void trainMostSimilar(List<EnsembleSim> simList) { if (simList.isEmpty()){ throw new IllegalStateException("no examples to train on!"); } mostSimilarInterpolator.trainMostSimilar(simList); // Remove things that have no observed metrics List<EnsembleSim> pruned = new ArrayList<EnsembleSim>(); for (EnsembleSim es : simList) { if (es != null && es.getNumMetricsWithScore() > 0) { pruned.add(es); } } double[][] X = new double[pruned.size()][numMetrics*2]; double[] Y = new double[pruned.size()]; for (int i=0; i<pruned.size(); i++){ Y[i]=pruned.get(i).knownSim.similarity; EnsembleSim es = mostSimilarInterpolator.interpolate(pruned.get(i)); for (int j=0; j<numMetrics; j++){ X[i][2*j]= es.getScores().get(j); X[i][2*j+1]= Math.log(es.getRanks().get(j)+1); } } OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression(); regression.newSampleData(Y,X); mostSimilarCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters()); double pearson = Math.sqrt(regression.calculateRSquared()); LOG.info("coefficients are "+mostSimilarCoefficients.toString()); LOG.info("pearson for multiple regression is "+pearson); }
// Quadratic fit through four points, with the intercept column supplied
// explicitly in the design matrix (leading column of 1s), so the library's
// built-in intercept is disabled.
final OLSMultipleLinearRegression regression2 = new OLSMultipleLinearRegression();
double[] y = { 4, 8, 13, 18 };
double[][] x2 = { { 1, 1, 1 }, { 1, 2, 4 }, { 1, 3, 9 }, { 1, 4, 16 }, };
// FIX: the original loaded the sample data twice — once before
// setNoIntercept(true) and once after. The flag must be set before loading
// (it controls how the design matrix is built), so the first call was
// redundant work that was immediately overwritten.
regression2.setNoIntercept(true);
regression2.newSampleData(y, x2);
double[] beta = regression2.estimateRegressionParameters();
for (double d : beta) {
    System.out.println("D: " + d);
}
@Override public void setValues(double[] y, double[] x) { if (x.length != y.length) { throw new IllegalArgumentException(String.format("The numbers of y and x values must be equal (%d != %d)", y.length, x.length)); } double[][] xData = new double[x.length][]; for (int i = 0; i < x.length; i++) { // the implementation determines how to produce a vector of predictors from a single x xData[i] = xVector(x[i]); } if (logY()) { // in some models we are predicting ln y, so we replace each y with ln y y = Arrays.copyOf(y, y.length); // user might not be finished with the array we were given for (int i = 0; i < x.length; i++) { y[i] = Math.log(y[i]); } } final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression(); ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired ols.newSampleData(y, xData); // provide the data to the model coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs last_error_rate = ols.estimateErrorVariance(); Log.d(TAG, getClass().getSimpleName() + " Forecast Error rate: errorvar:" + JoH.qs(last_error_rate, 4) + " regssionvar:" + JoH.qs(ols.estimateRegressandVariance(), 4) + " stderror:" + JoH.qs(ols.estimateRegressionStandardError(), 4)); }
@Override public void setValues(double[] y, double[] x) { if (x.length != y.length) { throw new IllegalArgumentException(String.format("The numbers of y and x values must be equal (%d != %d)", y.length, x.length)); } double[][] xData = new double[x.length][]; for (int i = 0; i < x.length; i++) { // the implementation determines how to produce a vector of predictors from a single x xData[i] = xVector(x[i]); } if (logY()) { // in some models we are predicting ln y, so we replace each y with ln y y = Arrays.copyOf(y, y.length); // user might not be finished with the array we were given for (int i = 0; i < x.length; i++) { y[i] = Math.log(y[i]); } } final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression(); ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired ols.newSampleData(y, xData); // provide the data to the model coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs last_error_rate = ols.estimateErrorVariance(); Log.d(TAG, getClass().getSimpleName() + " Forecast Error rate: errorvar:" + JoH.qs(last_error_rate, 4) + " regssionvar:" + JoH.qs(ols.estimateRegressandVariance(), 4) + " stderror:" + JoH.qs(ols.estimateRegressionStandardError(), 4)); }
@Override public void setValues(double[] y, double[] x) { if (x.length != y.length) { throw new IllegalArgumentException(String.format("The numbers of y and x values must be equal (%d != %d)", y.length, x.length)); } double[][] xData = new double[x.length][]; for (int i = 0; i < x.length; i++) { // the implementation determines how to produce a vector of predictors from a single x xData[i] = xVector(x[i]); } if (logY()) { // in some models we are predicting ln y, so we replace each y with ln y y = Arrays.copyOf(y, y.length); // user might not be finished with the array we were given for (int i = 0; i < x.length; i++) { y[i] = Math.log(y[i]); } } final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression(); ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired ols.newSampleData(y, xData); // provide the data to the model coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs last_error_rate = ols.estimateErrorVariance(); Log.d(TAG, getClass().getSimpleName() + " Forecast Error rate: errorvar:" + JoH.qs(last_error_rate, 4) + " regssionvar:" + JoH.qs(ols.estimateRegressandVariance(), 4) + " stderror:" + JoH.qs(ols.estimateRegressionStandardError(), 4)); }
/**
 * Fits a two-predictor linear model — similarity as a function of
 * log(1 + rank) and the (possibly log-transformed) score — then hands off
 * to the superclass hook.
 */
@Override
public void observationsFinished() {
    final double[] targets = ys.toArray();
    final double[][] predictors = new double[targets.length][2];
    for (int row = 0; row < targets.length; row++) {
        predictors[row][0] = Math.log(1 + ranks.get(row));
        predictors[row][1] = logIfNecessary(scores.get(row));
    }
    final OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(targets, predictors);
    final double[] fitted = regression.estimateRegressionParameters();
    intercept = fitted[0];
    rankCoeff = fitted[1];
    scoreCoeff = fitted[2];
    super.observationsFinished();
    LOG.info("trained model on " + predictors.length + " observations: " + dump() + " with R-squared " + regression.calculateRSquared());
}
// NOTE(review): incomplete fragment (the enclosing try opens outside this
// view) — a rank-deficient design matrix is surfaced as
// NotImplementedException; the original cause `e` is dropped, which loses
// the stack trace — consider chaining it (confirm with the full method).
par = reg.estimateRegressionParameters(); } catch (SingularMatrixException e) { throw new NotImplementedException("Singular matrix in input");