// Fragment: fit an OLS model of CPU utilization against byte-rate features.
// The intercept is suppressed, so every predictor column is explicit.
regression.setNoIntercept(true);
// If leader bytes-in and bytes-out are nearly collinear, drop bytes-out to
// avoid a singular design matrix — NOTE(review): inferred from the helper
// name; confirm against isLeaderBytesInAndOutRatioDiverseEnough()'s contract.
boolean ignoreLeaderBytesOut = !isLeaderBytesInAndOutRatioDiverseEnough();
// newSampleData(y, x): response = CPU-utilization samples, design matrix =
// aggregated byte-rate samples (optionally without the bytes-out column).
regression.newSampleData(aggregateSampleCpuUtilData(), aggregateSampleBytesRateData(ignoreLeaderBytesOut));
double[] parameters = regression.estimateRegressionParameters();
// Fragment: regress CPU utilization on the byte-rate samples and record the
// fitted coefficient for the leader-bytes-in feature.
regression.newSampleData(aggregateSampleCpuUtilData(), sampleBytesRateData);
double[] parameters = regression.estimateRegressionParameters();
// leaderBytesInIndex selects the LEADER_BYTES_IN column of the design matrix.
coefficientFromAvailableData.put(ModelCoefficient.LEADER_BYTES_IN, parameters[leaderBytesInIndex]);
// Load the response vector y and regressor matrix xData into the OLS model.
ols.newSampleData(y, xData); // provide the data to the model
// Load observations: Y is the response vector, X the design matrix
// (one row per observation, one column per regressor).
regression.newSampleData(Y, X);
// Fragment of a try/catch: load the sample data and map Commons Math
// argument errors (e.g. dimension mismatch, too few rows) onto a
// spreadsheet #REF! evaluation error.
reg.newSampleData(y, x);
} catch (IllegalArgumentException e) {
    throw new EvaluationException(ErrorEval.REF_INVALID);
// Populates the interleaved sample array and loads it into the OLS model.
// Layout is {y0, x0, y1, x1, ...}: only the x slots (odd indices) are
// written here — the y slots are assumed filled elsewhere. TODO confirm.
void setSampleData() {
    // Fill in the x values
    for (int x = 0; x < values.length / 2; x++)
        values[(x * 2) + 1] = x;
    // newSampleData(data, nobs, nvars): nobs observations, 1 regressor each.
    ols.newSampleData(values, values.length / 2, 1);
}
@Override
void setSampleData() { // Fill in the x values for (int x = 0; x < values.length/2; x++) values[(x*2)+1] = x; ols.newSampleData(values, values.length/2, 1); }
/**
 * Fits an ordinary-least-squares model to the given sample data.
 *
 * @param observations design matrix, one row per observation
 * @param outcomes response value for each observation
 * @return the fitted regression model
 */
protected MultipleLinearRegression regress(double[][] observations, double[] outcomes) {
    final OLSMultipleLinearRegression model = new OLSMultipleLinearRegression();
    model.newSampleData(outcomes, observations);
    return model;
}
/**
 * Creates a multiple linear regression fitted to the given sample data.
 *
 * @param x the independent variable observations, one row per sample
 * @param y the dependent variable values, one per sample
 * @return the OLS regression model with the sample data loaded
 */
public static OLSMultipleLinearRegression createLinearRegression(final double[][] x, final double[] y) {
    final OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(y, x);
    return regression;
}
/**
 * Builds an OLS multiple linear regression from the supplied observations.
 *
 * @param x independent-variable matrix, one row per observation
 * @param y dependent-variable vector, one entry per observation
 * @return the model with the sample data already loaded
 */
public static OLSMultipleLinearRegression createLinearRegression(
        final double[][] x, final double[] y) {
    final OLSMultipleLinearRegression model = new OLSMultipleLinearRegression();
    model.newSampleData(y, x);
    return model;
}
/**
 * Fits the OLS model on the accumulated observations and marks it ready.
 *
 * @return this model, now trained
 */
@Override
OlsLRModel train() {
    // Flatten the buffered samples into the single-array layout expected by
    // OLSMultipleLinearRegression.newSampleData(data, nobs, nvars).
    double[] dataArray = LR.doubleListToArray(data);
    R.newSampleData(dataArray, numObs, numVars);
    params = R.estimateRegressionParameters();
    this.state = State.ready;
    // NOTE: the original also copied params into a local List<Double> that
    // was never read or returned — that dead code has been removed.
    return this;
}
// Quadratic fit with an explicit constant column {1, x, x^2} in the design
// matrix, so the implicit intercept must be disabled.
final OLSMultipleLinearRegression regression2 = new OLSMultipleLinearRegression();
double[] y = { 4, 8, 13, 18 };
double[][] x2 = {
    { 1, 1, 1 },
    { 1, 2, 4 },
    { 1, 3, 9 },
    { 1, 4, 16 },
};
// Configure the no-intercept flag BEFORE loading the data. The original
// called newSampleData twice — once before setNoIntercept took effect —
// doing a full redundant data load; the first call has been removed.
regression2.setNoIntercept(true);
regression2.newSampleData(y, x2);
double[] beta = regression2.estimateRegressionParameters();
for (double d : beta) {
    System.out.println("D: " + d);
}
// Nested-model F test: compare the fit of the smaller model (design x)
// against the larger model (design xb) on the same response y.
OLSMultipleLinearRegression regr = new OLSMultipleLinearRegression();
regr.newSampleData(y, x);
double SSR1 = regr.calculateResidualSumOfSquares();
double df1 = y.length - (x[0].length + 1); //df = n - number of coefficients, including intercept
regr.newSampleData(y, xb);
double SSR2 = regr.calculateResidualSumOfSquares();
double df2 = y.length - (xb[0].length + 1);
double MSE = SSR2/df2; // EDIT: You need the biggest model here!
// F numerator: drop in residual sum of squares per extra parameter.
double MSEdiff = Math.abs ((SSR2 - SSR1) / (df2 - df1));
double dfdiff = Math.abs(df2 - df1);
double Fval = MSEdiff / MSE;
// p-value is the upper tail of the F(dfdiff, df2) distribution.
FDistribution Fdist = new FDistribution(dfdiff, df2);
double pval = 1 - Fdist.cumulativeProbability(Fval);
double[] y = {-0.48812477, 0.33458213, -0.52754476, -0.79863471, -0.68544309, -0.12970239, 0.02355622, -0.31890850, 0.34725819, 0.08108851}; double[][] x = {{1,0}, {0,0}, {1,0}, {2,1}, {0,1}, {0,0}, {1,0}, {0,0}, {1,0}, {0,0}}; double[][] xb = {{1,0,0}, {0,0,0}, {1,0,0}, {2,1,2}, {0,1,0}, {0,0,0}, {1,0,0}, {0,0,0}, {1,0,0}, {0,0,0}}; OLSMultipleLinearRegression regr = new OLSMultipleLinearRegression(); int degreesOfFreedomA = y.length - (x[0].length); // no + 1 int degreesOfFreedomB = y.length - (xb[0].length); // no + 1 regr.setNoIntercept(true); regr.newSampleData(y, x); double sumOfSquaresNoInterceptA = regr.calculateResidualSumOfSquares(); regr.newSampleData(y, xb); double sumOfSquaresNoInterceptB = regr.calculateResidualSumOfSquares(); double MSE = sumOfSquaresNoInterceptB / degreesOfFreedomB; System.out.printf("RSS no intercept: %f\n",sumOfSquaresNoInterceptB); int degreesOfFreedomDifference = Math.abs(degreesOfFreedomB - degreesOfFreedomA); double MSEdiff = Math.abs((sumOfSquaresNoInterceptB - sumOfSquaresNoInterceptA) / (degreesOfFreedomDifference)); double Fval = MSEdiff / MSE; FDistribution Fdist = new FDistribution(degreesOfFreedomDifference, degreesOfFreedomB); double pval = 1 - Fdist.cumulative(Fval); System.out.printf("pval without intercept: %f",pval);
/**
 * Trains the ensemble's linear combination: interpolates missing metric
 * scores for each example, then regresses the known similarity values on
 * the per-metric scores and stores the fitted coefficients.
 *
 * @param simList training examples with known similarity values
 * @throws IllegalArgumentException if no examples are supplied
 */
@Override
public void trainSimilarity(List<EnsembleSim> simList) {
    if (simList.isEmpty()) {
        throw new IllegalArgumentException("no examples to train on!");
    }
    similarityInterpolator.trainSimilarity(simList);
    final int n = simList.size();
    final double[][] predictors = new double[n][numMetrics];
    final double[] targets = new double[n];
    for (int row = 0; row < n; row++) {
        targets[row] = simList.get(row).knownSim.similarity;
        EnsembleSim interpolated = similarityInterpolator.interpolate(simList.get(row));
        for (int col = 0; col < numMetrics; col++) {
            predictors[row][col] = interpolated.getScores().get(col);
        }
    }
    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(targets, predictors);
    simlarityCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters());
    // With a single response, sqrt(R^2) is the Pearson correlation between
    // the fitted and observed similarities.
    double pearson = Math.sqrt(regression.calculateRSquared());
    LOG.info("coefficients are " + simlarityCoefficients.toString());
    LOG.info("pearson for multiple regression is " + pearson);
}
/**
 * Fits a multiple linear regression of the known similarity values on the
 * interpolated per-metric scores, storing the coefficients for later use.
 *
 * @param simList training examples with known similarity values
 * @throws IllegalArgumentException if simList is empty
 */
@Override
public void trainSimilarity(List<EnsembleSim> simList) {
    if (simList.isEmpty()) {
        throw new IllegalArgumentException("no examples to train on!");
    }
    similarityInterpolator.trainSimilarity(simList);
    // Design matrix: one row per example, one column per metric score.
    double[][] X = new double[simList.size()][numMetrics];
    double[] Y = new double[simList.size()];
    for (int i = 0; i < simList.size(); i++) {
        Y[i] = simList.get(i).knownSim.similarity;
        // Interpolation fills in metrics that could not score this example.
        EnsembleSim es = similarityInterpolator.interpolate(simList.get(i));
        for (int j = 0; j < numMetrics; j++) {
            X[i][j] = es.getScores().get(j);
        }
    }
    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(Y, X);
    simlarityCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters());
    // With a single response, sqrt(R^2) equals the Pearson correlation
    // between fitted and observed similarities.
    double pearson = Math.sqrt(regression.calculateRSquared());
    LOG.info("coefficients are " + simlarityCoefficients.toString());
    LOG.info("pearson for multiple regression is " + pearson);
}
/**
 * Fits the model's coefficients by ordinary least squares.
 *
 * Each scalar x is expanded into a predictor vector by {@code xVector};
 * the implicit intercept is disabled, so xVector is expected to supply its
 * own constant column if the model wants one. When {@code logY()} is true
 * the model is fitted against ln(y) rather than y.
 *
 * @param y observed response values
 * @param x observed predictor values, same length as y
 * @throws IllegalArgumentException if x and y differ in length
 */
@Override
public void setValues(double[] y, double[] x) {
    if (x.length != y.length) {
        throw new IllegalArgumentException(String.format("The numbers of y and x values must be equal (%d != %d)", y.length, x.length));
    }
    double[][] xData = new double[x.length][];
    for (int i = 0; i < x.length; i++) {
        // the implementation determines how to produce a vector of predictors from a single x
        xData[i] = xVector(x[i]);
    }
    if (logY()) {
        // in some models we are predicting ln y, so we replace each y with ln y
        y = Arrays.copyOf(y, y.length); // user might not be finished with the array we were given
        for (int i = 0; i < x.length; i++) {
            y[i] = Math.log(y[i]);
        }
    }
    final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression();
    ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired
    ols.newSampleData(y, xData); // provide the data to the model
    coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs
    last_error_rate = ols.estimateErrorVariance();
    Log.d(TAG, getClass().getSimpleName() + " Forecast Error rate: errorvar:" + JoH.qs(last_error_rate, 4) + " regssionvar:" + JoH.qs(ols.estimateRegressandVariance(), 4) + " stderror:" + JoH.qs(ols.estimateRegressionStandardError(), 4));
}
/**
 * Trains the forecast model by OLS on (xVector(x[i]), y[i]) pairs.
 *
 * The implicit intercept is disabled; {@code xVector} is responsible for
 * including a constant term if the concrete model needs one. When
 * {@code logY()} is true, fitting is done against ln(y).
 *
 * @param y observed response values
 * @param x observed predictor values, same length as y
 * @throws IllegalArgumentException if x and y differ in length
 */
@Override
public void setValues(double[] y, double[] x) {
    if (x.length != y.length) {
        throw new IllegalArgumentException(String.format("The numbers of y and x values must be equal (%d != %d)", y.length, x.length));
    }
    double[][] xData = new double[x.length][];
    for (int i = 0; i < x.length; i++) {
        // the implementation determines how to produce a vector of predictors from a single x
        xData[i] = xVector(x[i]);
    }
    if (logY()) {
        // in some models we are predicting ln y, so we replace each y with ln y
        y = Arrays.copyOf(y, y.length); // user might not be finished with the array we were given
        for (int i = 0; i < x.length; i++) {
            y[i] = Math.log(y[i]);
        }
    }
    final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression();
    ols.setNoIntercept(true); // let the implementation include a constant in xVector if desired
    ols.newSampleData(y, xData); // provide the data to the model
    coef = MatrixUtils.createColumnRealMatrix(ols.estimateRegressionParameters()); // get our coefs
    last_error_rate = ols.estimateErrorVariance();
    Log.d(TAG, getClass().getSimpleName() + " Forecast Error rate: errorvar:" + JoH.qs(last_error_rate, 4) + " regssionvar:" + JoH.qs(ols.estimateRegressandVariance(), 4) + " stderror:" + JoH.qs(ols.estimateRegressionStandardError(), 4));
}
/**
 * Finishes observation collection by fitting the calibration model:
 * regresses the collected targets on log(1 + rank) and the (optionally
 * log-transformed) score, then caches the fitted coefficients.
 */
@Override
public void observationsFinished() {
    final double[] targets = ys.toArray();
    final double[][] design = new double[targets.length][2];
    for (int row = 0; row < targets.length; row++) {
        design[row][0] = Math.log(1 + ranks.get(row));
        design[row][1] = logIfNecessary(scores.get(row));
    }
    final OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(targets, design);
    final double[] params = regression.estimateRegressionParameters();
    // Parameter order: implicit intercept first, then one slope per column.
    intercept = params[0];
    rankCoeff = params[1];
    scoreCoeff = params[2];
    super.observationsFinished();
    LOG.info("trained model on " + design.length + " observations: " + dump() + " with R-squared " + regression.calculateRSquared());
}
/**
 * Scores the edge orientation between variables x and y by comparing how
 * much regressing each variable on the other changes the Anderson-Darling
 * non-Gaussianity statistic of the residuals.
 *
 * @param x sample values of the first variable
 * @param y sample values of the second variable
 * @return (AD gain from regressing x on y) minus (AD gain from regressing y on x)
 */
private double resolveOneEdgeMaxR3(double[] x, double[] y) {
    // FIX: the original concatenated the raw arrays ("" + x), which prints
    // the array identity (e.g. [D@1a2b3c) instead of the data.
    TetradLogger.getInstance().log("info", "\nEDGE " + java.util.Arrays.toString(x) + " --- " + java.util.Arrays.toString(y));
    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    double[][] _x = new double[1][];
    _x[0] = x;
    double[][] _y = new double[1][];
    _y[0] = y;
    // Residuals of x regressed on y; transpose turns the single row into a column.
    regression.newSampleData(x, transpose(_y));
    double[] rXY = regression.estimateResiduals();
    // Residuals of y regressed on x.
    regression.newSampleData(y, transpose(_x));
    double[] rYX = regression.estimateResiduals();
    double xPlus = new AndersonDarlingTest(rXY).getASquared();
    double xMinus = new AndersonDarlingTest(x).getASquared();
    double yPlus = new AndersonDarlingTest(rYX).getASquared();
    double yMinus = new AndersonDarlingTest(y).getASquared();
    double deltaX = xPlus - xMinus;
    double deltaY = yPlus - yMinus;
    return deltaX - deltaY;
}