/**
 * Returns the name of the constraint that failed most often, i.e. the key with
 * the highest count in the given frequency table.
 *
 * @param failedConstraintNamesFrequency frequency table of failed constraint names; may be null
 * @return the most frequent constraint name, or NO_REASON when the table is null,
 *         empty, or contains no entry with a positive count
 */
public static String getMostLikelyFailedConstraintName(Frequency failedConstraintNamesFrequency) {
    if (failedConstraintNamesFrequency == null || failedConstraintNamesFrequency.getUniqueCount() == 0) {
        return NO_REASON;
    }
    Iterator<Map.Entry<Comparable<?>, Long>> entryIterator =
            failedConstraintNamesFrequency.entrySetIterator();
    long maxCount = 0;
    String mostLikely = null;
    while (entryIterator.hasNext()) {
        Map.Entry<Comparable<?>, Long> entry = entryIterator.next();
        if (entry.getValue() > maxCount) {
            maxCount = entry.getValue();
            mostLikely = entry.getKey().toString();
        }
    }
    // Fix: the original could return null when every entry had a zero count;
    // fall back to NO_REASON so callers never observe null.
    return mostLikely != null ? mostLikely : NO_REASON;
}
/** @return the number of distinct values observed in the row margin. */
public int getRowUniqueCount(){ return rowMargin.getUniqueCount(); }
/** @return the number of distinct values observed in the column margin. */
public int getColUniqueCount(){ return colMargin.getUniqueCount(); }
/** @return the number of distinct values in the columns frequency table. */
public int size(){ return columns.getUniqueCount(); }
/** @return the number of distinct score levels, taken from the column margin. */
public int numberOfScoreLevels(){ return columnMargin.getUniqueCount(); }
/** @return the number of distinct values in the frequency table. */
public int getSize(){ return freqTable.getUniqueCount(); }
/**
 * Returns the number of distinct column values observed for the given row.
 *
 * @param rowValue the row key to look up
 * @return the unique column count for that row, or 0 when the row has no entry
 */
public int getColUniqueCountAtRow(Comparable<?> rowValue){
    Frequency f = tableRows.get(rowValue);
    // Fix: an unknown row key previously caused a NullPointerException.
    return f == null ? 0 : f.getUniqueCount();
}
/**
 * Builds the full contingency table of joint counts as a dense matrix. Rows
 * and columns follow the natural iteration order of the row and column margins.
 *
 * @return an nrow x ncol matrix of joint counts
 */
public double[][] getTable(){
    final int rowCount = rowMargin.getUniqueCount();
    final int colCount = colMargin.getUniqueCount();
    double[][] counts = new double[rowCount][colCount];
    int r = 0;
    for (Iterator<Comparable<?>> rowIt = rowMargin.valuesIterator(); rowIt.hasNext(); r++) {
        Comparable<?> rowValue = rowIt.next();
        int c = 0;
        // A fresh column iterator is needed for every row.
        for (Iterator<Comparable<?>> colIt = colMargin.valuesIterator(); colIt.hasNext(); c++) {
            counts[r][c] = (double) getCount(rowValue, colIt.next());
        }
    }
    return counts;
}
/** * For r number of score levels between min and max, inclusive, this method * returns a r x 2 array with integer based scores in first column * and percentile ranks in the second column. This method is useful when * only the raw scores and corresponding percentile ranks are needed. * * @return two-way array of raw scores scores and percentile ranks. */ public double[][] evaluate(){ double[][] prank = new double[freqTable.getUniqueCount()][2]; int xstar; int index = 0; Iterator<Comparable<?>> iter = freqTable.valuesIterator(); int x = 0; while(iter.hasNext()){ x = ((Long)iter.next()).intValue(); xstar = Double.valueOf(Math.floor(x+0.5)).intValue(); prank[index][0] = xstar; prank[index][1] = percentileRank(x, xstar); index++; // System.out.println("x: " + x + " xstar: " + xstar + " Fexstar: " + Fxstar + " Fxstarm1: " + Fxstarm1 + " px: " + px + " cp: " + getCummulativeProportion(xstar)); } return prank; }
/**
 * Computes the threshold parameters for Y: the normal quantiles of the
 * cumulative percentages of each Y category except the last.
 *
 * @return an array of (uniqueCount - 1) thresholds
 */
public double[] getThresholds(){
    double[] thresholds = new double[freqY.getUniqueCount() - 1];
    Iterator<Comparable<?>> valueIt = freqY.valuesIterator();
    int pos = 0;
    while (valueIt.hasNext()) {
        Comparable<?> value = valueIt.next();
        // Skip the last category: its cumulative percentage is 1, whose
        // normal quantile would be infinite.
        if (valueIt.hasNext()) {
            thresholds[pos] = norm.inverseCumulativeProbability(freqY.getCumPct(value));
            pos++;
        }
    }
    return thresholds;
}
@Override public String toString(){ if(freqTable.getUniqueCount()==0 || freqTable.getSumFreq()==0){ return "Percentile ranks not computed.";
ItemResponseVector[] responseData = new ItemResponseVector[freq.getUniqueCount()]; ItemResponseVector irv = null; Iterator<Comparable<?>> iter = freq.valuesIterator();
ItemResponseVector[] responseData = new ItemResponseVector[freq.getUniqueCount()]; ItemResponseVector irv = null; Iterator<Comparable<?>> iter = freq.valuesIterator();
int length = freqTable.getUniqueCount(); double[] xval = new double[length+2]; double[] yval = new double[length+2];
/**
 * Checks whether the observations conform to a Poisson process with the
 * specified intensity. Uses a chi square test with the specified confidence.
 * The null hypothesis is that the observations are the result of a poisson
 * process.
 * @param observations frequency table of observed event counts
 * @param intensity expected event rate per unit length
 * @param length length of the observation window (Poisson mean = length * intensity)
 * @param confidence significance level compared against the test's p-value
 * @return <code>true</code> if the observations are consistent with a Poisson
 *         process at the given confidence (the null hypothesis is not rejected)
 */
static boolean isPoissonProcess(Frequency observations, double intensity, double length, double confidence) {
    final PoissonDistribution pd = new PoissonDistribution(length * intensity);
    final Iterator<?> it = observations.valuesIterator();
    final long[] observed = new long[observations.getUniqueCount()];
    final double[] expected = new double[observations.getUniqueCount()];
    int index = 0;
    while (it.hasNext()) {
        // NOTE(review): assumes the table's values are Long counts — confirm.
        final Long l = (Long) it.next();
        observed[index] = observations.getCount(l);
        expected[index] = pd.probability(l.intValue()) * observations.getSumFreq();
        // A zero expected count would make the chi square statistic undefined,
        // so such observations cannot have come from this process.
        if (expected[index] == 0) {
            return false;
        }
        index++;
    }
    // chiSquareTest returns the p-value; reject only when it falls below confidence.
    final double chi = TestUtils.chiSquareTest(expected, observed);
    return !(chi < confidence);
}
/**
 * Summarizes paired data: accumulates the mean and standard deviation of X,
 * the X-Y Pearson correlation, and the frequency distribution of Y, then
 * derives the category thresholds from the cumulative distribution of Y.
 *
 * @param x continuous variable values
 * @param y ordinal category codes paired element-wise with x
 * @throws IllegalArgumentException when x and y differ in length
 */
public void summarize(double[] x, int[] y){
    if(x.length!=y.length) throw new IllegalArgumentException("X and Y are of different lengths.");
    N = (double) x.length;
    Mean meanX = new Mean();
    StandardDeviation sdX = new StandardDeviation();
    PearsonCorrelation rxy = new PearsonCorrelation();
    Frequency table = new Frequency();
    for(int i=0;i<N;i++){
        meanX.increment(x[i]);
        sdX.increment(x[i]);
        rxy.increment(x[i], (double)y[i]);
        table.addValue(y[i]);
    }
    //compute thresholds
    // NOTE(review): getCumFreq(i+1) assumes the Y categories are exactly the
    // consecutive integers 1..nrow — confirm against the callers.
    // NOTE(review): assumes the thresholds field is sized >= nrow — TODO confirm.
    int nrow = table.getUniqueCount();
    double[] freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    for(int i=0;i<(nrow-1);i++){
        freqDataY[i] = table.getCumFreq(i+1);
        thresholds[i] = norm.inverseCumulativeProbability(freqDataY[i]/ntotal);
    }
    thresholds[nrow-1] = 10;//set last threshold to a large number less than infinity
}
/**
 * Summarizes the stored dataX/dataY fields: accumulates the mean and standard
 * deviation of X, the X-Y Pearson correlation, and the frequency distribution
 * of Y, then derives the category thresholds (alpha) from the cumulative
 * distribution of Y.
 *
 * @throws DimensionMismatchException when dataX and dataY differ in length
 */
public void summarize()throws DimensionMismatchException{
    if(dataX.length!=dataY.length) throw new DimensionMismatchException(dataX.length, dataY.length);
    Frequency table = new Frequency();
    meanX = new Mean();
    sdX = new StandardDeviation();
    rxy = new PearsonCorrelation();
    // nrow is a field here: it is first used as the data length, then
    // reassigned below to the number of Y categories.
    for(int i=0;i<nrow;i++){
        meanX.increment(dataX[i]);
        sdX.increment(dataX[i]);
        rxy.increment(dataX[i], (double)dataY[i]);
        table.addValue(dataY[i]);
    }
    //compute thresholds
    // NOTE(review): getCumFreq(i+1) assumes the Y categories are exactly the
    // consecutive integers 1..nrow — confirm against the callers.
    // NOTE(review): assumes the alpha field is sized >= nrow — TODO confirm.
    nrow = table.getUniqueCount();
    freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    for(int i=0;i<(nrow-1);i++){
        freqDataY[i] = table.getCumFreq(i+1);
        alpha[i] = normal.inverseCumulativeProbability(freqDataY[i]/ntotal);
    }
    alpha[nrow-1] = 10;//set last threshold to a large number less than infinity
}
assertEquals("0/0", modLayerAndMask); assertEquals(items.size(), freq.getUniqueCount()); testTotalMetricsLogRecord( getMetricsTotalLogRecords(stepId).get(0),
assertEquals(EXPECTED_COUNT, freq.getUniqueCount(), EXPECTED_COUNT / 10);