@Override
public void setOptions(String[] options) throws Exception {
  String tmpStr;

  tmpStr = Utils.getOption('S', options);
  if (tmpStr.length() > 0) {
    m_S = Integer.parseInt(tmpStr);
  } else {
    m_S = 0;
  }

  super.setOptions(options);
}
@Override
public void setOptions(String[] options) throws Exception {
  m_encodeMissingAsZero = Utils.getFlag('M', options);

  Utils.checkForRemainingOptions(options);
}
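// Hedged usage sketch (not from the original sources): how setOptions
// implementations like the two above are typically driven. Utils.getOption
// and Utils.getFlag destructively consume their entries from the array, so
// checkForRemainingOptions can then reject anything unrecognised. The class
// name SetOptionsDemo is a placeholder.
public class SetOptionsDemo {
  public static void main(String[] args) throws Exception {
    String[] options = {"-S", "42", "-M"};
    String s = weka.core.Utils.getOption('S', options); // blanks "-S" and "42"
    boolean m = weka.core.Utils.getFlag('M', options);  // blanks "-M"
    weka.core.Utils.checkForRemainingOptions(options);  // throws on leftovers
    System.out.println("S=" + s + ", M=" + m);
  }
}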
/**
 * Returns the current DOM document as string array.
 *
 * @return the document as string array
 * @throws Exception if anything goes wrong initializing the parsing
 */
public String[] toArray() throws Exception {
  return Utils.splitOptions(toCommandLine());
}
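// Hedged sketch of the splitOptions/joinOptions round-trip that toArray()
// relies on: splitOptions honours quoting, so nested option strings that
// contain spaces survive the trip. Standalone example, not from the class.
static String[] roundTrip() throws Exception {
  String cmd = "-W \"weka.classifiers.trees.J48 -C 0.25\" -X 10";
  String[] parts = weka.core.Utils.splitOptions(cmd);
  // parts = {"-W", "weka.classifiers.trees.J48 -C 0.25", "-X", "10"}
  String rejoined = weka.core.Utils.joinOptions(parts); // quotes restored where needed
  System.out.println(rejoined);
  return parts;
}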
/**
 * Prints this antecedent.
 *
 * @return a textual description of this antecedent
 */
@Override
public String toString() {
  String symbol = Utils.eq(value, 0.0) ? " <= " : " > ";
  return (att.name() + symbol + Utils.doubleToString(splitPoint, 6));
}
@Override
public void setOptions(String[] options) throws Exception {
  m_Is = (Utils.getOptionPos("Is", options) >= 0)
    ? Integer.parseInt(Utils.getOption("Is", options)) : m_Is;
  m_Iy = (Utils.getOptionPos("Iy", options) >= 0)
    ? Integer.parseInt(Utils.getOption("Iy", options)) : m_Iy;
  m_Payoff = (Utils.getOptionPos('P', options) >= 0)
    ? Integer.parseInt(Utils.getOption('P', options)) : m_Payoff;
  super.setOptions(options);
}
/**
 * Gets the current settings of KDTree.
 *
 * @return an array of strings suitable for passing to setOptions
 */
public String[] getOptions() {
  Vector<String> result = new Vector<String>();

  Collections.addAll(result, super.getOptions());

  result.add("-C");
  result.add((m_TreeConstructor.getClass().getName() + " "
    + Utils.joinOptions(m_TreeConstructor.getOptions())).trim());

  return result.toArray(new String[result.size()]);
}
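// Hedged counterpart sketch (assumed, not from the original class): how a
// matching setOptions would parse the "-C" value produced above. The value
// has the form "<classname> <options>"; splitOptions separates them and
// Utils.forName instantiates the tree constructor. TreeConstructor is a
// placeholder for whatever base type m_TreeConstructor actually has.
String spec = Utils.getOption('C', options);
if (spec.length() > 0) {
  String[] specParts = Utils.splitOptions(spec);
  String className = specParts[0];
  specParts[0] = ""; // the remaining entries are the constructor's own options
  m_TreeConstructor = (TreeConstructor) Utils.forName(
    TreeConstructor.class, className, specParts);
}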
Instances knn = null;
try {
  knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));
} catch (Exception ex) {
  Logger.getLogger(MLkNN.class.getName()).log(Level.SEVERE, null, ex);
}

// ... for each label i: count the positive values ("aces") among the
// k nearest neighbours ...
double value = Double.parseDouble(train.attribute(labelIndices[i]).value(
    (int) knn.instance(k).value(labelIndices[i])));
if (Utils.eq(value, 1.0)) {
  aces++;
  predictions[i] = false;
} else {
  // note: an unseeded Random created here makes predictions non-reproducible
  Random rnd = new Random();
  predictions[i] = (rnd.nextInt(2) == 1);
}
Instances trainData = new Instances(instances);
trainData.deleteWithMissingClass();
Instances trainDataCopy = new Instances(trainData); // just in case base classifier is sensitive to order of data

m_BestPerformance = -99;
m_NumAttributes = trainData.numAttributes();
Random random = new Random(m_Seed);
trainData.randomize(random);
m_TrainFoldSize = trainData.trainCV(m_NumFolds, 0).numInstances();

// if there are no parameters to optimize, just build on all the data
if (m_CVParams.size() == 0) {
  m_Classifier.buildClassifier(trainDataCopy);
  m_BestClassifierOptions = m_InitOptions;
  return;
}

if (trainData.classAttribute().isNominal()) {
  trainData.stratify(m_NumFolds);
}

// strip the parameters being optimized from the current classifier options
for (int i = 0; i < m_CVParams.size(); i++) {
  Utils.getOption(((CVParameter) m_CVParams.elementAt(i)).m_ParamChar,
    m_ClassifierOptions);
}

// ... (cross-validated parameter search elided) ...
m_Classifier.buildClassifier(trainDataCopy);
Instances data = new Instances(instances);
data.deleteWithMissingClass();

if (data.numInstances() < m_Folds) {
  throw new Exception("Not enough data for REP.");
}

m_ClassAttribute = data.classAttribute();
if (m_ClassAttribute.isNominal()) {
  m_NumClasses = m_ClassAttribute.numValues();
} else {
  m_NumClasses = 1;
}

m_Cnsqt = new double[m_NumClasses];
m_Targets = new ArrayList<double[][]>();
m_Random = new Random(m_Seed);

// ... (rule growing and pruning elided) ...

if (m_ClassAttribute.isNominal()) {
  Utils.normalize(m_Cnsqt);
  if (Utils.gr(Utils.sum(m_DefDstr), 0)) {
    Utils.normalize(m_DefDstr);
  }
}
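// Minimal illustration of the Utils.normalize calls above: normalize divides
// each element by the array's sum, turning raw counts into a distribution.
// The Utils.gr guard matters because normalize throws on a zero sum.
static void normalizeDemo() {
  double[] counts = {2.0, 6.0, 2.0};
  if (weka.core.Utils.gr(weka.core.Utils.sum(counts), 0)) {
    weka.core.Utils.normalize(counts); // -> {0.2, 0.6, 0.2}
  }
}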
testCapabilities(D);

r = new Random(m_S);
int L = D.classIndex();
int N = D.numInstances();
int d = D.numAttributes() - L;

double w = payoff(h, new Instances(D));
if (getDebug()) System.out.print("h_{t=" + 0 + "} & " + Arrays.toString(s)); //+"; w = "+w);
if (getDebug()) System.out.print("& " + Utils.doubleToString(payoff(h, new Instances(D), 1), 8, 2));
if (getDebug()) System.out.print("& " + Utils.doubleToString(payoff(h, new Instances(D), 2), 8, 2));
if (getDebug()) System.out.println("& " + Utils.doubleToString(payoff(h, new Instances(D), 5), 8, 2));

// ... (search over candidate structures h_ elided) ...

h = h_;
if (getDebug()) System.out.print("h_{t=" + t + "} & " + Arrays.toString(s)); //+"; w = "+w);
if (getDebug()) System.out.print("& " + Utils.doubleToString(payoff(h_, new Instances(D), 1), 8, 2));
if (getDebug()) System.out.print("& " + Utils.doubleToString(payoff(h_, new Instances(D), 2), 8, 2));
if (getDebug()) System.out.println("& " + Utils.doubleToString(payoff(h_, new Instances(D), 5), 8, 2));
atts = new double[m_DatasetFormat.numAttributes()];
x = rand.nextDouble();
if (Utils.eq(x, 0)) {
  y = getAmplitude();
} else {
  // ... (function value for nonzero x elided) ...
}
// superimpose Gaussian noise, scaled by the amplitude and the noise rate
y = y + getAmplitude() * m_NoiseRandom.nextGaussian() * getNoiseRate();

result = new DenseInstance(1.0, atts);
result.setDataset(m_DatasetFormat);
/** Display a representation of this estimator */
@Override
public String toString() {
  if (m_Covariance == null) {
    calculateCovariance();
  }
  String result = "NN Conditional Estimator. " + m_CondValues.size()
    + " data points. Mean = " + Utils.doubleToString(m_ValueMean, 4, 2)
    + " Conditional mean = " + Utils.doubleToString(m_CondMean, 4, 2);
  result += " Covariance Matrix: \n" + m_Covariance;
  return result;
}
result = new Instances(getOutputFormat());
for (i = 0; i < instances.numInstances(); i++) {
  inst = instances.instance(i);
  values = inst.toDoubleArray();

  for (n = 0; n < values.length; n++) {
    if (!m_Cols.isInRange(n) || !instances.attribute(n).isNumeric()
      || inst.isMissing(n)) {
      continue;
    }

    // get the string representation of the value
    if (instances.attribute(n).type() == Attribute.DATE) {
      value = inst.stringValue(n);
    } else {
      value = Utils.doubleToString(inst.value(n), MAX_DECIMALS);
    }

    // values unseen in the output format map to missing
    int index = result.attribute(n).indexOfValue(value);
    if (index == -1) {
      values[n] = Utils.missingValue();
    } else {
      values[n] = index;
    }
  }

  if (inst instanceof SparseInstance) {
    newInst = new SparseInstance(inst.weight(), values);
  } else {
    newInst = new DenseInstance(inst.weight(), values);
  }
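// Hedged illustration of the value-to-label mapping used above (contextual
// fragment; result and n come from the surrounding loop, and 6 stands in for
// the filter's MAX_DECIMALS constant): the numeric value is rendered as a
// string and looked up among the nominal attribute's values.
String label = Utils.doubleToString(42.0, 6);        // "42" (trailing zeros trimmed)
int idx = result.attribute(n).indexOfValue(label);   // -1 if "42" was never seen
double out = (idx == -1) ? Utils.missingValue() : idx;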
Instance before = data.instance(pos);
if ((int) before.classValue() == 0) {
  m_NoiseM[pos] = null;
  m_NoiseV[pos] = null;
}

// ... vote with the nearest exemplars: take the closest remaining one,
// count its class, then exclude it from further rounds ...
int index = Utils.minIndex(dists);
pred[(int) m_Class[index]]++;
dists[index] = Double.POSITIVE_INFINITY;

// a point counts as noise when the vote disagrees with its class
int clas = Utils.maxIndex(pred);
if ((int) before.classValue() != clas) {
  noises_relationInsts.add(datum);
}

relationValue = noises.attribute(1).addRelation(noises_relationInsts);
noises.setValue(0, before.value(0));
noises.setValue(1, relationValue);
after.setValue(2, before.classValue());

if (Utils.gr(noises.relationalValue(1).sumOfWeights(), 0)) {
  for (int i = 0; i < m_Dimension; i++) {
    m_NoiseM[pos][i] = noises.relationalValue(1).meanOrMode(i);
    m_NoiseV[pos][i] = noises.relationalValue(1).variance(i);
    if (Utils.eq(m_NoiseV[pos][i], 0.0)) {
      m_NoiseV[pos][i] = m_ZERO;
    }
  }
}
int newAttIndex = m_outputStructure.numAttributes() - 1;
if (m_voteLabels && Utils.sum(labelVotes) > 0) {
  int maxIndex = Utils.maxIndex(labelVotes);
  label = m_matchRules.get(maxIndex).getLabel();
}

double[] vals = new double[m_outputStructure.numAttributes()];
for (int i = 0; i < inputI.numAttributes(); i++) {
  if (!inputI.attribute(i).isString()) {
    vals[i] = inputI.value(i);
  } else {
    if (!batch) {
      // streaming: a string attribute holds only the current value
      vals[i] = 0;
      String v = inputI.stringValue(i);
      m_outputStructure.attribute(i).setStringValue(v);
    } else {
      String v = inputI.stringValue(i);
      vals[i] = m_outputStructure.attribute(i).addStringValue(v);
    }
  }
}

if (m_hasLabels) {
  // ... (setting a matched label elided) when no rule matched:
  if (!getConsumeNonMatching()) {
    vals[newAttIndex] = Utils.missingValue();
  } else {
    return null;
  }
}

result = new DenseInstance(1.0, vals);
result.setDataset(m_outputStructure);
@Override
public void updateFinished() {
  if (m_canopies == null || m_canopies.numInstances() == 0) {
    return;
  }

  // ... for each canopy i (centerSums, density and numMissingForNumerics
  // come from the elided per-canopy bookkeeping) ...
  double[] finalCenter = new double[m_canopies.numAttributes()];
  for (int j = 0; j < m_canopies.numAttributes(); j++) {
    if (m_canopies.attribute(j).isNumeric()) {
      if (numMissingForNumerics[j] == density[0]) {
        finalCenter[j] = Utils.missingValue();
      } else {
        finalCenter[j] =
          centerSums[j][0] / (density[0] - numMissingForNumerics[j]);
      }
    } else if (m_canopies.attribute(j).isNominal()) {
      int mode = Utils.maxIndex(centerSums[j]);
      if (mode == centerSums[j].length - 1) {
        finalCenter[j] = Utils.missingValue();
      } else {
        finalCenter[j] = mode;
      }
    }
  }

  Instance finalCenterInst =
    m_canopies.instance(i) instanceof SparseInstance
      ? new SparseInstance(1.0, finalCenter)
      : new DenseInstance(1.0, finalCenter);
  m_canopies.set(i, finalCenterInst);
  m_canopies.instance(i).setWeight(density[0]);
  densities[i] = density[0];
}
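// Hedged illustration (assumed layout, not from the source): for a nominal
// attribute, centerSums[j] above appears to hold one count per value plus a
// trailing count for missing values, so the center falls back to missing only
// when the missing bin dominates. Minimal standalone check:
static void modeDemo() {
  double[] counts = {3.0, 7.0, 2.0, 1.0};          // values a, b, c + missing bin
  int mode = weka.core.Utils.maxIndex(counts);     // 1 -> value "b" is the mode
  boolean allMissing = (mode == counts.length - 1);
  System.out.println("mode=" + mode + ", missing dominates: " + allMissing);
}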
public void testsqrt() {
  m_Filter = getFilter("sqrt(A)");
  Instances result = useFilter();
  assertEquals(m_Instances.numAttributes(), result.numAttributes());
  assertEquals(m_Instances.numInstances(), result.numInstances());

  // check equality
  boolean equal = true;
  for (int i = 0; i < result.numInstances(); i++) {
    if (m_Instances.instance(i) instanceof SparseInstance) {
      continue;
    }
    if (!Utils.eq(Math.sqrt(m_Instances.instance(i).value(m_AttIndex)),
      result.instance(i).value(m_AttIndex))) {
      equal = false;
      break;
    }
  }
  if (!equal) {
    fail("Filter produces different result!");
  }
}
public void testTypical() {
  Instances result = useFilter();
  // Number of attributes and instances shouldn't change
  assertEquals(m_Instances.numAttributes(), result.numAttributes());
  assertEquals(m_Instances.numInstances(), result.numInstances());

  // Check conversion is OK
  for (int j = 0; j < result.numAttributes(); j++) {
    if (result.attribute(j).isNumeric()) {
      double mean = result.meanOrMode(j);
      assertTrue("Mean should be 0", Utils.eq(mean, 0));
      double stdDev = Math.sqrt(result.variance(j));
      assertTrue("StdDev should be 1 (or 0)",
        Utils.eq(stdDev, 0) || Utils.eq(stdDev, 1));
    }
  }
}
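// Why the tests above compare with Utils.eq rather than ==: Utils.eq treats
// two doubles as equal within a small tolerance, which absorbs the
// floating-point rounding the filters introduce. A minimal standalone check:
static void toleranceDemo() {
  double a = Math.sqrt(2.0) * Math.sqrt(2.0);     // 2.0000000000000004
  System.out.println(a == 2.0);                   // false
  System.out.println(weka.core.Utils.eq(a, 2.0)); // true
}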
/**
 * Computes the Prior and PriorN probabilities for each class of the
 * training set.
 */
private void ComputePrior() {
  for (int i = 0; i < numLabels; i++) {
    int temp_Ci = 0;
    for (int j = 0; j < train.numInstances(); j++) {
      double value = Double.parseDouble(train.attribute(labelIndices[i]).value(
        (int) train.instance(j).value(labelIndices[i])));
      if (Utils.eq(value, 1.0)) {
        temp_Ci++;
      }
    }
    PriorProbabilities[i] = (smooth + temp_Ci) / (smooth * 2 + train.numInstances());
    PriorNProbabilities[i] = 1 - PriorProbabilities[i];
  }
}
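// Worked check of the smoothed prior above (illustrative numbers; assumes the
// usual Laplace-style smoothing with smooth = 1.0): 3 positive examples for a
// label among 10 training instances give
//   PriorProbabilities[i]  = (1 + 3) / (1 * 2 + 10) = 4 / 12 ≈ 0.333
//   PriorNProbabilities[i] = 1 - 0.333 ≈ 0.667
// so no label ever receives a hard 0 or 1 prior, even with no positives.
static double smoothedPrior(double smooth, int positives, int numInstances) {
  return (smooth + positives) / (smooth * 2 + numInstances);
}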
@Override
public double[] distributionForInstance(Instance x) throws Exception {

  int L = x.classIndex();

  //if there is only one class (as for e.g. in some hier. mtds) predict it
  //if(L == 1) return new double[]{1.0};

  Instance x_sl = convertInstance(x, L);  // the sl instance
  x_sl.setDataset(m_InstancesTemplate);   // where y in {comb_1,comb_2,...,comb_k}

  double w[] = m_Classifier.distributionForInstance(x_sl);  // w[j] = p(y_j) for each j = 1,...,k
  int max_j = Utils.maxIndex(w);                            // j of max w[j]
  //int max_j = (int)m_Classifier.classifyInstance(x_sl);   // where comb_i is selected

  String y_max = m_InstancesTemplate.classAttribute().value(max_j); // comb_i e.g. "0+3+0+0+1+2+0+0"

  double y[] = Arrays.copyOf(MLUtils.toDoubleArray(MLUtils.decodeValue(y_max)), L * 2); // "0+3+0+0+1+2+0+0" -> [0.0,3.0,0.0,...,0.0]

  HashMap<Double, Double> votes[] = new HashMap[L];
  for (int j = 0; j < L; j++) {
    votes[j] = new HashMap<Double, Double>();
  }

  for (int i = 0; i < w.length; i++) {
    double y_i[] = MLUtils.toDoubleArray(MLUtils.decodeValue(
      m_InstancesTemplate.classAttribute().value(i)));
    for (int j = 0; j < y_i.length; j++) {
      votes[j].put(y_i[j],
        votes[j].containsKey(y_i[j]) ? votes[j].get(y_i[j]) + w[i] : w[i]);
    }
  }

  // some confidence information
  for (int j = 0; j < L; j++) {
    y[j + L] = votes[j].size() > 0 ? Collections.max(votes[j].values()) : 0.0;
  }

  return y;
}
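// Standalone sketch of the vote aggregation used above (assumed semantics,
// simplified): each class value encodes one label combination, and the base
// classifier's distribution w spreads probability mass over combinations;
// per label j, the mass is accumulated by label value, and the largest
// accumulated mass serves as that label's confidence. All names here are
// illustrative, not from the original class.
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class VoteSketch {
  public static void main(String[] args) {
    double[][] combs = {{0, 1}, {1, 1}, {1, 0}};  // decoded label combinations
    double[] w = {0.2, 0.5, 0.3};                 // p(comb_i) from the base model
    int L = 2;
    for (int j = 0; j < L; j++) {
      Map<Double, Double> votes = new HashMap<>();
      for (int i = 0; i < combs.length; i++) {
        votes.merge(combs[i][j], w[i], Double::sum); // accumulate mass per value
      }
      // label 0 -> 0.8 (value 1), label 1 -> 0.7 (value 1)
      System.out.println("label " + j + ": " + Collections.max(votes.values()));
    }
  }
}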