/**
 * Instantiates a fresh {@link LinearThresholdUnit} (cloned from {@link #baseLTU}),
 * initializes it, and installs it in the network at the slot for the given label.
 *
 * @param label The label associated with the LTU
 **/
public void setNetworkLabel(int label) {
    LinearThresholdUnit unit = (LinearThresholdUnit) baseLTU.clone();
    unit.initialize(numExamples, numFeatures);
    network.set(label, unit);
}
/**
 * Trains this linear threshold unit on one example.  The example is scored with
 * {@link #score(Object)} and compared against {@link #threshold}; if the prediction
 * disagrees with the label (within the configured thickness margins), the weight
 * vector is promoted for a positive label or demoted for a negative one.
 *
 * <p>
 * Note that {@link #classify(Object)} is never invoked here; scoring goes through
 * {@link #score(Object)} directly.
 *
 * @param exampleFeatures The example's array of feature indices
 * @param exampleValues The example's array of feature values
 * @param exampleLabels The example's label(s)
 * @param labelValues The labels' values
 **/
public void learn(int[] exampleFeatures, double[] exampleValues, int[] exampleLabels,
        double[] labelValues) {
    assert exampleLabels.length == 1 : "Example must have a single label.";
    assert exampleLabels[0] == 0 || exampleLabels[0] == 1 : "Example has unallowed label value.";

    boolean labelIsPositive = exampleLabels[0] == 1;
    double prediction = score(exampleFeatures, exampleValues);

    // The two checks are deliberately kept independent (not if/else): each consults
    // its own thickness margin against the same pre-update score.
    if (shouldPromote(labelIsPositive, prediction, threshold, positiveThickness))
        promote(exampleFeatures, exampleValues,
                computeLearningRate(exampleFeatures, exampleValues, prediction, labelIsPositive));
    if (shouldDemote(labelIsPositive, prediction, threshold, negativeThickness))
        demote(exampleFeatures, exampleValues,
                computeLearningRate(exampleFeatures, exampleValues, prediction, labelIsPositive));
}
/**
 * Given the LTU learner to optimize.
 *
 * @param ltu the LTU learner.
 */
public LinearThresholdUnitOptimizer(LinearThresholdUnit ltu) {
    super(ltu.demandLexicon(), ltu.featurePruningThreshold);
    ltuLearner = ltu;
}
/**
 * Computes the score for the specified example vector which will be thresholded to make
 * the binary classification.
 *
 * @param example The example object.
 * @return The score for the given example vector.
 **/
public double score(Object example) {
    Object[] parsed = getExampleArray(example, false);
    int[] featureIndices = (int[]) parsed[0];
    double[] featureValues = (double[]) parsed[1];
    return score(featureIndices, featureValues);
}
// NOTE(review): this span is the interior of a method whose signature lies outside
// this chunk — exampleFeatures, exampleValues, l and labelValues are declared there.
// TODO confirm against the full file before relying on these notes.
conjunctiveLabels |= labelLexicon.lookupKey(label).isConjunctive();
// Lazily grow the network for a previously unseen label: clone the prototype LTU,
// size it for the current data, and install it at the label's index.
LinearThresholdUnit ltu = (LinearThresholdUnit) baseLTU.clone();
ltu.initialize(numExamples, numFeatures);
network.set(label, ltu);
N = label + 1;  // presumably N tracks the number of LTUs in use — verify at call sites
ltu.learn(exampleFeatures, exampleValues, l, labelValues);
@Override
protected int[] identifyUselessFeatures() {
    // NOTE(review): this chunk appears truncated — `entry` is referenced with no
    // visible loop declaring it, and braces opened here are closed outside this view.
    // Comments below are hedged accordingly.
    Lexicon lex = this.ltuLearner.demandLexicon();
    if (lex != null) {
        // Features on the whitelist are exempt from pruning regardless of weight.
        HashSet<Feature> whitelist = compileWhitelist(lex);
        int fi = entry.getValue();  // presumably entry iterates lex's feature->index map; verify
        if (!whitelist.contains(entry.getKey())) {
            // Magnitude of the feature's raw weight; the pruning comparison that
            // consumes `wt` is outside this view.
            double wt = Math.abs(this.ltuLearner.getWeightVector().getRawWeights().get(fi));
/** * Sets the parameters from the parent's parameters object, giving defaults to all * parameters declared in this object. **/ public Parameters(Learner.Parameters p) { super(p); baseLTU = (LinearThresholdUnit) defaultBaseLTU.clone(); }
/**
 * Reinitializes the learner to the state it started at before any learning was
 * performed: the parent's state is reset, the bias variance returns to its starting
 * value, and the variance vector is replaced with an empty one.
 **/
public void forget() {
    super.forget();
    // The two resets below touch independent fields; order is immaterial.
    variancesBias = 1 / initialVariance;
    variances = variances.emptyClone();
}
/**
 * The default evaluation method simply computes the score for the example and returns a
 * {@link DiscretePrimitiveStringFeature} set to either the second value from the label
 * classifier's array of allowable values if the score is greater than or equal to
 * {@link #threshold} or the first otherwise.
 *
 * @param exampleFeatures The example's array of feature indices
 * @param exampleValues The example's array of feature values
 * @return The computed feature (in a vector).
 **/
public FeatureVector classify(int[] exampleFeatures, double[] exampleValues) {
    // Delegates the threshold decision to featureValue(..) and wraps the result.
    return new FeatureVector(featureValue(exampleFeatures, exampleValues));
}
/** Simply calls <code>doneLearning()</code> on every LTU in the network. */
public void doneLearning() {
    super.doneLearning();
    final int size = network.size();
    for (int index = 0; index < size; ++index) {
        Object entry = network.get(index);
        if (entry != null)
            ((LinearThresholdUnit) entry).doneLearning();
    }
}
/**
 * Simply calls {@link LinearThresholdUnit#doneWithRound()} on every LTU in the network.
 */
public void doneWithRound() {
    super.doneWithRound();
    final int size = network.size();
    for (int index = 0; index < size; ++index) {
        Object entry = network.get(index);
        if (entry != null)
            ((LinearThresholdUnit) entry).doneWithRound();
    }
}
/**
 * Sets the labels list.  The supplied classifier must produce exactly two allowable
 * values; otherwise an error is reported and the JVM exits.
 *
 * <p>
 * Fix: the guard deliberately admits a <code>null</code> labeler, but the original code
 * then unconditionally dereferenced <code>l</code> (and indexed
 * <code>allowableValues</code>) while populating the label lexicon, throwing a
 * {@code NullPointerException}.  The lexicon population is now skipped when
 * <code>l</code> is <code>null</code>.
 *
 * @param l A new label producing classifier.
 **/
public void setLabeler(Classifier l) {
    if (!(l == null || l.allowableValues().length == 2)) {
        System.err.println("Error: " + name
                + ": An LTU must be given a single binary label classifier.");
        new Exception().printStackTrace();
        System.exit(1);
    }

    super.setLabeler(l);
    allowableValues = l == null ? null : l.allowableValues();
    labelLexicon.clear();
    if (l != null) {
        // One lexicon entry per allowable value: index 0 for the negative value,
        // index 1 for the positive value.
        labelLexicon.lookup(new DiscretePrimitiveStringFeature(l.containingPackage, l.name, "",
                allowableValues[0], (short) 0, (short) 2), true);
        labelLexicon.lookup(new DiscretePrimitiveStringFeature(l.containingPackage, l.name, "",
                allowableValues[1], (short) 1, (short) 2), true);
    }
    predictions = new FVector(2);
    createPrediction(0);
    createPrediction(1);
}
/** Sets all the default values; the base LTU starts as a clone of the default prototype. */
public Parameters() {
    this.baseLTU = (LinearThresholdUnit) defaultBaseLTU.clone();
}
/**
 * Returns a deep clone of this learning algorithm.  The {@link #variances} weight
 * vector is cloned explicitly on top of the shallow {@code super.clone()}; primitive
 * state (e.g. the bias variance) is copied by {@code super.clone()} itself.
 *
 * <p>
 * Fix: the catch handler now prints the stack trace before exiting, consistent with
 * the failure handling in {@code SparseNetworkLearner.clone()}.
 *
 * @return A deep clone of this learning algorithm.
 **/
public Object clone() {
    SparseConfidenceWeighted clone = null;
    try {
        clone = (SparseConfidenceWeighted) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning SparseConfidenceWeighted: " + e);
        e.printStackTrace();
        System.exit(1);
    }
    if (variances != null)
        clone.variances = (SparseWeightVector) variances.clone();
    return clone;
}
/**
 * Returns a deep clone of this learning algorithm: the base LTU and every non-null LTU
 * in the network are cloned individually on top of the shallow {@code super.clone()}.
 *
 * @return A deep clone of this learning algorithm.
 **/
public Object clone() {
    SparseNetworkLearner copy = null;
    try {
        copy = (SparseNetworkLearner) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning SparseNetworkLearner: " + e);
        e.printStackTrace();
        System.exit(1);
    }

    copy.baseLTU = (LinearThresholdUnit) baseLTU.clone();
    int size = network.size();
    copy.network = new OVector(size);
    for (int index = 0; index < size; ++index) {
        LinearThresholdUnit unit = (LinearThresholdUnit) network.get(index);
        copy.network.add(unit == null ? null : unit.clone());
    }
    return copy;
}