private void incorporateOneInstance (Instance instance, double instanceWeight) { Labeling labeling = instance.getLabeling (); if (labeling == null) return; // Handle unlabeled instances by skipping them FeatureVector fv = (FeatureVector) instance.getData (); double oneNorm = fv.oneNorm(); if (oneNorm <= 0) return; // Skip instances that have no features present if (docLengthNormalization > 0) // Make the document have counts that sum to docLengthNormalization // I.e., if 20, it would be as if the document had 20 words. instanceWeight *= docLengthNormalization / oneNorm; assert (instanceWeight > 0 && !Double.isInfinite(instanceWeight)); for (int lpos = 0; lpos < labeling.numLocations(); lpos++) { int li = labeling.indexAtLocation (lpos); double labelWeight = labeling.valueAtLocation (lpos); if (labelWeight == 0) continue; //System.out.println ("NaiveBayesTrainer me.increment "+ labelWeight * instanceWeight); me[li].increment (fv, labelWeight * instanceWeight); // This relies on labelWeight summing to 1 over all labels pe.increment (li, labelWeight * instanceWeight); } }
private void incorporateOneInstance (Instance instance, double instanceWeight) { Labeling labeling = instance.getLabeling (); if (labeling == null) return; // Handle unlabeled instances by skipping them FeatureVector fv = (FeatureVector) instance.getData (); double oneNorm = fv.oneNorm(); if (oneNorm <= 0) return; // Skip instances that have no features present if (docLengthNormalization > 0) // Make the document have counts that sum to docLengthNormalization // I.e., if 20, it would be as if the document had 20 words. instanceWeight *= docLengthNormalization / oneNorm; assert (instanceWeight > 0 && !Double.isInfinite(instanceWeight)); for (int lpos = 0; lpos < labeling.numLocations(); lpos++) { int li = labeling.indexAtLocation (lpos); double labelWeight = labeling.valueAtLocation (lpos); if (labelWeight == 0) continue; //System.out.println ("NaiveBayesTrainer me.increment "+ labelWeight * instanceWeight); me[li].increment (fv, labelWeight * instanceWeight); // This relies on labelWeight summing to 1 over all labels pe.increment (li, labelWeight * instanceWeight); } }
/**
 * Accumulates one training instance's statistics into the trainer's
 * estimators: for each label location with nonzero weight, increments the
 * per-class feature estimator {@code me[li]} with the instance's feature
 * vector and the class-prior estimator {@code pe}, both scaled by
 * {@code labelWeight * instanceWeight}. Instances with a null labeling or an
 * all-zero feature vector are skipped. When {@code docLengthNormalization > 0}
 * the weight is rescaled so the document's counts sum to that value.
 * NOTE(review): the cast assumes {@code instance.getData()} is a
 * {@code FeatureVector} — confirm upstream pipes guarantee this.
 *
 * @param instance       the labeled training instance to incorporate
 * @param instanceWeight multiplicative weight; asserted positive and finite
 */
private void incorporateOneInstance (Instance instance, double instanceWeight) { Labeling labeling = instance.getLabeling (); if (labeling == null) return; // Handle unlabeled instances by skipping them FeatureVector fv = (FeatureVector) instance.getData (); double oneNorm = fv.oneNorm(); if (oneNorm <= 0) return; // Skip instances that have no features present if (docLengthNormalization > 0) // Make the document have counts that sum to docLengthNormalization // I.e., if 20, it would be as if the document had 20 words. instanceWeight *= docLengthNormalization / oneNorm; assert (instanceWeight > 0 && !Double.isInfinite(instanceWeight)); for (int lpos = 0; lpos < labeling.numLocations(); lpos++) { int li = labeling.indexAtLocation (lpos); double labelWeight = labeling.valueAtLocation (lpos); if (labelWeight == 0) continue; //System.out.println ("NaiveBayesTrainer me.increment "+ labelWeight * instanceWeight); me[li].increment (fv, labelWeight * instanceWeight); // This relies on labelWeight summing to 1 over all labels pe.increment (li, labelWeight * instanceWeight); } }