    /**
     * Avoid using this function; it currently exists only for the liblinear backend.
     *
     * @return the internal weight array
     */
    public double[] getWeightArray() {
        return super.getInternalArray();
    }
}
    @Override
    public FeatureVector getFeatureVector() {
        // Shift the base features into the index block reserved for this output label.
        return input.base_fv.copyWithShift(output * input.base_n_fea);
    }
    /**
     * Decide if the feature vector contains features which are not covered by
     * the dense vector.
     *
     * @param fv the sparse feature vector to check
     * @return true if the dense vector is too short to cover all indices in fv
     */
    public boolean needAllocateSpace(FeatureVector fv) {
        return fv.maxIdx() >= u.length;
    }
    public double predictLCLRBinaryScore(IStructure is, IInstance ins) {
        FeatureVector fv = is.getFeatureVector();
        // Normalize by the instance size and append the global bias feature before scoring.
        fv.normalize(ins.size());
        fv.slowAddFeature(L2LossInstanceWithAlphas.INDIRECT_GLOBAL_BIAS, 1.0);
        return dotProduct(fv);
    }
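/*
 * A hedged, self-contained sketch of the same scoring pipeline on plain arrays,
 * assuming normalize(k) divides every value by k and the bias is simply one extra
 * feature with value 1.0 at a fixed index (both are assumptions about the
 * surrounding classes, not guaranteed by this code).
 */
class LclrScoreSketch {
    public static void main(String[] args) {
        double[] w = {0.5, 1.0, 0.0, 2.0};  // dense weights; index 0 is the assumed bias slot
        int[] idx = {1, 3};                 // sparse feature indices of the structure
        double[] val = {2.0, 4.0};          // matching feature values
        int instanceSize = 2;               // plays the role of ins.size()
        double score = w[0] * 1.0;          // slowAddFeature(bias, 1.0) contributes w[bias]
        for (int i = 0; i < idx.length; i++)
            score += w[idx[i]] * (val[i] / instanceSize); // normalize, then dot product
        System.out.println(score);          // 0.5 + 1.0 + 4.0 = 5.5
    }
}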
    /**
     * Return a new feature vector that is exactly the same as the original
     * one, except that every index is shifted by "gap".
     *
     * @param gap the offset added to every feature index
     * @return the shifted copy
     */
    public FeatureVector copyWithShift(int gap) {
        FeatureVector res = new FeatureVector(this.idx, this.value);
        for (int i = 0; i < res.idx.length; i++) {
            res.idx[i] += gap;
            assert res.idx[i] >= 0;
        }
        return res;
    }
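/*
 * A minimal standalone sketch of what the shift accomplishes in the one-vs-all
 * multiclass encoding above: each label owns a disjoint block of base_n_fea
 * indices, so identical base features map to different weights per label.
 * The class and variable names below are illustrative, not part of this codebase.
 */
class ShiftSketch {
    public static void main(String[] args) {
        int baseNumFeatures = 4;               // plays the role of input.base_n_fea
        int[] baseIdx = {0, 2, 3};             // sparse indices of the base feature vector
        for (int label = 0; label < 3; label++) {
            int gap = label * baseNumFeatures; // same gap as copyWithShift(output * base_n_fea)
            int[] shifted = new int[baseIdx.length];
            for (int i = 0; i < baseIdx.length; i++)
                shifted[i] = baseIdx[i] + gap;
            System.out.println("label " + label + ": " + java.util.Arrays.toString(shifted));
        }
    }
}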
    public LabeledMulticlassData(Map<String, Integer> m, Integer n_fea) {
        label_mapping = m;
        n_base_feature_in_train = n_fea;
        sp = new StructuredProblem();
    }
    @Override
    public String toString() {
        return "" + output + " " + input.base_fv.toString();
    }
    /**
     * Decide if the given dense vector contains entries which are not covered
     * by this dense vector.
     *
     * @param dv the dense vector to check
     * @return true if this vector is too short to cover all entries of dv
     */
    public boolean needAllocateSpace(DenseVector dv) {
        return dv.getVectorLength() >= u.length;
    }
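/*
 * Both needAllocateSpace checks support a grow-on-demand dense vector. The
 * reallocation they typically guard might look like the sketch below; the
 * doubling policy is an assumption for illustration, not necessarily what
 * this class actually does.
 */
class GrowSketch {
    static double[] u = new double[8];

    static void allocateIfNeeded(int maxIdx) {
        if (maxIdx < u.length)
            return;                            // mirrors needAllocateSpace(...) == false
        int newLen = u.length;
        while (newLen <= maxIdx)
            newLen *= 2;                       // assumed growth policy
        u = java.util.Arrays.copyOf(u, newLen);
    }

    public static void main(String[] args) {
        allocateIfNeeded(20);
        System.out.println(u.length);          // 32
    }
}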
    @Override
    public void fillWeightVector(WeightVector w) {
        // Add this instance's features into w, scaled by y * alpha.
        w.addSparseFeatureVector(fv, y * alpha);
    }
    /**
     * Return the dot product of a sparse feature vector and this dense vector.
     * Note that if the sparse vector contains elements (feature indexes) that
     * do not exist in the dense vector, the dot product ignores them instead
     * of throwing an exception.
     *
     * @param fv the sparse feature vector
     * @return the dot product
     */
    public double dotProduct(FeatureVector fv) {
        return dotProduct(u, fv);
    }
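/*
 * A self-contained sketch of the ignore-out-of-range behavior documented above,
 * using parallel index/value arrays in place of FeatureVector (an assumption
 * about its internal layout).
 */
class SparseDotSketch {
    // Dot product of a dense vector with a sparse vector; indices beyond u.length are skipped.
    static double dot(double[] u, int[] idx, double[] val) {
        double sum = 0.0;
        for (int i = 0; i < idx.length; i++)
            if (idx[i] >= 0 && idx[i] < u.length)
                sum += u[idx[i]] * val[i];
        return sum;
    }

    public static void main(String[] args) {
        double[] u = {1.0, 2.0, 3.0};
        int[] idx = {0, 2, 7};                 // index 7 is not covered by u and is ignored
        double[] val = {1.0, 1.0, 5.0};
        System.out.println(dot(u, idx, val));  // 1.0 + 3.0 = 4.0
    }
}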
    public void sort() {
        // Rebuild the parallel idx/value arrays in sorted index order.
        List<FeatureItem> items = convert2SortedFeatureNodeArray(this);
        assert items.size() == this.idx.length;
        this.idx = new int[items.size()];
        this.value = new double[items.size()];
        for (int i = 0; i < items.size(); i++) {
            this.idx[i] = items.get(i).index;
            this.value[i] = items.get(i).value;
        }
    }
}
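/*
 * The FeatureItem detour above just reorders the parallel idx/value arrays by
 * feature index. A hedged sketch of the same reordering on plain arrays,
 * assuming ascending index order (the comparator and copy strategy here are
 * illustrative):
 */
class ParallelSortSketch {
    public static void main(String[] args) {
        int[] idx = {5, 1, 3};
        double[] val = {0.5, 0.1, 0.3};
        // Sort positions by their feature index, then rebuild both arrays in that order.
        Integer[] order = {0, 1, 2};
        java.util.Arrays.sort(order, java.util.Comparator.comparingInt(p -> idx[p]));
        int[] sortedIdx = new int[idx.length];
        double[] sortedVal = new double[val.length];
        for (int i = 0; i < order.length; i++) {
            sortedIdx[i] = idx[order[i]];
            sortedVal[i] = val[order[i]];
        }
        System.out.println(java.util.Arrays.toString(sortedIdx)); // [1, 3, 5]
        System.out.println(java.util.Arrays.toString(sortedVal)); // [0.1, 0.3, 0.5]
    }
}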
    @Override
    public int getMaxIdx() {
        return fv.maxIdx();
    }
    public int getWeightVectorLength() {
        return super.getVectorLength();
    }
    @Override
    public void fillWeightVector(WeightVector w) {
        // Accumulate every stored feature vector into w, scaled by its alpha.
        for (Pair<double[], FeatureVector> p : al_fv_list) {
            double alpha = p.getFirst()[0];
            FeatureVector fv = p.getSecond();
            w.addSparseFeatureVector(fv, alpha);
        }
    }
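/*
 * The loop above performs the usual dual-to-primal accumulation w = sum_i alpha_i * x_i.
 * A minimal sketch with plain arrays; the data structures below are stand-ins,
 * not this library's API.
 */
class DualToPrimalSketch {
    public static void main(String[] args) {
        double[] w = new double[4];                   // dense weight vector
        double[] alphas = {0.5, -1.0};                // one alpha per stored feature vector
        int[][] svIdx = {{0, 2}, {1, 2}};             // sparse indices of each feature vector
        double[][] svVal = {{1.0, 1.0}, {2.0, 1.0}};  // matching values
        // w += alpha_i * x_i, the same accumulation as addSparseFeatureVector(fv, alpha)
        for (int i = 0; i < alphas.length; i++)
            for (int j = 0; j < svIdx[i].length; j++)
                w[svIdx[i][j]] += alphas[i] * svVal[i][j];
        System.out.println(java.util.Arrays.toString(w)); // [0.5, -2.0, -0.5, 0.0]
    }
}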
    @Override
    public int getMaxIdx() {
        // Largest feature index over all stored feature vectors.
        int max_idx = -1;
        for (Pair<Double, FeatureVector> p : alphafv_list) {
            FeatureVector fv = p.getSecond();
            int curidx = fv.maxIdx();
            if (curidx > max_idx)
                max_idx = curidx;
        }
        return max_idx;
    }
    /**
     * Return the dot product of another dense vector and this dense vector.
     * The computation runs over the shorter of the two lengths, so trailing
     * entries of the longer vector are treated as zeros.
     *
     * @param df the other dense vector
     * @return the dot product
     */
    public double dotProduct(DenseVector df) {
        double res = 0.0;
        int l = Math.min(u.length, df.getVectorLength());
        for (int i = 0; i < l; i++) {
            res += u[i] * df.u[i];
        }
        return res;
    }
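/*
 * Because the loop runs over the shorter length, trailing entries of the longer
 * vector contribute nothing. A small standalone sketch of that behavior
 * (plain arrays stand in for DenseVector):
 */
class DenseDotSketch {
    // Dot product truncated to the shorter of the two vectors, as in the method above.
    static double dot(double[] a, double[] b) {
        int len = Math.min(a.length, b.length);
        double res = 0.0;
        for (int i = 0; i < len; i++)
            res += a[i] * b[i];
        return res;
    }

    public static void main(String[] args) {
        double[] u = {1.0, 2.0};
        double[] v = {3.0, 4.0, 5.0};     // the trailing 5.0 is ignored
        System.out.println(dot(u, v));    // 1*3 + 2*4 = 11.0
    }
}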
    @Override
    public int getMaxIdx() {
        // Largest feature index over all stored feature vectors.
        int max_idx = -1;
        for (Pair<double[], FeatureVector> p : al_fv_list) {
            FeatureVector fv = p.getSecond();
            int curidx = fv.maxIdx();
            if (curidx > max_idx)
                max_idx = curidx;
        }
        return max_idx;
    }