/**
 * Return a new feature vector which is exactly the same as the original
 * one, except that every index is shifted by "gap".
 *
 * @param gap the offset added to each feature index
 * @return the shifted copy
 */
public FeatureVector copyWithShift(int gap) {
    // Clone the backing arrays so that shifting the copy cannot mutate this vector.
    FeatureVector res = new FeatureVector(this.idx.clone(), this.value.clone());
    for (int i = 0; i < res.idx.length; i++) {
        res.idx[i] += gap;
        assert res.idx[i] >= 0;
    }
    return res;
}
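A usage sketch (illustrative values, not from the original source; 1000 stands in for base_n_fea): getFeatureVector() below uses exactly this shift to map the shared base features into the index block owned by one output class.

// Usage sketch: with base_n_fea = 1000, output class 2 owns the index block [2000, 3000).
private static void copyWithShiftExample() {
    FeatureVector base = new FeatureVector(new int[] { 3, 17 }, new double[] { 1.0, 0.5 });
    FeatureVector shifted = base.copyWithShift(2 * 1000);
    // shifted now has indices { 2003, 2017 }; the values are unchanged.
}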
@Override
public FeatureVector getFeatureVector() {
    // Shift the shared base features into the index block owned by this output class.
    return input.base_fv.copyWithShift(output * input.base_n_fea);
}
@Override
public String toString() {
    return output + " " + input.base_fv.toString();
}
/**
 * Return a new feature vector for (a - b).
 *
 * @param a the minuend
 * @param b the subtrahend
 * @return a new sparse vector holding the element-wise difference
 */
public static FeatureVector minus(FeatureVector a, FeatureVector b) {
    List<FeatureItem> af = convert2SortedFeatureNodeArray(a);
    List<FeatureItem> bf = convert2SortedFeatureNodeArray(b);
    Pair<int[], double[]> p = minus(af, bf);
    return new FeatureVector(p.getFirst(), p.getSecond());
}
/**
 * Return a new feature vector for (a + b).
 *
 * @param a the first addend
 * @param b the second addend
 * @return a new sparse vector holding the element-wise sum
 */
public static FeatureVector plus(FeatureVector a, FeatureVector b) {
    List<FeatureItem> af = convert2SortedFeatureNodeArray(a);
    List<FeatureItem> bf = convert2SortedFeatureNodeArray(b);
    Pair<int[], double[]> p = plus(af, bf);
    return new FeatureVector(p.getFirst(), p.getSecond());
}
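A usage sketch for the two helpers above (illustrative values, not from the original source): both first normalize their operands into sorted index order via convert2SortedFeatureNodeArray before merging, so the inputs need not be pre-sorted.

private static void plusMinusExample() {
    FeatureVector a = new FeatureVector(new int[] { 1, 4 }, new double[] { 2.0, 1.0 });
    FeatureVector b = new FeatureVector(new int[] { 4, 7 }, new double[] { 3.0, 5.0 });
    FeatureVector sum = plus(a, b);   // indices { 1, 4, 7 }, values { 2.0, 4.0, 5.0 }
    FeatureVector diff = minus(a, b); // indices { 1, 4, 7 }, values { 2.0, -2.0, -5.0 }
}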
/**
 * Add a sparse vector into this dense vector in place:
 * <p>
 * w = w + alpha * fv
 *
 * @param fv    a sparse feature vector
 * @param alpha the scalar multiplier
 */
public synchronized void addSparseFeatureVector(FeatureVector fv, double alpha) {
    // Grow the dense array first if fv touches indices beyond the current capacity.
    if (this.isExtendable() && this.needAllocateSpace(fv)) {
        this.allocateSpace(fv.maxIdx());
    }
    int[] idx = fv.getIdx();
    double[] value = fv.getValue();
    for (int i = 0; i < idx.length; i++) {
        u[idx[i]] += alpha * value[i];
    }
}
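A minimal usage sketch (hypothetical method and parameter names): this is the O(nnz) update the solver below relies on when a dual variable changes by some delta.

// Usage sketch: fold a dual-variable change `delta` for a label y in {-1, +1}
// back into the dense weights, touching only the non-zero entries of fv.
private static void applyDualUpdate(WeightVector w, FeatureVector fv, double delta, double y) {
    w.addSparseFeatureVector(fv, delta * y); // w += (delta * y) * fv
}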
@Override
public void solveSubProblemAndUpdateW(L2SolverInfo si, WeightVector w) {
    double C = sC;
    double dot_product = w.dotProduct(fv);
    double xij_norm2 = fv.l2NormSqure();
    // NG is the negative gradient of the dual objective with respect to alpha.
    double NG = 1.0 - y * dot_product - (alpha / (2.0 * C));
    double PG = -NG;
    if (alpha == 0) {
        // At the boundary alpha = 0, only descent directions count (projected gradient).
        PG = Math.min(-NG, 0);
    }
    si.PGmax_new = Math.max(si.PGmax_new, PG);
    si.PGmin_new = Math.min(si.PGmin_new, PG);
    if (Math.abs(PG) > UPDATE_CONDITION) {
        double step = NG / (xij_norm2 + (1.0 / (2.0 * C)));
        // Make sure alpha stays non-negative.
        double new_alpha = Math.max(alpha + step, 0);
        w.addSparseFeatureVector(fv, (new_alpha - alpha) * y);
        alpha = new_alpha;
    }
}
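For reference, the step above appears to be the standard coordinate-wise update for the dual of the L2-loss (squared-hinge) SVM, in the style of Hsieh et al. (2008), where the 1/(2C) terms are the diagonal contributed by the squared hinge:

    grad_i = y_i * (w . x_i) - 1 + alpha_i / (2C)                  // the code's NG equals -grad_i
    alpha_i <- max(alpha_i - grad_i / (||x_i||^2 + 1/(2C)), 0)     // one-variable Newton step, projected

PG is the gradient projected onto the constraint alpha_i >= 0, and PGmax_new/PGmin_new accumulate its extremes so an outer loop can use the gap as a convergence test.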
public void sort() {
    List<FeatureItem> items = convert2SortedFeatureNodeArray(this);
    assert items.size() == this.idx.length;
    this.idx = new int[items.size()];
    this.value = new double[items.size()];
    for (int i = 0; i < items.size(); i++) {
        this.idx[i] = items.get(i).index;
        this.value[i] = items.get(i).value;
    }
}
}
FeatureVector fv = p.getSecond();
double dot_product = w.dotProduct(fv);
double xij_norm2 = fv.l2NormSqure();
return new FeatureVector(idx, values);
ri.fea_list.add(new FeatureVector(idx_list, value_list));
value_list[active_len - 1] = 1;
FeatureVector fv = new FeatureVector(idx_list, value_list);
MultiClassInstance mi = new MultiClassInstance(n_feature, n_class, fv);