/**
 * Gradient of the squared diagonal-residual loss with respect to W.
 * Builds resid = X . W (+ bias) - Y and writes, for each diagonal entry t,
 * the column X.row(t) * resid[t][t] into the returned matrix.
 *
 * @param W the current parameter matrix
 * @return a matrix of the same shape as W holding the gradient
 */
@Override
public Matrix gradient(Matrix W) {
	final Matrix ret = W.newInstance();
	// resid = X . W (+ bias) - Y; only its diagonal is read below
	final Matrix resid = MatlibMatrixUtils.dotProduct(X, W);
	if (this.bias != null) {
		MatlibMatrixUtils.plusInplace(resid, this.bias);
	}
	MatlibMatrixUtils.minusInplace(resid, Y);
	for (int t = 0; t < resid.columnCount(); t++) {
		// BUG FIX: Vector.times(...) returns the scaled vector (see its use as an
		// expression elsewhere in this codebase); the original discarded the result,
		// so the unscaled row was written into the gradient.
		final Vector row = this.X.row(t).times(resid.get(t, t));
		MatlibMatrixUtils.setSubMatrixCol(ret, 0, t, row);
	}
	return ret;
}
@Override
/**
 * Compute the element-wise difference of two matrices, leaving both inputs
 * untouched.
 *
 * @param A the matrix subtracted from
 * @param B the matrix to subtract
 * @return a new matrix holding A - B
 */
public static Matrix minus(Matrix A, Matrix B) {
	final Matrix difference = copy(A);
	minusInplace(difference, B);
	return difference;
}
/**
 * Gradient-descent step on the bias: newbias = bias - biasLossWeight * biasGrad.
 * NOTE(review): scaleInplace mutates the caller's biasGrad argument — confirm
 * callers do not reuse the gradient matrix after this call.
 *
 * @param biasGrad gradient of the loss w.r.t. the bias (mutated in place here)
 * @param biasLossWeight step size applied to the gradient
 * @return the updated bias matrix (a new instance; this.bias is not modified)
 */
protected Matrix updateBias(Matrix biasGrad, double biasLossWeight) { final Matrix newbias = MatlibMatrixUtils.minus( this.bias, MatlibMatrixUtils.scaleInplace( biasGrad, biasLossWeight ) ); return newbias; } protected Matrix updateW(Matrix currentW, double wLossWeighted, double weightedLambda) {
/**
 * Predict by projecting the input through both factors: diag(u^T . x^T . w),
 * with the bias added first when bias mode is enabled.
 *
 * @param x the input matrix
 * @return the predictions as a diagonal matrix
 */
@Override
public Matrix predict(Matrix x) {
	final Matrix xt = MatlibMatrixUtils.transpose(x);
	final Matrix projected = MatlibMatrixUtils.dotProduct(
			MatlibMatrixUtils.dotProduct(MatlibMatrixUtils.transpose(u), xt), this.w);
	if (this.biasMode) {
		MatlibMatrixUtils.plusInplace(projected, this.bias);
	}
	return new DiagonalMatrix(projected);
}
}
// Fragment of a larger solver method (start/end not visible here): builds the
// regularised system P = K + C * Kbi . Kbi^T, inverts it via a Jama round-trip,
// and applies a Newton-style correction to beta.
// NOTE(review): scaleInplace/plusInplace/minusInplace all mutate their first
// argument (P and beta), so statement order below is significant.
// NOTE(review): forming the explicit inverse of P is O(n^3) and numerically
// weaker than solving the linear system directly — confirm this is acceptable.
Matrix Kbi = MatlibMatrixUtils.subMatrix(this.K, 0, this.K.rowCount(),Icols); MatlibMatrixUtils.minusInplace(g, Kbi.mult(beta)); // g = y_I - K_BI . itB g = MatlibMatrixUtils.minus(v,g); // g = K . itB - K_BI (y_I - o_I) MatlibMatrixUtils.dotProductTranspose(Kbi, Kbi, P); // P = Kbi . Kbi^T MatlibMatrixUtils.scaleInplace(P, C); // P = C * Kbi . Kbi^T MatlibMatrixUtils.plusInplace(P, K); // P = K + C * Kbi . Kbi^T Matrix Pinv = MatlibMatrixUtils.fromJama(MatlibMatrixUtils.toJama(P).inverse()); MatlibMatrixUtils.minusInplace(beta, Pinv.mult(g)); return Icols;
// Fragments of a bilinear online-learner update (enclosing method not visible;
// braces do not balance within this excerpt). Scales w/u/bias by a decay
// weighting, forms Dprime = u^T . X^T (or a fake single-value u before any data
// is seen), then measures the Frobenius-norm change of w, u and bias to drive a
// convergence ratio.
// NOTE(review): the statement "X.transpose().times(neww);" discards its result —
// dead computation, presumably leftover from a refactor; confirm and remove.
// NOTE(review): ratioB is only updated when totalbias != 0 — confirm a zero bias
// norm is intended to leave the previous ratio in place.
MatlibMatrixUtils.scaleInplace(this.w,weighting); MatlibMatrixUtils.scaleInplace(this.u,weighting); if(this.biasMode){ MatlibMatrixUtils.scaleInplace(this.bias,weighting); this.nodataseen = false; Matrix fakeut = new SparseSingleValueInitStrat(1).init(this.u.columnCount(),this.u.rowCount()); Dprime = MatlibMatrixUtils.dotProductTranspose(fakeut, X); // i.e. fakeut . X^T } else { Dprime = MatlibMatrixUtils.dotProductTransposeTranspose(u, X); // i.e. u^T . X^T final Matrix Vt = MatlibMatrixUtils.transposeDotProduct(neww,X); // i.e. (X^T.neww)^T X.transpose().times(neww); final double sumchangew = MatlibMatrixUtils.normF(MatlibMatrixUtils.minus(neww, this.w)); final double totalw = MatlibMatrixUtils.normF(this.w); final double sumchangeu = MatlibMatrixUtils.normF(MatlibMatrixUtils.minus(newu, this.u)); final double totalu = MatlibMatrixUtils.normF(this.u); double totalbias = 0; if(this.biasMode){ Matrix mult = MatlibMatrixUtils.dotProductTransposeTranspose(newu, X); mult = MatlibMatrixUtils.dotProduct(mult, neww); MatlibMatrixUtils.plusInplace(mult, bias); final double sumchangebias = MatlibMatrixUtils.normF(MatlibMatrixUtils.minus(newbias, bias)); totalbias = MatlibMatrixUtils.normF(this.bias); if(totalbias!=0) ratioB = (sumchangebias/totalbias) ; this.bias = newbias;
private void updateKinv(Vector d_optimal, double delta) { Matrix newKinv = null; // We're updating Kinv by calculating: [ Kinv 0; 0... 0] + (1/delta) [d -1]' . [d -1] // construct the column vector matrix [d -1]' Matrix expandDMat = DenseMatrix.dense(d_optimal.size() + 1, 1); MatlibMatrixUtils.setSubVector(expandDMat.column(0), 0, d_optimal); expandDMat.column(0).put(d_optimal.size(), -1); // construct a new, expanded Kinv matrix newKinv = new DenseMatrix(Kinv.rowCount()+1, Kinv.columnCount() + 1); MatlibMatrixUtils.setSubMatrix(newKinv, 0, 0, Kinv); // construct [d -1]' [d -1] Matrix expandDMult = newKinv.newInstance(); MatlibMatrixUtils.dotProductTranspose(expandDMat, expandDMat, expandDMult); // scale the new matrix by 1/delta MatlibMatrixUtils.scaleInplace(expandDMult, 1/delta); // add it to the new Kinv MatlibMatrixUtils.plusInplace(newKinv, expandDMult); this.Kinv = newKinv; }
/**
 * Micro-benchmark: times ten sparse dot-product-transpose multiplications of a
 * dense-ish 4x1118 matrix against a large random sparse matrix, printing the
 * running mean duration after each iteration.
 *
 * @param args unused
 */
public static void main(String[] args) {
	final SparseMatrix a = SparseMatrix.sparse(4, 1118);
	MatlibMatrixUtils.plusInplace(a, 1);
	final SparseMatrix xtrow = MatlibMatrixUtils
			.transpose(SparseMatrix.random(1118, 22917, 1 - 0.9998818947086253));
	System.out.println("xtrow sparsity: " + MatlibMatrixUtils.sparsity(xtrow));
	final MeanVector mv = new MeanVector();
	System.out.println("doing: a . xtrow");
	for (int run = 0; run < 10; run++) {
		final Timer timer = Timer.timer();
		MatlibMatrixUtils.dotProductTranspose(a, xtrow);
		mv.update(new double[] { timer.duration() });
		System.out.println("time: " + mv.vec()[0]);
	}
}
}
/**
 * Evaluate the squared loss over the diagonal residuals of X . W (+ bias) - Y,
 * logging per-sample diagnostics at debug level.
 *
 * @param W the parameter matrix; when {@code null} the raw X is used as the
 *          prediction
 * @return the sum of squared diagonal residuals
 */
@Override
public double eval(Matrix W) {
	Matrix resid = null;
	if (W == null) {
		// BUG FIX: copy X rather than aliasing it — the in-place bias/Y updates
		// below would otherwise destructively mutate the field X.
		resid = MatlibMatrixUtils.copy(X);
	} else {
		resid = MatlibMatrixUtils.dotProduct(X, W);
	}
	// BUG FIX: snapshot the prediction BEFORE the bias is added; the original
	// copied X here, so the "v(no bias)" debug value logged the raw input
	// rather than the bias-free prediction.
	final Matrix vnobias = MatlibMatrixUtils.copy(resid);
	if (this.bias != null) {
		MatlibMatrixUtils.plusInplace(resid, bias);
	}
	final Matrix v = MatlibMatrixUtils.copy(resid); // prediction including bias
	MatlibMatrixUtils.minusInplace(resid, Y); // resid = prediction - Y
	double retval = 0;
	for (int t = 0; t < resid.columnCount(); t++) {
		final double loss = resid.get(t, t);
		retval += loss * loss;
		logger.debug(String.format(
				"yr=%d,y=%3.2f,v=%3.2f,v(no bias)=%2.5f,error=%2.5f,serror=%2.5f",
				t, Y.get(t, t), v.get(t, t), vnobias.get(t, t), loss, loss * loss));
	}
	return retval;
}
@Override
// Fragment of a loop accumulating per-answer outer products into a laplacian
// term (enclosing loop and braces not fully visible in this excerpt).
// NOTE(review): "if (ujujSum == null) { MatlibMatrixUtils.plusInplace(ujujSum, ujuj);"
// calls plusInplace on a null accumulator — this is a guaranteed NPE; the null
// branch should presumably initialise ujujSum (e.g. to a copy of ujuj) and the
// accumulation belong to the non-null branch. Confirm against the full method.
// NOTE(review): scaleInplace mutates ujujSum itself, so lambda is re-applied to
// the whole running sum each iteration — verify that compounding is intended.
continue; final Matrix uj = new DenseMatrix(answers.get(j).secondObject()); final SparseMatrix ujuj = MatlibMatrixUtils.dotProductTranspose(uj, uj, new SparseMatrix(uj.rowCount(), uj.rowCount())); if (ujujSum == null) { MatlibMatrixUtils.plusInplace(ujujSum, ujuj); MatlibMatrixUtils.plusInplace(laplacian, MatlibMatrixUtils.scaleInplace(ujujSum, conf.lambda));
/**
 * Compute a regularised laplacian: I + (degree - adj).
 * NOTE(review): the standard graph Laplacian is D - A; the added identity term
 * here looks like deliberate regularisation — confirm against the interface
 * contract this overrides.
 * NOTE(review): minusInplace mutates the caller's degree matrix and plusInplace
 * mutates the freshly-built ones matrix (which becomes the return value) —
 * callers must not reuse degree afterwards.
 *
 * @param adj the adjacency matrix
 * @param degree the degree matrix (mutated in place)
 * @return I + (degree - adj)
 */
@Override public SparseMatrix laplacian(SparseMatrix adj, DiagonalMatrix degree) { SparseMatrix ret = MatlibMatrixUtils.plusInplace( DiagonalMatrix.ones(degree.rowCount()), MatlibMatrixUtils.minusInplace( degree, adj ) ); return ret; } }
/**
 * Normalised-cut style objective for a threshold x[0]: builds an indicator-based
 * vector y and returns (y' (D - W) y) / (y' D y).
 * NOTE(review): minusInplace(D, W) mutates the field D in place, so (a) the
 * denominator computed below actually uses D - W rather than the original D,
 * and (b) repeated calls to value() keep subtracting W from D — this looks like
 * a bug; confirm whether a non-destructive minus(D, W) was intended.
 *
 * @param x single-element array holding the candidate threshold
 * @return the ratio nom/denom of the two quadratic forms
 * @throws FunctionEvaluationException per the optimiser's contract
 */
@Override public double value(double[] x) throws FunctionEvaluationException { ObjectDoublePair<double[]> ind = indicator(vec,x[0]); double sumd = MatlibMatrixUtils.sum(D); double k = ind.second / sumd; double b = k / (1-k); double[][] y = new double[1][vec.length]; for (int i = 0; i < vec.length; i++) { y[0][i] = ind.first[i] + 1 - b * (1 - ind.first[i]); } SparseMatrix dmw = MatlibMatrixUtils.minusInplace(D, W); Vector yv = Vector.wrap(y[0]); double nom = new DenseMatrix(y).mult(dmw.transposeMultiply(yv)).get(0); // y' * ( (D-W) * y) double denom = new DenseMatrix(y).mult(D.transposeMultiply(yv)).get(0); return nom/denom; }
/**
 * Create a dense matrix of the requested shape with every entry set to val.
 *
 * @param rows the number of rows
 * @param cols the number of columns
 * @return the filled matrix
 */
@Override
public Matrix init(int rows, int cols) {
	final Matrix filled = DenseMatrix.dense(rows, cols);
	MatlibMatrixUtils.plusInplace(filled, val);
	return filled;
}
/**
 * Column-wise gradient: for each task (column) i of Y, points the inner loss f
 * at that task's targets/bias, takes the gradient w.r.t. W's column i, and
 * writes it into column i of the sparse result.
 * NOTE(review): allRowsY/allRowsW are rowCount()-1, yet other code in this
 * project passes rowCount() as an exclusive end index to subMatrix — if this
 * overload is also end-exclusive, the last row of every column is silently
 * dropped here. Confirm the subMatrix end-index convention.
 *
 * @param W the parameter matrix, one column per task
 * @return a sparse matrix of per-column gradients, same shape as W
 */
@Override public Matrix gradient(Matrix W) { SparseMatrix ret = SparseMatrix.sparse(W.rowCount(), W.columnCount()); int allRowsY = Y.rowCount()-1; int allRowsW = W.rowCount()-1; for (int i = 0; i < Y.columnCount(); i++) { this.f.setY(MatlibMatrixUtils.subMatrix(Y, 0, allRowsY, i, i)); if(bias!=null) this.f.setBias(MatlibMatrixUtils.subMatrix(bias, 0, allRowsY, i, i)); Matrix w = MatlibMatrixUtils.subMatrix(W, 0, allRowsW, i, i); Matrix submatrix = f.gradient(w); MatlibMatrixUtils.setSubMatrix(ret, 0, i, submatrix); } return ret; }
/**
 * Compute Y = A . B^T, allocating the result matrix of shape
 * (A.rowCount() x B.rowCount()) before delegating to the in-place overload.
 *
 * @param A the left operand
 * @param B the right operand (used transposed)
 * @return the newly-allocated product Y
 */
public static Matrix dotProductTranspose(Matrix A, Matrix B) {
	final Matrix product = A.newInstance(A.rowCount(), B.rowCount());
	return dotProductTranspose(A, B, product);
}
/**
 * Row-wise soft-thresholding proximal operator (group sparsity): rows of W
 * whose L2 norm is at most lambda are left at zero in the result; all other
 * rows are shrunk towards zero by (norm - lambda) / norm.
 *
 * @param W the matrix to shrink
 * @param lambda the threshold
 * @return the shrunk matrix, same shape as W
 */
@Override
public Matrix prox(Matrix W, double lambda) {
	final Matrix shrunk = W.newInstance();
	final int nrows = W.rowCount();
	for (int r = 0; r < nrows; r++) {
		final Vector row = W.row(r);
		final double norm = MatlibMatrixUtils.norm2(row);
		if (norm > lambda) {
			final double factor = (norm - lambda) / norm;
			MatlibMatrixUtils.setSubMatrixRow(shrunk, r, 0, row.times(factor));
		}
	}
	return shrunk;
}
public WindowedSparseMatrix(SparseMatrix sm, int nextwindow, TIntSet inactive) { TIntArrayList active = new TIntArrayList(nextwindow); indexCorrection = new HashMap<Integer, Integer>(); for (int i = 0; i < nextwindow; i++) { if(!inactive.contains(i)){ indexCorrection.put(active.size(), i); active.add(i); } } window = MatlibMatrixUtils.subMatrix(sm, active, active); }
/**
 * Deep-copy this learner: a fresh instance built from the same parameters, with
 * u, w and (in bias mode) the bias matrices copied.
 *
 * @return an independent copy of this learner
 */
@Override
public MatlibBilinearSparseOnlineLearner clone() {
	final MatlibBilinearSparseOnlineLearner copy =
			new MatlibBilinearSparseOnlineLearner(this.getParams());
	copy.u = MatlibMatrixUtils.copy(this.u);
	copy.w = MatlibMatrixUtils.copy(this.w);
	if (this.biasMode) {
		copy.bias = MatlibMatrixUtils.copy(this.bias);
	}
	return copy;
}
/**
/**
 * Create a copy of m that is one row taller, with the given row appended at the
 * bottom. (Fixed doc: the original javadoc wrongly said "added column".)
 *
 * @param m the matrix to extend
 * @param row the row to append
 * @return a new matrix: m with the added row
 */
public static <T extends Matrix> T appendRow(T m, Vector row) { @SuppressWarnings("unchecked") final T ret = (T) m.newInstance(m.rowCount() + 1, m.columnCount()); setSubMatrixRow(ret, m.rowCount(), 0, row); return ret; }
/**
 * Create a copy of m that is one column wider, with the given column appended
 * on the right.
 *
 * @param m the matrix to extend
 * @param col the column to append
 * @return a new matrix: m with the added column
 */
public static <T extends Matrix> T appendColumn(T m, Vector col) {
	@SuppressWarnings("unchecked")
	final T widened = (T) m.newInstance(m.rowCount(), m.columnCount() + 1);
	setSubMatrixCol(widened, 0, m.columnCount(), col);
	return widened;
}