@Override
public Matrix init(int rows, int cols) {
    // Build a dense rows x cols matrix and shift every entry by the configured value.
    final Matrix filled = DenseMatrix.dense(rows, cols);
    MatlibMatrixUtils.plusInplace(filled, val);
    return filled;
}
@Override
public Matrix init(int rows, int cols) {
    // Allocate an empty sparse matrix, then add the configured constant in place.
    final Matrix base = SparseMatrix.sparse(rows, cols);
    return MatlibMatrixUtils.plusInplace(base, val);
}
@Override
public SparseMatrix laplacian(SparseMatrix adj, DiagonalMatrix degree) {
    // Regularised laplacian: I + (D - A).
    // NOTE: minusInplace mutates `degree`; the caller's degree matrix is clobbered.
    final SparseMatrix result = MatlibMatrixUtils.plusInplace(
            DiagonalMatrix.ones(degree.rowCount()),
            MatlibMatrixUtils.minusInplace(degree, adj));
    return result;
}
}
@Override
public Matrix predict(Matrix x) {
    // y = diag(u' . x' . w), optionally shifted in place by the learnt bias.
    final Matrix xTransposed = MatlibMatrixUtils.transpose(x);
    final Matrix uTransposed = MatlibMatrixUtils.transpose(u);
    final Matrix projected =
            MatlibMatrixUtils.dotProduct(MatlibMatrixUtils.dotProduct(uTransposed, xTransposed), this.w);
    if (this.biasMode) {
        MatlibMatrixUtils.plusInplace(projected, this.bias);
    }
    return new DiagonalMatrix(projected);
}
}
// Fold this iteration's outer product into the running sum, then add the scaled sum
// to the laplacian: laplacian += lambda * ujujSum.  NOTE: scaleInplace mutates
// ujujSum itself, so the accumulator is permanently scaled here — presumably
// intentional for this update scheme; verify against the enclosing loop.
MatlibMatrixUtils.plusInplace(ujujSum, ujuj); MatlibMatrixUtils.plusInplace(laplacian, MatlibMatrixUtils.scaleInplace(ujujSum, conf.lambda));
public static void main(String[] args) {
    // Micro-benchmark: time dotProductTranspose of a small all-ones sparse matrix
    // against the transpose of a large, very sparse random matrix.
    final SparseMatrix a = SparseMatrix.sparse(4, 1118);
    MatlibMatrixUtils.plusInplace(a, 1);
    final SparseMatrix xtrow = MatlibMatrixUtils.transpose(
            SparseMatrix.random(1118, 22917, 1 - 0.9998818947086253));
    System.out.println("xtrow sparsity: " + MatlibMatrixUtils.sparsity(xtrow));
    final MeanVector mv = new MeanVector();
    System.out.println("doing: a . xtrow");
    for (int run = 0; run < 10; run++) {
        final Timer timer = Timer.timer();
        MatlibMatrixUtils.dotProductTranspose(a, xtrow);
        mv.update(new double[] { timer.duration() });
        // running mean of the timings so far
        System.out.println("time: " + mv.vec()[0]);
    }
}
}
public static void main(String[] args) {
    // Benchmark driver: repeatedly multiply a tiny dense-ish sparse matrix by a
    // large sparse one and report the mean wall-clock duration.
    final SparseMatrix ones = SparseMatrix.sparse(4, 1118);
    MatlibMatrixUtils.plusInplace(ones, 1);
    final SparseMatrix big = SparseMatrix.random(1118, 22917, 1 - 0.9998818947086253);
    final SparseMatrix xtrow = MatlibMatrixUtils.transpose(big);
    System.out.println("xtrow sparsity: " + MatlibMatrixUtils.sparsity(xtrow));
    final MeanVector meanTime = new MeanVector();
    System.out.println("doing: a . xtrow");
    int iteration = 0;
    while (iteration < 10) {
        final Timer stopwatch = Timer.timer();
        MatlibMatrixUtils.dotProductTranspose(ones, xtrow);
        meanTime.update(new double[] { stopwatch.duration() });
        System.out.println("time: " + meanTime.vec()[0]);
        iteration++;
    }
}
}
/**
 * Sum-of-squared-errors of the diagonal residual (X.W + bias) - Y.
 * The returned loss is unchanged by the fix below; only the debug log is affected.
 */
@Override
public double eval(Matrix W) {
    Matrix resid = null;
    if (W == null) {
        // No weights: the prediction is the input itself.
        resid = X;
    } else {
        resid = MatlibMatrixUtils.dotProduct(X, W);
    }
    // FIX: snapshot the prediction BEFORE the bias is applied so the debug
    // column "v(no bias)" really is the un-biased prediction. Previously this
    // copied X, which is only correct in the W == null branch.
    Matrix vnobias = MatlibMatrixUtils.copy(resid);
    if (this.bias != null) {
        MatlibMatrixUtils.plusInplace(resid, bias);
    }
    // Biased prediction, kept for logging before resid is turned into the residual.
    Matrix v = MatlibMatrixUtils.copy(resid);
    MatlibMatrixUtils.minusInplace(resid, Y);
    double retval = 0;
    for (int t = 0; t < resid.columnCount(); t++) {
        // Only the diagonal entries participate in the loss.
        final double loss = resid.get(t, t);
        retval += loss * loss;
        logger.debug(String.format(
                "yr=%d,y=%3.2f,v=%3.2f,v(no bias)=%2.5f,error=%2.5f,serror=%2.5f",
                t, Y.get(t, t), v.get(t, t), vnobias.get(t, t), loss, loss * loss));
    }
    return retval;
}
@Override
/**
 * Add two matrices, storing the results in the first:
 * <code>A = A + B</code>
 *
 * @param A
 *            first matrix
 * @param B
 *            matrix to add
 * @return A first matrix
 */
@SuppressWarnings("unchecked")
public static <T extends Matrix> T plusInplace(T A, Matrix B) {
    // Sparse matrices have a dedicated, non-densifying implementation.
    if (A instanceof SparseMatrix)
        return (T) plusInplace((SparseMatrix) A, B);
    for (int i = 0; i < A.rowCount(); i++) {
        final Vector brow = B.row(i);
        // FIX: hoist the row lookup — the original called A.row(i) once per
        // column, doing a redundant row fetch in the inner loop.
        final Vector arow = A.row(i);
        for (int j = 0; j < A.columnCount(); j++) {
            arow.add(j, brow.get(j));
        }
    }
    return A;
}
// Gradient of the squared-error loss w.r.t. W: column t of the result is
// row t of X scaled by the t-th diagonal residual of (X.W + bias - Y).
@Override public Matrix gradient(Matrix W) { Matrix ret = W.newInstance(); /* residual = X.W (+ bias, if present) - Y */ Matrix resid = MatlibMatrixUtils.dotProduct(X, W); if(this.bias!=null) { MatlibMatrixUtils.plusInplace(resid, this.bias); } MatlibMatrixUtils.minusInplace(resid, Y); for (int t = 0; t < resid.columnCount(); t++) { Vector row = this.X.row(t); /* NOTE(review): assumes Vector.times(double) scales `row` in place; if it returns a new vector instead, this scaling is silently discarded — verify against the Vector API */ row.times(resid.get(t, t)); MatlibMatrixUtils.setSubMatrixCol(ret, 0, t, row); } return ret; } @Override
/**
 * Update the cached inverse-kernel matrix after a support is added:
 * <code>Kinv' = [Kinv 0; 0... 0] + (1/delta) [d -1]' . [d -1]</code>,
 * or, for the very first support, <code>Kinv' = 1 / k(x, x)</code>.
 *
 * @param d_optimal the optimal d vector for the new support
 * @param delta the scaling term
 */
private void updateKinv(Vector d_optimal, double delta) {
    Matrix newKinv = null;
    if (this.supports.size() > 1) {
        // Build the column-vector matrix [d -1]'.
        // FIX: removed the unused `expandD` dense-vector local, which was
        // allocated only to read its size.
        Matrix expandDMat = DenseMatrix.dense(d_optimal.size() + 1, 1);
        MatlibMatrixUtils.setSubVector(expandDMat.column(0), 0, d_optimal);
        expandDMat.column(0).put(d_optimal.size(), -1);
        // [Kinv 0; 0... 0]: the old inverse padded with a zero row/column.
        newKinv = new DenseMatrix(Kinv.rowCount() + 1, Kinv.columnCount() + 1);
        MatlibMatrixUtils.setSubMatrix(newKinv, 0, 0, Kinv);
        // (1/delta) * [d -1]' . [d -1]
        Matrix expandDMult = newKinv.newInstance();
        MatlibMatrixUtils.dotProductTranspose(expandDMat, expandDMat, expandDMult);
        MatlibMatrixUtils.scaleInplace(expandDMult, 1 / delta);
        MatlibMatrixUtils.plusInplace(newKinv, expandDMult);
    } else {
        // First support: the inverse of a 1x1 kernel matrix.
        double[] only = this.supports.get(0);
        newKinv = DenseMatrix.dense(1, 1);
        newKinv.put(0, 0, 1 / this.kernel.apply(IndependentPair.pair(only, only)));
    }
    this.Kinv = newKinv;
}
// Prediction for the updated factors: mult = (newu' . X') . neww, then the bias
// is added in place.  (dotProductTransposeTranspose presumably multiplies both
// arguments transposed — confirm against the MatlibMatrixUtils API.)
Matrix mult = MatlibMatrixUtils.dotProductTransposeTranspose(newu, X); mult = MatlibMatrixUtils.dotProduct(mult, neww); MatlibMatrixUtils.plusInplace(mult, bias);
private void updateKinv(Vector d_optimal, double delta) { Matrix newKinv = null; // We're updating Kinv by calculating: [ Kinv 0; 0... 0] + (1/delta) [d -1]' . [d -1] // construct the column vector matrix [d -1]' Matrix expandDMat = DenseMatrix.dense(d_optimal.size() + 1, 1); MatlibMatrixUtils.setSubVector(expandDMat.column(0), 0, d_optimal); expandDMat.column(0).put(d_optimal.size(), -1); // construct a new, expanded Kinv matrix newKinv = new DenseMatrix(Kinv.rowCount()+1, Kinv.columnCount() + 1); MatlibMatrixUtils.setSubMatrix(newKinv, 0, 0, Kinv); // construct [d -1]' [d -1] Matrix expandDMult = newKinv.newInstance(); MatlibMatrixUtils.dotProductTranspose(expandDMat, expandDMat, expandDMult); // scale the new matrix by 1/delta MatlibMatrixUtils.scaleInplace(expandDMult, 1/delta); // add it to the new Kinv MatlibMatrixUtils.plusInplace(newKinv, expandDMult); this.Kinv = newKinv; }
// Invert P by round-tripping through Jama (convert, invert, convert back).
MatlibMatrixUtils.plusInplace(P, K); // P = K + C * Kbi . Kbi^T Matrix Pinv = MatlibMatrixUtils.fromJama(MatlibMatrixUtils.toJama(P).inverse());