@Override
public Matrix gradient(Matrix W) {
	// Gradient of the loss w.r.t. W, built column by column from the
	// residual of the current prediction. The containsInfinity checks
	// fail fast on numeric blow-up in X, W and the residual.
	final Matrix ret = W.clone();
	if (CFMatrixUtils.containsInfinity(X)) {
		throw new RuntimeException("Infinity detected in X");
	}
	if (CFMatrixUtils.containsInfinity(W)) {
		throw new RuntimeException("Infinity detected in W");
	}
	// residual = X . W (+ bias, when present) - Y
	final Matrix resid = CFMatrixUtils.fastdot(X, W);
	if (CFMatrixUtils.containsInfinity(resid)) {
		// NOTE: the original recomputed fastdot(X, W) here and discarded
		// the result (a debugging leftover); that dead call is removed.
		throw new RuntimeException("Infinity detected in X . W");
	}
	if (this.bias != null) {
		resid.plusEquals(this.bias);
	}
	CFMatrixUtils.fastminusEquals(resid, Y);
	if (CFMatrixUtils.containsInfinity(resid)) {
		throw new RuntimeException("Infinity detected in residual");
	}
	// Column t of the gradient is row t of X scaled by the t-th diagonal
	// element of the residual.
	for (int t = 0; t < resid.getNumColumns(); t++) {
		final Vector xcol = this.X.getRow(t).scale(resid.getElement(t, t)).clone();
		CFMatrixUtils.fastsetcol(ret, t, xcol);
	}
	return ret;
}
@Override
/**
 * Multiply two matrices, dispatching to the specialised sparse
 * implementation when the operand layouts allow it.
 *
 * @param a the left operand
 * @param b the right operand
 * @return the product {@code a * b}
 */
public static Matrix fastdot(Matrix a, Matrix b) {
	final boolean sparsePair = a instanceof SparseRowMatrix && b instanceof SparseColumnMatrix;
	if (!sparsePair) {
		// No fast path available; fall back to the generic multiply.
		return a.times(b);
	}
	return fastsparsedot((SparseRowMatrix) a, (SparseColumnMatrix) b);
}
@Override
public Matrix init(int rows, int cols) {
	// Dense rows x cols matrix with every cell offset by the configured value.
	final Matrix zeros = DenseMatrixFactoryMTJ.INSTANCE.createMatrix(rows, cols);
	return CFMatrixUtils.plusInplace(zeros, val);
}
/**
 * Perform one proximal-gradient update of the U (user) parameters with a
 * backtracking line search over the step size.
 *
 * @param xtrows the (transposed) data matrix in sparse-row layout
 * @param neww the current W parameters
 * @param uLossWeight initial inverse step size for the line search
 * @param uWeightedLambda regularisation weight for the proximal operator
 * @return the updated U matrix
 */
protected Matrix updateU(Matrix xtrows, Matrix neww, double uLossWeight, double uWeightedLambda) {
	// Vprime is nusers x tasks
	final Matrix Vprime = CFMatrixUtils.fastdot(xtrows,neww);
	// ... so the loss function's X is (tasks x nusers)
	Matrix Vt = CFMatrixUtils.asSparseRow(Vprime.transpose());
	if(zStandardise){
		// Centre each row of Vt on its mean before computing the gradient.
		Vector rowMean = CFMatrixUtils.rowMean(Vt);
		CFMatrixUtils.minusEqualsCol(Vt,rowMean);
	}
	loss.setX(Vt);
	final Matrix gradU = loss.gradient(this.u);
	logger.debug("Abs u_grad: " + CFMatrixUtils.absSum(gradU));
	// CFMatrixUtils.timesInplace(gradU,uLossWeight);
	// newu = regul.prox(newu, uWeightedLambda);
	Matrix newu = null;
	// Backtracking line search: shrink the step (grow uLossWeight by
	// eta_gamma) until the proximal step passes the backtracking test.
	// NOTE(review): if the 1000-iteration cap is hit, the last UNtested
	// newu is returned -- confirm that is intended.
	for (int i = 0; i < 1000; i++) {
		logger.debug("... Line searching etau = " + uLossWeight);
		newu = this.u.clone();
		Matrix scaledGradW = gradU.scale(1./uLossWeight);
		newu = CFMatrixUtils.fastminus(newu,scaledGradW);
		newu = regul.prox(newu, uWeightedLambda/uLossWeight);
		if(loss.test_backtrack(this.u, gradU, newu, uLossWeight)) break;
		uLossWeight *= eta_gamma;
	}
	return newu;
}
private double lambdat(int iter, double lambda) {
public static void main(String[] args) {
	// Micro-benchmark comparing fastsparsedot against the generic
	// Matrix.times on sparse operands.
	// NOTE(review): the method continues beyond this excerpt; braces are
	// completed in code not visible here.
	SparseMatrix a = SparseMatrixFactoryMTJ.INSTANCE.copyMatrix(SparseMatrixFactoryMTJ.INSTANCE.createWrapper(new FlexCompRowMatrix(4, 1118)));
	CFMatrixUtils.plusInplace(a, 1);
	// Random sparse operands built with the same seed so the row and
	// column variants hold identical data at very low density.
	SparseRowMatrix xtrow = CFMatrixUtils.randomSparseRow(1118,22917,0d,1d,1 - 0.9998818947086253, new Random(1));
	SparseColumnMatrix xtcol = CFMatrixUtils.randomSparseCol(1118,22917,0d,1d,1 - 0.9998818947086253, new Random(1));
	System.out.println("xtrow sparsity: " + CFMatrixUtils.sparsity(xtrow));
	System.out.println("xtcol sparsity: " + CFMatrixUtils.sparsity(xtcol));
	// Sanity check: the fast path must agree exactly (tolerance 0).
	System.out.println("Equal: " + CFMatrixUtils.fastsparsedot(a,xtcol).equals(a.times(xtcol), 0));
	MeanVector mv = new MeanVector();
	System.out.println("doing: a . xtcol");
	// Time 10 runs, printing the running mean duration each time.
	for (int i = 0; i < 10; i++) {
		Timer t = Timer.timer();
		CFMatrixUtils.fastsparsedot(a,xtcol);
		mv.update(new double[]{t.duration()});
		System.out.println("time: " + mv.vec()[0]);
/**
 * Compute, per task, the minimum and maximum entry of the corresponding
 * column of the learner's U (user) matrix.
 *
 * @return task name mapped to a (min, max) pair; empty if no learner set
 */
private Map<String, Pair<Double>> minMaxUsers() {
	final Map<String, Pair<Double>> ret = new HashMap<String, Pair<Double>>();
	if (this.learner == null) return ret;
	final BiMap<String, Integer> depvals = this.learner.getDependantValues();
	final BilinearSparseOnlineLearner bilearner = this.learner.getBilinearLearner();
	for (final String task : depvals.keySet()) {
		// Use the map already fetched above instead of re-querying the
		// learner on every iteration (the original called
		// this.learner.getDependantValues() again here).
		final Integer taskCol = depvals.get(task);
		ret.put(
				task,
				new Pair<Double>(
						CFMatrixUtils.min(bilearner.getU().getColumn(taskCol)),
						CFMatrixUtils.max(bilearner.getU().getColumn(taskCol))
				)
		);
	}
	return ret;
}
// Persist the learner for this (fold, iteration, dampening) configuration,
// then log sparsity/range diagnostics for the W and U matrices.
// NOTE(review): fragment of a larger method -- foldNumber, j, dampening,
// w, u and bias are declared in code not visible here.
File learnerOut = new File(FOLD_ROOT(foldNumber),String.format("learner_%d_dampening=%2.5f",j,dampening));
IOUtils.writeBinary(learnerOut, learner);
logger.debug("W row sparcity: " + CFMatrixUtils.rowSparsity(w));
logger.debug(String.format("W range: %2.5f -> %2.5f",CFMatrixUtils.min(w), CFMatrixUtils.max(w)));
logger.debug("U row sparcity: " + CFMatrixUtils.rowSparsity(u));
logger.debug(String.format("U range: %2.5f -> %2.5f",CFMatrixUtils.min(u), CFMatrixUtils.max(u)));
// NOTE(review): auto-unboxing a missing (null) BIAS parameter would NPE on
// the if below -- confirm the parameter always has a value here.
Boolean biasMode = learner.getParams().getTyped(BilinearLearnerParameters.BIAS);
if(biasMode){
	logger.debug("Bias: " + CFMatrixUtils.diag(bias));
// Log sparsity and value-range diagnostics for the learner's current
// W (word) and U (user) parameter matrices.
// NOTE(review): fragment of a larger method.
final Matrix w = learner.getW();
final Matrix u = learner.getU();
logger.debug("W row sparcity: " + CFMatrixUtils.rowSparsity(w));
logger.debug(String.format("W range: %2.5f -> %2.5f", CFMatrixUtils.min(w), CFMatrixUtils.max(w)));
logger.debug("U row sparcity: " + CFMatrixUtils.rowSparsity(u));
logger.debug(String.format("U range: %2.5f -> %2.5f", CFMatrixUtils.min(u), CFMatrixUtils.max(u)));
// NOTE(review): this excerpt is discontinuous -- intervening code is not
// visible here, so brace balance and nesting cannot be judged from the
// fragment alone.
Matrix xtrows = xt;
if(xt instanceof AbstractSparseMatrix){
// Convert to sparse-row layout for faster row access.
xtrows = CFMatrixUtils.asSparseRow(xt);
// Convergence bookkeeping: absolute change vs total magnitude for W and U.
final double sumchangew = CFMatrixUtils.absSum(neww.minus(this.w));
final double totalw = CFMatrixUtils.absSum(this.w);
final double sumchangeu = CFMatrixUtils.absSum(newu.minus(this.u));
final double totalu = CFMatrixUtils.absSum(this.u);
double ratio = ratioU + ratioW;
if(this.biasMode){
final double sumchangebias = CFMatrixUtils.absSum(newbias.minus(this.bias));
totalbias = CFMatrixUtils.absSum(this.bias);
// Guard against division by zero when the bias is entirely zero.
if(totalbias!=0) ratioB = (sumchangebias/totalbias) ;
ratio += ratioB;
// Store the new parameters in sparse-column layout.
this.u = CFMatrixUtils.asSparseColumn(newu);
this.w = CFMatrixUtils.asSparseColumn(neww);
// Periodic progress logging every third iteration.
if(iter%3 == 0){
logger.debug(String.format("Iter: %d. Last Ratio: %2.3f",iter,ratio));
logger.debug("W row sparcity: " + CFMatrixUtils.rowSparsity(w));
logger.debug("U row sparcity: " + CFMatrixUtils.rowSparsity(u));
logger.debug("Total U magnitude: " + totalu);
logger.debug("Total W magnitude: " + totalw);
logger.debug("W row sparcity: " + CFMatrixUtils.rowSparsity(w));
logger.debug("U row sparcity: " + CFMatrixUtils.rowSparsity(u));
/**
 * Stack matrices vertically (each matrix's rows appended below the
 * previous one), using the default {@link MatrixFactory}.
 *
 * @param matricies
 *            the matrices to stack, top to bottom
 * @return the matrix created from the stacking
 */
public static Matrix vstack(Matrix... matricies) {
	return vstack(MatrixFactory.getDefault(), matricies);
}
/**
 * Subtract one matrix from another without mutating either operand: the
 * left operand is cloned and the in-place
 * {@link #fastminusEquals(Matrix, Matrix)} is applied to the copy, so the
 * fastest available subtraction path is used.
 *
 * @param A the left operand (not modified)
 * @param B the right operand (not modified)
 * @return A - B as a new matrix
 */
public static Matrix fastminus(Matrix A, Matrix B) {
	return fastminusEquals(A.clone(), B);
}
@Override
protected Matrix updateW(Matrix currentW, double wLossWeighted, double weightedLambda) {
	// Repeatedly apply the parent's single-step W update until the
	// relative change drops below BICONVEX_TOL or BICONVEX_MAXITER
	// iterations have been performed.
	final Double biconvextol = this.params.getTyped(BilinearLearnerParameters.BICONVEX_TOL);
	final Integer maxiter = this.params.getTyped(BilinearLearnerParameters.BICONVEX_MAXITER);
	Matrix current = currentW;
	for (int iter = 0;; iter++) {
		final Matrix updated = super.updateW(current, wLossWeighted, weightedLambda);
		// Relative change: sum of |delta| over sum of |previous values|.
		final double delta = CFMatrixUtils.absSum(current.minus(updated));
		final double magnitude = CFMatrixUtils.absSum(current);
		final double ratio = delta / magnitude;
		current = updated;
		if (ratio < biconvextol || iter >= maxiter) {
			logger.debug("W tolerance reached after iteration: " + iter);
			break;
		}
	}
	return current;
}
/**
 * Compute the sum of absolute values of all entries in the matrix,
 * dispatching to sparse-aware implementations where the layout allows.
 *
 * @param mat
 *            the matrix
 * @return the sum of {@code |element|} over all entries
 * @throws RuntimeException
 *             if any entry of a dense matrix is NaN
 */
public static double absSum(Matrix mat) {
	if (mat instanceof SparseColumnMatrix) {
		return absSumSparse((SparseColumnMatrix) mat);
	} else if (mat instanceof SparseRowMatrix) {
		return absSumSparse((SparseRowMatrix) mat);
	}
	double tot = 0;
	final int nrows = mat.getNumRows();
	final int ncols = mat.getNumColumns();
	for (int r = 0; r < nrows; r++) {
		for (int c = 0; c < ncols; c++) {
			final double element = mat.getElement(r, c);
			if (Double.isNaN(element)) {
				// Fail fast and report the offending location (the
				// original threw with the uninformative message "hmm?").
				throw new RuntimeException("NaN encountered in matrix at (" + r + ", " + c + ")");
			}
			tot += Math.abs(element);
		}
	}
	return tot;
}
@Override
public void performExperiment() throws Exception {
	// Load the middle fold's first serialised learner from a fixed
	// experiment directory and export its W (word) and U (user) matrices
	// to a MATLAB .mat file.
	// NOTE(review): all paths are hard-coded to one developer's machine.
	final String exproot = "/Users/ss/Dropbox/TrendMiner/deliverables/year2-18month/Austrian Data/streamingExperiments/experiment_1392738388042";
	final File[] folds = new File(exproot).listFiles(new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.contains("fold");
		}
	});
	// Pick the middle fold.
	final int f = (folds.length - 1) / 2;
	final File fold = folds[f];
	// NOTE(review): assumes the fold directory is non-empty and that its
	// first listed entry is the learner file -- listFiles gives no
	// ordering guarantee; confirm this is safe.
	final File learner = fold.listFiles()[0];
	final BilinearSparseOnlineLearner l = IOUtils.read(learner, BilinearSparseOnlineLearner.class);
	final Matrix words = l.getW();
	final Matrix users = l.getU();
	final String name = "fold_" + f;
	final MatFileWriter writ = new MatFileWriter();
	final ArrayList<MLArray> col = new ArrayList<MLArray>();
	// Convert to Jama dense arrays for the MAT-file writer.
	col.add(new MLDouble("words_" + name, CFMatrixUtils.asJama(words).getArray()));
	col.add(new MLDouble("users_" + name, CFMatrixUtils.asJama(users).getArray()));
	writ.write(new File("/Users/ss/" + name + ".mat"), col);
}
}
public static void main(String[] args) {
	// Micro-benchmark comparing fastsparsedot against the generic
	// Matrix.times on sparse operands.
	// NOTE(review): the method continues beyond this excerpt; braces are
	// completed in code not visible here.
	SparseMatrix a = SparseMatrixFactoryMTJ.INSTANCE.copyMatrix(SparseMatrixFactoryMTJ.INSTANCE.createWrapper(new FlexCompRowMatrix(4, 1118)));
	CFMatrixUtils.plusInplace(a, 1);
	// Random sparse operands built with the same seed so the row and
	// column variants hold identical data at very low density.
	SparseRowMatrix xtrow = CFMatrixUtils.randomSparseRow(1118,22917,0d,1d,1 - 0.9998818947086253, new Random(1));
	SparseColumnMatrix xtcol = CFMatrixUtils.randomSparseCol(1118,22917,0d,1d,1 - 0.9998818947086253, new Random(1));
	System.out.println("xtrow sparsity: " + CFMatrixUtils.sparsity(xtrow));
	System.out.println("xtcol sparsity: " + CFMatrixUtils.sparsity(xtcol));
	// Sanity check: the fast path must agree exactly (tolerance 0).
	System.out.println("Equal: " + CFMatrixUtils.fastsparsedot(a,xtcol).equals(a.times(xtcol), 0));
	MeanVector mv = new MeanVector();
	System.out.println("doing: a . xtcol");
	// Time 10 runs, printing the running mean duration each time.
	for (int i = 0; i < 10; i++) {
		Timer t = Timer.timer();
		CFMatrixUtils.fastsparsedot(a,xtcol);
		mv.update(new double[]{t.duration()});
		System.out.println("time: " + mv.vec()[0]);
// NOTE(review): discontinuous excerpt from a W-update routine -- some
// surrounding code (including opening braces and gradW's computation) is
// not visible here.
this.nodataseen = false;
// No data seen yet: substitute a sparse single-value U with the
// transposed dimensions of this.u so the dot product is well defined.
Matrix fakeu = new SparseSingleValueInitStrat(1).init(this.u.getNumColumns(), this.u.getNumRows());
Dprime = CFMatrixUtils.fastdot(fakeu,xt);
} else {
Dprime = CFMatrixUtils.fastdot(ut, xt);
// Centre each row of Dprime on its mean.
Vector rowMean = CFMatrixUtils.rowMean(Dprime);
CFMatrixUtils.minusEqualsCol(Dprime,rowMean);
logger.debug("Abs w_grad: " + CFMatrixUtils.absSum(gradW));
Matrix neww = null;
// Backtracking line search mirroring updateU: shrink the step until the
// proximal update passes the backtracking test.
for (int i = 0; i < 1000; i++) {
neww = this.w.clone();
Matrix scaledGradW = gradW.scale(1./wLossWeighted);
neww = CFMatrixUtils.fastminus(neww,scaledGradW);
neww = regul.prox(neww, weightedLambda/wLossWeighted);
if(loss.test_backtrack(this.w, gradW, neww, wLossWeighted))
/**
 * Compute, per task, the minimum and maximum entry of the corresponding
 * column of the learner's W (word) matrix.
 *
 * @return task name mapped to a (min, max) pair; empty if no learner set
 */
private Map<String, Pair<Double>> minMaxWords() {
	final Map<String, Pair<Double>> ret = new HashMap<String, Pair<Double>>();
	if (this.learner == null) return ret;
	final BiMap<String, Integer> depvals = this.learner.getDependantValues();
	final BilinearSparseOnlineLearner bilearner = this.learner.getBilinearLearner();
	for (final String task : depvals.keySet()) {
		// Use the map already fetched above instead of re-querying the
		// learner on every iteration (the original called
		// this.learner.getDependantValues() again here).
		final Integer taskCol = depvals.get(task);
		ret.put(
				task,
				new Pair<Double>(
						CFMatrixUtils.min(bilearner.getW().getColumn(taskCol)),
						CFMatrixUtils.max(bilearner.getW().getColumn(taskCol))
				)
		);
	}
	return ret;
}
/**
 * Expand the U parameters matrix by adding a set of rows.
 * If U is currently unset, this function does nothing (assuming U will be
 * initialised in the first round).
 * The new U parameters are initialised using
 * {@link BilinearLearnerParameters#EXPANDEDUINITSTRAT}.
 *
 * @param newUsers the number of new user rows to append
 */
public void addU(int newUsers) {
	if(this.u == null) return; // If u has not been inited, it will be on first process
	final InitStrategy ustrat = this.getInitStrat(BilinearLearnerParameters.EXPANDEDUINITSTRAT,null,null);
	final Matrix newU = ustrat.init(newUsers, this.u.getNumColumns());
	this.u = CFMatrixUtils.vstack(this.u,newU);
}
@Override
protected Matrix updateU(Matrix currentU, Matrix neww, double uLossWeighted, double weightedLambda) {
	// Repeatedly apply the parent's single-step U update until the
	// relative change drops below BICONVEX_TOL or BICONVEX_MAXITER
	// iterations have been performed.
	final Double biconvextol = this.params.getTyped(BilinearLearnerParameters.BICONVEX_TOL);
	final Integer maxiter = this.params.getTyped(BilinearLearnerParameters.BICONVEX_MAXITER);
	Matrix current = currentU;
	for (int iter = 0;; iter++) {
		final Matrix updated = super.updateU(current, neww, uLossWeighted, weightedLambda);
		// Relative change: sum of |delta| over sum of |previous values|.
		final double delta = CFMatrixUtils.absSum(current.minus(updated));
		final double magnitude = CFMatrixUtils.absSum(current);
		final double ratio = delta / magnitude;
		current = updated;
		if (ratio < biconvextol || iter >= maxiter) {
			logger.debug("U tolerance reached after iteration: " + iter);
			break;
		}
	}
	return current;
}
}