Refine search
public double[][] getDataProjected(Matrix data, boolean debug) { // Project the original data set Matrix dataProjected; dataProjected = PC.transpose().times(data); if (debug) { System.out.println("Data projected:"); dataProjected.print(dataProjected.getRowDimension(), 3); } return dataProjected.getArray(); }
// Builds a scaled, normalized matrix from the diagonal counts and returns the
// eigenvector associated with its largest eigenvalue.
// NOTE(review): diagYY is scaled IN PLACE (diagYY[i] *= nTot), so the caller's
// array is mutated — confirm callers do not reuse it afterwards.
static double[] multiple(double[] diagYY /*diagonal*/, int nTot, int nVars) {
    int ny = diagYY.length;
    // Scale every diagonal entry by the total count (mutates the argument array).
    for (int i = 0; i < ny; i++) {
        diagYY[i] *= nTot;
    }
    double[][] uu = new double[ny][ny];
    for (int i = 0; i < ny; i++) {
        for (int j = 0; j < ny; j++) {
            // Only the diagonal carries raw mass; off-diagonal raw term is 0.
            double yyij = i==j ? diagYY[i] : 0;
            uu[i][j] = (yyij - diagYY[i] * diagYY[j] / nTot) / (nVars * Math.sqrt(diagYY[i] * diagYY[j]));
            // Guard against 0/0 when a diagonal entry is zero.
            if (Double.isNaN(uu[i][j])) {
                uu[i][j] = 0;
            }
        }
    }
    EigenvalueDecomposition eigen = new EigenvalueDecomposition(new Matrix(uu));
    double[] eigenvalues = eigen.getRealEigenvalues();
    double[][] eigenvectors = eigen.getV().getArray();
    int maxIndex = ArrayUtils.maxIndex(eigenvalues);
    // NOTE(review): eigenvectors[maxIndex] is a ROW of V, but Jama stores
    // eigenvectors as COLUMNS of V. Since uu is symmetric V is orthogonal, yet a
    // row is not in general the eigenvector for eigenvalues[maxIndex] — verify
    // this row-vs-column indexing is intended.
    return eigenvectors[maxIndex];
}
int M = data.getRowDimension(); int N = data.getColumnDimension(); double sd; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) throw new Error("eigenPCA: mean of dimension " + (i + 1) + " is 0.0"); if (scale) { sd = MathUtils.standardDeviation(data.getArray()[i]); if (sd == 0.0) throw new Error("eigenPCA: variance of dimension " + (i + 1) + " is 0.0"); data.print(data.getRowDimension(), 3); if (debug) { System.out.println("Covariance"); covariance.print(covariance.getRowDimension(), 3); d[i][j] = pc.getV().get(i, k); PC = new Matrix(d); if (debug) { System.out.println("PC:");
int M = data.getRowDimension(); int N = data.getColumnDimension(); double sd; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) throw new Error("eigenPCA: mean of dimension " + (i + 1) + " is 0.0"); if (scale) { sd = MathUtils.standardDeviation(data.getArray()[i]); if (sd == 0.0) throw new Error("eigenPCA: variance of dimension " + (i + 1) + " is 0.0"); data.print(data.getRowDimension(), 3); if (debug) { System.out.println("Covariance"); covariance.print(covariance.getRowDimension(), 3); d[i][j] = pc.getV().get(i, k); PC = new Matrix(d); if (debug) { System.out.println("PC:");
public double[][] getDataProjected(Matrix data, boolean debug) { // Project the original data set Matrix dataProjected; dataProjected = PC.transpose().times(data); if (debug) { System.out.println("Data projected:"); dataProjected.print(dataProjected.getRowDimension(), 3); } return dataProjected.getArray(); }
public PCAModel buildModel(DataInfo dinfo, GramTask tsk) { logStart(); Matrix myGram = new Matrix(tsk._gram.getXX()); // X'X/n where n = num rows SingularValueDecomposition mySVD = myGram.svd(); // Extract eigenvalues and eigenvectors // Note: Singular values ordered in weakly descending order by algorithm double[] Sval = mySVD.getSingularValues(); double[][] eigVec = mySVD.getV().getArray(); // rows = features, cols = principal components assert Sval.length == eigVec.length; // DKV.put(EigenvectorMatrix.makeKey(input("source"), destination_key), new EigenvectorMatrix(eigVec)); // Compute standard deviation double[] sdev = new double[Sval.length]; double totVar = 0; double dfcorr = dinfo._adaptedFrame.numRows()/(dinfo._adaptedFrame.numRows() - 1.0); for(int i = 0; i < Sval.length; i++) { // if(standardize) Sval[i] = dfcorr*Sval[i]; // Correct since degrees of freedom = n-1 sdev[i] = Math.sqrt(Sval[i]); totVar += Sval[i]; } double[] propVar = new double[Sval.length]; // Proportion of total variance double[] cumVar = new double[Sval.length]; // Cumulative proportion of total variance for(int i = 0; i < Sval.length; i++) { propVar[i] = Sval[i]/totVar; cumVar[i] = i == 0 ? propVar[0] : cumVar[i-1] + propVar[i]; } Key dataKey = input("source") == null ? null : Key.make(input("source")); int ncomp = Math.min(getNumPC(sdev, tolerance), max_pc); return new PCAModel(this, destination_key, dataKey, dinfo, tsk, sdev, propVar, cumVar, eigVec, mySVD.rank(), ncomp); }
false, false, false); GramTask gtsk = new GramTask(_job._key, tbInfo).doAll(tB); Matrix gramJ = new Matrix(gtsk._gram.getXX()); // form outer gram SingularValueDecomposition svdJ = gramJ.svd(); u=makeUVec(model, u_name, u, qfrm, new Matrix(stsk._atq), svdJ); model._output._d = ArrayUtils.mult((Arrays.copyOfRange(ArrayUtils.sqrtArr(svdJ.getSingularValues()), 0, _parms._nv)), sqrt(tB.numRows())); Matrix atqJ = new Matrix(stsk._atq); SingularValueDecomposition svdJ = atqJ.svd(); model._output._v = svdJ.getU().getMatrix(0, atqJ.getRowDimension() - 1, 0, _parms._nv - 1).getArray();
private int[] checkMeanColumns(String dataFile, int Y[], String[] features) { try { BufferedReader reader = new BufferedReader(new FileReader(dataFile)); Matrix data = Matrix.read(reader); reader.close(); data = data.transpose(); // then I have easy access to the columns int rows = data.getRowDimension() - 1; int cols = data.getColumnDimension() - 1; data = data.getMatrix(0, rows, 1, cols); // dataVowels(:,1:cols) -> dependent variables int M = data.getRowDimension(); double mn; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) { System.out.println("Removing feature: " + features[i] + " from list of features because it has mean=0.0"); Y = MathUtils.removeIndex(Y, i); } } } catch (Exception e) { throw new RuntimeException("Problem reading file " + dataFile, e); } System.out.println(); return Y; }
/** * see {@link UpdateableCholeskyDecomposition#choldowndate(double[][], double[])} * @param x * @param b */ public void choldowndate(double[] x, boolean b) { if(b) x = x.clone(); Matrix L = this.getL(); // work is done on an upper triangular matrix double[][] data = L.transpose().getArray(); choldowndate(data, x); // Make the output lower triangular again int Ll = L.getRowDimension(); L.setMatrix(0, Ll-1, 0, Ll-1, new Matrix(data, Ll, Ll).transpose()); }
private int[] checkMeanColumns(String dataFile, int Y[], String[] features) { try { BufferedReader reader = new BufferedReader(new FileReader(dataFile)); Matrix data = Matrix.read(reader); reader.close(); data = data.transpose(); // then I have easy access to the columns int rows = data.getRowDimension() - 1; int cols = data.getColumnDimension() - 1; data = data.getMatrix(0, rows, 1, cols); // dataVowels(:,1:cols) -> dependent variables int M = data.getRowDimension(); double mn; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) { System.out.println("Removing feature: " + features[i] + " from list of features because it has mean=0.0"); Y = MathUtils.removeIndex(Y, i); } } } catch (Exception e) { throw new RuntimeException("Problem reading file " + dataFile, e); } System.out.println(); return Y; }
/**
 * Benchmarks JAMA's Cholesky decomposition and exercises the parallel
 * in-house implementation for growing matrix sizes across several
 * thread-count / step settings.
 */
public void test () {
    Log.info("CholTest::test enter");
    for (int sz = 6000; sz < 10000; sz += 2000) {
        Log.info("CholTest::test sz is " + sz);
        DataSetup data = new DataSetup(sz, 12345);
        long start = System.currentTimeMillis();
        CholeskyDecomposition jamaChol = new Matrix(data.xx).chol();
        Log.info("JAMA CHOLESKY [N = " + sz + "] TAKES " + (System.currentTimeMillis() - start) + " MILLISECONDS.");
        // Only SPD matrices have a valid Cholesky factorization to compare against.
        if (!jamaChol.isSPD()) continue;
        ForkJoinPool fjp = new ForkJoinPool(32);
        try {
            for (int t = 2; t <= 32; t += 2) {
                for (int step : STEPS)
                    fjp.invoke(new TestSetup(new DataSetup(data.xx), jamaChol.getL().getArray(), step, t));
            }
        } finally {
            // The original never shut the pool down, leaking 32 worker threads
            // per outer-loop iteration.
            fjp.shutdown();
        }
    }
    Log.info("CholTest::test exit");
}
/**
 * Fits ridge-regularized coefficients on the kernel-expanded inputs:
 * beta = (Phi' Phi + 2*lambda*I)^-1 Phi' Y, stored via {@code setBeta}.
 *
 * @param x      raw input samples
 * @param y      target values, two columns per sample
 * @param lambda regularization strength (applied as 2*lambda on the diagonal)
 */
public void fit( final double x[][], final double y[][], final double lambda ) {
    final double[][] expanded = kernelExpandMatrixNormalize( x );
    final Matrix phi = new Matrix( expanded, expanded.length, length );
    final Matrix phiT = phi.transpose();
    // Normal-equations matrix Phi' Phi, then the ridge term on its diagonal.
    final Matrix gram = phiT.times( phi );
    final double ridge = 2 * lambda;
    final int dim = gram.getRowDimension();
    for ( int d = 0; d < dim; ++d ) {
        gram.set( d, d, gram.get( d, d ) + ridge );
    }
    // Solve for the coefficient matrix and store it.
    final Matrix solved = gram.inverse().times( phiT );
    final Matrix beta = solved.times( new Matrix( y, y.length, 2 ) );
    setBeta( beta.getArray() );
}
int M = data.getRowDimension(); int N = data.getColumnDimension(); double sd; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) throw new Error("svdPCA: mean of dimension " + (i + 1) + " is 0.0"); if (scale) { sd = MathUtils.standardDeviation(data.getArray()[i]); if (sd == 0.0) throw new Error("svdPCA: variance of dimension " + (i + 1) + " is 0.0"); data.print(data.getRowDimension(), 3); V = new double[svd.getS().getRowDimension()]; for (int i = 0; i < svd.getS().getRowDimension(); i++) { V[i] = svd.getS().get(i, i);
Matrix gramJ = new Matrix(gtsk._gram.getXX()); SingularValueDecomposition svdJ = gramJ.svd(); double[][] v = svdJ.getV().getArray(); assert v.length == _ncolExp && LinearAlgebraUtils.numColsExp(dinfo._adaptedFrame,_parms._use_all_factor_levels) == _ncolExp; model._output._v = MemoryManager.malloc8d(_ncolExp, _parms._nv);
int M = data.getRowDimension(); int N = data.getColumnDimension(); double sd; for (int i = 0; i < M; i++) { mn = MathUtils.mean(data.getArray()[i]); if (mn == 0.0) throw new Error("svdPCA: mean of dimension " + (i + 1) + " is 0.0"); if (scale) { sd = MathUtils.standardDeviation(data.getArray()[i]); if (sd == 0.0) throw new Error("svdPCA: variance of dimension " + (i + 1) + " is 0.0"); data.print(data.getRowDimension(), 3); V = new double[svd.getS().getRowDimension()]; for (int i = 0; i < svd.getS().getRowDimension(); i++) { V[i] = svd.getS().get(i, i);
/**
 * Dot product of a column vector with column {@code col} of matrix {@code m}.
 *
 * @param colvec single-column matrix
 * @param m      matrix whose column is dotted against {@code colvec}
 * @param col    zero-based column index into {@code m}
 * @return the dot product
 */
private double dotCol(Matrix colvec, Matrix m, int col) {
    final double[][] v = colvec.getArray();
    final double[][] mat = m.getArray();
    final int n = colvec.getRowDimension();
    double sum = 0.0;
    for (int r = 0; r < n; r++) {
        sum += v[r][0] * mat[r][col];
    }
    return sum;
}