@Override
public Matrix gradient(Matrix W) {
    // Gradient of the squared loss: X^T (XW - Y).
    return X.transpose().times(X.times(W).minus(Y));
}
@Override
public double eval(Matrix W) {
    // Residual: XW (+ bias) - Y.
    Matrix v = X.times(W).minus(Y);
    if (this.bias != null)
        v.plusEquals(this.bias); // plus() returns a copy and discards it; plusEquals() mutates in place
    // Squared Frobenius norm of the residual.
    v.dotTimesEquals(v);
    return v.sumOfRows().sum();
}
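// For reference, the loss these two methods implement and its gradient (a
// sketch of the standard least-squares derivation; the constant factor of 2
// is presumably absorbed into the step size, since gradient() omits it):
//
//   f(W)       = || XW + b - Y ||_F^2
//   \nabla_W f = 2 X^T (XW + b - Y)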
@Override
protected Matrix updateW(Matrix currentW, double wLossWeighted, double weightedLambda) {
    Matrix current = currentW;
    int iter = 0;
    final Double biconvextol = this.params.getTyped(BilinearLearnerParameters.BICONVEX_TOL);
    final Integer maxiter = this.params.getTyped(BilinearLearnerParameters.BICONVEX_MAXITER);
    while (true) {
        // Take a single W step from the superclass.
        final Matrix newcurrent = super.updateW(current, wLossWeighted, weightedLambda);
        // Relative L1 change between successive iterates.
        final double sumchange = CFMatrixUtils.absSum(current.minus(newcurrent));
        final double total = CFMatrixUtils.absSum(current);
        final double ratio = sumchange / total;
        current = newcurrent;
        // Stop on convergence or when the iteration cap is reached.
        if (ratio < biconvextol || iter >= maxiter) {
            logger.debug("W tolerance reached after iteration: " + iter);
            break;
        }
        iter++;
    }
    return current;
}
@Override
protected Matrix updateU(Matrix currentU, Matrix neww, double uLossWeighted, double weightedLambda) {
    Matrix current = currentU;
    int iter = 0;
    final Double biconvextol = this.params.getTyped(BilinearLearnerParameters.BICONVEX_TOL);
    final Integer maxiter = this.params.getTyped(BilinearLearnerParameters.BICONVEX_MAXITER);
    while (true) {
        // Take a single U step from the superclass, holding the new W fixed.
        final Matrix newcurrent = super.updateU(current, neww, uLossWeighted, weightedLambda);
        // Relative L1 change between successive iterates.
        final double sumchange = CFMatrixUtils.absSum(current.minus(newcurrent));
        final double total = CFMatrixUtils.absSum(current);
        final double ratio = sumchange / total;
        current = newcurrent;
        // Stop on convergence or when the iteration cap is reached.
        if (ratio < biconvextol || iter >= maxiter) {
            logger.debug("U tolerance reached after iteration: " + iter);
            break;
        }
        iter++;
    }
    return current;
}
}
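// How the inner-loop stopping criteria above might be configured (a sketch,
// assuming a put(...) counterpart to the getTyped(...) key-value API used
// above; the key names are taken from the code, the values are illustrative):
BilinearLearnerParameters params = new BilinearLearnerParameters();
params.put(BilinearLearnerParameters.BICONVEX_TOL, 0.01);  // stop when relative L1 change drops below 1%
params.put(BilinearLearnerParameters.BICONVEX_MAXITER, 5); // hard cap on inner iterations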
@Override
public Matrix gradient(Matrix W) {
    final Matrix resid = X.times(W).minus(Y);
    if (this.bias != null)
        resid.plusEquals(this.bias);
    // Mask out rows with missing (NaN) labels so they contribute no gradient.
    for (int r = 0; r < Y.getNumRows(); r++) {
        final double yc = Y.getElement(r, 0);
        if (Double.isNaN(yc)) {
            resid.setElement(r, 0, 0);
        }
    }
    return X.transpose().times(resid);
}
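// Equivalently, with an indicator mask m (m_r = 0 where y_r is NaN, else 1),
// the gradient computed above is (a sketch):
//
//   \nabla_W f = X^T ( m \odot (XW + b - Y) )
//
// so examples with missing labels contribute nothing to the update, rather
// than propagating NaNs through it.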
@Override
public boolean test_backtrack(Matrix W, Matrix grad, Matrix prox, double eta) {
    final Matrix tmp = prox.minus(W);
    final double evalW = eval(W);
    final double evalProx = eval(prox);
    // <grad, prox - W>, via a sparse-friendly dot product.
    final Matrix fastdotGradTmp = CFMatrixUtils.fastdot(grad.transpose(), tmp);
    final double normGradProx = CFMatrixUtils.sum(fastdotGradTmp);
    // The standard backtracking test uses the *squared* Frobenius norm;
    // the original code used the unsquared norm here.
    final double frob = tmp.normFrobenius();
    final double normTmp = 0.5 * eta * frob * frob;
    return (evalProx <= evalW + normGradProx + normTmp);
}
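// This is the usual sufficient-decrease test for backtracking step-size
// selection in proximal-gradient (FISTA-style) methods, with eta playing the
// role of the inverse step size:
//
//   F(p) <= F(w) + <\nabla F(w), p - w> + (eta / 2) || p - w ||_F^2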
double ratioB = 0;
double totalbias = 0;
// Relative L1 change of W and U between successive iterations.
final double sumchangew = CFMatrixUtils.absSum(neww.minus(this.w));
final double totalw = CFMatrixUtils.absSum(this.w);
final double sumchangeu = CFMatrixUtils.absSum(newu.minus(this.u));
final double totalu = CFMatrixUtils.absSum(this.u);
final double ratioW = sumchangew / totalw;
final double ratioU = sumchangeu / totalu;
double ratio = ratioU + ratioW;
if (this.biasMode) {
    final double sumchangebias = CFMatrixUtils.absSum(newbias.minus(this.bias));
    totalbias = CFMatrixUtils.absSum(this.bias);
    if (totalbias != 0)
        ratioB = sumchangebias / totalbias;
    // Fold the bias change into the overall convergence ratio.
    ratio += ratioB;
}
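// The stopping quantity assembled above is the summed relative L1 change of
// the parameters between iterations (a sketch):
//
//   ratio = ||U' - U||_1 / ||U||_1 + ||W' - W||_1 / ||W||_1
//           (+ ||b' - b||_1 / ||b||_1 in bias mode)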
mean = sum.getSum();
mean.scaleEquals(1.0 / n);
C = sum2.scale(1.0 / (n - 1)).minus(mean.outerProduct(s2));
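// Assuming sum2 accumulates \sum_i x_i x_i^T and s2 = (\sum_i x_i) / (n - 1)
// (neither is defined in this fragment), the line above matches the unbiased
// sample covariance:
//
//   C = \frac{1}{n-1} \sum_i x_i x_i^T - \frac{n}{n-1} \mu \mu^T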
covariance = s2.scale(1.0 / weightSum).minus(mean.outerProduct(mean));
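// With per-sample weights w_i, weightSum = \sum_i w_i and s2 assumed to
// accumulate \sum_i w_i x_i x_i^T, this is the weighted maximum-likelihood
// (biased) covariance estimate:
//
//   C = \frac{1}{\sum_i w_i} \sum_i w_i x_i x_i^T - \mu \mu^T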
final Matrix errmat = Y.minus(finalOutput);
// NB: this sums *signed* residuals, so errors of opposite sign cancel.
final double err = errmat.sumOfColumns().sum();
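// A cancellation-free alternative (a sketch reusing the dotTimesEquals /
// sumOfRows idiom from eval() above; the variable names are illustrative):
final Matrix sqerrmat = Y.minus(finalOutput);
sqerrmat.dotTimesEquals(sqerrmat);             // square residuals elementwise
final double sse = sqerrmat.sumOfRows().sum(); // sum of squared errors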