/**
 * Returns the dimensionality of the parameter vector.
 * Delegates to the first wrapped optimizable — NOTE(review): assumes every
 * element of {@code optimizables} shares the same parameter space; confirm.
 */
public int getNumParameters() {
    final int count = optimizables.get(0).getNumParameters();
    return count;
}
/** Number of parameters, as reported by the first underlying optimizable. */
public int getNumParameters() {
    // NOTE(review): presumes all wrapped optimizables have the same dimension.
    return optimizables
            .get(0)
            .getNumParameters();
}
/**
 * Returns the dimensionality of the parameter vector, delegating to the
 * first element of {@code optimizables}.
 * NOTE(review): assumes all wrapped optimizables share one parameter space
 * — confirm against the class declaring this field.
 */
public int getNumParameters() { return optimizables.get(0).getNumParameters(); }
// NOTE(review): fragment — cut mid-method and mid-loop; the for-loop brace is
// left unbalanced by this excerpt.
crf.setWeightsDimensionAsIn(one, false); // second argument's semantics not visible here — TODO confirm
Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(one);
// Fill the parameter vector with a deterministic ramp (0, 1, 2, ...).
double[] params = new double[mcrf.getNumParameters()];
for (int i = 0; i < params.length; i++) {
    params[i] = i;
// NOTE(review): fragment — an opening '{' appears to be missing after the
// signature, and the method body continues beyond this excerpt.
public static double testValueAndGradientInDirection (Optimizable.ByGradientValue maxable, double[] direction)
    int numParameters = maxable.getNumParameters();
    // The probe direction must match the parameter dimensionality.
    assert (numParameters == direction.length);
    // Backup buffer — presumably used later to restore the original parameters; confirm in full source.
    double[] oldParameters = new double[numParameters];
// NOTE(review): incomplete excerpt — the enclosing method and the loop body's
// closing braces lie outside this view.
crf.setWeightsDimensionAsIn(one, false); // second flag's meaning not shown in this excerpt — TODO confirm
Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(one);
// Seed each parameter with its own index, giving a non-trivial starting point.
double[] params = new double[mcrf.getNumParameters()];
for (int i = 0; i < params.length; i++) {
    params[i] = i;
/**
 * Creates an orthant-wise limited-memory BFGS optimizer for an
 * L1-regularized objective.
 *
 * @param function the differentiable objective providing value and gradient
 * @param l1wt     weight of the L1 penalty term
 */
public OrthantWiseLimitedMemoryBFGS(Optimizable.ByGradientValue function, double l1wt) {
    this.optimizable = function;
    this.l1Weight = l1wt;
    // Keep only the simple class name (last dotted component) for display.
    String parts[] = optimizable.getClass().getName().split("\\.");
    this.optName = parts[parts.length - 1];
    // initialize optimizer state
    iterations = 0;
    s = new LinkedList<double[]>();
    y = new LinkedList<double[]>();
    rhos = new LinkedList<Double>();
    alphas = new double[m]; // m is the history size, declared elsewhere in the class
    MatrixOps.setAll(alphas, 0.0);
    yDotY = 0;
    int numParameters = optimizable.getNumParameters();
    // get initial parameters
    parameters = new double[numParameters];
    optimizable.getParameters(parameters);
    // get initial value
    value = evalL1();
    // get initial gradient
    grad = new double[numParameters];
    evalGradient();
    // initialize direction
    direction = new double[numParameters];
    steepestDescentDirection = new double[numParameters];
    // initialize backups
    oldParameters = new double[numParameters];
    oldGrad = new double[numParameters];
}
public OrthantWiseLimitedMemoryBFGS(Optimizable.ByGradientValue function, double l1wt) { this.optimizable = function; this.l1Weight = l1wt; String parts[] = optimizable.getClass().getName().split("\\."); this.optName = parts[parts.length - 1]; // initialize optimizer state iterations = 0; s = new LinkedList<double[]>(); y = new LinkedList<double[]>(); rhos = new LinkedList<Double>(); alphas = new double[m]; MatrixOps.setAll(alphas, 0.0); yDotY = 0; int numParameters = optimizable.getNumParameters(); // get initial parameters parameters = new double[numParameters]; optimizable.getParameters(parameters); // get initial value value = evalL1(); // get initial gradient grad = new double[numParameters]; evalGradient(); // initialize direction direction = new double[numParameters]; steepestDescentDirection = new double[numParameters]; // initialize backups oldParameters = new double[numParameters]; oldGrad = new double[numParameters]; }
public OrthantWiseLimitedMemoryBFGS(Optimizable.ByGradientValue function, double l1wt) { this.optimizable = function; this.l1Weight = l1wt; String parts[] = optimizable.getClass().getName().split("\\."); this.optName = parts[parts.length - 1]; // initialize optimizer state iterations = 0; s = new LinkedList<double[]>(); y = new LinkedList<double[]>(); rhos = new LinkedList<Double>(); alphas = new double[m]; MatrixOps.setAll(alphas, 0.0); yDotY = 0; int numParameters = optimizable.getNumParameters(); // get initial parameters parameters = new double[numParameters]; optimizable.getParameters(parameters); // get initial value value = evalL1(); // get initial gradient grad = new double[numParameters]; evalGradient(); // initialize direction direction = new double[numParameters]; steepestDescentDirection = new double[numParameters]; // initialize backups oldParameters = new double[numParameters]; oldGrad = new double[numParameters]; }
// NOTE(review): fragment of a larger method — `fret` is declared but assigned
// beyond this excerpt; the enclosing definition is not visible here.
double fret;
// Current objective value before any line search.
double fp = optimizable.getValue ();
// Gradient buffer sized to the parameter vector, filled in place.
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
// NOTE(review): incomplete excerpt — `fret` is used further down in the
// original method, which continues outside this view.
double fret;
// Objective value at the current parameters.
double fp = optimizable.getValue ();
// Allocate and populate the gradient vector.
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
// NOTE(review): method fragment only; the surrounding definition and the
// eventual use of `fret` are not shown in this excerpt.
double fret;
// Snapshot the current objective value.
double fp = optimizable.getValue ();
// Gradient at the current parameters, written into `xi`.
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
/**
 * Tests that getValue and getValueGradient are consistent.
 * Consistency is checked at <tt>params = 0</tt> and at
 * <tt>params = -0.0001 * grad(f)</tt>.
 *
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable) {
    int dim = maxable.getNumParameters();

    // Check 1: at the origin.
    double[] theta = new double[dim];
    MatrixOps.setAll(theta, 0.0);
    maxable.setParameters(theta);
    testValueAndGradientCurrentParameters(maxable);

    // Check 2: after a small step along the negative gradient.
    MatrixOps.setAll(theta, 0.0);
    double[] step = new double[dim];
    maxable.getValueGradient(step);
    logger.info("Gradient two-Norm = " + MatrixOps.twoNorm(step));
    logger.info(" max parameter change = " + (MatrixOps.infinityNorm(step) * -0.001));
    MatrixOps.timesEquals(step, -0.0001);
    MatrixOps.plusEquals(theta, step);
    maxable.setParameters(theta);
    testValueAndGradientCurrentParameters(maxable);
    return true;
}
/**
 * Tests that getValue and getValueGradient are consistent.
 * Tests for consistency at <tt>params = 0</tt> and at
 * <tt> params = -0.0001 * grad(f)</tt>
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable) {
    // First consistency check: all parameters zero.
    double[] parameters = new double [maxable.getNumParameters()];
    MatrixOps.setAll (parameters, 0.0);
    maxable.setParameters (parameters);
    testValueAndGradientCurrentParameters (maxable);
    // Second check: take a tiny step opposite the gradient and re-test.
    MatrixOps.setAll (parameters, 0.0);
    double[] delta = new double[maxable.getNumParameters()];
    maxable.getValueGradient (delta);
    logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
    logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.001));
    // Scale the gradient into a small negative step, apply it, and re-check.
    MatrixOps.timesEquals (delta, -0.0001);
    MatrixOps.plusEquals (parameters, delta);
    maxable.setParameters (parameters);
    testValueAndGradientCurrentParameters (maxable);
    return true;
}
/**
 * Tests that getValue and getValueGradient are consistent
 * at a random parameter setting.
 *
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r) {
    double[] theta = new double[maxable.getNumParameters()];
    // Each parameter: magnitude from nextDouble(), sign from a coin flip.
    for (int j = 0; j < theta.length; j++) {
        double magnitude = r.nextDouble();
        theta[j] = r.nextBoolean() ? -magnitude : magnitude;
    }
    maxable.setParameters(theta);
    testValueAndGradientCurrentParameters(maxable);
    return true;
}
/**
 * Tests that getValue and getValueGradient are consistent
 * at a random parameter setting.
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r) {
    double[] params = new double [maxable.getNumParameters()];
    // Draw each parameter's magnitude, then randomly flip its sign.
    for (int i = 0; i < params.length; i++) {
        params[i] = r.nextDouble ();
        if (r.nextBoolean ())
            params [i] = -params[i];
    }
    maxable.setParameters (params);
    testValueAndGradientCurrentParameters (maxable);
    return true;
}
double getLikelihood (MEMMTrainer memmt, InstanceList data) { Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double [mcrf.getNumParameters()]; mcrf.getParameters (params); mcrf.setParameters (params); return mcrf.getValue (); }
/**
 * Returns the likelihood of {@code data} under the trainer's MEMM.
 */
double getLikelihood (MEMMTrainer memmt, InstanceList data) {
    Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(data);
    // Do this elaborate thing so that crf.cachedValueStale is forced true
    // (a no-op parameter round-trip invalidates the cached value).
    double[] params = new double [mcrf.getNumParameters()];
    mcrf.getParameters (params);
    mcrf.setParameters (params);
    return mcrf.getValue ();
}
double getLikelihood(CRF crf, InstanceList data) { CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf); Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double[mcrf.getNumParameters()]; mcrf.getParameters(params); mcrf.setParameters(params); return mcrf.getValue(); }
/**
 * Returns the label likelihood of {@code data} under {@code crf}.
 */
double getLikelihood(CRF crf, InstanceList data) {
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(data);
    // Do this elaborate thing so that crf.cachedValueStale is forced true
    // (the no-op get/set parameter round-trip invalidates the cache).
    double[] params = new double[mcrf.getNumParameters()];
    mcrf.getParameters(params);
    mcrf.setParameters(params);
    return mcrf.getValue();
}