public void setParameters(double[] params) {
    // Delegate: this composite exposes the parameter vector of its
    // first wrapped optimizable only.
    optimizables.get(0).setParameters(params);
}
public void setParameters(double[] params) {
    // Forward the new parameters to the first underlying optimizable,
    // which owns the shared parameter vector.
    optimizables.get(0).setParameters(params);
}
public void setParameters(double[] params) {
    // Pass the parameter update straight through to optimizable 0.
    optimizables.get(0).setParameters(params);
}
parameters[i] = param + epsilon; maxable.setParameters (parameters); double epsValue = maxable.getValue(); double slope = (epsValue - value) / epsilon;
parameters[i] = param + epsilon; maxable.setParameters (parameters); double epsValue = maxable.getValue(); double slope = (epsValue - value) / epsilon;
MatrixOps.plusEquals (parameters, normalizedDirection, epsilon); maxable.setParameters (parameters); double epsValue = maxable.getValue(); double slope = (epsValue - value) / epsilon; ", value+epsilon slope = "+slope+ ": slope difference = "+slopeDifference); maxable.setParameters (oldParameters); assert (Math.abs(slopeDifference) < tolerance) : "Slope difference "+slopeDifference+" is greater than tolerance "+tolerance; return slopeDifference;
params[i] = i; mcrf.setParameters(params);
MatrixOps.plusEquals (parameters, normalizedDirection, epsilon); maxable.setParameters (parameters); double epsValue = maxable.getValue(); double slope = (epsValue - value) / epsilon; ", value+epsilon slope = "+slope+ ": slope difference = "+slopeDifference); maxable.setParameters (oldParameters); assert (Math.abs(slopeDifference) < tolerance) : "Slope difference "+slopeDifference+" is greater than tolerance "+tolerance; return slopeDifference;
params[i] = i; mcrf.setParameters(params);
/**
 * Tests that getValue and getValueGradient are consistent.
 * Tests for consistency at <tt>params = 0</tt> and at
 * <tt>params = -0.0001 * grad(f)</tt>.
 *
 * @param maxable the optimizable whose value/gradient pair is checked
 * @return true if both consistency checks pass
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
  // First check: all parameters at zero.
  double[] parameters = new double [maxable.getNumParameters()];
  MatrixOps.setAll (parameters, 0.0);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);

  // Second check: take a small step along the negative gradient
  // (factor -0.0001, matching the Javadoc above) and re-test there.
  MatrixOps.setAll (parameters, 0.0);
  double[] delta = new double[maxable.getNumParameters()];
  maxable.getValueGradient (delta);
  logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
  // BUG FIX: the logged step factor previously said -0.001 while the code
  // below scales by -0.0001; report the factor that is actually applied.
  logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.0001));
  MatrixOps.timesEquals (delta, -0.0001);
  MatrixOps.plusEquals (parameters, delta);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);

  return true;
}
/**
 * Tests that getValue and getValueGradient are consistent.
 * Tests for consistency at <tt>params = 0</tt> and at
 * <tt>params = -0.0001 * grad(f)</tt>.
 *
 * @param maxable the optimizable whose value/gradient pair is checked
 * @return true if both consistency checks pass
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
  // First check: all parameters at zero.
  double[] parameters = new double [maxable.getNumParameters()];
  MatrixOps.setAll (parameters, 0.0);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);

  // Second check: take a small step along the negative gradient
  // (factor -0.0001, matching the Javadoc above) and re-test there.
  MatrixOps.setAll (parameters, 0.0);
  double[] delta = new double[maxable.getNumParameters()];
  maxable.getValueGradient (delta);
  logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
  // BUG FIX: the logged step factor previously said -0.001 while the code
  // below scales by -0.0001; report the factor that is actually applied.
  logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.0001));
  MatrixOps.timesEquals (delta, -0.0001);
  MatrixOps.plusEquals (parameters, delta);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);

  return true;
}
/**
 * Tests that getValue and getValueGradient are consistent
 * at a random parameter setting.
 *
 * @param maxable the optimizable under test
 * @param r source of randomness for the parameter draw
 * @return true if the check passes
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r)
{
  int numParams = maxable.getNumParameters ();
  double[] params = new double [numParams];
  // Each parameter gets a magnitude in [0,1) and then a random sign
  // (nextDouble is drawn before nextBoolean, as in the original).
  for (int idx = 0; idx < numParams; idx++) {
    double magnitude = r.nextDouble ();
    params[idx] = r.nextBoolean () ? -magnitude : magnitude;
  }
  maxable.setParameters (params);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
/**
 * Tests that getValue and getValueGradient are consistent
 * at a random parameter setting.
 *
 * @param maxable the optimizable under test
 * @param r source of randomness for the parameter draw
 * @return true if the check passes
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r)
{
  int count = maxable.getNumParameters ();
  double[] params = new double [count];
  for (int k = 0; k < count; k++) {
    // Uniform magnitude in [0,1), then a coin flip decides the sign;
    // the draw order (nextDouble, then nextBoolean) is preserved.
    double value = r.nextDouble ();
    if (r.nextBoolean ()) {
      value = -value;
    }
    params[k] = value;
  }
  maxable.setParameters (params);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
private void getNextPoint(double alpha) { for (int i = 0; i < parameters.length; i++) { parameters[i] = oldParameters[i] + direction[i] * alpha; if (l1Weight > 0) { // do not allow to cross orthant boundaries if using // L1-regularization if (oldParameters[i] * parameters[i] < 0) { parameters[i] = 0.0; } } } optimizable.setParameters(parameters); }
private void getNextPoint(double alpha) { for (int i = 0; i < parameters.length; i++) { parameters[i] = oldParameters[i] + direction[i] * alpha; if (l1Weight > 0) { // do not allow to cross orthant boundaries if using // L1-regularization if (oldParameters[i] * parameters[i] < 0) { parameters[i] = 0.0; } } } optimizable.setParameters(parameters); }
double getLikelihood (MEMMTrainer memmt, InstanceList data) { Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double [mcrf.getNumParameters()]; mcrf.getParameters (params); mcrf.setParameters (params); return mcrf.getValue (); }
double getLikelihood (MEMMTrainer memmt, InstanceList data) { Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double [mcrf.getNumParameters()]; mcrf.getParameters (params); mcrf.setParameters (params); return mcrf.getValue (); }
private void getNextPoint(double alpha) { for (int i = 0; i < parameters.length; i++) { parameters[i] = oldParameters[i] + direction[i] * alpha; if (l1Weight > 0) { // do not allow to cross orthant boundaries if using // L1-regularization if (oldParameters[i] * parameters[i] < 0) { parameters[i] = 0.0; } } } optimizable.setParameters(parameters); }
double getLikelihood(CRF crf, InstanceList data) { CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf); Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double[mcrf.getNumParameters()]; mcrf.getParameters(params); mcrf.setParameters(params); return mcrf.getValue(); }
double getLikelihood(CRF crf, InstanceList data) { CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf); Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(data); // Do this elaborate thing so that crf.cachedValueStale is forced true double[] params = new double[mcrf.getNumParameters()]; mcrf.getParameters(params); mcrf.setParameters(params); return mcrf.getValue(); }