// (fragment — enclosing method not visible in this chunk)
double fp = optimizable.getValue ();  // objective value at current parameters
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
// NOTE(review): getValueGradient is called twice in a row with the same
// buffer; the second call looks redundant — confirm and remove if so.
optimizable.getValueGradient(xi);
// Report progress to the optional evaluator callback.
if (eval != null) { eval.evaluate (optimizable, iterations);
// (fragment — enclosing method not visible in this chunk)
double fp = optimizable.getValue ();  // objective value at current parameters
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
// NOTE(review): getValueGradient is called twice in a row with the same
// buffer; the second call looks redundant — confirm and remove if so.
optimizable.getValueGradient(xi);
// Report progress to the optional evaluator callback.
if (eval != null) { eval.evaluate (optimizable, iterations);
// (fragment — enclosing method not visible in this chunk)
double fp = optimizable.getValue ();  // objective value at current parameters
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
// NOTE(review): getValueGradient is called twice in a row with the same
// buffer; the second call looks redundant — confirm and remove if so.
optimizable.getValueGradient(xi);
// Report progress to the optional evaluator callback.
if (eval != null) { eval.evaluate (optimizable, iterations);
// Fill `gradient` with the value gradient at the current parameters.
optable.getValueGradient(gradient);
// (fragment — enclosing method not visible in this chunk)
// Snapshot the current parameters twice: a working copy and a backup
// copy (presumably to restore or compare after a step — TODO confirm).
maxable.getParameters (parameters);
maxable.getParameters (oldParameters);
// Analytic gradient at the current parameters.
maxable.getValueGradient (analyticGradient);
// (fragment — enclosing method not visible in this chunk)
// Snapshot the current parameters twice: a working copy and a backup
// copy (presumably to restore or compare after a step — TODO confirm).
maxable.getParameters (parameters);
maxable.getParameters (oldParameters);
// Analytic gradient at the current parameters.
maxable.getValueGradient (analyticGradient);
/**
 * Tests that getValue and getValueGradient are consistent.
 * Tests for consistency at <tt>params = 0</tt> and at
 * <tt> params = -0.0001 * grad(f)</tt>
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
  double[] parameters = new double [maxable.getNumParameters()];
  // First check: at the origin (all parameters zero).
  MatrixOps.setAll (parameters, 0.0);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);
  // Second check: take a small step of -0.0001 * gradient and re-test there.
  MatrixOps.setAll (parameters, 0.0);
  double[] delta = new double[maxable.getNumParameters()];
  maxable.getValueGradient (delta);
  logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
  // BUG FIX: the logged scale factor was -0.001, but the actual step
  // applied below (and documented above) is -0.0001; log the real value.
  logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.0001));
  MatrixOps.timesEquals (delta, -0.0001);
  MatrixOps.plusEquals (parameters, delta);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
/**
 * Tests that getValue and getValueGradient are consistent.
 * Tests for consistency at <tt>params = 0</tt> and at
 * <tt> params = -0.0001 * grad(f)</tt>
 * @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
 * @throws IllegalStateException If the test fails.
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
  double[] parameters = new double [maxable.getNumParameters()];
  // First check: at the origin (all parameters zero).
  MatrixOps.setAll (parameters, 0.0);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);
  // Second check: take a small step of -0.0001 * gradient and re-test there.
  MatrixOps.setAll (parameters, 0.0);
  double[] delta = new double[maxable.getNumParameters()];
  maxable.getValueGradient (delta);
  logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
  // BUG FIX: the logged scale factor was -0.001, but the actual step
  // applied below (and documented above) is -0.0001; log the real value.
  logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.0001));
  MatrixOps.timesEquals (delta, -0.0001);
  MatrixOps.plusEquals (parameters, delta);
  maxable.setParameters (parameters);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
public void getValueGradient (double [] buffer) { // PriorGradient is -parameter/gaussianPriorVariance // Gradient is (constraint - expectation + PriorGradient) // == -(expectation - constraint - PriorGradient). // Gradient points "up-hill", i.e. in the direction of higher value if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) { getValue (); // This will fill in the this.expectation, updating it if necessary MatrixOps.setAll(cachedGradie, 0); double[] b2 = new double[buffer.length]; for (int i = 0; i < opts.length; i++) { MatrixOps.setAll(b2, 0); opts[i].getValueGradient(b2); MatrixOps.plusEquals(cachedGradie, b2); } cachedGradientWeightsStamp = crf.weightsValueChangeStamp; } System.arraycopy(cachedGradie, 0, buffer, 0, cachedGradie.length); }
public void getValueGradient (double [] buffer) { // PriorGradient is -parameter/gaussianPriorVariance // Gradient is (constraint - expectation + PriorGradient) // == -(expectation - constraint - PriorGradient). // Gradient points "up-hill", i.e. in the direction of higher value if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) { getValue (); // This will fill in the this.expectation, updating it if necessary MatrixOps.setAll(cachedGradie, 0); double[] b2 = new double[buffer.length]; for (int i = 0; i < opts.length; i++) { MatrixOps.setAll(b2, 0); opts[i].getValueGradient(b2); MatrixOps.plusEquals(cachedGradie, b2); } cachedGradientWeightsStamp = crf.weightsValueChangeStamp; } System.arraycopy(cachedGradie, 0, buffer, 0, cachedGradie.length); }
public void getValueGradient (double [] buffer) { // PriorGradient is -parameter/gaussianPriorVariance // Gradient is (constraint - expectation + PriorGradient) // == -(expectation - constraint - PriorGradient). // Gradient points "up-hill", i.e. in the direction of higher value if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) { getValue (); // This will fill in the this.expectation, updating it if necessary MatrixOps.setAll(cachedGradie, 0); double[] b2 = new double[buffer.length]; for (int i = 0; i < opts.length; i++) { MatrixOps.setAll(b2, 0); opts[i].getValueGradient(b2); MatrixOps.plusEquals(cachedGradie, b2); } cachedGradientWeightsStamp = crf.weightsValueChangeStamp; } System.arraycopy(cachedGradie, 0, buffer, 0, cachedGradie.length); }
/**
 * Copies the (cached) summed value gradient into {@code buffer}.
 * The cache is rebuilt only when the CRF weights have changed since it
 * was last computed; otherwise the cached copy is served directly.
 */
public void getValueGradient (double [] buffer)
{
  if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) {
    getValue ();  // refreshes state the sub-gradients depend on
    MatrixOps.setAll (cachedGradient, 0);
    double[] scratch = new double[buffer.length];
    for (int k = 0; k < optimizables.length; k++) {
      MatrixOps.setAll (scratch, 0);
      optimizables[k].getValueGradient (scratch);
      MatrixOps.plusEquals (cachedGradient, scratch);
    }
    cachedGradientWeightsStamp = crf.weightsValueChangeStamp;
  }
  System.arraycopy (cachedGradient, 0, buffer, 0, cachedGradient.length);
}
}
/**
 * Copies the (cached) summed value gradient into {@code buffer}.
 * The cache is rebuilt only when the CRF weights have changed since it
 * was last computed; otherwise the cached copy is served directly.
 */
public void getValueGradient (double [] buffer)
{
  if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) {
    getValue ();  // refreshes state the sub-gradients depend on
    MatrixOps.setAll (cachedGradient, 0);
    double[] scratch = new double[buffer.length];
    for (int k = 0; k < optimizables.length; k++) {
      MatrixOps.setAll (scratch, 0);
      optimizables[k].getValueGradient (scratch);
      MatrixOps.plusEquals (cachedGradient, scratch);
    }
    cachedGradientWeightsStamp = crf.weightsValueChangeStamp;
  }
  System.arraycopy (cachedGradient, 0, buffer, 0, cachedGradient.length);
}
}
/**
 * Copies the (cached) summed value gradient into {@code buffer}.
 * The cache is rebuilt only when the CRF weights have changed since it
 * was last computed; otherwise the cached copy is served directly.
 */
public void getValueGradient (double [] buffer)
{
  if (cachedGradientWeightsStamp != crf.weightsValueChangeStamp) {
    getValue ();  // refreshes state the sub-gradients depend on
    MatrixOps.setAll (cachedGradient, 0);
    double[] scratch = new double[buffer.length];
    for (int k = 0; k < optimizables.length; k++) {
      MatrixOps.setAll (scratch, 0);
      optimizables[k].getValueGradient (scratch);
      MatrixOps.plusEquals (cachedGradient, scratch);
    }
    cachedGradientWeightsStamp = crf.weightsValueChangeStamp;
  }
  System.arraycopy (cachedGradient, 0, buffer, 0, cachedGradient.length);
}
}
/**
 * Fills {@code buffer} with the sum of the value gradients of all the
 * component optimizables.
 * @param buffer destination array; overwritten with the summed gradient.
 */
public void getValueGradient (double[] buffer)
{
  // BUG FIX: previously the sub-gradients were accumulated into buffer
  // without clearing it first, so whatever the caller left in the array
  // leaked into the result.  Zero it so this method *fills* the buffer,
  // matching the other getValueGradient implementations in this file.
  MatrixOps.setAll (buffer, 0);
  double[] b2 = new double[buffer.length];
  for (Optimizable.ByGradientValue o : optimizables) {
    MatrixOps.setAll (b2, 0);
    o.getValueGradient (b2);
    MatrixOps.plusEquals (buffer, b2);
  }
}
/**
 * Fills {@code buffer} with the sum of the value gradients of all the
 * component optimizables.
 * @param buffer destination array; overwritten with the summed gradient.
 */
public void getValueGradient (double[] buffer)
{
  // BUG FIX: previously the sub-gradients were accumulated into buffer
  // without clearing it first, so whatever the caller left in the array
  // leaked into the result.  Zero it so this method *fills* the buffer,
  // matching the other getValueGradient implementations in this file.
  MatrixOps.setAll (buffer, 0);
  double[] b2 = new double[buffer.length];
  for (Optimizable.ByGradientValue o : optimizables) {
    MatrixOps.setAll (b2, 0);
    o.getValueGradient (b2);
    MatrixOps.plusEquals (buffer, b2);
  }
}
/**
 * Evaluate gradient, make it a descent direction.
 */
private void evalGradient()
{
  // Ask the optimizable for its value gradient (points up-hill).
  optimizable.getValueGradient(grad);
  // Adjust entries corresponding to infinite-valued parameters
  // (NOTE(review): presumably zeroes them so the search does not move
  // them — confirm against adjustGradForInfiniteParams, defined elsewhere).
  adjustGradForInfiniteParams(grad);
  // Negate the ascent gradient to obtain a descent direction.
  MatrixOps.timesEquals(grad, -1.0);
}
/**
 * Evaluate gradient, make it a descent direction.
 */
private void evalGradient()
{
  // Ask the optimizable for its value gradient (points up-hill).
  optimizable.getValueGradient(grad);
  // Adjust entries corresponding to infinite-valued parameters
  // (NOTE(review): presumably zeroes them so the search does not move
  // them — confirm against adjustGradForInfiniteParams, defined elsewhere).
  adjustGradForInfiniteParams(grad);
  // Negate the ascent gradient to obtain a descent direction.
  MatrixOps.timesEquals(grad, -1.0);
}
/**
 * Fills {@code buffer} with the sum of the value gradients of all the
 * component optimizables.
 * @param buffer destination array; overwritten with the summed gradient.
 */
public void getValueGradient (double[] buffer)
{
  // BUG FIX: previously the sub-gradients were accumulated into buffer
  // without clearing it first, so whatever the caller left in the array
  // leaked into the result.  Zero it so this method *fills* the buffer,
  // matching the other getValueGradient implementations in this file.
  MatrixOps.setAll (buffer, 0);
  double[] b2 = new double[buffer.length];
  for (Optimizable.ByGradientValue o : optimizables) {
    MatrixOps.setAll (b2, 0);
    o.getValueGradient (b2);
    MatrixOps.plusEquals (buffer, b2);
  }
}
/**
 * Evaluate gradient, make it a descent direction.
 */
private void evalGradient()
{
  // Ask the optimizable for its value gradient (points up-hill).
  optimizable.getValueGradient(grad);
  // Adjust entries corresponding to infinite-valued parameters
  // (NOTE(review): presumably zeroes them so the search does not move
  // them — confirm against adjustGradForInfiniteParams, defined elsewhere).
  adjustGradForInfiniteParams(grad);
  // Negate the ascent gradient to obtain a descent direction.
  MatrixOps.timesEquals(grad, -1.0);
}