public static void main (String[] args) throws Exception { InstanceList data = InstanceList.load(new File(args[0])); LinearRegressionTrainer trainer = new LinearRegressionTrainer(data); Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(trainer); //Optimizer optimizer = new LimitedMemoryBFGS(trainer); optimizer.optimize(); optimizer.optimize(); }
public static void main (String[] args) throws Exception { InstanceList data = InstanceList.load(new File(args[0])); LinearRegressionTrainer trainer = new LinearRegressionTrainer(data); Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(trainer); //Optimizer optimizer = new LimitedMemoryBFGS(trainer); optimizer.optimize(); optimizer.optimize(); }
public static void main (String[] args) throws Exception { InstanceList data = InstanceList.load(new File(args[0])); LinearRegressionTrainer trainer = new LinearRegressionTrainer(data); Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(trainer); //Optimizer optimizer = new LimitedMemoryBFGS(trainer); optimizer.optimize(); optimizer.optimize(); }
// NOTE(review): truncated fragment of a CRF training loop — the enclosing method
// and the closing braces of this for/try are outside this view. Each pass runs
// the optimizer for a single step, bumps iterationCount, and logs the iteration.
for (int i = 0; i < numIterations; i++) { try { converged = optimizer.optimize (1); iterationCount++; logger.info ("CRF finished one iteration of maximizer, i="+i);
// With no L1 penalty, orthant-wise L-BFGS should reach the unconstrained
// maximum of the test polynomial at 5/6.
public void testOrthantWiseLBFGSWithoutL1() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// Conjugate gradient should reach the maximum of the test polynomial at 5/6.
public void testConjugateGradient() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new ConjugateGradient(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// NOTE(review): truncated fragment of a CRF training loop — the enclosing method
// and the closing braces of this for/try are outside this view. Each pass runs
// the optimizer ("opt") for a single step, bumps iterationCount, and logs it.
for (int i = 0; i < numIterations; i++) { try { converged = opt.optimize (1); iterationCount++; logger.info ("CRF finished one iteration of maximizer, i="+i);
// Plain L-BFGS should reach the maximum of the test polynomial at 5/6.
public void testLinearLBFGS() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new LimitedMemoryBFGS(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// NOTE(review): truncated fragment of a CRF training loop — the enclosing method
// and the closing braces of this for/try are outside this view. Each pass runs
// the optimizer for a single step, bumps iterationCount, and logs the iteration.
for (int i = 0; i < numIterations; i++) { try { converged = optimizer.optimize (1); iterationCount++; logger.info ("CRF finished one iteration of maximizer, i="+i);
// NOTE(review): truncated fragment of a CRF training loop — the enclosing method
// and the closing braces of this for/try are outside this view. Each pass runs
// the optimizer ("opt") for a single step, bumps iterationCount, and logs it.
for (int i = 0; i < numIterations; i++) { try { converged = opt.optimize (1); iterationCount++; logger.info ("CRF finished one iteration of maximizer, i="+i);
// Gradient ascent should reach the maximum of the test polynomial at 5/6.
public void testGradientAscent() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new GradientAscent(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// Plain L-BFGS should reach the maximum of the test polynomial at 5/6.
public void testLinearLBFGS() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new LimitedMemoryBFGS(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// With an L1 weight of 3.0, the optimum of the test polynomial is shrunk
// from 5/6 toward zero, landing at 2/6.
public void testOrthantWiseLBFGSWithL1() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(objective, 3.0);
    optimizer.optimize();
    assertEquals(2.0 / 6.0, objective.params[0], 1e-3);
}
// Conjugate gradient should reach the maximum of the test polynomial at 5/6.
public void testConjugateGradient() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new ConjugateGradient(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// Gradient ascent should reach the maximum of the test polynomial at 5/6.
public void testGradientAscent() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new GradientAscent(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
// With an L1 weight of 3.0, the optimum of the test polynomial is shrunk
// from 5/6 toward zero, landing at 2/6.
public void testOrthantWiseLBFGSWithL1() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(objective, 3.0);
    optimizer.optimize();
    assertEquals(2.0 / 6.0, objective.params[0], 1e-3);
}
// With no L1 penalty, orthant-wise L-BFGS should reach the unconstrained
// maximum of the test polynomial at 5/6.
public void testOrthantWiseLBFGSWithoutL1() {
    SimplePoly objective = new SimplePoly();
    Optimizer optimizer = new OrthantWiseLimitedMemoryBFGS(objective);
    optimizer.optimize();
    assertEquals(5.0 / 6.0, objective.params[0], 1e-3);
}
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }