/**
 * Lazily returns the optimizer for this trainer, building a
 * {@link LimitedMemoryBFGS} over the given objective on first use.
 *
 * @param optimizable the gradient/value objective used if a new optimizer must be created
 * @return the cached or newly created optimizer
 */
public Optimizer getOptimizer(Optimizable.ByGradientValue optimizable) {
    // Guard clause: reuse an already-constructed optimizer.
    if (optimizer != null) {
        return optimizer;
    }
    optimizer = new LimitedMemoryBFGS(optimizable);
    return optimizer;
}
/**
 * Runs optimization with no practical cap on the number of iterations.
 *
 * @return true if the optimizer converged
 */
public boolean optimize() {
    // Delegate to the bounded variant with an effectively unlimited budget.
    final int unlimitedIterations = Integer.MAX_VALUE;
    return optimize(unlimitedIterations);
}
/**
 * Optimizes the auxiliary distribution q given the current model p.
 * On the first iteration no base scores exist yet, so the auxiliary
 * objective is built with a null base; on later iterations each
 * instance's label scores under p are accumulated as the base.
 *
 * @param data      training instances
 * @param p         current model used to score instances (unused on the first iteration)
 * @param firstIter true on the first outer iteration
 * @return per-instance base label scores, or null on the first iteration
 */
private double[][] optimizeQ(InstanceList data, Classifier p, boolean firstIter) {
    int numLabels = data.getTargetAlphabet().size();
    double[][] base = null;
    if (!firstIter) {
        int numInstances = data.size();
        base = new double[numInstances][numLabels];
        for (int idx = 0; idx < numInstances; idx++) {
            // Fold p's label scores for this instance into its base row.
            p.classify(data.get(idx)).getLabelVector().addTo(base[idx]);
        }
    }
    PRAuxClassifierOptimizable objective = new PRAuxClassifierOptimizable(data, base, q);
    LimitedMemoryBFGS optimizer = new LimitedMemoryBFGS(objective);
    // Run L-BFGS, then reset and run once more to push past premature
    // convergence; optimizer exceptions are printed and otherwise ignored
    // (best-effort, as elsewhere in this codebase).
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    optimizer.reset();
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return base;
}
}
bfgs = new LimitedMemoryBFGS(opt); try { bfgs.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace(); LimitedMemoryBFGS bfgsP = new LimitedMemoryBFGS(optP); bfgsP.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace();
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
/**
 * Optimizes the auxiliary distribution q given the current model p.
 * On the first iteration no base scores exist yet, so the auxiliary
 * objective is built with a null base; on later iterations each
 * instance's label scores under p are accumulated as the base.
 *
 * @param data      training instances
 * @param p         current model used to score instances (unused on the first iteration)
 * @param firstIter true on the first outer iteration
 * @return per-instance base label scores, or null on the first iteration
 */
private double[][] optimizeQ(InstanceList data, Classifier p, boolean firstIter) {
    int numLabels = data.getTargetAlphabet().size();
    double[][] base = null;
    if (!firstIter) {
        int numInstances = data.size();
        base = new double[numInstances][numLabels];
        for (int idx = 0; idx < numInstances; idx++) {
            // Fold p's label scores for this instance into its base row.
            p.classify(data.get(idx)).getLabelVector().addTo(base[idx]);
        }
    }
    PRAuxClassifierOptimizable objective = new PRAuxClassifierOptimizable(data, base, q);
    LimitedMemoryBFGS optimizer = new LimitedMemoryBFGS(objective);
    // Run L-BFGS, then reset and run once more to push past premature
    // convergence; optimizer exceptions are printed and otherwise ignored
    // (best-effort, as elsewhere in this codebase).
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    optimizer.reset();
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return base;
}
}
bfgs = new LimitedMemoryBFGS(opt); try { bfgs.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace(); LimitedMemoryBFGS bfgsP = new LimitedMemoryBFGS(optP); bfgsP.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace();
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
/**
 * Optimizes the auxiliary distribution q given the current model p.
 * On the first iteration no base scores exist yet, so the auxiliary
 * objective is built with a null base; on later iterations each
 * instance's label scores under p are accumulated as the base.
 *
 * @param data      training instances
 * @param p         current model used to score instances (unused on the first iteration)
 * @param firstIter true on the first outer iteration
 * @return per-instance base label scores, or null on the first iteration
 */
private double[][] optimizeQ(InstanceList data, Classifier p, boolean firstIter) {
    int numLabels = data.getTargetAlphabet().size();
    double[][] base = null;
    if (!firstIter) {
        int numInstances = data.size();
        base = new double[numInstances][numLabels];
        for (int idx = 0; idx < numInstances; idx++) {
            // Fold p's label scores for this instance into its base row.
            p.classify(data.get(idx)).getLabelVector().addTo(base[idx]);
        }
    }
    PRAuxClassifierOptimizable objective = new PRAuxClassifierOptimizable(data, base, q);
    LimitedMemoryBFGS optimizer = new LimitedMemoryBFGS(objective);
    // Run L-BFGS, then reset and run once more to push past premature
    // convergence; optimizer exceptions are printed and otherwise ignored
    // (best-effort, as elsewhere in this codebase).
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    optimizer.reset();
    try {
        optimizer.optimize();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return base;
}
}
/**
 * Returns the configured optimizer, or a new {@link LimitedMemoryBFGS}
 * over the given objective when none has been set.
 *
 * @param macrf the gradient/value objective to optimize
 * @return the existing maxer if set, otherwise a fresh L-BFGS optimizer
 */
private Optimizer createMaxer (Optimizable.ByGradientValue macrf) {
    // Note: a freshly built optimizer is NOT cached in maxer; this mirrors
    // the original behavior exactly.
    return (maxer != null) ? maxer : new LimitedMemoryBFGS(macrf);
}
bfgs = new LimitedMemoryBFGS(opt); try { bfgs.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace(); LimitedMemoryBFGS bfgsP = new LimitedMemoryBFGS(optP); bfgsP.optimize(maxIterPerStep); } catch (Exception e) { e.printStackTrace();
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
/**
 * Runs optimization with no practical cap on the number of iterations.
 *
 * @return true if the optimizer converged
 */
public boolean optimize() {
    // Delegate to the bounded variant with an effectively unlimited budget.
    final int unlimitedIterations = Integer.MAX_VALUE;
    return optimize(unlimitedIterations);
}
new CRFOptimizableByLabelLikelihood(crf, labeled); likelihood.setGaussianPriorVariance(gaussianPriorVariance); this.bfgs = new LimitedMemoryBFGS(likelihood); logger.info ("CRF about to train with "+numIterations+" iterations"); for (int i = 0; i < numIterations; i++) { try { converged = bfgs.optimize(1); iteration++; logger.info ("CRF finished one iteration of maximizer, i="+i); this.bfgs = new LimitedMemoryBFGS(regLikelihood); converged = false; logger.info ("CRF about to train with "+numIterations+" iterations"); for (int i = 0; i < numIterations; i++) { try { converged = bfgs.optimize (1); iteration++; logger.info ("CRF finished one iteration of maximizer, i="+i); this.bfgs.reset();