/**
 * Verifies the checker itself: testValueAndGradient must accept a correct
 * Optimizable (SimplePoly) and must throw for one whose gradient is
 * inconsistent with its value (WrongSimplePoly).
 */
public void testTestValueAndGradient ()
{
  SimplePoly maxable = new SimplePoly ();
  testValueAndGradient (maxable);
  try {
    WrongSimplePoly badMaxable = new WrongSimplePoly ();
    testValueAndGradient (badMaxable);
    // Fixed typos in the original message ("WrongSimplyPoly", "testMaxmiziable").
    fail ("WrongSimplePoly should fail testValueAndGradient!");
  } catch (Exception ignored) {
    // Expected: the inconsistent gradient must trigger an exception.
  }
}
/** Command-line entry point: runs this class's suite with the text UI runner. */
public static void main (String[] args)
{
  junit.textui.TestRunner.run (suite ());
}
/**
 * Checks that getValue and getValueGradient are consistent at a randomly
 * drawn parameter vector: each coordinate is uniform in (0,1) with a
 * random sign flip.
 *
 * @param maxable optimizable to probe; its current parameters are overwritten
 * @param r source of randomness for the parameter draw
 * @return true if the check passes
 * @throws IllegalStateException if value and gradient disagree
 * @see #testValueAndGradientCurrentParameters
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r)
{
  int n = maxable.getNumParameters ();
  double[] randomPoint = new double[n];
  for (int j = 0; j < n; j++) {
    double magnitude = r.nextDouble ();
    randomPoint[j] = r.nextBoolean () ? -magnitude : magnitude;
  }
  maxable.setParameters (randomPoint);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
TestOptimizable.testValueAndGradient(minable); } else { System.out.println("Training Accuracy before training = " + crf.averageTokenAccuracy(lists[0]));
public void testSpaceMaximizable () { Pipe p = makeSpacePredictionPipe (); InstanceList training = new InstanceList (p); // String[] data = { TestMEMM.data[0], }; // TestMEMM.data[1], TestMEMM.data[2], TestMEMM.data[3], }; // String[] data = { "ab" }; training.addThruPipe (new ArrayIterator (data)); // CRF4 memm = new CRF4 (p, null); MEMM memm = new MEMM (p, null); memm.addFullyConnectedStatesForLabels (); memm.addStartState(); memm.setWeightsDimensionAsIn(training); MEMMTrainer memmt = new MEMMTrainer (memm); // memm.gatherTrainingSets (training); // ANNOYING: Need to set up per-instance training sets memmt.train (training, 1); // Set weights dimension, gathers training sets, etc. // memm.print(); // memm.printGradient = true; // memm.printInstanceLists(); // memm.setGaussianPriorVariance (Double.POSITIVE_INFINITY); Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(training); TestOptimizable.setNumComponents (150); TestOptimizable.testValueAndGradient (mcrf); }
/** Round-trips parameters through a MaxEnt optimizable via get/setParameters. */
public void testSetGetParameters ()
{
  MaxEntTrainer trainer = new MaxEntTrainer ();
  Alphabet features = dictOfSize (6);
  String[] labels = { "class0", "class1", "class2" };
  InstanceList instances = new InstanceList (new Randoms (1), features, labels, 20);
  Optimizable.ByGradientValue optimizable = trainer.getOptimizable (instances);
  TestOptimizable.testGetSetParameters (optimizable);
}
/** * Tests that parameters set by setParameters can be retrieved by * getParameters. * @param maxable Instance of a Maximizable that should be tested. * Its current parameters will be overwritten. */ public static boolean testGetSetParameters (Optimizable maxable) { System.out.println ("TestMaximizable testGetSetParameters"); // Set all the parameters to unique values using setParameters() double[] parameters = new double [maxable.getNumParameters()]; maxable.getParameters (parameters); for (int i = 0; i < parameters.length; i++) parameters[i] = (double)i; maxable.setParameters (parameters); // Test to make sure those parameters are there MatrixOps.setAll (parameters, 0.0); maxable.getParameters (parameters); for (int i = 0; i < parameters.length; i++) assertTrue (parameters[i] == (double)i); return true; }
TestOptimizable.testValueAndGradient(minable); } else { System.out.println("Training Accuracy before training = " + crf.averageTokenAccuracy(lists[0]));
public void testSpaceMaximizable () { Pipe p = makeSpacePredictionPipe (); InstanceList training = new InstanceList (p); // String[] data = { TestMEMM.data[0], }; // TestMEMM.data[1], TestMEMM.data[2], TestMEMM.data[3], }; // String[] data = { "ab" }; training.addThruPipe (new ArrayIterator (data)); // CRF4 memm = new CRF4 (p, null); MEMM memm = new MEMM (p, null); memm.addFullyConnectedStatesForLabels (); memm.addStartState(); memm.setWeightsDimensionAsIn(training); MEMMTrainer memmt = new MEMMTrainer (memm); // memm.gatherTrainingSets (training); // ANNOYING: Need to set up per-instance training sets memmt.train (training, 1); // Set weights dimension, gathers training sets, etc. // memm.print(); // memm.printGradient = true; // memm.printInstanceLists(); // memm.setGaussianPriorVariance (Double.POSITIVE_INFINITY); Optimizable.ByGradientValue mcrf = memmt.getOptimizableMEMM(training); TestOptimizable.setNumComponents (150); TestOptimizable.testValueAndGradient (mcrf); }
/** Round-trips parameters through a MaxEnt optimizable via get/setParameters. */
public void testSetGetParameters ()
{
  MaxEntTrainer trainer = new MaxEntTrainer ();
  Alphabet features = dictOfSize (6);
  String[] labels = { "class0", "class1", "class2" };
  InstanceList instances = new InstanceList (new Randoms (1), features, labels, 20);
  Optimizable.ByGradientValue optimizable = trainer.getOptimizable (instances);
  TestOptimizable.testGetSetParameters (optimizable);
}
/** * Tests that parameters set by setParameters can be retrieved by * getParameters. * @param maxable Instance of a Maximizable that should be tested. * Its current parameters will be overwritten. */ public static boolean testGetSetParameters (Optimizable maxable) { System.out.println ("TestMaximizable testGetSetParameters"); // Set all the parameters to unique values using setParameters() double[] parameters = new double [maxable.getNumParameters()]; maxable.getParameters (parameters); for (int i = 0; i < parameters.length; i++) parameters[i] = (double)i; maxable.setParameters (parameters); // Test to make sure those parameters are there MatrixOps.setAll (parameters, 0.0); maxable.getParameters (parameters); for (int i = 0; i < parameters.length; i++) assertTrue (parameters[i] == (double)i); return true; }
Optimizable.ByGradientValue minable = crft .getOptimizableCRF(lists[0]); TestOptimizable.testValueAndGradient(minable); } else { System.out.println("Training Accuracy before training = "
/**
 * Verifies the checker itself: testValueAndGradient must accept a correct
 * Optimizable (SimplePoly) and must throw for one whose gradient is
 * inconsistent with its value (WrongSimplePoly).
 */
public void testTestValueAndGradient ()
{
  SimplePoly maxable = new SimplePoly ();
  testValueAndGradient (maxable);
  try {
    WrongSimplePoly badMaxable = new WrongSimplePoly ();
    testValueAndGradient (badMaxable);
    // Fixed typos in the original message ("WrongSimplyPoly", "testMaxmiziable").
    fail ("WrongSimplePoly should fail testValueAndGradient!");
  } catch (Exception ignored) {
    // Expected: the inconsistent gradient must trigger an exception.
  }
}
/**
 * Checks that getValue and getValueGradient are consistent at a randomly
 * drawn parameter vector: each coordinate is uniform in (0,1) with a
 * random sign flip.
 *
 * @param maxable optimizable to probe; its current parameters are overwritten
 * @param r source of randomness for the parameter draw
 * @return true if the check passes
 * @throws IllegalStateException if value and gradient disagree
 * @see #testValueAndGradientCurrentParameters
 */
public static boolean testValueAndGradientRandomParameters (Optimizable.ByGradientValue maxable, Random r)
{
  int n = maxable.getNumParameters ();
  double[] randomPoint = new double[n];
  for (int j = 0; j < n; j++) {
    double magnitude = r.nextDouble ();
    randomPoint[j] = r.nextBoolean () ? -magnitude : magnitude;
  }
  maxable.setParameters (randomPoint);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
/**
 * Builds a 5-state MEMM over a 100-feature input alphabet and round-trips
 * its parameters through the optimizable's get/set methods.
 */
public void testGetSetParameters ()
{
  final int numFeatures = 100;
  final int numStates = 5;

  Alphabet inputs = new Alphabet ();
  for (int f = 0; f < numFeatures; f++)
    inputs.lookupIndex ("feature" + f);
  Alphabet outputs = new Alphabet ();

  MEMM model = new MEMM (inputs, outputs);
  String[] names = new String[numStates];
  for (int s = 0; s < numStates; s++)
    names[s] = "state" + s;
  model.addFullyConnectedStates (names);

  MEMMTrainer trainer = new MEMMTrainer (model);
  MEMMTrainer.MEMMOptimizableByLabelLikelihood optimizable =
      trainer.getOptimizableMEMM (new InstanceList (null));
  TestOptimizable.testGetSetParameters (optimizable);
}
/** Command-line entry point: runs this class's suite with the text UI runner. */
public static void main (String[] args)
{
  junit.textui.TestRunner.run (suite ());
}
/** Checks value/gradient consistency for a MaxEnt optimizable on random data. */
public void testRandomMaximizable ()
{
  MaxEntTrainer trainer = new MaxEntTrainer ();
  Alphabet features = dictOfSize (6);
  String[] labels = { "class0", "class1" };
  InstanceList instances = new InstanceList (new Randoms (1), features, labels, 20);
  Optimizable.ByGradientValue optimizable = trainer.getOptimizable (instances);
  TestOptimizable.testValueAndGradient (optimizable);
}
/**
 * Checks that getValue and getValueGradient agree at two points: the
 * origin (all parameters zero) and a small step of
 * <tt>-0.0001 * grad(f)</tt> away from it.
 *
 * @param maxable optimizable under test; its current parameters are overwritten
 * @return true if both checks pass
 * @throws IllegalStateException if the test fails
 * @see #testValueAndGradientCurrentParameters
 */
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
  int n = maxable.getNumParameters ();
  double[] point = new double[n];

  // First check: at the origin.
  MatrixOps.setAll (point, 0.0);
  maxable.setParameters (point);
  testValueAndGradientCurrentParameters (maxable);

  // Second check: take a tiny step along the negated gradient.
  MatrixOps.setAll (point, 0.0);
  double[] step = new double[n];
  maxable.getValueGradient (step);
  logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(step));
  logger.info (" max parameter change = "+(MatrixOps.infinityNorm(step) * -0.001));
  MatrixOps.timesEquals (step, -0.0001);
  MatrixOps.plusEquals (point, step);
  maxable.setParameters (point);
  testValueAndGradientCurrentParameters (maxable);
  return true;
}
/**
 * Builds a 5-state MEMM over a 100-feature input alphabet and round-trips
 * its parameters through the optimizable's get/set methods.
 */
public void testGetSetParameters ()
{
  final int numFeatures = 100;
  final int numStates = 5;

  Alphabet inputs = new Alphabet ();
  for (int f = 0; f < numFeatures; f++)
    inputs.lookupIndex ("feature" + f);
  Alphabet outputs = new Alphabet ();

  MEMM model = new MEMM (inputs, outputs);
  String[] names = new String[numStates];
  for (int s = 0; s < numStates; s++)
    names[s] = "state" + s;
  model.addFullyConnectedStates (names);

  MEMMTrainer trainer = new MEMMTrainer (model);
  MEMMTrainer.MEMMOptimizableByLabelLikelihood optimizable =
      trainer.getOptimizableMEMM (new InstanceList (null));
  TestOptimizable.testGetSetParameters (optimizable);
}
/** Checks value/gradient consistency for a MaxEnt optimizable on random data. */
public void testRandomMaximizable ()
{
  MaxEntTrainer trainer = new MaxEntTrainer ();
  Alphabet features = dictOfSize (6);
  String[] labels = { "class0", "class1" };
  InstanceList instances = new InstanceList (new Randoms (1), features, labels, 20);
  Optimizable.ByGradientValue optimizable = trainer.getOptimizable (instances);
  TestOptimizable.testValueAndGradient (optimizable);
}