private int initSparseWeights (InstanceList training) { // checkCliqueSizeConsistent (training); //debug int total = 0; // Build this bitsets that tell us what weights occur in the data int size = cliqueSizeFromInstance (training); BitSet[] weightsPresent = new BitSet [size]; for (int i = 0; i < size; i++) { weightsPresent [i] = new BitSet (); } assignmentsPresent = new BitSet (size); collectWeightsPresent (training, weightsPresent); if (weights != null) { addInCurrentWeights (weightsPresent); } // We can allocate default Weights now total += allocateDefaultWeights (size); // Use those to allocate the SparseVectors SparseVector[] newWeights = new SparseVector [size]; total += allocateNewWeights (weightsPresent, newWeights); logger.info ("ACRF template "+this+" total num weights = "+total); this.weights = newWeights; return total; }
/** Asks every registered template to add its instantiated cliques to this graph. */
private void setupGraph ()
{
    Iterator it = allTemplates.iterator ();
    while (it.hasNext ()) {
        Template template = (Template) it.next ();
        template.addInstantiatedCliques (this, instance);
    }
} // setupGraph
/**
 * Marks, for each assignment of this template's cliques in the unrolled
 * graph, which feature indices are present in the clique's feature vector.
 *
 * @param unrolled       the unrolled graph to scan
 * @param weightsPresent per-assignment bitsets to set; indexed by assignment number
 */
private void collectWeightsPresentForGraph (UnrolledGraph unrolled, BitSet[] weightsPresent)
{
    Iterator cliqueIt = unrolled.unrolledVarSetIterator ();
    while (cliqueIt.hasNext ()) {
        UnrolledVarSet varSet = (UnrolledVarSet) cliqueIt.next ();
        // Only cliques instantiated by this template contribute.
        if (varSet.tmpl != this) {
            continue;
        }
        int assnIdx = varSet.lookupAssignmentNumber ();
        addPresentFeatures (weightsPresent [assnIdx], varSet.fv);
    }
}
// Allocates sparse weight vectors for this template: only weights that
// occur in the training data (or that already exist in the current
// vectors) are allocated.  Returns the total number of weights allocated.
private int initSparseWeights (InstanceList training) {
    // checkCliqueSizeConsistent (training); //debug
    int total = 0;
    // Build the bitsets that tell us which weights occur in the data
    int size = cliqueSizeFromInstance (training);
    BitSet[] weightsPresent = new BitSet [size];
    for (int i = 0; i < size; i++) {
        weightsPresent [i] = new BitSet ();
    }
    assignmentsPresent = new BitSet (size);
    collectWeightsPresent (training, weightsPresent);
    // Keep any weights that were already allocated in a previous pass.
    if (weights != null) {
        addInCurrentWeights (weightsPresent);
    }
    // We can allocate default Weights now
    total += allocateDefaultWeights (size);
    // Use those to allocate the SparseVectors
    SparseVector[] newWeights = new SparseVector [size];
    total += allocateNewWeights (weightsPresent, newWeights);
    logger.info ("ACRF template "+this+" total num weights = "+total);
    this.weights = newWeights;
    return total;
}
private int initSparseWeights (InstanceList training) { // checkCliqueSizeConsistent (training); //debug int total = 0; // Build this bitsets that tell us what weights occur in the data int size = cliqueSizeFromInstance (training); BitSet[] weightsPresent = new BitSet [size]; for (int i = 0; i < size; i++) { weightsPresent [i] = new BitSet (); } assignmentsPresent = new BitSet (size); collectWeightsPresent (training, weightsPresent); if (weights != null) { addInCurrentWeights (weightsPresent); } // We can allocate default Weights now total += allocateDefaultWeights (size); // Use those to allocate the SparseVectors SparseVector[] newWeights = new SparseVector [size]; total += allocateNewWeights (weightsPresent, newWeights); logger.info ("ACRF template "+this+" total num weights = "+total); this.weights = newWeights; return total; }
private int initDenseWeights (InstanceList training) { int numf = training.getDataAlphabet ().size (); int total = 0; // handle default weights int size = cliqueSizeFromInstance (training); total += allocateDefaultWeights (size); // and regular weights SparseVector[] newWeights = new SparseVector [size]; for (int i = 0; i < size; i++) { newWeights [i] = new SparseVector (new double[numf], false); if (weights != null) newWeights [i].plusEqualsSparse (weights [i]); total += numf; logger.info ("ACRF template "+this+" weights ["+i+"] num features "+numf); } logger.info ("ACRF template "+this+" total num weights = "+total); weights = newWeights; return total; }
private int initDenseWeights (InstanceList training) { int numf = training.getDataAlphabet ().size (); int total = 0; // handle default weights int size = cliqueSizeFromInstance (training); total += allocateDefaultWeights (size); // and regular weights SparseVector[] newWeights = new SparseVector [size]; for (int i = 0; i < size; i++) { newWeights [i] = new SparseVector (new double[numf], false); if (weights != null) newWeights [i].plusEqualsSparse (weights [i]); total += numf; logger.info ("ACRF template "+this+" weights ["+i+"] num features "+numf); } logger.info ("ACRF template "+this+" total num weights = "+total); weights = newWeights; return total; }
// Allocates dense weight vectors: one weight per alphabet feature for
// each of the clique's assignments.  Existing weights (if any) are
// folded into the new vectors.  Returns the number of weights allocated.
private int initDenseWeights (InstanceList training) {
    int numf = training.getDataAlphabet ().size ();
    int total = 0;
    // handle default weights
    int size = cliqueSizeFromInstance (training);
    total += allocateDefaultWeights (size);
    // and regular weights
    SparseVector[] newWeights = new SparseVector [size];
    for (int i = 0; i < size; i++) {
        // Dense vector: one slot per feature in the data alphabet.
        newWeights [i] = new SparseVector (new double[numf], false);
        // NOTE(review): assumes the old weights array has at least `size`
        // entries — if the clique size grew, weights[i] could be out of
        // bounds.  TODO confirm clique size is stable across calls.
        if (weights != null) newWeights [i].plusEqualsSparse (weights [i]);
        total += numf;
        logger.info ("ACRF template "+this+" weights ["+i+"] num features "+numf);
    }
    logger.info ("ACRF template "+this+" total num weights = "+total);
    weights = newWeights;
    return total;
}
/**
 * Marks features as present for assignments whose normalized factor
 * value exceeds SOME_UNSUPPORTED_THRESHOLD, scanning an unrolled graph
 * built from each training instance.
 *
 * @param training       instances to unroll and scan
 * @param weightsPresent per-assignment bitsets to update
 */
private void collectSomeUnsupportedWeights (InstanceList training, BitSet[] weightsPresent)
{
    for (int instIdx = 0; instIdx < training.size (); instIdx++) {
        Instance instance = training.get (instIdx);
        UnrolledGraph graph = new UnrolledGraph (instance, new Template[] { this }, new ArrayList (), true);
        Iterator vsIt = graph.unrolledVarSetIterator ();
        while (vsIt.hasNext ()) {
            UnrolledVarSet varSet = (UnrolledVarSet) vsIt.next ();
            // Normalize so the threshold applies to a probability-like value.
            Factor normalized = varSet.getFactor ().normalize ();
            AssignmentIterator assnIt = normalized.assignmentIterator ();
            while (assnIt.hasNext ()) {
                if (normalized.value (assnIt) > SOME_UNSUPPORTED_THRESHOLD) {
                    addPresentFeatures (weightsPresent [assnIt.indexOfCurrentAssn ()], varSet.fv);
                }
                assnIt.advance ();
            }
        }
    }
}
// For each training instance, unrolls the graph for this template alone
// and, for every clique assignment whose normalized factor value exceeds
// SOME_UNSUPPORTED_THRESHOLD, marks the clique's features as present in
// the corresponding bitset.
private void collectSomeUnsupportedWeights (InstanceList training, BitSet[] weightsPresent) {
    for (int ii = 0; ii < training.size(); ii++) {
        Instance inst = training.get (ii);
        // Unroll with only this template; `true` flag's meaning is not
        // visible here — presumably "use cache/observed values"; verify
        // against the UnrolledGraph constructor.
        UnrolledGraph unrolled = new UnrolledGraph (inst, new Template[] { this }, new ArrayList (), true);
        for (Iterator it = unrolled.unrolledVarSetIterator (); it.hasNext();) {
            UnrolledVarSet vs = (UnrolledVarSet) it.next ();
            Factor f = vs.getFactor ();
            // Normalize so the threshold compares against a normalized value.
            Factor nrmed = f.normalize ();
            for (AssignmentIterator assnIt = nrmed.assignmentIterator (); assnIt.hasNext ();) {
                if (nrmed.value (assnIt) > SOME_UNSUPPORTED_THRESHOLD) {
                    addPresentFeatures (weightsPresent [assnIt.indexOfCurrentAssn ()], vs.fv);
                }
                assnIt.advance ();
            }
        }
    }
}
/**
 * Scans every training instance's unrolled graph and flags features of
 * any assignment whose normalized factor value is above
 * SOME_UNSUPPORTED_THRESHOLD.
 *
 * @param training       instances to scan
 * @param weightsPresent bitsets (one per assignment) to set
 */
private void collectSomeUnsupportedWeights (InstanceList training, BitSet[] weightsPresent)
{
    int count = training.size ();
    for (int n = 0; n < count; n++) {
        Instance current = training.get (n);
        UnrolledGraph unrolledGraph =
            new UnrolledGraph (current, new Template[] { this }, new ArrayList (), true);
        for (Iterator varSets = unrolledGraph.unrolledVarSetIterator (); varSets.hasNext ();) {
            UnrolledVarSet clique = (UnrolledVarSet) varSets.next ();
            Factor normed = clique.getFactor ().normalize ();
            AssignmentIterator assignments = normed.assignmentIterator ();
            while (assignments.hasNext ()) {
                boolean aboveThreshold = normed.value (assignments) > SOME_UNSUPPORTED_THRESHOLD;
                if (aboveThreshold) {
                    addPresentFeatures (weightsPresent [assignments.indexOfCurrentAssn ()], clique.fv);
                }
                assignments.advance ();
            }
        }
    }
}
public int addSomeUnsupportedWeights (InstanceList training) { // add debugging marker unsupportedWeightsAdded = true; int size = weights.length; BitSet[] weightsPresent = new BitSet [size]; for (int i = 0; i < size; i++) { weightsPresent [i] = new BitSet (); } collectSomeUnsupportedWeights (training, weightsPresent); addInCurrentWeights (weightsPresent); SparseVector[] newWeights = new SparseVector [size]; int numAdded = allocateNewWeights (weightsPresent, newWeights); logger.info (this+" some supported weights added = "+numAdded); weights = newWeights; return numAdded; }
public int addSomeUnsupportedWeights (InstanceList training) { // add debugging marker unsupportedWeightsAdded = true; int size = weights.length; BitSet[] weightsPresent = new BitSet [size]; for (int i = 0; i < size; i++) { weightsPresent [i] = new BitSet (); } collectSomeUnsupportedWeights (training, weightsPresent); addInCurrentWeights (weightsPresent); SparseVector[] newWeights = new SparseVector [size]; int numAdded = allocateNewWeights (weightsPresent, newWeights); logger.info (this+" some supported weights added = "+numAdded); weights = newWeights; return numAdded; }
// Adds weights for unsupported features (selected by
// collectSomeUnsupportedWeights) on top of the currently allocated
// weights.  Returns the number of weights added.
public int addSomeUnsupportedWeights (InstanceList training) {
    // add debugging marker
    unsupportedWeightsAdded = true;
    int size = weights.length;
    BitSet[] weightsPresent = new BitSet [size];
    for (int i = 0; i < size; i++) {
        weightsPresent [i] = new BitSet ();
    }
    collectSomeUnsupportedWeights (training, weightsPresent);
    // Preserve every weight that is already allocated.
    addInCurrentWeights (weightsPresent);
    SparseVector[] newWeights = new SparseVector [size];
    int numAdded = allocateNewWeights (weightsPresent, newWeights);
    // NOTE(review): message says "supported" but the method adds
    // unsupported weights — likely a typo in the log text.
    logger.info (this+" some supported weights added = "+numAdded);
    weights = newWeights;
    return numAdded;
}
/**
 * Two-phase training: a short (5-iteration) run with supported weights
 * only, then addition of some unsupported weights to every template,
 * followed by a full training run of numIter iterations.
 *
 * @param acrf           the model to train
 * @param trainingList   training instances
 * @param validationList validation instances (passed through to train)
 * @param testSet        test instances (passed through to train)
 * @param eval           evaluator invoked during training
 * @param numIter        iteration budget for the second (full) phase
 * @return whatever the final train call returns (presumably convergence)
 */
public boolean someUnsupportedTrain (ACRF acrf, InstanceList trainingList, InstanceList validationList,
                                     InstanceList testSet, ACRFEvaluator eval, int numIter)
{
    Optimizable.ByGradientValue macrf = createOptimizable (acrf, trainingList);
    // Phase 1: brief run with only supported weights.
    train (acrf, trainingList, validationList, testSet, eval, 5, macrf);
    // Let every template grow its weight set.
    ACRF.Template[] tmpls = acrf.getTemplates ();
    for (int ti = 0; ti < tmpls.length; ti++)
        tmpls[ti].addSomeUnsupportedWeights (trainingList);
    // Fixed typo in log message ("unsupporetd" -> "unsupported").
    logger.info ("Some unsupported weights initialized. Training...");
    // Phase 2: full run with the enlarged weight set.
    return train (acrf, trainingList, validationList, testSet, eval, numIter, macrf);
}
/**
 * Trains in two phases: a 5-iteration warm-up with supported weights
 * only, then — after each template adds some unsupported weights — a
 * full run of numIter iterations.
 *
 * @param acrf           model being trained
 * @param trainingList   training data
 * @param validationList validation data
 * @param testSet        test data
 * @param eval           evaluator used during training
 * @param numIter        iterations for the second phase
 * @return result of the final train call
 */
public boolean someUnsupportedTrain (ACRF acrf, InstanceList trainingList, InstanceList validationList,
                                     InstanceList testSet, ACRFEvaluator eval, int numIter)
{
    Optimizable.ByGradientValue optimizable = createOptimizable (acrf, trainingList);

    // Warm-up phase with supported weights only.
    train (acrf, trainingList, validationList, testSet, eval, 5, optimizable);

    // Enlarge every template's weight set.
    ACRF.Template[] templates = acrf.getTemplates ();
    for (int idx = 0; idx < templates.length; idx++) {
        templates[idx].addSomeUnsupportedWeights (trainingList);
    }
    logger.info ("Some unsupporetd weights initialized. Training...");

    // Full training phase.
    return train (acrf, trainingList, validationList, testSet, eval, numIter, optimizable);
}
// Two-phase training: 5 warm-up iterations with supported weights only,
// then each template adds some unsupported weights, then a full run of
// numIter iterations.  Returns the final train call's result.
public boolean someUnsupportedTrain (ACRF acrf, InstanceList trainingList, InstanceList validationList,
                                     InstanceList testSet, ACRFEvaluator eval, int numIter) {
    Optimizable.ByGradientValue macrf = createOptimizable (acrf, trainingList);
    // Phase 1: short warm-up run.
    train (acrf, trainingList, validationList, testSet, eval, 5, macrf);
    // Grow each template's weight set before the full run.
    ACRF.Template[] tmpls = acrf.getTemplates ();
    for (int ti = 0; ti < tmpls.length; ti++)
        tmpls[ti].addSomeUnsupportedWeights (trainingList);
    // NOTE(review): "unsupporetd" is a typo in this log message.
    logger.info ("Some unsupporetd weights initialized. Training...");
    // Phase 2: full training run.
    return train (acrf, trainingList, validationList, testSet, eval, numIter, macrf);
}
/**
 * For each training instance, unrolls the graph (for this template
 * only) and records which transitions and weights occur.
 *
 * @param ilist          instances to unroll
 * @param weightsPresent per-assignment bitsets filled in by the graph scan
 */
private void collectWeightsPresent (InstanceList ilist, BitSet[] weightsPresent)
{
    int numInstances = ilist.size ();
    for (int n = 0; n < numInstances; n++) {
        Instance instance = ilist.get (n);
        UnrolledGraph graph = new UnrolledGraph (instance, new Template[] { this }, null, false);
        collectTransitionsPresentForGraph (graph);
        collectWeightsPresentForGraph (graph, weightsPresent);
    }
}
// Walks the unrolled graph's var sets and, for each clique created by
// this template, marks the clique's feature-vector indices as present
// in the bitset for that clique's assignment number.
private void collectWeightsPresentForGraph (UnrolledGraph unrolled, BitSet[] weightsPresent) {
    for (Iterator it = unrolled.unrolledVarSetIterator (); it.hasNext();) {
        UnrolledVarSet clique = (UnrolledVarSet) it.next ();
        // Only cliques instantiated by this template are counted.
        if (clique.tmpl == this) {
            int assn = clique.lookupAssignmentNumber ();
            addPresentFeatures (weightsPresent[assn], clique.fv);
        }
    }
}
// Unrolls each training instance's graph (for this template only) and
// collects which transitions and weights are present in the data.
private void collectWeightsPresent (InstanceList ilist, BitSet[] weightsPresent) {
    for (int inum = 0; inum < ilist.size(); inum++) {
        Instance inst = ilist.get (inum);
        UnrolledGraph unrolled = new UnrolledGraph (inst, new Template[] { this }, null, false);
        collectTransitionsPresentForGraph (unrolled);
        collectWeightsPresentForGraph (unrolled, weightsPresent);
    }
}