// Custom Java serialization hook: writes the pipe, then the parameter count.
// NOTE(review): this chunk is truncated — the method body continues beyond the
// visible source (the parameters themselves are presumably written next); verify
// against the full file before editing.
private void writeObject(ObjectOutputStream out) throws IOException out.writeObject(getInstancePipe()); int np = parameters.length; out.writeInt(np);
/** Calculates the confidence in the tagging of an {@link Instance}. */
public double estimateConfidenceFor (Instance instance, Object[] startTags, Object[] inTags) {
    // Wrap the instance only when its alphabets agree with this pipe; otherwise
    // classify it as-is. The confidence is the score assigned to the "correct" label.
    Instance toClassify = Alphabet.alphabetsMatch(instance, this.pipe)
            ? new SequenceConfidenceInstance(instance)
            : instance;
    return this.meClassifier.classify(toClassify).getLabelVector().value(this.correct);
}
@Override public void print(PrintWriter out) { final Alphabet dict = getAlphabet(); final LabelAlphabet labelDict = getLabelAlphabet(); int numFeatures = dict.size() + 1; int numLabels = labelDict.size(); // Include the feature weights according to each label for (int li = 0; li < numLabels; li++) { out.println ("FEATURES FOR CLASS "+labelDict.lookupObject (li)); out.println (" <default> "+parameters [li*numFeatures + defaultFeatureIndex]); for (int i = 0; i < defaultFeatureIndex; i++) { Object name = dict.lookupObject (i); double weight = parameters [li*numFeatures + i]; out.println (" "+name+" "+weight); } } }
public void getClassificationScores (Instance instance, double[] scores) { getUnnormalizedClassificationScores(instance, scores); // Move scores to a range where exp() is accurate, and normalize int numLabels = getLabelAlphabet().size(); double max = MatrixOps.max (scores); double sum = 0; for (int li = 0; li < numLabels; li++) sum += (scores[li] = Math.exp (scores[li] - max)); for (int li = 0; li < numLabels; li++) { scores[li] /= sum; // xxxNaN assert (!Double.isNaN(scores[li])); } }
public Classification classify (Instance instance) { int numClasses = getLabelAlphabet().size(); double[] scores = new double[numClasses]; //getClassificationScores (instance, scores); getClassificationScores(instance, scores); // Create and return a Classification object return new Classification (instance, this, new LabelVector (getLabelAlphabet(), scores)); }
// Copy the warm-start classifier's weights and bias-column index into this trainer.
this.parameters = classifier.getParameters(); this.defaultFeatureIndex = classifier.getDefaultFeatureIndex(); assert (initialClassifier.getInstancePipe() == instances.getPipe()); new MaxEnt (instances.getPipe(), parameters);
// NOTE(review): the MaxEnt constructed on the last statement is discarded.
// Compare the parallel code elsewhere in this file that assigns the result to
// this.theClassifier — this looks like a missing assignment; verify intent.
// Adopt the existing classifier's feature-selection state and bias index, then
// rebuild theClassifier over the training set's pipe with the same parameters.
this.perLabelFeatureSelection = theClassifier.perClassFeatureSelection; this.defaultFeatureIndex = theClassifier.defaultFeatureIndex; assert (initialClassifier.getInstancePipe() == trainingSet.getPipe()); this.theClassifier = new MaxEnt (trainingSet.getPipe(), parameters, featureSelection, perLabelFeatureSelection);
/** * Use only the default features to set the topic prior (use no document features) */ public void setAlphas() { double[] parameters = dmrParameters.getParameters(); alphaSum = 0.0; smoothingOnlyMass = 0.0; // Use only the default features to set the topic prior (use no document features) for (int topic=0; topic < numTopics; topic++) { alpha[topic] = Math.exp( parameters[ (topic * numFeatures) + defaultFeatureIndex ] ); alphaSum += alpha[topic]; smoothingOnlyMass += alpha[topic] * beta / (tokensPerTopic[topic] + betaSum); cachedCoefficients[topic] = alpha[topic] / (tokensPerTopic[topic] + betaSum); } }
// Fresh MaxEnt over the unlabeled data's pipe with all parameters zeroed.
p = new MaxEnt(unlabeled.getPipe(),new double[numParameters]);
/**
 * Returns the number of model parameters implied by this trainer's pipe.
 * Both alphabets must already exist for the count to be well-defined.
 */
public int getNumParameters () {
    assert (this.instancePipe.getTargetAlphabet() != null);
    assert (this.instancePipe.getDataAlphabet() != null);
    return MaxEnt.getNumParameters (this.instancePipe);
}
// Score the instance, grab its feature vector, and find the gold-label index.
// NOTE(review): fragment of a larger method — li's use continues past this chunk.
this.theClassifier.getClassificationScores (instance, scores); FeatureVector fv = (FeatureVector) instance.getData (); int li = labeling.getBestIndex();
/**
 * Sets one weight in the flattened per-label parameter matrix.
 *
 * @param classIndex   the label (row) index
 * @param featureIndex the feature (column) index; the extra last column is the
 *                     default feature
 * @param value        the new weight
 */
public void setParameter (int classIndex, int featureIndex, double value) {
    int rowWidth = getAlphabet().size() + 1; // +1 for the default-feature column
    parameters[classIndex * rowWidth + featureIndex] = value;
}
/**
 * Fills {@code scores} with the raw (unnormalized) per-label linear scores for
 * the instance: each label's default-feature weight plus the dot product of its
 * weight row with the instance's feature vector, restricted by feature selection.
 */
public void getUnnormalizedClassificationScores (Instance instance, double[] scores)
{
    // arrayOutOfBounds if pipe has grown since training
    // int numFeatures = getAlphabet().size() + 1;
    // Use the training-time feature count (bias index + 1) rather than the live
    // alphabet size, so features added after training are ignored, not a crash.
    int numFeatures = this.defaultFeatureIndex + 1;
    int numLabels = getLabelAlphabet().size();
    assert (scores.length == numLabels);
    FeatureVector fv = (FeatureVector) instance.getData ();
    // Make sure the feature vector's feature dictionary matches
    // what we are expecting from our data pipe (and thus our notion
    // of feature probabilities.
    assert (fv.getAlphabet () == this.instancePipe.getDataAlphabet ());
    // Include the feature weights according to each label
    for (int li = 0; li < numLabels; li++) {
        // Bias term plus the label's weight-row dot the feature vector; a
        // per-class mask is used when present, else the shared one.
        scores[li] = parameters[li*numFeatures + defaultFeatureIndex]
            + MatrixOps.rowDotProduct (parameters, numFeatures, li, fv, defaultFeatureIndex,
                                       (perClassFeatureSelection == null
                                        ? featureSelection
                                        : perClassFeatureSelection[li]));
    }
}
// Temperature-scaled model scores for instance ii, then fold them into each
// GE constraint's expectation accumulator, weighted by the instance weight.
// NOTE(review): fragment — the enclosing loop and the for-block's close are outside view.
classifier.getClassificationScoresWithTemperature(instance, temperature, scores[ii]); for (MaxEntGEConstraint constraint : constraints) { constraint.computeExpectations(fv,scores[ii],instanceWeight);
// Copy the warm-start classifier's weights and bias-column index into this trainer.
this.parameters = classifier.getParameters(); this.defaultFeatureIndex = classifier.getDefaultFeatureIndex(); assert (initialClassifier.getInstancePipe() == instances.getPipe()); new MaxEnt (instances.getPipe(), parameters);
// NOTE(review): the MaxEnt constructed on the last statement is discarded —
// this looks like a missing assignment to a classifier field; verify intent.
// Adopt the existing classifier's feature-selection state and bias index, then
// rebuild theClassifier over the training set's pipe with the same parameters.
this.perLabelFeatureSelection = theClassifier.perClassFeatureSelection; this.defaultFeatureIndex = theClassifier.defaultFeatureIndex; assert (initialClassifier.getInstancePipe() == trainingSet.getPipe()); this.theClassifier = new MaxEnt (trainingSet.getPipe(), parameters, featureSelection, perLabelFeatureSelection);
public void getClassificationScores (Instance instance, double[] scores) { getUnnormalizedClassificationScores(instance, scores); // Move scores to a range where exp() is accurate, and normalize int numLabels = getLabelAlphabet().size(); double max = MatrixOps.max (scores); double sum = 0; for (int li = 0; li < numLabels; li++) sum += (scores[li] = Math.exp (scores[li] - max)); for (int li = 0; li < numLabels; li++) { scores[li] /= sum; // xxxNaN assert (!Double.isNaN(scores[li])); } }
public Classification classify (Instance instance) { int numClasses = getLabelAlphabet().size(); double[] scores = new double[numClasses]; //getClassificationScores (instance, scores); getClassificationScores(instance, scores); // Create and return a Classification object return new Classification (instance, this, new LabelVector (getLabelAlphabet(), scores)); }
/** * Use only the default features to set the topic prior (use no document features) */ public void setAlphas() { double[] parameters = dmrParameters.getParameters(); alphaSum = 0.0; smoothingOnlyMass = 0.0; // Use only the default features to set the topic prior (use no document features) for (int topic=0; topic < numTopics; topic++) { alpha[topic] = Math.exp( parameters[ (topic * numFeatures) + defaultFeatureIndex ] ); alphaSum += alpha[topic]; smoothingOnlyMass += alpha[topic] * beta / (tokensPerTopic[topic] + betaSum); cachedCoefficients[topic] = alpha[topic] / (tokensPerTopic[topic] + betaSum); } }
// Fresh MaxEnt over the unlabeled data's pipe with all parameters zeroed.
p = new MaxEnt(unlabeled.getPipe(),new double[numParameters]);