/**
 * <!-- lookup(Feature,boolean) --> Looks up a feature's index by calling
 * <code>lookup(f, training, -1)</code>.  See {@link #lookup(Feature,boolean,int)} for more
 * details.
 *
 * @param f The feature to look up.
 * @param training Whether or not the learner is currently training.
 * @return The integer key that the feature maps to.
 **/
public int lookup(Feature f, boolean training) {
    return lookup(f, training, -1);
}
/**
 * Sets the value of {@link #parentLexicon} and makes sure that any features marked for removal
 * in this lexicon are the identical objects also present in the parent.  This is useful in
 * particular just after lexicons have been read from disk, when deserialization has produced
 * distinct but equal feature objects in the two lexicons.
 *
 * @param p The new parent lexicon.
 **/
public void setParent(Lexicon p) {
    parentLexicon = p;
    int N = lexiconInv.size();
    for (int i = 0; i < N; ++i) {
        Feature f = lexiconInv.get(i);
        // NOTE(review): a negative entry in 'parents' appears to mark a feature whose
        // canonical object must come from the parent lexicon -- confirm this convention.
        if (f != null && parents.get(i) < 0) {
            Feature pf = p.lookupKey(p.lookup(f));
            if (pf == null) {
                // Fatal: every such feature is required to exist in the parent.
                System.err.println("LBJava ERROR: Can't find feature " + f
                        + " in parent lexicon.");
                new Exception().printStackTrace();
                System.exit(1);
            }
            // Replace our (equal) feature object with the parent's identical instance so
            // that reference-equality comparisons between the lexicons succeed.
            lexiconInv.set(i, pf);
            // Re-key the forward map under the parent's object, preserving the mapped value.
            if (lexicon != null)
                lexicon.put(pf, lexicon.remove(f));
        }
    }
}
countPolicy == Lexicon.CountPolicy.perClass ? labelIndexes[0] : -1; for (int i = 0; i < featureIndexes.length; ++i) { lexicon.lookup(lexicon.lookupKey(featureIndexes[i]), true, labelIndex);
/**
 * <!-- lookup(Feature) --> Looks up a feature's index by calling
 * <code>lookup(f, false)</code>.  See {@link #lookup(Feature,boolean,int)} for more details.
 *
 * @param f The feature to look up.
 * @return The integer key that the feature maps to.
 **/
public int lookup(Feature f) {
    return lookup(f, false, -1);
}
int key = labelLexicon.lookup(f); score = ((BiasedRandomWeightVector) network.get(key)).dot(exampleFeatures,
/**
 * Determines whether the provided feature's weight exceeds this learner's pruning threshold;
 * callers discard features for which this method returns <code>false</code>.
 *
 * @param lex The lexicon used to translate the feature into a weight index.
 * @param f The feature to check.
 * @return <code>true</code> iff the feature's weight is strictly greater than
 *         {@link #threshold}.
 **/
protected boolean hasWeight(Lexicon lex, Feature f) {
    int index = lex.lookup(f);
    return getWeight(index) > this.threshold;
}
int key = labelLexicon.lookup(f); LinearThresholdUnit ltu = (LinearThresholdUnit) network.get(key); if (ltu != null)
/**
 * Establishes <code>l</code> as this learner's labeling classifier and rebuilds the label
 * lexicon from its two allowable values.  If <code>l</code> is not a binary classifier, an
 * error is reported and the program exits.
 *
 * @param l A labeling classifier.
 **/
public void setLabeler(Classifier l) {
    if (l == null || l.allowableValues().length != 2) {
        System.err.println("Error: " + name
                + ": An LTU must be given a single binary label classifier.");
        new Exception().printStackTrace();
        System.exit(1);
    }

    super.setLabeler(l);
    allowableValues = l.allowableValues();
    labelLexicon.clear();

    // Register both label values in the lexicon, then allocate a prediction per label.
    for (short v = 0; v < 2; ++v)
        labelLexicon.lookup(new DiscretePrimitiveStringFeature(l.containingPackage, l.name,
                "", allowableValues[v], v, (short) 2), true);
    for (int v = 0; v < 2; ++v)
        createPrediction(v);
}
for (int i = 0; i < allExamples.size(); i++) { Feature label = labelLexicon.lookupKey(allLabels.get(i)); int newLabel = newLabelLexicon.lookup(label, true); same &= newLabel == allLabels.get(i); allLabels.set(i, newLabel); new DiscretePrimitiveStringFeature(labeler.containingPackage, labeler.name, "", allowableValues[1], (short) 1, (short) 2); int p = newLabelLexicon.lookup(f); int positive = 0; newLabelLexicon.lookup(f, true); newLabelLexicon.lookup(new DiscretePrimitiveStringFeature( labeler.containingPackage, labeler.name, "", allowableValues[0], (short) 0, (short) 2), true);
/**
 * Sets the labels list.  The labeler must be a classifier with exactly two allowable values;
 * otherwise an error is reported and the program exits.
 *
 * @param l A new label producing classifier.
 **/
public void setLabeler(Classifier l) {
    // Bug fix: the previous guard, !(l == null || l.allowableValues().length == 2), let a
    // null labeler pass the check only to dereference it below (NullPointerException at
    // l.containingPackage).  Reject null explicitly, matching the guard used by the sibling
    // setLabeler implementation.
    if (l == null || l.allowableValues().length != 2) {
        System.err.println("Error: " + name
                + ": An LTU must be given a single binary label classifier.");
        new Exception().printStackTrace();
        System.exit(1);
    }

    super.setLabeler(l);
    // l is guaranteed non-null here, so the former "l == null ? null : ..." is unnecessary.
    allowableValues = l.allowableValues();
    labelLexicon.clear();
    labelLexicon.lookup(new DiscretePrimitiveStringFeature(l.containingPackage, l.name, "",
            allowableValues[0], (short) 0, (short) 2), true);
    labelLexicon.lookup(new DiscretePrimitiveStringFeature(l.containingPackage, l.name, "",
            allowableValues[1], (short) 1, (short) 2), true);
    predictions = new FVector(2);
    createPrediction(0);
    createPrediction(1);
}
id = lexicon.lookup(f, trainingMode, -1);
id = lexicon.lookup(f, trainingMode, -1);
key = newLabelLexicon.lookup(f); score = score(exampleFeatures, exampleValues, key);
/** * Determine if the provided feature has sum of weights greater than a threshold value, * and discard the feature if it falls below. * @param lex the lexicon. * @param f the feature. * @return true if the feature has any value, there is a */ protected boolean hasWeight(Lexicon lex, Feature f) { int featureindex = lex.lookup(f); // we assume each element of the network is of the same type, if that type is sparse averaged // perceptron, we check both the averaged and current weight double sum; if (this.ltuLearner instanceof SparseAveragedPerceptron) { SparseAveragedPerceptron sap = (SparseAveragedPerceptron) this.ltuLearner; double wt = sap.getWeightVector().getRawWeights().get(featureindex); double avg = sap.getAveragedWeightVector().getRawWeights().get(featureindex); sum = Math.abs(wt); sum += Math.abs(avg); } else { double wt = this.ltuLearner.getWeightVector().getRawWeights().get(featureindex); sum = Math.abs(wt); } // if the value is sufficiently large, then we have a good weight and should keep. if (sum > this.threshold) return true; else return false; }
key = labelLexicon.lookup(f); score = ((BiasedRandomWeightVector) network.get(key)).dot(exampleFeatures,
key = labelLexicon.lookup(f); LinearThresholdUnit ltu = (LinearThresholdUnit) network.get(key); if (ltu != null)
Feature label = labelVector.getFeature(f); if (label.isDiscrete()) labelArray[f] = labelLexicon.lookup(label, true); else labelArray[f] = labelLexicon.lookup(label.getFeatureKey(labelLexicon), true); labelValues[f] += label.getStrength(); createPrediction(labelArray[f]); Feature feature = featureVector.getFeature(f); exampleArrayFeatures[f] = lexicon.lookup(feature.getFeatureKey(lexicon, training, labelIndex), training, labelIndex); exampleArrayValues[f] += feature.getStrength();
int i = 0; double sum = 0; int featureindex = lex.lookup(f);