/**
 * Builds an empty store of ambiguity classes.
 *
 * @param ttags the tag set; currently unused here (see the disabled init call below)
 */
public AmbiguityClasses(TTags ttags) {
  this.classes = new HashIndex<>();
  // naClass.init(naWord, ttags);
}
/** Creates a history table whose backing index is pre-sized to {@code capacity}. */
public HistoryTable() {
  this.idx = new HashIndex<>(capacity);
}
/**
 * Creates a delta index over {@code backingIndex}, recording additions in a
 * fresh, empty {@code HashIndex} (delegates to the two-argument constructor).
 *
 * @param backingIndex the read-only base index this delta is layered on
 */
public DeltaIndex(Index<E> backingIndex) {
  this(backingIndex, new HashIndex<>());
}
/**
 * Static factory for a fresh, empty {@link Index} backed by the default
 * hash-based implementation.
 *
 * @param <E> the element type of the index
 * @return a new, empty index
 */
public static <E> Index<E> newIndex() {
  Index<E> empty = new HashIndex<>();
  return empty;
}
/**
 * Constructs an RVFDataset by reading in a file in SVM light format. The
 * {@code lines} parameter is filled with the lines of the file for further
 * processing (if {@code lines} is null, it is assumed no line information is
 * desired).
 *
 * @param filename path of the SVM light format file to read
 * @param lines    receives the raw file lines, or null if not wanted
 * @return the dataset read from the file
 */
public static RVFDataset<String, String> readSVMLightFormat(String filename, List<String> lines) {
  // Fresh indexes for the 4-arg overload — presumably feature and label
  // indexes, in that order; confirm against the overload's signature.
  Index<String> firstIndex = new HashIndex<>();
  Index<String> secondIndex = new HashIndex<>();
  return readSVMLightFormat(filename, firstIndex, secondIndex, lines);
}
/**
 * Constructs a Dataset by reading in a file in SVM light format.
 * The lines parameter is filled with the lines of the file for further
 * processing (if {@code lines} is null, it is assumed no line information is
 * desired).
 *
 * @param filename path of the SVM light format file to read
 * @param lines    receives the raw file lines, or null if not wanted
 * @return the dataset read from the file
 */
public static Dataset<String, String> readSVMLightFormat(String filename, List<String> lines) {
  // Fresh indexes for the 4-arg overload — presumably feature and label
  // indexes, in that order; confirm against the overload's signature.
  Index<String> firstIndex = new HashIndex<>();
  Index<String> secondIndex = new HashIndex<>();
  return readSVMLightFormat(filename, firstIndex, secondIndex, lines);
}
/**
 * Creates a classifier from the given properties, ensuring a class index
 * exists and that it contains the background symbol.
 *
 * @param props configuration properties, handed to the superclass
 */
public PresetSequenceClassifier(Properties props) {
  super(props);
  if (classIndex == null) {
    classIndex = new HashIndex<>();
  }
  // classIndex.add("O");
  // Background symbol is added unconditionally, even to a pre-existing index.
  classIndex.add(flags.backgroundSymbol);
}
/**
 * Reads the tag index from the stream: an int count followed by that many
 * (UTF tag, boolean closed-class flag) pairs. Every tag is added to
 * {@code index}; tags flagged as closed are also recorded in {@code closed}.
 *
 * @param file the stream to read from
 * @throws RuntimeIOException if an I/O error occurs while reading
 */
protected void read(DataInputStream file) {
  try {
    final int numTags = file.readInt();
    index = new HashIndex<>();
    for (int t = 0; t < numTags; t++) {
      String tag = file.readUTF();
      boolean isClosedClass = file.readBoolean();
      index.add(tag);
      if (isClosedClass) {
        closed.add(tag);
      }
    }
  } catch (IOException e) {
    throw new RuntimeIOException(e);
  }
}
/** * This assumes each line is one value and creates index by adding values in the order of the lines in the file * @param file Which file to load * @return An index built out of the lines in the file */ public static Index<String> loadFromFileWithList(String file) { Index<String> index = new HashIndex<>(); try (BufferedReader br = new BufferedReader(new FileReader(file))) { for (String line; (line = br.readLine()) != null; ) { index.add(line.trim()); } } catch (Exception e) { throw new RuntimeIOException(e); } // forget it return index; }
/**
 * Builds an index of all (x, y) pairs: x ranges over the px array and, for
 * each x, y ranges over numY(x) outcomes.
 *
 * @return the populated pair index
 */
public Index<IntPair> createIndex() {
  Index<IntPair> pairIndex = new HashIndex<>();
  for (int xi = 0; xi < px.length; xi++) {
    // hoist the per-x outcome count out of the inner loop condition
    final int outcomes = numY(xi);
    for (int yi = 0; yi < outcomes; yi++) {
      pairIndex.add(new IntPair(xi, yi));
    }
  }
  return pairIndex;
}
/**
 * Maps a tag to its smoothed tag-projection bin, lazily creating the
 * projection index (seeded from tagIndex) on first use. Negative tags are
 * returned unchanged; the lazy init still happens first, as in the original.
 *
 * @param tag the tag id to project
 * @return the id of the projected (binned) tag in the projection index
 */
private short tagProject(short tag) {
  if (smoothTPIndex == null) {
    smoothTPIndex = new HashIndex<>(tagIndex);
  }
  if (tag < 0) {
    return tag;
  }
  String binned = TP_PREFIX + smoothTP.project(smoothTPIndex.get(tag));
  return (short) smoothTPIndex.addToIndex(binned);
}
/**
 * Initializes fresh word/tag indices and a lexicon from the parser options,
 * then feeds every training tree (with its weight) through the lexicon and
 * the recursive beta initialization, finalizing lexicon training at the end.
 */
private void initialBetasAndLexicon() {
  wordIndex = new HashIndex<>();
  tagIndex = new HashIndex<>();
  lex = op.tlpParams.lex(op, wordIndex, tagIndex);
  lex.initializeTraining(trainSize);
  for (Tree t : trees) {
    double w = treeWeights.getCount(t);
    lex.incrementTreesRead(w);
    // recursive overload: (tree, position, weight)
    initialBetasAndLexicon(t, 0, w);
  }
  lex.finishTraining();
}
/**
 * Lazily builds the tag index from the class index: each class label is split
 * on '-' and only the last piece is indexed (e.g. "B-PER" contributes "PER"),
 * plus the background symbol. If BIO NER priors are enabled, also lazily
 * loads the entity matrices keyed by this tag index.
 */
public void loadTagIndex() {
  if (tagIndex == null) {
    tagIndex = new HashIndex<>();
    for (String tag: classIndex.objectsList()) {
      String[] parts = tag.split("-");
      // if (parts.length > 1)
      // NOTE(review): condition above is disabled, so unsplit tags (length 1)
      // are indexed as themselves via parts[0].
      tagIndex.add(parts[parts.length-1]);
    }
    tagIndex.add(flags.backgroundSymbol);
  }
  if (flags.useNERPriorBIO) {
    // only read the matrices once
    if (entityMatrices == null) entityMatrices = readEntityMatrices(flags.entityMatrix, tagIndex);
  }
}
/**
 * Rebuilds the state index from the split counts: for each base state key,
 * one indexed entry per split number.
 */
public void buildStateIndex() {
  stateIndex = new HashIndex<>();
  for (String baseState : stateSplitCounts.keySet()) {
    // count is re-read each iteration, matching the original loop condition
    for (int split = 0; split < stateSplitCounts.getIntCount(baseState); ++split) {
      stateIndex.addToIndex(state(baseState, split));
    }
  }
}
/**
 * Create an index for each parameter — the prior probs and the features with
 * all of their values.
 *
 * @return an index containing one {@code IntUni} per class (the prior) and
 *         one {@code IntTriple} per (class, feature, value) combination
 */
protected Index<IntTuple> createIndex() {
  Index<IntTuple> paramIndex = new HashIndex<>();
  for (int cls = 0; cls < numClasses; cls++) {
    // class prior parameter
    paramIndex.add(new IntUni(cls));
    for (int feat = 0; feat < numFeatures; feat++) {
      for (int v = 0; v < numValues[feat]; v++) {
        paramIndex.add(new IntTriple(cls, feat, v));
      }
    }
  }
  return paramIndex;
}
/**
 * Scores the classifier against the dataset: rebuilds the label index from
 * both the classifier's labels and the dataset's, resets the counts, tallies
 * a guess-vs-gold pair for every datum, and returns the resulting F-measure.
 *
 * @param classifier the classifier to evaluate
 * @param data       the dataset providing datums and gold labels
 * @return the F-measure over the dataset
 */
public <F> double score(Classifier<L,F> classifier, GeneralDataset<L,F> data) {
  labelIndex = new HashIndex<>();
  labelIndex.addAll(classifier.labels());
  labelIndex.addAll(data.labelIndex.objectsList());
  clearCounts();
  int[] goldLabels = data.getLabelsArray();
  for (int i = 0; i < data.size(); i++) {
    Datum<L, F> example = data.getRVFDatum(i);
    L predicted = classifier.classOf(example);
    addGuess(predicted, labelIndex.get(goldLabels[i]));
  }
  finalizeCounts();
  return getFMeasure();
}