/**
 * Loads a {@link LemmatizerModel} from the given input stream.
 *
 * @param modelIn stream containing the serialized lemmatizer model
 * @return the deserialized model
 * @throws IOException if the stream cannot be read or does not hold a valid model
 */
@Override protected LemmatizerModel loadModel(InputStream modelIn) throws IOException {
  return new LemmatizerModel(modelIn);
}
/**
 * Creates a {@code LemmatizerModel} that wraps an already-trained sequence
 * classification model.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained sequence classification model
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, SequenceClassificationModel<String> lemmatizerModel, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Store the trained model in the artifact map under its well-known entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
/**
 * Initializes the current instance with the provided model
 * and the default beam size of 3.
 *
 * @param model the model
 */
public LemmatizerME(LemmatizerModel model) {
  LemmatizerFactory factory = model.getFactory();
  int defaultBeamSize = LemmatizerME.DEFAULT_BEAM_SIZE;
  // A beam size recorded in the model's manifest overrides the default.
  String beamSizeString = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  if (beamSizeString != null) {
    defaultBeamSize = Integer.parseInt(beamSizeString);
  }
  contextGenerator = factory.getContextGenerator();
  beamSize = defaultBeamSize;
  sequenceValidator = factory.getSequenceValidator();
  if (model.getLemmatizerSequenceModel() != null) {
    this.model = model.getLemmatizerSequenceModel();
  } else {
    // NOTE(review): this branch only runs when getLemmatizerSequenceModel()
    // returned null, yet it casts that same null result to MaxentModel and
    // hands it to BeamSearch — a latent NPE. This looks copy-pasted from
    // POSTaggerME, where the else-branch wraps the plain maxent model; the
    // getter for the maxent artifact should be used here instead.
    // TODO: confirm the correct getter on LemmatizerModel and fix.
    this.model = new opennlp.tools.ml.BeamSearch<>(beamSize, (MaxentModel) model.getLemmatizerSequenceModel(), 0);
  }
}
/**
 * Initializes the current instance with the provided model
 * and the default beam size of 3.
 *
 * @param model the model
 */
public LemmatizerME(LemmatizerModel model) {
  LemmatizerFactory factory = model.getFactory();
  int defaultBeamSize = LemmatizerME.DEFAULT_BEAM_SIZE;
  // A beam size recorded in the model's manifest overrides the default.
  String beamSizeString = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  if (beamSizeString != null) {
    defaultBeamSize = Integer.parseInt(beamSizeString);
  }
  contextGenerator = factory.getContextGenerator();
  beamSize = defaultBeamSize;
  sequenceValidator = factory.getSequenceValidator();
  if (model.getLemmatizerSequenceModel() != null) {
    this.model = model.getLemmatizerSequenceModel();
  } else {
    // NOTE(review): this branch only runs when getLemmatizerSequenceModel()
    // returned null, yet it casts that same null result to MaxentModel and
    // hands it to BeamSearch — a latent NPE. This looks copy-pasted from
    // POSTaggerME, where the else-branch wraps the plain maxent model; the
    // getter for the maxent artifact should be used here instead.
    // TODO: confirm the correct getter on LemmatizerModel and fix.
    this.model = new opennlp.tools.ml.BeamSearch<>(beamSize, (MaxentModel) model.getLemmatizerSequenceModel(), 0);
  }
}
// NOTE(review): two return statements fused onto one line — the second is
// unreachable, which is a compile error in Java. These appear to be two
// alternative snippets: one builds a LemmatizerModel from a maxent model plus
// beam size, the other from a sequence model. Also, the factory argument is
// named `posFactory` although a LemmatizerFactory is expected here — likely
// copy-pasted from POS tagger code. TODO: keep exactly one return and rename.
return new LemmatizerModel(languageCode, lemmatizerModel, beamSize, manifestInfoEntries, posFactory); return new LemmatizerModel(languageCode, seqLemmatizerModel, manifestInfoEntries, posFactory);
/**
 * Creates a {@code LemmatizerModel} from a plain maxent model together with the
 * beam size to be used when decoding with it.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained maxent model
 * @param beamSize the beam size to record in the manifest for decoding
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel, int beamSize, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Register the trained model under its well-known artifact entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Persist the beam size in the manifest so decoding can recover it later.
  Properties manifestProperties = (Properties) artifactMap.get(MANIFEST_ENTRY);
  manifestProperties.put(BeamSearch.BEAM_SIZE_PARAMETER, String.valueOf(beamSize));
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
/**
 * Initializes the current instance with the provided model
 * and the default beam size of 3.
 *
 * @param model the model
 */
public LemmatizerME(LemmatizerModel model) {
  LemmatizerFactory factory = model.getFactory();
  int defaultBeamSize = LemmatizerME.DEFAULT_BEAM_SIZE;
  // A beam size recorded in the model's manifest overrides the default.
  String beamSizeString = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  if (beamSizeString != null) {
    defaultBeamSize = Integer.parseInt(beamSizeString);
  }
  contextGenerator = factory.getContextGenerator();
  beamSize = defaultBeamSize;
  sequenceValidator = factory.getSequenceValidator();
  if (model.getLemmatizerSequenceModel() != null) {
    this.model = model.getLemmatizerSequenceModel();
  } else {
    // NOTE(review): this branch only runs when getLemmatizerSequenceModel()
    // returned null, yet it casts that same null result to MaxentModel and
    // hands it to BeamSearch — a latent NPE. This looks copy-pasted from
    // POSTaggerME, where the else-branch wraps the plain maxent model; the
    // getter for the maxent artifact should be used here instead.
    // TODO: confirm the correct getter on LemmatizerModel and fix.
    this.model = new opennlp.tools.ml.BeamSearch<>(beamSize, (MaxentModel) model.getLemmatizerSequenceModel(), 0);
  }
}
/**
 * Loads a {@link LemmatizerModel} from the given input stream.
 *
 * @param modelIn stream containing the serialized lemmatizer model
 * @return the deserialized model
 * @throws IOException if the stream cannot be read or does not hold a valid model
 */
@Override protected LemmatizerModel loadModel(InputStream modelIn) throws IOException {
  return new LemmatizerModel(modelIn);
}
/**
 * Creates a {@code LemmatizerModel} that wraps an already-trained sequence
 * classification model.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained sequence classification model
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, SequenceClassificationModel<String> lemmatizerModel, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Store the trained model in the artifact map under its well-known entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
/**
 * Loads a {@link LemmatizerModel} from the given input stream.
 *
 * @param modelIn stream containing the serialized lemmatizer model
 * @return the deserialized model
 * @throws IOException if the stream cannot be read or does not hold a valid model
 */
@Override protected LemmatizerModel loadModel(InputStream modelIn) throws IOException {
  return new LemmatizerModel(modelIn);
}
/**
 * Creates a {@code LemmatizerModel} that wraps an already-trained sequence
 * classification model.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained sequence classification model
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, SequenceClassificationModel<String> lemmatizerModel, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Store the trained model in the artifact map under its well-known entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
@Override protected LemmatizerME produceResource(InputStream aStream) throws Exception { // Load the lemmatizer model from the location the model provider offers LemmatizerModel model = new LemmatizerModel(aStream); // Create a new POS tagger instance from the loaded model return new LemmatizerME(model); } };
/**
 * Creates a {@code LemmatizerModel} from a plain maxent model together with the
 * beam size to be used when decoding with it.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained maxent model
 * @param beamSize the beam size to record in the manifest for decoding
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel, int beamSize, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Register the trained model under its well-known artifact entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Persist the beam size in the manifest so decoding can recover it later.
  Properties manifestProperties = (Properties) artifactMap.get(MANIFEST_ENTRY);
  manifestProperties.put(BeamSearch.BEAM_SIZE_PARAMETER, String.valueOf(beamSize));
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
// NOTE(review): loads a serialized LemmatizerModel from modelFile and stores it
// through an unchecked cast to the generic resource type T. The enclosing method
// is not visible in this fragment, so the safety of the cast (i.e. that T is
// LemmatizerModel or a supertype at this call site) cannot be confirmed here.
model = (T) new LemmatizerModel(modelFile);
/**
 * Creates a {@code LemmatizerModel} from a plain maxent model together with the
 * beam size to be used when decoding with it.
 *
 * @param languageCode the ISO language code of the training material
 * @param lemmatizerModel the trained maxent model
 * @param beamSize the beam size to record in the manifest for decoding
 * @param manifestInfoEntries additional entries for the model manifest; may be null
 * @param factory the factory used to create lemmatizer components
 */
public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel, int beamSize, Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
  super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
  // Register the trained model under its well-known artifact entry name.
  artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
  // Persist the beam size in the manifest so decoding can recover it later.
  Properties manifestProperties = (Properties) artifactMap.get(MANIFEST_ENTRY);
  manifestProperties.put(BeamSearch.BEAM_SIZE_PARAMETER, String.valueOf(beamSize));
  // Validate the artifact map contents before the instance is used.
  checkArtifactMap();
}
// NOTE(review): two return statements fused onto one line — the second is
// unreachable, which is a compile error in Java. These appear to be two
// alternative snippets: one builds a LemmatizerModel from a maxent model plus
// beam size, the other from a sequence model. Also, the factory argument is
// named `posFactory` although a LemmatizerFactory is expected here — likely
// copy-pasted from POS tagger code. TODO: keep exactly one return and rename.
return new LemmatizerModel(languageCode, lemmatizerModel, beamSize, manifestInfoEntries, posFactory); return new LemmatizerModel(languageCode, seqLemmatizerModel, manifestInfoEntries, posFactory);
// NOTE(review): two return statements fused onto one line — the second is
// unreachable, which is a compile error in Java. These appear to be two
// alternative snippets: one builds a LemmatizerModel from a maxent model plus
// beam size, the other from a sequence model. Also, the factory argument is
// named `posFactory` although a LemmatizerFactory is expected here — likely
// copy-pasted from POS tagger code. TODO: keep exactly one return and rename.
return new LemmatizerModel(languageCode, lemmatizerModel, beamSize, manifestInfoEntries, posFactory); return new LemmatizerModel(languageCode, seqLemmatizerModel, manifestInfoEntries, posFactory);