/**
 * Returns all possible tags for the token at {@code index}, ordered from most
 * to least probable according to the underlying maxent event model.
 *
 * @param words the tokens of the sentence
 * @param tags the tags already assigned to the preceding tokens
 * @param index the index of the token whose outcomes are to be ranked
 * @param tprobs if non-null, receives the probability of each returned tag,
 *               parallel to the returned array; must be at least as long as
 *               the number of model outcomes
 * @return the model's outcome tags sorted by descending probability
 * @throws UnsupportedOperationException if the classification model is not an event model
 */
public String[] getOrderedTags(List<String> words, List<String> tags, int index, double[] tprobs) {

  if (modelPackage.getPosModel() == null) {
    // Fix: error message previously read "classifcation".
    throw new UnsupportedOperationException("This method can only be called if the "
        + "classification model is an event model!");
  }

  MaxentModel posModel = modelPackage.getPosModel();

  double[] probs = posModel.eval(contextGen.getContext(index,
      words.toArray(new String[0]), tags.toArray(new String[0]), null));

  String[] orderedTags = new String[probs.length];
  for (int i = 0; i < probs.length; i++) {
    // Repeated selection of the current maximum; probs is a local copy
    // returned by eval, so clobbering it below is safe.
    int max = 0;
    for (int ti = 1; ti < probs.length; ti++) {
      if (probs[ti] > probs[max]) {
        max = ti;
      }
    }

    orderedTags[i] = posModel.getOutcome(max);

    if (tprobs != null) {
      tprobs[i] = probs[max];
    }

    // Exclude this outcome from subsequent selection passes.
    probs[max] = 0;
  }

  return orderedTags;
}
public POSModel create(InputStream in) throws IOException { POSModel posModel = new POSModel(new UncloseableInputStream(in)); // The 1.6.x models write the non-default beam size into the model itself. // In 1.5.x the parser configured the beam size when the model was loaded, // this is not possible anymore with the new APIs Version version = posModel.getVersion(); if (version.getMajor() == 1 && version.getMinor() == 5) { if (posModel.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER) == null) { Map<String, String> manifestInfoEntries = new HashMap<>(); // The version in the model must be correct or otherwise version // dependent code branches in other places fail manifestInfoEntries.put("OpenNLP-Version", "1.5.0"); posModel = new POSModel(posModel.getLanguage(), posModel.getPosModel(), 10, manifestInfoEntries, posModel.getFactory()); } } return posModel; }
/**
 * Initializes the current instance with the provided model.
 *
 * @param model the {@link POSModel} used for tagging
 */
public POSTaggerME(POSModel model) {
  POSTaggerFactory factory = model.getFactory();

  // Use the beam size from the model manifest when present, the default otherwise.
  String configuredBeamSize = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  int beamSize = configuredBeamSize == null
      ? POSTaggerME.DEFAULT_BEAM_SIZE
      : Integer.parseInt(configuredBeamSize);

  modelPackage = model;

  contextGen = factory.getPOSContextGenerator(beamSize);
  tagDictionary = factory.getTagDictionary();
  size = beamSize;
  sequenceValidator = factory.getSequenceValidator();

  // Prefer the model's own sequence model; otherwise wrap its event model
  // in a beam search.
  this.model = model.getPosSequenceModel() != null
      ? model.getPosSequenceModel()
      : new opennlp.tools.ml.BeamSearch<>(beamSize, model.getPosModel(), 0);
}
/**
 * Returns all possible tags for the token at {@code index}, ordered from most
 * to least probable according to the underlying maxent event model.
 *
 * @param words the tokens of the sentence
 * @param tags the tags already assigned to the preceding tokens
 * @param index the index of the token whose outcomes are to be ranked
 * @param tprobs if non-null, receives the probability of each returned tag,
 *               parallel to the returned array; must be at least as long as
 *               the number of model outcomes
 * @return the model's outcome tags sorted by descending probability
 * @throws UnsupportedOperationException if the classification model is not an event model
 */
public String[] getOrderedTags(List<String> words, List<String> tags, int index, double[] tprobs) {

  if (modelPackage.getPosModel() == null) {
    // Fix: error message previously read "classifcation".
    throw new UnsupportedOperationException("This method can only be called if the "
        + "classification model is an event model!");
  }

  MaxentModel posModel = modelPackage.getPosModel();

  double[] probs = posModel.eval(contextGen.getContext(index,
      words.toArray(new String[0]), tags.toArray(new String[0]), null));

  String[] orderedTags = new String[probs.length];
  for (int i = 0; i < probs.length; i++) {
    // Repeated selection of the current maximum; probs is a local copy
    // returned by eval, so clobbering it below is safe.
    int max = 0;
    for (int ti = 1; ti < probs.length; ti++) {
      if (probs[ti] > probs[max]) {
        max = ti;
      }
    }

    orderedTags[i] = posModel.getOutcome(max);

    if (tprobs != null) {
      tprobs[i] = probs[max];
    }

    // Exclude this outcome from subsequent selection passes.
    probs[max] = 0;
  }

  return orderedTags;
}
/**
 * Returns all possible tags for the token at {@code index}, ordered from most
 * to least probable according to the underlying maxent event model.
 *
 * @param words the tokens of the sentence
 * @param tags the tags already assigned to the preceding tokens
 * @param index the index of the token whose outcomes are to be ranked
 * @param tprobs if non-null, receives the probability of each returned tag,
 *               parallel to the returned array; must be at least as long as
 *               the number of model outcomes
 * @return the model's outcome tags sorted by descending probability
 * @throws UnsupportedOperationException if the classification model is not an event model
 */
public String[] getOrderedTags(List<String> words, List<String> tags, int index, double[] tprobs) {

  if (modelPackage.getPosModel() == null) {
    // Fix: error message previously read "classifcation".
    throw new UnsupportedOperationException("This method can only be called if the "
        + "classification model is an event model!");
  }

  MaxentModel posModel = modelPackage.getPosModel();

  double[] probs = posModel.eval(contextGen.getContext(index,
      words.toArray(new String[0]), tags.toArray(new String[0]), null));

  String[] orderedTags = new String[probs.length];
  for (int i = 0; i < probs.length; i++) {
    // Repeated selection of the current maximum; probs is a local copy
    // returned by eval, so clobbering it below is safe.
    int max = 0;
    for (int ti = 1; ti < probs.length; ti++) {
      if (probs[ti] > probs[max]) {
        max = ti;
      }
    }

    orderedTags[i] = posModel.getOutcome(max);

    if (tprobs != null) {
      tprobs[i] = probs[max];
    }

    // Exclude this outcome from subsequent selection passes.
    probs[max] = 0;
  }

  return orderedTags;
}
/**
 * Loads a {@link ParserModel} from the stream, registers the POS and
 * constituent tagsets it describes, and builds a {@link Parser} from it.
 *
 * @param aStream the stream holding the serialized parser model
 * @return a parser created from the loaded model
 * @throws Exception if the model cannot be read or the parser cannot be created
 */
@Override
protected Parser produceResource(InputStream aStream) throws Exception {
  ParserModel model = new ParserModel(aStream);
  Properties metadata = getResourceMetaData();

  // Register the POS tagset carried by the parser's embedded tagger model.
  addTagset(new OpenNlpTagsetDescriptionProvider(metadata.getProperty("pos.tagset"),
      POS.class, model.getParserTaggerModel().getPosModel()));

  // Register the constituent tagset described by the parser model itself.
  addTagset(new OpenNlpParserTagsetDescriptionProvider(
      metadata.getProperty("constituent.tagset"), Constituent.class, model, metadata));

  if (printTagSet) {
    getContext().getLogger().log(INFO, getTagset().toString());
  }

  return ParserFactory.create(model);
}
}
public POSModel create(InputStream in) throws IOException { POSModel posModel = new POSModel(new UncloseableInputStream(in)); // The 1.6.x models write the non-default beam size into the model itself. // In 1.5.x the parser configured the beam size when the model was loaded, // this is not possible anymore with the new APIs Version version = posModel.getVersion(); if (version.getMajor() == 1 && version.getMinor() == 5) { if (posModel.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER) == null) { Map<String, String> manifestInfoEntries = new HashMap<>(); // The version in the model must be correct or otherwise version // dependent code branches in other places fail manifestInfoEntries.put("OpenNLP-Version", "1.5.0"); posModel = new POSModel(posModel.getLanguage(), posModel.getPosModel(), 10, manifestInfoEntries, posModel.getFactory()); } } return posModel; }
@Override protected POSTaggerME produceResource(InputStream aStream) throws Exception { // Load the POS tagger model from the location the model provider offers POSModel model = new POSModel(aStream); // end::model-provider-decl[] // Extract tagset information from the model OpenNlpTagsetDescriptionProvider tsdp = new OpenNlpTagsetDescriptionProvider( getResourceMetaData().getProperty("pos.tagset"), POS.class, model.getPosModel()); if (getResourceMetaData().containsKey("pos.tagset.tagSplitPattern")) { tsdp.setTagSplitPattern(getResourceMetaData().getProperty( "pos.tagset.tagSplitPattern")); } addTagset(tsdp); if (printTagSet) { getContext().getLogger().log(INFO, tsdp.toString()); } // tag::model-provider-decl[] // Create a new POS tagger instance from the loaded model return new POSTaggerME(model); } };
public POSModel create(InputStream in) throws IOException { POSModel posModel = new POSModel(new UncloseableInputStream(in)); // The 1.6.x models write the non-default beam size into the model itself. // In 1.5.x the parser configured the beam size when the model was loaded, // this is not possible anymore with the new APIs Version version = posModel.getVersion(); if (version.getMajor() == 1 && version.getMinor() == 5) { if (posModel.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER) == null) { Map<String, String> manifestInfoEntries = new HashMap<>(); // The version in the model must be correct or otherwise version // dependent code branches in other places fail manifestInfoEntries.put("OpenNLP-Version", "1.5.0"); posModel = new POSModel(posModel.getLanguage(), posModel.getPosModel(), 10, manifestInfoEntries, posModel.getFactory()); } } return posModel; }
/**
 * Initializes the current instance with the provided model.
 *
 * @param model the {@link POSModel} used for tagging
 */
public POSTaggerME(POSModel model) {
  POSTaggerFactory factory = model.getFactory();

  // Use the beam size from the model manifest when present, the default otherwise.
  String configuredBeamSize = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  int beamSize = configuredBeamSize == null
      ? POSTaggerME.DEFAULT_BEAM_SIZE
      : Integer.parseInt(configuredBeamSize);

  modelPackage = model;

  contextGen = factory.getPOSContextGenerator(beamSize);
  tagDictionary = factory.getTagDictionary();
  size = beamSize;
  sequenceValidator = factory.getSequenceValidator();

  // Prefer the model's own sequence model; otherwise wrap its event model
  // in a beam search.
  this.model = model.getPosSequenceModel() != null
      ? model.getPosSequenceModel()
      : new opennlp.tools.ml.BeamSearch<>(beamSize, model.getPosModel(), 0);
}
/**
 * Initializes the current instance with the provided model.
 *
 * @param model the {@link POSModel} used for tagging
 */
public POSTaggerME(POSModel model) {
  POSTaggerFactory factory = model.getFactory();

  // Use the beam size from the model manifest when present, the default otherwise.
  String configuredBeamSize = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  int beamSize = configuredBeamSize == null
      ? POSTaggerME.DEFAULT_BEAM_SIZE
      : Integer.parseInt(configuredBeamSize);

  modelPackage = model;

  contextGen = factory.getPOSContextGenerator(beamSize);
  tagDictionary = factory.getTagDictionary();
  size = beamSize;
  sequenceValidator = factory.getSequenceValidator();

  // Prefer the model's own sequence model; otherwise wrap its event model
  // in a beam search.
  this.model = model.getPosSequenceModel() != null
      ? model.getPosSequenceModel()
      : new opennlp.tools.ml.BeamSearch<>(beamSize, model.getPosModel(), 0);
}