/**
 * Creates a new {@link Parser} from the given model, using the default
 * beam size and advance percentage defined by {@link AbstractBottomUpParser}.
 *
 * @param model the parser model to build the parser from
 * @return a ready-to-use {@link Parser} instance
 */
public static Parser create(ParserModel model) {
  return create(model,
      AbstractBottomUpParser.defaultBeamSize,
      AbstractBottomUpParser.defaultAdvancePercentage);
}
}
// Build a parser from the loaded model using the factory defaults
// (default beam size and advance percentage).
Parser parser = ParserFactory.create(model);
/**
 * Initializes the current instance with the given context.
 * <p>
 * Retrieves the shared parser model registered under
 * {@code UimaUtil.MODEL_PARAMETER} and creates the parser from it.
 *
 * @param context the UIMA context supplying the logger and resources
 * @throws ResourceInitializationException if the model resource cannot be accessed
 */
public void initialize(UimaContext context) throws ResourceInitializationException {
  super.initialize(context);
  this.context = context;
  mLogger = context.getLogger();
  if (mLogger.isLoggable(Level.INFO)) {
    mLogger.log(Level.INFO, "Initializing the OpenNLP Parser.");
  }
  ParserModel model;
  try {
    // The model is provided as a shared UIMA resource; access failures are
    // wrapped in the initialization exception expected by the framework.
    ParserModelResource modelResource = (ParserModelResource) context
        .getResourceObject(UimaUtil.MODEL_PARAMETER);
    model = modelResource.getModel();
  } catch (ResourceAccessException e) {
    throw new ResourceInitializationException(e);
  }
  mParser = ParserFactory.create(model);
}
/**
 * Parses the Leipzig sample sentences with the chunking parser model and
 * verifies the digest of all rendered parses against a known value.
 */
@Test
public void evalParserModel() throws Exception {
  ParserModel model = new ParserModel(
      new File(getOpennlpDataDir(), "models-sf/en-parser-chunking.bin"));
  MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
  Parser parser = ParserFactory.create(model);
  try (ObjectStream<LeipzigTestSample> samples = createLineWiseStream()) {
    for (LeipzigTestSample sample = samples.read(); sample != null; sample = samples.read()) {
      Parse[] parses = ParserTool.parseLine(String.join(" ", sample.getText()), parser, 1);
      if (parses.length > 0) {
        // Parse.show(StringBuffer) dictates the StringBuffer here.
        StringBuffer rendered = new StringBuffer();
        parses[0].show(rendered);
        digest.update(rendered.toString().getBytes(StandardCharsets.UTF_8));
      } else {
        digest.update("empty".getBytes(StandardCharsets.UTF_8));
      }
    }
  }
  Assert.assertEquals(new BigInteger("312218841713337505306598301082074515847"),
      new BigInteger(1, digest.digest()));
}
}
/** * Verify that training and tagging does not cause * runtime problems. */ @Test public void testTreeInsertParserTraining() throws Exception { ObjectStream<Parse> parseSamples = ParserTestUtil.openTestTrainingData(); HeadRules headRules = ParserTestUtil.createTestHeadRules(); ParserModel model = Parser.train("eng", parseSamples, headRules, 100, 0); opennlp.tools.parser.Parser parser = ParserFactory.create(model); // Tests parsing to make sure the code does not has // a bug which fails always with a runtime exception parser.parse(Parse.parseParse("She was just another freighter from the " + "States and she seemed as commonplace as her name .")); // Test serializing and de-serializing model ByteArrayOutputStream outArray = new ByteArrayOutputStream(); model.serialize(outArray); outArray.close(); new ParserModel(new ByteArrayInputStream(outArray.toByteArray())); // TODO: compare both models } }
TrainingParameters.defaultParams()); // train with the library default parameters
// Build a parser from the freshly trained model (factory defaults).
opennlp.tools.parser.Parser parser = ParserFactory.create(model);
/**
 * Convenience factory: builds a {@link Parser} configured with the
 * {@link AbstractBottomUpParser} default beam size and advance percentage.
 *
 * @param model the model backing the new parser
 * @return a parser using the default search settings
 */
public static Parser create(ParserModel model) {
  return create(
      model,
      AbstractBottomUpParser.defaultBeamSize,
      AbstractBottomUpParser.defaultAdvancePercentage);
}
}
/**
 * Creates a {@link Parser} for the given model, delegating to the full
 * factory method with the defaults from {@link AbstractBottomUpParser}.
 *
 * @param model the parser model to use
 * @return the created parser
 */
public static Parser create(ParserModel model) {
  return create(model, AbstractBottomUpParser.defaultBeamSize,
      AbstractBottomUpParser.defaultAdvancePercentage);
}
}
/**
 * Cross-validates the parser on the given samples.
 *
 * @param samples the parse samples to partition into folds
 * @param nFolds the number of cross-validation folds
 * @throws IOException if reading the samples fails
 */
public void evaluate(ObjectStream<Parse> samples, int nFolds) throws IOException {
  CrossValidationPartitioner<Parse> partitioner =
      new CrossValidationPartitioner<>(samples, nFolds);
  while (partitioner.hasNext()) {
    CrossValidationPartitioner.TrainingSampleStream<Parse> trainingSampleStream =
        partitioner.next();
    ParserModel model;
    // FIX: train on the fold's training partition, not on the full 'samples'
    // stream. Training on 'samples' leaked the test data into training and
    // exhausted the stream after the first fold.
    if (ParserType.CHUNKING.equals(parserType)) {
      model = opennlp.tools.parser.chunking.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else if (ParserType.TREEINSERT.equals(parserType)) {
      model = opennlp.tools.parser.treeinsert.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else {
      throw new IllegalStateException("Unexpected parser type: " + parserType);
    }
    ParserEvaluator evaluator = new ParserEvaluator(ParserFactory.create(model), monitors);
    evaluator.evaluate(trainingSampleStream.getTestSampleStream());
    fmeasure.mergeInto(evaluator.getFMeasure());
  }
}
// Create the parser with explicitly configured beam size and advance percentage.
Parser parser = ParserFactory.create(model, beamSize, advancePercentage);
protected void initializeParser() { InputStream is = null; try { is = new FileInputStream(MODEL_DIR + "/en-parser-chunking.bin"); ParserModel model = new ParserModel(is); parser = ParserFactory.create(model); } catch (IOException e) { //e.printStackTrace(); } finally { if (is != null) { try { is.close(); } catch (IOException e) { // we swallow exception to support the cached run } } } }
// Create a parser from the loaded model using the factory defaults.
Parser parser = ParserFactory.create(model);
// Build the parser with the configured beam size and advance percentage.
parser = ParserFactory.create(new ParserModel(is), beamSize, advancePercentage);
} catch (IOException e) {
  // NOTE(review): only e.getMessage() is logged; consider passing 'e' to the
  // logger as well so the stack trace is preserved.
  LOGGER.error("[OpenNLP Parser] Could not load Parser models: " + e.getMessage());
// Instantiate the parser from the model (default beam size / advance percentage).
Parser parser = ParserFactory.create(model);
// Create a parser instance from the given parser model with default settings.
Parser parser = ParserFactory.create(parserModel);
// Create a parser instance from the given parser model with default settings.
Parser parser = ParserFactory.create(parserModel);
/**
 * Initializes this component from the given UIMA context.
 *
 * @param context the UIMA context to read the parser model resource from
 * @throws ResourceInitializationException if the model resource cannot be accessed
 */
public void initialize(UimaContext context) throws ResourceInitializationException {
  super.initialize(context);
  this.context = context;
  mLogger = context.getLogger();
  if (mLogger.isLoggable(Level.INFO)) {
    mLogger.log(Level.INFO, "Initializing the OpenNLP Parser.");
  }
  ParserModel parserModel;
  try {
    ParserModelResource resource =
        (ParserModelResource) context.getResourceObject(UimaUtil.MODEL_PARAMETER);
    parserModel = resource.getModel();
  } catch (ResourceAccessException e) {
    throw new ResourceInitializationException(e);
  }
  mParser = ParserFactory.create(parserModel);
}
/**
 * Loads a parser model from the stream, registers its POS and constituent
 * tag sets, and returns the created parser.
 *
 * @param aStream the stream to read the {@link ParserModel} from
 * @return the parser built from the loaded model
 * @throws Exception if the model cannot be read
 */
@Override
protected Parser produceResource(InputStream aStream) throws Exception {
  ParserModel model = new ParserModel(aStream);
  Properties meta = getResourceMetaData();
  // Register both tag sets derived from the freshly loaded model.
  addTagset(new OpenNlpTagsetDescriptionProvider(
      meta.getProperty("pos.tagset"), POS.class,
      model.getParserTaggerModel().getPosModel()));
  addTagset(new OpenNlpParserTagsetDescriptionProvider(
      meta.getProperty("constituent.tagset"), Constituent.class, model, meta));
  if (printTagSet) {
    getContext().getLogger().log(INFO, getTagset().toString());
  }
  return ParserFactory.create(model);
}
}
/**
 * Cross-validates the parser on the given samples.
 *
 * @param samples the parse samples to partition into folds
 * @param nFolds the number of cross-validation folds
 * @throws IOException if reading the samples fails
 */
public void evaluate(ObjectStream<Parse> samples, int nFolds) throws IOException {
  CrossValidationPartitioner<Parse> partitioner =
      new CrossValidationPartitioner<>(samples, nFolds);
  while (partitioner.hasNext()) {
    CrossValidationPartitioner.TrainingSampleStream<Parse> trainingSampleStream =
        partitioner.next();
    ParserModel model;
    // FIX: train on the fold's training partition, not on the full 'samples'
    // stream. Training on 'samples' leaked the test data into training and
    // exhausted the stream after the first fold.
    if (ParserType.CHUNKING.equals(parserType)) {
      model = opennlp.tools.parser.chunking.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else if (ParserType.TREEINSERT.equals(parserType)) {
      model = opennlp.tools.parser.treeinsert.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else {
      throw new IllegalStateException("Unexpected parser type: " + parserType);
    }
    ParserEvaluator evaluator = new ParserEvaluator(ParserFactory.create(model), monitors);
    evaluator.evaluate(trainingSampleStream.getTestSampleStream());
    fmeasure.mergeInto(evaluator.getFMeasure());
  }
}
/**
 * Cross-validates the parser on the given samples.
 *
 * @param samples the parse samples to partition into folds
 * @param nFolds the number of cross-validation folds
 * @throws IOException if reading the samples fails
 */
public void evaluate(ObjectStream<Parse> samples, int nFolds) throws IOException {
  CrossValidationPartitioner<Parse> partitioner =
      new CrossValidationPartitioner<>(samples, nFolds);
  while (partitioner.hasNext()) {
    CrossValidationPartitioner.TrainingSampleStream<Parse> trainingSampleStream =
        partitioner.next();
    ParserModel model;
    // FIX: train on the fold's training partition, not on the full 'samples'
    // stream. Training on 'samples' leaked the test data into training and
    // exhausted the stream after the first fold.
    if (ParserType.CHUNKING.equals(parserType)) {
      model = opennlp.tools.parser.chunking.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else if (ParserType.TREEINSERT.equals(parserType)) {
      model = opennlp.tools.parser.treeinsert.Parser.train(languageCode,
          trainingSampleStream, rules, params);
    } else {
      throw new IllegalStateException("Unexpected parser type: " + parserType);
    }
    ParserEvaluator evaluator = new ParserEvaluator(ParserFactory.create(model), monitors);
    evaluator.evaluate(trainingSampleStream.getTestSampleStream());
    fmeasure.mergeInto(evaluator.getFMeasure());
  }
}