// Fragment (enclosing method not visible in this view): samples a topic
// distribution for the first test instance — 10 sampling iterations,
// thinning 1, burn-in 5. NOTE(review): presumably Mallet's
// TopicInferencer.getSampledDistribution(instance, numIterations, thinning, burnIn) — confirm.
TopicInferencer inferencer = model.getInferencer(); double[] testProbabilities = inferencer .getSampledDistribution(testing.get(0), 10, 1, 5);
// Fragment (enclosing method not visible): prepares an empty name->topics result
// map and obtains the inferencer from the wrapped Mallet model. NOTE(review):
// `model.malletModel` is accessed as a field — verify it is intentionally non-private.
Map<String, List<Topic>> result = new HashMap<String, List<Topic>>(); ParallelTopicModel malletModel = model.malletModel; TopicInferencer inferencer = malletModel.getInferencer();
// Fragment (enclosing method not visible): feeds accumulated text through the
// pipe as a single test instance, samples its topic distribution
// (10 iterations, thinning 1, burn-in 5), and prints topic 0's probability.
testing.addThruPipe(new Instance(topicZeroText.toString(), null, "test instance", null)); TopicInferencer inferencer = model.getInferencer(); double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
// Three identical fragments (enclosing try blocks not visible — note the
// unbalanced `} catch`): Java-serialize the model's inferencer to the file
// named by the --inferencer-filename option. NOTE(review): the stream is not
// closed on the exception path — try-with-resources would be safer; cannot
// change here without the surrounding scope.
ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(inferencerFilename.value)); oos.writeObject(topicModel.getInferencer()); oos.close(); } catch (Exception e) {
ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(inferencerFilename.value)); oos.writeObject(topicModel.getInferencer()); oos.close(); } catch (Exception e) {
ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(inferencerFilename.value)); oos.writeObject(topicModel.getInferencer()); oos.close(); } catch (Exception e) {
// Two fragments identical to the earlier test-inference snippet (enclosing
// method not visible): pipe the text in as one instance, sample a topic
// distribution (10 iterations, thinning 1, burn-in 5), print topic 0's share.
testing.addThruPipe(new Instance(topicZeroText.toString(), null, "test instance", null)); TopicInferencer inferencer = model.getInferencer(); double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
testing.addThruPipe(new Instance(topicZeroText.toString(), null, "test instance", null)); TopicInferencer inferencer = model.getInferencer(); double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
// Six identical fragments (surrounding stream setup / try blocks not visible):
// serialize the model's inferencer to an already-open ObjectOutputStream and
// close it. NOTE(review): close() inside the happy path only — if writeObject
// throws, the stream leaks; needs try-with-resources in the enclosing scope.
oos.writeObject(topicModel.getInferencer()); oos.close();
oos.writeObject(topicModel.getInferencer()); oos.close();
oos.writeObject(topicModel.getInferencer()); oos.close();
oos.writeObject(topicModel.getInferencer()); oos.close();
oos.writeObject(topicModel.getInferencer()); oos.close();
oos.writeObject(topicModel.getInferencer()); oos.close();
@Override
protected void doProcess(JCas jCas) throws AnalysisEngineProcessException {
    // Wrap the document text as a single Mallet instance so the trained pipe
    // converts it into the feature representation the model expects.
    InstanceList testing = new InstanceList(pipe);
    testing.addThruPipe(new Instance(jCas.getDocumentText(), null, "from jcas", null));

    // Sample a topic distribution for the document using the configured
    // sampling parameters, then pick the highest-probability topic.
    TopicInferencer inferencer = model.getInferencer();
    double[] distribution =
        inferencer.getSampledDistribution(testing.get(0), iterations, thining, burnIn);
    int dominantTopic = new MaximumIndex(distribution).find();

    // Store the dominant topic's top words as document metadata under metadataKey.
    List<String> topWords = topicWords.forTopic(dominantTopic);
    Metadata annotation = new Metadata(jCas);
    annotation.setKey(metadataKey);
    annotation.setValue(topWords.toString());
    addToJCasIndex(annotation);
}
// Infers the dominant topic for the document text (sampled distribution with
// the configured iterations/thinning/burn-in, then argmax) and stores that
// topic's top words as a Metadata annotation under metadataKey.
// NOTE(review): byte-for-byte duplicate of the preceding doProcess — consider
// deduplicating. `thining` is presumably a misspelling of "thinning" in the
// externally declared field; cannot rename from here.
@Override protected void doProcess(JCas jCas) throws AnalysisEngineProcessException { InstanceList testing = new InstanceList(pipe); testing.addThruPipe(new Instance(jCas.getDocumentText(), null, "from jcas", null)); TopicInferencer inferencer = model.getInferencer(); double[] topicDistribution = inferencer.getSampledDistribution(testing.get(0), iterations, thining, burnIn); int topicIndex = new MaximumIndex(topicDistribution).find(); List<String> inferedTopic = topicWords.forTopic(topicIndex); Metadata md = new Metadata(jCas); md.setKey(metadataKey); md.setValue(inferedTopic.toString()); addToJCasIndex(md); }
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
    super.initialize(context);

    ParallelTopicModel topicModel;
    try {
        getLogger().info("Loading model file " + modelLocation);
        topicModel = ParallelTopicModel.read(modelLocation);
        // Non-positive setting means "use the default": 10% of the topic count.
        if (maxTopicAssignments <= 0) {
            maxTopicAssignments = topicModel.getNumTopics() / 10;
        }
    } catch (Exception e) {
        // Broad catch is deliberate: any model-loading failure aborts initialization.
        throw new ResourceInitializationException(e);
    }
    getLogger().info("Model loaded.");

    // Derive the inferencer and a token->feature-sequence pipe that shares the
    // model's alphabet, so inference-time features line up with training.
    inferencer = topicModel.getInferencer();
    malletPipe = new TokenSequence2FeatureSequence(topicModel.getAlphabet());

    try {
        sequenceGenerator =
            new PhraseSequenceGenerator.Builder()
                .featurePath(tokenFeaturePath)
                .minTokenLength(minTokenLength)
                .lowercase(lowercase)
                .buildStringSequenceGenerator();
    } catch (IOException e) {
        throw new ResourceInitializationException(e);
    }
}