// Obtain an inferencer from the trained model and sample a topic distribution
// for a new document: 100 sampling iterations, record every 10th sample
// (thinning), 10 burn-in iterations — argument order matches
// getSampledDistribution(instance, numIterations, thinning, burnIn).
TopicInferencer inferencer = model.getInferencer(); double[] topicProbs = inferencer.getSampledDistribution(newInstance, 100, 10, 10);
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents in the given language.
 *
 * @param language index of the language whose per-language model state backs the inferencer
 * @return a new inferencer built from this model's counts for that language
 */
public TopicInferencer getInferencer(int language) {
    return new TopicInferencer(
        languageTypeTopicCounts[language],
        languageTokensPerTopic[language],
        alphabets[language],
        alpha,
        betas[language],
        betaSums[language]);
}
TopicInferencer.read(new File(inferencerFilename.value)); inferencer.setRandomSeed(randomSeed.value); inferencer.writeInferredDistributions(instances, new File(docTopicsFile.value), numIterations.value, sampleInterval.value, burnInIterations.value,
TopicInferencer.read(new File(inferencerFilename.value)); inferencer.setRandomSeed(randomSeed.value); inferencer.writeInferredDistributions(instances, new File(docTopicsFile.value), numIterations.value, sampleInterval.value, burnInIterations.value,
.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
TopicInferencer.read(new File(inferencerFilename.value)); inferencer.setRandomSeed(randomSeed.value); inferencer.writeInferredDistributions(instances, new File(docTopicsFile.value), numIterations.value, sampleInterval.value, burnInIterations.value,
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents in the given language.
 *
 * @param language index of the language whose per-language model state backs the inferencer
 * @return a new inferencer built from this model's counts for that language
 */
public TopicInferencer getInferencer(int language) {
    return new TopicInferencer(
        languageTypeTopicCounts[language],
        languageTokensPerTopic[language],
        alphabets[language],
        alpha,
        betas[language],
        betaSums[language]);
}
// Sample this instance's topic distribution, then append `doc` to the builder
// — NOTE(review): `doc` and `builder` come from the surrounding (unseen) scope;
// presumably the formatted per-document output row — confirm against caller.
getSampledDistribution(instance, numIterations, thinning, burnIn); builder.append(doc);
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents in the given language.
 *
 * @param language index of the language whose per-language model state backs the inferencer
 * @return a new inferencer built from this model's counts for that language
 */
public TopicInferencer getInferencer(int language) {
    return new TopicInferencer(
        languageTypeTopicCounts[language],
        languageTokensPerTopic[language],
        alphabets[language],
        alpha,
        betas[language],
        betaSums[language]);
}
// Infer a topic distribution for the first pipeline instance
// (numIterations Gibbs sweeps, keeping every `thinning`-th sample after
// `burnIn` iterations), then start an empty result list of topics.
Instance instance = instances.get(0); double[] distribution = inferencer.getSampledDistribution( instance, numIterations, thinning, burnIn); List<Topic> topics = new ArrayList<Topic>();
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents, backed by this model's type-topic counts.
 *
 * @return a new inferencer sharing this model's state and hyperparameters
 */
public TopicInferencer getInferencer() {
    return new TopicInferencer(
        typeTopicCounts,
        tokensPerTopic,
        data.get(0).instance.getDataAlphabet(),
        alpha,
        beta,
        betaSum);
}
// Sample this instance's topic distribution, then append `doc` to the builder
// — NOTE(review): `doc` and `builder` come from the surrounding (unseen) scope;
// presumably the formatted per-document output row — confirm against caller.
getSampledDistribution(instance, numIterations, thinning, burnIn); builder.append(doc);
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents, backed by this model's type-topic counts.
 *
 * @return a new inferencer sharing this model's state and hyperparameters
 */
public TopicInferencer getInferencer() {
    return new TopicInferencer(
        typeTopicCounts,
        tokensPerTopic,
        data.get(0).instance.getDataAlphabet(),
        alpha,
        beta,
        betaSum);
}
// Sample this instance's topic distribution, then append `doc` to the builder
// — NOTE(review): `doc` and `builder` come from the surrounding (unseen) scope;
// presumably the formatted per-document output row — confirm against caller.
getSampledDistribution(instance, numIterations, thinning, burnIn); builder.append(doc);
/**
 * Creates a {@link TopicInferencer} that estimates topic distributions for
 * previously unseen documents, backed by this model's type-topic counts.
 *
 * @return a new inferencer sharing this model's state and hyperparameters
 */
public TopicInferencer getInferencer() {
    return new TopicInferencer(
        typeTopicCounts,
        tokensPerTopic,
        data.get(0).instance.getDataAlphabet(),
        alpha,
        beta,
        betaSum);
}
// Sample a distribution for the first held-out document (10 iterations,
// thinning 1, burn-in 5) and print the probability mass of topic 0.
double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
// Sample a distribution for the first held-out document (10 iterations,
// thinning 1, burn-in 5) and print the probability mass of topic 0.
double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
// Sample a distribution for the first held-out document (10 iterations,
// thinning 1, burn-in 5) and print the probability mass of topic 0.
double[] testProbabilities = inferencer.getSampledDistribution(testing.get(0), 10, 1, 5); System.out.println("0\t" + testProbabilities[0]);
/**
 * Infers the dominant topic for the document text in the given CAS and
 * records that topic's top words as a {@code Metadata} annotation.
 *
 * @param jCas the CAS whose document text is analysed
 * @throws AnalysisEngineProcessException if processing fails
 */
@Override
protected void doProcess(JCas jCas) throws AnalysisEngineProcessException {
    // Push the raw text through the model's pipe so its features line up
    // with the trained model's alphabet.
    InstanceList docs = new InstanceList(pipe);
    docs.addThruPipe(new Instance(jCas.getDocumentText(), null, "from jcas", null));

    // Sample a topic distribution and select the most probable topic.
    TopicInferencer inferencer = model.getInferencer();
    double[] distribution =
        inferencer.getSampledDistribution(docs.get(0), iterations, thining, burnIn);
    int bestTopic = new MaximumIndex(distribution).find();

    // Record the winning topic's top words on the CAS as metadata.
    List<String> topWords = topicWords.forTopic(bestTopic);
    Metadata metadata = new Metadata(jCas);
    metadata.setKey(metadataKey);
    metadata.setValue(topWords.toString());
    addToJCasIndex(metadata);
}
/**
 * Infers the dominant topic for the document text in the given CAS and
 * records that topic's top words as a {@code Metadata} annotation.
 *
 * @param jCas the CAS whose document text is analysed
 * @throws AnalysisEngineProcessException if processing fails
 */
@Override
protected void doProcess(JCas jCas) throws AnalysisEngineProcessException {
    // Push the raw text through the model's pipe so its features line up
    // with the trained model's alphabet.
    InstanceList docs = new InstanceList(pipe);
    docs.addThruPipe(new Instance(jCas.getDocumentText(), null, "from jcas", null));

    // Sample a topic distribution and select the most probable topic.
    TopicInferencer inferencer = model.getInferencer();
    double[] distribution =
        inferencer.getSampledDistribution(docs.get(0), iterations, thining, burnIn);
    int bestTopic = new MaximumIndex(distribution).find();

    // Record the winning topic's top words on the CAS as metadata.
    List<String> topWords = topicWords.forTopic(bestTopic);
    Metadata metadata = new Metadata(jCas);
    metadata.setKey(metadataKey);
    metadata.setValue(topWords.toString());
    addToJCasIndex(metadata);
}