/**
 * Convenience overload: runs estimation for the configured default
 * iteration count by delegating to {@code estimate(int)} with
 * {@code numIterations}.
 *
 * @throws IOException if the delegated estimation run fails on I/O
 */
public void estimate () throws IOException {
	estimate (numIterations);
}
/**
 * Restores a {@code PolylingualTopicModel} previously saved with Java
 * serialization, then rebuilds derived histogram state (the original code
 * also called {@code initializeHistograms()} after loading).
 *
 * SECURITY NOTE(review): Java native deserialization of untrusted files is
 * unsafe (arbitrary-gadget execution); only call this on model files you
 * produced yourself, or install an {@code ObjectInputFilter}.
 *
 * @param f file containing the serialized model
 * @return the restored model with histograms reinitialized
 * @throws Exception if the file cannot be read or does not contain a
 *         {@code PolylingualTopicModel}
 */
public static PolylingualTopicModel read (File f) throws Exception {
	PolylingualTopicModel topicModel;
	// try-with-resources closes the stream even when readObject() throws;
	// the original leaked the ObjectInputStream on any exception.
	try (ObjectInputStream ois = new ObjectInputStream (new FileInputStream (f))) {
		topicModel = (PolylingualTopicModel) ois.readObject ();
	}
	topicModel.initializeHistograms ();
	return topicModel;
}
/**
 * Builds a model with {@code numberOfTopics} topics by generating a fresh
 * label alphabet and delegating to the alphabet-based constructor.
 *
 * @param numberOfTopics number of topics to allocate
 * @param alphaSum total Dirichlet concentration spread over topics
 * @param random random-number source used during sampling
 */
public PolylingualTopicModel (int numberOfTopics, double alphaSum, Randoms random) {
	this (newLabelAlphabet (numberOfTopics), alphaSum, random);
}
// NOTE(review): garbled extract — statements from a larger CLI driver (likely
// a main method) have been fused onto one line with intervening code dropped.
// The concatenation `"Unable to restore saved topic model " + topicModel =
// new PolylingualTopicModel(...)` is not valid Java (assignment cannot be an
// operand of + here), the try block opening is missing, and the `if` brace is
// never closed. Recover the original source before editing; do not compile as-is.
topicModel = PolylingualTopicModel.read(new File(inputModelFilename.value)); } catch (Exception e) { System.err.println("Unable to restore saved topic model " + topicModel = new PolylingualTopicModel (numTopicsOption.value, alphaOption.value); if (randomSeedOption.value != 0) { topicModel.setRandomSeed(randomSeedOption.value); topicModel.addInstances(training); topicModel.setTopicDisplay(showTopicsIntervalOption.value, topWordsOption.value); topicModel.setNumIterations(numIterationsOption.value); topicModel.setOptimizeInterval(optimizeIntervalOption.value); topicModel.setBurninPeriod(optimizeBurnInOption.value); topicModel.setSaveState(outputStateIntervalOption.value, stateFile.value); topicModel.setModelOutput(outputModelIntervalOption.value, outputModelFilename.value); topicModel.estimate(); topicModel.printTopWords(new File(topicKeysFile.value), topWordsOption.value, false); topicModel.printState (new File(stateFile.value)); topicModel.printDocumentTopics(out, docTopicsThreshold.value, docTopicsMax.value); out.close();
// NOTE(review): garbled extract of the estimation loop — progress printing,
// periodic state saving, hyperparameter optimization (optimizeBetas /
// clearHistograms / cacheValues), per-document sampling, and log-likelihood
// reporting from different scopes fused onto one line; the enclosing loop and
// conditionals are missing. Restore from the original source before editing.
printTopWords (System.out, wordsPerTopic, false); this.printState(new File(stateFilename + '.' + iterationsSoFar)); optimizeBetas(); clearHistograms(); cacheValues(); sampleTopicsForOneDoc (data.get(doc), (iterationsSoFar >= burninPeriod && iterationsSoFar % saveSampleInterval == 0)); double ll = modelLogLikelihood(); System.out.println(elapsedMillis + "\t" + totalTime + "\t" + ll);
// NOTE(review): duplicate garbled driver extract — same invalid fusion of CLI
// statements as elsewhere in this chunk (broken string-concat/assignment,
// unclosed braces, missing try opening). Recover the original source; also
// consider deduplicating, since this line is repeated verbatim in the file.
topicModel = PolylingualTopicModel.read(new File(inputModelFilename.value)); } catch (Exception e) { System.err.println("Unable to restore saved topic model " + topicModel = new PolylingualTopicModel (numTopicsOption.value, alphaOption.value); if (randomSeedOption.value != 0) { topicModel.setRandomSeed(randomSeedOption.value); topicModel.addInstances(training); topicModel.setTopicDisplay(showTopicsIntervalOption.value, topWordsOption.value); topicModel.setNumIterations(numIterationsOption.value); topicModel.setOptimizeInterval(optimizeIntervalOption.value); topicModel.setBurninPeriod(optimizeBurnInOption.value); topicModel.setSaveState(outputStateIntervalOption.value, stateFile.value); topicModel.setModelOutput(outputModelIntervalOption.value, outputModelFilename.value); topicModel.estimate(); topicModel.printTopWords(new File(topicKeysFile.value), topWordsOption.value, false); topicModel.printState (new File(stateFile.value)); topicModel.printDocumentTopics(out, docTopicsThreshold.value, docTopicsMax.value); out.close();
// NOTE(review): duplicate garbled estimation-loop extract — statements from
// multiple scopes (display, state save, beta optimization, sampling,
// log-likelihood print) fused onto one line with the loop structure missing.
// This line repeats verbatim elsewhere in the file; restore the original
// source and deduplicate.
printTopWords (System.out, wordsPerTopic, false); this.printState(new File(stateFilename + '.' + iterationsSoFar)); optimizeBetas(); clearHistograms(); cacheValues(); sampleTopicsForOneDoc (data.get(doc), (iterationsSoFar >= burninPeriod && iterationsSoFar % saveSampleInterval == 0)); double ll = modelLogLikelihood(); System.out.println(elapsedMillis + "\t" + totalTime + "\t" + ll);
// NOTE(review): third verbatim copy of the garbled CLI driver extract —
// syntactically invalid as written (assignment inside string concatenation,
// unbalanced braces, truncated error message). Do not compile as-is; recover
// the original driver method and remove the duplicates.
topicModel = PolylingualTopicModel.read(new File(inputModelFilename.value)); } catch (Exception e) { System.err.println("Unable to restore saved topic model " + topicModel = new PolylingualTopicModel (numTopicsOption.value, alphaOption.value); if (randomSeedOption.value != 0) { topicModel.setRandomSeed(randomSeedOption.value); topicModel.addInstances(training); topicModel.setTopicDisplay(showTopicsIntervalOption.value, topWordsOption.value); topicModel.setNumIterations(numIterationsOption.value); topicModel.setOptimizeInterval(optimizeIntervalOption.value); topicModel.setBurninPeriod(optimizeBurnInOption.value); topicModel.setSaveState(outputStateIntervalOption.value, stateFile.value); topicModel.setModelOutput(outputModelIntervalOption.value, outputModelFilename.value); topicModel.estimate(); topicModel.printTopWords(new File(topicKeysFile.value), topWordsOption.value, false); topicModel.printState (new File(stateFile.value)); topicModel.printDocumentTopics(out, docTopicsThreshold.value, docTopicsMax.value); out.close();
// NOTE(review): third verbatim copy of the garbled estimation-loop extract —
// the enclosing iteration loop, timing variables (elapsedMillis, totalTime),
// and conditionals are not visible here. Restore the original method body and
// remove the duplicates before making any code change.
printTopWords (System.out, wordsPerTopic, false); this.printState(new File(stateFilename + '.' + iterationsSoFar)); optimizeBetas(); clearHistograms(); cacheValues(); sampleTopicsForOneDoc (data.get(doc), (iterationsSoFar >= burninPeriod && iterationsSoFar % saveSampleInterval == 0)); double ll = modelLogLikelihood(); System.out.println(elapsedMillis + "\t" + totalTime + "\t" + ll);
/**
 * Restores a {@code PolylingualTopicModel} previously saved with Java
 * serialization, then rebuilds derived histogram state (the original code
 * also called {@code initializeHistograms()} after loading).
 *
 * SECURITY NOTE(review): Java native deserialization of untrusted files is
 * unsafe (arbitrary-gadget execution); only call this on model files you
 * produced yourself, or install an {@code ObjectInputFilter}.
 *
 * @param f file containing the serialized model
 * @return the restored model with histograms reinitialized
 * @throws Exception if the file cannot be read or does not contain a
 *         {@code PolylingualTopicModel}
 */
public static PolylingualTopicModel read (File f) throws Exception {
	PolylingualTopicModel topicModel;
	// try-with-resources closes the stream even when readObject() throws;
	// the original leaked the ObjectInputStream on any exception.
	try (ObjectInputStream ois = new ObjectInputStream (new FileInputStream (f))) {
		topicModel = (PolylingualTopicModel) ois.readObject ();
	}
	topicModel.initializeHistograms ();
	return topicModel;
}
/**
 * Builds a model with {@code numberOfTopics} topics by generating a fresh
 * label alphabet and delegating to the alphabet-based constructor.
 *
 * @param numberOfTopics number of topics to allocate
 * @param alphaSum total Dirichlet concentration spread over topics
 * @param random random-number source used during sampling
 */
public PolylingualTopicModel (int numberOfTopics, double alphaSum, Randoms random) {
	this (newLabelAlphabet (numberOfTopics), alphaSum, random);
}
/**
 * Convenience overload: runs estimation for the configured default
 * iteration count by delegating to {@code estimate(int)} with
 * {@code numIterations}.
 *
 * @throws IOException if the delegated estimation run fails on I/O
 */
public void estimate () throws IOException {
	estimate (numIterations);
}
// NOTE(review): garbled extract of a CLI driver variant (different option
// names: numTopics/alpha vs. numTopicsOption/alphaOption elsewhere) — the
// `if` brace is never closed, and `oos` is used without a visible
// declaration, so intervening code was dropped during extraction. Recover the
// original source before editing; do not compile as-is.
topicModel = new PolylingualTopicModel (numTopics.value, alpha.value); if (randomSeed.value != 0) { topicModel.setRandomSeed(randomSeed.value); topicModel.addInstances(training); topicModel.setTopicDisplay(showTopicsInterval.value, topWords.value); topicModel.setNumIterations(numIterations.value); topicModel.setOptimizeInterval(optimizeInterval.value); topicModel.setBurninPeriod(optimizeBurnIn.value); topicModel.setSaveState(outputStateInterval.value, stateFile.value); topicModel.setModelOutput(outputModelInterval.value, outputModelFilename.value); topicModel.estimate(); topicModel.printTopWords(new File(topicKeysFile.value), topWords.value, false); topicModel.printState (new File(stateFile.value)); topicModel.printDocumentTopics(out, docTopicsThreshold.value, docTopicsMax.value); out.close(); oos.writeObject(topicModel.getInferencer(language)); oos.close();
/**
 * Restores a {@code PolylingualTopicModel} previously saved with Java
 * serialization, then rebuilds derived histogram state (the original code
 * also called {@code initializeHistograms()} after loading).
 *
 * SECURITY NOTE(review): Java native deserialization of untrusted files is
 * unsafe (arbitrary-gadget execution); only call this on model files you
 * produced yourself, or install an {@code ObjectInputFilter}.
 *
 * @param f file containing the serialized model
 * @return the restored model with histograms reinitialized
 * @throws Exception if the file cannot be read or does not contain a
 *         {@code PolylingualTopicModel}
 */
public static PolylingualTopicModel read (File f) throws Exception {
	PolylingualTopicModel topicModel;
	// try-with-resources closes the stream even when readObject() throws;
	// the original leaked the ObjectInputStream on any exception.
	try (ObjectInputStream ois = new ObjectInputStream (new FileInputStream (f))) {
		topicModel = (PolylingualTopicModel) ois.readObject ();
	}
	topicModel.initializeHistograms ();
	return topicModel;
}
/**
 * Builds a model with {@code numberOfTopics} topics by generating a fresh
 * label alphabet and delegating to the alphabet-based constructor.
 *
 * @param numberOfTopics number of topics to allocate
 * @param alphaSum total Dirichlet concentration spread over topics
 * @param random random-number source used during sampling
 */
public PolylingualTopicModel (int numberOfTopics, double alphaSum, Randoms random) {
	this (newLabelAlphabet (numberOfTopics), alphaSum, random);
}
/**
 * Convenience overload: runs estimation for the configured default
 * iteration count by delegating to {@code estimate(int)} with
 * {@code numIterations}.
 *
 * @throws IOException if the delegated estimation run fails on I/O
 */
public void estimate () throws IOException {
	estimate (numIterations);
}
// NOTE(review): verbatim duplicate of the garbled driver-variant extract —
// unclosed `if` brace and an `oos` stream used without a visible declaration
// indicate dropped intervening code. Recover the original source and remove
// the duplicate; do not compile as-is.
topicModel = new PolylingualTopicModel (numTopics.value, alpha.value); if (randomSeed.value != 0) { topicModel.setRandomSeed(randomSeed.value); topicModel.addInstances(training); topicModel.setTopicDisplay(showTopicsInterval.value, topWords.value); topicModel.setNumIterations(numIterations.value); topicModel.setOptimizeInterval(optimizeInterval.value); topicModel.setBurninPeriod(optimizeBurnIn.value); topicModel.setSaveState(outputStateInterval.value, stateFile.value); topicModel.setModelOutput(outputModelInterval.value, outputModelFilename.value); topicModel.estimate(); topicModel.printTopWords(new File(topicKeysFile.value), topWords.value, false); topicModel.printState (new File(stateFile.value)); topicModel.printDocumentTopics(out, docTopicsThreshold.value, docTopicsMax.value); out.close(); oos.writeObject(topicModel.getInferencer(language)); oos.close();