/**
 * Creates a MetadataWriter: initializes the per-document metadata containers
 * and deserializes the mention-pair dataset from the trainer's dataset file.
 *
 * @param countWords whether word counts should be accumulated during writing
 * @throws RuntimeException if the serialized dataset cannot be loaded
 */
public MetadataWriter(boolean countWords) {
  this.countWords = countWords;
  wordCounts = new ClassicCounter<>();
  goldClusters = new HashMap<>();
  mentionTypes = new HashMap<>();
  try {
    mentionPairs = IOUtils.readObjectFromFile(StatisticalCorefTrainer.datasetFile);
  } catch (Exception loadFailure) {
    throw new RuntimeException(loadFailure);
  }
}
/**
 * Round-trips a serialized object: reads it from the file named by
 * {@code args[0]} and writes it back out to the file named by {@code args[1]}.
 *
 * @param args [0] input path, [1] output path
 * @throws IOException if either file cannot be accessed
 * @throws ClassNotFoundException if the serialized class is not on the classpath
 */
public static void main(String[] args) throws IOException, ClassNotFoundException {
  Object deserialized = IOUtils.readObjectFromFile(args[0]);
  IOUtils.writeObjectToFile(deserialized, args[1]);
}
}
/**
 * Loads a serialized {@code Weights} object and returns its weight vector.
 *
 * @param loadPath path of the serialized weights file
 * @return the weight array ({@code w}) stored in the file
 * @throws IOException if the file cannot be read
 * @throws ClassCastException if the file contains an object of another type
 * @throws ClassNotFoundException if the Weights class cannot be resolved
 */
public static double[] getWeights(String loadPath) throws IOException, ClassCastException, ClassNotFoundException {
  log.info("Loading weights from " + loadPath + "...");
  Weights loaded = IOUtils.readObjectFromFile(loadPath);
  return loaded.w;
}
/**
 * Loads a serialized {@code Weights} object and returns its diagonal vector.
 *
 * @param loadPath path of the serialized weights file
 * @return the diagonal array ({@code d}) stored in the file
 * @throws IOException if the file cannot be read
 * @throws ClassCastException if the file contains an object of another type
 * @throws ClassNotFoundException if the Weights class cannot be resolved
 */
public static double[] getDiag(String loadPath) throws IOException, ClassCastException, ClassNotFoundException {
  log.info("Loading weights from " + loadPath + "...");
  Weights loaded = IOUtils.readObjectFromFile(loadPath);
  return loaded.d;
}
/**
 * Loads a serialized token inverted index from {@code dir}/map.ser and wraps
 * it in a new {@code InvertedIndexByTokens}.
 *
 * @param props configuration properties passed through to the index
 * @param stopwords stopwords passed through to the index
 * @param dir directory containing the serialized {@code map.ser} file
 * @param transformSentenceToString token-to-string transform used by the index
 * @return the reconstructed inverted index
 * @throws RuntimeException if deserialization fails, with the original
 *     exception attached as the cause
 */
public static InvertedIndexByTokens loadIndex(Properties props, Set<String> stopwords, String dir, Function<CoreLabel, Map<String, String>> transformSentenceToString) {
  try {
    Map<String, Set<String>> index = IOUtils.readObjectFromFile(dir + "/map.ser");
    System.out.println("Loading inverted index from " + dir);
    return new InvertedIndexByTokens(props, stopwords, transformSentenceToString, index);
  } catch (Exception e) {
    // FIX: chain e as the cause instead of only flattening it into the message,
    // so the full stack trace of the underlying failure is preserved.
    throw new RuntimeException("Cannot load the inverted index. " + e, e);
  }
}
/**
 * Loads previously serialized patterns from {@code allPatternsDir}/allpatterns.ser
 * and registers them via {@code addPatterns}.
 *
 * @param allPatternsDir directory holding the serialized patterns file
 * @throws RuntimeException wrapping any I/O or deserialization failure
 */
@Override
public void load(String allPatternsDir) {
  String serializedPatterns = allPatternsDir + "/allpatterns.ser";
  try {
    addPatterns(IOUtils.readObjectFromFile(serializedPatterns));
  } catch (IOException | ClassNotFoundException cause) {
    throw new RuntimeException(cause);
  }
}
/**
 * Read an object from a stored file.
 *
 * <p>Convenience overload of the {@code File}-based variant: the result is
 * unchecked-cast to the caller's expected type, so a type mismatch surfaces
 * as a {@code ClassCastException} at the call site rather than here.
 *
 * @param filename The filename of the object to be retrieved
 * @throws IOException If file cannot be read
 * @throws ClassNotFoundException If reading serialized object fails
 * @return The object read from the file.
 */
public static <T> T readObjectFromFile(String filename) throws IOException, ClassNotFoundException {
  return ErasureUtils.uncheckedCast(readObjectFromFile(new File(filename)));
}
/**
 * Deserializes an {@code Annotation} from the given path and returns the list
 * of sentences it contains.
 *
 * @param path path to the serialized Annotation
 * @return the document's sentences
 * @throws IOException if the file cannot be read
 * @throws ClassNotFoundException if the serialized class is unavailable
 */
public static List<CoreMap> readSentencesFromFile(String path) throws IOException, ClassNotFoundException {
  Annotation document = (Annotation) IOUtils.readObjectFromFile(path);
  return document.get(CoreAnnotations.SentencesAnnotation.class);
}
/**
 * Builds a FeatureExtractorRunner: sets up the document list, feature
 * compressor, and extractor, then loads the serialized training dataset.
 *
 * @param props configuration properties forwarded to the feature extractor
 * @param dictionaries coref dictionaries forwarded to the feature extractor
 * @throws RuntimeException if the dataset file cannot be deserialized
 */
public FeatureExtractorRunner(Properties props, Dictionaries dictionaries) {
  documents = new ArrayList<>();
  compressor = new Compressor<>();
  extractor = new FeatureExtractor(props, dictionaries, compressor);
  try {
    dataset = IOUtils.readObjectFromFile(StatisticalCorefTrainer.datasetFile);
  } catch (Exception cause) {
    throw new RuntimeException("Error initializing FeatureExtractorRunner", cause);
  }
}
/**
 * Returns the next batch of sentences paired with the file they came from.
 * In batch mode the next sentence file is deserialized from disk; otherwise
 * the shared in-memory sentences are returned and flagged as read.
 *
 * @return a pair of (sentence map, source file)
 * @throws RuntimeException wrapping any deserialization failure in batch mode
 */
@Override
public Pair<Map<String, DataInstance>, File> next() {
  if (!batchProcessSents) {
    // Non-batch mode: everything lives in memory; mark it consumed.
    readInMemory = true;
    return new Pair<>(Data.sents, new File(Data.inMemorySaveFileLocation));
  }
  try {
    File sentFile = sentfilesIter.next();
    return new Pair<>(IOUtils.readObjectFromFile(sentFile), sentFile);
  } catch (IOException | ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
}
}
/**
 * Loads all serialized inputs needed to assemble clusterer documents: the
 * labeled mention pairs, per-mention types, gold clusters, and the saved
 * pairwise classification, ranking, and anaphoricity score maps.
 * NOTE(review): this method continues beyond the visible chunk; presumably
 * maxDocs limits how many documents are materialized below — confirm.
 */
public static List<ClustererDoc> loadDocuments(int maxDocs) throws Exception {
  // Gold-standard supervision: pair labels, mention types, and clusters.
  Map<Integer, Map<Pair<Integer, Integer>, Boolean>> labeledPairs = IOUtils.readObjectFromFile(StatisticalCorefTrainer.datasetFile);
  Map<Integer, Map<Integer, String>> mentionTypes = IOUtils.readObjectFromFile(StatisticalCorefTrainer.mentionTypesFile);
  Map<Integer, List<List<Integer>>> goldClusters = IOUtils.readObjectFromFile(StatisticalCorefTrainer.goldClustersFile);
  // Previously computed model predictions, keyed by document id.
  Map<Integer, Counter<Pair<Integer, Integer>>> classificationScores = IOUtils.readObjectFromFile(StatisticalCorefTrainer.pairwiseModelsPath + StatisticalCorefTrainer.CLASSIFICATION_MODEL + "/" + StatisticalCorefTrainer.predictionsName + ".ser");
  Map<Integer, Counter<Pair<Integer, Integer>>> rankingScores = IOUtils.readObjectFromFile(StatisticalCorefTrainer.pairwiseModelsPath + StatisticalCorefTrainer.RANKING_MODEL + "/" + StatisticalCorefTrainer.predictionsName + ".ser");
  Map<Integer, Counter<Pair<Integer, Integer>>> anaphoricityScoresLoaded = IOUtils.readObjectFromFile(StatisticalCorefTrainer.pairwiseModelsPath + StatisticalCorefTrainer.ANAPHORICITY_MODEL + "/" + StatisticalCorefTrainer.predictionsName + ".ser");
public static void main(String[] args) { try { LearnImportantFeatures lmf = new LearnImportantFeatures(); Properties props = StringUtils.argsToPropertiesWithResolve(args); ArgumentParser.fillOptions(lmf, props); lmf.setUp(); String sentsFile = props.getProperty("sentsFile"); Map<String, DataInstance> sents = IOUtils .readObjectFromFile(sentsFile); System.out.println("Read the sents file: " + sentsFile); double perSelectRand = Double.parseDouble(props .getProperty("perSelectRand")); double perSelectNeg = Double.parseDouble(props .getProperty("perSelectNeg")); // String wekaOptions = props.getProperty("wekaOptions"); //lmf.getTopFeatures(false, , perSelectRand, perSelectNeg, props.getProperty("externalFeatureWeightsFile")); } catch (Exception e) { e.printStackTrace(); } } }
// Deserialize the sentence map saved in file f, then iterate each sentence
// together with the patterns previously matched to it (sentIds2Pats).
// NOTE(review): fragment — the inner loop body continues beyond this chunk.
Map<String, DataInstance> sentsf = IOUtils.readObjectFromFile(f);
for(Map.Entry<String, DataInstance> s: sentsf.entrySet()){
  for(E pat: sentIds2Pats.get(s.getKey()))
// Deserialize the corpus Annotation from the serialized-sentences path and
// report how many sentences it holds.
// NOTE(review): fragment — the enclosing if/else presumably rebuilds the
// corpus from raw input when no serialized file exists; confirm below.
corpusSentences = IOUtils.readObjectFromFile(serializedSentences);
MachineReadingProperties.logger.info("Done. Loaded " + corpusSentences.get(CoreAnnotations.SentencesAnnotation.class).size() + " sentences.");
} else {
Redwood.log(Redwood.DBG, "reading from ser file " + f);
// Non-batch mode: merge every deserialized sentence map into the in-memory
// sents map. Batch mode: load into evalsents instead.
// NOTE(review): fragment — newf (a flattened temp-dir path for f) appears
// unused here; presumably consumed past this chunk — confirm.
if (!batchProcessSents)
  sents.putAll((Map<String, DataInstance>) IOUtils.readObjectFromFile(f));
else{
  File newf = new File(tempSaveSentencesDir.getAbsolutePath() + "/" + f.getAbsolutePath().replaceAll(java.util.regex.Pattern.quote("/"), "_"));
  evalsents.putAll((Map<? extends String, ? extends DataInstance>) IOUtils.readObjectFromFile(f));
/**
 * Scores the extracted test documents with the given pairwise model and
 * writes both human-readable predictions and a serialized score map.
 *
 * @param model the trained pairwise model to evaluate
 * @param predictionsName base name for the prediction output files
 * @param anaphoricityModel whether to build anaphoricity (rather than
 *     pairwise) examples from the test documents
 * @throws Exception if any file cannot be read or written
 */
public static void test(PairwiseModel model, String predictionsName, boolean anaphoricityModel) throws Exception {
  Redwood.log("scoref-train", "Reading compression...");
  Compressor<String> compressor = IOUtils.readObjectFromFile(StatisticalCorefTrainer.compressorFile);
  Redwood.log("scoref-train", "Reading test data...");
  List<DocumentExamples> testDocuments = IOUtils.readObjectFromFile(StatisticalCorefTrainer.extractedFeaturesFile);
  Redwood.log("scoref-train", "Building test set...");
  List<Pair<Example, Map<Integer, CompressedFeatureVector>>> allExamples = anaphoricityModel ? getAnaphoricityExamples(testDocuments) : getExamples(testDocuments);
  Redwood.log("scoref-train", "Testing...");
  Map<Integer, Counter<Pair<Integer, Integer>>> scores = new HashMap<>();
  // FIX: try-with-resources so the writers are closed even if scoring throws
  // (the original leaked the PrintWriter on any exception before close()).
  try (PrintWriter writer = new PrintWriter(model.getDefaultOutputPath() + predictionsName)) {
    writeScores(allExamples, compressor, model, writer, scores);
  }
  if (model instanceof MaxMarginMentionRanker) {
    try (PrintWriter anaphoricityWriter = new PrintWriter(model.getDefaultOutputPath() + predictionsName + "_anaphoricity")) {
      // Re-read the documents as in the original; presumably building the
      // first example set consumes/mutates them — TODO confirm.
      testDocuments = IOUtils.readObjectFromFile(StatisticalCorefTrainer.extractedFeaturesFile);
      allExamples = getAnaphoricityExamples(testDocuments);
      writeScores(allExamples, compressor, model, anaphoricityWriter, scores);
    }
  }
  IOUtils.writeObjectToFile(scores, model.getDefaultOutputPath() + predictionsName + ".ser");
}
// Load the shared feature compressor and the extracted training documents
// from their serialized files. NOTE(review): fragment — the enclosing
// training method continues beyond this chunk.
Compressor<String> compressor = IOUtils.readObjectFromFile(StatisticalCorefTrainer.compressorFile);
List<DocumentExamples> trainDocuments = IOUtils.readObjectFromFile(StatisticalCorefTrainer.extractedFeaturesFile);
// Deserialize the saved patterns, keyed by training iteration number.
Map<Integer, Counter<E>> patterns = IOUtils.readObjectFromFile(patf);
// When only the first N iterations of saved patterns should be loaded,
// collect the iteration keys past that limit for removal.
// NOTE(review): fragment — the removal logic continues beyond this chunk.
if(numIterationsOfSavedPatternsToLoad < Integer.MAX_VALUE){
  Set<Integer> toremove = new HashSet<>();
/**
 * Trains the given ranking model on the extracted coreference features.
 * NOTE(review): fragment — the training loop continues beyond this chunk;
 * only the deserialization of inputs is visible here.
 */
public static void trainRanking(PairwiseModel model) throws Exception {
  Redwood.log("scoref-train", "Reading compression...");
  Compressor<String> compressor = IOUtils.readObjectFromFile(StatisticalCorefTrainer.compressorFile);
  List<DocumentExamples> trainDocuments = IOUtils.readObjectFromFile(StatisticalCorefTrainer.extractedFeaturesFile);
// Load the temporary on-disk feature index built earlier in training.
// NOTE(review): fragment — the catch block is closed beyond this chunk.
try {
  log.info("Reading temporary feature index file.");
  featureIndex = IOUtils.readObjectFromFile(featIndexFile);
} catch (Exception e) {
  // FIX: chain e as the cause so the underlying failure (missing file,
  // corrupt stream, wrong class) is not silently discarded.
  throw new RuntimeException("Could not open temporary feature index file for reading.", e);