/**
 * Persists the given dataset to the supplied file in ARFF format using a
 * one-shot batch write.
 *
 * @param dataSet the instances to write out
 * @param file    the destination file
 * @throws IOException if the ARFF file cannot be written
 */
private void saveToArffFile(Instances dataSet, File file) throws IOException {
    final ArffSaver arffWriter = new ArffSaver();
    arffWriter.setInstances(dataSet);
    arffWriter.setFile(file);
    arffWriter.writeBatch();
}
/**
 * Sets the destination file, automatically switching on compressed output
 * when the file name ends with the compressed-ARFF extension.
 *
 * @param outputFile the destination file.
 * @throws IOException throws an IOException if file cannot be set
 */
@Override
public void setFile(File outputFile) throws IOException {
    final String absolutePath = outputFile.getAbsolutePath();
    if (absolutePath.endsWith(ArffLoader.FILE_EXTENSION_COMPRESSED)) {
        // .arff.gz target: make sure the saver gzips its output
        setCompressOutput(true);
    }
    super.setFile(outputFile);
}
/**
 * Finishes the incremental save: passing {@code null} to
 * {@code writeIncremental} signals end-of-stream so the saver flushes and
 * closes its output.
 *
 * @throws Exception if the final incremental write fails
 */
@Override public void close() throws Exception { saver.writeIncremental(null); } }
/**
 * Main method for running this saver from the command line.
 *
 * @param args should contain the options of a Saver.
 */
public static void main(String[] args) { runFileSaver(new ArffSaver(), args); } }
return masterInstance; saver = new ArffSaver(); saver.setRetrieval(Saver.INCREMENTAL); saver.setFile(arffTarget); saver.setCompressOutput(false); attributeStore.getAttributes(), instances.size()); masterInstance.setClassIndex(outcomeAttributes.size()); saver.setInstances(masterInstance);
s.writeIncremental(null); File tmpFile = s.retrieveFile(); ArffLoader loader = new ArffLoader(); loader.setFile(tmpFile); saver = new ArffSaver(); try { File tmpFile = File.createTempFile("weka", ".arff"); saver.setFile(tmpFile); saver.setRetrieval(weka.core.converters.Saver.INCREMENTAL); saver.setInstances(new Instances(currentI.dataset(), 0)); m_incrementalSavers.put(e.getSource(), saver); } catch (IOException e1) { saver.writeIncremental(currentI);
/**
 * returns the saver used in the tests
 *
 * @return the configured saver (a freshly constructed {@code ArffSaver})
 */
public AbstractSaver getSaver() { return new ArffSaver(); }
/**
 * Command-line entry point: loads a dataset, applies the
 * MekaClassAttributes filter using the supplied attribute index range, and
 * writes the filtered result to an ARFF file.
 *
 * @param args the input file, the attribute indices, and the output file
 * @throws Exception if loading, filtering or saving fails
 */
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        throw new IllegalArgumentException("Required parameters: <input> <attribute_indices> <output>");
    }

    System.out.println("Loading input data: " + args[0]);
    final Instances dataset = DataSource.read(args[0]);

    System.out.println("Applying filter using indices: " + args[1]);
    final MekaClassAttributes classAttributes = new MekaClassAttributes();
    classAttributes.setAttributeIndices(args[1]);
    classAttributes.setInputFormat(dataset);
    final Instances filtered = Filter.useFilter(dataset, classAttributes);

    System.out.println("Saving filtered data to: " + args[2]);
    final ArffSaver arffSaver = new ArffSaver();
    arffSaver.setFile(new File(args[2]));
    DataSink.write(arffSaver, filtered);
} }
if (getInstances() == null) { throw new IOException("No instances to save"); if (getRetrieval() == INCREMENTAL) { throw new IOException("Batch and incremental saving cannot be mixed."); setRetrieval(BATCH); setWriteMode(WRITE); if (retrieveFile() == null && getWriter() == null) { Instances data = getInstances(); System.out.println(new Instances(data, 0)); for (int i = 0; i < data.numInstances(); i++) { m_MaxDecimalPlaces)); setWriteMode(WAIT); return; PrintWriter outW = new PrintWriter(getWriter()); Instances data = getInstances(); outW.close(); setWriteMode(WAIT); outW = null; resetWriter(); setWriteMode(CANCEL);
public void setOptions(String[] options) throws Exception { setCompressOutput(Utils.getFlag("compress", options)); setMaxDecimalPlaces(Integer.parseInt(tmpStr));
return masterInstance; saver = new ArffSaver(); saver.setRetrieval(Saver.INCREMENTAL); saver.setFile(arffTarget); saver.setCompressOutput(false); attributeStore.getAttributes(), instances.size()); masterInstance.setClassIndex(outcomeAttributes.size()); saver.setInstances(masterInstance);
s.writeIncremental(null); File tmpFile = s.retrieveFile(); ArffLoader loader = new ArffLoader(); loader.setFile(tmpFile); saver = new ArffSaver(); try { File tmpFile = File.createTempFile("weka", ".arff"); saver.setFile(tmpFile); saver.setRetrieval(weka.core.converters.Saver.INCREMENTAL); saver.setInstances(new Instances(currentI.dataset(), 0)); m_incrementalSavers.put(e.getSource(), saver); } catch (IOException e1) { saver.writeIncremental(currentI);
/**
 * returns the saver used in the tests
 *
 * @return the configured saver (a freshly constructed {@code ArffSaver})
 */
public AbstractSaver getSaver() { return new ArffSaver(); }
if (getInstances() == null) { throw new IOException("No instances to save"); if (getRetrieval() == INCREMENTAL) { throw new IOException("Batch and incremental saving cannot be mixed."); setRetrieval(BATCH); setWriteMode(WRITE); if (retrieveFile() == null && getWriter() == null) { Instances data = getInstances(); System.out.println(new Instances(data, 0)); for (int i = 0; i < data.numInstances(); i++) { m_MaxDecimalPlaces)); setWriteMode(WAIT); return; PrintWriter outW = new PrintWriter(getWriter()); Instances data = getInstances(); outW.close(); setWriteMode(WAIT); outW = null; resetWriter(); setWriteMode(CANCEL);
/**
 * Main method for running this saver from the command line.
 *
 * @param args should contain the options of a Saver.
 */
public static void main(String[] args) { runFileSaver(new ArffSaver(), args); } }
public void setOptions(String[] options) throws Exception { setCompressOutput(Utils.getFlag("compress", options)); setMaxDecimalPlaces(Integer.parseInt(tmpStr));
return masterInstance; saver = new ArffSaver(); saver.setRetrieval(Saver.INCREMENTAL); saver.setFile(arffTarget); saver.setCompressOutput(false); instances.size()); masterInstance.setClass(outcomeAttribute); saver.setInstances(masterInstance);
/**
 * Writes the dataset to the given file as ARFF via a single batch save.
 *
 * @param dataSet the instances to persist
 * @param file    the target file
 * @throws IOException if the file cannot be written
 */
private void writeArffFile(Instances dataSet, File file) throws IOException {
    final ArffSaver out = new ArffSaver();
    out.setInstances(dataSet);
    out.setFile(file);
    out.writeBatch();
}
/**
 * initializes the sink to save the data to the given file.
 *
 * @param filename the file to save data to
 * @throws Exception if set of saver fails
 */
public DataSink(String filename) throws Exception {
    m_Stream = null;
    // ARFF files get the dedicated ArffSaver; anything else is resolved
    // from the file name by getSaverForFile.
    m_Saver = DataSource.isArff(filename) ? new ArffSaver() : getSaverForFile(filename);
    ((AbstractFileSaver) m_Saver).setFile(new File(filename));
}
/**
 * Flushes any pending output by finishing the incremental save; passing
 * {@code null} to {@code writeIncremental} marks the end of the stream.
 *
 * @throws Exception if the final incremental write fails
 */
@Override public void close() throws Exception { saver.writeIncremental(null); }