// Stream a separate test set incrementally and print the prediction of the
// previously trained classifier (nb) for every instance.
// NOTE(review): the file is named "sample2.txt" but is parsed as ARFF —
// confirm the file content really is ARFF.
ArffLoader testingData = new ArffLoader();
testingData.setFile(new File("sample2.txt"));
Instances testingStructure = testingData.getStructure();
// Use the test set's OWN header here; the original referenced the training
// "structure", which only works by accident when both headers happen to have
// the same attribute count.
testingStructure.setClassIndex(testingStructure.numAttributes() - 1);
Instance test;
while ((test = testingData.getNextInstance(testingStructure)) != null) {
  System.out.println(nb.classifyInstance(test));
}
// Load two ARFF files with the same loader instance and merge the results.
// NOTE(review): Instances.mergeInstances() joins the two sets ATTRIBUTE-wise
// (side by side) and requires both sets to contain the same number of rows;
// it does NOT append the rows of one set to the other — confirm this is the
// intended semantics here.
ArffLoader arffLoader = new ArffLoader();
arffLoader.setFile(new File(fName1));
Instances newData1 = arffLoader.getDataSet();
arffLoader.setFile(new File(fName2));
Instances newData2 = arffLoader.getDataSet();
Instances mergedData = Instances.mergeInstances( newData1 ,newData2);
// Read the whole ARFF file in one batch into an Instances object.
File arffFile = new File("C:/Users/..../Desktop/maitre.arff");
ArffLoader loader = new ArffLoader();
loader.setSource(arffFile);
Instances data = loader.getDataSet();
/**
 * Main method for running this converter from the command line.
 *
 * @param args should contain the name of an input file.
 */
public static void main(String[] args) {
  ArffLoader loader = new ArffLoader();
  runFileLoader(loader, args);
}
}
// Hand the sampler the data set's header (an empty Instances object) as its
// input format; getStructure() reads only the ARFF header, no data rows.
ArffLoader loader = new ArffLoader();
File arffFile = new File("some.arff");
loader.setFile(arffFile);
Instances data = loader.getStructure();
sampler.setInputFormat(data);
// Build an ArffLoader for the training file; fail fast when no file name was
// supplied. NOTE: this fragment is truncated — the closing brace of the
// else-branch lies outside the visible snippet.
weka.core.converters.ArffLoader loader = null;
if (filename.length() != 0) {
  loader = new weka.core.converters.ArffLoader();
  loader.setFile(new java.io.File(filename));
} else {
  throw new Exception("No training file specified!");
/**
 * Returns the loader used in the tests.
 *
 * @return the configured loader
 */
public AbstractLoader getLoader() {
  ArffLoader result = new ArffLoader();
  return result;
}
// Source dispatch: an Instances object is used directly; an ArffLoader source
// contributes only its header, then instances are streamed one at a time.
// NOTE: this fragment is truncated — opening/closing braces of the
// surrounding if/else chain and the while loop lie outside the snippet.
data = (Instances) source;
} else if (source instanceof weka.core.converters.ArffLoader) {
data = ((weka.core.converters.ArffLoader) source).getStructure();
Instance current = null;
int count = 0;
// Feed every instance to the singleton-counting pass and tally it.
while ((current = loader.getNextInstance(data)) != null) {
processSingleton(current, singletons);
count++;
// NOTE(review): reset() appearing right after count++ looks suspicious for a
// read loop — the brace structure is cut off here, so whether it is inside
// the loop cannot be verified from this snippet.
loader.reset();
/**
 * Resets the Loader ready to read a new data set or the same data set again.
 *
 * @throws IOException if something goes wrong
 */
@Override
public void reset() throws IOException {
  // Drop any cached header and reader state before re-opening the source.
  m_structure = null;
  m_ArffReader = null;
  setRetrieval(NONE);

  if (m_File != null && !(new File(m_File).isDirectory())) {
    // A concrete file was configured earlier — re-open it.
    setFile(new File(m_File));
    return;
  }
  if (m_URL != null && !m_URL.equals("http://")) {
    // Otherwise replay the URL source, unless it is still the placeholder.
    setURL(m_URL);
  }
}
// Batch-mode guard clauses. NOTE: this fragment is truncated — the closing
// braces of both if-statements lie outside the visible snippet.
throw new IOException("No source has been specified");
// A single loader must not mix incremental and batch reading.
if (getRetrieval() == INCREMENTAL) {
throw new IOException(
"Cannot mix getting Instances in both incremental and batch modes");
setRetrieval(BATCH);
// Lazily parse the header the first time the full data set is requested.
if (m_structure == null) {
getStructure();
// Source dispatch: an Instances object is used directly; an ArffLoader source
// contributes only its header, then instances are streamed and inserted into
// the tree one at a time. NOTE: this fragment is truncated — the braces of
// the surrounding if/else chain and the while loop lie outside the snippet.
data = (Instances) dataSource;
} else if (dataSource instanceof weka.core.converters.ArffLoader) {
data = ((weka.core.converters.ArffLoader) dataSource).getStructure();
Instance current = null;
int count = 0;
// Insert each streamed instance into the tree, respecting minSupport.
while ((current = loader.getNextInstance(data)) != null) {
insertInstance(current, singletons, tree, minSupport);
count++;
/** * Resets the Loader object and sets the source of the data set to be the * supplied url. * * @param url the source url. * @throws IOException if an error occurs */ public void setSource(URL url) throws IOException { m_structure = null; setRetrieval(NONE); setSource(url.openStream()); m_URL = url.toString(); // make sure that the file is null so that any calls to // reset() work properly m_File = null; }
// An ArffLoader source provides only a header, so relax the minimum-instance
// requirement to zero, remember that an ARFF loader is in use, and rewind the
// loader for the upcoming read. NOTE: fragment — surrounding braces are not
// visible in this snippet.
data = ((weka.core.converters.ArffLoader) source).getStructure();
capabilities.setMinimumNumberInstances(0);
arffLoader = true;
((weka.core.converters.ArffLoader) source).reset();
/**
 * Set the url to load from.
 *
 * @param url the url to load from
 * @throws IOException if the url can't be set.
 */
@Override
public void setURL(String url) throws IOException {
  // Remember the raw string first so that reset() can replay it later.
  m_URL = url;
  URL parsed = new URL(url);
  setSource(parsed);
}
/**
 * Determines and returns (if possible) the structure (internally the header)
 * of the data set as an empty set of instances.
 *
 * @return the structure of the data set as an empty set of Instances
 * @throws IOException if an error occurs
 */
@Override
public Instances getStructure() throws IOException {
  if (m_structure == null) {
    if (m_sourceReader == null) {
      throw new IOException("No source has been specified");
    }
    try {
      m_ArffReader =
        new ArffReader(m_sourceReader, 1, (getRetrieval() == BATCH));
      m_ArffReader.setRetainStringValues(getRetainStringVals());
      m_structure = m_ArffReader.getStructure();
    } catch (Exception ex) {
      // Chain the original exception as the cause instead of discarding it;
      // the message text is unchanged for callers that inspect it.
      throw new IOException("Unable to determine structure as arff (Reason: "
        + ex.toString() + ").", ex);
    }
  }
  // Hand back a zero-instance copy of the header so callers cannot mutate
  // the cached structure.
  return new Instances(m_structure, 0);
}
// Incremental-mode guard: a single loader must not mix incremental and batch
// reading. NOTE: fragment — the if-statement's closing brace lies outside the
// visible snippet.
if (getRetrieval() == BATCH) {
throw new IOException(
"Cannot mix getting Instances in both incremental and batch modes");
setRetrieval(INCREMENTAL);
// Build an ArffLoader for the training file; fail fast when no file name was
// supplied. NOTE: this fragment is truncated — the closing brace of the
// else-branch lies outside the visible snippet.
weka.core.converters.ArffLoader loader = null;
if (filename.length() != 0) {
  loader = new weka.core.converters.ArffLoader();
  loader.setFile(new java.io.File(filename));
} else {
  throw new Exception("No training file specified!");
/**
 * Returns the loader used in the tests.
 *
 * @return the configured loader
 */
public AbstractLoader getLoader() {
  ArffLoader testLoader = new ArffLoader();
  return testLoader;
}
// Source dispatch: an Instances object is used directly; an ArffLoader source
// contributes only its header, then instances are streamed one at a time.
// NOTE: this fragment is truncated — opening/closing braces of the
// surrounding if/else chain and the while loop lie outside the snippet.
data = (Instances) source;
} else if (source instanceof weka.core.converters.ArffLoader) {
data = ((weka.core.converters.ArffLoader) source).getStructure();
Instance current = null;
int count = 0;
// Feed every instance to the singleton-counting pass and tally it.
while ((current = loader.getNextInstance(data)) != null) {
processSingleton(current, singletons);
count++;
// NOTE(review): reset() appearing right after count++ looks suspicious for a
// read loop — the brace structure is cut off here, so whether it is inside
// the loop cannot be verified from this snippet.
loader.reset();
/**
 * Main method for running this converter from the command line.
 *
 * @param args should contain the name of an input file.
 */
public static void main(String[] args) {
  ArffLoader fileLoader = new ArffLoader();
  runFileLoader(fileLoader, args);
}
}