// Train a NaiveBayes model, persist it to disk, reload it, and evaluate it.
// NOTE(review): evaluating on the training set gives an optimistic accuracy
// estimate — consider cross-validation for a realistic figure.
Classifier cModel = new NaiveBayes(); // fixed: cast was redundant — NaiveBayes IS-A Classifier
cModel.buildClassifier(isTrainingSet);

// Serialize the trained model so it can be reused without retraining.
weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);

// Deserialize the model back into a generic Classifier handle.
Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");

// Test the model
Evaluation eTest = new Evaluation(isTrainingSet);
eTest.evaluateModel(cls, isTrainingSet);
/** Default constructor: uses NaiveBayes as the base classifier. */
public AggregateableFilteredClassifier() {
    this.m_Classifier = new NaiveBayes();
}
/**
 * Returns the Capabilities of this filter.
 *
 * <p>Delegates to a fresh NaiveBayes instance, so the reported capabilities
 * are exactly those of the default base scheme.</p>
 *
 * @return the capabilities of this object
 * @see Capabilities
 */
@Override
public Capabilities getCapabilities() {
    final NaiveBayes delegate = new NaiveBayes();
    return delegate.getCapabilities();
}
/**
 * Main method for testing this class.
 *
 * @param argv the options
 */
public static void main(String[] argv) {
    final NaiveBayes scheme = new NaiveBayes();
    runClassifier(scheme, argv);
}
}
Classifier Mode; // a parent class if(alg.equals("DecisionStump")) { Mode = new DecisionStump(); } else if(alg.equals("NaiveBayes")) { Mode = new NaiveBayes(); }
/**
 * Returns the Capabilities of this filter.
 *
 * @return the capabilities of this object (those of a default NaiveBayes)
 * @see Capabilities
 */
@Override
public Capabilities getCapabilities() {
    NaiveBayes baseScheme = new NaiveBayes();
    return baseScheme.getCapabilities();
}
/**
 * Main method for testing this class.
 *
 * @param argv the options
 */
public static void main(String[] argv) {
    runClassifier(new NaiveBayes(), argv); // delegate option parsing/running to Weka
}
}
Classifier mode if(alg.equals("DecisionStump")) { mode = new DecisionStump(); } else if(alg.equals("NaiveBayes")) { mode = new NaiveBayes(); }
/** Creates a default NaiveBayes */
public Classifier getClassifier() {
    final Classifier defaultScheme = new NaiveBayes();
    return defaultScheme;
}
/** Creates a default NaiveBayes */
public Classifier getClassifier() {
    // A fresh instance per call: callers get an untrained, unshared model.
    return new NaiveBayes();
}
// Untested Java, I use Weka through JRuby NaiveBayes naiveBayes = new NaiveBayes(); Remove remove = new Remove(); remove.setOptions(Utils.splitOptions("-R 1-2")); FilteredClassifier model = new FilteredClassifier(naiveBayes, remove); // Use model to classify as normal
/** * Builds the classifier for the given training model */ private void initializeModel(CommonConfig config) throws ConfigurationException { // Train the classifier logger.info("Training the classifier..."); File arffFile = new File(modelDir + "/" + this.getClass().getSimpleName() + ".arff"); classifier = new NaiveBayes(); try { Instances data = DataSource.read(arffFile.getAbsolutePath()); data.setClassIndex(data.numAttributes() - 1); classifier.buildClassifier(data); } catch (Exception e) { throw new ConfigurationException(e); } }
// Map a model-type name to a fresh, untrained Weka classifier instance.
// NOTE(review): snippet is truncated ("//...") — the try block is closed
// outside the visible source, so only comments are added here.
Classifier Clfs = null;
try {
    if (modelType.equals("J48")) {
        Clfs = new J48();                  // C4.5 decision tree
    } else if (modelType.equals("MLP")) {
        Clfs = new MultilayerPerceptron(); // feed-forward neural network
    } else if (modelType.equals("IB3")) {
        Clfs = new IBk(3);                 // k-nearest-neighbours with k = 3
    } else if (modelType.equals("RF")) {
        Clfs = new RandomForest();
    } else if (modelType.equals("NB")) {
        Clfs = new NaiveBayes();
        //...
/** Builds a classifier map task whose base scheme is NaiveBayes (aggregateable). */
protected WekaClassifierMapTask setupAggregateableBatchClassifier() {
    final WekaClassifierMapTask batchTask = new WekaClassifierMapTask();
    batchTask.setClassifier(new weka.classifiers.bayes.NaiveBayes());
    return batchTask;
}
/**
 * Maps a {@code WekaClassifier} constant to a freshly configured Weka classifier.
 *
 * @param classifier which classifier to instantiate
 * @return a new classifier instance with its default options applied
 * @throws IllegalArgumentException if the constant is unknown or option parsing fails
 */
public static Classifier getClassifier(WekaClassifier classifier) throws IllegalArgumentException {
    try {
        switch (classifier) {
            case NAIVE_BAYES:
                return new NaiveBayes();
            case J48:
                J48 tree = new J48();
                // Pruning confidence 0.25, minimum 2 instances per leaf.
                tree.setOptions(new String[] { "-C", "0.25", "-M", "2" });
                return tree;
            case SMO:
                SMO svm = new SMO();
                // Weka's default SMO settings with a polynomial kernel.
                svm.setOptions(Utils.splitOptions("-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
                return svm;
            case LOGISTIC:
                Logistic regression = new Logistic();
                regression.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
                return regression;
            default:
                throw new IllegalArgumentException("Classifier " + classifier + " not found!");
        }
    } catch (Exception e) {
        // Wrap option-parsing failures, preserving the cause.
        throw new IllegalArgumentException(e);
    }
}
/**
 * Returns a new Weka classifier for the given {@code WekaClassifier} constant.
 *
 * @param classifier which classifier to instantiate
 * @return a new classifier instance with its default options applied
 * @throws IllegalArgumentException if the constant is unknown or option parsing fails
 */
public static Classifier getClassifier(WekaClassifier classifier) throws IllegalArgumentException {
    try {
        switch (classifier) {
            case NAIVE_BAYES:
                return new NaiveBayes();
            case J48:
                J48 decisionTree = new J48();
                // Pruning confidence 0.25, minimum 2 instances per leaf.
                decisionTree.setOptions(new String[] { "-C", "0.25", "-M", "2" });
                return decisionTree;
            case SMO:
                SMO supportVectorMachine = new SMO();
                // Default SMO settings with a polynomial kernel.
                supportVectorMachine.setOptions(Utils.splitOptions("-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
                return supportVectorMachine;
            case LOGISTIC:
                Logistic logisticRegression = new Logistic();
                logisticRegression.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
                return logisticRegression;
            default:
                throw new IllegalArgumentException("Classifier " + classifier + " not found!");
        }
    } catch (Exception e) {
        // Re-throw option-parsing failures with the cause attached.
        throw new IllegalArgumentException(e);
    }
}
/**
 * Factory that instantiates the Weka classifier matching {@code classifier}.
 *
 * @param classifier which classifier to instantiate
 * @return a new classifier instance with its default options applied
 * @throws IllegalArgumentException if the constant is unknown or option parsing fails
 */
public static Classifier getClassifier(WekaClassifier classifier) throws IllegalArgumentException {
    try {
        switch (classifier) {
            case NAIVE_BAYES:
                return new NaiveBayes();
            case J48:
                J48 c45 = new J48();
                // Pruning confidence 0.25, minimum 2 instances per leaf.
                c45.setOptions(new String[] { "-C", "0.25", "-M", "2" });
                return c45;
            case SMO:
                SMO smoScheme = new SMO();
                // Default SMO settings with a polynomial kernel.
                smoScheme.setOptions(Utils.splitOptions("-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
                return smoScheme;
            case LOGISTIC:
                Logistic logit = new Logistic();
                logit.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
                return logit;
            default:
                throw new IllegalArgumentException("Classifier " + classifier + " not found!");
        }
    } catch (Exception e) {
        // Preserve the underlying parsing error as the cause.
        throw new IllegalArgumentException(e);
    }
}
@Test
public void testScoreWithClassifier() throws Exception {
    // Train NaiveBayes on the IRIS data and score the very same instances.
    Instances trainingData =
        new Instances(new BufferedReader(new StringReader(CorrelationMatrixMapTaskTest.IRIS)));
    trainingData.setClassIndex(trainingData.numAttributes() - 1);

    NaiveBayes model = new NaiveBayes();
    model.buildClassifier(trainingData);

    WekaScoringMapTask scoringTask = new WekaScoringMapTask();
    scoringTask.setModel(model, trainingData, trainingData);

    // Identical train/score headers: no missing or mismatched attributes.
    assertEquals(0, scoringTask.getMissingMismatchAttributeInfo().length());
    // IRIS has three class labels.
    assertEquals(3, scoringTask.getPredictionLabels().size());
    // Every instance yields a distribution over the three classes.
    for (int idx = 0; idx < trainingData.numInstances(); idx++) {
        assertEquals(3, scoringTask.processInstance(trainingData.instance(idx)).length);
    }
}
public class Run { public static void main(String[] args) throws Exception { ConverterUtils.DataSource source1 = new ConverterUtils.DataSource("./data/train.arff"); Instances train = source1.getDataSet(); // setting class attribute if the data format does not provide this information // For example, the XRFF format saves the class attribute information as well if (train.classIndex() == -1) train.setClassIndex(train.numAttributes() - 1); ConverterUtils.DataSource source2 = new ConverterUtils.DataSource("./data/test.arff"); Instances test = source2.getDataSet(); // setting class attribute if the data format does not provide this information // For example, the XRFF format saves the class attribute information as well if (test.classIndex() == -1) test.setClassIndex(train.numAttributes() - 1); // model NaiveBayes naiveBayes = new NaiveBayes(); naiveBayes.buildClassifier(train); // this does the trick double label = naiveBayes.classifyInstance(test.instance(0)); test.instance(0).setClassValue(label); System.out.println(test.instance(0).stringValue(4)); } }
@Test
public void testScoreWithClassifierSomeMissingFields() throws Exception {
    // Train on the full IRIS header, then score data missing attribute 1.
    Instances train = new Instances(new BufferedReader(new StringReader(
        CorrelationMatrixMapTaskTest.IRIS)));
    train.setClassIndex(train.numAttributes() - 1);
    NaiveBayes bayes = new NaiveBayes();
    bayes.buildClassifier(train);

    WekaScoringMapTask task = new WekaScoringMapTask();
    // Remove the first attribute so the scoring data no longer matches the model.
    Remove r = new Remove();
    r.setAttributeIndices("1");
    r.setInputFormat(train);
    Instances test = Filter.useFilter(train, r);

    task.setModel(bayes, train, test);
    assertTrue(task.getMissingMismatchAttributeInfo().length() > 0);
    // improved: assertEquals reports expected-vs-actual on failure, unlike
    // assertTrue(a.equals(b)) which only says "false".
    assertEquals("sepallength missing from incoming data\n",
        task.getMissingMismatchAttributeInfo());
    assertEquals(3, task.getPredictionLabels().size());
    for (int i = 0; i < test.numInstances(); i++) {
        assertEquals(3, task.processInstance(test.instance(i)).length);
    }
}