// Train a Naive Bayes model on the training set.
// NaiveBayes already implements Classifier, so the original cast
// "(Classifier) new NaiveBayes()" was redundant and has been removed.
Classifier cModel = new NaiveBayes();
cModel.buildClassifier(isTrainingSet);

// Serialize the trained model to disk ...
weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);
// ... and deserialize it again, proving the save/load round trip works.
Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");

// Test the model. NOTE(review): this evaluates on the training set itself,
// so the figures are a resubstitution estimate (optimistically biased).
Evaluation eTest = new Evaluation(isTrainingSet);
eTest.evaluateModel(cls, isTrainingSet);
// Evaluate the trained classifier on the test split.
m_eval.evaluateModel(m_classifier, test);
// When no fraction of predictions is being retained (m_predFrac <= 0), free
// the stored per-instance predictions to save memory.
// (fragment — the if-block is not closed within this snippet)
if (m_predFrac <= 0) { ((AggregateableEvaluationWithPriors) m_eval).deleteStoredPredictions();
/**
 * Evaluates the classifier on a given set of instances. The test data must
 * have exactly the same structure (e.g. attribute order) as the data the
 * classifier was trained on; otherwise the results are generally meaningless.
 *
 * @param classifier the trained machine learning classifier
 * @param data the test instances to evaluate against
 * @param forPredictionsPrinting varargs parameter that, if supplied, is
 *          expected to hold a
 *          weka.classifiers.evaluation.output.prediction.AbstractOutput
 *          object
 * @return the predictions made on the test instances
 * @throws Exception if the model could not be evaluated successfully
 */
public double[] evaluateModel(Classifier classifier, Instances data,
  Object... forPredictionsPrinting) throws Exception {
  // Delegate the actual work and hand the result straight back.
  double[] predictions =
    m_delegate.evaluateModel(classifier, data, forPredictionsPrinting);
  return predictions;
}
/**
 * Evaluates the classifier on a given set of instances. The test data must
 * have exactly the same structure (e.g. attribute order) as the data the
 * classifier was trained on; otherwise the results are generally meaningless.
 *
 * @param classifier the trained machine learning classifier
 * @param data the test instances to evaluate against
 * @param forPredictionsPrinting varargs parameter that, if supplied, is
 *          expected to hold a
 *          weka.classifiers.evaluation.output.prediction.AbstractOutput
 *          object
 * @return the predictions made on the test instances
 * @throws Exception if the model could not be evaluated successfully
 */
public double[] evaluateModel(Classifier classifier, Instances data,
  Object... forPredictionsPrinting) throws Exception {
  // Delegate the actual work and hand the result straight back.
  double[] predictions =
    m_delegate.evaluateModel(classifier, data, forPredictionsPrinting);
  return predictions;
}
// (fragment — the method signature continues from a line outside this view)
// Delegates to the static weka.classifiers.evaluation.Evaluation entry point,
// passing the classifier's class name and its command-line options through.
throws Exception { return weka.classifiers.evaluation.Evaluation.evaluateModel( classifierString, options);
// Forward to the canonical static evaluator in weka.classifiers.evaluation.
return weka.classifiers.evaluation.Evaluation.evaluateModel(classifier, options);
// Forward to the canonical static evaluator in weka.classifiers.evaluation.
return weka.classifiers.evaluation.Evaluation.evaluateModel(classifier, options);
/**
 * A test method for this class. Just extracts the first command line argument
 * as a classifier class name and calls evaluateModel with the remaining
 * arguments as the scheme options.
 *
 * @param args an array of command line arguments, the first of which must be
 *          the class name of a classifier.
 */
public static void main(String[] args) {
  try {
    if (args.length == 0) {
      // Bad arguments are a caller error — use the standard unchecked type
      // instead of throwing a raw Exception. It is still caught below.
      throw new IllegalArgumentException(
        "The first argument must be the class name of a classifier");
    }
    String classifier = args[0];
    // Blank out the class name so it is not re-parsed as a scheme option.
    args[0] = "";
    System.out.println(evaluateModel(classifier, args));
  } catch (Exception ex) {
    // Boundary catch: report the failure and return normally,
    // matching the original behaviour.
    ex.printStackTrace();
    System.err.println(ex.getMessage());
  }
}
// Evaluate the J48 tree on the same data used to construct the Evaluation —
// presumably its training data, so this is a resubstitution estimate
// (optimistically biased); TODO confirm the intent.
Evaluation eval = new Evaluation(data);
eval.evaluateModel(j48DecisionTree, data);
// true -> include complexity statistics in the printed summary.
System.out.println(eval.toSummaryString("\nResults\n======\n", true));
/**
 * A test method for this class. Just extracts the first command line argument
 * as a classifier class name and calls evaluateModel with the remaining
 * arguments as the scheme options.
 *
 * @param args an array of command line arguments, the first of which must be
 *          the class name of a classifier.
 */
public static void main(String[] args) {
  try {
    if (args.length == 0) {
      // Bad arguments are a caller error — use the standard unchecked type
      // instead of throwing a raw Exception. It is still caught below.
      throw new IllegalArgumentException(
        "The first argument must be the class name of a classifier");
    }
    String classifier = args[0];
    // Blank out the class name so it is not re-parsed as a scheme option.
    args[0] = "";
    System.out.println(evaluateModel(classifier, args));
  } catch (Exception ex) {
    // Boundary catch: report the failure and return normally,
    // matching the original behaviour.
    ex.printStackTrace();
    System.err.println(ex.getMessage());
  }
}
// Evaluate the multilayer perceptron on its own training data
// (resubstitution estimate — optimistically biased).
Evaluation eval = new Evaluation(train);
eval.evaluateModel(mlp, train);
// NOTE(review): the original comment called this "Mean root squared Error",
// but errorRate() is the fraction of misclassified instances; use
// rootMeanSquaredError() if RMSE is actually wanted.
System.out.println(eval.errorRate());
// Full textual summary of the training evaluation.
System.out.println(eval.toSummaryString());
//Learning DataSource source = new DataSource(Path); Instances data = source.getDataSet(); J48 tree = tree.buildClassifier(data); //Evaluation Evaluation eval = new Evaluation(data); eval.evaluateModel(tree, data); System.out.println((eval.correct()/data.numInstances())*100);
Instances trainData = ds.getDataset(); //get training dataset SMO sm = new SMO(); //build classifier sm.buildClassifier(data); //train classifier Instances testData = ds.getDataSet(); //now get the test set Evaluation eval = new Evaluation(data); //for recording results eval.evaluateModel(sm, testData); System.out.println(eval.toMatrixString()); //gives the confusion matrix for predictions
1. filteredData = new Instances(new BufferedReader(new FileReader("/Users/Passionate/Desktop/train_std.arff"))); 2. Instances filteredTests= new Instances(new BufferedReader(new FileReader("/Users/Passionate/Desktop/test_std.arff"))); 3. filteredData.setClassIndex(filteredData.attribute("@@class@@").index()); 4. Classifier classifier=new SMO(); 5. classifier.buildClassifier(filteredData); 6. FilteredClassifier filteredClassifier=new FilteredClassifier(); 7. filteredClassifier.setClassifier(classifier); 8. Evaluation eval = new Evaluation(filteredData); 9. eval.evaluateModel(filteredClassifier, filteredTests); **// Error line.** 10. System.out.println(eval.toSummaryString("\nResults\n======\n", false));
// Wrap a trained classifier in an InputMappedClassifier so test data whose
// attributes differ in order/presence can be mapped to the training header.
InputMappedClassifier mappedCls = new InputMappedClassifier();
// "cls" is trained elsewhere in the file (fragment — definition not visible here).
cls.buildClassifier(data);
// Record the training header the mapping is computed against.
mappedCls.setModelHeader(data);
mappedCls.setClassifier(cls);
// Suppress the textual attribute-mapping report.
mappedCls.setSuppressMappingReport(true);
Evaluation eval = new Evaluation(testdata);
eval.evaluateModel(mappedCls, testdata);
// Evaluate the (already trained) classifier on the test set.
eval.evaluateModel(cls, test);
// Print the model itself ...
System.out.println(cls);
// ... then the evaluation summary (false = omit complexity statistics).
System.out.println(eval.toSummaryString("\nResults\n======\n", false));
/**
 * Loads train/test data, configures a LibSVM classifier from the stored SVM
 * classification-type parameters (id 6), trains it, and prints the evaluation
 * summary on the test set. Failures are reported via Misc_Utils.
 */
public static void classify() {
  try {
    // Prepare the training set; the class attribute is the last one.
    Instances train = new Instances (...);
    train.setClassIndex(train.numAttributes() - 1);

    // Prepare the test set the same way.
    Instances test = new Instances (...);
    test.setClassIndex(test.numAttributes() - 1);

    // Look up the stored SVM configuration and apply its space-separated
    // option string to the classifier.
    ClassificationType classificationType = ClassificationTypeDAO.get(6); // 6 is SVM.
    LibSVM classifier = new LibSVM();
    classifier.setOptions(classificationType.getParameters().split(" "));

    // Train on the training data, then evaluate on the held-out test data.
    classifier.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(classifier, test);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
  } catch (Exception ex) {
    Misc_Utils.printStackTrace(ex);
  }
}
// Evaluate the trained classifier on the test instances, recording the
// results in eval1.
eval1.evaluateModel(cls, test);
// Evaluate the copied classifier on the test fold, routing the per-instance
// predictions to the supplied output object when one was given.
// (fragment — the surrounding if/else is cut off at both ends)
evaluateModel(copiedClassifier, test, forPrinting); } else { evaluateModel(copiedClassifier, test);
// Evaluate the copied classifier on the test fold, routing the per-instance
// predictions to the supplied output object when one was given.
// (fragment — the surrounding if/else is cut off at both ends)
evaluateModel(copiedClassifier, test, forPrinting); } else { evaluateModel(copiedClassifier, test);