/**
 * Computes the true positive rate for the class at the given index, i.e. the
 * fraction of that class's instances that were classified correctly:
 *
 * <pre>
 * correctly classified positives
 * ------------------------------
 *        total positives
 * </pre>
 *
 * @param classIndex index of the class treated as "positive"
 * @return the true positive rate for that class
 */
public double truePositiveRate(int classIndex) {
  // Pure delegation: the wrapped evaluation object holds the statistics.
  final double rate = m_delegate.truePositiveRate(classIndex);
  return rate;
}
/**
 * Computes the true positive rate for the class at the given index, i.e. the
 * fraction of that class's instances that were classified correctly:
 *
 * <pre>
 * correctly classified positives
 * ------------------------------
 *        total positives
 * </pre>
 *
 * @param classIndex index of the class treated as "positive"
 * @return the true positive rate for that class
 */
public double truePositiveRate(int classIndex) {
  // Pure delegation: the wrapped evaluation object holds the statistics.
  final double rate = m_delegate.truePositiveRate(classIndex);
  return rate;
}
/**
 * Computes the recall for the class at the given index:
 *
 * <pre>
 * correctly classified positives
 * ------------------------------
 *        total positives
 * </pre>
 *
 * <p>Recall is by definition the same quantity as the true positive rate, so
 * this simply forwards to {@link #truePositiveRate(int)}.
 *
 * @param classIndex index of the class treated as "positive"
 * @return the recall for that class
 */
public double recall(int classIndex) {
  // Identical metric under a different name.
  return this.truePositiveRate(classIndex);
}
/**
 * Computes the recall for the class at the given index:
 *
 * <pre>
 * correctly classified positives
 * ------------------------------
 *        total positives
 * </pre>
 *
 * <p>Recall is by definition the same quantity as the true positive rate, so
 * this simply forwards to {@link #truePositiveRate(int)}.
 *
 * @param classIndex index of the class treated as "positive"
 * @return the recall for that class
 */
public double recall(int classIndex) {
  // Identical metric under a different name.
  return this.truePositiveRate(classIndex);
}
/** * Calculates the weighted (by class size) true positive rate. * * @return the weighted true positive rate. */ public double weightedTruePositiveRate() { double[] classCounts = new double[m_NumClasses]; double classCountSum = 0; for (int i = 0; i < m_NumClasses; i++) { for (int j = 0; j < m_NumClasses; j++) { classCounts[i] += m_ConfusionMatrix[i][j]; } classCountSum += classCounts[i]; } double truePosTotal = 0; for (int i = 0; i < m_NumClasses; i++) { double temp = truePositiveRate(i); if (classCounts[i] > 0) { // If temp is NaN, we want the sum to also be NaN if count > 0 truePosTotal += (temp * classCounts[i]); } } return truePosTotal / classCountSum; }
/** * Calculates the weighted (by class size) true positive rate. * * @return the weighted true positive rate. */ public double weightedTruePositiveRate() { double[] classCounts = new double[m_NumClasses]; double classCountSum = 0; for (int i = 0; i < m_NumClasses; i++) { for (int j = 0; j < m_NumClasses; j++) { classCounts[i] += m_ConfusionMatrix[i][j]; } classCountSum += classCounts[i]; } double truePosTotal = 0; for (int i = 0; i < m_NumClasses; i++) { double temp = truePositiveRate(i); if (classCounts[i] > 0) { // If temp is NaN, we want the sum to also be NaN if count > 0 truePosTotal += (temp * classCounts[i]); } } return truePosTotal / classCountSum; }
text.append(" "); if (displayTP) { double tpr = truePositiveRate(i); if (Utils.isMissingValue(tpr)) { text.append("? ");
text.append(" "); if (displayTP) { double tpr = truePositiveRate(i); if (Utils.isMissingValue(tpr)) { text.append("? ");
vals[offset++] = eval.kappa(); for (int i = 0; i < eval.getHeader().classAttribute().numValues(); i++) { vals[offset++] = eval.truePositiveRate(i); vals[offset++] = eval.falseNegativeRate(i); vals[offset++] = eval.precision(i);
return m_eval.sizeOfPredictedRegions(); case 17: return hasValIndex ? m_eval.truePositiveRate(classValIndex[0]) : m_eval .weightedTruePositiveRate(); case 18:
return m_eval.sizeOfPredictedRegions(); case 17: return hasValIndex ? m_eval.truePositiveRate(classValIndex[0]) : m_eval .weightedTruePositiveRate(); case 18: