/**
 * Returns the cluster probability distribution for an instance.
 *
 * <p>The per-cluster log joint densities are normalised into a proper
 * probability distribution.
 *
 * @param instance the instance to be clustered
 * @return the probability distribution over clusters
 * @throws Exception if computation fails
 */
public double[] distributionForInstance(Instance instance) throws Exception {

  double[] logDensities = logJointDensitiesForInstance(instance);
  return Utils.logs2probs(logDensities);
}
/**
 * Returns the cluster membership probability distribution for the given
 * instance, obtained by normalising the per-cluster log joint densities
 * into probabilities.
 *
 * @param instance the instance to be clustered
 * @return the probability distribution over clusters
 * @throws Exception if computation fails
 */
public double[] distributionForInstance(Instance instance) throws Exception {
  return Utils.logs2probs(logJointDensitiesForInstance(instance));
}
/**
 * Calculates entropy for given model and data.
 *
 * <p>For each stored value, the mixture model's log joint densities are
 * normalised into a cluster membership distribution, whose entropy is
 * weighted by the value's weight and averaged over all values.
 *
 * @param mixtureModel the mixture model to evaluate
 * @return the weighted average entropy per value, in natural-log units
 */
protected double entropy(MM mixtureModel) {
  double entropy = 0;
  for (int j = 0; j < m_NumValues; j++) {
    entropy += m_Weights[j] * ContingencyTables.entropy(Utils.logs2probs(mixtureModel.logJointDensities(m_Values[j])));
  }
  // Need natural logarithm, not base-2 logarithm
  // (presumably ContingencyTables.entropy reports bits — TODO confirm)
  entropy *= Utils.log2;
  return entropy / (double)m_NumValues;
}
/** * Calculates entrpy for given model and data. */ protected double entropy(MM mixtureModel) { double entropy = 0; for (int j = 0; j < m_NumValues; j++) { entropy += m_Weights[j] * ContingencyTables.entropy(Utils.logs2probs(mixtureModel.logJointDensities(m_Values[j]))); } entropy *= Utils.log2; // Need natural logarithm, not base-2 logarithm return entropy / (double)m_NumValues; }
/** * Calculates the class membership probabilities for the given test instance. * * @param instance the instance to be classified * @return predicted class probability distribution * @throws Exception if instance could not be classified successfully */ @Override public double[] distributionForInstance(Instance instance) throws Exception { // default model? if (m_NumIterationsPerformed == 0) { return m_ZeroR.distributionForInstance(instance); } if (m_NumIterationsPerformed == 0) { throw new Exception("No model built"); } double[] sums = new double[instance.numClasses()]; if (m_NumIterationsPerformed == 1) { return m_Classifiers[0].distributionForInstance(instance); } else { for (int i = 0; i < m_NumIterationsPerformed; i++) { sums[(int) m_Classifiers[i].classifyInstance(instance)] += m_Betas[i]; } return Utils.logs2probs(sums); } }
/** * Calculates the class membership probabilities for the given test instance. * * @param instance the instance to be classified * @return predicted class probability distribution * @throws Exception if instance could not be classified successfully */ @Override public double[] distributionForInstance(Instance instance) throws Exception { // default model? if (m_NumIterationsPerformed == 0) { return m_ZeroR.distributionForInstance(instance); } if (m_NumIterationsPerformed == 0) { throw new Exception("No model built"); } double[] sums = new double[instance.numClasses()]; if (m_NumIterationsPerformed == 1) { return m_Classifiers[0].distributionForInstance(instance); } else { for (int i = 0; i < m_NumIterationsPerformed; i++) { sums[(int) m_Classifiers[i].classifyInstance(instance)] += m_Betas[i]; } return Utils.logs2probs(sums); } }
// NOTE(review): fragment of a larger method. Converts the log-scale scores
// into a probability distribution, then renormalises — presumably to guard
// against rounding error, since logs2probs already sums to 1. Confirm intent.
tempDist = Utils.logs2probs(tempDist);
Utils.normalize(tempDist);
// Normalise this value's log joint densities into a distribution and
// transpose it into the matrix: probs[cluster][instance].
double[] p = Utils.logs2probs(model.logJointDensities(values[i]));
for (int j = 0; j < p.length; j++) {
  probs[j][i] = p[j];
// Fragment: convert the model's log joint densities for values[i] into
// probabilities, then scatter them column-wise into probs[j][i].
double[] p = Utils.logs2probs(model.logJointDensities(values[i]));
for (int j = 0; j < p.length; j++) {
  probs[j][i] = p[j];
// Accumulate the log of the naive Bayes estimate for class i, then map the
// summed log scores back to a normalised probability distribution.
normDist[i] += Math.log(nbDist[i]);
normDist = Utils.logs2probs(normDist);
/** * Output class probabilities using Bayes' rule. */ public double[] distributionForInstance(Instance inst) throws Exception { // Filter instance m_RemoveUseless.input(inst); inst = m_RemoveUseless.output(); // Convert instance to array double[] values = new double[inst.numAttributes() - 1]; int index = 0; for (int i = 0; i < m_Data.numAttributes(); i++) { if (i != m_Data.classIndex()) { values[index++] = inst.value(i); } } double[] posteriorProbs = new double[m_Data.numClasses()]; for (int i = 0; i < m_Data.numClasses(); i++) { if (m_Estimators[i] != null) { posteriorProbs[i] = m_Estimators[i].logDensity(values) + m_LogPriors[i]; } else { posteriorProbs[i] = -Double.MAX_VALUE; } } posteriorProbs = Utils.logs2probs(posteriorProbs); return posteriorProbs; }
/**
 * Output class probabilities using Bayes' rule.
 *
 * <p>For each class, the non-class attribute values are re-centred by
 * subtracting that class's mean and adding the global mean before a single
 * shared density estimator is evaluated; the log density plus the class's
 * log prior gives the unnormalised log posterior.
 *
 * @param inst the instance to be classified
 * @return the posterior probability distribution over classes
 * @throws Exception if classification fails
 */
public double[] distributionForInstance(Instance inst) throws Exception {
  // Filter instance
  m_RemoveUseless.input(inst);
  inst = m_RemoveUseless.output();
  // Convert instance to array
  double[] posteriorProbs = new double[m_Data.numClasses()];
  double[] values = new double[inst.numAttributes() - 1];
  for (int i = 0; i < m_Data.numClasses(); i++) {
    if (m_Means[i] != null) {
      // Rebuild the value array per class because the shift depends on i.
      int index = 0;
      for (int j = 0; j < m_Data.numAttributes(); j++) {
        if (j != m_Data.classIndex()) {
          values[index] = inst.value(j) - m_Means[i][index] + m_GlobalMean[index];
          index++;
        }
      }
      posteriorProbs[i] = m_Estimator.logDensity(values) + m_LogPriors[i];
    } else {
      // No statistics for this class: smallest representable log score.
      posteriorProbs[i] = -Double.MAX_VALUE;
    }
  }
  // Normalise the log scores into probabilities.
  posteriorProbs = Utils.logs2probs(posteriorProbs);
  return posteriorProbs;
}
// Fragment: convert the collected log scores into a probability
// distribution and copy it into this instance's output values.
tempvals = Utils.logs2probs(tempvals);
System.arraycopy(tempvals, 0, instanceVals, 0, tempvals.length);
if (instance.classIndex() >= 0) {
// Fragment: normalise log scores to probabilities, copy them into the
// output array, then (branch continues) handle the class attribute if set.
tempvals = Utils.logs2probs(tempvals);
System.arraycopy(tempvals, 0, instanceVals, 0, tempvals.length);
if (instance.classIndex() >= 0) {
// Accumulate the log of the naive Bayes estimate for class l, then map the
// summed log scores back to a normalised probability distribution.
normDist[l] += Math.log(nbDist[l]);
normDist = Utils.logs2probs(normDist);
// Fragment: normalise the accumulated log scores into probabilities.
probs = Utils.logs2probs(probs);
} else {
// Fragment: convert the log-scale score vector into a distribution.
probs = Utils.logs2probs(probs);
} else {