/**
 * Returns the weighted arithmetic mean of the entries in the input array.
 * <p>
 * Throws <code>IllegalArgumentException</code> if either array is null.</p>
 * <p>
 * See {@link Mean} for details on the computing algorithm. The two-pass algorithm
 * described above is used here, with weights applied in computing both the original
 * estimate and the correction factor.</p>
 * <p>
 * Throws <code>IllegalArgumentException</code> if any of the following are true:
 * <ul><li>the values array is null</li>
 *     <li>the weights array is null</li>
 *     <li>the weights array does not have the same length as the values array</li>
 *     <li>the weights array contains one or more infinite values</li>
 *     <li>the weights array contains one or more NaN values</li>
 *     <li>the weights array contains negative values</li>
 * </ul></p>
 *
 * @param values the input array
 * @param weights the weights array
 * @return the mean of the values or Double.NaN if length = 0
 * @throws IllegalArgumentException if the parameters are not valid
 * @since 2.1
 */
public double evaluate(final double[] values, final double[] weights) {
    // Guard explicitly so a null values array fails with the documented
    // IllegalArgumentException instead of a NullPointerException from
    // dereferencing values.length below.
    if (values == null) {
        throw new IllegalArgumentException("input values array is null");
    }
    return evaluate(values, weights, 0, values.length);
}
/**
 * Returns the weighted arithmetic mean of the entries in the input array.
 * <p>
 * Throws <code>IllegalArgumentException</code> if either array is null.</p>
 * <p>
 * See {@link Mean} for details on the computing algorithm. The two-pass algorithm
 * described above is used here, with weights applied in computing both the original
 * estimate and the correction factor.</p>
 * <p>
 * Throws <code>IllegalArgumentException</code> if any of the following are true:
 * <ul><li>the values array is null</li>
 *     <li>the weights array is null</li>
 *     <li>the weights array does not have the same length as the values array</li>
 *     <li>the weights array contains one or more infinite values</li>
 *     <li>the weights array contains one or more NaN values</li>
 *     <li>the weights array contains negative values</li>
 * </ul></p>
 *
 * @param values the input array
 * @param weights the weights array
 * @return the mean of the values or Double.NaN if length = 0
 * @throws IllegalArgumentException if the parameters are not valid
 * @since 2.1
 */
public double evaluate(final double[] values, final double[] weights) {
    // Fail with the documented IllegalArgumentException on a null values
    // array; without this check, values.length raises NullPointerException.
    if (values == null) {
        throw new IllegalArgumentException("input values array is null");
    }
    return evaluate(values, weights, 0, values.length);
}
/**
 * Scores the given vector as the arithmetic mean of its entries,
 * delegating the computation to {@link Mean}.
 *
 * @param vec the input vector
 * @return the arithmetic mean of the vector's entries
 */
@Override
public double detect(double[] vec) {
    final Mean meanStatistic = new Mean();
    return meanStatistic.evaluate(vec);
}
/**
 * Computes the detection score for the vector: the arithmetic mean of
 * all entries, as evaluated by a fresh {@link Mean} instance.
 *
 * @param vec the input vector
 * @return the arithmetic mean of {@code vec}
 */
@Override
public double detect(double[] vec) {
    final double score = new Mean().evaluate(vec);
    return score;
}
/**
 * Calculates the mean of all attribute values.
 *
 * @param attributeValues attribute values
 * @return the mean
 */
public Double calculateMean( Comparable[] attributeValues )
{
    // Convert the Comparable wrappers to a primitive double[] before
    // handing them to the Commons Math Mean statistic.
    final double[] primitiveValues = convertToPrimitives( attributeValues );
    final Double evaluatedMean = new Mean().evaluate( primitiveValues );
    log.debug( "mean = " + evaluatedMean );
    return evaluatedMean;
}
/**
 * This method calculates {@link SemiVariance} for the entire array against the mean, using
 * the current value of the biasCorrection instance property.
 *
 * @param values the input array
 * @param direction the {@link Direction} of the semivariance
 * @return the SemiVariance
 * @throws IllegalArgumentException if values is null
 *
 */
public double evaluate(final double[] values, Direction direction) {
    // Guard so a null array fails with the documented IllegalArgumentException
    // rather than a NullPointerException from values.length below.
    if (values == null) {
        throw new IllegalArgumentException("input values array is null");
    }
    final double m = (new Mean()).evaluate(values);
    return evaluate(values, m, direction, biasCorrected, 0, values.length);
}
/**
 * Calculates the {@link SemiVariance} of the whole array against its
 * arithmetic mean, honoring the instance's biasCorrection setting.
 *
 * @param values the input array
 * @param direction the {@link Direction} of the semivariance
 * @return the SemiVariance
 * @throws IllegalArgumentException if values is null
 */
public double evaluate(final double[] values, Direction direction) {
    // First pass: arithmetic mean of the full array.
    final Mean meanStatistic = new Mean();
    final double arithmeticMean = meanStatistic.evaluate(values);
    // Second pass: semivariance of the full array around that mean.
    return evaluate(values, arithmeticMean, direction, biasCorrected, 0, values.length);
}
// Summary statistics of the size distribution; results are narrowed to
// float to match the field types.
avgSize = (float) mean.evaluate(distribution);
stdevSize = (float) stdev.evaluate(distribution);
// Mean and standard deviation of the distribution, cast down from the
// double returned by Commons Math to the float fields used here.
avgSize = (float) mean.evaluate(distribution);
stdevSize = (float) stdev.evaluate(distribution);
/**
 * <p>Returns the {@link SemiVariance} of the designated values against the mean, using
 * instance properties varianceDirection and biasCorrection.</p>
 *
 * <p>Returns <code>NaN</code> if the array is empty and throws
 * <code>IllegalArgumentException</code> if the array is null.</p>
 *
 * @param values the input array
 * @param start index of the first array element to include
 * @param length the number of elements to include
 * @return the SemiVariance
 * @throws IllegalArgumentException if the parameters are not valid
 *
 */
@Override
public double evaluate(final double[] values, final int start, final int length) {
    final double m = (new Mean()).evaluate(values, start, length);
    // BUG FIX: the semivariance must be computed over the same
    // [start, start + length) window used for the mean above; the original
    // passed (0, values.length), silently ignoring the start/length arguments.
    return evaluate(values, m, varianceDirection, biasCorrected, start, length);
}
/**
 * <p>Returns the {@link SemiVariance} of the designated values against the mean, using
 * instance properties varianceDirection and biasCorrection.</p>
 *
 * <p>Returns <code>NaN</code> if the array is empty and throws
 * <code>IllegalArgumentException</code> if the array is null.</p>
 *
 * @param values the input array
 * @param start index of the first array element to include
 * @param length the number of elements to include
 * @return the SemiVariance
 * @throws IllegalArgumentException if the parameters are not valid
 *
 */
@Override
public double evaluate(final double[] values, final int start, final int length) {
    final double m = (new Mean()).evaluate(values, start, length);
    // BUG FIX: evaluate the semivariance over the same sub-array window the
    // mean was computed on. The original used (0, values.length), which
    // contradicts the (start, length) contract of this overload.
    return evaluate(values, m, varianceDirection, biasCorrected, start, length);
}
/** * Calculates a number of evaluation measures for multi-label classification, without class-wise * measures. * * @param predictions * predictions by the classifier (ranking) * @param goldStandard * gold standard (bipartition) * @param t * a threshold to create bipartitions from rankings * @return the evaluation statistics */ public static HashMap<String, Double> calcMLStats(double predictions[][], int goldStandard[][], double t[]) { int N = goldStandard.length; int L = goldStandard[0].length; int Ypred[][] = ThresholdUtils.threshold(predictions, t); HashMap<String, Double> results = new LinkedHashMap<String, Double>(); Mean mean = new Mean(); results.put("Number labels", (double) L); results.put("Number examples", (double) N); results.put("Zero-one-loss", Metrics.L_ZeroOne(goldStandard, Ypred)); results.put("Label cardinality predicted", MLUtils.labelCardinality(Ypred)); results.put("Label cardinality actual", MLUtils.labelCardinality(goldStandard)); results.put("Average threshold", mean.evaluate(t, 0, t.length)); // average results.put("Empy vectors", MLUtils.emptyVectors(Ypred)); return results; }
/**
 * Calculates a number of evaluation measures for multi-label classification, without class-wise
 * measures.
 *
 * @param predictions
 *            predictions by the classifier (ranking)
 * @param goldStandard
 *            gold standard (bipartition)
 * @param t
 *            a threshold to create bipartitions from rankings
 * @return the evaluation statistics
 */
public static HashMap<String, Double> calcMLStats(double predictions[][], int goldStandard[][],
        double t[]) {
    int N = goldStandard.length;       // number of examples
    int L = goldStandard[0].length;    // number of labels
    // Binarize the rankings into 0/1 predictions using the per-label thresholds.
    int Ypred[][] = ThresholdUtils.threshold(predictions, t);
    HashMap<String, Double> results = new LinkedHashMap<String, Double>();
    Mean mean = new Mean();
    results.put("Number labels", (double) L);
    results.put("Number examples", (double) N);
    results.put("Zero-one-loss", Metrics.L_ZeroOne(goldStandard, Ypred));
    results.put("Label cardinality predicted", MLUtils.labelCardinality(Ypred));
    results.put("Label cardinality actual", MLUtils.labelCardinality(goldStandard));
    results.put("Average threshold", mean.evaluate(t, 0, t.length)); // average
    // NOTE(review): "Empy" is a typo for "Empty", but it is a runtime map key
    // that callers may look up verbatim, so it must not be changed here.
    results.put("Empy vectors", MLUtils.emptyVectors(Ypred));
    return results;
}
// First pass of the two-pass algorithm: mean of values[begin .. begin+length).
Mean mean = new Mean();
double m = mean.evaluate(values, begin, length);
} else if (length > 1) {
    // More than one element: compute the window mean first, then delegate
    // to the mean-based overload for the second pass.
    Mean mean = new Mean();
    double m = mean.evaluate(values, begin, length);
    var = evaluate(values, m, begin, length);
} else if (length > 1) {
    // Two-pass computation: first the mean of the [begin, begin+length)
    // window, then the variance around that mean.
    Mean mean = new Mean();
    double m = mean.evaluate(values, begin, length);
    var = evaluate(values, m, begin, length);
} else if (length > 1) {
    // With at least two elements, evaluate the sample mean over the window
    // and hand off to the overload that takes a precomputed mean.
    Mean mean = new Mean();
    double m = mean.evaluate(values, begin, length);
    var = evaluate(values, m, begin, length);
/**
 * Recomputes the summary statistics from the raw samples accumulated so far.
 * Drains the raw data buffer, derives percentiles, mean, max and standard
 * deviation when samples exist, and publishes a snapshot of the results.
 */
protected void compute() {
    // Drain the accumulated samples; may be null or empty if nothing arrived.
    final double[] samples = clearRawDataAndGetAsArray();
    if (samples != null && samples.length != 0) {
        sampleSize = samples.length;
        final Percentile percentile = new Percentile();
        percentile.setData(samples);
        percentile_99_5 = percentile.evaluate(99.5);
        percentile_99 = percentile.evaluate(99);
        percentile_90 = percentile.evaluate(90);
        // Median is floored at 1 (same clamp as the original implementation).
        median = Math.max(1d, percentile.evaluate(50));
        max = StatUtils.max(samples);
        mean = new Mean().evaluate(samples);
        stddev = new StandardDeviation().evaluate(samples);
    }
    // Publish a consistent snapshot regardless of whether new data arrived.
    computedData.set(getCopyOfComputedData());
}