/** Returns the smallest value in {@code args} (located via {@code Utils.minIndex}). */
double eval(double[] args) {
    int smallest = Utils.minIndex(args);
    return args[smallest];
}
/** Evaluates to the minimum element of the argument array. */
double eval(double[] args) {
    final int minPos = Utils.minIndex(args);
    return args[minPos];
}
/** * Convert distribution using minimum expected cost approach. The incoming * array is modified and returned! * * @param pred the predicted distribution * @param instance the instance * @return the modified distribution */ protected double[] convertDistribution(double[] pred, Instance instance) throws Exception { double [] costs = m_CostMatrix.expectedCosts(pred, instance); // This is probably not ideal int classIndex = Utils.minIndex(costs); for (int i = 0; i < pred.length; i++) { if (i == classIndex) { pred[i] = 1.0; } else { pred[i] = 0.0; } } return pred; }
/** * Convert distribution using minimum expected cost approach. The incoming * array is modified and returned! * * @param pred the predicted distribution * @param instance the instance * @return the modified distribution */ protected double[] convertDistribution(double[] pred, Instance instance) throws Exception { double [] costs = m_CostMatrix.expectedCosts(pred, instance); // This is probably not ideal int classIndex = Utils.minIndex(costs); for (int i = 0; i < pred.length; i++) { if (i == classIndex) { pred[i] = 1.0; } else { pred[i] = 0.0; } } return pred; }
double min = preds[Utils.minIndex(preds)]; if (min < 0) { for (int i = 0; i < preds.length; i++) {
double min = preds[Utils.minIndex(preds)]; if (min < 0) { for (int i = 0; i < preds.length; i++) {
finished = true; bestCluster = Utils.minIndex(distance);
System.out.println("Max index (doubles): " + Utils.maxIndex(doubles)); System.out.println("Max index (ints): " + Utils.maxIndex(ints)); System.out.println("Min index (doubles): " + Utils.minIndex(doubles)); System.out.println("Min index (ints): " + Utils.minIndex(ints)); System.out.println("Median (doubles): " + Utils.kthSmallestValue(doubles, doubles.length / 2));
System.out.println("Max index (doubles): " + Utils.maxIndex(doubles)); System.out.println("Max index (ints): " + Utils.maxIndex(ints)); System.out.println("Min index (doubles): " + Utils.minIndex(doubles)); System.out.println("Min index (ints): " + Utils.minIndex(ints)); System.out.println("Median (doubles): " + Utils.kthSmallestValue(doubles, doubles.length / 2));
double isLabelMin = isLabelModelOuts[Utils.minIndex(isLabelModelOuts)]; double isNotLabelMax = isNotLabelModelOuts[Utils.maxIndex(isNotLabelModelOuts)];
performance[i] = Math.abs(measure.getIdealValue() - measureForThreshold[i].getValue()); int t = Utils.minIndex(performance); if (t == 0) { tempThreshold = conf[j].get(t);
protected void buildInternal(MultiLabelInstances trainingData) throws Exception { baseLearner.build(trainingData); MultiLabelOutput mlo = baseLearner.makePrediction(trainingData.getDataSet().firstInstance()); if (!mlo.hasRanking()) { throw new MulanRuntimeException("Learner is not a ranker"); } // by default set threshold equal to the rounded average cardinality if (measure == null) { t = (int) Math.round(trainingData.getCardinality()); t = 2; } else { // hold a reference to the trainingData in case of auto-tuning if (folds == 0) { double[] diff = computeThreshold(baseLearner, trainingData, measure); t = Utils.minIndex(diff); } else { autoTuneThreshold(trainingData, measure, folds); } } }
/**
 * Automatically selects the bipartition threshold {@code t} by evaluating
 * every candidate threshold with cross-validation on the training data and
 * keeping the one whose accumulated deviation from the ideal measure value
 * is smallest.
 *
 * @param trainingData the multi-label training data
 * @param measure performance is evaluated based on this measure
 * @param folds number of cross-validation folds (must be at least 2)
 * @throws InvalidDataFormatException if a fold cannot be wrapped as
 *         {@link MultiLabelInstances}
 * @throws Exception if building or evaluating the fold learner fails
 */
private void autoTuneThreshold(MultiLabelInstances trainingData,
        BipartitionMeasureBase measure, int folds)
        throws InvalidDataFormatException, Exception {
    if (folds < 2) {
        throw new IllegalArgumentException("folds should be more than 1");
    }
    // accumulated deviation per candidate threshold (0 .. numLabels)
    double[] accumulated = new double[numLabels + 1];
    LabelsMetaData meta = trainingData.getLabelsMetaData();
    MultiLabelLearner cvLearner = foldLearner.makeCopy();
    for (int fold = 0; fold < folds; fold++) {
        MultiLabelInstances cvTrain = new MultiLabelInstances(
                trainingData.getDataSet().trainCV(folds, fold), meta);
        MultiLabelInstances cvTest = new MultiLabelInstances(
                trainingData.getDataSet().testCV(folds, fold), meta);
        cvLearner.build(cvTrain);
        double[] foldDiff = computeThreshold(cvLearner, cvTest, measure);
        for (int k = 0; k < foldDiff.length; k++) {
            accumulated[k] += foldDiff[k];
        }
    }
    t = Utils.minIndex(accumulated);
}
return min + Utils.minIndex(performance) * step;
int pos = Utils.minIndex(kullback); predict[(int) m_Class[pos]] += m_Weights[pos]; kullback[pos] = Double.POSITIVE_INFINITY;
if (this.adwin.getEstimation() > ErrEstim) { int index = Utils.minIndex(accuracies); if (getDebug()) System.out.println("------- CHANGE DETECTED / Reset Model #"+index+" ------- ");
int minI = Utils.minIndex(preds); preds = new double[preds.length]; preds[minI] = 1.0;
int minI = Utils.minIndex(preds); preds = new double[preds.length]; preds[minI] = 1.0;
int index = Utils.minIndex(dists); pred[(int) m_Class[index]]++; dists[index] = Double.POSITIVE_INFINITY;