private final static double[] decodeArrayDouble(final String _tab) { // System.err.println("appel de decodeArrayDouble"); final StringTokenizer stk = new StringTokenizer(_tab," "); TDoubleArrayList liste = new TDoubleArrayList(_tab.length()/5); while(stk.hasMoreElements()) { liste.add(Double.parseDouble(stk.nextToken())); } return liste.toNativeArray(); }
/** * Permet d'enlever plusieurs indices d'un tableau. Les anciennes valeurs peuvent etre enregistree. Attention, si un * index n'est pas correct cela peut tout fausser. * * @param _init le tableau a modifier * @param _sortedIdx les indices a enlever donner dans l'ordre * @param _removedValues le tableau qui va contenir (si non null et si de meme taille qui _init) les valeurs enlevees. * @return true si toutes les valeurs */ public static boolean removeIdx(final TDoubleArrayList _init, final int[] _sortedIdx, final double[] _removedValues) { if (_init == null) { return false; } final int n = _init.size(); final double[] newValues = new double[n]; int idx = 0; if (_removedValues != null) { final boolean addOld = _removedValues.length == _sortedIdx.length; int idxOld = 0; for (int i = 0; i < n; i++) { if (Arrays.binarySearch(_sortedIdx, i) < 0) { newValues[idx++] = _init.getQuick(i); } else if (addOld) { _removedValues[idxOld++] = _init.getQuick(i); } } } _init.clear(); _init.add(newValues, 0, idx); // true si les tailles concordent return n == _sortedIdx.length + _init.size(); }
public void write(String outDir, CotrainOutputData data) throws Exception { java.io.File f = new java.io.File(outDir); f.mkdirs(); String fname = outDir + Os.pathSeparator() + "cotraining.db"; DataOutputStream os = new DataOutputStream(new BufferedOutputStream( new FileOutputStream(fname))); os.writeInt(data.catsThreshold.size()); for (int i = 0; i < data.catsThreshold.size(); i++) { os.writeDouble(data.catsThreshold.get(i)); } // Close the stream. os.close(); }
/**
 * Sheds any excess capacity above and beyond the current size of the list,
 * replacing the backing array with one of exactly {@code size()} elements.
 */
public void trimToSize() {
  final int used = size();
  if (_data.length > used) {
    final double[] compact = new double[used];
    toNativeArray(compact, 0, used);
    _data = compact;
  }
}
/**
 * Builds an evolution over the given fixed time steps; one value slot per time
 * step is created, initialized to zero.
 *
 * @param _t the fixed time steps (stored by reference, not copied)
 */
public EvolutionReguliereTFixe(final double[] _t) {
  super();
  t_ = _t;
  // Pre-size, then append a zero-filled value per time step in one call.
  val_.ensureCapacity(t_.length);
  val_.add(new double[t_.length]);
}
/**
 * Filters the columns of {@code _values}: keeps only the lines (second index)
 * where every column parses successfully, and returns the parsed values as one
 * double array per column.
 *
 * @param _values the raw string values, indexed [column][line]
 * @param _parser the parser used both to validate and to parse each cell
 * @return one double array per column containing only the fully valid lines,
 *         or null if {@code _values} is empty (kept for backward compatibility:
 *         existing callers test for null)
 */
public static double[][] getCorrectValue(final String[][] _values, final CtuluDoubleParser _parser) {
  final int nbValues = _values.length;
  if (nbValues == 0) {
    return null;
  }
  // assumes every column of _values has the same length as _values[0] — TODO confirm
  final int nbLine = _values[0].length;
  final TDoubleArrayList[] newValues = new TDoubleArrayList[nbValues];
  for (int i = 0; i < nbValues; i++) {
    newValues[i] = new TDoubleArrayList(nbLine);
  }
  for (int j = 0; j < nbLine; j++) {
    // A line is kept only if every column's cell is valid.
    boolean ok = true;
    for (int i = 0; i < nbValues && ok; i++) {
      ok = _parser.isValid(_values[i][j]);
    }
    if (ok) {
      for (int k = nbValues - 1; k >= 0; k--) {
        newValues[k].add(_parser.parse(_values[k][j]));
      }
    }
  }
  // Allocate only the outer array: each row is produced by toNativeArray(),
  // so pre-allocating inner rows (as the previous version did) was wasted work.
  final double[][] res = new double[nbValues][];
  for (int k = nbValues - 1; k >= 0; k--) {
    res[k] = newValues[k].toNativeArray();
  }
  return res;
}
/**
 * Builds a fixed evolution from parallel time/value arrays.
 *
 * @param _t the time values (handed to the superclass)
 * @param _var the variable values, copied into a fresh Trove list
 */
public EvolutionReguliereFixe(final double[] _t, final double[] _var) {
  super(_t);
  val_ = new TDoubleArrayList(_var);
}
/**
 * Initializes this evolution from another one. Scalar state and object
 * references are copied directly; the value list is deep-copied.
 *
 * <p>NOTE(review): xVal_, yVal_ and listener_ end up shared with {@code _evol}
 * (shallow copy) — confirm that callers expect this aliasing.
 *
 * @param _evol the source evolution; a null source is silently ignored
 */
protected void initWith(final EvolutionReguliereAbstract _evol) {
  if (_evol == null) {
    return;
  }
  nom_ = _evol.nom_;
  unite_ = _evol.unite_;
  used_ = _evol.used_;
  xVal_ = _evol.xVal_;
  yVal_ = _evol.yVal_;
  listener_ = _evol.listener_;
  // Values are copied into our own list, not aliased.
  val_.clear();
  val_.add(_evol.val_.toNativeArray());
}
/**
 * Inserts (or replaces) a point while keeping the x values sorted.
 *
 * @param _x the x to add; if already present, its y is overwritten
 * @param _y the matching y value
 */
void put(final double _x, final double _y) {
  final int size = t_.size();
  if (size == 0) {
    t_.add(_x);
    val_.add(_y);
    return;
  }
  final int found = t_.binarySearch(_x);
  if (found >= 0) {
    // x already present: just update the y value.
    val_.set(found, _y);
    return;
  }
  // binarySearch returns -(insertionPoint + 1) when the key is absent.
  final int insertionPoint = -found - 1;
  if (insertionPoint >= size) {
    // Larger than every stored x: append.
    t_.add(_x);
    val_.add(_y);
  } else {
    t_.insert(insertionPoint, _x);
    val_.insert(insertionPoint, _y);
  }
}
/**
 * Classifies a document with every committee member and returns the average of
 * their scores per category.
 *
 * <p>NOTE(review): the accumulation assumes every classifier reports the same
 * categories in the same order as the first one — confirm with the classifier
 * implementations.
 *
 * @param testIndex the index holding the document
 * @param docID the document to classify
 * @return the averaged classification result
 */
public ClassificationResult classify(IIndex testIndex, int docID) {
  ClassificationResult combined = new ClassificationResult();
  combined.documentID = docID;
  for (int c = 0; c < _classifiers.length; ++c) {
    ClassificationResult partial = _classifiers[c].classify(testIndex, docID);
    // Lazily seed the category list and zeroed scores from the first result.
    if (combined.categoryID.size() == 0) {
      for (int cat = 0; cat < partial.categoryID.size(); ++cat) {
        combined.categoryID.add(partial.categoryID.getQuick(cat));
        combined.score.add(0);
      }
    }
    for (int cat = 0; cat < partial.score.size(); ++cat) {
      combined.score.setQuick(cat, combined.score.getQuick(cat) + partial.score.getQuick(cat));
    }
  }
  // Turn the sums into the committee average.
  for (int cat = 0; cat < combined.score.size(); ++cat) {
    combined.score.setQuick(cat, combined.score.getQuick(cat) / _classifiers.length);
  }
  return combined;
}
/**
 * Combines the committee members' scores for a document, weighting each
 * classifier's score by its confidence matrix.
 *
 * <p>BUG FIX: the accumulated value was previously written back into
 * {@code res.score} (the member's own result) instead of {@code cr.score}, so
 * the returned result kept its initial zero scores. The sum is now stored in
 * {@code cr}.
 *
 * @param cl the committee classifier (unused here, kept for the interface)
 * @param results one result per committee member
 * @param testIndex the index holding the document
 * @param docID the document being scored
 * @return the weighted combination of the members' scores
 * @throws RuntimeException if results and matrixes differ in number
 */
public ClassificationResult computeScore(KnnCommitteeClassifier cl, Vector<ClassificationResult> results, IIndex testIndex, int docID) {
  if (results.size() != _matrixes.size())
    throw new RuntimeException("The number of matrixes and classifiers must be the same");
  ClassificationResult cr = new ClassificationResult();
  cr.documentID = docID;
  // Seed categories from the first member, with zeroed scores.
  for (int i = 0; i < results.get(0).categoryID.size(); i++) {
    cr.categoryID.add(results.get(0).categoryID.get(i));
    cr.score.add(0);
  }
  for (int i = 0; i < results.size(); i++) {
    ClassificationResult res = results.get(i);
    for (int j = 0; j < res.score.size(); j++) {
      double val = cr.score.get(j)
          + (res.score.get(j) * _matrixes.get(i).getWeight(res.categoryID.get(j), docID, 0));
      // Accumulate into the combined result (was res.score — see BUG FIX above).
      cr.score.set(j, val);
    }
  }
  return cr;
}
// NOTE(review): incomplete fragment — the enclosing method begins before this
// chunk and the loops are not closed within it; left byte-identical.
// NOTE(review): centroids[whichCluster].distances.add(best) executes once per
// iteration of i, right after EVERY centroid's distances were cleared — this
// looks like it belongs outside the loop; confirm against the full method.
for (int i = 0; i < centroids.length; i++) { centroids[i].documents.clear(); centroids[i].distances.clear(); centroids[whichCluster].distances.add(best); cd.centroid = centroids[i].features; cd.documents = centroids[i].documents; cd.distance = new TDoubleArrayList(cd.documents.size()); for (int j = 0; j < cd.documents.size(); j++) { double score = _customizer.getSimilarityFunction().computeSimilarity(centroids[i].features, dists, (int) cd.documents.get(j)); cd.distance.add(score);
/**
 * Adds the values in the array <tt>vals</tt> to the end of the list, in order.
 * Delegates to the ranged overload over the whole array.
 *
 * @param vals an <code>double[]</code> value
 */
public void add(double[] vals) {
  this.add(vals, 0, vals.length);
}
/**
 * Returns the y value stored at the given index (unchecked fast access).
 *
 * @param _idx the requested index
 * @return the y value at <code>_idx</code>
 */
public final double getY(final int _idx) {
  return val_.getQuick(_idx);
}
/**
 * Builds an immutable array from a Trove list; the values are copied via
 * <code>toNativeArray()</code>, so later changes to <code>_init</code> are not seen.
 *
 * @param _init the initial values
 */
public CtuluArrayDoubleImmutable(final TDoubleArrayList _init) {
  this(_init.toNativeArray());
}
/**
 * Compares the X values of this evolution with those of <code>_toCompare</code>.
 * If they differ, returns false and stores the union of both abscissa sets in
 * <code>_commonX</code> (when provided).
 *
 * @param _toCompare the evolution to compare against; null yields false
 * @param _commonX if non-null, filled with the union of both evolutions' x
 *        values when they differ
 * @return true if both evolutions have identical x values
 */
public boolean isEvolutionWithSameX(final EvolutionReguliere _toCompare, final TDoubleHashSet _commonX) {
  if (_toCompare == null) {
    return false;
  }
  final boolean sameX = t_.equals(_toCompare.t_);
  if (!sameX && _commonX != null) {
    // Collect the union of both abscissa sets for the caller.
    _commonX.ensureCapacity(t_.size() + _toCompare.t_.size());
    _commonX.addAll(t_.toNativeArray());
    _commonX.addAll(_toCompare.t_.toNativeArray());
  }
  return sameX;
}
/**
 * Creates a result whose category and score lists are pre-sized for
 * <code>size</code> entries.
 *
 * @param size the expected number of categories
 */
public ClassificationResult(int size) {
  // Pass the capacity straight to the Trove constructors: one allocation
  // instead of default-allocate then grow via ensureCapacity.
  categoryID = new TShortArrayList(size);
  score = new TDoubleArrayList(size);
}
}