/**
 * Convert a {@link FeatureVector} into a dense double array via
 * {@link FeatureVector#asDoubleVector()}, optionally appending a bias term.
 *
 * @param feature
 *            the feature to convert
 * @param bias
 *            the bias term; if &lt;=0 no term is appended, otherwise one
 *            extra trailing element set to this value is added.
 * @return the double[] representation of the feature
 */
public static double[] convertDense(FeatureVector feature, double bias) {
	final double[] dense = feature.asDoubleVector();

	if (bias > 0) {
		// append the bias as a single extra trailing element
		final double[] withBias = new double[dense.length + 1];
		System.arraycopy(dense, 0, withBias, 0, dense.length);
		withBias[dense.length] = bias;
		return withBias;
	}

	return dense;
}
}
/**
 * Learn the PCA basis of the given feature vectors.
 *
 * @param data
 *            the feature vectors to apply PCA to.
 */
public void learnBasis(FeatureVector[] data) {
	// unpack each feature into its dense double representation
	final double[][] rows = new double[data.length][];

	int r = 0;
	for (final FeatureVector fv : data)
		rows[r++] = fv.asDoubleVector();

	learnBasis(rows);
}
/**
 * Learn the PCA basis of the given feature vectors.
 *
 * @param data
 *            the feature vectors to apply PCA to.
 */
public void learnBasis(Collection<? extends FeatureVector> data) {
	// densify every feature up-front, preserving iteration order
	final double[][] rows = new double[data.size()][];

	int r = 0;
	for (final FeatureVector fv : data)
		rows[r++] = fv.asDoubleVector();

	learnBasis(rows);
}
// Extract a feature from each image and compare them. For EQUALS the
// dense vectors must match element-for-element (1 on match, 0 otherwise);
// any other comparison mode is delegated to the appropriate comparator.
double execute() {
	final FeatureVector first = featureOp.extract(im1);
	final FeatureVector second = featureOp.extract(im2);

	if (compare != FeatureComparison.EQUALS)
		return getComp(first, compare).compare(first, second);

	return Arrays.equals(first.asDoubleVector(), second.asDoubleVector()) ? 1 : 0;
}
/** * Takes a {@link FeatureVector} and converts it into an array of {@link svm_node}s * for the svm library. * * @param featureVector The feature vector to convert * @return The equivalent svm_node[] */ static private svm_node[] featureToNode( final FeatureVector featureVector ) { // if( featureVector instanceof SparseFeatureVector ) // { // // } // else { final double[] fv = featureVector.asDoubleVector(); final svm_node[] nodes = new svm_node[fv.length]; for( int i = 0; i < fv.length; i++ ) { nodes[i] = new svm_node(); nodes[i].index = i; nodes[i].value = fv[i]; } return nodes; } } }
// Extract a feature from each image and compare. EQUALS means an exact
// element-wise match of the dense vectors (returns 1/0); other modes are
// handed off to the comparator selected by getComp.
double execute() {
	final FeatureVector left = featureOp.extract(im1);
	final FeatureVector right = featureOp.extract(im2);

	if (compare != FeatureComparison.EQUALS)
		return getComp(left, compare).compare(left, right);

	return Arrays.equals(left.asDoubleVector(), right.asDoubleVector()) ? 1 : 0;
}
// Densify the feature and wrap it in the library's default dense Vector.
private Vector convert(FeatureVector feature) {
	final double[] dense = feature.asDoubleVector();
	return VectorFactory.getDenseDefault().copyArray(dense);
}
}
/**
 * Project a vector by the basis. The vector is normalised by subtracting
 * the mean and then multiplied by the basis.
 *
 * @param vector
 *            the vector to project
 * @return projected vector
 */
public DoubleFV project(FeatureVector vector) {
	// delegate to the double[] overload and wrap the result
	final double[] projected = project(vector.asDoubleVector());
	return new DoubleFV(projected);
}
/**
 * Annotate the given object: extract its feature, project it through the
 * learned transform, and return one scored annotation per term, sorted by
 * decreasing confidence.
 *
 * @param image the object to annotate
 * @return the scored annotations, highest confidence first
 */
@Override
public List<ScoredAnnotation<ANNOTATION>> annotate(OBJECT image) {
	final double[] fv = extractor.extractFeature(image).asDoubleVector();

	// project the (row) feature through the learned linear transform
	final Matrix F = new Matrix(new double[][] { fv });
	final Matrix res = F.times(transform);

	final List<ScoredAnnotation<ANNOTATION>> ann = new ArrayList<ScoredAnnotation<ANNOTATION>>();
	for (int i = 0; i < terms.size(); i++) {
		ann.add(new ScoredAnnotation<ANNOTATION>(terms.get(i), (float) res.get(0, i)));
	}

	// Sort by decreasing confidence. Float.compare returns 0 for equal
	// values; the previous ternary (never returning 0, asymmetric for
	// ties) violated the Comparator contract and could make TimSort throw.
	Collections.sort(ann, new Comparator<ScoredAnnotation<ANNOTATION>>() {
		@Override
		public int compare(ScoredAnnotation<ANNOTATION> o1, ScoredAnnotation<ANNOTATION> o2) {
			return Float.compare(o2.confidence, o1.confidence);
		}
	});

	return ann;
}
/**
 * Estimate a {@link MixtureOfGaussians} from the given local features using
 * EM. Each feature is densified and divided by 128 before estimation.
 *
 * @param features the local features to model; must be non-empty
 * @return the estimated mixture model
 * @throws IllegalArgumentException if no features are provided
 */
@Override
public MixtureOfGaussians apply(LocalFeatureList<? extends LocalFeature<?, ? extends FeatureVector>> features) {
	System.out.println("Creating double array...");

	final double[][] doubleFeatures = new double[features.size()][];
	int i = 0;
	for (final LocalFeature<?, ?> localFeature : features) {
		// scale raw values down by 128 (presumably byte-range descriptors — confirm)
		doubleFeatures[i] = ArrayUtils.divide(localFeature.getFeatureVector().asDoubleVector(), 128);
		i++;
	}

	// guard: the log line below indexes doubleFeatures[0], which would
	// otherwise throw ArrayIndexOutOfBoundsException on an empty list
	if (doubleFeatures.length == 0)
		throw new IllegalArgumentException("No features provided; cannot estimate a mixture model");

	System.out.println(String.format("Launching EM with double array: %d x %d",
			doubleFeatures.length, doubleFeatures[0].length));

	return this.gmm.estimate(new Matrix(doubleFeatures));
}
// Flood-fill a mask in each image from its seed point, extract a masked
// feature from each, and compare. EQUALS means an exact element-wise match
// of the dense vectors (1/0); other modes use the selected comparator.
double execute() {
	final FImage mask1 = FloodFill.floodFill(im1, px1, py1, thresh1);
	final FImage mask2 = FloodFill.floodFill(im2, px2, py2, thresh2);

	final FeatureVector first = featureOp.execute(im1, mask1);
	final FeatureVector second = featureOp.execute(im2, mask2);

	if (compare != FeatureComparison.EQUALS)
		return getComp(first, compare).compare(first, second);

	return Arrays.equals(first.asDoubleVector(), second.asDoubleVector()) ? 1 : 0;
}
/**
 * Estimate a {@link MixtureOfGaussians} from the given local features using
 * EM. Each feature is densified and divided by 128 before estimation.
 *
 * @param features the local features to model; must be non-empty
 * @return the estimated mixture model
 * @throws IllegalArgumentException if no features are provided
 */
@Override
public MixtureOfGaussians apply(LocalFeatureList<? extends LocalFeature<?, ? extends FeatureVector>> features) {
	System.out.println("Creating double array...");

	final double[][] doubleFeatures = new double[features.size()][];
	int i = 0;
	for (final LocalFeature<?, ?> localFeature : features) {
		// scale raw values down by 128 (presumably byte-range descriptors — confirm)
		doubleFeatures[i] = ArrayUtils.divide(localFeature.getFeatureVector().asDoubleVector(), 128);
		i++;
	}

	// guard: the log line below indexes doubleFeatures[0], which would
	// otherwise throw ArrayIndexOutOfBoundsException on an empty list
	if (doubleFeatures.length == 0)
		throw new IllegalArgumentException("No features provided; cannot estimate a mixture model");

	System.out.println(String.format("Launching EM with double array: %d x %d",
			doubleFeatures.length, doubleFeatures[0].length));

	return this.gmm.estimate(new Matrix(doubleFeatures));
}
// Build a flood-filled mask per image, extract the masked features, then
// compare: EQUALS requires an exact dense-vector match (returns 1/0),
// anything else is delegated to the comparator chosen by getComp.
double execute() {
	final FImage mask1 = FloodFill.floodFill(im1, px1, py1, thresh1);
	final FImage mask2 = FloodFill.floodFill(im2, px2, py2, thresh2);

	final FeatureVector left = featureOp.execute(im1, mask1);
	final FeatureVector right = featureOp.execute(im2, mask2);

	if (compare != FeatureComparison.EQUALS)
		return getComp(left, compare).compare(left, right);

	return Arrays.equals(left.asDoubleVector(), right.asDoubleVector()) ? 1 : 0;
}
// Extract the parent feature, find the index of its largest element, and
// return the corresponding colour average from hm as a FloatFV.
@Override
public FeatureVector extract(MBFImage image, FImage mask) {
	final double[] values = super.extract(image, mask).asDoubleVector();
	final int best = ArrayUtils.maxIndex(values);
	return new FloatFV(hm.colourAverage(best));
}
}
// Extract the object's feature, convert it to a library Vector, and let
// the configured mode produce the annotations from the categorizer.
@Override
public List<ScoredAnnotation<ANNOTATION>> annotate(OBJECT object) {
	final double[] dense = extractor.extractFeature(object).asDoubleVector();
	final Vector vec = VectorFactory.getDefault().copyArray(dense);
	return mode.getAnnotations(categorizer, vec);
}
}
// Train the linear annotator: gather the distinct annotation terms, build
// the feature matrix F and the term-indicator matrix W (one row per
// training example), then solve transform = pinv(F) * W.
@Override
public void train(List<? extends Annotated<OBJECT, ANNOTATION>> data) {
	// collect the distinct annotations over the whole training set
	final Set<ANNOTATION> uniqueTerms = new HashSet<ANNOTATION>();
	for (final Annotated<OBJECT, ANNOTATION> item : data)
		uniqueTerms.addAll(item.getAnnotations());
	terms = new ArrayList<ANNOTATION>(uniqueTerms);

	final int nTerms = terms.size();
	final int nTraining = data.size();

	// extract the first feature eagerly to discover the feature length
	final Annotated<OBJECT, ANNOTATION> first = data.get(0);
	final double[] firstFeature = extractor.extractFeature(first.getObject()).asDoubleVector();

	final Matrix F = new Matrix(nTraining, firstFeature.length);
	final Matrix W = new Matrix(nTraining, nTerms);

	// reuse the already-extracted feature for row 0
	addRow(F, W, 0, firstFeature, first.getAnnotations());
	for (int row = 1; row < nTraining; row++)
		addRow(F, W, row, data.get(row));

	final Matrix pinvF = PseudoInverse.pseudoInverse(F, k);
	transform = pinvF.times(W);
}
// Densify the example's feature and delegate to the array-based overload.
private void addRow(Matrix F, Matrix W, int r, Annotated<OBJECT, ANNOTATION> data) {
	final double[] dense = extractor.extractFeature(data.getObject()).asDoubleVector();
	addRow(F, W, r, dense, data.getAnnotations());
}
// Run the parent extractor, locate the peak element of the resulting
// vector, and map its index through hm.colourAverage into a FloatFV.
@Override
public FeatureVector extract(MBFImage image, FImage mask) {
	final double[] response = super.extract(image, mask).asDoubleVector();
	final int peak = ArrayUtils.maxIndex(response);
	return new FloatFV(hm.colourAverage(peak));
}
}
@Override
public Boolean call() {
	// Processes this worker's slice [startRow, stopRow): assigns each point
	// to its nearest centroid and merges per-cluster vector sums and counts
	// into the shared centroids_accum / counts arrays.
	try {
		// feature dimensionality, taken from the first data element
		final int D = ds.getData(0).length();

		// fetch this worker's slice of the data
		final T[] points = ds.createTemporaryArray(stopRow - startRow);
		ds.getData(startRow, stopRow, points);

		// nearest-neighbour assignment: argmins[i] = closest centroid index,
		// mins[i] = the corresponding distance
		final int[] argmins = new int[points.length];
		final float[] mins = new float[points.length];

		nno.searchNN(points, argmins, mins);

		// merge this slice's contribution into the shared accumulators;
		// the lock on centroids_accum also guards the counts array
		synchronized (centroids_accum) {
			for (int i = 0; i < points.length; ++i) {
				final int k = argmins[i];
				final double[] vector = points[i].asDoubleVector();
				for (int d = 0; d < D; ++d) {
					centroids_accum[k][d] += vector[d];
				}
				counts[k] += 1;
			}
		}
	} catch (final Exception e) {
		// NOTE(review): any failure is swallowed and true is still returned,
		// so callers cannot distinguish success from failure — consider
		// propagating the exception or returning false.
		e.printStackTrace();
	}
	return true;
}
}
// Incrementally train the online learner: convert the example's feature to
// a Vector, then apply one update per annotation attached to the example.
@Override
public void train(Annotated<OBJECT, ANNOTATION> annotated) {
	final double[] dense = extractor.extractFeature(annotated.getObject()).asDoubleVector();
	final Vector vec = VectorFactory.getDefault().copyArray(dense);

	for (final ANNOTATION ann : annotated.getAnnotations())
		learner.update(categorizer, new DefaultInputOutputPair<Vector, ANNOTATION>(vec, ann));
}