/**
 * Helper method to convert a list of {@link TrackedFace}s to
 * {@link CLMDetectedFace}s.
 *
 * @param faces
 *            the {@link TrackedFace}s.
 * @param image
 *            the image the {@link TrackedFace}s came from.
 * @return the list of {@link CLMDetectedFace}s
 */
public static List<CLMDetectedFace> convert(final List<TrackedFace> faces, final FImage image) {
	// Presize the output list: one detection per tracked face
	final List<CLMDetectedFace> converted = new ArrayList<CLMDetectedFace>(faces.size());

	for (int i = 0; i < faces.size(); i++) {
		converted.add(new CLMDetectedFace(faces.get(i), image));
	}

	return converted;
}
/**
 * Convert the tracker's currently-tracked faces into
 * {@link CLMDetectedFace}s against the current frame.
 *
 * @return the faces detected in the current frame
 */
private List<CLMDetectedFace> detectFaces() {
	final List<TrackedFace> tracked = engine.model.trackedFaces;
	return CLMDetectedFace.convert(tracked, currentFrame);
}
/**
 * Draw the given detected face model onto the image, remembering the
 * requested line thickness for the renderer.
 */
@Override
public void drawDetectedFace(MBFImage image, int thickness, CLMDetectedFace face) {
	// Store the stroke width used by drawFaceModel
	this.thickness = thickness;

	drawFaceModel(image, face.getShapeMatrix(), face.getVisibility(), face.getBounds());
}
/**
 * Get the parameters describing the pose of the face. This doesn't include
 * the translation or scale. The values are {pitch, yaw, roll}
 *
 * @return the pose parameters
 */
public DoubleFV getPoseParameters() {
	// Fixed layout: [pitch, yaw, roll]
	final double[] pose = new double[3];
	pose[0] = this.getPitch();
	pose[1] = this.getYaw();
	pose[2] = this.getRoll();

	return new DoubleFV(pose);
}
/**
 * Track faces in the incoming frame, draw the fitted model, and — when a
 * recogniser with at least one known person is available — annotate each
 * recognised face with its best-scoring name.
 */
@Override
public synchronized void beforeUpdate(MBFImage frame) {
	// Keep a greyscale copy of the frame for face conversion
	this.currentFrame = frame.flatten();

	engine.track(frame);
	engine.drawModel(frame, true, true, true, true, true);

	// Nothing to label without a trained recogniser
	if (recogniser == null || recogniser.listPeople().isEmpty())
		return;

	for (final CLMDetectedFace face : detectFaces()) {
		final List<ScoredAnnotation<String>> annotations = recogniser.annotate(face);

		if (!annotations.isEmpty()) {
			// Draw the top-scoring name at the face's top-left corner
			final Point2d origin = face.getBounds().getTopLeft();
			frame.drawText(annotations.get(0).annotation, origin, HersheyFont.ROMAN_SIMPLEX, 15, RGBColour.GREEN);
		}
	}
}
/**
 * Warp the face patch into the canonical (aligned) frame using a
 * piecewise-mesh warp over the model triangulation.
 *
 * @param face
 *            the face to align; may be null
 * @return the aligned face image, or null if no face was given
 */
@Override
public FImage align(CLMDetectedFace face) {
	if (face == null)
		return null;

	// Triangulate the fitted model shape (visible points only)
	final List<Triangle> triangles = CLMFaceTracker.getTriangles(
			face.getShapeMatrix(), face.getVisibility(), this.config.triangles);

	// Build the warp directly from the triangle correspondences
	final PiecewiseMeshWarp<Float, FImage> warp =
			new PiecewiseMeshWarp<Float, FImage>(computeMatches(triangles));

	return warp.transform(face.getFacePatch(), size, size);
}
/**
 * Extract the combined pose+shape feature from the given face.
 *
 * @param f
 *            the face to extract from
 * @return the pose+shape feature
 */
@Override
public CLMPoseShapeFeature extractFeature(CLMDetectedFace f) {
	return new CLMPoseShapeFeature(f.getPoseShapeParameters());
}
/**
 * Extract the pose (pitch/yaw/roll) feature from the given face.
 *
 * @param f
 *            the face to extract from
 * @return the pose feature
 */
@Override
public CLMPoseFeature extractFeature(CLMDetectedFace f) {
	return new CLMPoseFeature(f.getPoseParameters());
}
/**
 * Serialise this face to the given output. The write order — superclass
 * state, then shape, pose parameters and shape parameters — is the binary
 * format contract; it must stay in sync with the corresponding
 * readBinary implementation (NOTE(review): not visible here — confirm).
 *
 * @param out
 *            the output to write to
 * @throws IOException
 *             if writing fails
 */
@Override
public void writeBinary(final DataOutput out) throws IOException {
	// Superclass fields first so reads can mirror this order
	super.writeBinary(out);
	IOUtils.write(this.getShape(), out);
	IOUtils.write(this.poseParameters, out);
	IOUtils.write(this.shapeParameters, out);
}
/**
 * Get a vector describing the pose (pitch, yaw and roll only) and shape of
 * the model. The first three elements are {pitch, yaw, roll}; the
 * remaining elements are the shape parameters in row order.
 *
 * @return the combined pose and shape vector
 */
public DoubleFV getPoseShapeParameters() {
	final int len = this.shapeParameters.getRowDimension();
	final double[] vector = new double[len + 3];

	vector[0] = this.getPitch();
	vector[1] = this.getYaw();
	vector[2] = this.getRoll();

	// BUGFIX: the shape matrix rows run 0..len-1, but the output slots
	// start at index 3 — the original read get(i, 0) with i in [3, len+3),
	// skipping the first three rows and reading past the end of the matrix.
	for (int i = 0; i < len; i++) {
		vector[i + 3] = this.shapeParameters.get(i, 0);
	}

	return new DoubleFV(vector);
}
/**
 * Track faces in the incoming frame, draw the fitted model, and — when a
 * recogniser with at least one known person is available — annotate each
 * recognised face with its best-scoring name.
 */
@Override
public synchronized void beforeUpdate(MBFImage frame) {
	// Greyscale copy used when converting tracked faces
	this.currentFrame = frame.flatten();

	engine.track(frame);
	engine.drawModel(frame, true, true, true, true, true);

	// Skip labelling entirely when there is no usable recogniser
	if (recogniser == null || recogniser.listPeople().isEmpty())
		return;

	for (final CLMDetectedFace face : detectFaces()) {
		final List<ScoredAnnotation<String>> results = recogniser.annotate(face);

		if (!results.isEmpty()) {
			// Best-scoring annotation is first; draw it at the face corner
			final Point2d corner = face.getBounds().getTopLeft();
			frame.drawText(results.get(0).annotation, corner, HersheyFont.ROMAN_SIMPLEX, 15, RGBColour.GREEN);
		}
	}
}
/**
 * Convert the faces currently tracked by the engine into
 * {@link CLMDetectedFace}s using the current frame.
 *
 * @return the faces detected in the current frame
 */
private List<CLMDetectedFace> detectFaces() {
	return CLMDetectedFace.convert(this.engine.model.trackedFaces, this.currentFrame);
}
/**
 * Track faces in the incoming frame, draw the fitted model, and — when a
 * recogniser with at least one known person is available — annotate each
 * recognised face with its best-scoring name.
 */
@Override
public synchronized void beforeUpdate(MBFImage frame) {
	// Flattened (greyscale) frame backs the face conversion
	this.currentFrame = frame.flatten();

	engine.track(frame);
	engine.drawModel(frame, true, true, true, true, true);

	// No recogniser (or no enrolled people) means nothing to annotate
	if (recogniser == null || recogniser.listPeople().isEmpty())
		return;

	for (final CLMDetectedFace face : detectFaces()) {
		final List<ScoredAnnotation<String>> scored = recogniser.annotate(face);

		if (!scored.isEmpty()) {
			// Label the face with its top match, anchored top-left
			final Point2d anchor = face.getBounds().getTopLeft();
			frame.drawText(scored.get(0).annotation, anchor, HersheyFont.ROMAN_SIMPLEX, 15, RGBColour.GREEN);
		}
	}
}
// Reuse the cached CLMDetectedFace for this tracked face when present;
// otherwise build a fresh one from the current image.
if ((m = this.conversionCache.get(f)) == null) m = new CLMDetectedFace(f, img);
// NOTE(review): put() also runs on a cache hit, re-inserting the same
// mapping — harmless but redundant; confirm before restructuring.
this.conversionCache.put(f, m);
/**
 * Build {@link CLMDetectedFace}s for everything the tracker is currently
 * following, against the current frame.
 *
 * @return the faces detected in the current frame
 */
private List<CLMDetectedFace> detectFaces() {
	final List<TrackedFace> current = engine.model.trackedFaces;
	return CLMDetectedFace.convert(current, currentFrame);
}
/**
 * Track faces in the incoming frame, draw the fitted model, and — when a
 * recogniser with at least one known person is available — annotate each
 * recognised face with its best-scoring name.
 */
@Override
public synchronized void beforeUpdate(MBFImage frame) {
	// Remember the greyscale frame for downstream face conversion
	this.currentFrame = frame.flatten();

	engine.track(frame);
	engine.drawModel(frame, true, true, true, true, true);

	// Annotation requires a recogniser with at least one known person
	if (recogniser == null || recogniser.listPeople().isEmpty())
		return;

	for (final CLMDetectedFace face : detectFaces()) {
		final List<ScoredAnnotation<String>> names = recogniser.annotate(face);

		if (!names.isEmpty()) {
			// Render the best match's name at the face's top-left point
			final Point2d position = face.getBounds().getTopLeft();
			frame.drawText(names.get(0).annotation, position, HersheyFont.ROMAN_SIMPLEX, 15, RGBColour.GREEN);
		}
	}
}
// Look up a previously-converted face; only construct a new
// CLMDetectedFace when the cache has no entry for this tracked face.
if ((m = this.conversionCache.get(f)) == null) m = new CLMDetectedFace(f, img);
// NOTE(review): the put() is unconditional, so a cache hit re-stores the
// same value — redundant but behaviour-neutral; confirm before changing.
this.conversionCache.put(f, m);
/**
 * Convert the engine's tracked faces to {@link CLMDetectedFace}s using
 * the frame captured most recently.
 *
 * @return the faces detected in the current frame
 */
private List<CLMDetectedFace> detectFaces() {
	return CLMDetectedFace.convert(this.engine.model.trackedFaces, this.currentFrame);
}
/**
 * Detect faces in the image using the given rectangles as the seeds from
 * which to start fitting the model.
 *
 * NOTE(review): fitting mutates shared state in {@code config} (shape,
 * _plocal, _pglobl) — this method does not look safe for concurrent use;
 * confirm against callers.
 *
 * @param image
 *            the image
 * @param detRects
 *            the seed rectangles
 * @return the detected faces
 */
public List<CLMDetectedFace> detectFaces(FImage image, List<Rectangle> detRects) {
	final List<CLMDetectedFace> faces = new ArrayList<CLMDetectedFace>();
	for (final Rectangle f : detRects) {
		// Degenerate seed rectangles cannot anchor a model fit
		if ((f.width == 0) || (f.height == 0)) {
			continue;
		}

		// Initialise the model shape from the seed rectangle, then recover
		// the local/global model parameters for that initial shape
		initShape(f, config.shape, config.referenceShape);
		config.clm._pdm.calcParams(config.shape, config.clm._plocal, config.clm._pglobl);

		// Fit the CLM to the image and project the fitted parameters back
		// into a 2D shape
		config.clm.fit(image, config.windowSize, config.nIter, config.clamp, config.fTol);
		config.clm._pdm.calcShape2D(config.shape, config.clm._plocal, config.clm._pglobl);

		// Optionally drop fits rejected by the failure checker
		if (config.fcheck) {
			if (!config.failureCheck.check(config.clm.getViewIdx(), image, config.shape)) {
				continue;
			}
		}

		// Copy all model state so the returned face is independent of
		// subsequent fits, which reuse the same config buffers
		faces.add(new CLMDetectedFace(f, config.shape.copy(), config.clm._pglobl.copy(), config.clm._plocal.copy(), config.clm._visi[config.clm.getViewIdx()].copy(), image));
	}
	return faces;
}
/**
 * Helper method to convert a list of {@link TrackedFace}s to
 * {@link CLMDetectedFace}s. The colour image is flattened to greyscale
 * before conversion.
 *
 * @param faces
 *            the {@link TrackedFace}s.
 * @param image
 *            the image the {@link TrackedFace}s came from.
 * @return the list of {@link CLMDetectedFace}s
 */
public static List<CLMDetectedFace> convert(final List<TrackedFace> faces, final MBFImage image) {
	// Delegate to the greyscale overload
	return CLMDetectedFace.convert(faces, image.flatten());
}