/**
 * Gets the aligner appropriate for faces detected by this detector.
 *
 * @return a new {@link CLMAligner} built with this detector's configured size
 */
@Override
public FaceAligner<CLMDetectedFace> getAligner() {
	final CLMAligner aligner = new CLMAligner(size);
	return aligner;
}
/**
 * Provides the face aligner matching this detector.
 *
 * @return a freshly constructed {@link CLMAligner} using the current size setting
 */
@Override
public FaceAligner<CLMDetectedFace> getAligner() {
	return new CLMAligner(this.size);
}
public static void main(String[] args) throws MalformedURLException, IOException { final CLMFaceDetector detector = new CLMFaceDetector(); // final FaceAligner<KEDetectedFace> aligner = new MeshWarpAligner(); final FaceAligner<CLMDetectedFace> aligner = new CLMAligner(200); final VideoCapture vc = new VideoCapture(640, 480); VideoDisplay.createOffscreenVideoDisplay(vc).addVideoListener(new VideoDisplayListener<MBFImage>() { @Override public void beforeUpdate(MBFImage frame) { if (frame == null) return; final List<CLMDetectedFace> faces = detector.detectFaces(frame.flatten()); if (faces.size() <= 0) return; final CLMDetectedFace face = faces.get(0); DisplayUtilities.displayName(aligner.align(face), "aligned"); DisplayUtilities.displayName(frame, "tracked"); } @Override public void afterUpdate(VideoDisplay<MBFImage> display) { } }); } }
public static void main(String[] args) throws MalformedURLException, IOException { final CLMFaceDetector detector = new CLMFaceDetector(); // final FaceAligner<KEDetectedFace> aligner = new MeshWarpAligner(); final FaceAligner<CLMDetectedFace> aligner = new CLMAligner(200); final VideoCapture vc = new VideoCapture(640, 480); VideoDisplay.createOffscreenVideoDisplay(vc).addVideoListener(new VideoDisplayListener<MBFImage>() { @Override public void beforeUpdate(MBFImage frame) { if (frame == null) return; final List<CLMDetectedFace> faces = detector.detectFaces(frame.flatten()); if (faces.size() <= 0) return; final CLMDetectedFace face = faces.get(0); DisplayUtilities.displayName(aligner.align(face), "aligned"); DisplayUtilities.displayName(frame, "tracked"); } @Override public void afterUpdate(VideoDisplay<MBFImage> display) { } }); } }
/**
 * Constructs the video face-recognition demo: opens a 320x240 webcam
 * capture, starts a CLM face tracker, wires this object in as both a video
 * listener and a key listener, and builds a 1-nearest-neighbour face
 * recogniser over local LBP histogram features.
 *
 * @throws Exception if the capture device or video display cannot be created
 */
public VideoFaceRecognition() throws Exception {
	capture = new VideoCapture(320, 240);
	engine = new CLMFaceTracker();
	// NOTE(review): fpd presumably controls how often the full detector is
	// re-run (every 120 frames) vs. pure tracking — TODO confirm
	engine.fpd = 120;
	// engine.fcheck = true;

	// Display must exist before we can register listeners on it
	videoFrame = VideoDisplay.createVideoDisplay(capture);
	videoFrame.addVideoListener(this);
	SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);

	// Feature: local LBP histograms computed over CLM-aligned face patches
	final LocalLBPHistogram.Extractor<CLMDetectedFace> extractor = new LocalLBPHistogram.Extractor<CLMDetectedFace>(
			new CLMAligner(), 20, 20, 8, 1);
	final FacialFeatureComparator<LocalLBPHistogram> comparator = new FaceFVComparator<LocalLBPHistogram, FloatFV>(
			FloatFVComparison.EUCLIDEAN);
	// k=1 nearest neighbour; 5f is presumably a distance threshold — TODO confirm
	final KNNAnnotator<CLMDetectedFace, String, LocalLBPHistogram> knn = KNNAnnotator.create(extractor, comparator, 1, 5f);

	// Alternative shape-based feature pipeline, kept for reference:
	// final CLMShapeFeature.Extractor extractor = new
	// CLMShapeFeature.Extractor();
	// final FacialFeatureComparator<CLMShapeFeature> comparator = new
	// FaceFVComparator<CLMShapeFeature, DoubleFV>(
	// DoubleFVComparison.EUCLIDEAN);
	//
	// final KNNAnnotator<CLMDetectedFace, String,
	// CLMShapeFeature.Extractor, CLMShapeFeature> knn =
	// KNNAnnotator.create(extractor, comparator, 1, 5f);

	recogniser = AnnotatorFaceRecogniser.create(knn);
}
/**
 * Builds the demo: webcam capture at 320x240, a CLM-based face tracker, a
 * live video display with this object attached as video and key listener,
 * and a KNN face recogniser using local LBP histogram features.
 *
 * @throws Exception if the capture device or display cannot be initialised
 */
public VideoFaceRecognition() throws Exception {
	capture = new VideoCapture(320, 240);
	engine = new CLMFaceTracker();
	// NOTE(review): fpd looks like a frames-per-detection interval — confirm
	engine.fpd = 120;
	// engine.fcheck = true;

	videoFrame = VideoDisplay.createVideoDisplay(capture);
	videoFrame.addVideoListener(this);
	// Key events arrive on the display's root component
	SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);

	// LBP histogram extractor over CLM-aligned faces (20x20 blocks, 8 samples, radius 1)
	final LocalLBPHistogram.Extractor<CLMDetectedFace> extractor = new LocalLBPHistogram.Extractor<CLMDetectedFace>(
			new CLMAligner(), 20, 20, 8, 1);
	final FacialFeatureComparator<LocalLBPHistogram> comparator = new FaceFVComparator<LocalLBPHistogram, FloatFV>(
			FloatFVComparison.EUCLIDEAN);
	// 1-NN annotator; 5f presumably a match-distance threshold — TODO confirm
	final KNNAnnotator<CLMDetectedFace, String, LocalLBPHistogram> knn = KNNAnnotator.create(extractor, comparator, 1, 5f);

	// Shape-feature alternative, retained for reference:
	// final CLMShapeFeature.Extractor extractor = new
	// CLMShapeFeature.Extractor();
	// final FacialFeatureComparator<CLMShapeFeature> comparator = new
	// FaceFVComparator<CLMShapeFeature, DoubleFV>(
	// DoubleFVComparison.EUCLIDEAN);
	//
	// final KNNAnnotator<CLMDetectedFace, String,
	// CLMShapeFeature.Extractor, CLMShapeFeature> knn =
	// KNNAnnotator.create(extractor, comparator, 1, 5f);

	recogniser = AnnotatorFaceRecogniser.create(knn);
}
/**
 * Demo entry point: loads an image, runs the configured inner face detector
 * to get candidate rectangles, draws the (expanded) rectangles, fits the
 * CLM model to them, and displays the first aligned face.
 *
 * @param args optional: path to the image file; when absent, falls back to
 *            the original hard-coded test image path
 * @throws IOException if the image cannot be read
 */
public static void main(String[] args) throws IOException {
	// Generalised: accept the image path on the command line, keeping the
	// original hard-coded path as a backward-compatible default.
	final String path = args.length > 0 ? args[0]
			: "/Users/jsh2/Desktop/test-images/A7K9ZlZCAAA9VoL.jpg";
	final FImage image = ImageUtilities.readF(new File(path));

	final CLMFaceDetector detector = new CLMFaceDetector();
	final List<Rectangle> rects = detector.getConfiguration().faceDetector.detect(image);

	// Visualise the candidate detections on an RGB copy of the grey image
	final MBFImage img = new MBFImage(image.clone(), image.clone(), image.clone());
	for (final Rectangle r : rects) {
		r.scaleCentroid(1.2f); // enlarge each box in place before CLM fitting
		img.drawShape(r, RGBColour.RED);
	}
	DisplayUtilities.display(img);

	final List<CLMDetectedFace> faces = detector.detectFaces(image, rects);
	// Guard against an empty result: the original crashed with an
	// IndexOutOfBoundsException when no face was found.
	if (faces.isEmpty()) {
		System.err.println("No faces found in " + path);
		return;
	}
	final CLMAligner aligner = new CLMAligner();
	DisplayUtilities.display(aligner.align(faces.get(0)));
}
}
/**
 * Demo entry point: reads an image, detects candidate face rectangles with
 * the detector's inner face detector, visualises them, then runs CLM
 * fitting and shows the first aligned face.
 *
 * @param args optional: image file path; defaults to the original
 *            hard-coded test-image location
 * @throws IOException if the image cannot be read
 */
public static void main(String[] args) throws IOException {
	// Hard-coded developer path replaced by an optional CLI argument with
	// the old path as the backward-compatible default.
	final String imagePath = args.length > 0 ? args[0]
			: "/Users/jsh2/Desktop/test-images/A7K9ZlZCAAA9VoL.jpg";
	final FImage image = ImageUtilities.readF(new File(imagePath));

	final CLMFaceDetector detector = new CLMFaceDetector();
	final List<Rectangle> rects = detector.getConfiguration().faceDetector.detect(image);

	// Draw detections on a 3-band copy so the boxes can be shown in red
	final MBFImage img = new MBFImage(image.clone(), image.clone(), image.clone());
	for (final Rectangle r : rects) {
		r.scaleCentroid(1.2f); // grows the rectangle about its centroid, in place
		img.drawShape(r, RGBColour.RED);
	}
	DisplayUtilities.display(img);

	final List<CLMDetectedFace> faces = detector.detectFaces(image, rects);
	// Avoid the original unguarded faces.get(0), which threw
	// IndexOutOfBoundsException when nothing was detected.
	if (faces.isEmpty()) {
		System.err.println("No faces found in " + imagePath);
		return;
	}
	final CLMAligner aligner = new CLMAligner();
	DisplayUtilities.display(aligner.align(faces.get(0)));
}
}