/** Builds the Fast Hessian detector configuration used by this class. */
private static ConfigFastHessian confDetectFH() {
	ConfigFastHessian settings = new ConfigFastHessian();
	// cap detections per scale, then tune extraction and sampling
	settings.maxFeaturesPerScale = 120;
	settings.extractRadius = 2;
	settings.initialSampleSize = 2;
	return settings;
}
/** * Creates a Fast Hessian blob detector used by SURF. * * @param config Configuration for detector. Pass in null for default options. * @param <II> Integral Image * @return The feature detector */ public static <II extends ImageGray> FastHessianFeatureDetector<II> fastHessian( ConfigFastHessian config ) { if( config == null ) config = new ConfigFastHessian(); config.checkValidity(); // ignore border is overwritten by Fast Hessian at detection time NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax( new ConfigExtract(config.extractRadius, config.detectThreshold, 0, true)); return new FastHessianFeatureDetector<>(extractor, config.maxFeaturesPerScale, config.initialSampleSize, config.initialSize, config.numberScalesPerOctave, config.numberOfOctaves, config.scaleStepSize); }
public ArrayList<ConvexPolygon2d> detectConeLocations(BufferedImage image) { ImageFloat32 input = ConvertBufferedImage.convertFromSingle(image, null, ImageFloat32.class); // Create a Fast Hessian detector from the SURF paper. // Other detectors can be used in this example too. InterestPointDetector<ImageFloat32> detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(10, 2, 100, 2, 9, 3, 4)); // find interest points in the image detector.detect(input); // Show the features return new ArrayList<ConvexPolygon2d>(); }
/**
 * Selects which interest point detector the processing pipeline uses.
 * 0 = Fast Hessian (SURF), 1 = SIFT; anything else is rejected.
 */
private void setSelection( int which ) {
	InterestPointDetector<GrayU8> detector;

	if( which == 0 ) {
		detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(10,3,100,2,9,4,4));
	} else if( which == 1 ) {
		ConfigSiftDetector configSift = new ConfigSiftDetector(200);
		detector = FactoryInterestPoint.sift(null, configSift, GrayU8.class);
	} else {
		throw new RuntimeException("Unknown selection");
	}

	setProcessing(new PointProcessing(detector));
}
/**
 * Demo application that detects scale/orientation interest points on images
 * corrupted by user-selectable noise. Registers several detectors under
 * algorithm slot 0 and builds the GUI (corruption controls + image panel).
 *
 * @param imageType image type processed by the detectors
 * @param derivType image derivative type used by Hessian-Laplace
 */
public DetectPointScaleOriWithNoiseApp(Class<T> imageType, Class<D> derivType) {
	super(1);
	this.imageType = imageType;

	// Hessian-Laplace detector; wrapped twice below with the pyramid flag
	// off ("SS") and on ("P")
	FeatureLaplacePyramid<T, D> flss = FactoryInterestPointAlgs.hessianLaplace(radius, thresh, maxScaleFeatures, imageType, derivType);
	addAlgorithm(0, "Hess Lap SS", FactoryInterestPoint.wrapDetector(flss, scales, false, imageType));
	FeatureLaplacePyramid<T, D> flp = FactoryInterestPointAlgs.hessianLaplace(radius, thresh, maxScaleFeatures, imageType, derivType);
	addAlgorithm(0, "Hess Lap P", FactoryInterestPoint.wrapDetector(flp, scales, true,imageType));
	// Fast Hessian (SURF) detector
	addAlgorithm(0, "FastHessian", FactoryInterestPoint.<T>fastHessian( new ConfigFastHessian(thresh, 2, maxScaleFeatures, 2, 9, 4, 4)));
	// SIFT, configured with 2*maxScaleFeatures
	addAlgorithm(0, "SIFT", FactoryInterestPoint.sift(null,new ConfigSiftDetector(2*maxScaleFeatures),imageType));

	// GUI layout: corruption controls on the left, image in the center
	JPanel viewArea = new JPanel(new BorderLayout());
	corruptPanel = new ImageCorruptPanel();
	corruptPanel.setListener(this);
	panel = new ImagePanel();

	viewArea.add(corruptPanel, BorderLayout.WEST);
	viewArea.add(panel, BorderLayout.CENTER);
	setMainGUI(viewArea);
}
/** * Detects key points inside the image and computes descriptions at those points. */ protected double[][] extractFeaturesInternal(BufferedImage image) { ImageFloat32 boofcvImage = ConvertBufferedImage.convertFromSingle(image, null, ImageFloat32.class); // create the SURF detector and descriptor in BoofCV v0.15 ConfigFastHessian conf = new ConfigFastHessian(detectThreshold, 2, maxFeaturesPerScale, 2, 9, 4, 4); DetectDescribePoint<ImageFloat32, SurfFeature> surf = FactoryDetectDescribe.surfStable(conf, null, null, ImageFloat32.class); // specify the image to process surf.detect(boofcvImage); int numPoints = surf.getNumberOfFeatures(); double[][] descriptions = new double[numPoints][SURFLength]; for (int i = 0; i < numPoints; i++) { descriptions[i] = surf.getDescription(i).getValue(); } return descriptions; } }
public static <T extends ImageGray<T>, D extends ImageGray<D>> void perform( String fileName , Class<T> imageType , Class<D> derivType ) { SimpleImageSequence<T> sequence = BoofVideoManager.loadManagerDefault().load(fileName, ImageType.single(imageType)); int maxCorners = 200; int radius = 2; // if null then no orientation will be computed OrientationImageAverage<T> orientation = null; orientation = FactoryOrientationAlgs.nogradient(1.0/2.0,radius,imageType); InterestPointDetector<T> detector; detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 100, 2, 9, 4, 4)); // FeatureScaleSpace<T,D> feature = FactoryInterestPointAlgs.hessianScaleSpace(radius,1,maxCorners,defaultType,derivType); // detector = FactoryInterestPoint.wrapDetector(feature,new double[]{1,2,4,6,8,12},defaultType); VideoDetectInterestPoints<T> display = new VideoDetectInterestPoints<>(sequence, detector, orientation); display.process(); }
/**
 * Three-view stereo demo application. Configures the menu bar, the SURF
 * detector/descriptor, per-view feature storage, and the GUI layout.
 *
 * @param examples example inputs shown in the examples menu
 */
public DemoThreeViewStereoApp(List<PathLabel> examples) {
	super(true, false, examples, ImageType.single(GrayU8.class));

	// remove some unused items from the menu bar. This app is an exception
	// remove(1) is called twice on purpose: after the first removal the next
	// item shifts down into index 1
	JMenu fileMenu = menuBar.getMenu(0);
	fileMenu.remove(1);
	fileMenu.remove(1);

	// stable SURF detector/descriptor (see ConfigFastHessian for the meaning
	// of each constructor argument)
	detDesc = FactoryDetectDescribe.surfStable( new ConfigFastHessian( 0, 4, 1000, 1, 9, 4, 2), null,null, GrayU8.class);

	// pre-allocate feature storage for each of the three views
	for (int i = 0; i < 3; i++) {
		locations[i] = new FastQueue<>(Point2D_F64.class,true);
		features[i] = UtilFeature.createQueue(detDesc,100);
		dimensions[i] = new ImageDimension();
	}

	// wire the visualization panels to their backing images
	rectifiedPanel.setImages(visualRect1,visualRect2);
	guiDisparity.setImage(visualDisparity);

	gui.setLayout(new BorderLayout());
	updateVisibleGui();

	// controls on the left, main display in the center
	add(BorderLayout.WEST, controls);
	add(BorderLayout.CENTER, gui);
	setPreferredSize(new Dimension(800,600));
}
/**
 * Compares feature association using full F64 SURF descriptors against a
 * compressed S8 representation of the same descriptors, visualizing both.
 */
public static void main( String[] args ) {
	String file1 = UtilIO.pathExample("stitch/kayak_01.jpg");
	String file2 = UtilIO.pathExample("stitch/kayak_02.jpg");

	// single detector shared by both descriptor configurations
	InterestPointDetector<GrayF32> detector =
			FactoryInterestPoint.fastHessian(new ConfigFastHessian(1,10,-1,2,9,4,4));

	DescribeRegionPoint<GrayF32,TupleDesc_F64> describeA =
			(DescribeRegionPoint)FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);

	// wrap the F64 descriptor so its output is converted into compact S8 form
	ConvertTupleDesc<TupleDesc_F64,TupleDesc_S8> converter =
			FactoryConvertTupleDesc.real_F64_S8(describeA.createDescription().size());
	DescribeRegionPoint<GrayF32,TupleDesc_S8> describeB =
			new DescribeRegionPointConvert<>(describeA, converter);

	// SAD scoring for each descriptor type
	ScoreAssociation<TupleDesc_F64> scoreA = FactoryAssociation.scoreSad(TupleDesc_F64.class);
	ScoreAssociation<TupleDesc_S8> scoreB = FactoryAssociation.scoreSad(TupleDesc_S8.class);

	BufferedImage image1 = UtilImageIO.loadImage(file1);
	BufferedImage image2 = UtilImageIO.loadImage(file2);

	visualize("Original",image1,image2,detector,describeA,scoreA);
	visualize("Modified",image1,image2,detector,describeB,scoreB);

	System.out.println("Done");
}
}
/**
 * Application for visualizing association scores between feature
 * descriptors. Registers interest point detectors under slot 0 and region
 * descriptors under slot 1, then builds the GUI.
 *
 * @param imageType image type being processed
 * @param derivType image derivative type used by Shi-Tomasi
 */
public VisualizeAssociationScoreApp(Class<T> imageType, Class<D> derivType) {
	super(2);
	this.imageType = imageType;

	// placeholder 1x1 images; resized when real inputs are loaded
	imageLeft = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
	imageRight = GeneralizedImageOps.createSingleBand(imageType, 1, 1);

	// slot 0: interest point detectors
	GeneralFeatureDetector<T, D> alg;
	addAlgorithm(0, "Fast Hessian", FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4)));
	addAlgorithm(0, "SIFT", FactoryInterestPoint.sift(null,new ConfigSiftDetector(500),imageType));
	alg = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(500,2,1), false, derivType);
	addAlgorithm(0, "Shi-Tomasi", FactoryInterestPoint.wrapPoint(alg, 1, imageType, derivType));

	// slot 1: region descriptors
	addAlgorithm(1, "SURF", FactoryDescribeRegionPoint.surfStable(null, imageType));
	addAlgorithm(1, "SIFT", FactoryDescribeRegionPoint.sift(null,null, imageType));
	addAlgorithm(1, "BRIEF", FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType));
	addAlgorithm(1, "BRIEFO", FactoryDescribeRegionPoint.brief(new ConfigBrief(false), imageType));
	addAlgorithm(1, "Pixel 11x11", FactoryDescribeRegionPoint.pixel(11, 11, imageType));
	addAlgorithm(1, "NCC 11x11", FactoryDescribeRegionPoint.pixelNCC(11, 11, imageType));

	// estimate orientation using this once since it is fast
	Class integralType = GIntegralImageOps.getIntegralType(imageType);
	OrientationIntegral orientationII = FactoryOrientationAlgs.sliding_ii(null, integralType);
	orientation = FactoryOrientation.convertImage(orientationII, imageType);

	// GUI: score controls on the left, association score display in the center
	controlPanel = new VisualizeScorePanel(this);
	scorePanel = new AssociationScorePanel<>(3);

	JPanel gui = new JPanel();
	gui.setLayout(new BorderLayout());

	gui.add(controlPanel, BorderLayout.WEST);
	gui.add(scorePanel, BorderLayout.CENTER);

	setMainGUI(gui);
}
public VisualizeAssociationAlgorithmsApp( Class<T> imageType ) { super(1); this.imageType = imageType; detector = (DetectDescribePoint) FactoryDetectDescribe.surfStable( new ConfigFastHessian(5, 4, 200, 1, 9, 4, 4), null, null, GrayF32.class); // detector = (DetectDescribePoint) FactoryDetectDescribe.sift(4,1,false,200); int DOF = detector.createDescription().size(); ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class,true); addAlgorithm(0, "Greedy", FactoryAssociation.greedy(score, Double.MAX_VALUE, false)); addAlgorithm(0, "Greedy Backwards", FactoryAssociation.greedy(score, Double.MAX_VALUE, true)); addAlgorithm(0, "K-D Tree BBF", FactoryAssociation.kdtree(null,DOF, 75)); addAlgorithm(0, "Random Forest", FactoryAssociation.kdRandomForest(null,DOF, 75, 10, 5, 1233445565)); image0 = GeneralizedImageOps.createSingleBand(imageType, 1, 1); image1 = GeneralizedImageOps.createSingleBand(imageType, 1, 1); setMainGUI(panel); }
/**
 * Video point-feature tracking demo. Registers several tracker
 * implementations (KLT, detect-describe-associate variants, and combined
 * hybrid trackers) under slot 0 and wires up mouse input.
 *
 * @param imageType type of input image
 * @param derivType image derivative type
 */
public VideoTrackerPointFeaturesApp(Class<I> imageType, Class<D> derivType) {
	super(1,imageType);

	// pyramid KLT configuration shared by the KLT-based trackers
	PkltConfig config = new PkltConfig();
	config.templateRadius = 3;
	config.pyramidScaling = new int[]{1,2,4,8};

	// Fast Hessian configuration shared by the SURF-based trackers
	ConfigFastHessian configFH = new ConfigFastHessian();
	configFH.maxFeaturesPerScale = 200;
	configFH.extractRadius = 4;
	configFH.detectThreshold = 15f;

	addAlgorithm(0,"KLT", FactoryPointTracker.klt(config, new ConfigGeneralDetector(maxFeatures, 1, 3),
			imageType, derivType));
	addAlgorithm(0,"ST-BRIEF", FactoryPointTracker.
			dda_ST_BRIEF(200, new ConfigGeneralDetector(maxFeatures, 3, 1), imageType, derivType));
	addAlgorithm(0,"ST-NCC", FactoryPointTracker.
			dda_ST_NCC(new ConfigGeneralDetector(maxFeatures, 3, 2), 5, imageType, derivType));
	addAlgorithm(0,"FH-SURF", FactoryPointTracker.
			dda_FH_SURF_Fast(configFH, null, null, imageType));
	addAlgorithm(0,"ST-SURF-KLT", FactoryPointTracker.
			combined_ST_SURF_KLT(new ConfigGeneralDetector(maxFeatures, 3, 1),
					config, 50, null, null, imageType, derivType));
	addAlgorithm(0,"FH-SURF-KLT", FactoryPointTracker.combined_FH_SURF_KLT(
			config, 50, configFH, null, null, imageType));

	gui.addMouseListener(this);
	gui.requestFocus();
	setMainGUI(gui);
}
/**
 * Video stabilization demo using sequential point tracking. Slot 0 holds
 * the candidate point trackers, slot 1 the image motion models; the fields
 * at the end tune track respawning and outlier rejection.
 *
 * @param imageType type of input image
 * @param derivType image derivative type
 */
public VideoStabilizeSequentialPointApp(Class<I> imageType, Class<D> derivType) {
	super(2,imageType,true,new Stabilize2DPanel());

	// pyramid KLT configuration shared by the KLT-based trackers
	PkltConfig config = new PkltConfig();
	config.templateRadius = 3;
	config.pyramidScaling = new int[]{1,2,4,8};

	// Fast Hessian configuration shared by the SURF-based trackers
	ConfigFastHessian configFH = new ConfigFastHessian();
	configFH.maxFeaturesPerScale = 200;
	configFH.initialSampleSize = 2;

	addAlgorithm(0, "KLT", FactoryPointTracker.klt(config, new ConfigGeneralDetector(maxFeatures, 1, 3),
			imageType,derivType));
	addAlgorithm(0, "ST-BRIEF", FactoryPointTracker.
			dda_ST_BRIEF(100, new ConfigGeneralDetector(400, 1, 10), imageType, derivType));
	// size of the description region has been increased to improve quality.
	addAlgorithm(0, "ST-NCC", FactoryPointTracker.
			dda_ST_NCC(new ConfigGeneralDetector(500, 3, 10), 5, imageType, derivType));
	addAlgorithm(0, "FH-SURF", FactoryPointTracker.dda_FH_SURF_Fast(configFH, null, null, imageType));
	addAlgorithm(0, "ST-SURF-KLT", FactoryPointTracker.
			combined_ST_SURF_KLT(new ConfigGeneralDetector(400, 3, 1),
					config, 50, null, null, imageType, derivType));
	addAlgorithm(0, "FH-SURF-KLT", FactoryPointTracker.combined_FH_SURF_KLT(
			config, 50, configFH, null, null, imageType));

	// slot 1: motion models fit to the tracked points
	addAlgorithm(1,"Affine", new Affine2D_F64());
	addAlgorithm(1,"Homography", new Homography2D_F64());

	// track respawn / outlier-rejection tuning
	absoluteMinimumTracks = 40;
	respawnTrackFraction = 0.3;
	respawnCoverageFraction = 0.5;
	maxJumpFraction = 0.3;
	inlierThreshold = 4;
}
/**
 * Video mosaicing demo using sequential point tracking. Slot 0 holds the
 * candidate point trackers, slot 1 the image motion models; the fields at
 * the end tune track respawning and outlier rejection.
 *
 * @param imageType type of input image
 * @param derivType image derivative type
 */
public VideoMosaicSequentialPointApp(Class<I> imageType, Class<D> derivType) {
	super(2,imageType,true,new Mosaic2DPanel());

	// pyramid KLT configuration shared by the KLT-based trackers
	PkltConfig config = new PkltConfig();
	config.templateRadius = 3;
	config.pyramidScaling = new int[]{1,2,4,8};

	// Fast Hessian configuration shared by the SURF-based trackers
	ConfigFastHessian configFH = new ConfigFastHessian();
	configFH.initialSampleSize = 2;
	configFH.maxFeaturesPerScale = 200;

	addAlgorithm(0, "KLT", FactoryPointTracker.klt(config, new ConfigGeneralDetector(maxFeatures, 3, 1),
			imageType, derivType));
	addAlgorithm(0, "ST-BRIEF", FactoryPointTracker.
			dda_ST_BRIEF(150, new ConfigGeneralDetector(400, 1, 10), imageType, null));
	// size of the description region has been increased to improve quality.
	addAlgorithm(0, "ST-NCC", FactoryPointTracker.
			dda_ST_NCC(new ConfigGeneralDetector(500, 3, 9), 10, imageType, derivType));
	addAlgorithm(0, "FH-SURF", FactoryPointTracker.dda_FH_SURF_Fast(configFH, null, null, imageType));
	addAlgorithm(0, "ST-SURF-KLT", FactoryPointTracker.
			combined_ST_SURF_KLT(new ConfigGeneralDetector(400, 3, 1),
					config, 75, null, null, imageType, derivType));
	addAlgorithm(0, "FH-SURF-KLT", FactoryPointTracker.combined_FH_SURF_KLT(
			config, 75, configFH, null, null, imageType));

	// slot 1: motion models fit to the tracked points
	addAlgorithm(1,"Affine", new Affine2D_F64());
	addAlgorithm(1,"Homography", new Homography2D_F64());

	// track respawn / outlier-rejection tuning
	absoluteMinimumTracks = 40;
	respawnTrackFraction = 0.3;
	respawnCoverageFraction = 0.8;
	maxJumpFraction = 0.3;
	inlierThreshold = 4;
}
/**
 * Application for visually inspecting feature association matches between
 * two images. Slot 0 registers detectors, slot 1 descriptors, and slot 2
 * the association strategy flag.
 *
 * @param imageType image type being processed
 * @param derivType image derivative type used by Shi-Tomasi
 */
public VisualizeAssociationMatchesApp(Class<T> imageType, Class<D> derivType) {
	super(3);
	this.imageType = imageType;

	// slot 0: interest point detectors
	GeneralFeatureDetector<T, D> alg;
	addAlgorithm(0, "Fast Hessian",FactoryInterestPoint.fastHessian(new ConfigFastHessian( 1, 2, 200, 1, 9, 4, 4)));
	addAlgorithm(0, "SIFT", FactoryInterestPoint.sift(null,new ConfigSiftDetector(400),imageType));
	alg = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(500,2,1), false, derivType);
	addAlgorithm(0, "Shi-Tomasi", FactoryInterestPoint.wrapPoint(alg, 1, imageType, derivType));

	// slot 1: region descriptors
	addAlgorithm(1, "SURF-S", FactoryDescribeRegionPoint.surfStable(null, imageType));
	addAlgorithm(1, "SURF-S Color", FactoryDescribeRegionPoint.surfColorStable(null, ImageType.pl(3, imageType)));
	addAlgorithm(1, "SIFT", FactoryDescribeRegionPoint.sift(null,null,imageType));
	addAlgorithm(1, "BRIEF", FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType));
	addAlgorithm(1, "BRIEFSO", FactoryDescribeRegionPoint.brief(new ConfigBrief(false), imageType));
	addAlgorithm(1, "Pixel 11x11", FactoryDescribeRegionPoint.pixel(11, 11, imageType));
	addAlgorithm(1, "NCC 11x11", FactoryDescribeRegionPoint.pixelNCC(11, 11, imageType));

	// slot 2: association strategy flag (boolean passed through to the
	// associator; presumably selects backwards validation — confirm)
	addAlgorithm(2, "Greedy", false);
	addAlgorithm(2, "Backwards", true);

	// estimate orientation using this once since it is fast and accurate
	Class integralType = GIntegralImageOps.getIntegralType(imageType);
	OrientationIntegral orientationII = FactoryOrientationAlgs.sliding_ii(null, integralType);
	orientation = FactoryOrientation.convertImage(orientationII,imageType);

	// placeholder images (3-band planar color + gray); resized on load
	imageLeft = new Planar<>(imageType, 1, 1, 3);
	imageRight = new Planar<>(imageType, 1, 1, 3);
	grayLeft = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
	grayRight = GeneralizedImageOps.createSingleBand(imageType, 1, 1);

	setMainGUI(panel);
}