/**
 * Computes motion by associating points using the provided distance function then
 * computing the Se2_F64 transform.
 *
 * @return found motion
 */
protected Se2_F64 computeMotion(AssociateLrfMeas assoc) {
	// pair up measurements between the source and destination scans
	assoc.associate(scanSrc, scanDst);

	// estimate the rigid 2D motion directly from the matched point sets
	motionAlg.process(assoc.getListSource(), assoc.getListDestination());

	return motionAlg.getTransformSrcToDst();
}
/**
 * Constructor where default algorithms are used for motion estimation and nearest-neighbor search
 *
 * @param maxDistance Maximum distance two points can be apart for them to be associated
 * @param maxIterations Maximum number of iterations. Try 20
 * @param convergenceTol Tolerance for convergence. Try 1e-12
 */
public IcpCloud3D(double maxDistance, int maxIterations, double convergenceTol ) {
	// defaults: SVD-based rigid motion estimator and a 3D k-d tree for matching
	motionAlg = new MotionSe3PointSVD_F64();
	nn = FactoryNearestNeighbor.kdtree();
	nn.init(3);

	this.maxDistance = maxDistance;
	this.maxIterations = maxIterations;
	this.convergenceTol = convergenceTol;
}
/** * Creates two 3D point clouds for the left and right camera using the known calibration points and camera * calibration. Then find the optimal rigid body transform going from the right to left views. * * @return Transform from right to left view. */ private Se3_F64 computeRightToLeft() { // location of points in the world coordinate system List<Point2D_F64> points2D = layout; List<Point3D_F64> points3D = new ArrayList<>(); for( Point2D_F64 p : points2D ) { points3D.add( new Point3D_F64(p.x,p.y,0)); } // create point cloud in each view List<Point3D_F64> left = new ArrayList<>(); List<Point3D_F64> right = new ArrayList<>(); for( int i = 0; i < viewLeft.size(); i++ ) { Se3_F64 worldToLeft = viewLeft.get(i); Se3_F64 worldToRight = viewRight.get(i); // These points can really be arbitrary and don't have to be target points for( Point3D_F64 p : points3D ) { Point3D_F64 l = SePointOps_F64.transform(worldToLeft, p, null); Point3D_F64 r = SePointOps_F64.transform(worldToRight, p, null); left.add(l); right.add(r); } } // find the transform from right to left cameras return FitSpecialEuclideanOps_F64.fitPoints3D(right,left); }
/** * Robust solution to PnP problem using {@link Ransac}. Input observations are in normalized * image coordinates. * * <p>See code for all the details.</p> * * @param pnp PnP parameters. Can't be null. * @param ransac Parameters for RANSAC. Can't be null. * @return Robust Se3_F64 estimator */ public static Ransac<Se3_F64, Point2D3D> pnpRansac( ConfigPnP pnp, ConfigRansac ransac) { Estimate1ofPnP estimatorPnP = FactoryMultiView.computePnP_1(pnp.which, -1, pnp.numResolve); DistanceModelMonoPixels<Se3_F64,Point2D3D> distance = new PnPDistanceReprojectionSq(); distance.setIntrinsic(pnp.intrinsic.fx,pnp.intrinsic.fy,pnp.intrinsic.skew); ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<>(estimatorPnP); // convert from pixels to pixels squared double threshold = ransac.inlierThreshold*ransac.inlierThreshold; return new Ransac<>(ransac.randSeed, manager, generator, distance, ransac.maxIterations, threshold); }
/**
 * Robust solution to PnP problem using {@link LeastMedianOfSquares LMedS}. Input observations are
 * in normalized image coordinates.
 *
 * <ul>
 * <li>Error units is pixels squared.</li>
 * </ul>
 *
 * <p>See code for all the details.</p>
 *
 * @param pnp PnP parameters. Can't be null.
 * @param lmeds Parameters for LMedS. Can't be null.
 * @return Robust Se3_F64 estimator
 */
public static LeastMedianOfSquares<Se3_F64, Point2D3D> pnpLMedS( ConfigPnP pnp, ConfigLMedS lmeds) {
	// BUG FIX: the algorithm and number of resolved solutions were previously hard-coded to
	// P3P_FINSTERWALDER / 1, silently ignoring the caller's 'pnp' configuration. Honor the
	// configuration the same way pnpRansac() does.
	Estimate1ofPnP estimatorPnP = FactoryMultiView.computePnP_1(pnp.which, -1, pnp.numResolve);

	// reprojection error in pixels squared, configured with the camera's intrinsic parameters
	DistanceModelMonoPixels<Se3_F64,Point2D3D> distance = new PnPDistanceReprojectionSq();
	distance.setIntrinsic(pnp.intrinsic.fx,pnp.intrinsic.fy,pnp.intrinsic.skew);

	ModelManagerSe3_F64 manager = new ModelManagerSe3_F64();
	EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<>(estimatorPnP);

	return new LeastMedianOfSquares<>(lmeds.randSeed, lmeds.totalCycles, manager, generator, distance);
}
/** * Creates two 3D point clouds for the left and right camera using the known calibration points and camera * calibration. Then find the optimal rigid body transform going from the right to left views. * * @return Transform from right to left view. */ private Se3_F64 computeRightToLeft() { // location of points in the world coordinate system List<Point2D_F64> points2D = layout; List<Point3D_F64> points3D = new ArrayList<>(); for( Point2D_F64 p : points2D ) { points3D.add( new Point3D_F64(p.x,p.y,0)); } // create point cloud in each view List<Point3D_F64> left = new ArrayList<>(); List<Point3D_F64> right = new ArrayList<>(); for( int i = 0; i < viewLeft.size(); i++ ) { Se3_F64 worldToLeft = viewLeft.get(i); Se3_F64 worldToRight = viewRight.get(i); // These points can really be arbitrary and don't have to be target points for( Point3D_F64 p : points3D ) { Point3D_F64 l = SePointOps_F64.transform(worldToLeft, p, null); Point3D_F64 r = SePointOps_F64.transform(worldToRight, p, null); left.add(l); right.add(r); } } // find the transform from right to left cameras return FitSpecialEuclideanOps_F64.fitPoints3D(right,left); }
/**
 * Constructor where default algorithms are used for motion estimation and nearest-neighbor search
 *
 * @param maxDistance Maximum distance two points can be apart for them to be associated
 * @param maxIterations Maximum number of iterations. Try 20
 * @param convergenceTol Tolerance for convergence. Try 1e-12
 */
public IcpCloud3D(double maxDistance, int maxIterations, double convergenceTol ) {
	this.maxDistance = maxDistance;
	this.maxIterations = maxIterations;
	this.convergenceTol = convergenceTol;

	// rigid-body motion is estimated with an SVD based solver by default
	motionAlg = new MotionSe3PointSVD_F64();

	// nearest-neighbor association uses a k-d tree over 3D points
	nn = FactoryNearestNeighbor.kdtree();
	nn.init(3);
}
/** * Robust solution to PnP problem using {@link Ransac}. Input observations are in normalized * image coordinates. Found transform is from world to camera. * * <p>NOTE: Observations are in normalized image coordinates NOT pixels.</p> * * <p>See code for all the details.</p> * * @see Estimate1ofPnP * * @param pnp PnP parameters. Can't be null. * @param ransac Parameters for RANSAC. Can't be null. * @return Robust Se3_F64 estimator */ public static ModelMatcherMultiview<Se3_F64, Point2D3D> pnpRansac( @Nullable ConfigPnP pnp, @Nonnull ConfigRansac ransac ) { if( pnp == null ) pnp = new ConfigPnP(); pnp.checkValidity(); ransac.checkValidity(); Estimate1ofPnP estimatorPnP = FactoryMultiView.pnp_1(pnp.which, pnp.epnpIterations, pnp.numResolve); DistanceFromModelMultiView<Se3_F64,Point2D3D> distance = new PnPDistanceReprojectionSq(); ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<>(estimatorPnP); // convert from pixels to pixels squared double threshold = ransac.inlierThreshold*ransac.inlierThreshold; return new RansacMultiView<>(ransac.randSeed, manager, generator, distance, ransac.maxIterations, threshold); }
/**
 * Creates an estimator for the PnP problem that uses only three observations, which is the minimal case
 * and known as P3P.
 *
 * @param which The algorithm which is to be returned.
 * @param numIterations Number of iterations. Only used by some algorithms and recommended number varies
 * significantly by algorithm.
 * @return An estimator which can return multiple estimates.
 */
public static EstimateNofPnP computePnP_N(EnumPNP which , int numIterations ) {
	// rigid-motion solver shared by the P3P line-distance wrappers below
	MotionTransformPoint<Se3_F64, Point3D_F64> motionFit = FitSpecialEuclideanOps_F64.fitPoints3D();

	if( which == EnumPNP.P3P_GRUNERT ) {
		P3PGrunert alg = new P3PGrunert(PolynomialOps.createRootFinder(5, RootFinderType.STURM));
		return new WrapP3PLineDistance(alg, motionFit);
	}
	if( which == EnumPNP.P3P_FINSTERWALDER ) {
		P3PFinsterwalder alg = new P3PFinsterwalder(PolynomialOps.createRootFinder(4, RootFinderType.STURM));
		return new WrapP3PLineDistance(alg, motionFit);
	}
	if( which == EnumPNP.EPNP ) {
		// EPnP returns a single solution; adapt it to the N-solution interface
		return new Estimate1toNofPnP(computePnP_1(which, numIterations, 0));
	}

	throw new IllegalArgumentException("Type "+which+" not known");
}
/**
 * Constructor where default algorithms are used for motion estimation and nearest-neighbor search
 *
 * @param maxDistance Maximum distance two points can be apart for them to be associated
 * @param maxIterations Maximum number of iterations. Try 20
 * @param convergenceTol Tolerance for convergence. Try 1e-12
 */
public IcpCloud3D(double maxDistance, int maxIterations, double convergenceTol ) {
	// stopping criteria supplied by the caller
	this.maxIterations = maxIterations;
	this.convergenceTol = convergenceTol;
	// association gate
	this.maxDistance = maxDistance;

	// default algorithms: SVD motion estimator and a 3D k-d tree search
	motionAlg = new MotionSe3PointSVD_F64();
	nn = FactoryNearestNeighbor.kdtree();
	nn.init(3);
}
// NOTE(review): fragment of a larger factory method; 'estimatorPnP' is declared above this view.
// The manager copies/creates Se3_F64 models for the robust estimator, and the generator adapts
// the single-solution PnP estimator to the model-generation interface.
ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<>(estimatorPnP);
/**
 * Creates an estimator for the PnP problem that uses only three observations, which is the minimal case
 * and known as P3P.
 *
 * <p>NOTE: Observations are in normalized image coordinates NOT pixels.</p>
 *
 * @param which The algorithm which is to be returned.
 * @param numIterations Number of iterations. Only used by some algorithms and recommended number varies
 * significantly by algorithm.
 * @return An estimator which can return multiple estimates.
 */
public static EstimateNofPnP pnp_N(EnumPNP which , int numIterations ) {
	// rigid-motion solver shared by the P3P line-distance wrappers
	MotionTransformPoint<Se3_F64, Point3D_F64> motionFit = FitSpecialEuclideanOps_F64.fitPoints3D();

	switch( which ) {
		case P3P_GRUNERT: {
			P3PGrunert alg = new P3PGrunert(PolynomialOps.createRootFinder(5, RootFinderType.STURM));
			return new WrapP3PLineDistance(alg, motionFit);
		}

		case P3P_FINSTERWALDER: {
			P3PFinsterwalder alg = new P3PFinsterwalder(PolynomialOps.createRootFinder(4,RootFinderType.STURM));
			return new WrapP3PLineDistance(alg, motionFit);
		}

		case EPNP:
			// EPnP produces a single solution; adapt it to the N-solution interface
			return new Estimate1toNofPnP(pnp_1(which, numIterations, 0));

		case IPPE: {
			// planar pose from a homography estimated with total least squares
			Estimate1ofEpipolar homography = FactoryMultiView.homographyTLS();
			return new Estimate1toNofPnP(new IPPE_to_EstimatePnP(homography));
		}

		default:
			throw new IllegalArgumentException("Type "+which+" not known");
	}
}
/**
 * Robust solution for estimating {@link Se3_F64} using epipolar geometry from two views with
 * {@link Ransac}. Input observations are in normalized image coordinates.
 *
 * <p>See code for all the details.</p>
 *
 * @param essential Essential matrix estimation parameters. Can't be null.
 * @param ransac Parameters for RANSAC. Can't be null.
 * @return Robust Se3_F64 estimator
 */
public static Ransac<Se3_F64, AssociatedPair> essentialRansac( ConfigEssential essential,
															   ConfigRansac ransac ) {
	essential.checkValidity();

	Estimate1ofEpipolar epipolarAlg =
			FactoryMultiView.computeFundamental_1(essential.which, essential.numResolve);
	TriangulateTwoViewsCalibrated triangulator = FactoryMultiView.triangulateTwoGeometric();

	// decomposes the essential matrix and selects the physically valid motion via triangulation
	ModelGenerator<Se3_F64, AssociatedPair> motionGenerator =
			new Se3FromEssentialGenerator(epipolarAlg, triangulator);
	ModelManager<Se3_F64> manager = new ModelManagerSe3_F64();

	CameraPinholeRadial intrinsic = essential.intrinsic;
	DistanceFromModel<Se3_F64, AssociatedPair> distanceSe3 =
			new DistanceSe3SymmetricSq(triangulator,
					intrinsic.fx, intrinsic.fy, intrinsic.skew,
					intrinsic.fx, intrinsic.fy, intrinsic.skew);

	// symmetric error sums contributions from both views, hence the factor of 2
	double ransacTOL = 2.0 * ransac.inlierThreshold * ransac.inlierThreshold;

	return new Ransac<>(ransac.randSeed, manager, motionGenerator, distanceSe3,
			ransac.maxIterations, ransacTOL);
}
// NOTE(review): fragment of a larger method; this first line continues a declaration started
// above this view (presumably an Estimate1ofEpipolar assignment — confirm against the caller).
computeFundamental_1(essential.which, essential.numResolve);
// geometric triangulation used when resolving the motion from the essential matrix
TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();
// model manager plus a generator which decomposes the essential matrix into an Se3_F64 motion
ModelManager<Se3_F64> manager = new ModelManagerSe3_F64(); ModelGenerator<Se3_F64, AssociatedPair> generateEpipolarMotion = new Se3FromEssentialGenerator(essentialAlg, triangulate);
// NOTE(review): fragment of a larger method; 'epipolar' and 'triangulate' are declared above
// this view. Sets up the Se3_F64 model manager and a generator that recovers motion from an
// essential matrix.
ModelManager<Se3_F64> manager = new ModelManagerSe3_F64(); ModelGenerator<Se3_F64, AssociatedPair> generateEpipolarMotion = new Se3FromEssentialGenerator(epipolar, triangulate);
// NOTE(review): fragment of a larger method; 'epipolar' and 'triangulate' come from code above
// this view. Creates the model manager and the essential-matrix-based motion generator used by
// a robust estimator.
ModelManager<Se3_F64> manager = new ModelManagerSe3_F64(); ModelGenerator<Se3_F64, AssociatedPair> generateEpipolarMotion = new Se3FromEssentialGenerator(epipolar, triangulate);
// NOTE(review): fragment of a larger method; 'distance', 'intrinsic', and 'estimator' are
// declared above this view. Configures the reprojection metric with camera intrinsics, then
// builds the model manager and the PnP model generator.
distance.setIntrinsic(intrinsic.fx,intrinsic.fy,intrinsic.skew); ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<Se3_F64,Point2D3D>(estimator);
// NOTE(review): fragment of a larger method; 'distance', 'intrinsic', and 'estimator' are
// defined before this view. Passes the camera intrinsics to the distance metric and sets up the
// Se3_F64 manager/generator pair for robust estimation.
distance.setIntrinsic(intrinsic.fx,intrinsic.fy,intrinsic.skew); ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<Se3_F64,Point2D3D>(estimator);
// NOTE(review): fragment of a larger method; 'distance', 'intrinsic', and 'estimator' originate
// above this view. Intrinsics configure the reprojection-error metric; the manager and generator
// adapt the PnP estimator to a robust model-fitting interface.
distance.setIntrinsic(intrinsic.fx,intrinsic.fy,intrinsic.skew); ModelManagerSe3_F64 manager = new ModelManagerSe3_F64(); EstimatorToGenerator<Se3_F64,Point2D3D> generator = new EstimatorToGenerator<Se3_F64,Point2D3D>(estimator);