GeometryMath_F32.mult(K_inv, temp0, out);
/**
 * Recenters the synthetic pinhole view on the fisheye pixel the user clicked.
 * Converts the mouse location into fisheye image coordinates, computes the
 * rotation which points the narrow camera at that direction, and updates the
 * distortion model.  Re-renders immediately when the input is a still image.
 *
 * @param e Mouse event containing the click location in panel coordinates
 */
private void changeFocus(MouseEvent e) {
	double scale = panelFisheye.scale;
	// undo the display zoom to get fisheye image pixel coordinates
	double omniX = e.getX()/scale;
	double omniY = e.getY()/scale;

	// ignore clicks outside the fisheye image
	if( !fisheye.isInBounds((int)omniX,(int)omniY))
		return;
	panelPinhole.grabFocus();
	// lock so the model isn't swapped while another thread is rendering
	synchronized (imageLock) {
		// pointing direction (on the unit sphere) of the clicked pixel
		Point3D_F32 norm = new Point3D_F32();
		fisheyeDistort.undistortPtoS_F32().compute((float)omniX, (float)omniY, norm);

		// axis-angle rotation taking the canonical +z view axis onto that direction
		Rodrigues_F32 rotation = new Rodrigues_F32();
		Vector3D_F32 canonical = new Vector3D_F32(0,0,1);
		rotation.theta = UtilVector3D_F32.acute(new Vector3D_F32(norm),canonical);
		// rotation axis is perpendicular to both the canonical axis and the target
		GeometryMath_F32.cross(canonical,norm,rotation.unitAxisRotation);
		rotation.unitAxisRotation.normalize();

		FMatrixRMaj R = ConvertRotation3D_F32.rodriguesToMatrix(rotation,null);

		distorter.setRotationWideToNarrow(R);
		distortImage.setModel(new PointToPixelTransform_F32(distorter));

		// videos re-render on the next frame; a still image must be redrawn now
		if (inputMethod == InputMethod.IMAGE) {
			rerenderPinhole();
		}
	}
}
/**
 * Converts a subpixel (float) disparity image into a colorized 3D point cloud.
 * Invalid and at-infinity pixels are skipped.  Results are appended to
 * {@code cloudXyz} (x,y,z triples) and {@code cloudRgb}.
 *
 * @param disparity Disparity image.  Values at or above rangeDisparity are invalid.
 * @param color Color image used to look up each point's RGB value
 */
private void process(GrayF32 disparity , BufferedImage color ) {
	for( int pixelY = 0; pixelY < disparity.height; pixelY++ ) {
		// direct index into the raw data array for row pixelY
		int index = disparity.startIndex + disparity.stride*pixelY;

		for( int pixelX = 0; pixelX < disparity.width; pixelX++ ) {
			float value = disparity.data[index++];

			// invalid disparity
			if( value >= rangeDisparity )
				continue;

			// stored disparity is an offset from the minimum searched disparity
			value += minDisparity;

			// The point lies at infinity.
			if( value == 0 )
				continue;

			// Note that this will be in the rectified left camera's reference frame.
			// An additional rotation is needed to put it into the original left camera frame.
			p.z = baseline*focalLengthX/value;
			p.x = p.z*(pixelX - centerX)/focalLengthX;
			p.y = p.z*(pixelY - centerY)/focalLengthY;

			// Bring it back into left camera frame
			GeometryMath_F32.multTran(rectifiedR,p,p);

			cloudRgb.add(getColor(disparity, color, pixelX, pixelY));
			cloudXyz.add(p.x);
			cloudXyz.add(p.y);
			cloudXyz.add(p.z);
		}
	}
}
/**
 * Points the synthetic pinhole camera at the specified pixel in the fisheye image.
 * Computes the rotation which carries the canonical +z view axis onto the
 * pointing direction of that pixel and installs it in the distortion model.
 *
 * @param pixelX x-coordinate of the focus pixel in the fisheye image
 * @param pixelY y-coordinate of the focus pixel in the fisheye image
 */
public void setPinholeCenter( double pixelX , double pixelY ) {
	this.pixelX = pixelX;
	this.pixelY = pixelY;

	// direction on the unit sphere which this fisheye pixel observes
	Point3D_F32 pointing = new Point3D_F32();
	fisheyeDistort.undistortPtoS_F32().compute((float)pixelX, (float)pixelY, pointing);

	// axis-angle rotation from the canonical +z axis to that direction
	Vector3D_F32 axisZ = new Vector3D_F32(0,0,1);
	Rodrigues_F32 rodrigues = new Rodrigues_F32();
	rodrigues.theta = UtilVector3D_F32.acute(new Vector3D_F32(pointing),axisZ);
	// the rotation axis is perpendicular to both directions
	GeometryMath_F32.cross(axisZ,pointing,rodrigues.unitAxisRotation);
	rodrigues.unitAxisRotation.normalize();

	FMatrixRMaj R = ConvertRotation3D_F32.rodriguesToMatrix(rodrigues,null);

	distorter.setRotationWideToNarrow(R);
	distortImage.setModel(new PointToPixelTransform_F32(distorter));
}
/**
 * Converts an integer (U8) disparity image into a colorized 3D point cloud.
 * Invalid and at-infinity pixels are skipped.  Results are appended to
 * {@code cloudXyz} (x,y,z triples) and {@code cloudRgb}.
 *
 * @param disparity Disparity image.  Values at or above rangeDisparity are invalid.
 * @param color Color image used to look up each point's RGB value
 */
private void process(GrayU8 disparity , BufferedImage color ) {
	for( int pixelY = 0; pixelY < disparity.height; pixelY++ ) {
		// direct index into the raw data array for row pixelY
		int index = disparity.startIndex + disparity.stride*pixelY;

		for( int pixelX = 0; pixelX < disparity.width; pixelX++ ) {
			// mask off the sign bit since the byte stores an unsigned value
			int value = disparity.data[index++] & 0xFF;

			// invalid disparity
			if( value >= rangeDisparity )
				continue;

			// stored disparity is an offset from the minimum searched disparity
			value += minDisparity;

			// The point lies at infinity.
			if( value == 0 )
				continue;

			// Note that this will be in the rectified left camera's reference frame.
			// An additional rotation is needed to put it into the original left camera frame.
			p.z = baseline*focalLengthX/value;
			p.x = p.z*(pixelX - centerX)/focalLengthX;
			p.y = p.z*(pixelY - centerY)/focalLengthY;

			// Bring it back into left camera frame
			GeometryMath_F32.multTran(rectifiedR,p,p);

			cloudRgb.add(getColor(disparity, color, pixelX, pixelY));
			cloudXyz.add(p.x);
			cloudXyz.add(p.y);
			cloudXyz.add(p.z);
		}
	}
}
/**
 * Converts a pixel coordinate into a normalized image coordinate by applying
 * the {@code K_inv} matrix.
 *
 * @param x Pixel x-coordinate
 * @param y Pixel y-coordinate
 * @param out Storage for the normalized image coordinate
 */
@Override
public void compute(float x, float y, Point2D_F32 out) {
	out.x = x;
	out.y = y;
	// out = K_inv * (x, y, 1)
	GeometryMath_F32.mult(K_inv, out, out);
}
}
/** * Input is in pinhole camera pixel coordinates. Output is in equirectangular coordinates * * @param x Pixel x-coordinate in rendered pinhole camera * @param y Pixel y-coordinate in rendered pinhole camera */ @Override public void compute(int x, int y) { // grab precomputed normalized image coordinate at canonical location Point3D_F32 v = vectors[y*outWidth+x]; // move to requested orientation GeometryMath_F32.mult(R,v,n); // TODO make faster by not using an array based matrix // compute pixel coordinate tools.normToEquiFV(n.x,n.y,n.z,out); distX = out.x; distY = out.y; }
/** * Removes radial distortion * * @param x Distorted x-coordinate pixel * @param y Distorted y-coordinate pixel * @param out Undistorted normalized coordinate. */ @Override public void compute(float x, float y, Point2D_F32 out) { out.x = x; out.y = y; // initial estimate of undistorted point GeometryMath_F32.mult(K_inv, out, out); removeRadial(out.x, out.y, params.radial, params.t1, params.t2, out, tol ); } }
/** * Input is in pinhole camera pixel coordinates. Output is in equirectangular coordinates * * @param x Pixel x-coordinate in rendered pinhole camera * @param y Pixel y-coordinate in rendered pinhole camera */ @Override public void compute(int x, int y) { // grab precomputed normalized image coordinate at canonical location Point3D_F32 v = vectors[y*outWidth+x]; // move to requested orientation GeometryMath_F32.mult(R,v,n); // TODO make faster by not using an array based matrix // compute pixel coordinate tools.normToEquiFV(n.x,n.y,n.z,out); distX = out.x; distY = out.y; }
GeometryMath_F32.mult(K_inv, p2, p2);
GeometryMath_F32.mult(K_inv, p2, p2);
public static Point2D_F32 renderPixel( Se3_F32 worldToCamera , FMatrixRMaj K , Point3D_F32 X ) { Point3D_F32 X_cam = new Point3D_F32(); SePointOps_F32.transform(worldToCamera, X, X_cam); // see if it's behind the camera if( X_cam.z <= 0 ) return null; Point2D_F32 norm = new Point2D_F32(X_cam.x/X_cam.z,X_cam.y/X_cam.z); if( K == null ) return norm; // convert into pixel coordinates return GeometryMath_F32.mult(K, norm, norm); }
/** * Apply the transformation * * @param x x-coordinate of point in pixels. Synthetic narrow FOV camera * @param y y-coordinate of point in pixels. Synthetic narrow FOV camera * @param out Pixel location of point in wide FOV camera. */ @Override public void compute(float x, float y, Point2D_F32 out) { narrowToNorm.compute(x,y, norm); // Convert from 2D homogenous to 3D unit.set( norm.x , norm.y , 1.0f); // Rotate then make it a unit vector GeometryMath_F32.mult(rotateWideToNarrow,unit,unit); float n = unit.norm(); unit.x /= n; unit.y /= n; unit.z /= n; unitToWide.compute(unit.x,unit.y,unit.z,out); }
/** * Apply the transformation * * @param x x-coordinate of point in pixels. Synthetic narrow FOV camera * @param y y-coordinate of point in pixels. Synthetic narrow FOV camera * @param out Pixel location of point in wide FOV camera. */ @Override public void compute(float x, float y, Point2D_F32 out) { narrowToNorm.compute(x,y, norm); // Convert from 2D homogenous to 3D unit.set( norm.x , norm.y , 1.0f); // Rotate then make it a unit vector GeometryMath_F32.mult(rotateWideToNarrow,unit,unit); float n = unit.norm(); unit.x /= n; unit.y /= n; unit.z /= n; unitToWide.compute(unit.x,unit.y,unit.z,out); }