Convert a fisheye image to an equirectangular image with OpenCV 4

Question · 0 votes · 2 answers

I want to convert a single circular fisheye image into an equirectangular image using a C++ algorithm and OpenCV 4.

The idea is to start from an input image loaded on my computer, like this one: [input fisheye image]

I want to obtain an output image like this: [expected equirectangular image]

I am using the method described on this page: http://paulbourke.net/dome/dualfish2sphere/

The method can be summed up by this diagram: [diagram of the fisheye-to-equirectangular mapping]
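To make the steps explicit, this is the mapping my code below is supposed to implement for each pixel $(X_e, Y_e)$ of the equirectangular output (so if I misread the page, the mistake will already be visible here):

$$
\begin{aligned}
x_n &= \frac{2X_e - W_e}{W_f}, \qquad y_n = \frac{2Y_e - H_e}{H_f} \qquad \text{(normalized to } [-1, 1]\text{)}\\
\lambda &= x_n\,\pi, \qquad \varphi = y_n\,\frac{\pi}{2}\\
P &= (\cos\varphi\cos\lambda,\ \cos\varphi\sin\lambda,\ \sin\varphi)\\
r &= \frac{2}{\mathrm{FOV}}\,\operatorname{atan2}\!\left(\sqrt{P_x^{2}+P_z^{2}},\,P_y\right), \qquad \theta = \operatorname{atan2}(P_z,\,-P_x)\\
x_f &= r\cos\theta, \qquad y_f = r\sin\theta
\end{aligned}
$$

and finally $(x_f, y_f)$ is rescaled from $[-1, 1]$ back to pixel coordinates in the fisheye image, $X_f = (x_f + 1)\,W_f/2$ and $Y_f = (y_f + 1)\,H_f/2$.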

Unfortunately, when I run my code I get a result like this: [distorted output image]

I am working on macOS with Xcode, and I build and run my code from the iTerm2 terminal.

Here is the code:

#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>

using namespace std;
using namespace cv;

const double PI = 3.141592653589793;
const string PATH_IMAGE = "/Users/Kenza/Desktop/Xcode_cpp_opencv/PaulBourke2/PaulBourke2/Images/img1.jpg";
const int ESC = 27;

Point2f findCorrespondingFisheyePoint(int Xe, int Ye, double He, double We, double Hf, double Wf, double FOV){
    Point2f fisheyePoint;
    double Xfn, Yfn; //Normalized Cartesian Coordinates
    double longitude, latitude, Px, Py, Pz; //Spherical Coordinates
    double r, theta; //Polar coordinates
    double Xpn, Ypn; //Normalized Polar coordinates

    //Normalize Coordinates
    Xfn = ( ( 2.0 * (double)Xe ) - We) / Wf;//Between -1 and 1
    Yfn = ( ( 2.0 * (double)Ye ) - He) / Hf;//Between -1 and 1

    //Normalize Coordinates to Spherical Coordinates
    longitude = Xfn*PI; //Between -PI and PI (2*PI interval)
    latitude = Yfn*(PI/2.0); //Between -PI/2 and PI/2 (PI interval)
    Px = cos(latitude)*cos(longitude);
    Py = cos(latitude)*sin(longitude);
    Pz = sin(latitude);

    //Spherical Coordinates to Polar Coordinates
    r =  2.0 * atan2(sqrt(pow(Px,2)+pow(Pz,2)),Py)/FOV;
    theta = atan2(Pz,-Px);
    Xpn = r * cos(theta);
    Ypn = r * sin(theta);

    //Normalized coordinates to Cartesian image coordinates
    fisheyePoint.x = (int)(((Xpn+1.0)*Wf)/2.0);
    fisheyePoint.y = (int)(((Ypn+1.0)*Hf)/2.0);

    return fisheyePoint;
}

int main(int argc, char** argv){

    Mat fisheyeImage, equirectangularImage;

    fisheyeImage = imread(PATH_IMAGE, CV_32FC1);
    namedWindow("Fisheye Image", WINDOW_AUTOSIZE);
    imshow("Fisheye Image", fisheyeImage);

    while(waitKey(0) != ESC) {
        //wait until the key ESC is pressed
    }

    //destroyWindow("Fisheye Image");

    int Hf, Wf; //Height and width of the input image (=fisheyeImage)
    double FOV; //Field of view of the fisheye lens, in radians
    int He, We; //Height and width of the output image (=equirectangularImage)

    Hf = fisheyeImage.size().height;
    Wf = fisheyeImage.size().width;
    FOV = PI; //FOV in radian

    //We keep the same dimensions for the input and the output image
    We = Wf;
    He = Hf;

    equirectangularImage.create(Hf, Wf, fisheyeImage.type()); //We create the output image (=equirectangularImage)

    //For each pixel of the output equirectangular image
    for (int Xe = 0; Xe <equirectangularImage.size().width; Xe++){
        for (int Ye = 0; Ye <equirectangularImage.size().height; Ye++){

            equirectangularImage.at<Vec3b>(Point(Xe,Ye)) = fisheyeImage.at<Vec3b>(findCorrespondingFisheyePoint(Xe, Ye, He, We, Hf, Wf, FOV)); //We find the corresponding point in the fisheyeImage
        }
    }

    namedWindow("Equirectangular Image", WINDOW_AUTOSIZE);
    imshow("Equirectangular Image",equirectangularImage);

    while(waitKey(0) != ESC) {
        //wait until the key ESC is pressed
    }

    destroyWindow("Fisheye Image");

    imwrite("equirectangularImage.jpg", equirectangularImage);

    return 0;

}
c++ opencv image-processing projection fisheye
2 Answers

1 vote

With this code, I got the expected result:

#include <iostream>
#include <cmath>
#include <opencv2/highgui/highgui.hpp>

using namespace std;
using namespace cv;

const string PATH_IMAGE = "/Users/Kenza/Desktop/Xcode_cpp_opencv/Sos/Sos/Images/img1.jpg";
const int ESC = 27;

Point2f findCorrespondingFisheyePoint(int Xe, int Ye, int We, int He, float FOV){
    Point2f fisheyePoint;
    float theta, phi, r;
    Point3f sphericalPoint;

    //Equirectangular pixel -> longitude/latitude angles
    theta = CV_PI * (Xe / ( (float) We ) - 0.5);
    phi = CV_PI * (Ye / ( (float) He ) - 0.5);

    //Longitude/latitude -> point on the unit sphere
    sphericalPoint.x = cos(phi) * sin(theta);
    sphericalPoint.y = cos(phi) * cos(theta);
    sphericalPoint.z = sin(phi);

    //Unit-sphere point -> fisheye polar coordinates (angle and radius in pixels)
    theta = atan2(sphericalPoint.z, sphericalPoint.x);
    phi = atan2(sqrt(pow(sphericalPoint.x,2) + pow(sphericalPoint.z,2)), sphericalPoint.y);
    r = ( (float) We ) * phi / FOV;

    //Polar coordinates -> pixel coordinates in the fisheye image
    fisheyePoint.x = (int) ( 0.5 * ( (float) We ) + r * cos(theta) );
    fisheyePoint.y = (int) ( 0.5 * ( (float) He ) + r * sin(theta) );

    return fisheyePoint;
}

int main(int argc, char** argv){

    Mat fisheyeImage, equirectangularImage;
    int Wf, Hf;
    float FOV;
    int We, He;

    fisheyeImage = imread(PATH_IMAGE, IMREAD_COLOR);
    namedWindow("Fisheye Image");
    imshow("fisheye Image", fisheyeImage);

    Wf = fisheyeImage.size().width;
    Hf = fisheyeImage.size().height;
    FOV = (180 * CV_PI ) / 180;

    We = Wf;
    He = Hf;

    while (waitKey(0) != ESC){
    }

    equirectangularImage.create(He, We, CV_8UC3);

    for (int Xe = 0; Xe < We; Xe++){
        for (int Ye = 0; Ye < He; Ye++){

            Point2f fisheyePoint = findCorrespondingFisheyePoint(Xe, Ye, We, He, FOV);

            //Skip points that fall outside the fisheye image
            if (fisheyePoint.x >= We || fisheyePoint.y >= He)
                continue;
            if (fisheyePoint.x < 0 || fisheyePoint.y < 0)
                continue;

            equirectangularImage.at<Vec3b>(Point(Xe, Ye)) = fisheyeImage.at<Vec3b>(fisheyePoint);
        }
    }

    namedWindow("Equirectangular Image");
    imshow("Equirectangular Image", equirectangularImage);

    while (waitKey(0) != ESC){

    }

    imwrite("im2.jpg", equirectangularImage);

}
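
A note that is not part of the original answer: since the mapping above depends only on the image size and the FOV, it can be computed once into two CV_32FC1 maps and then applied with cv::remap, which also gives bilinear interpolation instead of the nearest-neighbour Vec3b lookups. A minimal sketch, assuming the same using namespace cv as above and reusing findCorrespondingFisheyePoint from that answer:

#include <opencv2/imgproc/imgproc.hpp> //for cv::remap

//Build the equirectangular image by precomputing the pixel mapping once.
//findCorrespondingFisheyePoint(Xe, Ye, We, He, FOV) is the function from the answer above.
Mat fisheyeToEquirectangular(const Mat& fisheyeImage, float FOV){
    int We = fisheyeImage.cols;
    int He = fisheyeImage.rows;

    Mat map_x(He, We, CV_32FC1), map_y(He, We, CV_32FC1);
    for (int Ye = 0; Ye < He; Ye++){
        for (int Xe = 0; Xe < We; Xe++){
            Point2f p = findCorrespondingFisheyePoint(Xe, Ye, We, He, FOV);
            map_x.at<float>(Ye, Xe) = p.x; //source column in the fisheye image
            map_y.at<float>(Ye, Xe) = p.y; //source row in the fisheye image
        }
    }

    Mat equirectangularImage;
    remap(fisheyeImage, equirectangularImage, map_x, map_y,
          INTER_LINEAR, BORDER_CONSTANT, Scalar(0, 0, 0)); //out-of-range points become black
    return equirectangularImage;
}

Called as equirectangularImage = fisheyeToEquirectangular(fisheyeImage, FOV);, this replaces the double loop and the bounds checks in main above.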

0 votes

I ran into the same problem following the same instructions, except that I was using MATLAB.

I finally found that the problem lies in the normalization of the X coordinate at the very beginning.

In the instructions it looks like both the X and Y coordinates of the equirectangular image should be normalized to [-1, 1]. But no: X should actually be mapped to [0, 1]. With that change you get the correct result.
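
In C++ terms, the change amounts to something like the following sketch of just the normalization step for the question's code (the helper name is made up for illustration; only the remapping of X matters):

const double PI = 3.141592653589793; //same constant as in the question's code

//Equirectangular pixel -> longitude/latitude, with X remapped to [0, 1]
//as described above (Y stays in [-1, 1]).
void equirectangularToLonLat(int Xe, int Ye, int We, int He,
                             double& longitude, double& latitude){
    double Xfn = ( ( 2.0 * (double)Xe ) - We ) / We; //in [-1, 1]
    double Yfn = ( ( 2.0 * (double)Ye ) - He ) / He; //in [-1, 1]
    Xfn = ( Xfn + 1.0 ) / 2.0;                       //remapped to [0, 1]
    longitude = Xfn * PI;           //in [0, PI]
    latitude  = Yfn * ( PI / 2.0 ); //in [-PI/2, PI/2]
}

The rest of the mapping (spherical point, r, theta and the rescaling to fisheye pixels) stays as in the question.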

Here is the MATLAB version of my algorithm:

img=imread("0.jpg")
[nRow, nCol, ~]=size(img);
imshow(img)

aperture_in_degree=200;
equirect=uint8(zeros(nRow, nCol,3));
for xCoord=1:nCol
    for yCoord=1:nRow
        [xNorm, yNorm]=normalize_coord(nRow, nCol, xCoord, yCoord);
        xNorm=(xNorm+1)/2;

        longitude=xNorm*pi;
        latitude=yNorm*pi/2;
        px=cos(latitude)*cos(longitude);
        py=cos(latitude)*sin(longitude);
        pz=sin(latitude);
        r=2*atan2(norm([px pz]),py)/(aperture_in_degree*pi/180);
        theta=atan2(pz,px);
        x=r*cos(theta);
        y=r*sin(theta);

        [x,y]=unnormalize_coord(nRow,nCol,x,y);
        x=fix(x);
        y=fix(y);
        equirect(yCoord,end-xCoord+1,:)=img(y,x,:);
    end
end
imshow(equirect,[]);

function [x,y] = normalize_coord(R, C, x, y) % [x,y] to [-1,1]
    coordNorm = ([x y]-[C+1 R+1]/2)./[C-1 R-1]*2;
    x=coordNorm(1);
    y=coordNorm(2);
end

function [x,y] = unnormalize_coord(R, C, x, y) % [-1,1] to [x,y]
    coordNorm = [x y].*[C-1 R-1]/2+[C+1 R+1]/2;
    x=coordNorm(1);
    y=coordNorm(2);
end

Before & after: [images]
