From 79ed4e4c9270f764d84ea806b2aeab02f9da9f0d Mon Sep 17 00:00:00 2001
From: Alexey Spizhevoy
Date: Mon, 16 May 2011 05:11:09 +0000
Subject: [PATCH] refactored opencv_stitching

---
 .../{focal_estimators.cpp => autocalib.cpp}  |  2 +-
 .../{focal_estimators.hpp => autocalib.hpp}  |  6 +--
 modules/stitching/main.cpp                   | 12 +++---
 modules/stitching/matchers.cpp               | 12 +++---
 modules/stitching/motion_estimators.cpp      | 26 ++++++------
 modules/stitching/motion_estimators.hpp      |  5 ++-
 modules/stitching/util_inl.hpp               |  4 +-
 modules/stitching/warpers.cpp                | 36 ++++++++---------
 modules/stitching/warpers.hpp                | 12 +++---
 modules/stitching/warpers_inl.hpp            | 40 +++++++++----------
 10 files changed, 78 insertions(+), 77 deletions(-)
 rename modules/stitching/{focal_estimators.cpp => autocalib.cpp} (98%)
 rename modules/stitching/{focal_estimators.hpp => autocalib.hpp} (72%)

diff --git a/modules/stitching/focal_estimators.cpp b/modules/stitching/autocalib.cpp
similarity index 98%
rename from modules/stitching/focal_estimators.cpp
rename to modules/stitching/autocalib.cpp
index aab6e99b2..3c1eca0bf 100644
--- a/modules/stitching/focal_estimators.cpp
+++ b/modules/stitching/autocalib.cpp
@@ -1,4 +1,4 @@
-#include "focal_estimators.hpp"
+#include "autocalib.hpp"
 #include "util.hpp"

 using namespace std;
diff --git a/modules/stitching/focal_estimators.hpp b/modules/stitching/autocalib.hpp
similarity index 72%
rename from modules/stitching/focal_estimators.hpp
rename to modules/stitching/autocalib.hpp
index d7fcbd006..b269a38c0 100644
--- a/modules/stitching/focal_estimators.hpp
+++ b/modules/stitching/autocalib.hpp
@@ -1,5 +1,5 @@
-#ifndef __OPENCV_FOCAL_ESTIMATORS_HPP__
-#define __OPENCV_FOCAL_ESTIMATORS_HPP__
+#ifndef __OPENCV_AUTOCALIB_HPP__
+#define __OPENCV_AUTOCALIB_HPP__

 #include

@@ -9,4 +9,4 @@ void focalsFromHomography(const cv::Mat &H, double &f0, double &f1, bool &f0_ok,

 bool focalsFromFundamental(const cv::Mat &F, double &f0, double &f1);

-#endif // __OPENCV_FOCAL_ESTIMATORS_HPP__
+#endif // __OPENCV_AUTOCALIB_HPP__
diff --git a/modules/stitching/main.cpp b/modules/stitching/main.cpp
index d37d10c54..fab1646d7 100644
--- a/modules/stitching/main.cpp
+++ b/modules/stitching/main.cpp
@@ -181,8 +181,8 @@ int main(int argc, char* argv[])
     for (size_t i = 0; i < cameras.size(); ++i)
     {
         Mat R;
-        cameras[i].M.convertTo(R, CV_32F);
-        cameras[i].M = R;
+        cameras[i].R.convertTo(R, CV_32F);
+        cameras[i].R = R;
         LOGLN("Initial focal length " << i << ": " << cameras[i].focal);
     }

@@ -195,10 +195,10 @@
         LOGLN("Wave correcting...");
         vector<Mat> rmats;
         for (size_t i = 0; i < cameras.size(); ++i)
-            rmats.push_back(cameras[i].M);
+            rmats.push_back(cameras[i].R);
         waveCorrect(rmats);
         for (size_t i = 0; i < cameras.size(); ++i)
-            cameras[i].M = rmats[i];
+            cameras[i].R = rmats[i];
     }

     // Find median focal length
@@ -226,8 +226,8 @@
     Ptr<Warper> warper = Warper::createByCameraFocal(camera_focal, warp_type);
     for (int i = 0; i < num_images; ++i)
     {
-        corners[i] = (*warper)(images[i], static_cast<float>(cameras[i].focal), cameras[i].M, images_warped[i]);
-        (*warper)(masks[i], static_cast<float>(cameras[i].focal), cameras[i].M, masks_warped[i], INTER_NEAREST, BORDER_CONSTANT);
+        corners[i] = (*warper)(images[i], static_cast<float>(cameras[i].focal), cameras[i].R, images_warped[i]);
+        (*warper)(masks[i], static_cast<float>(cameras[i].focal), cameras[i].R, masks_warped[i], INTER_NEAREST, BORDER_CONSTANT);
     }

     vector<Mat> images_f(num_images);
     for (int i = 0; i < num_images; ++i)
diff --git a/modules/stitching/matchers.cpp b/modules/stitching/matchers.cpp
index 997ba71b4..9dd44b626 100644
--- a/modules/stitching/matchers.cpp
+++ b/modules/stitching/matchers.cpp
@@ -303,14 +303,14 @@ void BestOf2NearestMatcher::match(const Mat &img1, const ImageFeatures &features
     Mat dst_points(1, matches_info.matches.size(), CV_32FC2);
     for (size_t i = 0; i < matches_info.matches.size(); ++i)
     {
-        const DMatch& m = matches_info.matches[i];
+        const DMatch& r = matches_info.matches[i];

-        Point2f p = features1.keypoints[m.queryIdx].pt;
+        Point2f p = features1.keypoints[r.queryIdx].pt;
         p.x -= img1.cols * 0.5f;
         p.y -= img1.rows * 0.5f;
         src_points.at<Point2f>(0, i) = p;

-        p = features2.keypoints[m.trainIdx].pt;
+        p = features2.keypoints[r.trainIdx].pt;
         p.x -= img2.cols * 0.5f;
         p.y -= img2.rows * 0.5f;
         dst_points.at<Point2f>(0, i) = p;
@@ -338,14 +338,14 @@ void BestOf2NearestMatcher::match(const Mat &img1, const ImageFeatures &features
         if (!matches_info.inliers_mask[i])
             continue;

-        const DMatch& m = matches_info.matches[i];
+        const DMatch& r = matches_info.matches[i];

-        Point2f p = features1.keypoints[m.queryIdx].pt;
+        Point2f p = features1.keypoints[r.queryIdx].pt;
         p.x -= img1.cols * 0.5f;
         p.y -= img2.rows * 0.5f;
         src_points.at<Point2f>(0, inlier_idx) = p;

-        p = features2.keypoints[m.trainIdx].pt;
+        p = features2.keypoints[r.trainIdx].pt;
         p.x -= img2.cols * 0.5f;
         p.y -= img2.rows * 0.5f;
         dst_points.at<Point2f>(0, inlier_idx) = p;
diff --git a/modules/stitching/motion_estimators.cpp b/modules/stitching/motion_estimators.cpp
index 43acd4788..ca575d84a 100644
--- a/modules/stitching/motion_estimators.cpp
+++ b/modules/stitching/motion_estimators.cpp
@@ -1,7 +1,7 @@
 #include
 #include "opencv2/core/core_c.h"
 #include
-#include "focal_estimators.hpp"
+#include "autocalib.hpp"
 #include "motion_estimators.hpp"
 #include "util.hpp"

@@ -11,7 +11,7 @@
 using namespace cv;

 //////////////////////////////////////////////////////////////////////////////

-CameraParams::CameraParams() : focal(1), M(Mat::eye(3, 3, CV_64F)), t(Mat::zeros(3, 1, CV_64F)) {}
+CameraParams::CameraParams() : focal(1), R(Mat::eye(3, 3, CV_64F)), t(Mat::zeros(3, 1, CV_64F)) {}

 CameraParams::CameraParams(const CameraParams &other)
@@ -23,7 +23,7 @@ CameraParams::CameraParams(const CameraParams &other)
 const CameraParams& CameraParams::operator =(const CameraParams &other)
 {
     focal = other.focal;
-    M = other.M.clone();
+    R = other.R.clone();
     t = other.t.clone();
     return *this;
 }
@@ -60,7 +60,7 @@ struct CalcRotation
         K_to.at<double>(1, 1) = f_to;

         Mat R = K_from.inv() * pairwise_matches[pair_idx].H.inv() * K_to;
-        cameras[edge.to].M = cameras[edge.from].M * R;
+        cameras[edge.to].R = cameras[edge.from].R * R;
     }

     int num_images;
@@ -132,7 +132,7 @@ void BundleAdjuster::estimate(const vector &images, const vector
         cameras_.at<double>(i * 4, 0) = cameras[i].focal;
-        svd(cameras[i].M, SVD::FULL_UV);
+        svd(cameras[i].R, SVD::FULL_UV);
         Mat R = svd.u * svd.vt;
         if (determinant(R) < 0) R *= -1;
         Mat rvec;
@@ -207,19 +207,19 @@ void BundleAdjuster::estimate(const vector &images, const vector
         rvec.at<double>(0, 0) = cameras_.at<double>(i * 4 + 1, 0);
         rvec.at<double>(1, 0) = cameras_.at<double>(i * 4 + 2, 0);
         rvec.at<double>(2, 0) = cameras_.at<double>(i * 4 + 3, 0);
-        Rodrigues(rvec, cameras[i].M);
+        Rodrigues(rvec, cameras[i].R);
         Mat Mf;
-        cameras[i].M.convertTo(Mf, CV_32F);
-        cameras[i].M = Mf;
+        cameras[i].R.convertTo(Mf, CV_32F);
+        cameras[i].R = Mf;
     }

     // Normalize motion to center image
     Graph span_tree;
     vector<int> span_tree_centers;
     findMaxSpanningTree(num_images_, pairwise_matches, span_tree, span_tree_centers);
-    Mat R_inv = cameras[span_tree_centers[0]].M.inv();
+    Mat R_inv = cameras[span_tree_centers[0]].R.inv();
     for (int i = 0; i < num_images_; ++i)
-        cameras[i].M = R_inv * cameras[i].M;
+        cameras[i].R = R_inv * cameras[i].R;
 }

@@ -255,12 +255,12 @@ void BundleAdjuster::calcError(Mat &err)
             if (!matches_info.inliers_mask[k])
                 continue;

-            const DMatch& m = matches_info.matches[k];
+            const DMatch& r = matches_info.matches[k];

-            Point2d kp1 = features1.keypoints[m.queryIdx].pt;
+            Point2d kp1 = features1.keypoints[r.queryIdx].pt;
             kp1.x -= 0.5 * images_[i].cols;
             kp1.y -= 0.5 * images_[i].rows;
-            Point2d kp2 = features2.keypoints[m.trainIdx].pt;
+            Point2d kp2 = features2.keypoints[r.trainIdx].pt;
             kp2.x -= 0.5 * images_[j].cols;
             kp2.y -= 0.5 * images_[j].rows;
             double len1 = sqrt(kp1.x * kp1.x + kp1.y * kp1.y + f1 * f1);
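Note: the rotation propagation performed by CalcRotation in the hunk above reduces to R = K_from^-1 * H^-1 * K_to, chained along the spanning tree as cameras[edge.to].R = cameras[edge.from].R * R. The sketch below restates that expression outside the patch; the free-standing helper name relativeRotation is an illustrative assumption, not code from the repository.

// Sketch only: same math as CalcRotation in motion_estimators.cpp, with an
// assumed helper name. K_from/K_to are pinhole intrinsics built from the
// estimated focal lengths; H maps image `from` onto image `to` (CV_64F).
#include <opencv2/core/core.hpp>

static cv::Mat relativeRotation(const cv::Mat &H, double f_from, double f_to)
{
    cv::Mat K_from = cv::Mat::eye(3, 3, CV_64F);
    K_from.at<double>(0, 0) = f_from;
    K_from.at<double>(1, 1) = f_from;

    cv::Mat K_to = cv::Mat::eye(3, 3, CV_64F);
    K_to.at<double>(0, 0) = f_to;
    K_to.at<double>(1, 1) = f_to;

    // As in the patch: cameras[edge.to].R = cameras[edge.from].R * R
    return K_from.inv() * H.inv() * K_to;
}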
diff --git a/modules/stitching/motion_estimators.hpp b/modules/stitching/motion_estimators.hpp
index 896a7e9c9..97a064387 100644
--- a/modules/stitching/motion_estimators.hpp
+++ b/modules/stitching/motion_estimators.hpp
@@ -12,8 +12,9 @@ struct CameraParams
     CameraParams(const CameraParams& other);
     const CameraParams& operator =(const CameraParams& other);

-    double focal;
-    cv::Mat M, t;
+    double focal; // Focal length
+    cv::Mat R;    // Rotation
+    cv::Mat t;    // Translation
 };

diff --git a/modules/stitching/util_inl.hpp b/modules/stitching/util_inl.hpp
index 73a2d77de..f8bd58bbd 100644
--- a/modules/stitching/util_inl.hpp
+++ b/modules/stitching/util_inl.hpp
@@ -58,9 +58,9 @@ float normL2(const cv::Point3f& a, const cv::Point3f& b)

 static inline
-double normL2sq(const cv::Mat &m)
+double normL2sq(const cv::Mat &r)
 {
-    return m.dot(m);
+    return r.dot(r);
 }

diff --git a/modules/stitching/warpers.cpp b/modules/stitching/warpers.cpp
index d2eacdc9b..6b14c8a48 100644
--- a/modules/stitching/warpers.cpp
+++ b/modules/stitching/warpers.cpp
@@ -16,25 +16,25 @@
 }


-void ProjectorBase::setCameraMatrix(const Mat &M)
+void ProjectorBase::setCameraMatrix(const Mat &R)
 {
-    CV_Assert(M.size() == Size(3, 3));
-    CV_Assert(M.type() == CV_32F);
-    m[0] = M.at<float>(0, 0); m[1] = M.at<float>(0, 1); m[2] = M.at<float>(0, 2);
-    m[3] = M.at<float>(1, 0); m[4] = M.at<float>(1, 1); m[5] = M.at<float>(1, 2);
-    m[6] = M.at<float>(2, 0); m[7] = M.at<float>(2, 1); m[8] = M.at<float>(2, 2);
+    CV_Assert(R.size() == Size(3, 3));
+    CV_Assert(R.type() == CV_32F);
+    r[0] = R.at<float>(0, 0); r[1] = R.at<float>(0, 1); r[2] = R.at<float>(0, 2);
+    r[3] = R.at<float>(1, 0); r[4] = R.at<float>(1, 1); r[5] = R.at<float>(1, 2);
+    r[6] = R.at<float>(2, 0); r[7] = R.at<float>(2, 1); r[8] = R.at<float>(2, 2);

-    Mat M_inv = M.inv();
-    minv[0] = M_inv.at<float>(0, 0); minv[1] = M_inv.at<float>(0, 1); minv[2] = M_inv.at<float>(0, 2);
-    minv[3] = M_inv.at<float>(1, 0); minv[4] = M_inv.at<float>(1, 1); minv[5] = M_inv.at<float>(1, 2);
-    minv[6] = M_inv.at<float>(2, 0); minv[7] = M_inv.at<float>(2, 1); minv[8] = M_inv.at<float>(2, 2);
+    Mat Rinv = R.inv();
+    rinv[0] = Rinv.at<float>(0, 0); rinv[1] = Rinv.at<float>(0, 1); rinv[2] = Rinv.at<float>(0, 2);
+    rinv[3] = Rinv.at<float>(1, 0); rinv[4] = Rinv.at<float>(1, 1); rinv[5] = Rinv.at<float>(1, 2);
+    rinv[6] = Rinv.at<float>(2, 0); rinv[7] = Rinv.at<float>(2, 1); rinv[8] = Rinv.at<float>(2, 2);
 }


-Point Warper::operator ()(const Mat &src, float focal, const Mat& M, Mat &dst,
+Point Warper::operator ()(const Mat &src, float focal, const Mat& R, Mat &dst,
                           int interp_mode, int border_mode)
 {
-    return warp(src, focal, M, dst, interp_mode, border_mode);
+    return warp(src, focal, R, dst, interp_mode, border_mode);
 }
@@ -79,9 +79,9 @@ void SphericalWarper::detectResultRoi(Point &dst_tl, Point &dst_br)
     float br_uf = static_cast<float>(dst_br.x);
     float br_vf = static_cast<float>(dst_br.y);

-    float x = projector_.minv[1];
-    float y = projector_.minv[4];
-    float z = projector_.minv[7];
+    float x = projector_.rinv[1];
+    float y = projector_.rinv[4];
+    float z = projector_.rinv[7];
     if (y > 0.f)
     {
         x = projector_.focal * x / z + src_size_.width * 0.5f;
@@ -93,9 +93,9 @@
         }
     }

-    x = projector_.minv[1];
-    y = -projector_.minv[4];
-    z = projector_.minv[7];
+    x = projector_.rinv[1];
+    y = -projector_.rinv[4];
+    z = projector_.rinv[7];
     if (y > 0.f)
     {
         x = projector_.focal * x / z + src_size_.width * 0.5f;
diff --git a/modules/stitching/warpers.hpp b/modules/stitching/warpers.hpp
index 2a9c315ef..63e1e2b64 100644
--- a/modules/stitching/warpers.hpp
+++ b/modules/stitching/warpers.hpp
@@ -13,23 +13,23 @@ public:
     virtual ~Warper() {}

-    cv::Point operator ()(const cv::Mat &src, float focal, const cv::Mat& M, cv::Mat &dst,
+    cv::Point operator ()(const cv::Mat &src, float focal, const cv::Mat& R, cv::Mat &dst,
                           int interp_mode = cv::INTER_LINEAR, int border_mode = cv::BORDER_REFLECT);

 protected:
-    virtual cv::Point warp(const cv::Mat &src, float focal, const cv::Mat& M, cv::Mat &dst,
+    virtual cv::Point warp(const cv::Mat &src, float focal, const cv::Mat& R, cv::Mat &dst,
                            int interp_mode, int border_mode) = 0;
 };


 struct ProjectorBase
 {
-    void setCameraMatrix(const cv::Mat& M);
+    void setCameraMatrix(const cv::Mat& R);

     cv::Size size;
     float focal;
-    float m[9];
-    float minv[9];
+    float r[9];
+    float rinv[9];
     float scale;
 };
@@ -38,7 +38,7 @@ template <class P>
 class WarperBase : public Warper
 {
 protected:
-    cv::Point warp(const cv::Mat &src, float focal, const cv::Mat &M, cv::Mat &dst,
+    cv::Point warp(const cv::Mat &src, float focal, const cv::Mat &R, cv::Mat &dst,
                    int interp_mode, int border_mode);

     // Detects ROI of the destination image. It's correct for any projection.
diff --git a/modules/stitching/warpers_inl.hpp b/modules/stitching/warpers_inl.hpp
index 545bf3bc0..9a558c2b8 100644
--- a/modules/stitching/warpers_inl.hpp
+++ b/modules/stitching/warpers_inl.hpp
@@ -4,14 +4,14 @@
 #include "warpers.hpp" // Make your IDE see declarations


 template <class P>
-cv::Point WarperBase<P>::warp(const cv::Mat &src, float focal, const cv::Mat &M, cv::Mat &dst,
+cv::Point WarperBase<P>::warp(const cv::Mat &src, float focal, const cv::Mat &R, cv::Mat &dst,
                               int interp_mode, int border_mode)
 {
     src_size_ = src.size();
     projector_.size = src.size();
     projector_.focal = focal;
-    projector_.setCameraMatrix(M);
+    projector_.setCameraMatrix(R);

     cv::Point dst_tl, dst_br;
     detectResultRoi(dst_tl, dst_br);
@@ -106,9 +106,9 @@ void PlaneProjector::mapForward(float x, float y, float &u, float &v)
     x -= size.width * 0.5f;
     y -= size.height * 0.5f;

-    float x_ = m[0] * x + m[1] * y + m[2] * focal;
-    float y_ = m[3] * x + m[4] * y + m[5] * focal;
-    float z_ = m[6] * x + m[7] * y + m[8] * focal;
+    float x_ = r[0] * x + r[1] * y + r[2] * focal;
+    float y_ = r[3] * x + r[4] * y + r[5] * focal;
+    float z_ = r[6] * x + r[7] * y + r[8] * focal;

     u = scale * x_ / z_ * plane_dist;
     v = scale * y_ / z_ * plane_dist;
@@ -122,9 +122,9 @@ void PlaneProjector::mapBackward(float u, float v, float &x, float &y)
     float y_ = v / scale;
     float z;

-    x = minv[0] * x_ + minv[1] * y_ + minv[2] * plane_dist;
-    y = minv[3] * x_ + minv[4] * y_ + minv[5] * plane_dist;
-    z = minv[6] * x_ + minv[7] * y_ + minv[8] * plane_dist;
+    x = rinv[0] * x_ + rinv[1] * y_ + rinv[2] * plane_dist;
+    y = rinv[3] * x_ + rinv[4] * y_ + rinv[5] * plane_dist;
+    z = rinv[6] * x_ + rinv[7] * y_ + rinv[8] * plane_dist;

     x = focal * x / z + size.width * 0.5f;
     y = focal * y / z + size.height * 0.5f;
@@ -137,9 +137,9 @@ void SphericalProjector::mapForward(float x, float y, float &u, float &v)
     x -= size.width * 0.5f;
     y -= size.height * 0.5f;

-    float x_ = m[0] * x + m[1] * y + m[2] * focal;
-    float y_ = m[3] * x + m[4] * y + m[5] * focal;
-    float z_ = m[6] * x + m[7] * y + m[8] * focal;
+    float x_ = r[0] * x + r[1] * y + r[2] * focal;
+    float y_ = r[3] * x + r[4] * y + r[5] * focal;
+    float z_ = r[6] * x + r[7] * y + r[8] * focal;

     u = scale * atan2f(x_, z_);
     v = scale * (static_cast<float>(CV_PI) - acosf(y_ / sqrtf(x_ * x_ + y_ * y_ + z_ * z_)));
@@ -155,9 +155,9 @@ void SphericalProjector::mapBackward(float u, float v, float &x, float &y)
     float z_ = sinv * cosf(u / scale);
     float z;

-    x = minv[0] * x_ + minv[1] * y_ + minv[2] * z_;
-    y = minv[3] * x_ + minv[4] * y_ + minv[5] * z_;
-    z = minv[6] * x_ + minv[7] * y_ + minv[8] * z_;
+    x = rinv[0] * x_ + rinv[1] * y_ + rinv[2] * z_;
+    y = rinv[3] * x_ + rinv[4] * y_ + rinv[5] * z_;
+    z = rinv[6] * x_ + rinv[7] * y_ + rinv[8] * z_;

     x = focal * x / z + size.width * 0.5f;
     y = focal * y / z + size.height * 0.5f;
@@ -170,9 +170,9 @@ void CylindricalProjector::mapForward(float x, float y, float &u, float &v)
     x -= size.width * 0.5f;
     y -= size.height * 0.5f;

-    float x_ = m[0] * x + m[1] * y + m[2] * focal;
-    float y_ = m[3] * x + m[4] * y + m[5] * focal;
-    float z_ = m[6] * x + m[7] * y + m[8] * focal;
+    float x_ = r[0] * x + r[1] * y + r[2] * focal;
+    float y_ = r[3] * x + r[4] * y + r[5] * focal;
+    float z_ = r[6] * x + r[7] * y + r[8] * focal;

     u = scale * atan2f(x_, z_);
     v = scale * y_ / sqrtf(x_ * x_ + z_ * z_);
@@ -187,9 +187,9 @@ void CylindricalProjector::mapBackward(float u, float v, float &x, float &y)
     float z_ = cosf(u / scale);
     float z;

-    x = minv[0] * x_ + minv[1] * y_ + minv[2] * z_;
-    y = minv[3] * x_ + minv[4] * y_ + minv[5] * z_;
-    z = minv[6] * x_ + minv[7] * y_ + minv[8] * z_;
+    x = rinv[0] * x_ + rinv[1] * y_ + rinv[2] * z_;
+    y = rinv[3] * x_ + rinv[4] * y_ + rinv[5] * z_;
+    z = rinv[6] * x_ + rinv[7] * y_ + rinv[8] * z_;

     x = focal * x / z + size.width * 0.5f;
     y = focal * y / z + size.height * 0.5f;
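Note: as a reference for the r[]/rinv[] renaming, below is a minimal standalone sketch of the spherical forward mapping that SphericalProjector::mapForward implements. The SphericalSketch struct and its field layout are assumptions for illustration only and are not part of the patch; the formulas mirror warpers_inl.hpp.

// Sketch: spherical forward mapping with the renamed row-major rotation r[9],
// matching the formulas in warpers_inl.hpp (u is longitude, v is latitude).
#include <cmath>

struct SphericalSketch
{
    float focal;            // source focal length in pixels
    float scale;            // output scale factor
    float width, height;    // source image size
    float r[9];             // row-major rotation, as stored by setCameraMatrix

    void mapForward(float x, float y, float &u, float &v) const
    {
        x -= width * 0.5f;
        y -= height * 0.5f;

        // Rotate the viewing ray through (x, y, focal) into the panorama frame.
        float x_ = r[0] * x + r[1] * y + r[2] * focal;
        float y_ = r[3] * x + r[4] * y + r[5] * focal;
        float z_ = r[6] * x + r[7] * y + r[8] * focal;

        u = scale * std::atan2(x_, z_);
        v = scale * (3.14159265f - std::acos(y_ / std::sqrt(x_ * x_ + y_ * y_ + z_ * z_)));
    }
};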