From 07efb17d128b148083345b14c978aff7eaa940e4 Mon Sep 17 00:00:00 2001
From: Alexey Spizhevoy
Date: Mon, 19 Sep 2011 06:22:40 +0000
Subject: [PATCH] Fixed warper selection bug in stitching_detailed. Removed
 estimation of aspect ratio in BA in stitching to avoid stretching of input
 images. Did minor refactoring.

---
 .../stitching/detail/motion_estimators.hpp  | 11 +--
 modules/stitching/src/motion_estimators.cpp | 92 +++++++++----------
 modules/stitching/src/stitcher.cpp          |  2 +-
 samples/cpp/stitching_detailed.cpp          | 34 ++-----
 4 files changed, 55 insertions(+), 84 deletions(-)

diff --git a/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp b/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
index 2ec1a4e61..fcfb50e4c 100644
--- a/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
+++ b/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
@@ -80,15 +80,11 @@ private:
 };
 
 
-class CV_EXPORTS BundleAdjuster : public Estimator
+// Minimizes reprojection error
+class CV_EXPORTS BundleAdjusterReproj : public Estimator
 {
 public:
-    enum { NO, RAY_SPACE, FOCAL_RAY_SPACE };
-
-    BundleAdjuster(int cost_space = FOCAL_RAY_SPACE, float conf_thresh = 1.f)
-        : cost_space_(cost_space), conf_thresh_(conf_thresh) {}
-
-    Mat K;
+    BundleAdjusterReproj(float conf_thresh = 1.f) : conf_thresh_(conf_thresh) {}
 
 private:
     void estimate(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches,
@@ -104,7 +100,6 @@ private:
     Mat cameras_;
     std::vector<std::pair<int,int> > edges_;
 
-    int cost_space_;
     float conf_thresh_;
     Mat err_, err1_, err2_;
     Mat J_;
diff --git a/modules/stitching/src/motion_estimators.cpp b/modules/stitching/src/motion_estimators.cpp
index 4d5503bf3..9ebe34180 100644
--- a/modules/stitching/src/motion_estimators.cpp
+++ b/modules/stitching/src/motion_estimators.cpp
@@ -155,12 +155,10 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
 
 
 //////////////////////////////////////////////////////////////////////////////
 
-void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches,
-                              vector<CameraParams> &cameras)
+void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,
+                                    const vector<MatchesInfo> &pairwise_matches,
+                                    vector<CameraParams> &cameras)
 {
-    if (cost_space_ == NO)
-        return;
-
     LOG("Bundle adjustment");
     int64 t = getTickCount();
@@ -169,14 +167,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     pairwise_matches_ = &pairwise_matches[0];
 
     // Prepare focals and rotations
-    cameras_.create(num_images_ * 7, 1, CV_64F);
+    cameras_.create(num_images_ * 6, 1, CV_64F);
     SVD svd;
     for (int i = 0; i < num_images_; ++i)
     {
-        cameras_.at<double>(i * 7, 0) = cameras[i].focal;
-        cameras_.at<double>(i * 7 + 1, 0) = cameras[i].ppx;
-        cameras_.at<double>(i * 7 + 2, 0) = cameras[i].ppy;
-        cameras_.at<double>(i * 7 + 3, 0) = cameras[i].aspect;
+        cameras_.at<double>(i * 6, 0) = cameras[i].focal;
+        cameras_.at<double>(i * 6 + 1, 0) = cameras[i].ppx;
+        cameras_.at<double>(i * 6 + 2, 0) = cameras[i].ppy;
 
         svd(cameras[i].R, SVD::FULL_UV);
         Mat R = svd.u * svd.vt;
@@ -185,9 +182,9 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
         Mat rvec;
         Rodrigues(R, rvec);
         CV_Assert(rvec.type() == CV_32F);
-        cameras_.at<double>(i * 7 + 4, 0) = rvec.at<float>(0, 0);
-        cameras_.at<double>(i * 7 + 5, 0) = rvec.at<float>(1, 0);
-        cameras_.at<double>(i * 7 + 6, 0) = rvec.at<float>(2, 0);
+        cameras_.at<double>(i * 6 + 3, 0) = rvec.at<float>(0, 0);
+        cameras_.at<double>(i * 6 + 4, 0) = rvec.at<float>(1, 0);
+        cameras_.at<double>(i * 6 + 5, 0) = rvec.at<float>(2, 0);
     }
 
     // Select only consistent image pairs for futher adjustment
@@ -207,7 +204,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     for (size_t i = 0; i < edges_.size(); ++i)
         total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ + edges_[i].second].num_inliers);
 
-    CvLevMarq solver(num_images_ * 7, total_num_matches_ * 2,
+    CvLevMarq solver(num_images_ * 6, total_num_matches_ * 2,
                      cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON));
 
     CvMat matParams = cameras_;
@@ -250,14 +247,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     // Obtain global motion
     for (int i = 0; i < num_images_; ++i)
     {
-        cameras[i].focal = cameras_.at<double>(i * 7, 0);
-        cameras[i].ppx = cameras_.at<double>(i * 7 + 1, 0);
-        cameras[i].ppy = cameras_.at<double>(i * 7 + 2, 0);
-        cameras[i].aspect = cameras_.at<double>(i * 7 + 3, 0);
+        cameras[i].focal = cameras_.at<double>(i * 6, 0);
+        cameras[i].ppx = cameras_.at<double>(i * 6 + 1, 0);
+        cameras[i].ppy = cameras_.at<double>(i * 6 + 2, 0);
         Mat rvec(3, 1, CV_64F);
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);
         Rodrigues(rvec, cameras[i].R);
         Mat Mf;
         cameras[i].R.convertTo(Mf, CV_32F);
@@ -276,7 +272,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
 }
 
 
-void BundleAdjuster::calcError(Mat &err)
+void BundleAdjusterReproj::calcError(Mat &err)
 {
     err.create(total_num_matches_ * 2, 1, CV_64F);
 
@@ -285,28 +281,26 @@ void BundleAdjuster::calcError(Mat &err)
     {
         int i = edges_[edge_idx].first;
         int j = edges_[edge_idx].second;
-        double f1 = cameras_.at<double>(i * 7, 0);
-        double f2 = cameras_.at<double>(j * 7, 0);
-        double ppx1 = cameras_.at<double>(i * 7 + 1, 0);
-        double ppx2 = cameras_.at<double>(j * 7 + 1, 0);
-        double ppy1 = cameras_.at<double>(i * 7 + 2, 0);
-        double ppy2 = cameras_.at<double>(j * 7 + 2, 0);
-        double a1 = cameras_.at<double>(i * 7 + 3, 0);
-        double a2 = cameras_.at<double>(j * 7 + 3, 0);
+        double f1 = cameras_.at<double>(i * 6, 0);
+        double f2 = cameras_.at<double>(j * 6, 0);
+        double ppx1 = cameras_.at<double>(i * 6 + 1, 0);
+        double ppx2 = cameras_.at<double>(j * 6 + 1, 0);
+        double ppy1 = cameras_.at<double>(i * 6 + 2, 0);
+        double ppy2 = cameras_.at<double>(j * 6 + 2, 0);
 
         double R1[9];
         Mat R1_(3, 3, CV_64F, R1);
         Mat rvec(3, 1, CV_64F);
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);
         Rodrigues(rvec, R1_);
 
         double R2[9];
         Mat R2_(3, 3, CV_64F, R2);
-        rvec.at<double>(0, 0) = cameras_.at<double>(j * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(j * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(j * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(j * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(j * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(j * 6 + 5, 0);
         Rodrigues(rvec, R2_);
 
         const ImageFeatures& features1 = features_[i];
@@ -315,11 +309,11 @@ void BundleAdjuster::calcError(Mat &err)
 
         Mat_<double> K1 = Mat::eye(3, 3, CV_64F);
         K1(0,0) = f1; K1(0,2) = ppx1;
-        K1(1,1) = f1*a1; K1(1,2) = ppy1;
+        K1(1,1) = f1; K1(1,2) = ppy1;
 
         Mat_<double> K2 = Mat::eye(3, 3, CV_64F);
         K2(0,0) = f2; K2(0,2) = ppx2;
-        K2(1,1) = f2*a2; K2(1,2) = ppy2;
+        K2(1,1) = f2; K2(1,2) = ppy2;
 
         Mat_<double> H = K2 * R2_.inv() * R1_ * K1.inv();
 
@@ -329,8 +323,8 @@ void BundleAdjuster::calcError(Mat &err)
                 continue;
 
             const DMatch& m = matches_info.matches[k];
-            Point2d p1 = features1.keypoints[m.queryIdx].pt;
-            Point2d p2 = features2.keypoints[m.trainIdx].pt;
+            Point2f p1 = features1.keypoints[m.queryIdx].pt;
+            Point2f p2 = features2.keypoints[m.trainIdx].pt;
             double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2);
             double y = H(1,0)*p1.x + H(1,1)*p1.y + H(1,2);
             double z = H(2,0)*p1.x + H(2,1)*p1.y + H(2,2);
@@ -343,24 +337,24 @@ void BundleAdjuster::calcError(Mat &err)
 }
 
 
-void BundleAdjuster::calcJacobian()
+void BundleAdjusterReproj::calcJacobian()
 {
-    J_.create(total_num_matches_ * 2, num_images_ * 7, CV_64F);
+    J_.create(total_num_matches_ * 2, num_images_ * 6, CV_64F);
 
     double val;
-    const double step = 1e-3;
+    const double step = 1e-4;
 
     for (int i = 0; i < num_images_; ++i)
     {
-        for (int j = 0; j < 7; ++j)
+        for (int j = 0; j < 6; ++j)
         {
-            val = cameras_.at<double>(i * 7 + j, 0);
-            cameras_.at<double>(i * 7+ j, 0) = val - step;
+            val = cameras_.at<double>(i * 6 + j, 0);
+            cameras_.at<double>(i * 6 + j, 0) = val - step;
             calcError(err1_);
-            cameras_.at<double>(i * 7 + j, 0) = val + step;
+            cameras_.at<double>(i * 6 + j, 0) = val + step;
             calcError(err2_);
-            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 7 + j));
-            cameras_.at<double>(i * 7 + j, 0) = val;
+            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 6 + j));
+            cameras_.at<double>(i * 6 + j, 0) = val;
         }
     }
 }
diff --git a/modules/stitching/src/stitcher.cpp b/modules/stitching/src/stitcher.cpp
index 45a9f974b..f8362f6d6 100644
--- a/modules/stitching/src/stitcher.cpp
+++ b/modules/stitching/src/stitcher.cpp
@@ -189,7 +189,7 @@ Stitcher::Status Stitcher::stitch(InputArray imgs_, OutputArray pano_)
         LOGLN("Initial intrinsic parameters #" << indices[i]+1 << ":\n " << cameras[i].K());
     }
 
-    detail::BundleAdjuster adjuster(detail::BundleAdjuster::FOCAL_RAY_SPACE, conf_thresh_);
+    detail::BundleAdjusterReproj adjuster(conf_thresh_);
     adjuster(features, pairwise_matches, cameras);
 
     // Find median focal length
diff --git a/samples/cpp/stitching_detailed.cpp b/samples/cpp/stitching_detailed.cpp
index fc5653c91..5ed3d290e 100644
--- a/samples/cpp/stitching_detailed.cpp
+++ b/samples/cpp/stitching_detailed.cpp
@@ -79,8 +79,6 @@ void printUsage()
         "  --conf_thresh <float>\n"
         "      Threshold for two images are from the same panorama confidence.\n"
        "      The default is 1.0.\n"
-        "  --ba (no|ray|focal_ray)\n"
-        "      Bundle adjustment cost function. The default is 'focal_ray'.\n"
         "  --wave_correct (no|yes)\n"
         "      Perform wave effect correction. The default is 'yes'.\n"
         "  --save_graph <file_name>\n"
@@ -115,7 +113,6 @@ bool try_gpu = false;
 double work_megapix = 0.6;
 double seam_megapix = 0.1;
 double compose_megapix = -1;
-int ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
 float conf_thresh = 1.f;
 bool wave_correct = true;
 bool save_graph = false;
@@ -184,21 +181,6 @@ int parseCmdArgs(int argc, char** argv)
             match_conf = static_cast<float>(atof(argv[i + 1]));
             i++;
         }
-        else if (string(argv[i]) == "--ba")
-        {
-            if (string(argv[i + 1]) == "no")
-                ba_space = BundleAdjuster::NO;
-            else if (string(argv[i + 1]) == "ray")
-                ba_space = BundleAdjuster::RAY_SPACE;
-            else if (string(argv[i + 1]) == "focal_ray")
-                ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
-            else
-            {
-                cout << "Bad bundle adjustment space\n";
-                return -1;
-            }
-            i++;
-        }
         else if (string(argv[i]) == "--conf_thresh")
         {
             conf_thresh = static_cast<float>(atof(argv[i + 1]));
             i++;
         }
@@ -431,14 +413,14 @@ int main(int argc, char* argv[])
         LOGLN("Initial focal length #" << indices[i]+1 << ": " << cameras[i].focal);
     }
 
-    BundleAdjuster adjuster(ba_space, conf_thresh);
+    BundleAdjusterReproj adjuster(conf_thresh);
     adjuster(features, pairwise_matches, cameras);
 
     // Find median focal length
     vector<double> focals;
     for (size_t i = 0; i < cameras.size(); ++i)
     {
-        LOGLN("Camera #" << indices[i]+1 << " focal length: " << cameras[i].focal);
+        LOGLN("Camera #" << indices[i]+1 << ":\n" << cameras[i].K());
         focals.push_back(cameras[i].focal);
     }
     nth_element(focals.begin(), focals.begin() + focals.size()/2, focals.end());
@@ -476,16 +458,16 @@ int main(int argc, char* argv[])
 #ifndef ANDROID
     if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0)
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
     }
     else
 #endif
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
     }
 
     if (warper_creator.empty())
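
Usage note (not part of the patch): the sketch below shows how the refactored pieces fit together after this change. It is only a hedged example assuming the 2.3-era stitching module layout; the helper name refineCamerasAndPickWarper and the exact include set are illustrative, and features, pairwise_matches and the initial cameras are expected to come from the feature finder / matcher / HomographyBasedEstimator steps that precede bundle adjustment in samples/cpp/stitching_detailed.cpp.

#include <string>
#include <vector>

#include "opencv2/core/core.hpp"
#include "opencv2/stitching/detail/motion_estimators.hpp"
#include "opencv2/stitching/warpers.hpp"

// Refine camera parameters with the reprojection-error bundle adjuster and pick a
// CPU warper the way the fixed sample does. The GPU branch of the sample mirrors
// this with PlaneWarperGpu/CylindricalWarperGpu/SphericalWarperGpu and is taken
// only when gpu::getCudaEnabledDeviceCount() > 0.
cv::Ptr<cv::WarperCreator> refineCamerasAndPickWarper(
        const std::vector<cv::detail::ImageFeatures> &features,
        const std::vector<cv::detail::MatchesInfo> &pairwise_matches,
        std::vector<cv::detail::CameraParams> &cameras,
        const std::string &warp_type, float conf_thresh)
{
    // No cost-space flag any more: the adjuster always minimizes reprojection
    // error over six parameters per camera (focal, ppx, ppy and a 3x1 rotation
    // vector); the aspect ratio is no longer estimated, so input images are not
    // stretched.
    cv::detail::BundleAdjusterReproj adjuster(conf_thresh);
    adjuster(features, pairwise_matches, cameras);

    cv::Ptr<cv::WarperCreator> warper_creator;
    if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
    else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
    else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
    return warper_creator;
}

From the returned creator the sample then builds the actual rotation warper and warps each image, a part of the pipeline this patch leaves unchanged.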