updated focal estimation (opencv_stitching) + refactoring
commit ef966e15c1
parent 5c5cd449b3
@@ -76,35 +76,46 @@ void focalsFromHomography(const Mat& H, double &f0, double &f1, bool &f0_ok, boo
 }
 
 
-double estimateFocal(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches)
+void estimateFocal(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches,
+                   vector<double> &focals)
 {
     const int num_images = static_cast<int>(features.size());
+    focals.resize(num_images);
 
-    vector<double> focals;
-    for (int src_idx = 0; src_idx < num_images; ++src_idx)
+    vector<vector<double> > all_focals(num_images);
+    for (int i = 0; i < num_images; ++i)
     {
-        for (int dst_idx = 0; dst_idx < num_images; ++dst_idx)
+        for (int j = 0; j < num_images; ++j)
         {
-            const MatchesInfo &m = pairwise_matches[src_idx*num_images + dst_idx];
+            const MatchesInfo &m = pairwise_matches[i*num_images + j];
             if (m.H.empty())
                 continue;
 
             double f0, f1;
             bool f0ok, f1ok;
             focalsFromHomography(m.H, f0, f1, f0ok, f1ok);
-            if (f0ok && f1ok) focals.push_back(sqrt(f0*f1));
+            if (f0ok && f1ok)
+            {
+                all_focals[i].push_back(f0);
+                all_focals[j].push_back(f1);
+            }
         }
     }
 
-    if (static_cast<int>(focals.size()) >= num_images * (num_images - 1) / 2)
-    {
-        nth_element(focals.begin(), focals.end(), focals.begin() + focals.size()/2);
-        return focals[focals.size()/2];
-    }
-
-    LOGLN("Can't estimate focal length, will use naive approach");
-    double focals_sum = 0;
     for (int i = 0; i < num_images; ++i)
-        focals_sum += features[i].img_size.width + features[i].img_size.height;
-    return focals_sum / num_images;
+    {
+        if (all_focals[i].size() > 0)
+        {
+            nth_element(all_focals[i].begin(), all_focals[i].end(),
+                        all_focals[i].begin() + all_focals[i].size()/2);
+            focals[i] = all_focals[i][all_focals[i].size()/2];
+        }
+        else
+        {
+            LOGLN("Can't estimate focal length #" << i << ", will use naive approach");
+            focals[i] = features[i].img_size.width + features[i].img_size.height;
+        }
+    }
 }

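A self-contained sketch of the per-image median selection used above, based on std::nth_element (illustrative only, not part of the commit):

    #include <algorithm>
    #include <vector>

    // Picks the middle element after partially ordering a copy of the values.
    // Sketch only: assumes `focals` is non-empty.
    static double medianFocal(std::vector<double> focals)
    {
        std::nth_element(focals.begin(), focals.begin() + focals.size()/2, focals.end());
        return focals[focals.size()/2];
    }
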
@@ -49,6 +49,7 @@
 // by Heung-Yeung Shum and Richard Szeliski.
 void focalsFromHomography(const cv::Mat &H, double &f0, double &f1, bool &f0_ok, bool &f1_ok);
 
-double estimateFocal(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches);
+void estimateFocal(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches,
+                   std::vector<double> &focals);
 
 #endif // __OPENCV_AUTOCALIB_HPP__

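With the new signature, estimateFocal fills one focal length per input image instead of returning a single shared value. A minimal caller sketch (names and setup here are illustrative, not part of the commit):

    // Sketch: assumes `features` and `pairwise_matches` were already computed
    // (e.g. by a features finder and a FeaturesMatcher).
    void logEstimatedFocals(const std::vector<ImageFeatures> &features,
                            const std::vector<MatchesInfo> &pairwise_matches)
    {
        std::vector<double> focals;
        estimateFocal(features, pairwise_matches, focals);   // focals.size() == features.size()

        for (size_t i = 0; i < focals.size(); ++i)
            LOGLN("Focal length of image #" << i << " ~ " << focals[i]);
    }
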
@@ -185,6 +185,11 @@ void MultiBandBlender::feed(const Mat &img, const Mat &mask, Point tl)
     CV_Assert(img.type() == CV_16SC3);
     CV_Assert(mask.type() == CV_8U);
 
+    //int gap = 10 * (1 << num_bands_);
+    //Point tl_new(max(dst_roi_.x, tl.x - gap),
+    //             max(dst_roi_.y, tl.y - gap));
+    //Point br_new(min(dst_roi_.br().x, tl.x + img.cols + gap),
+    //             min(dst_roi_.br().y, tl.y + img.rows + gap));
     Point tl_new(dst_roi_.tl());
     Point br_new(dst_roi_.br());
     int top = tl.y - tl_new.y;

@@ -215,8 +220,10 @@ void MultiBandBlender::feed(const Mat &img, const Mat &mask, Point tl)
     // Add weighted layer of the source image to the final Laplacian pyramid layer
     for (int i = 0; i <= num_bands_; ++i)
     {
-        int dx = 0;//(tl_new.x >> i) - (dst_roi_.x >> i);
-        int dy = 0;//(tl_new.y >> i) - (dst_roi_.y >> i);
+        int dx = 0;
+        int dy = 0;
+        //int dx = (tl_new.x >> i) - (dst_roi_.x >> i);
+        //int dy = (tl_new.y >> i) - (dst_roi_.y >> i);
 
         for (int y = 0; y < src_pyr_laplace[i].rows; ++y)
         {

@@ -290,6 +290,9 @@ void FeaturesMatcher::operator ()(const vector<ImageFeatures> &features, vector<
 
 namespace
 {
+    // These two classes are aimed to find features matches only, not to
+    // estimate homography
+
     class CpuMatcher : public FeaturesMatcher
     {
     public:

@@ -300,10 +303,24 @@ namespace
         float match_conf_;
     };
 
+
+    class GpuMatcher : public FeaturesMatcher
+    {
+    public:
+        GpuMatcher(float match_conf) : match_conf_(match_conf) {}
+        void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info);
+
+    private:
+        float match_conf_;
+        GpuMat descriptors1_;
+        GpuMat descriptors2_;
+        GpuMat train_idx_, distance_, all_dist_;
+    };
+
+
     void CpuMatcher::match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
     {
         matches_info.matches.clear();
 
         BruteForceMatcher< L2<float> > matcher;
         vector< vector<DMatch> > pair_matches;
 
@@ -333,33 +350,18 @@ namespace
         }
     }
 
-    class GpuMatcher : public FeaturesMatcher
-    {
-    public:
-        GpuMatcher(float match_conf) : match_conf_(match_conf) {}
-        void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info);
-
-    private:
-        float match_conf_;
-        GpuMat descriptors1_;
-        GpuMat descriptors2_;
-        GpuMat trainIdx_, distance_, allDist_;
-    };
-
-
     void GpuMatcher::match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
     {
         matches_info.matches.clear();
 
-        BruteForceMatcher_GPU< L2<float> > matcher;
-
         descriptors1_.upload(features1.descriptors);
         descriptors2_.upload(features2.descriptors);
+        BruteForceMatcher_GPU< L2<float> > matcher;
         vector< vector<DMatch> > pair_matches;
 
         // Find 1->2 matches
-        matcher.knnMatch(descriptors1_, descriptors2_, trainIdx_, distance_, allDist_, 2);
-        matcher.knnMatchDownload(trainIdx_, distance_, pair_matches);
+        matcher.knnMatch(descriptors1_, descriptors2_, train_idx_, distance_, all_dist_, 2);
+        matcher.knnMatchDownload(train_idx_, distance_, pair_matches);
         for (size_t i = 0; i < pair_matches.size(); ++i)
         {
             if (pair_matches[i].size() < 2)

@@ -376,8 +378,8 @@ namespace
 
         // Find 2->1 matches
         pair_matches.clear();
-        matcher.knnMatch(descriptors2_, descriptors1_, trainIdx_, distance_, allDist_, 2);
-        matcher.knnMatchDownload(trainIdx_, distance_, pair_matches);
+        matcher.knnMatch(descriptors2_, descriptors1_, train_idx_, distance_, all_dist_, 2);
+        matcher.knnMatchDownload(train_idx_, distance_, pair_matches);
         for (size_t i = 0; i < pair_matches.size(); ++i)
         {
             if (pair_matches[i].size() < 2)

@@ -392,7 +394,9 @@ namespace
                 matches_info.matches.push_back(DMatch(m0.trainIdx, m0.queryIdx, m0.distance));
         }
     }
-}
+} // anonymous namespace
+
+
 
 BestOf2NearestMatcher::BestOf2NearestMatcher(bool try_use_gpu, float match_conf, int num_matches_thresh1, int num_matches_thresh2)
 {

@@ -97,16 +97,18 @@ class FeaturesMatcher
 {
 public:
     virtual ~FeaturesMatcher() {}
-    void operator ()(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
-    {
-        match(features1, features2, matches_info);
-    }
+
+    void operator ()(const ImageFeatures &features1, const ImageFeatures &features2,
+                     MatchesInfo& matches_info) { match(features1, features2, matches_info); }
+
     void operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches);
 
     bool isThreadSafe() const { return is_thread_safe_; }
 
 protected:
     FeaturesMatcher(bool is_thread_safe = false) : is_thread_safe_(is_thread_safe) {}
-    virtual void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info) = 0;
+
+    virtual void match(const ImageFeatures &features1, const ImageFeatures &features2,
+                       MatchesInfo& matches_info) = 0;
 
     bool is_thread_safe_;
 };

@@ -115,14 +117,14 @@ protected:
 class BestOf2NearestMatcher : public FeaturesMatcher
 {
 public:
-    BestOf2NearestMatcher(bool try_use_gpu = true, float match_conf = 0.55f, int num_matches_thresh1 = 6, int num_matches_thresh2 = 6);
+    BestOf2NearestMatcher(bool try_use_gpu = true, float match_conf = 0.55f, int num_matches_thresh1 = 6,
+                          int num_matches_thresh2 = 6);
 
 protected:
     void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo &matches_info);
 
     int num_matches_thresh1_;
     int num_matches_thresh2_;
 
     cv::Ptr<FeaturesMatcher> impl_;
 };

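For context, a sketch of how the matcher interface above is typically driven after this refactoring (setup is illustrative, not part of the commit):

    // Sketch: assumes `features` was filled by a features finder and holds >= 2 images.
    void matchAllPairs(const std::vector<ImageFeatures> &features)
    {
        std::vector<MatchesInfo> pairwise_matches;

        BestOf2NearestMatcher matcher(/*try_use_gpu=*/false, /*match_conf=*/0.55f);
        matcher(features, pairwise_matches);      // pairwise N x N matching

        // Single-pair form of the call operator:
        MatchesInfo info;
        matcher(features[0], features[1], info);
    }
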
@@ -109,10 +109,11 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
     const int num_images = static_cast<int>(features.size());
 
     // Estimate focal length and set it for all cameras
-    double focal = estimateFocal(features, pairwise_matches);
+    vector<double> focals;
+    estimateFocal(features, pairwise_matches, focals);
     cameras.resize(num_images);
     for (int i = 0; i < num_images; ++i)
-        cameras[i].focal = focal;
+        cameras[i].focal = focals[i];
 
     // Restore global motion
     Graph span_tree;