- hide the patch size from the user (only one is used now, as training was only done at that scale)
- enable a border threshold (edge_threshold), just like for SIFT, so that calling ORB directly, or the descriptor after the feature detector, gives the same number of features
parent ce94e4a994
commit 211c112b91
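For orientation, a minimal usage sketch of the API after this change. Only the constructor signatures visible in the hunks below are relied on; the cv::ORB::CommonParams nesting, the header paths, the detection-only operator() overload, the feature count and the image file are assumptions from the 2.3-era API, not part of this commit.

    #include <opencv2/features2d/features2d.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <vector>

    int main()
    {
        // Before: CommonParams(scale_factor, n_levels, first_level, patch_size)
        // After:  CommonParams(scale_factor, n_levels, edge_threshold, first_level)
        // The patch size is no longer exposed; 31 is fixed internally.
        cv::ORB::CommonParams params(1.2f, 3, /*edge_threshold=*/31, /*first_level=*/0);
        cv::ORB orb(500, params);                  // ORB(n_features, detector_params)

        cv::Mat image = cv::imread("lena.png", 0); // hypothetical grayscale input
        std::vector<cv::KeyPoint> keypoints;
        orb(image, cv::Mat(), keypoints);          // detect only
        return 0;
    }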
@@ -231,21 +231,13 @@ Class for extracting ORB features and descriptors from an image ::
    {
    public:
-       /** The patch sizes that can be used (only one right now) */
-       enum PatchSize
-       {
-           PATCH_LEARNED_31 = 31
-       };
-
        struct CommonParams
        {
-           static const unsigned int DEFAULT_N_LEVELS = 3;
-           static const float DEFAULT_SCALE_FACTOR = 1.2;
-           static const unsigned int DEFAULT_FIRST_LEVEL = 0;
-           static const PatchSize DEFAULT_PATCH_SIZE = PATCH_LEARNED_31;
+           enum { DEFAULT_N_LEVELS = 3, DEFAULT_FIRST_LEVEL = 0};

            /** default constructor */
-           CommonParams(float scale_factor = DEFAULT_SCALE_FACTOR, unsigned int n_levels = DEFAULT_N_LEVELS,
-                        unsigned int first_level = DEFAULT_FIRST_LEVEL, PatchSize patch_size = DEFAULT_PATCH_SIZE);
+           CommonParams(float scale_factor = 1.2f, unsigned int n_levels = DEFAULT_N_LEVELS,
+                        int edge_threshold = 31, unsigned int first_level = DEFAULT_FIRST_LEVEL);
            void read(const FileNode& fn);
            void write(FileStorage& fs) const;
@@ -257,8 +249,8 @@ Class for extracting ORB features and descriptors from an image ::
             * if 1, that means we will also look at the image scale_factor_ times bigger
             */
            unsigned int first_level_;
-           /** The size of the patch that will be used for orientation and comparisons */
-           PatchSize patch_size_;
+           /** How far from the boundary the points should be */
+           int edge_threshold_;
        };

        // c:function::default constructor
@@ -420,14 +420,16 @@ public:

    struct CV_EXPORTS CommonParams
    {
-       enum { DEFAULT_N_LEVELS = 3, DEFAULT_FIRST_LEVEL = 0, DEFAULT_PATCH_SIZE = 31 };
+       enum { DEFAULT_N_LEVELS = 3, DEFAULT_FIRST_LEVEL = 0};

        /** default constructor */
-       CommonParams(float scale_factor = 1.2f, unsigned int n_levels = DEFAULT_N_LEVELS,
-                    unsigned int first_level = DEFAULT_FIRST_LEVEL, int patch_size = DEFAULT_PATCH_SIZE) :
+       CommonParams(float scale_factor = 1.2f, unsigned int n_levels = DEFAULT_N_LEVELS, int edge_threshold = 31,
+                    unsigned int first_level = DEFAULT_FIRST_LEVEL) :
            scale_factor_(scale_factor), n_levels_(n_levels), first_level_(first_level >= n_levels ? 0 : first_level),
-           patch_size_(patch_size)
+           edge_threshold_(edge_threshold)
        {
+           // No other patch size is supported right now
+           patch_size_ = 31;
        }
        void read(const FileNode& fn);
        void write(FileStorage& fs) const;
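A hedged illustration of what the new inline constructor does; the cv::ORB::CommonParams nesting and the public visibility of the members are assumptions, and the values are arbitrary. The requested first_level falls back to 0 whenever it is at or beyond n_levels, edge_threshold is stored as given, and patch_size_ is always forced to 31 in the body.

    #include <opencv2/features2d/features2d.hpp>
    #include <cassert>

    int main()
    {
        // Ask for first_level = 5 with only 3 pyramid levels.
        cv::ORB::CommonParams p(1.2f, /*n_levels=*/3, /*edge_threshold=*/31, /*first_level=*/5);
        assert(p.first_level_ == 0);      // clamped by the ternary in the initializer list
        assert(p.edge_threshold_ == 31);  // stored verbatim
        return 0;
    }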
@@ -440,6 +442,11 @@ public:
         * if 1, that means we will also look at the image scale_factor_ times bigger
         */
        unsigned int first_level_;
+       /** How far from the boundary the points should be */
+       int edge_threshold_;
+
+       friend class ORB;
+   protected:
        /** The size of the patch that will be used for orientation and comparisons */
        int patch_size_;
    };
@@ -469,17 +469,18 @@ ORB::ORB(size_t n_features, const CommonParams & detector_params) :
    params_(detector_params), n_features_(n_features)
{
    // fill the extractors and descriptors for the corresponding scales
-   int n_desired_features_per_scale = cvRound(
-       n_features / ((1.0 / std::pow(params_.scale_factor_,
-           2.f * params_.n_levels_) - 1) / (1.0
-           / std::pow(params_.scale_factor_, 2) - 1)));
+   float factor = 1.0 / params_.scale_factor_ / params_.scale_factor_;
+   int n_desired_features_per_scale = cvRound(n_features / ((std::pow(factor, params_.n_levels_) - 1) / (factor - 1)));
    n_features_per_level_.resize(detector_params.n_levels_);
    for (unsigned int level = 0; level < detector_params.n_levels_; level++)
    {
        n_features_per_level_[level] = n_desired_features_per_scale;
-       n_desired_features_per_scale = cvRound(n_desired_features_per_scale / std::pow(params_.scale_factor_, 2));
+       n_desired_features_per_scale = cvRound(n_desired_features_per_scale * factor);
    }

+   // Make sure we forget about what is too close to the boundary
+   params_.edge_threshold_ = std::max(params_.edge_threshold_, params_.patch_size_ + kKernelWidth / 2 + 2);
+
    // pre-compute the end of a row in a circular patch
    half_patch_size_ = params_.patch_size_ / 2;
    u_max_.resize(half_patch_size_ + 1);
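The rewritten budget computation above distributes n_features over the pyramid as a geometric series with ratio factor = 1 / scale_factor^2, so each coarser level gets proportionally fewer keypoints. A stand-alone sketch of that arithmetic, with illustrative numbers that are not part of the commit:

    #include <cmath>
    #include <cstdio>
    #include <vector>

    int main()
    {
        const int n_features = 500;          // illustrative total budget
        const float scale_factor = 1.2f;
        const unsigned int n_levels = 3;

        // Same idea as the new constructor: solve a * (1 + factor + ... + factor^(n-1)) = n_features
        float factor = 1.0f / (scale_factor * scale_factor);
        int n_desired = (int)(n_features / ((std::pow(factor, (float)n_levels) - 1) / (factor - 1)) + 0.5f);

        std::vector<int> n_features_per_level(n_levels);
        for (unsigned int level = 0; level < n_levels; ++level)
        {
            n_features_per_level[level] = n_desired;
            std::printf("level %u: %d features\n", level, n_desired);
            n_desired = (int)(n_desired * factor + 0.5f);   // shrink the budget for the next level
        }
        return 0;
    }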
@@ -563,12 +564,16 @@ void ORB::operator()(const cv::Mat &image, const cv::Mat &mask, std::vector<cv::
    }

    // Pre-compute the keypoints (we keep the best over all scales, so this has to be done beforehand
-   std::vector<std::vector<cv::KeyPoint> > all_keypoints;
+   std::vector < std::vector<cv::KeyPoint> > all_keypoints;
    if (do_keypoints)
+       // Get keypoints, those will be far enough from the border that no check will be required for the descriptor
        computeKeyPoints(image_pyramid, mask_pyramid, all_keypoints);
    else
    {
-       // Cluster the input keypoints
+       // Remove keypoints very close to the border
+       cv::KeyPointsFilter::runByImageBorder(keypoints_in_out, image.size(), params_.edge_threshold_);
+
+       // Cluster the input keypoints depending on the level they were computed at
        all_keypoints.resize(params_.n_levels_);
        for (std::vector<cv::KeyPoint>::iterator keypoint = keypoints_in_out.begin(), keypoint_end = keypoints_in_out.end(); keypoint
             != keypoint_end; ++keypoint)
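The runByImageBorder call added above is what puts user-provided keypoints through the same border filtering as detected ones. A small self-contained sketch of that filter in isolation; the image size, keypoint positions and threshold are made-up values:

    #include <opencv2/features2d/features2d.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        std::vector<cv::KeyPoint> keypoints;
        keypoints.push_back(cv::KeyPoint(5.f, 5.f, 7.f));      // too close to the border
        keypoints.push_back(cv::KeyPoint(100.f, 80.f, 7.f));   // safely inside

        const int edge_threshold = 31;
        cv::KeyPointsFilter::runByImageBorder(keypoints, cv::Size(200, 160), edge_threshold);

        // Only the interior point survives, so the descriptor stage no longer
        // has to drop anything on its own.
        std::printf("%d keypoints remain\n", (int)keypoints.size());
        return 0;
    }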
@@ -651,6 +656,9 @@ void ORB::computeKeyPoints(const std::vector<cv::Mat>& image_pyramid, const std:
    std::vector<cv::KeyPoint> all_keypoints;
    all_keypoints.reserve(2 * n_features_);

+   // half_patch_size_ for orientation, 4 for Harris
+   unsigned int edge_threshold = std::max(std::max(half_patch_size_, 4), params_.edge_threshold_);
+
    for (unsigned int level = 0; level < params_.n_levels_; ++level)
    {
        all_keypoints_out[level].reserve(n_features_per_level_[level]);
@@ -662,9 +670,8 @@ void ORB::computeKeyPoints(const std::vector<cv::Mat>& image_pyramid, const std:
        fd.detect(image_pyramid[level], keypoints, mask_pyramid[level]);

        // Remove keypoints very close to the border
-       // half_patch_size_ for orientation, 4 for Harris
-       unsigned int border_safety = std::max(half_patch_size_, 4);
-       cv::KeyPointsFilter::runByImageBorder(keypoints, image_pyramid[level].size(), border_safety);
+       cv::KeyPointsFilter::runByImageBorder(keypoints, image_pyramid[level].size(), edge_threshold);

        // Keep more points than necessary as FAST does not give amazing corners
        cull(keypoints, 2 * n_features_per_level_[level]);
@@ -817,10 +824,6 @@ void ORB::computeDescriptors(const cv::Mat& image, const cv::Mat& integral_image
    if (image.type() != CV_8UC1)
        cv::cvtColor(image, gray_image, CV_BGR2GRAY);

-   int border_safety = params_.patch_size_ + kKernelWidth / 2 + 2;
-   //Remove keypoints very close to the border
-   cv::KeyPointsFilter::runByImageBorder(keypoints, image.size(), border_safety);
-
    // Get the patterns to apply
    OrbPatterns* patterns = patterns_[level];
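Putting the pieces together, the behaviour the commit message asks for can be sketched like this. The operator() overloads, including the final useProvidedKeypoints flag, are assumed from the 2.3-era cv::ORB interface and do not appear in the hunks above, so treat this strictly as a sketch:

    #include <opencv2/features2d/features2d.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        cv::Mat image = cv::imread("scene.png", 0);   // hypothetical grayscale input
        cv::ORB orb(500, cv::ORB::CommonParams());

        // Detect first...
        std::vector<cv::KeyPoint> keypoints;
        orb(image, cv::Mat(), keypoints);

        // ...then compute descriptors on the keypoints we already have.
        cv::Mat descriptors;
        orb(image, cv::Mat(), keypoints, descriptors, /*useProvidedKeypoints=*/true);

        // With the shared edge threshold, no extra points should be dropped at the
        // descriptor stage, so both counts should agree.
        std::printf("%d keypoints, %d descriptors\n", (int)keypoints.size(), descriptors.rows);
        return 0;
    }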