revert orientation computation in keypoint detection for efficiency (as done originally)

Vincent Rabaud 2012-08-24 16:23:07 +02:00 committed by Vadim Pisarevsky
parent da1921b2fc
commit 84c4797030
2 changed files with 70 additions and 93 deletions
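
In effect, the change reverts to the original BRISK flow: orientation is no longer computed during keypoint detection itself, but inside a shared worker that both public entry points delegate to, so the sampling-pattern intensities are read only once when detection and description happen together. A minimal sketch of the resulting dispatch, with illustrative names rather than the actual OpenCV code:

    #include <vector>

    struct BriskSketch
    {
      void computeKeypointsNoOrientation(std::vector<float>& kpts) const
      { /* AGAST/FAST-style detection; angles left unset */ }

      void worker(std::vector<float>& kpts, bool doDescriptors, bool doOrientation,
                  bool useProvidedKeypoints) const
      {
        if (!useProvidedKeypoints)
        {
          doOrientation = true;                 // fresh detections always need angles
          computeKeypointsNoOrientation(kpts);
        }
        // one pass per keypoint: optionally set the angle, optionally build the
        // descriptor bits, reusing the same smoothed-intensity samples for both
      }

      void detectAndDescribe(std::vector<float>& kpts, bool useProvidedKeypoints) const
      {
        worker(kpts, true, !useProvidedKeypoints, useProvidedKeypoints);
      }

      void detectOnly(std::vector<float>& kpts) const
      {
        computeKeypointsNoOrientation(kpts);
        worker(kpts, false, true, true);
      }
    };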

View File

@@ -307,6 +307,11 @@ protected:
void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void computeKeypointsNoOrientation(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const;
void computeDescriptorsAndOrOrientation(InputArray image, InputArray mask, vector<KeyPoint>& keypoints,
OutputArray descriptors, bool doDescriptors, bool doOrientation,
bool useProvidedKeypoints) const;
// Feature parameters
CV_PROP_RW int threshold;
CV_PROP_RW int octaves;

View File

@@ -528,13 +528,28 @@ RoiPredicate(const float minX, const float minY, const float maxX, const float m
void
BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
OutputArray _descriptors, bool useProvidedKeypoints) const
{
bool doOrientation = true;
if (useProvidedKeypoints)
doOrientation = false;
computeDescriptorsAndOrOrientation(_image, _mask, keypoints, _descriptors, true, doOrientation,
useProvidedKeypoints);
}
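
From the caller's side, the flag logic above means a plain detect-and-describe call gets orientation for free, while precomputed keypoints keep whatever angle they already carry. A hedged usage sketch against the 2.4-era interface (the image path is a placeholder):

    #include <opencv2/core/core.hpp>
    #include <opencv2/features2d/features2d.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <vector>

    int main()
    {
      cv::Mat img = cv::imread("scene.png", 0); // placeholder file, loaded as grayscale
      cv::BRISK brisk;
      std::vector<cv::KeyPoint> kpts;
      cv::Mat desc;
      brisk(img, cv::noArray(), kpts, desc, false); // fresh detection: doOrientation = true
      brisk(img, cv::noArray(), kpts, desc, true);  // provided keypoints: angles are trusted
      return 0;
    }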
void
BRISK::computeDescriptorsAndOrOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
OutputArray _descriptors, bool doDescriptors, bool doOrientation,
bool useProvidedKeypoints) const
{
Mat image = _image.getMat(), mask = _mask.getMat();
if( image.type() != CV_8UC1 )
cvtColor(image, image, CV_BGR2GRAY);
if (!useProvidedKeypoints)
detectImpl(image, keypoints, mask);
{
doOrientation = true;
computeKeypointsNoOrientation(_image, _mask, keypoints);
}
//Remove keypoints very close to the border
size_t ksize = keypoints.size();
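
Forcing doOrientation back to true in the branch a few lines up matters because freshly detected keypoints carry no orientation yet: cv::KeyPoint defaults its angle to -1, the "not computed" sentinel that the descriptor loop below tests for. For illustration:

    cv::KeyPoint kp(10.f, 20.f, 12.f); // x, y, size; kp.angle defaults to -1.f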
@@ -578,9 +593,13 @@ BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoi
int* _values = new int[points_]; // for temporary use
// resize the descriptors:
_descriptors.create(ksize, strings_, CV_8U);
cv::Mat descriptors = _descriptors.getMat();
descriptors.setTo(0);
cv::Mat descriptors;
if (doDescriptors)
{
_descriptors.create(ksize, strings_, CV_8U);
descriptors = _descriptors.getMat();
descriptors.setTo(0);
}
// now do the extraction for all keypoints:
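
The conditional allocation above leans on the cv::OutputArray idiom: when the caller passes cv::noArray(), as the detection-only path now does, no descriptor matrix is created at all. The pattern in isolation, with an invented function name:

    #include <opencv2/core/core.hpp>

    // "fillDescriptors" is an invented name for illustration.
    void fillDescriptors(cv::OutputArray _descriptors, bool doDescriptors,
                         int nKeypoints, int nBytes)
    {
      cv::Mat descriptors;
      if (doDescriptors)
      {
        _descriptors.create(nKeypoints, nBytes, CV_8U);
        descriptors = _descriptors.getMat();
        descriptors.setTo(0); // start from a clean slate before the bits are written
      }
      // with doDescriptors == false, nothing is allocated and descriptors stays empty
    }

    // cv::Mat d; fillDescriptors(d, true, 100, 64);  // allocates and zeroes d
    // fillDescriptors(cv::noArray(), false, 0, 0);   // allocates nothing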
@@ -592,13 +611,44 @@ BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoi
uchar* ptr = descriptors.data;
for (size_t k = 0; k < ksize; k++)
{
int theta;
cv::KeyPoint& kp = keypoints[k];
const int& scale = kscales[k];
int shifter = 0;
int* pvalues = _values;
const float& x = kp.pt.x;
const float& y = kp.pt.y;
if (doOrientation)
{
// get the gray values in the unrotated pattern
for (unsigned int i = 0; i < points_; i++)
{
*(pvalues++) = smoothedIntensity(image, _integral, x, y, scale, 0, i);
}
int direction0 = 0;
int direction1 = 0;
// now iterate through the long pairings
const BriskLongPair* max = longPairs_ + noLongPairs_;
for (BriskLongPair* iter = longPairs_; iter < max; ++iter)
{
t1 = *(_values + iter->i);
t2 = *(_values + iter->j);
const int delta_t = (t1 - t2);
// update the direction:
const int tmp0 = delta_t * (iter->weighted_dx) / 1024;
const int tmp1 = delta_t * (iter->weighted_dy) / 1024;
direction0 += tmp0;
direction1 += tmp1;
}
kp.angle = atan2((float) direction1, (float) direction0) / M_PI * 180.0;
if (kp.angle < 0)
kp.angle += 360;
}
if (!doDescriptors)
continue;
int theta;
if (kp.angle==-1)
{
// don't compute the gradient direction, just assign a rotation of 0°
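
For reference, the angle assignment in the orientation branch above reduces to an atan2 of the accumulated weighted gradient, converted to degrees and wrapped into [0, 360). As a self-contained helper mirroring that line:

    #include <cmath>

    // Mirrors kp.angle = atan2(direction1, direction0) / pi * 180, plus the wrap-around.
    static float angleFromDirections(int direction0, int direction1)
    {
      static const float pi = 3.14159265358979f;
      float angle = std::atan2((float)direction1, (float)direction0) / pi * 180.0f;
      return (angle < 0.0f) ? angle + 360.0f : angle;
    }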
@@ -615,7 +665,7 @@ BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoi
// now also extract the stuff for the actual direction:
// let us compute the smoothed values
shifter = 0;
int shifter = 0;
//unsigned int mean=0;
pvalues = _values;
@@ -675,7 +725,14 @@ BRISK::~BRISK()
}
void
BRISK::operator()(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints) const
BRISK::operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const
{
computeKeypointsNoOrientation(image, mask, keypoints);
computeDescriptorsAndOrOrientation(image, mask, keypoints, cv::noArray(), false, true, true);
}
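
Note that plain detection still returns oriented keypoints after this change; it just reaches the orientation code through the shared worker, with descriptors disabled and cv::noArray() as the sink. From the outside:

    #include <opencv2/features2d/features2d.hpp>
    #include <vector>

    // Sketch: detection-only call against the 2.4-era interface.
    std::vector<cv::KeyPoint> detectOriented(const cv::Mat& img)
    {
      cv::BRISK brisk;
      std::vector<cv::KeyPoint> kpts;
      brisk(img, cv::noArray(), kpts); // no descriptors, but kp.angle gets filled in
      return kpts;
    }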
void
BRISK::computeKeypointsNoOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints) const
{
Mat image = _image.getMat(), mask = _mask.getMat();
if( image.type() != CV_8UC1 )
@@ -687,91 +744,6 @@ BRISK::operator()(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoin
// remove invalid points
removeInvalidPoints(mask, keypoints);
// Compute the orientations of the keypoints
//Remove keypoints very close to the border
size_t ksize = keypoints.size();
std::vector<int> kscales; // remember the scale per keypoint
kscales.resize(ksize);
static const float log2 = 0.693147180559945;
static const float lb_scalerange = log(scalerange_) / (log2);
std::vector<cv::KeyPoint>::iterator beginning = keypoints.begin();
std::vector<int>::iterator beginningkscales = kscales.begin();
static const float basicSize06 = basicSize_ * 0.6;
for (size_t k = 0; k < ksize; k++)
{
unsigned int scale;
scale = std::max((int) (scales_ / lb_scalerange * (log(keypoints[k].size / (basicSize06)) / log2) + 0.5), 0);
// saturate
if (scale >= scales_)
scale = scales_ - 1;
kscales[k] = scale;
const int border = sizeList_[scale];
const int border_x = image.cols - border;
const int border_y = image.rows - border;
if (RoiPredicate(border, border, border_x, border_y, keypoints[k]))
{
keypoints.erase(beginning + k);
kscales.erase(beginningkscales + k);
if (k == 0)
{
beginning = keypoints.begin();
beginningkscales = kscales.begin();
}
ksize--;
k--;
}
}
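
The loop above, part of the block being deleted here (it survives inside the shared worker), first maps each keypoint's size to one of the precomputed pattern scales and then discards keypoints whose sampling pattern would leave the image. The size-to-scale mapping in isolation, with the member variables turned into parameters:

    #include <algorithm>
    #include <cmath>

    // Standalone version of the size -> scale-index mapping used above.
    int scaleIndex(float kpSize, unsigned int scales, float scalerange, float basicSize)
    {
      static const float log2_ = 0.693147180559945f;
      const float lb_scalerange = std::log(scalerange) / log2_;
      int scale = std::max((int)(scales / lb_scalerange
                                 * (std::log(kpSize / (basicSize * 0.6f)) / log2_) + 0.5f), 0);
      if (scale >= (int)scales)
        scale = (int)scales - 1; // saturate at the coarsest scale
      return scale;
    }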
// first, calculate the integral image over the whole image:
// current integral image
cv::Mat _integral; // the integral image
cv::integral(image, _integral);
int* _values = new int[points_]; // for temporary use
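
The integral image computed above is what makes the pattern sampling cheap: smoothedIntensity (not shown in this diff) reads box means in constant time per sample. The underlying identity, sketched (cv::integral on an 8-bit image yields a (rows+1) x (cols+1) CV_32S sum table):

    #include <opencv2/core/core.hpp>

    // Mean over the box [x0, x0+w) x [y0, y0+h) in O(1) from a CV_32S integral image.
    int boxMean(const cv::Mat& integral, int x0, int y0, int w, int h)
    {
      const int a = integral.at<int>(y0,     x0);
      const int b = integral.at<int>(y0,     x0 + w);
      const int c = integral.at<int>(y0 + h, x0);
      const int d = integral.at<int>(y0 + h, x0 + w);
      return (a + d - b - c) / (w * h);
    }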
// now do the extraction for all keypoints:
// temporary variables containing gray values at sample points:
int t1;
int t2;
// the feature orientation
int direction0;
int direction1;
for (size_t k = 0; k < ksize; k++)
{
cv::KeyPoint& kp = keypoints[k];
const int& scale = kscales[k];
int* pvalues = _values;
const float& x = kp.pt.x;
const float& y = kp.pt.y;
// get the gray values in the unrotated pattern
for (unsigned int i = 0; i < points_; i++)
{
*(pvalues++) = smoothedIntensity(image, _integral, x, y, scale, 0, i);
}
direction0 = 0;
direction1 = 0;
// now iterate through the long pairings
const BriskLongPair* max = longPairs_ + noLongPairs_;
for (BriskLongPair* iter = longPairs_; iter < max; ++iter)
{
t1 = *(_values + iter->i);
t2 = *(_values + iter->j);
const int delta_t = (t1 - t2);
// update the direction:
const int tmp0 = delta_t * (iter->weighted_dx) / 1024;
const int tmp1 = delta_t * (iter->weighted_dy) / 1024;
direction0 += tmp0;
direction1 += tmp1;
}
kp.angle = atan2((float) direction1, (float) direction0) / M_PI * 180.0;
if (kp.angle < 0)
kp.angle += 360;
}
}
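
Everything removed in this last block is the per-keypoint orientation pass that now lives inside computeDescriptorsAndOrOrientation. Its core, the weighted long-pair accumulation, as a standalone sketch (the field names are taken from the diff; the struct layout itself is an assumption):

    // Field names as used in the diff; the layout is assumed for illustration.
    struct BriskLongPair
    {
      unsigned int i, j;            // indices into the sampled intensities
      int weighted_dx, weighted_dy; // fixed-point pair direction, scaled by 1024
    };

    // Accumulate the intensity-weighted gradient over all long pairs.
    void accumulateDirections(const int* values, const BriskLongPair* pairs,
                              unsigned int noLongPairs, int& direction0, int& direction1)
    {
      direction0 = 0;
      direction1 = 0;
      for (const BriskLongPair* iter = pairs; iter != pairs + noLongPairs; ++iter)
      {
        const int delta_t = values[iter->i] - values[iter->j];
        direction0 += delta_t * iter->weighted_dx / 1024; // divide undoes the fixed-point scale
        direction1 += delta_t * iter->weighted_dy / 1024;
      }
    }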