Remove all using directives for STL namespace and members

Made all STL usages explicit to be able to automatically find all usages of a
particular class or function.
Andrey Kamaev
2013-02-24 20:14:01 +04:00
parent f783f34e0b
commit 2a6fb2867e
310 changed files with 5744 additions and 5964 deletions
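To illustrate the pattern this change applies throughout the tree, here is a minimal before/after sketch (a hypothetical helper, not code taken from the patch): the using namespace std; directive is dropped and every STL name is spelled with an explicit std:: prefix, so a plain text search for std::vector or std::string now finds every usage of that class.

// Hypothetical sketch of the before/after pattern (not part of the patch):
//
//   before: the header relied on "using namespace std;"
//     void keypointsToPoints(const vector<KeyPoint>& keypoints, vector<Point2f>& points);
//
//   after: every STL name is fully qualified, no using-directive required
#include <cstddef>
#include <vector>
#include <opencv2/features2d/features2d.hpp>

static void keypointsToPoints(const std::vector<cv::KeyPoint>& keypoints,
                              std::vector<cv::Point2f>& points)
{
    points.clear();
    points.reserve(keypoints.size());
    for (std::size_t i = 0; i < keypoints.size(); ++i)
        points.push_back(keypoints[i].pt); // KeyPoint::pt is the keypoint coordinate
}

After the change, grepping for "std::vector" matches this declaration directly, whereas the unqualified form would have been missed.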


@@ -85,12 +85,12 @@ public:
size_t hash() const;
//! converts vector of keypoints to vector of points
static void convert(const vector<KeyPoint>& keypoints,
CV_OUT vector<Point2f>& points2f,
const vector<int>& keypointIndexes=vector<int>());
static void convert(const std::vector<KeyPoint>& keypoints,
CV_OUT std::vector<Point2f>& points2f,
const std::vector<int>& keypointIndexes=std::vector<int>());
//! converts vector of points to the vector of keypoints, where each keypoint is assigned the same size and the same orientation
static void convert(const vector<Point2f>& points2f,
CV_OUT vector<KeyPoint>& keypoints,
static void convert(const std::vector<Point2f>& points2f,
CV_OUT std::vector<KeyPoint>& keypoints,
float size=1, float response=1, int octave=0, int class_id=-1);
//! computes overlap for pair of keypoints;
@@ -109,9 +109,9 @@ public:
};
//! writes vector of keypoints to the file storage
CV_EXPORTS void write(FileStorage& fs, const string& name, const vector<KeyPoint>& keypoints);
CV_EXPORTS void write(FileStorage& fs, const std::string& name, const std::vector<KeyPoint>& keypoints);
//! reads vector of keypoints from the specified file storage node
CV_EXPORTS void read(const FileNode& node, CV_OUT vector<KeyPoint>& keypoints);
CV_EXPORTS void read(const FileNode& node, CV_OUT std::vector<KeyPoint>& keypoints);
/*
* A class filters a vector of keypoints.
@@ -126,25 +126,25 @@ public:
/*
* Remove keypoints within borderPixels of an image edge.
*/
static void runByImageBorder( vector<KeyPoint>& keypoints, Size imageSize, int borderSize );
static void runByImageBorder( std::vector<KeyPoint>& keypoints, Size imageSize, int borderSize );
/*
* Remove keypoints of sizes out of range.
*/
static void runByKeypointSize( vector<KeyPoint>& keypoints, float minSize,
static void runByKeypointSize( std::vector<KeyPoint>& keypoints, float minSize,
float maxSize=FLT_MAX );
/*
* Remove keypoints from some image by mask for pixels of this image.
*/
static void runByPixelsMask( vector<KeyPoint>& keypoints, const Mat& mask );
static void runByPixelsMask( std::vector<KeyPoint>& keypoints, const Mat& mask );
/*
* Remove duplicated keypoints.
*/
static void removeDuplicated( vector<KeyPoint>& keypoints );
static void removeDuplicated( std::vector<KeyPoint>& keypoints );
/*
* Retain the specified number of the best keypoints (according to the response)
*/
static void retainBest( vector<KeyPoint>& keypoints, int npoints );
static void retainBest( std::vector<KeyPoint>& keypoints, int npoints );
};
@@ -165,7 +165,7 @@ public:
* mask Mask specifying where to look for keypoints (optional). Must be a char
* matrix with non-zero values in the region of interest.
*/
CV_WRAP void detect( const Mat& image, CV_OUT vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
CV_WRAP void detect( const Mat& image, CV_OUT std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
/*
* Detect keypoints in an image set.
@@ -173,23 +173,23 @@ public:
* keypoints Collection of keypoints detected in an input images. keypoints[i] is a set of keypoints detected in an images[i].
* masks Masks for image set. masks[i] is a mask for images[i].
*/
void detect( const vector<Mat>& images, vector<vector<KeyPoint> >& keypoints, const vector<Mat>& masks=vector<Mat>() ) const;
void detect( const std::vector<Mat>& images, std::vector<std::vector<KeyPoint> >& keypoints, const std::vector<Mat>& masks=std::vector<Mat>() ) const;
// Return true if detector object is empty
CV_WRAP virtual bool empty() const;
// Create feature detector by detector name.
CV_WRAP static Ptr<FeatureDetector> create( const string& detectorType );
CV_WRAP static Ptr<FeatureDetector> create( const std::string& detectorType );
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const = 0;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const = 0;
/*
* Remove keypoints that are not in the mask.
* Helper function, useful when wrapping a library call for keypoint detection that
* does not support a mask argument.
*/
static void removeInvalidPoints( const Mat& mask, vector<KeyPoint>& keypoints );
static void removeInvalidPoints( const Mat& mask, std::vector<KeyPoint>& keypoints );
};
@@ -213,7 +213,7 @@ public:
* keypoints The input keypoints. Keypoints for which a descriptor cannot be computed are removed.
* descriptors Copmputed descriptors. Row i is the descriptor for keypoint i.
*/
CV_WRAP void compute( const Mat& image, CV_OUT CV_IN_OUT vector<KeyPoint>& keypoints, CV_OUT Mat& descriptors ) const;
CV_WRAP void compute( const Mat& image, CV_OUT CV_IN_OUT std::vector<KeyPoint>& keypoints, CV_OUT Mat& descriptors ) const;
/*
* Compute the descriptors for a keypoints collection detected in image collection.
@@ -222,22 +222,22 @@ public:
* Keypoints for which a descriptor cannot be computed are removed.
* descriptors Descriptor collection. descriptors[i] are descriptors computed for set keypoints[i].
*/
void compute( const vector<Mat>& images, vector<vector<KeyPoint> >& keypoints, vector<Mat>& descriptors ) const;
void compute( const std::vector<Mat>& images, std::vector<std::vector<KeyPoint> >& keypoints, std::vector<Mat>& descriptors ) const;
CV_WRAP virtual int descriptorSize() const = 0;
CV_WRAP virtual int descriptorType() const = 0;
CV_WRAP virtual bool empty() const;
CV_WRAP static Ptr<DescriptorExtractor> create( const string& descriptorExtractorType );
CV_WRAP static Ptr<DescriptorExtractor> create( const std::string& descriptorExtractorType );
protected:
virtual void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const = 0;
virtual void computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const = 0;
/*
* Remove keypoints within borderPixels of an image edge.
*/
static void removeBorderKeypoints( vector<KeyPoint>& keypoints,
static void removeBorderKeypoints( std::vector<KeyPoint>& keypoints,
Size imageSize, int borderSize );
};
@@ -259,12 +259,12 @@ public:
* descriptors for the provided keypoints
*/
CV_WRAP_AS(detectAndCompute) virtual void operator()( InputArray image, InputArray mask,
CV_OUT vector<KeyPoint>& keypoints,
CV_OUT std::vector<KeyPoint>& keypoints,
OutputArray descriptors,
bool useProvidedKeypoints=false ) const = 0;
// Create feature detector and descriptor extractor by name.
CV_WRAP static Ptr<Feature2D> create( const string& name );
CV_WRAP static Ptr<Feature2D> create( const std::string& name );
};
/*!
@@ -283,10 +283,10 @@ public:
int descriptorType() const;
// Compute the BRISK features on an image
void operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const;
void operator()(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints) const;
// Compute the BRISK features and descriptors on an image
void operator()( InputArray image, InputArray mask, vector<KeyPoint>& keypoints,
void operator()( InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints,
OutputArray descriptors, bool useProvidedKeypoints=false ) const;
AlgorithmInfo* info() const;
@@ -304,11 +304,11 @@ public:
protected:
void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void computeKeypointsNoOrientation(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const;
void computeDescriptorsAndOrOrientation(InputArray image, InputArray mask, vector<KeyPoint>& keypoints,
void computeKeypointsNoOrientation(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints) const;
void computeDescriptorsAndOrOrientation(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints,
OutputArray descriptors, bool doDescriptors, bool doOrientation,
bool useProvidedKeypoints) const;
@@ -377,18 +377,18 @@ public:
int descriptorType() const;
// Compute the ORB features and descriptors on an image
void operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const;
void operator()(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints) const;
// Compute the ORB features and descriptors on an image
void operator()( InputArray image, InputArray mask, vector<KeyPoint>& keypoints,
void operator()( InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints,
OutputArray descriptors, bool useProvidedKeypoints=false ) const;
AlgorithmInfo* info() const;
protected:
void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
CV_PROP_RW int nfeatures;
CV_PROP_RW double scaleFactor;
@@ -420,7 +420,7 @@ public:
bool scaleNormalized = true,
float patternScale = 22.0f,
int nOctaves = 4,
const vector<int>& selectedPairs = vector<int>());
const std::vector<int>& selectedPairs = std::vector<int>());
FREAK( const FREAK& rhs );
FREAK& operator=( const FREAK& );
@@ -439,7 +439,7 @@ public:
* @param verbose print construction information
* @return list of best pair indexes
*/
vector<int> selectPairs( const vector<Mat>& images, vector<vector<KeyPoint> >& keypoints,
std::vector<int> selectPairs( const std::vector<Mat>& images, std::vector<std::vector<KeyPoint> >& keypoints,
const double corrThresh = 0.7, bool verbose = true );
AlgorithmInfo* info() const;
@@ -450,7 +450,7 @@ public:
};
protected:
virtual void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const;
virtual void computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const;
void buildPattern();
uchar meanIntensity( const Mat& image, const Mat& integral, const float kp_x, const float kp_y,
const unsigned int scale, const unsigned int rot, const unsigned int point ) const;
@@ -463,7 +463,7 @@ protected:
double patternScale0;
int nOctaves0;
vector<int> selectedPairs0;
std::vector<int> selectedPairs0;
struct PatternPoint
{
@@ -486,7 +486,7 @@ protected:
int weight_dy; // dy/(norm_sq))*4096
};
vector<PatternPoint> patternLookup; // look-up table for the pattern points (position+sigma of all points at all scales and orientation)
std::vector<PatternPoint> patternLookup; // look-up table for the pattern points (position+sigma of all points at all scales and orientation)
int patternSizes[NB_SCALES]; // size of the pattern at a specific scale (used to check if a point is within image boundaries)
DescriptionPair descriptionPairs[NB_PAIRS];
OrientationPair orientationPairs[NB_ORIENPAIRS];
@@ -512,12 +512,12 @@ public:
double _min_margin=0.003, int _edge_blur_size=5 );
//! the operator that extracts the MSERs from the image or the specific part of it
CV_WRAP_AS(detect) void operator()( const Mat& image, CV_OUT vector<vector<Point> >& msers,
CV_WRAP_AS(detect) void operator()( const Mat& image, CV_OUT std::vector<std::vector<Point> >& msers,
const Mat& mask=Mat() ) const;
AlgorithmInfo* info() const;
protected:
void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
int delta;
int minArea;
@@ -548,12 +548,12 @@ public:
//! finds the keypoints in the image
CV_WRAP_AS(detect) void operator()(const Mat& image,
CV_OUT vector<KeyPoint>& keypoints) const;
CV_OUT std::vector<KeyPoint>& keypoints) const;
AlgorithmInfo* info() const;
protected:
void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
int maxSize;
int responseThreshold;
@@ -563,10 +563,10 @@ protected:
};
//! detects corners using FAST algorithm by E. Rosten
CV_EXPORTS void FAST( InputArray image, CV_OUT vector<KeyPoint>& keypoints,
CV_EXPORTS void FAST( InputArray image, CV_OUT std::vector<KeyPoint>& keypoints,
int threshold, bool nonmaxSupression=true );
CV_EXPORTS void FAST( InputArray image, CV_OUT vector<KeyPoint>& keypoints,
CV_EXPORTS void FAST( InputArray image, CV_OUT std::vector<KeyPoint>& keypoints,
int threshold, bool nonmaxSupression, int type );
class CV_EXPORTS_W FastFeatureDetector : public FeatureDetector
@@ -582,7 +582,7 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
int threshold;
bool nonmaxSuppression;
@@ -598,7 +598,7 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
int nfeatures;
double qualityLevel;
@@ -655,8 +655,8 @@ protected:
double confidence;
};
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void findBlobs(const Mat &image, const Mat &binaryImage, vector<Center> &centers) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void findBlobs(const Mat &image, const Mat &binaryImage, std::vector<Center> &centers) const;
Params params;
};
@@ -673,7 +673,7 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
double initFeatureScale;
int featureScaleLevels;
@@ -710,7 +710,7 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
Ptr<FeatureDetector> detector;
int maxTotalKeypoints;
@@ -732,7 +732,7 @@ public:
virtual bool empty() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
Ptr<FeatureDetector> detector;
int maxLevel;
@@ -764,7 +764,7 @@ public:
virtual Ptr<AdjusterAdapter> clone() const = 0;
static Ptr<AdjusterAdapter> create( const string& detectorType );
static Ptr<AdjusterAdapter> create( const std::string& detectorType );
};
/** \brief an adaptively adjusting detector that iteratively detects until the desired number
* of features are detected.
@@ -793,7 +793,7 @@ public:
virtual bool empty() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
private:
DynamicAdaptedFeatureDetector& operator=(const DynamicAdaptedFeatureDetector&);
@@ -822,7 +822,7 @@ public:
virtual Ptr<AdjusterAdapter> clone() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
int thresh_;
bool nonmax_;
@@ -845,7 +845,7 @@ public:
virtual Ptr<AdjusterAdapter> clone() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
double thresh_, init_thresh_, min_thresh_, max_thresh_;
};
@@ -862,12 +862,12 @@ public:
virtual Ptr<AdjusterAdapter> clone() const;
protected:
virtual void detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
double thresh_, init_thresh_, min_thresh_, max_thresh_;
};
CV_EXPORTS Mat windowedMatchingMask( const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
CV_EXPORTS Mat windowedMatchingMask( const std::vector<KeyPoint>& keypoints1, const std::vector<KeyPoint>& keypoints2,
float maxDeltaX, float maxDeltaY );
@@ -895,7 +895,7 @@ public:
virtual bool empty() const;
protected:
virtual void computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const;
virtual void computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const;
Ptr<DescriptorExtractor> descriptorExtractor;
};
@@ -923,9 +923,9 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void computeImpl(const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors) const;
virtual void computeImpl(const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors) const;
typedef void(*PixelTestFn)(const Mat&, const vector<KeyPoint>&, Mat&);
typedef void(*PixelTestFn)(const Mat&, const std::vector<KeyPoint>&, Mat&);
int bytes_;
PixelTestFn test_fn_;
@@ -975,7 +975,7 @@ struct CV_EXPORTS L2
ResultType operator()( const T* a, const T* b, int size ) const
{
return (ResultType)sqrt((double)normL2Sqr<ValueType, ResultType>(a, b, size));
return (ResultType)std::sqrt((double)normL2Sqr<ValueType, ResultType>(a, b, size));
}
};
@@ -1069,11 +1069,11 @@ public:
* Add descriptors to train descriptor collection.
* descriptors Descriptors to add. Each descriptors[i] is a descriptors set from one image.
*/
CV_WRAP virtual void add( const vector<Mat>& descriptors );
CV_WRAP virtual void add( const std::vector<Mat>& descriptors );
/*
* Get train descriptors collection.
*/
CV_WRAP const vector<Mat>& getTrainDescriptors() const;
CV_WRAP const std::vector<Mat>& getTrainDescriptors() const;
/*
* Clear train descriptors collection.
*/
@@ -1106,29 +1106,29 @@ public:
*/
// Find one best match for each query descriptor (if mask is empty).
CV_WRAP void match( const Mat& queryDescriptors, const Mat& trainDescriptors,
CV_OUT vector<DMatch>& matches, const Mat& mask=Mat() ) const;
CV_OUT std::vector<DMatch>& matches, const Mat& mask=Mat() ) const;
// Find k best matches for each query descriptor (in increasing order of distances).
// compactResult is used when mask is not empty. If compactResult is false matches
// vector will have the same size as queryDescriptors rows. If compactResult is true
// matches vector will not contain matches for fully masked out query descriptors.
CV_WRAP void knnMatch( const Mat& queryDescriptors, const Mat& trainDescriptors,
CV_OUT vector<vector<DMatch> >& matches, int k,
CV_OUT std::vector<std::vector<DMatch> >& matches, int k,
const Mat& mask=Mat(), bool compactResult=false ) const;
// Find best matches for each query descriptor which have distance less than
// maxDistance (in increasing order of distances).
void radiusMatch( const Mat& queryDescriptors, const Mat& trainDescriptors,
vector<vector<DMatch> >& matches, float maxDistance,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const Mat& mask=Mat(), bool compactResult=false ) const;
/*
* Group of methods to match descriptors from one image to image set.
* See description of similar methods for matching image pair above.
*/
CV_WRAP void match( const Mat& queryDescriptors, CV_OUT vector<DMatch>& matches,
const vector<Mat>& masks=vector<Mat>() );
CV_WRAP void knnMatch( const Mat& queryDescriptors, CV_OUT vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
CV_WRAP void match( const Mat& queryDescriptors, CV_OUT std::vector<DMatch>& matches,
const std::vector<Mat>& masks=std::vector<Mat>() );
CV_WRAP void knnMatch( const Mat& queryDescriptors, CV_OUT std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
// Reads matcher object from a file node
virtual void read( const FileNode& );
@@ -1140,7 +1140,7 @@ public:
// but with empty train data.
virtual Ptr<DescriptorMatcher> clone( bool emptyTrainData=false ) const = 0;
CV_WRAP static Ptr<DescriptorMatcher> create( const string& descriptorMatcherType );
CV_WRAP static Ptr<DescriptorMatcher> create( const std::string& descriptorMatcherType );
protected:
/*
* Class to work with descriptors from several images as with one merged matrix.
@@ -1154,7 +1154,7 @@ protected:
virtual ~DescriptorCollection();
// Vector of matrices "descriptors" will be merged to one matrix "mergedDescriptors" here.
void set( const vector<Mat>& descriptors );
void set( const std::vector<Mat>& descriptors );
virtual void clear();
const Mat& getDescriptors() const;
@@ -1166,25 +1166,25 @@ protected:
protected:
Mat mergedDescriptors;
vector<int> startIdxs;
std::vector<int> startIdxs;
};
// In fact the matching is implemented only by the following two methods. These methods suppose
// that the class object has been trained already. Public match methods call these methods
// after calling train().
virtual void knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false ) = 0;
virtual void radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false ) = 0;
virtual void knnMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false ) = 0;
virtual void radiusMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false ) = 0;
static bool isPossibleMatch( const Mat& mask, int queryIdx, int trainIdx );
static bool isMaskedOut( const vector<Mat>& masks, int queryIdx );
static bool isMaskedOut( const std::vector<Mat>& masks, int queryIdx );
static Mat clone_op( Mat m ) { return m.clone(); }
void checkMasks( const vector<Mat>& masks, int queryDescriptorsCount ) const;
void checkMasks( const std::vector<Mat>& masks, int queryDescriptorsCount ) const;
// Collection of descriptors from train images.
vector<Mat> trainDescCollection;
std::vector<Mat> trainDescCollection;
};
/*
@@ -1208,10 +1208,10 @@ public:
AlgorithmInfo* info() const;
protected:
virtual void knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
virtual void radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
virtual void knnMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
virtual void radiusMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
int normType;
bool crossCheck;
@@ -1227,7 +1227,7 @@ public:
CV_WRAP FlannBasedMatcher( const Ptr<flann::IndexParams>& indexParams=new flann::KDTreeIndexParams(),
const Ptr<flann::SearchParams>& searchParams=new flann::SearchParams() );
virtual void add( const vector<Mat>& descriptors );
virtual void add( const std::vector<Mat>& descriptors );
virtual void clear();
// Reads matcher object from a file node
@@ -1244,12 +1244,12 @@ public:
protected:
static void convertToDMatches( const DescriptorCollection& descriptors,
const Mat& indices, const Mat& distances,
vector<vector<DMatch> >& matches );
std::vector<std::vector<DMatch> >& matches );
virtual void knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
virtual void radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
virtual void knnMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
virtual void radiusMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
Ptr<flann::IndexParams> indexParams;
Ptr<flann::SearchParams> searchParams;
@@ -1284,11 +1284,11 @@ public:
* If inheritor class need perform such prefiltering the method add() must be overloaded.
* In the other class methods programmer has access to the train keypoints by a constant link.
*/
virtual void add( const vector<Mat>& images,
vector<vector<KeyPoint> >& keypoints );
virtual void add( const std::vector<Mat>& images,
std::vector<std::vector<KeyPoint> >& keypoints );
const vector<Mat>& getTrainImages() const;
const vector<vector<KeyPoint> >& getTrainKeypoints() const;
const std::vector<Mat>& getTrainImages() const;
const std::vector<std::vector<KeyPoint> >& getTrainKeypoints() const;
/*
* Clear images and keypoints storing in train collection.
@@ -1313,10 +1313,10 @@ public:
* trainKeypoints Keypoints from the train image
*/
// Classify keypoints from query image under one train image.
void classify( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints ) const;
void classify( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints ) const;
// Classify keypoints from query image under train image collection.
void classify( const Mat& queryImage, vector<KeyPoint>& queryKeypoints );
void classify( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints );
/*
* Group of methods to match keypoints from image pair.
@@ -1324,34 +1324,34 @@ public:
* train() method is called here.
*/
// Find one best match for each query descriptor (if mask is empty).
void match( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<DMatch>& matches, const Mat& mask=Mat() ) const;
void match( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<DMatch>& matches, const Mat& mask=Mat() ) const;
// Find k best matches for each query keypoint (in increasing order of distances).
// compactResult is used when mask is not empty. If compactResult is false matches
// vector will have the same size as queryDescriptors rows.
// If compactResult is true matches vector will not contain matches for fully masked out query descriptors.
void knnMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<vector<DMatch> >& matches, int k,
void knnMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<std::vector<DMatch> >& matches, int k,
const Mat& mask=Mat(), bool compactResult=false ) const;
// Find best matches for each query descriptor which have distance less than maxDistance (in increasing order of distances).
void radiusMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
void radiusMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const Mat& mask=Mat(), bool compactResult=false ) const;
/*
* Group of methods to match keypoints from one image to image set.
* See description of similar methods for matching image pair above.
*/
void match( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<DMatch>& matches, const vector<Mat>& masks=vector<Mat>() );
void knnMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
void match( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<DMatch>& matches, const std::vector<Mat>& masks=std::vector<Mat>() );
void knnMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks=std::vector<Mat>(), bool compactResult=false );
// Reads matcher object from a file node
virtual void read( const FileNode& fn );
@@ -1366,19 +1366,19 @@ public:
// but with empty train data.
virtual Ptr<GenericDescriptorMatcher> clone( bool emptyTrainData=false ) const = 0;
static Ptr<GenericDescriptorMatcher> create( const string& genericDescritptorMatcherType,
const string &paramsFilename=string() );
static Ptr<GenericDescriptorMatcher> create( const std::string& genericDescritptorMatcherType,
const std::string &paramsFilename=std::string() );
protected:
// In fact the matching is implemented only by the following two methods. These methods suppose
// that the class object has been trained already. Public match methods call these methods
// after calling train().
virtual void knnMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks, bool compactResult ) = 0;
virtual void radiusMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult ) = 0;
virtual void knnMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks, bool compactResult ) = 0;
virtual void radiusMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks, bool compactResult ) = 0;
/*
* A storage for sets of keypoints together with corresponding images and class IDs
*/
@@ -1387,29 +1387,29 @@ protected:
public:
KeyPointCollection();
KeyPointCollection( const KeyPointCollection& collection );
void add( const vector<Mat>& images, const vector<vector<KeyPoint> >& keypoints );
void add( const std::vector<Mat>& images, const std::vector<std::vector<KeyPoint> >& keypoints );
void clear();
// Returns the total number of keypoints in the collection
size_t keypointCount() const;
size_t imageCount() const;
const vector<vector<KeyPoint> >& getKeypoints() const;
const vector<KeyPoint>& getKeypoints( int imgIdx ) const;
const std::vector<std::vector<KeyPoint> >& getKeypoints() const;
const std::vector<KeyPoint>& getKeypoints( int imgIdx ) const;
const KeyPoint& getKeyPoint( int imgIdx, int localPointIdx ) const;
const KeyPoint& getKeyPoint( int globalPointIdx ) const;
void getLocalIdx( int globalPointIdx, int& imgIdx, int& localPointIdx ) const;
const vector<Mat>& getImages() const;
const std::vector<Mat>& getImages() const;
const Mat& getImage( int imgIdx ) const;
protected:
int pointCount;
vector<Mat> images;
vector<vector<KeyPoint> > keypoints;
std::vector<Mat> images;
std::vector<std::vector<KeyPoint> > keypoints;
// global indices of the first points in each image, startIndices.size() = keypoints.size()
vector<int> startIndices;
std::vector<int> startIndices;
private:
static Mat clone_op( Mat m ) { return m.clone(); }
@@ -1435,8 +1435,8 @@ public:
VectorDescriptorMatcher( const Ptr<DescriptorExtractor>& extractor, const Ptr<DescriptorMatcher>& matcher );
virtual ~VectorDescriptorMatcher();
virtual void add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection );
virtual void add( const std::vector<Mat>& imgCollection,
std::vector<std::vector<KeyPoint> >& pointCollection );
virtual void clear();
@@ -1451,12 +1451,12 @@ public:
virtual Ptr<GenericDescriptorMatcher> clone( bool emptyTrainData=false ) const;
protected:
virtual void knnMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, int k,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
virtual void knnMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, int k,
const std::vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks, bool compactResult );
Ptr<DescriptorExtractor> extractor;
Ptr<DescriptorMatcher> matcher;
@@ -1481,42 +1481,42 @@ struct CV_EXPORTS DrawMatchesFlags
};
// Draw keypoints.
CV_EXPORTS_W void drawKeypoints( const Mat& image, const vector<KeyPoint>& keypoints, CV_OUT Mat& outImage,
CV_EXPORTS_W void drawKeypoints( const Mat& image, const std::vector<KeyPoint>& keypoints, CV_OUT Mat& outImage,
const Scalar& color=Scalar::all(-1), int flags=DrawMatchesFlags::DEFAULT );
// Draws matches of keypints from two images on output image.
CV_EXPORTS void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<DMatch>& matches1to2, Mat& outImg,
CV_EXPORTS void drawMatches( const Mat& img1, const std::vector<KeyPoint>& keypoints1,
const Mat& img2, const std::vector<KeyPoint>& keypoints2,
const std::vector<DMatch>& matches1to2, Mat& outImg,
const Scalar& matchColor=Scalar::all(-1), const Scalar& singlePointColor=Scalar::all(-1),
const vector<char>& matchesMask=vector<char>(), int flags=DrawMatchesFlags::DEFAULT );
const std::vector<char>& matchesMask=std::vector<char>(), int flags=DrawMatchesFlags::DEFAULT );
CV_EXPORTS void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<vector<DMatch> >& matches1to2, Mat& outImg,
CV_EXPORTS void drawMatches( const Mat& img1, const std::vector<KeyPoint>& keypoints1,
const Mat& img2, const std::vector<KeyPoint>& keypoints2,
const std::vector<std::vector<DMatch> >& matches1to2, Mat& outImg,
const Scalar& matchColor=Scalar::all(-1), const Scalar& singlePointColor=Scalar::all(-1),
const vector<vector<char> >& matchesMask=vector<vector<char> >(), int flags=DrawMatchesFlags::DEFAULT );
const std::vector<std::vector<char> >& matchesMask=std::vector<std::vector<char> >(), int flags=DrawMatchesFlags::DEFAULT );
/****************************************************************************************\
* Functions to evaluate the feature detectors and [generic] descriptor extractors *
\****************************************************************************************/
CV_EXPORTS void evaluateFeatureDetector( const Mat& img1, const Mat& img2, const Mat& H1to2,
vector<KeyPoint>* keypoints1, vector<KeyPoint>* keypoints2,
std::vector<KeyPoint>* keypoints1, std::vector<KeyPoint>* keypoints2,
float& repeatability, int& correspCount,
const Ptr<FeatureDetector>& fdetector=Ptr<FeatureDetector>() );
CV_EXPORTS void computeRecallPrecisionCurve( const vector<vector<DMatch> >& matches1to2,
const vector<vector<uchar> >& correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve );
CV_EXPORTS void computeRecallPrecisionCurve( const std::vector<std::vector<DMatch> >& matches1to2,
const std::vector<std::vector<uchar> >& correctMatches1to2Mask,
std::vector<Point2f>& recallPrecisionCurve );
CV_EXPORTS float getRecall( const vector<Point2f>& recallPrecisionCurve, float l_precision );
CV_EXPORTS int getNearestPoint( const vector<Point2f>& recallPrecisionCurve, float l_precision );
CV_EXPORTS float getRecall( const std::vector<Point2f>& recallPrecisionCurve, float l_precision );
CV_EXPORTS int getNearestPoint( const std::vector<Point2f>& recallPrecisionCurve, float l_precision );
CV_EXPORTS void evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, const Mat& H1to2,
vector<KeyPoint>& keypoints1, vector<KeyPoint>& keypoints2,
vector<vector<DMatch> >* matches1to2, vector<vector<uchar> >* correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve,
std::vector<KeyPoint>& keypoints1, std::vector<KeyPoint>& keypoints2,
std::vector<std::vector<DMatch> >* matches1to2, std::vector<std::vector<uchar> >* correctMatches1to2Mask,
std::vector<Point2f>& recallPrecisionCurve,
const Ptr<GenericDescriptorMatcher>& dmatch=Ptr<GenericDescriptorMatcher>() );
@@ -1533,7 +1533,7 @@ public:
virtual ~BOWTrainer();
void add( const Mat& descriptors );
const vector<Mat>& getDescriptors() const;
const std::vector<Mat>& getDescriptors() const;
int descripotorsCount() const;
virtual void clear();
@@ -1549,7 +1549,7 @@ public:
virtual Mat cluster( const Mat& descriptors ) const = 0;
protected:
vector<Mat> descriptors;
std::vector<Mat> descriptors;
int size;
};
@@ -1587,8 +1587,8 @@ public:
void setVocabulary( const Mat& vocabulary );
const Mat& getVocabulary() const;
void compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& imgDescriptor,
vector<vector<int> >* pointIdxsOfClusters=0, Mat* descriptors=0 );
void compute( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& imgDescriptor,
std::vector<std::vector<int> >* pointIdxsOfClusters=0, Mat* descriptors=0 );
// compute() is not constant because DescriptorMatcher::match is not constant
int descriptorSize() const;


@@ -9,7 +9,7 @@ using std::tr1::get;
enum { TYPE_5_8 =FastFeatureDetector::TYPE_5_8, TYPE_7_12 = FastFeatureDetector::TYPE_7_12, TYPE_9_16 = FastFeatureDetector::TYPE_9_16 };
CV_ENUM(FastType, TYPE_5_8, TYPE_7_12, TYPE_9_16)
typedef std::tr1::tuple<String, FastType> File_Type_t;
typedef std::tr1::tuple<string, FastType> File_Type_t;
typedef perf::TestBaseWithParam<File_Type_t> fast;
#define FAST_IMAGES \
@@ -21,7 +21,7 @@ PERF_TEST_P(fast, detect, testing::Combine(
testing::ValuesIn(FastType::all())
))
{
String filename = getDataPath(get<0>(GetParam()));
string filename = getDataPath(get<0>(GetParam()));
int type = get<1>(GetParam());
Mat frame = imread(filename, IMREAD_GRAYSCALE);


@@ -14,7 +14,7 @@ typedef perf::TestBaseWithParam<std::string> orb;
PERF_TEST_P(orb, detect, testing::Values(ORB_IMAGES))
{
String filename = getDataPath(GetParam());
string filename = getDataPath(GetParam());
Mat frame = imread(filename, IMREAD_GRAYSCALE);
if (frame.empty())
@@ -33,7 +33,7 @@ PERF_TEST_P(orb, detect, testing::Values(ORB_IMAGES))
PERF_TEST_P(orb, extract, testing::Values(ORB_IMAGES))
{
String filename = getDataPath(GetParam());
string filename = getDataPath(GetParam());
Mat frame = imread(filename, IMREAD_GRAYSCALE);
if (frame.empty())
@@ -56,7 +56,7 @@ PERF_TEST_P(orb, extract, testing::Values(ORB_IMAGES))
PERF_TEST_P(orb, full, testing::Values(ORB_IMAGES))
{
String filename = getDataPath(GetParam());
string filename = getDataPath(GetParam());
Mat frame = imread(filename, IMREAD_GRAYSCALE);
if (frame.empty())


@@ -41,8 +41,6 @@
#include "precomp.hpp"
using namespace std;
namespace cv
{
@@ -69,7 +67,7 @@ void BOWTrainer::add( const Mat& _descriptors )
descriptors.push_back(_descriptors);
}
const vector<Mat>& BOWTrainer::getDescriptors() const
const std::vector<Mat>& BOWTrainer::getDescriptors() const
{
return descriptors;
}
@@ -130,7 +128,7 @@ void BOWImgDescriptorExtractor::setVocabulary( const Mat& _vocabulary )
{
dmatcher->clear();
vocabulary = _vocabulary;
dmatcher->add( vector<Mat>(1, vocabulary) );
dmatcher->add( std::vector<Mat>(1, vocabulary) );
}
const Mat& BOWImgDescriptorExtractor::getVocabulary() const
@@ -138,8 +136,8 @@ const Mat& BOWImgDescriptorExtractor::getVocabulary() const
return vocabulary;
}
void BOWImgDescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& imgDescriptor,
vector<vector<int> >* pointIdxsOfClusters, Mat* _descriptors )
void BOWImgDescriptorExtractor::compute( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& imgDescriptor,
std::vector<std::vector<int> >* pointIdxsOfClusters, Mat* _descriptors )
{
imgDescriptor.release();
@@ -153,7 +151,7 @@ void BOWImgDescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& key
dextractor->compute( image, keypoints, descriptors );
// Match keypoint descriptors to cluster center (to vocabulary)
vector<DMatch> matches;
std::vector<DMatch> matches;
dmatcher->match( descriptors, matches );
// Compute image descriptor


@@ -162,12 +162,12 @@ void SimpleBlobDetector::write( cv::FileStorage& fs ) const
params.write(fs);
}
void SimpleBlobDetector::findBlobs(const cv::Mat &image, const cv::Mat &binaryImage, vector<Center> &centers) const
void SimpleBlobDetector::findBlobs(const cv::Mat &image, const cv::Mat &binaryImage, std::vector<Center> &centers) const
{
(void)image;
centers.clear();
vector < vector<Point> > contours;
std::vector < std::vector<Point> > contours;
Mat tmpBinaryImage = binaryImage.clone();
findContours(tmpBinaryImage, contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
@@ -204,7 +204,7 @@ void SimpleBlobDetector::findBlobs(const cv::Mat &image, const cv::Mat &binaryIm
if (params.filterByInertia)
{
double denominator = sqrt(pow(2 * moms.mu11, 2) + pow(moms.mu20 - moms.mu02, 2));
double denominator = std::sqrt(std::pow(2 * moms.mu11, 2) + std::pow(moms.mu20 - moms.mu02, 2));
const double eps = 1e-2;
double ratio;
if (denominator > eps)
@@ -231,7 +231,7 @@ void SimpleBlobDetector::findBlobs(const cv::Mat &image, const cv::Mat &binaryIm
if (params.filterByConvexity)
{
vector < Point > hull;
std::vector < Point > hull;
convexHull(Mat(contours[contourIdx]), hull);
double area = contourArea(Mat(contours[contourIdx]));
double hullArea = contourArea(Mat(hull));
@@ -250,7 +250,7 @@ void SimpleBlobDetector::findBlobs(const cv::Mat &image, const cv::Mat &binaryIm
//compute blob radius
{
vector<double> dists;
std::vector<double> dists;
for (size_t pointIdx = 0; pointIdx < contours[contourIdx].size(); pointIdx++)
{
Point2d pt = contours[contourIdx][pointIdx];
@@ -282,7 +282,7 @@ void SimpleBlobDetector::detectImpl(const cv::Mat& image, std::vector<cv::KeyPoi
else
grayscaleImage = image;
vector < vector<Center> > centers;
std::vector < std::vector<Center> > centers;
for (double thresh = params.minThreshold; thresh < params.maxThreshold; thresh += params.thresholdStep)
{
Mat binarizedImage;
@@ -293,9 +293,9 @@ void SimpleBlobDetector::detectImpl(const cv::Mat& image, std::vector<cv::KeyPoi
// cvtColor( binarizedImage, keypointsImage, CV_GRAY2RGB );
#endif
vector < Center > curCenters;
std::vector < Center > curCenters;
findBlobs(grayscaleImage, binarizedImage, curCenters);
vector < vector<Center> > newCenters;
std::vector < std::vector<Center> > newCenters;
for (size_t i = 0; i < curCenters.size(); i++)
{
#ifdef DEBUG_BLOB_DETECTOR
@@ -324,8 +324,8 @@ void SimpleBlobDetector::detectImpl(const cv::Mat& image, std::vector<cv::KeyPoi
}
if (isNew)
{
newCenters.push_back(vector<Center> (1, curCenters[i]));
//centers.push_back(vector<Center> (1, curCenters[i]));
newCenters.push_back(std::vector<Center> (1, curCenters[i]));
//centers.push_back(std::vector<Center> (1, curCenters[i]));
}
}
std::copy(newCenters.begin(), newCenters.end(), std::back_inserter(centers));


@@ -247,7 +247,7 @@ BRISK::generateKernel(std::vector<float> &radiusList, std::vector<int> &numberLi
BriskPatternPoint* patternIterator = patternPoints_;
// define the scale discretization:
static const float lb_scale = (float)(log(scalerange_) / log(2.0));
static const float lb_scale = (float)(std::log(scalerange_) / std::log(2.0));
static const float lb_scale_step = lb_scale / (scales_);
scaleList_ = new float[scales_];
@@ -257,7 +257,7 @@ BRISK::generateKernel(std::vector<float> &radiusList, std::vector<int> &numberLi
for (unsigned int scale = 0; scale < scales_; ++scale)
{
scaleList_[scale] = (float)pow((double) 2.0, (double) (scale * lb_scale_step));
scaleList_[scale] = (float)std::pow((double) 2.0, (double) (scale * lb_scale_step));
sizeList_[scale] = 0;
// generate the pattern points look-up
@@ -519,7 +519,7 @@ RoiPredicate(const float minX, const float minY, const float maxX, const float m
// computes the descriptor
void
BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
BRISK::operator()( InputArray _image, InputArray _mask, std::vector<KeyPoint>& keypoints,
OutputArray _descriptors, bool useProvidedKeypoints) const
{
bool doOrientation=true;
@@ -530,7 +530,7 @@ BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoi
}
void
BRISK::computeDescriptorsAndOrOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
BRISK::computeDescriptorsAndOrOrientation(InputArray _image, InputArray _mask, std::vector<KeyPoint>& keypoints,
OutputArray _descriptors, bool doDescriptors, bool doOrientation,
bool useProvidedKeypoints) const
{
@@ -549,14 +549,14 @@ BRISK::computeDescriptorsAndOrOrientation(InputArray _image, InputArray _mask, v
std::vector<int> kscales; // remember the scale per keypoint
kscales.resize(ksize);
static const float log2 = 0.693147180559945f;
static const float lb_scalerange = (float)(log(scalerange_) / (log2));
static const float lb_scalerange = (float)(std::log(scalerange_) / (log2));
std::vector<cv::KeyPoint>::iterator beginning = keypoints.begin();
std::vector<int>::iterator beginningkscales = kscales.begin();
static const float basicSize06 = basicSize_ * 0.6f;
for (size_t k = 0; k < ksize; k++)
{
unsigned int scale;
scale = std::max((int) (scales_ / lb_scalerange * (log(keypoints[k].size / (basicSize06)) / log2) + 0.5), 0);
scale = std::max((int) (scales_ / lb_scalerange * (std::log(keypoints[k].size / (basicSize06)) / log2) + 0.5), 0);
// saturate
if (scale >= scales_)
scale = scales_ - 1;
@@ -718,14 +718,14 @@ BRISK::~BRISK()
}
void
BRISK::operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const
BRISK::operator()(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints) const
{
computeKeypointsNoOrientation(image, mask, keypoints);
computeDescriptorsAndOrOrientation(image, mask, keypoints, cv::noArray(), false, true, true);
}
void
BRISK::computeKeypointsNoOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints) const
BRISK::computeKeypointsNoOrientation(InputArray _image, InputArray _mask, std::vector<KeyPoint>& keypoints) const
{
Mat image = _image.getMat(), mask = _mask.getMat();
if( image.type() != CV_8UC1 )
@@ -741,13 +741,13 @@ BRISK::computeKeypointsNoOrientation(InputArray _image, InputArray _mask, vector
void
BRISK::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
BRISK::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
(*this)(image, mask, keypoints);
}
void
BRISK::computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors) const
BRISK::computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors) const
{
(*this)(image, Mat(), keypoints, descriptors, true);
}


@@ -41,8 +41,6 @@
#include "precomp.hpp"
using namespace std;
namespace cv
{
@@ -55,7 +53,7 @@ namespace cv
DescriptorExtractor::~DescriptorExtractor()
{}
void DescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const
void DescriptorExtractor::compute( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const
{
if( image.empty() || keypoints.empty() )
{
@@ -69,7 +67,7 @@ void DescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& keypoints
computeImpl( image, keypoints, descriptors );
}
void DescriptorExtractor::compute( const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, vector<Mat>& descCollection ) const
void DescriptorExtractor::compute( const std::vector<Mat>& imageCollection, std::vector<std::vector<KeyPoint> >& pointCollection, std::vector<Mat>& descCollection ) const
{
CV_Assert( imageCollection.size() == pointCollection.size() );
descCollection.resize( imageCollection.size() );
@@ -88,18 +86,18 @@ bool DescriptorExtractor::empty() const
return false;
}
void DescriptorExtractor::removeBorderKeypoints( vector<KeyPoint>& keypoints,
void DescriptorExtractor::removeBorderKeypoints( std::vector<KeyPoint>& keypoints,
Size imageSize, int borderSize )
{
KeyPointsFilter::runByImageBorder( keypoints, imageSize, borderSize );
}
Ptr<DescriptorExtractor> DescriptorExtractor::create(const string& descriptorExtractorType)
Ptr<DescriptorExtractor> DescriptorExtractor::create(const std::string& descriptorExtractorType)
{
if( descriptorExtractorType.find("Opponent") == 0 )
{
size_t pos = string("Opponent").size();
string type = descriptorExtractorType.substr(pos);
size_t pos = std::string("Opponent").size();
std::string type = descriptorExtractorType.substr(pos);
return new OpponentColorDescriptorExtractor(DescriptorExtractor::create(type));
}
@@ -117,7 +115,7 @@ OpponentColorDescriptorExtractor::OpponentColorDescriptorExtractor( const Ptr<De
CV_Assert( !descriptorExtractor.empty() );
}
static void convertBGRImageToOpponentColorSpace( const Mat& bgrImage, vector<Mat>& opponentChannels )
static void convertBGRImageToOpponentColorSpace( const Mat& bgrImage, std::vector<Mat>& opponentChannels )
{
if( bgrImage.type() != CV_8UC3 )
CV_Error( CV_StsBadArg, "input image must be an BGR image of type CV_8UC3" );
@@ -144,23 +142,23 @@ static void convertBGRImageToOpponentColorSpace( const Mat& bgrImage, vector<Mat
struct KP_LessThan
{
KP_LessThan(const vector<KeyPoint>& _kp) : kp(&_kp) {}
KP_LessThan(const std::vector<KeyPoint>& _kp) : kp(&_kp) {}
bool operator()(int i, int j) const
{
return (*kp)[i].class_id < (*kp)[j].class_id;
}
const vector<KeyPoint>* kp;
const std::vector<KeyPoint>* kp;
};
void OpponentColorDescriptorExtractor::computeImpl( const Mat& bgrImage, vector<KeyPoint>& keypoints, Mat& descriptors ) const
void OpponentColorDescriptorExtractor::computeImpl( const Mat& bgrImage, std::vector<KeyPoint>& keypoints, Mat& descriptors ) const
{
vector<Mat> opponentChannels;
std::vector<Mat> opponentChannels;
convertBGRImageToOpponentColorSpace( bgrImage, opponentChannels );
const int N = 3; // channels count
vector<KeyPoint> channelKeypoints[N];
std::vector<KeyPoint> channelKeypoints[N];
Mat channelDescriptors[N];
vector<int> idxs[N];
std::vector<int> idxs[N];
// Compute descriptors three times, once for each Opponent channel to concatenate into a single color descriptor
int maxKeypointsCount = 0;
@@ -181,7 +179,7 @@ void OpponentColorDescriptorExtractor::computeImpl( const Mat& bgrImage, vector<
maxKeypointsCount = std::max( maxKeypointsCount, (int)channelKeypoints[ci].size());
}
vector<KeyPoint> outKeypoints;
std::vector<KeyPoint> outKeypoints;
outKeypoints.reserve( keypoints.size() );
int dSize = descriptorExtractor->descriptorSize();


@@ -41,8 +41,6 @@
#include "precomp.hpp"
using namespace std;
namespace cv
{
@@ -53,7 +51,7 @@ namespace cv
FeatureDetector::~FeatureDetector()
{}
void FeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void FeatureDetector::detect( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
keypoints.clear();
@@ -65,7 +63,7 @@ void FeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, con
detectImpl( image, keypoints, mask );
}
void FeatureDetector::detect(const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, const vector<Mat>& masks ) const
void FeatureDetector::detect(const std::vector<Mat>& imageCollection, std::vector<std::vector<KeyPoint> >& pointCollection, const std::vector<Mat>& masks ) const
{
pointCollection.resize( imageCollection.size() );
for( size_t i = 0; i < imageCollection.size(); i++ )
@@ -83,12 +81,12 @@ bool FeatureDetector::empty() const
return false;
}
void FeatureDetector::removeInvalidPoints( const Mat& mask, vector<KeyPoint>& keypoints )
void FeatureDetector::removeInvalidPoints( const Mat& mask, std::vector<KeyPoint>& keypoints )
{
KeyPointsFilter::runByPixelsMask( keypoints, mask );
}
Ptr<FeatureDetector> FeatureDetector::create( const string& detectorType )
Ptr<FeatureDetector> FeatureDetector::create( const std::string& detectorType )
{
if( detectorType.find("Grid") == 0 )
{
@@ -127,17 +125,17 @@ GFTTDetector::GFTTDetector( int _nfeatures, double _qualityLevel,
{
}
void GFTTDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
void GFTTDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
vector<Point2f> corners;
std::vector<Point2f> corners;
goodFeaturesToTrack( grayImage, corners, nfeatures, qualityLevel, minDistance, mask,
blockSize, useHarrisDetector, k );
keypoints.resize(corners.size());
vector<Point2f>::const_iterator corner_it = corners.begin();
vector<KeyPoint>::iterator keypoint_it = keypoints.begin();
std::vector<Point2f>::const_iterator corner_it = corners.begin();
std::vector<KeyPoint>::iterator keypoint_it = keypoints.begin();
for( ; corner_it != corners.end(); ++corner_it, ++keypoint_it )
{
*keypoint_it = KeyPoint( *corner_it, (float)blockSize );
@@ -159,7 +157,7 @@ DenseFeatureDetector::DenseFeatureDetector( float _initFeatureScale, int _featur
{}
void DenseFeatureDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void DenseFeatureDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
float curScale = static_cast<float>(initFeatureScale);
int curStep = initXyStep;
@@ -203,11 +201,11 @@ struct ResponseComparator
}
};
static void keepStrongest( int N, vector<KeyPoint>& keypoints )
static void keepStrongest( int N, std::vector<KeyPoint>& keypoints )
{
if( (int)keypoints.size() > N )
{
vector<KeyPoint>::iterator nth = keypoints.begin() + N;
std::vector<KeyPoint>::iterator nth = keypoints.begin() + N;
std::nth_element( keypoints.begin(), nth, keypoints.end(), ResponseComparator() );
keypoints.erase( nth, keypoints.end() );
}
@@ -219,7 +217,7 @@ class GridAdaptedFeatureDetectorInvoker
private:
int gridRows_, gridCols_;
int maxPerCell_;
vector<KeyPoint>& keypoints_;
std::vector<KeyPoint>& keypoints_;
const Mat& image_;
const Mat& mask_;
const Ptr<FeatureDetector>& detector_;
@@ -231,7 +229,7 @@ private:
public:
GridAdaptedFeatureDetectorInvoker(const Ptr<FeatureDetector>& detector, const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints, int maxPerCell, int gridRows, int gridCols
GridAdaptedFeatureDetectorInvoker(const Ptr<FeatureDetector>& detector, const Mat& image, const Mat& mask, std::vector<KeyPoint>& keypoints, int maxPerCell, int gridRows, int gridCols
#ifdef HAVE_TBB
, tbb::mutex* kptLock
#endif
@@ -257,7 +255,7 @@ public:
Mat sub_mask;
if (!mask_.empty()) sub_mask = mask_(row_range, col_range);
vector<KeyPoint> sub_keypoints;
std::vector<KeyPoint> sub_keypoints;
sub_keypoints.reserve(maxPerCell_);
detector_->detect( sub_image, sub_keypoints, sub_mask );
@@ -279,7 +277,7 @@ public:
};
} // namespace
void GridAdaptedFeatureDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void GridAdaptedFeatureDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
if (image.empty() || maxTotalKeypoints < gridRows * gridCols)
{
@@ -310,7 +308,7 @@ bool PyramidAdaptedFeatureDetector::empty() const
return detector.empty() || (FeatureDetector*)detector->empty();
}
void PyramidAdaptedFeatureDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void PyramidAdaptedFeatureDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat src = image;
Mat src_mask = mask;
@@ -327,9 +325,9 @@ void PyramidAdaptedFeatureDetector::detectImpl( const Mat& image, vector<KeyPoin
for( int l = 0, multiplier = 1; l <= maxLevel; ++l, multiplier *= 2 )
{
// Detect on current level of the pyramid
vector<KeyPoint> new_pts;
std::vector<KeyPoint> new_pts;
detector->detect( src, new_pts, src_mask );
vector<KeyPoint>::iterator it = new_pts.begin(),
std::vector<KeyPoint>::iterator it = new_pts.begin(),
end = new_pts.end();
for( ; it != end; ++it)
{
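Illustrative client-side sketch for the detector interface above, written in the fully qualified std:: style this commit enforces. It assumes the 2.4-era factory ("GFTT" is one of the registered detector names) and a placeholder image file.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>

int main()
{
    cv::Mat image = cv::imread("scene.png", 0);   // placeholder grayscale test image
    if( image.empty() )
        return 1;

    // explicit std::vector instead of a using-directive
    std::vector<cv::KeyPoint> keypoints;
    cv::Ptr<cv::FeatureDetector> detector = cv::FeatureDetector::create("GFTT");
    detector->detect( image, keypoints );

    // the overload for image collections fills one keypoint vector per image
    std::vector<cv::Mat> images(1, image);
    std::vector<std::vector<cv::KeyPoint> > allKeypoints;
    detector->detect( images, allKeypoints );
    return 0;
}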


@@ -41,8 +41,6 @@
#include "precomp.hpp"
using namespace std;
const int draw_shift_bits = 4;
const int draw_multiplier = 1 << draw_shift_bits;
@@ -90,7 +88,7 @@ static inline void _drawKeypoint( Mat& img, const KeyPoint& p, const Scalar& col
}
}
void drawKeypoints( const Mat& image, const vector<KeyPoint>& keypoints, Mat& outImage,
void drawKeypoints( const Mat& image, const std::vector<KeyPoint>& keypoints, Mat& outImage,
const Scalar& _color, int flags )
{
if( !(flags & DrawMatchesFlags::DRAW_OVER_OUTIMG) )
@@ -113,7 +111,7 @@ void drawKeypoints( const Mat& image, const vector<KeyPoint>& keypoints, Mat& ou
bool isRandColor = _color == Scalar::all(-1);
CV_Assert( !outImage.empty() );
vector<KeyPoint>::const_iterator it = keypoints.begin(),
std::vector<KeyPoint>::const_iterator it = keypoints.begin(),
end = keypoints.end();
for( ; it != end; ++it )
{
@@ -122,8 +120,8 @@ void drawKeypoints( const Mat& image, const vector<KeyPoint>& keypoints, Mat& ou
}
}
static void _prepareImgAndDrawKeypoints( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
static void _prepareImgAndDrawKeypoints( const Mat& img1, const std::vector<KeyPoint>& keypoints1,
const Mat& img2, const std::vector<KeyPoint>& keypoints2,
Mat& outImg, Mat& outImg1, Mat& outImg2,
const Scalar& singlePointColor, int flags )
{
@@ -184,11 +182,11 @@ static inline void _drawMatch( Mat& outImg, Mat& outImg1, Mat& outImg2 ,
color, 1, CV_AA, draw_shift_bits );
}
void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<DMatch>& matches1to2, Mat& outImg,
void drawMatches( const Mat& img1, const std::vector<KeyPoint>& keypoints1,
const Mat& img2, const std::vector<KeyPoint>& keypoints2,
const std::vector<DMatch>& matches1to2, Mat& outImg,
const Scalar& matchColor, const Scalar& singlePointColor,
const vector<char>& matchesMask, int flags )
const std::vector<char>& matchesMask, int flags )
{
if( !matchesMask.empty() && matchesMask.size() != matches1to2.size() )
CV_Error( CV_StsBadSize, "matchesMask must have the same size as matches1to2" );
@@ -213,11 +211,11 @@ void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
}
}
void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<vector<DMatch> >& matches1to2, Mat& outImg,
void drawMatches( const Mat& img1, const std::vector<KeyPoint>& keypoints1,
const Mat& img2, const std::vector<KeyPoint>& keypoints2,
const std::vector<std::vector<DMatch> >& matches1to2, Mat& outImg,
const Scalar& matchColor, const Scalar& singlePointColor,
const vector<vector<char> >& matchesMask, int flags )
const std::vector<std::vector<char> >& matchesMask, int flags )
{
if( !matchesMask.empty() && matchesMask.size() != matches1to2.size() )
CV_Error( CV_StsBadSize, "matchesMask must have the same size as matches1to2" );
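A hedged sketch of how the drawing functions above are typically fed: ORB keypoints and binary descriptors from two 8-bit grayscale images, brute-force matched with Hamming distance, then rendered with cv::drawMatches. The feature budget of 500 is only an example value.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static cv::Mat matchAndDraw( const cv::Mat& img1, const cv::Mat& img2 )
{
    cv::ORB orb( 500 );                    // detector plus binary descriptor
    std::vector<cv::KeyPoint> kp1, kp2;
    cv::Mat desc1, desc2;
    orb( img1, cv::Mat(), kp1, desc1 );    // no mask
    orb( img2, cv::Mat(), kp2, desc2 );

    cv::BFMatcher matcher( cv::NORM_HAMMING );
    std::vector<cv::DMatch> matches;
    matcher.match( desc1, desc2, matches );

    cv::Mat output;
    cv::drawMatches( img1, kp1, img2, kp2, matches, output );
    return output;
}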


@@ -54,7 +54,7 @@ bool DynamicAdaptedFeatureDetector::empty() const
return adjuster_.empty() || adjuster_->empty();
}
void DynamicAdaptedFeatureDetector::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
void DynamicAdaptedFeatureDetector::detectImpl(const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
//for oscillation testing
bool down = false;
@@ -98,7 +98,7 @@ FastAdjuster::FastAdjuster( int init_thresh, bool nonmax, int min_thresh, int ma
min_thresh_(min_thresh), max_thresh_(max_thresh)
{}
void FastAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
void FastAdjuster::detectImpl(const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
FastFeatureDetector(thresh_, nonmax_).detect(image, keypoints, mask);
}
@@ -133,7 +133,7 @@ StarAdjuster::StarAdjuster(double initial_thresh, double min_thresh, double max_
min_thresh_(min_thresh), max_thresh_(max_thresh)
{}
void StarAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
void StarAdjuster::detectImpl(const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
StarFeatureDetector detector_tmp(16, cvRound(thresh_), 10, 8, 3);
detector_tmp.detect(image, keypoints, mask);
@@ -167,7 +167,7 @@ SurfAdjuster::SurfAdjuster( double initial_thresh, double min_thresh, double max
min_thresh_(min_thresh), max_thresh_(max_thresh)
{}
void SurfAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const cv::Mat& mask) const
void SurfAdjuster::detectImpl(const Mat& image, std::vector<KeyPoint>& keypoints, const cv::Mat& mask) const
{
Ptr<FeatureDetector> surf = FeatureDetector::create("SURF");
surf->set("hessianThreshold", thresh_);
@@ -199,7 +199,7 @@ Ptr<AdjusterAdapter> SurfAdjuster::clone() const
return cloned_obj;
}
Ptr<AdjusterAdapter> AdjusterAdapter::create( const string& detectorType )
Ptr<AdjusterAdapter> AdjusterAdapter::create( const std::string& detectorType )
{
Ptr<AdjusterAdapter> adapter;
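The adjusters above are normally driven through DynamicAdaptedFeatureDetector; a minimal sketch, assuming the 2.4 constructor that takes an adjuster plus a target keypoint-count range (the range 100-300 and the 5-iteration cap are illustrative).

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void detectAdaptively( const cv::Mat& grayImage,
                              std::vector<cv::KeyPoint>& keypoints )
{
    // FastAdjuster tunes the FAST threshold until the keypoint count
    // falls into [100, 300], giving up after 5 iterations
    cv::Ptr<cv::AdjusterAdapter> adjuster = cv::AdjusterAdapter::create("FAST");
    cv::DynamicAdaptedFeatureDetector detector( adjuster, 100, 300, 5 );
    detector.detect( grayImage, keypoints );
}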


@@ -44,7 +44,6 @@
#include <limits>
using namespace cv;
using namespace std;
template<typename _Tp> static int solveQuadratic(_Tp a, _Tp b, _Tp c, _Tp& x1, _Tp& x2)
{
@@ -89,7 +88,7 @@ static inline Point2f applyHomography( const Mat_<double>& H, const Point2f& pt
double w = 1./z;
return Point2f( (float)((H(0,0)*pt.x + H(0,1)*pt.y + H(0,2))*w), (float)((H(1,0)*pt.x + H(1,1)*pt.y + H(1,2))*w) );
}
return Point2f( numeric_limits<float>::max(), numeric_limits<float>::max() );
return Point2f( std::numeric_limits<float>::max(), std::numeric_limits<float>::max() );
}
static inline void linearizeHomographyAt( const Mat_<double>& H, const Point2f& pt, Mat_<double>& A )
@@ -108,7 +107,7 @@ static inline void linearizeHomographyAt( const Mat_<double>& H, const Point2f&
A(1,1) = H(1,1)/p3 - p2*H(2,1)/p3_2; // fydx
}
else
A.setTo(Scalar::all(numeric_limits<double>::max()));
A.setTo(Scalar::all(std::numeric_limits<double>::max()));
}
class EllipticKeyPoint
@@ -117,14 +116,14 @@ public:
EllipticKeyPoint();
EllipticKeyPoint( const Point2f& _center, const Scalar& _ellipse );
static void convert( const vector<KeyPoint>& src, vector<EllipticKeyPoint>& dst );
static void convert( const vector<EllipticKeyPoint>& src, vector<KeyPoint>& dst );
static void convert( const std::vector<KeyPoint>& src, std::vector<EllipticKeyPoint>& dst );
static void convert( const std::vector<EllipticKeyPoint>& src, std::vector<KeyPoint>& dst );
static Mat_<double> getSecondMomentsMatrix( const Scalar& _ellipse );
Mat_<double> getSecondMomentsMatrix() const;
void calcProjection( const Mat_<double>& H, EllipticKeyPoint& projection ) const;
static void calcProjection( const vector<EllipticKeyPoint>& src, const Mat_<double>& H, vector<EllipticKeyPoint>& dst );
static void calcProjection( const std::vector<EllipticKeyPoint>& src, const Mat_<double>& H, std::vector<EllipticKeyPoint>& dst );
Point2f center;
Scalar ellipse; // 3 elements a, b, c: ax^2+2bxy+cy^2=1
@@ -178,7 +177,7 @@ void EllipticKeyPoint::calcProjection( const Mat_<double>& H, EllipticKeyPoint&
projection = EllipticKeyPoint( dstCenter, Scalar(dstM(0,0), dstM(0,1), dstM(1,1)) );
}
void EllipticKeyPoint::convert( const vector<KeyPoint>& src, vector<EllipticKeyPoint>& dst )
void EllipticKeyPoint::convert( const std::vector<KeyPoint>& src, std::vector<EllipticKeyPoint>& dst )
{
if( !src.empty() )
{
@@ -193,7 +192,7 @@ void EllipticKeyPoint::convert( const vector<KeyPoint>& src, vector<EllipticKeyP
}
}
void EllipticKeyPoint::convert( const vector<EllipticKeyPoint>& src, vector<KeyPoint>& dst )
void EllipticKeyPoint::convert( const std::vector<EllipticKeyPoint>& src, std::vector<KeyPoint>& dst )
{
if( !src.empty() )
{
@@ -207,26 +206,26 @@ void EllipticKeyPoint::convert( const vector<EllipticKeyPoint>& src, vector<KeyP
}
}
void EllipticKeyPoint::calcProjection( const vector<EllipticKeyPoint>& src, const Mat_<double>& H, vector<EllipticKeyPoint>& dst )
void EllipticKeyPoint::calcProjection( const std::vector<EllipticKeyPoint>& src, const Mat_<double>& H, std::vector<EllipticKeyPoint>& dst )
{
if( !src.empty() )
{
assert( !H.empty() && H.cols == 3 && H.rows == 3);
dst.resize(src.size());
vector<EllipticKeyPoint>::const_iterator srcIt = src.begin();
vector<EllipticKeyPoint>::iterator dstIt = dst.begin();
std::vector<EllipticKeyPoint>::const_iterator srcIt = src.begin();
std::vector<EllipticKeyPoint>::iterator dstIt = dst.begin();
for( ; srcIt != src.end(); ++srcIt, ++dstIt )
srcIt->calcProjection(H, *dstIt);
}
}
static void filterEllipticKeyPointsByImageSize( vector<EllipticKeyPoint>& keypoints, const Size& imgSize )
static void filterEllipticKeyPointsByImageSize( std::vector<EllipticKeyPoint>& keypoints, const Size& imgSize )
{
if( !keypoints.empty() )
{
vector<EllipticKeyPoint> filtered;
std::vector<EllipticKeyPoint> filtered;
filtered.reserve(keypoints.size());
vector<EllipticKeyPoint>::const_iterator it = keypoints.begin();
std::vector<EllipticKeyPoint>::const_iterator it = keypoints.begin();
for( int i = 0; it != keypoints.end(); ++it, i++ )
{
if( it->center.x + it->boundingBox.width < imgSize.width &&
@@ -315,8 +314,8 @@ struct SIdx
};
};
static void computeOneToOneMatchedOverlaps( const vector<EllipticKeyPoint>& keypoints1, const vector<EllipticKeyPoint>& keypoints2t,
bool commonPart, vector<SIdx>& overlaps, float minOverlap )
static void computeOneToOneMatchedOverlaps( const std::vector<EllipticKeyPoint>& keypoints1, const std::vector<EllipticKeyPoint>& keypoints2t,
bool commonPart, std::vector<SIdx>& overlaps, float minOverlap )
{
CV_Assert( minOverlap >= 0.f );
overlaps.clear();
@@ -374,9 +373,9 @@ static void computeOneToOneMatchedOverlaps( const vector<EllipticKeyPoint>& keyp
}
}
sort( overlaps.begin(), overlaps.end() );
std::sort( overlaps.begin(), overlaps.end() );
typedef vector<SIdx>::iterator It;
typedef std::vector<SIdx>::iterator It;
It pos = overlaps.begin();
It end = overlaps.end();
@@ -390,11 +389,11 @@ static void computeOneToOneMatchedOverlaps( const vector<EllipticKeyPoint>& keyp
}
static void calculateRepeatability( const Mat& img1, const Mat& img2, const Mat& H1to2,
const vector<KeyPoint>& _keypoints1, const vector<KeyPoint>& _keypoints2,
const std::vector<KeyPoint>& _keypoints1, const std::vector<KeyPoint>& _keypoints2,
float& repeatability, int& correspondencesCount,
Mat* thresholdedOverlapMask=0 )
{
vector<EllipticKeyPoint> keypoints1, keypoints2, keypoints1t, keypoints2t;
std::vector<EllipticKeyPoint> keypoints1, keypoints2, keypoints1t, keypoints2t;
EllipticKeyPoint::convert( _keypoints1, keypoints1 );
EllipticKeyPoint::convert( _keypoints2, keypoints2 );
@@ -427,7 +426,7 @@ static void calculateRepeatability( const Mat& img1, const Mat& img2, const Mat&
size_t minCount = MIN( size1, size2 );
// calculate overlap errors
vector<SIdx> overlaps;
std::vector<SIdx> overlaps;
computeOneToOneMatchedOverlaps( keypoints1, keypoints2t, ifEvaluateDetectors, overlaps, overlapThreshold/*min overlap*/ );
correspondencesCount = -1;
@@ -453,12 +452,12 @@ static void calculateRepeatability( const Mat& img1, const Mat& img2, const Mat&
}
void cv::evaluateFeatureDetector( const Mat& img1, const Mat& img2, const Mat& H1to2,
vector<KeyPoint>* _keypoints1, vector<KeyPoint>* _keypoints2,
std::vector<KeyPoint>* _keypoints1, std::vector<KeyPoint>* _keypoints2,
float& repeatability, int& correspCount,
const Ptr<FeatureDetector>& _fdetector )
{
Ptr<FeatureDetector> fdetector(_fdetector);
vector<KeyPoint> *keypoints1, *keypoints2, buf1, buf2;
std::vector<KeyPoint> *keypoints1, *keypoints2, buf1, buf2;
keypoints1 = _keypoints1 != 0 ? _keypoints1 : &buf1;
keypoints2 = _keypoints2 != 0 ? _keypoints2 : &buf2;
@@ -489,13 +488,13 @@ static inline float precision( int correctMatchCount, int falseMatchCount )
return correctMatchCount + falseMatchCount ? (float)correctMatchCount / (float)(correctMatchCount + falseMatchCount) : -1;
}
void cv::computeRecallPrecisionCurve( const vector<vector<DMatch> >& matches1to2,
const vector<vector<uchar> >& correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve )
void cv::computeRecallPrecisionCurve( const std::vector<std::vector<DMatch> >& matches1to2,
const std::vector<std::vector<uchar> >& correctMatches1to2Mask,
std::vector<Point2f>& recallPrecisionCurve )
{
CV_Assert( matches1to2.size() == correctMatches1to2Mask.size() );
vector<DMatchForEvaluation> allMatches;
std::vector<DMatchForEvaluation> allMatches;
int correspondenceCount = 0;
for( size_t i = 0; i < matches1to2.size(); i++ )
{
@@ -525,7 +524,7 @@ void cv::computeRecallPrecisionCurve( const vector<vector<DMatch> >& matches1to2
}
}
float cv::getRecall( const vector<Point2f>& recallPrecisionCurve, float l_precision )
float cv::getRecall( const std::vector<Point2f>& recallPrecisionCurve, float l_precision )
{
int nearestPointIndex = getNearestPoint( recallPrecisionCurve, l_precision );
@@ -537,7 +536,7 @@ float cv::getRecall( const vector<Point2f>& recallPrecisionCurve, float l_precis
return recall;
}
int cv::getNearestPoint( const vector<Point2f>& recallPrecisionCurve, float l_precision )
int cv::getNearestPoint( const std::vector<Point2f>& recallPrecisionCurve, float l_precision )
{
int nearestPointIndex = -1;
@@ -559,18 +558,18 @@ int cv::getNearestPoint( const vector<Point2f>& recallPrecisionCurve, float l_pr
}
void cv::evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, const Mat& H1to2,
vector<KeyPoint>& keypoints1, vector<KeyPoint>& keypoints2,
vector<vector<DMatch> >* _matches1to2, vector<vector<uchar> >* _correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve,
std::vector<KeyPoint>& keypoints1, std::vector<KeyPoint>& keypoints2,
std::vector<std::vector<DMatch> >* _matches1to2, std::vector<std::vector<uchar> >* _correctMatches1to2Mask,
std::vector<Point2f>& recallPrecisionCurve,
const Ptr<GenericDescriptorMatcher>& _dmatcher )
{
Ptr<GenericDescriptorMatcher> dmatcher = _dmatcher;
dmatcher->clear();
vector<vector<DMatch> > *matches1to2, buf1;
std::vector<std::vector<DMatch> > *matches1to2, buf1;
matches1to2 = _matches1to2 != 0 ? _matches1to2 : &buf1;
vector<vector<uchar> > *correctMatches1to2Mask, buf2;
std::vector<std::vector<uchar> > *correctMatches1to2Mask, buf2;
correctMatches1to2Mask = _correctMatches1to2Mask != 0 ? _correctMatches1to2Mask : &buf2;
if( keypoints1.empty() )
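A sketch of driving the evaluation entry point above. It assumes a known ground-truth homography between the two views, stored under the node "H" in a YAML file whose name is a placeholder.

#include <cstdio>
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void reportRepeatability( const cv::Mat& img1, const cv::Mat& img2 )
{
    cv::Mat H1to2;
    cv::FileStorage fs( "H1to2.yml", cv::FileStorage::READ );  // placeholder file
    fs["H"] >> H1to2;

    std::vector<cv::KeyPoint> keypoints1, keypoints2;
    float repeatability = 0.f;
    int correspondences = 0;
    cv::evaluateFeatureDetector( img1, img2, H1to2, &keypoints1, &keypoints2,
                                 repeatability, correspondences,
                                 cv::FeatureDetector::create("FAST") );
    std::printf( "repeatability %f, correspondences %d\n",
                 repeatability, correspondences );
}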


@@ -283,7 +283,7 @@ FastFeatureDetector::FastFeatureDetector( int _threshold, bool _nonmaxSuppressio
: threshold(_threshold), nonmaxSuppression(_nonmaxSuppression), type((short)_type)
{}
void FastFeatureDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void FastFeatureDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
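For completeness, a minimal sketch of invoking the FAST detector from client code; the threshold of 20 and the grayscale input are assumptions, not prescribed values.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void detectFast( const cv::Mat& grayImage,
                        std::vector<cv::KeyPoint>& keypoints )
{
    cv::FastFeatureDetector detector( 20, true );  // threshold 20, non-max suppression on
    detector.detect( grayImage, keypoints );
}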


@@ -44,7 +44,7 @@
using namespace cv;
Ptr<Feature2D> Feature2D::create( const string& feature2DType )
Ptr<Feature2D> Feature2D::create( const std::string& feature2DType )
{
return Algorithm::create<Feature2D>("Feature2D." + feature2DType);
}


@@ -114,7 +114,7 @@ void FREAK::buildPattern()
patternScale0 = patternScale;
patternLookup.resize(FREAK_NB_SCALES*FREAK_NB_ORIENTATION*FREAK_NB_POINTS);
double scaleStep = pow(2.0, (double)(nOctaves)/FREAK_NB_SCALES ); // 2 ^ ( (nOctaves-1) /nbScales)
double scaleStep = std::pow(2.0, (double)(nOctaves)/FREAK_NB_SCALES ); // 2 ^ ( (nOctaves-1) /nbScales)
double scalingFactor, alpha, beta, theta = 0;
// pattern definition, radius normalized to 1.0 (outer point position+sigma=1.0)
@@ -132,7 +132,7 @@ void FREAK::buildPattern()
// fill the lookup table
for( int scaleIdx=0; scaleIdx < FREAK_NB_SCALES; ++scaleIdx ) {
patternSizes[scaleIdx] = 0; // proper initialization
scalingFactor = pow(scaleStep,scaleIdx); //scale of the pattern, scaleStep ^ scaleIdx
scalingFactor = std::pow(scaleStep,scaleIdx); //scale of the pattern, scaleStep ^ scaleIdx
for( int orientationIdx = 0; orientationIdx < FREAK_NB_ORIENTATION; ++orientationIdx ) {
theta = double(orientationIdx)* 2*CV_PI/double(FREAK_NB_ORIENTATION); // orientation of the pattern
@@ -239,7 +239,7 @@ void FREAK::computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat
if( scaleNormalized ) {
for( size_t k = keypoints.size(); k--; ) {
//Is k non-zero? If so, decrement it and continue
kpScaleIdx[k] = max( (int)(log(keypoints[k].size/FREAK_SMALLEST_KP_SIZE)*sizeCst+0.5) ,0);
kpScaleIdx[k] = std::max( (int)(std::log(keypoints[k].size/FREAK_SMALLEST_KP_SIZE)*sizeCst+0.5) ,0);
if( kpScaleIdx[k] >= FREAK_NB_SCALES )
kpScaleIdx[k] = FREAK_NB_SCALES-1;
@@ -254,7 +254,7 @@ void FREAK::computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat
}
}
else {
const int scIdx = max( (int)(1.0986122886681*sizeCst+0.5) ,0);
const int scIdx = std::max( (int)(1.0986122886681*sizeCst+0.5) ,0);
for( size_t k = keypoints.size(); k--; ) {
kpScaleIdx[k] = scIdx; // equivalent to the formula when the scale is normalized with a constant size of keypoints[k].size=3*SMALLEST_KP_SIZE
if( kpScaleIdx[k] >= FREAK_NB_SCALES ) {
@@ -495,7 +495,7 @@ uchar FREAK::meanIntensity( const cv::Mat& image, const cv::Mat& integral,
}
// pair selection algorithm from a set of training images and corresponding keypoints
vector<int> FREAK::selectPairs(const std::vector<Mat>& images
std::vector<int> FREAK::selectPairs(const std::vector<Mat>& images
, std::vector<std::vector<KeyPoint> >& keypoints
, const double corrTresh
, bool verbose )
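A sketch of computing FREAK descriptors on independently detected keypoints, assuming the default pattern parameters; the FAST threshold is arbitrary.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static cv::Mat describeWithFreak( const cv::Mat& grayImage )
{
    std::vector<cv::KeyPoint> keypoints;
    cv::FastFeatureDetector detector( 30, true );
    detector.detect( grayImage, keypoints );

    cv::FREAK extractor;   // defaults: orientation- and scale-normalized pattern
    cv::Mat descriptors;
    extractor.compute( grayImage, keypoints, descriptors );  // may drop border keypoints
    return descriptors;
}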


@@ -58,7 +58,7 @@ size_t KeyPoint::hash() const
return _Val;
}
void write(FileStorage& fs, const string& objname, const vector<KeyPoint>& keypoints)
void write(FileStorage& fs, const std::string& objname, const std::vector<KeyPoint>& keypoints)
{
WriteStructContext ws(fs, objname, CV_NODE_SEQ + CV_NODE_FLOW);
@@ -77,7 +77,7 @@ void write(FileStorage& fs, const string& objname, const vector<KeyPoint>& keypo
}
void read(const FileNode& node, vector<KeyPoint>& keypoints)
void read(const FileNode& node, std::vector<KeyPoint>& keypoints)
{
keypoints.resize(0);
FileNodeIterator it = node.begin(), it_end = node.end();
@@ -91,7 +91,7 @@ void read(const FileNode& node, vector<KeyPoint>& keypoints)
void KeyPoint::convert(const std::vector<KeyPoint>& keypoints, std::vector<Point2f>& points2f,
const vector<int>& keypointIndexes)
const std::vector<int>& keypointIndexes)
{
if( keypointIndexes.empty() )
{
@@ -138,8 +138,8 @@ float KeyPoint::overlap( const KeyPoint& kp1, const KeyPoint& kp2 )
float ovrl = 0.f;
// one circle is completely covered by the other => no intersection points!
if( min( a, b ) + c <= max( a, b ) )
return min( a_2, b_2 ) / max( a_2, b_2 );
if( std::min( a, b ) + c <= std::max( a, b ) )
return std::min( a_2, b_2 ) / std::max( a_2, b_2 );
if( c < a + b ) // circles intersect
{
@@ -189,7 +189,7 @@ struct KeypointResponseGreater
};
// takes keypoints and culls them by the response
void KeyPointsFilter::retainBest(vector<KeyPoint>& keypoints, int n_points)
void KeyPointsFilter::retainBest(std::vector<KeyPoint>& keypoints, int n_points)
{
//this is only necessary if the keypoints size is greater than the number of desired points.
if( n_points > 0 && keypoints.size() > (size_t)n_points )
@@ -204,7 +204,7 @@ void KeyPointsFilter::retainBest(vector<KeyPoint>& keypoints, int n_points)
//this is the boundary response, and in the case of FAST may be ambiguous
float ambiguous_response = keypoints[n_points - 1].response;
//use std::partition to grab all of the keypoints with the boundary response.
vector<KeyPoint>::const_iterator new_end =
std::vector<KeyPoint>::const_iterator new_end =
std::partition(keypoints.begin() + n_points, keypoints.end(),
KeypointResponseGreaterThanThreshold(ambiguous_response));
//resize the keypoints, given this new end point. nth_element and partition reordered the points inplace
@@ -225,7 +225,7 @@ struct RoiPredicate
Rect r;
};
void KeyPointsFilter::runByImageBorder( vector<KeyPoint>& keypoints, Size imageSize, int borderSize )
void KeyPointsFilter::runByImageBorder( std::vector<KeyPoint>& keypoints, Size imageSize, int borderSize )
{
if( borderSize > 0)
{
@@ -253,7 +253,7 @@ struct SizePredicate
float minSize, maxSize;
};
void KeyPointsFilter::runByKeypointSize( vector<KeyPoint>& keypoints, float minSize, float maxSize )
void KeyPointsFilter::runByKeypointSize( std::vector<KeyPoint>& keypoints, float minSize, float maxSize )
{
CV_Assert( minSize >= 0 );
CV_Assert( maxSize >= 0);
@@ -277,7 +277,7 @@ private:
MaskPredicate& operator=(const MaskPredicate&);
};
void KeyPointsFilter::runByPixelsMask( vector<KeyPoint>& keypoints, const Mat& mask )
void KeyPointsFilter::runByPixelsMask( std::vector<KeyPoint>& keypoints, const Mat& mask )
{
if( mask.empty() )
return;
@@ -287,7 +287,7 @@ void KeyPointsFilter::runByPixelsMask( vector<KeyPoint>& keypoints, const Mat& m
struct KeyPoint_LessThan
{
KeyPoint_LessThan(const vector<KeyPoint>& _kp) : kp(&_kp) {}
KeyPoint_LessThan(const std::vector<KeyPoint>& _kp) : kp(&_kp) {}
bool operator()(int i, int j) const
{
const KeyPoint& kp1 = (*kp)[i];
@@ -309,14 +309,14 @@ struct KeyPoint_LessThan
return i < j;
}
const vector<KeyPoint>* kp;
const std::vector<KeyPoint>* kp;
};
void KeyPointsFilter::removeDuplicated( vector<KeyPoint>& keypoints )
void KeyPointsFilter::removeDuplicated( std::vector<KeyPoint>& keypoints )
{
int i, j, n = (int)keypoints.size();
vector<int> kpidx(n);
vector<uchar> mask(n, (uchar)1);
std::vector<int> kpidx(n);
std::vector<uchar> mask(n, (uchar)1);
for( i = 0; i < n; i++ )
kpidx[i] = i;
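A sketch tying together the keypoint helpers touched above (filtering, conversion to Point2f, FileStorage serialization), with every container spelled std::vector; the output file name is illustrative.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void filterConvertSave( std::vector<cv::KeyPoint>& keypoints )
{
    // keep the 100 strongest responses, then drop exact duplicates
    cv::KeyPointsFilter::retainBest( keypoints, 100 );
    cv::KeyPointsFilter::removeDuplicated( keypoints );

    // plain 2D points, e.g. for optical flow
    std::vector<cv::Point2f> points;
    cv::KeyPoint::convert( keypoints, points );

    // persist with the write() overload defined above
    cv::FileStorage fs( "keypoints.yml", cv::FileStorage::WRITE );
    cv::write( fs, "keypoints", keypoints );
}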


@@ -49,7 +49,7 @@
namespace cv
{
Mat windowedMatchingMask( const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
Mat windowedMatchingMask( const std::vector<KeyPoint>& keypoints1, const std::vector<KeyPoint>& keypoints2,
float maxDeltaX, float maxDeltaY )
{
if( keypoints1.empty() || keypoints2.empty() )
@@ -83,7 +83,7 @@ DescriptorMatcher::DescriptorCollection::DescriptorCollection( const DescriptorC
DescriptorMatcher::DescriptorCollection::~DescriptorCollection()
{}
void DescriptorMatcher::DescriptorCollection::set( const vector<Mat>& descriptors )
void DescriptorMatcher::DescriptorCollection::set( const std::vector<Mat>& descriptors )
{
clear();
@@ -175,7 +175,7 @@ int DescriptorMatcher::DescriptorCollection::size() const
/*
* DescriptorMatcher
*/
static void convertMatches( const vector<vector<DMatch> >& knnMatches, vector<DMatch>& matches )
static void convertMatches( const std::vector<std::vector<DMatch> >& knnMatches, std::vector<DMatch>& matches )
{
matches.clear();
matches.reserve( knnMatches.size() );
@@ -190,12 +190,12 @@ static void convertMatches( const vector<vector<DMatch> >& knnMatches, vector<DM
DescriptorMatcher::~DescriptorMatcher()
{}
void DescriptorMatcher::add( const vector<Mat>& descriptors )
void DescriptorMatcher::add( const std::vector<Mat>& descriptors )
{
trainDescCollection.insert( trainDescCollection.end(), descriptors.begin(), descriptors.end() );
}
const vector<Mat>& DescriptorMatcher::getTrainDescriptors() const
const std::vector<Mat>& DescriptorMatcher::getTrainDescriptors() const
{
return trainDescCollection;
}
@@ -213,37 +213,37 @@ bool DescriptorMatcher::empty() const
void DescriptorMatcher::train()
{}
void DescriptorMatcher::match( const Mat& queryDescriptors, const Mat& trainDescriptors, vector<DMatch>& matches, const Mat& mask ) const
void DescriptorMatcher::match( const Mat& queryDescriptors, const Mat& trainDescriptors, std::vector<DMatch>& matches, const Mat& mask ) const
{
Ptr<DescriptorMatcher> tempMatcher = clone(true);
tempMatcher->add( vector<Mat>(1, trainDescriptors) );
tempMatcher->match( queryDescriptors, matches, vector<Mat>(1, mask) );
tempMatcher->add( std::vector<Mat>(1, trainDescriptors) );
tempMatcher->match( queryDescriptors, matches, std::vector<Mat>(1, mask) );
}
void DescriptorMatcher::knnMatch( const Mat& queryDescriptors, const Mat& trainDescriptors, vector<vector<DMatch> >& matches, int knn,
void DescriptorMatcher::knnMatch( const Mat& queryDescriptors, const Mat& trainDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
const Mat& mask, bool compactResult ) const
{
Ptr<DescriptorMatcher> tempMatcher = clone(true);
tempMatcher->add( vector<Mat>(1, trainDescriptors) );
tempMatcher->knnMatch( queryDescriptors, matches, knn, vector<Mat>(1, mask), compactResult );
tempMatcher->add( std::vector<Mat>(1, trainDescriptors) );
tempMatcher->knnMatch( queryDescriptors, matches, knn, std::vector<Mat>(1, mask), compactResult );
}
void DescriptorMatcher::radiusMatch( const Mat& queryDescriptors, const Mat& trainDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
void DescriptorMatcher::radiusMatch( const Mat& queryDescriptors, const Mat& trainDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const Mat& mask, bool compactResult ) const
{
Ptr<DescriptorMatcher> tempMatcher = clone(true);
tempMatcher->add( vector<Mat>(1, trainDescriptors) );
tempMatcher->radiusMatch( queryDescriptors, matches, maxDistance, vector<Mat>(1, mask), compactResult );
tempMatcher->add( std::vector<Mat>(1, trainDescriptors) );
tempMatcher->radiusMatch( queryDescriptors, matches, maxDistance, std::vector<Mat>(1, mask), compactResult );
}
void DescriptorMatcher::match( const Mat& queryDescriptors, vector<DMatch>& matches, const vector<Mat>& masks )
void DescriptorMatcher::match( const Mat& queryDescriptors, std::vector<DMatch>& matches, const std::vector<Mat>& masks )
{
vector<vector<DMatch> > knnMatches;
std::vector<std::vector<DMatch> > knnMatches;
knnMatch( queryDescriptors, knnMatches, 1, masks, true /*compactResult*/ );
convertMatches( knnMatches, matches );
}
void DescriptorMatcher::checkMasks( const vector<Mat>& masks, int queryDescriptorsCount ) const
void DescriptorMatcher::checkMasks( const std::vector<Mat>& masks, int queryDescriptorsCount ) const
{
if( isMaskSupported() && !masks.empty() )
{
@@ -262,8 +262,8 @@ void DescriptorMatcher::checkMasks( const vector<Mat>& masks, int queryDescripto
}
}
void DescriptorMatcher::knnMatch( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
void DescriptorMatcher::knnMatch( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
const std::vector<Mat>& masks, bool compactResult )
{
matches.clear();
if( empty() || queryDescriptors.empty() )
@@ -277,8 +277,8 @@ void DescriptorMatcher::knnMatch( const Mat& queryDescriptors, vector<vector<DMa
knnMatchImpl( queryDescriptors, matches, knn, masks, compactResult );
}
void DescriptorMatcher::radiusMatch( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
void DescriptorMatcher::radiusMatch( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks, bool compactResult )
{
matches.clear();
if( empty() || queryDescriptors.empty() )
@@ -303,7 +303,7 @@ bool DescriptorMatcher::isPossibleMatch( const Mat& mask, int queryIdx, int trai
return mask.empty() || mask.at<uchar>(queryIdx, trainIdx);
}
bool DescriptorMatcher::isMaskedOut( const vector<Mat>& masks, int queryIdx )
bool DescriptorMatcher::isMaskedOut( const std::vector<Mat>& masks, int queryIdx )
{
size_t outCount = 0;
for( size_t i = 0; i < masks.size(); i++ )
@@ -337,8 +337,8 @@ Ptr<DescriptorMatcher> BFMatcher::clone( bool emptyTrainData ) const
}
void BFMatcher::knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
void BFMatcher::knnMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
const std::vector<Mat>& masks, bool compactResult )
{
const int IMGIDX_SHIFT = 18;
const int IMGIDX_ONE = (1 << IMGIDX_SHIFT);
@@ -380,8 +380,8 @@ void BFMatcher::knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch>
const float* distptr = dist.ptr<float>(qIdx);
const int* nidxptr = nidx.ptr<int>(qIdx);
matches.push_back( vector<DMatch>() );
vector<DMatch>& mq = matches.back();
matches.push_back( std::vector<DMatch>() );
std::vector<DMatch>& mq = matches.back();
mq.reserve(knn);
for( int k = 0; k < nidx.cols; k++ )
@@ -398,8 +398,8 @@ void BFMatcher::knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch>
}
void BFMatcher::radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches,
float maxDistance, const vector<Mat>& masks, bool compactResult )
void BFMatcher::radiusMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches,
float maxDistance, const std::vector<Mat>& masks, bool compactResult )
{
if( queryDescriptors.empty() || trainDescCollection.empty() )
{
@@ -428,7 +428,7 @@ void BFMatcher::radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMat
{
const float* distptr = distf.ptr<float>(qIdx);
vector<DMatch>& mq = matches[qIdx];
std::vector<DMatch>& mq = matches[qIdx];
for( int k = 0; k < distf.cols; k++ )
{
if( distptr[k] <= maxDistance )
@@ -456,7 +456,7 @@ void BFMatcher::radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMat
/*
* Factory function for DescriptorMatcher creating
*/
Ptr<DescriptorMatcher> DescriptorMatcher::create( const string& descriptorMatcherType )
Ptr<DescriptorMatcher> DescriptorMatcher::create( const std::string& descriptorMatcherType )
{
DescriptorMatcher* dm = 0;
if( !descriptorMatcherType.compare( "FlannBased" ) )
@@ -501,7 +501,7 @@ FlannBasedMatcher::FlannBasedMatcher( const Ptr<flann::IndexParams>& _indexParam
CV_Assert( !_searchParams.empty() );
}
void FlannBasedMatcher::add( const vector<Mat>& descriptors )
void FlannBasedMatcher::add( const std::vector<Mat>& descriptors )
{
DescriptorMatcher::add( descriptors );
for( size_t i = 0; i < descriptors.size(); i++ )
@@ -740,7 +740,7 @@ Ptr<DescriptorMatcher> FlannBasedMatcher::clone( bool emptyTrainData ) const
}
void FlannBasedMatcher::convertToDMatches( const DescriptorCollection& collection, const Mat& indices, const Mat& dists,
vector<vector<DMatch> >& matches )
std::vector<std::vector<DMatch> >& matches )
{
matches.resize( indices.rows );
for( int i = 0; i < indices.rows; i++ )
@@ -763,8 +763,8 @@ void FlannBasedMatcher::convertToDMatches( const DescriptorCollection& collectio
}
}
void FlannBasedMatcher::knnMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
void FlannBasedMatcher::knnMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
const std::vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
Mat indices( queryDescriptors.rows, knn, CV_32SC1 );
Mat dists( queryDescriptors.rows, knn, CV_32FC1);
@@ -773,8 +773,8 @@ void FlannBasedMatcher::knnMatchImpl( const Mat& queryDescriptors, vector<vector
convertToDMatches( mergedDescriptors, indices, dists, matches );
}
void FlannBasedMatcher::radiusMatchImpl( const Mat& queryDescriptors, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
void FlannBasedMatcher::radiusMatchImpl( const Mat& queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
const int count = mergedDescriptors.size(); // TODO do count as param?
Mat indices( queryDescriptors.rows, count, CV_32SC1, Scalar::all(-1) );
@@ -812,8 +812,8 @@ GenericDescriptorMatcher::KeyPointCollection::KeyPointCollection( const KeyPoint
std::copy( collection.startIndices.begin(), collection.startIndices.end(), startIndices.begin() );
}
void GenericDescriptorMatcher::KeyPointCollection::add( const vector<Mat>& _images,
const vector<vector<KeyPoint> >& _points )
void GenericDescriptorMatcher::KeyPointCollection::add( const std::vector<Mat>& _images,
const std::vector<std::vector<KeyPoint> >& _points )
{
CV_Assert( !_images.empty() );
CV_Assert( _images.size() == _points.size() );
@@ -856,12 +856,12 @@ size_t GenericDescriptorMatcher::KeyPointCollection::imageCount() const
return images.size();
}
const vector<vector<KeyPoint> >& GenericDescriptorMatcher::KeyPointCollection::getKeypoints() const
const std::vector<std::vector<KeyPoint> >& GenericDescriptorMatcher::KeyPointCollection::getKeypoints() const
{
return keypoints;
}
const vector<KeyPoint>& GenericDescriptorMatcher::KeyPointCollection::getKeypoints( int imgIdx ) const
const std::vector<KeyPoint>& GenericDescriptorMatcher::KeyPointCollection::getKeypoints( int imgIdx ) const
{
CV_Assert( imgIdx < (int)imageCount() );
return keypoints[imgIdx];
@@ -897,7 +897,7 @@ void GenericDescriptorMatcher::KeyPointCollection::getLocalIdx( int globalPointI
localPointIdx = globalPointIdx - startIndices[imgIdx];
}
const vector<Mat>& GenericDescriptorMatcher::KeyPointCollection::getImages() const
const std::vector<Mat>& GenericDescriptorMatcher::KeyPointCollection::getImages() const
{
return images;
}
@@ -917,8 +917,8 @@ GenericDescriptorMatcher::GenericDescriptorMatcher()
GenericDescriptorMatcher::~GenericDescriptorMatcher()
{}
void GenericDescriptorMatcher::add( const vector<Mat>& images,
vector<vector<KeyPoint> >& keypoints )
void GenericDescriptorMatcher::add( const std::vector<Mat>& images,
std::vector<std::vector<KeyPoint> >& keypoints )
{
CV_Assert( !images.empty() );
CV_Assert( images.size() == keypoints.size() );
@@ -933,12 +933,12 @@ void GenericDescriptorMatcher::add( const vector<Mat>& images,
trainPointCollection.add( images, keypoints );
}
const vector<Mat>& GenericDescriptorMatcher::getTrainImages() const
const std::vector<Mat>& GenericDescriptorMatcher::getTrainImages() const
{
return trainPointCollection.getImages();
}
const vector<vector<KeyPoint> >& GenericDescriptorMatcher::getTrainKeypoints() const
const std::vector<std::vector<KeyPoint> >& GenericDescriptorMatcher::getTrainKeypoints() const
{
return trainPointCollection.getKeypoints();
}
@@ -951,10 +951,10 @@ void GenericDescriptorMatcher::clear()
void GenericDescriptorMatcher::train()
{}
void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints ) const
void GenericDescriptorMatcher::classify( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints ) const
{
vector<DMatch> matches;
std::vector<DMatch> matches;
match( queryImage, queryKeypoints, trainImage, trainKeypoints, matches );
// remap keypoint indices to descriptors
@@ -962,9 +962,9 @@ void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>
queryKeypoints[matches[i].queryIdx].class_id = trainKeypoints[matches[i].trainIdx].class_id;
}
void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>& queryKeypoints )
void GenericDescriptorMatcher::classify( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints )
{
vector<DMatch> matches;
std::vector<DMatch> matches;
match( queryImage, queryKeypoints, matches );
// remap keypoint indices to descriptors
@@ -972,51 +972,51 @@ void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>
queryKeypoints[matches[i].queryIdx].class_id = trainPointCollection.getKeyPoint( matches[i].trainIdx, matches[i].trainIdx ).class_id;
}
void GenericDescriptorMatcher::match( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<DMatch>& matches, const Mat& mask ) const
void GenericDescriptorMatcher::match( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<DMatch>& matches, const Mat& mask ) const
{
Ptr<GenericDescriptorMatcher> tempMatcher = clone( true );
vector<vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->match( queryImage, queryKeypoints, matches, vector<Mat>(1, mask) );
std::vector<std::vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( std::vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->match( queryImage, queryKeypoints, matches, std::vector<Mat>(1, mask) );
vecTrainPoints[0].swap( trainKeypoints );
}
void GenericDescriptorMatcher::knnMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<vector<DMatch> >& matches, int knn, const Mat& mask, bool compactResult ) const
void GenericDescriptorMatcher::knnMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<std::vector<DMatch> >& matches, int knn, const Mat& mask, bool compactResult ) const
{
Ptr<GenericDescriptorMatcher> tempMatcher = clone( true );
vector<vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->knnMatch( queryImage, queryKeypoints, matches, knn, vector<Mat>(1, mask), compactResult );
std::vector<std::vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( std::vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->knnMatch( queryImage, queryKeypoints, matches, knn, std::vector<Mat>(1, mask), compactResult );
vecTrainPoints[0].swap( trainKeypoints );
}
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, vector<KeyPoint>& trainKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
const Mat& trainImage, std::vector<KeyPoint>& trainKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const Mat& mask, bool compactResult ) const
{
Ptr<GenericDescriptorMatcher> tempMatcher = clone( true );
vector<vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->radiusMatch( queryImage, queryKeypoints, matches, maxDistance, vector<Mat>(1, mask), compactResult );
std::vector<std::vector<KeyPoint> > vecTrainPoints(1, trainKeypoints);
tempMatcher->add( std::vector<Mat>(1, trainImage), vecTrainPoints );
tempMatcher->radiusMatch( queryImage, queryKeypoints, matches, maxDistance, std::vector<Mat>(1, mask), compactResult );
vecTrainPoints[0].swap( trainKeypoints );
}
void GenericDescriptorMatcher::match( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<DMatch>& matches, const vector<Mat>& masks )
void GenericDescriptorMatcher::match( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<DMatch>& matches, const std::vector<Mat>& masks )
{
vector<vector<DMatch> > knnMatches;
std::vector<std::vector<DMatch> > knnMatches;
knnMatch( queryImage, queryKeypoints, knnMatches, 1, masks, false );
convertMatches( knnMatches, matches );
}
void GenericDescriptorMatcher::knnMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
void GenericDescriptorMatcher::knnMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, int knn,
const std::vector<Mat>& masks, bool compactResult )
{
matches.clear();
@@ -1030,9 +1030,9 @@ void GenericDescriptorMatcher::knnMatch( const Mat& queryImage, vector<KeyPoint>
knnMatchImpl( queryImage, queryKeypoints, matches, knn, masks, compactResult );
}
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks, bool compactResult )
{
matches.clear();
@@ -1060,8 +1060,8 @@ bool GenericDescriptorMatcher::empty() const
/*
* Factory function for GenericDescriptorMatch creating
*/
Ptr<GenericDescriptorMatcher> GenericDescriptorMatcher::create( const string& genericDescritptorMatcherType,
const string &paramsFilename )
Ptr<GenericDescriptorMatcher> GenericDescriptorMatcher::create( const std::string& genericDescritptorMatcherType,
const std::string &paramsFilename )
{
Ptr<GenericDescriptorMatcher> descriptorMatcher =
Algorithm::create<GenericDescriptorMatcher>("DescriptorMatcher." + genericDescritptorMatcherType);
@@ -1092,10 +1092,10 @@ VectorDescriptorMatcher::VectorDescriptorMatcher( const Ptr<DescriptorExtractor>
VectorDescriptorMatcher::~VectorDescriptorMatcher()
{}
void VectorDescriptorMatcher::add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection )
void VectorDescriptorMatcher::add( const std::vector<Mat>& imgCollection,
std::vector<std::vector<KeyPoint> >& pointCollection )
{
vector<Mat> descriptors;
std::vector<Mat> descriptors;
extractor->compute( imgCollection, pointCollection, descriptors );
matcher->add( descriptors );
@@ -1120,18 +1120,18 @@ bool VectorDescriptorMatcher::isMaskSupported()
return matcher->isMaskSupported();
}
void VectorDescriptorMatcher::knnMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
void VectorDescriptorMatcher::knnMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, int knn,
const std::vector<Mat>& masks, bool compactResult )
{
Mat queryDescriptors;
extractor->compute( queryImage, queryKeypoints, queryDescriptors );
matcher->knnMatch( queryDescriptors, matches, knn, masks, compactResult );
}
void VectorDescriptorMatcher::radiusMatchImpl( const Mat& queryImage, vector<KeyPoint>& queryKeypoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
void VectorDescriptorMatcher::radiusMatchImpl( const Mat& queryImage, std::vector<KeyPoint>& queryKeypoints,
std::vector<std::vector<DMatch> >& matches, float maxDistance,
const std::vector<Mat>& masks, bool compactResult )
{
Mat queryDescriptors;
extractor->compute( queryImage, queryKeypoints, queryDescriptors );
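A hedged sketch of the matcher interface as seen from client code: a k-nearest-neighbour query followed by the usual ratio test. The Hamming norm and the 0.8 ratio are example choices for binary descriptors.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void ratioTestMatch( const cv::Mat& queryDescriptors,
                            const cv::Mat& trainDescriptors,
                            std::vector<cv::DMatch>& good )
{
    cv::BFMatcher matcher( cv::NORM_HAMMING );
    std::vector<std::vector<cv::DMatch> > knn;
    matcher.knnMatch( queryDescriptors, trainDescriptors, knn, 2 );

    good.clear();
    for( size_t i = 0; i < knn.size(); i++ )
    {
        // keep a match only if it is clearly better than the runner-up
        if( knn[i].size() == 2 && knn[i][0].distance < 0.8f * knn[i][1].distance )
            good.push_back( knn[i][0] );
    }
}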


@@ -1263,7 +1263,7 @@ MSER::MSER( int _delta, int _min_area, int _max_area,
{
}
void MSER::operator()( const Mat& image, vector<vector<Point> >& dstcontours, const Mat& mask ) const
void MSER::operator()( const Mat& image, std::vector<std::vector<Point> >& dstcontours, const Mat& mask ) const
{
CvMat _image = image, _mask, *pmask = 0;
if( mask.data )
@@ -1281,19 +1281,19 @@ void MSER::operator()( const Mat& image, vector<vector<Point> >& dstcontours, co
}
void MserFeatureDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void MserFeatureDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
vector<vector<Point> > msers;
std::vector<std::vector<Point> > msers;
(*this)(image, msers, mask);
vector<vector<Point> >::const_iterator contour_it = msers.begin();
std::vector<std::vector<Point> >::const_iterator contour_it = msers.begin();
Rect r(0, 0, image.cols, image.rows);
for( ; contour_it != msers.end(); ++contour_it )
{
// TODO check transformation from MSER region to KeyPoint
RotatedRect rect = fitEllipse(Mat(*contour_it));
float diam = sqrt(rect.size.height*rect.size.width);
float diam = std::sqrt(rect.size.height*rect.size.width);
if( diam > std::numeric_limits<float>::epsilon() && r.contains(rect.center) )
keypoints.push_back( KeyPoint(rect.center, diam) );
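A sketch of calling the MSER operator above with default construction parameters; each detected region comes back as a std::vector of points.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static int countMserRegions( const cv::Mat& grayImage )
{
    cv::MSER mser;   // default delta and area limits
    std::vector<std::vector<cv::Point> > regions;
    mser( grayImage, regions, cv::Mat() );   // no mask
    return (int)regions.size();
}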


@@ -50,7 +50,7 @@ const int DESCRIPTOR_SIZE = 32;
* blockSize x blockSize patch at given points in an image
*/
static void
HarrisResponses(const Mat& img, vector<KeyPoint>& pts, int blockSize, float harris_k)
HarrisResponses(const Mat& img, std::vector<KeyPoint>& pts, int blockSize, float harris_k)
{
CV_Assert( img.type() == CV_8UC1 && blockSize*blockSize <= 2048 );
@@ -95,7 +95,7 @@ HarrisResponses(const Mat& img, vector<KeyPoint>& pts, int blockSize, float harr
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
static float IC_Angle(const Mat& image, const int half_k, Point2f pt,
const vector<int> & u_max)
const std::vector<int> & u_max)
{
int m_01 = 0, m_10 = 0;
@@ -246,7 +246,7 @@ static void computeOrbDescriptor(const KeyPoint& kpt,
}
static void initializeOrbPattern( const Point* pattern0, vector<Point>& pattern, int ntuples, int tupleSize, int poolSize )
static void initializeOrbPattern( const Point* pattern0, std::vector<Point>& pattern, int ntuples, int tupleSize, int poolSize )
{
RNG rng(0x12345678);
int i, k, k1;
@@ -577,7 +577,7 @@ int ORB::descriptorType() const
* @param mask the mask to apply
* @param keypoints the resulting keypoints
*/
void ORB::operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const
void ORB::operator()(InputArray image, InputArray mask, std::vector<KeyPoint>& keypoints) const
{
(*this)(image, mask, keypoints, noArray(), false);
}
@@ -589,11 +589,11 @@ void ORB::operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoi
* @param scale the scale at which we compute the orientation
* @param keypoints the resulting keypoints
*/
static void computeOrientation(const Mat& image, vector<KeyPoint>& keypoints,
int halfPatchSize, const vector<int>& umax)
static void computeOrientation(const Mat& image, std::vector<KeyPoint>& keypoints,
int halfPatchSize, const std::vector<int>& umax)
{
// Process each keypoint
for (vector<KeyPoint>::iterator keypoint = keypoints.begin(),
for (std::vector<KeyPoint>::iterator keypoint = keypoints.begin(),
keypointEnd = keypoints.end(); keypoint != keypointEnd; ++keypoint)
{
keypoint->angle = IC_Angle(image, halfPatchSize, keypoint->pt, umax);
@@ -606,18 +606,18 @@ static void computeOrientation(const Mat& image, vector<KeyPoint>& keypoints,
* @param mask_pyramid the masks to apply at every level
* @param keypoints the resulting keypoints, clustered per level
*/
static void computeKeyPoints(const vector<Mat>& imagePyramid,
const vector<Mat>& maskPyramid,
vector<vector<KeyPoint> >& allKeypoints,
static void computeKeyPoints(const std::vector<Mat>& imagePyramid,
const std::vector<Mat>& maskPyramid,
std::vector<std::vector<KeyPoint> >& allKeypoints,
int nfeatures, int firstLevel, double scaleFactor,
int edgeThreshold, int patchSize, int scoreType )
{
int nlevels = (int)imagePyramid.size();
vector<int> nfeaturesPerLevel(nlevels);
std::vector<int> nfeaturesPerLevel(nlevels);
// fill the extractors and descriptors for the corresponding scales
float factor = (float)(1.0 / scaleFactor);
float ndesiredFeaturesPerScale = nfeatures*(1 - factor)/(1 - (float)pow((double)factor, (double)nlevels));
float ndesiredFeaturesPerScale = nfeatures*(1 - factor)/(1 - (float)std::pow((double)factor, (double)nlevels));
int sumFeatures = 0;
for( int level = 0; level < nlevels-1; level++ )
@@ -633,12 +633,12 @@ static void computeKeyPoints(const vector<Mat>& imagePyramid,
// pre-compute the end of a row in a circular patch
int halfPatchSize = patchSize / 2;
vector<int> umax(halfPatchSize + 2);
std::vector<int> umax(halfPatchSize + 2);
int v, v0, vmax = cvFloor(halfPatchSize * sqrt(2.f) / 2 + 1);
int vmin = cvCeil(halfPatchSize * sqrt(2.f) / 2);
int v, v0, vmax = cvFloor(halfPatchSize * std::sqrt(2.f) / 2 + 1);
int vmin = cvCeil(halfPatchSize * std::sqrt(2.f) / 2);
for (v = 0; v <= vmax; ++v)
umax[v] = cvRound(sqrt((double)halfPatchSize * halfPatchSize - v * v));
umax[v] = cvRound(std::sqrt((double)halfPatchSize * halfPatchSize - v * v));
// Make sure we are symmetric
for (v = halfPatchSize, v0 = 0; v >= vmin; --v)
@@ -656,7 +656,7 @@ static void computeKeyPoints(const vector<Mat>& imagePyramid,
int featuresNum = nfeaturesPerLevel[level];
allKeypoints[level].reserve(featuresNum*2);
vector<KeyPoint> & keypoints = allKeypoints[level];
std::vector<KeyPoint> & keypoints = allKeypoints[level];
// Detect FAST features, 20 is a good threshold
FastFeatureDetector fd(20, true);
@@ -680,7 +680,7 @@ static void computeKeyPoints(const vector<Mat>& imagePyramid,
float sf = getScale(level, firstLevel, scaleFactor);
// Set the level of the coordinates
for (vector<KeyPoint>::iterator keypoint = keypoints.begin(),
for (std::vector<KeyPoint>::iterator keypoint = keypoints.begin(),
keypointEnd = keypoints.end(); keypoint != keypointEnd; ++keypoint)
{
keypoint->octave = level;
@@ -699,8 +699,8 @@ static void computeKeyPoints(const vector<Mat>& imagePyramid,
* @param keypoints the keypoints to use
* @param descriptors the resulting descriptors
*/
static void computeDescriptors(const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors,
const vector<Point>& pattern, int dsize, int WTA_K)
static void computeDescriptors(const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors,
const std::vector<Point>& pattern, int dsize, int WTA_K)
{
//convert to grayscale if more than one color
CV_Assert(image.type() == CV_8UC1);
@@ -720,7 +720,7 @@ static void computeDescriptors(const Mat& image, vector<KeyPoint>& keypoints, Ma
* @param do_keypoints if true, the keypoints are computed, otherwise used as an input
* @param do_descriptors if true, also computes the descriptors
*/
void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _keypoints,
void ORB::operator()( InputArray _image, InputArray _mask, std::vector<KeyPoint>& _keypoints,
OutputArray _descriptors, bool useProvidedKeypoints) const
{
CV_Assert(patchSize >= 2);
@@ -760,7 +760,7 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
}
// Pre-compute the scale pyramids
vector<Mat> imagePyramid(levelsNum), maskPyramid(levelsNum);
std::vector<Mat> imagePyramid(levelsNum), maskPyramid(levelsNum);
for (int level = 0; level < levelsNum; ++level)
{
float scale = 1/getScale(level, firstLevel, scaleFactor);
@@ -811,7 +811,7 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
}
// Pre-compute the keypoints (we keep the best over all scales, so this has to be done beforehand)
vector < vector<KeyPoint> > allKeypoints;
std::vector < std::vector<KeyPoint> > allKeypoints;
if( do_keypoints )
{
// Get keypoints, those will be far enough from the border that no check will be required for the descriptor
@@ -820,18 +820,18 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
edgeThreshold, patchSize, scoreType);
// make sure we have the right number of keypoints
/*vector<KeyPoint> temp;
/*std::vector<KeyPoint> temp;
for (int level = 0; level < n_levels; ++level)
{
vector<KeyPoint>& keypoints = all_keypoints[level];
std::vector<KeyPoint>& keypoints = all_keypoints[level];
temp.insert(temp.end(), keypoints.begin(), keypoints.end());
keypoints.clear();
}
KeyPoint::retainBest(temp, n_features_);
for (vector<KeyPoint>::iterator keypoint = temp.begin(),
for (std::vector<KeyPoint>::iterator keypoint = temp.begin(),
keypoint_end = temp.end(); keypoint != keypoint_end; ++keypoint)
all_keypoints[keypoint->octave].push_back(*keypoint);*/
}
@@ -842,7 +842,7 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
// Cluster the input keypoints depending on the level they were computed at
allKeypoints.resize(levelsNum);
for (vector<KeyPoint>::iterator keypoint = _keypoints.begin(),
for (std::vector<KeyPoint>::iterator keypoint = _keypoints.begin(),
keypointEnd = _keypoints.end(); keypoint != keypointEnd; ++keypoint)
allKeypoints[keypoint->octave].push_back(*keypoint);
@@ -852,16 +852,16 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
if (level == firstLevel)
continue;
vector<KeyPoint> & keypoints = allKeypoints[level];
std::vector<KeyPoint> & keypoints = allKeypoints[level];
float scale = 1/getScale(level, firstLevel, scaleFactor);
for (vector<KeyPoint>::iterator keypoint = keypoints.begin(),
for (std::vector<KeyPoint>::iterator keypoint = keypoints.begin(),
keypointEnd = keypoints.end(); keypoint != keypointEnd; ++keypoint)
keypoint->pt *= scale;
}
}
Mat descriptors;
vector<Point> pattern;
std::vector<Point> pattern;
if( do_descriptors )
{
@@ -902,7 +902,7 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
for (int level = 0; level < levelsNum; ++level)
{
// Get the features and compute their orientation
vector<KeyPoint>& keypoints = allKeypoints[level];
std::vector<KeyPoint>& keypoints = allKeypoints[level];
int nkeypoints = (int)keypoints.size();
// Compute the descriptors
@@ -926,7 +926,7 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
if (level != firstLevel)
{
float scale = getScale(level, firstLevel, scaleFactor);
for (vector<KeyPoint>::iterator keypoint = keypoints.begin(),
for (std::vector<KeyPoint>::iterator keypoint = keypoints.begin(),
keypointEnd = keypoints.end(); keypoint != keypointEnd; ++keypoint)
keypoint->pt *= scale;
}
@@ -935,12 +935,12 @@ void ORB::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& _ke
}
}
void ORB::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
void ORB::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask) const
{
(*this)(image, mask, keypoints, noArray(), false);
}
void ORB::computeImpl( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors) const
void ORB::computeImpl( const Mat& image, std::vector<KeyPoint>& keypoints, Mat& descriptors) const
{
(*this)(image, Mat(), keypoints, descriptors, true);
}
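A sketch of driving ORB end to end through the operator() above, producing keypoints and descriptors in one pass; the pyramid settings are the usual defaults written out explicitly.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void runOrb( const cv::Mat& grayImage,
                    std::vector<cv::KeyPoint>& keypoints,
                    cv::Mat& descriptors )
{
    cv::ORB orb( 500, 1.2f, 8 );   // nfeatures, scale factor, pyramid levels
    // detect and describe in a single call; no mask, keypoints not pre-provided
    orb( grayImage, cv::Mat(), keypoints, descriptors, false );
}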


@@ -334,7 +334,7 @@ static bool StarDetectorSuppressLines( const Mat& responses, const Mat& sizes, P
static void
StarDetectorSuppressNonmax( const Mat& responses, const Mat& sizes,
vector<KeyPoint>& keypoints, int border,
std::vector<KeyPoint>& keypoints, int border,
int responseThreshold,
int lineThresholdProjected,
int lineThresholdBinarized,
@@ -426,7 +426,7 @@ StarDetector::StarDetector(int _maxSize, int _responseThreshold,
{}
void StarDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
void StarDetector::detectImpl( const Mat& image, std::vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
@@ -435,7 +435,7 @@ void StarDetector::detectImpl( const Mat& image, vector<KeyPoint>& keypoints, co
KeyPointsFilter::runByPixelsMask( keypoints, mask );
}
void StarDetector::operator()(const Mat& img, vector<KeyPoint>& keypoints) const
void StarDetector::operator()(const Mat& img, std::vector<KeyPoint>& keypoints) const
{
Mat responses, sizes;
int border = StarDetectorComputeResponses( img, responses, sizes, maxSize );
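And a matching sketch for the Star detector's operator() with default parameters, again with std::vector written out.

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

static void detectStar( const cv::Mat& grayImage,
                        std::vector<cv::KeyPoint>& keypoints )
{
    cv::StarDetector star;          // default maxSize and response threshold
    star( grayImage, keypoints );
}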


@@ -42,6 +42,7 @@
#include "test_precomp.hpp"
using namespace std;
using namespace cv;
class CV_BRISKTest : public cvtest::BaseTest


@@ -42,6 +42,7 @@
#include "test_precomp.hpp"
using namespace std;
using namespace cv;
class CV_FastTest : public cvtest::BaseTest


@@ -46,6 +46,7 @@
#include <vector>
#include <iostream>
using namespace std;
using namespace cv;
using namespace cv::flann;


@@ -42,6 +42,7 @@
#include "test_precomp.hpp"
#include "opencv2/highgui/highgui.hpp"
using namespace std;
using namespace cv;
TEST(Features2D_ORB, _1996)