almost finished Python wrappers

This commit is contained in:
Vadim Pisarevsky 2010-11-02 17:58:22 +00:00
parent eda72a3e8b
commit 5b6a755719
27 changed files with 1292 additions and 634 deletions

View File

@ -443,12 +443,6 @@ enum
//! computes the best-fit perspective transformation mapping srcPoints to dstPoints.
CV_EXPORTS_AS(findHomographyAndOutliers) Mat findHomography( const Mat& srcPoints,
const Mat& dstPoints,
CV_OUT Mat& mask, int method=0,
double ransacReprojThreshold=3 );
//! computes the best-fit perspective transformation mapping srcPoints to dstPoints.
CV_EXPORTS Mat findHomography( const Mat& srcPoints,
const Mat& dstPoints,
vector<uchar>& mask, int method=0,
double ransacReprojThreshold=3 );
@ -493,14 +487,14 @@ CV_EXPORTS_AS(composeRT_J) void composeRT( const Mat& rvec1, const Mat& tvec1,
CV_OUT Mat& dt3dr2, CV_OUT Mat& dt3dt2 );
//! projects points from the model coordinate space to the image coordinates. Takes the intrinsic and extrinsic camera parameters into account
CV_EXPORTS void projectPoints( const Mat& objectPoints,
CV_EXPORTS_W void projectPoints( const Mat& objectPoints,
const Mat& rvec, const Mat& tvec,
const Mat& cameraMatrix,
const Mat& distCoeffs,
CV_OUT vector<Point2f>& imagePoints );
//! projects points from the model coordinate space to the image coordinates. Also computes derivatives of the image coordinates w.r.t the intrinsic and extrinsic camera parameters
CV_EXPORTS void projectPoints( const Mat& objectPoints,
CV_EXPORTS_AS(projectPointsJ) void projectPoints( const Mat& objectPoints,
const Mat& rvec, const Mat& tvec,
const Mat& cameraMatrix,
const Mat& distCoeffs,
@ -518,7 +512,7 @@ CV_EXPORTS_W void solvePnP( const Mat& objectPoints,
bool useExtrinsicGuess=false );
//! initializes camera matrix from a few 3D points and the corresponding projections.
CV_EXPORTS Mat initCameraMatrix2D( const vector<vector<Point3f> >& objectPoints,
CV_EXPORTS_W Mat initCameraMatrix2D( const vector<vector<Point3f> >& objectPoints,
const vector<vector<Point2f> >& imagePoints,
Size imageSize, double aspectRatio=1. );
@ -527,9 +521,9 @@ enum { CALIB_CB_ADAPTIVE_THRESH = 1, CALIB_CB_NORMALIZE_IMAGE = 2,
CALIB_CB_FILTER_QUADS = 4, CALIB_CB_FAST_CHECK = 8 };
//! finds checkerboard pattern of the specified size in the image
CV_EXPORTS bool findChessboardCorners( const Mat& image, Size patternSize,
CV_OUT vector<Point2f>& corners,
int flags=CALIB_CB_ADAPTIVE_THRESH+
CV_EXPORTS_W bool findChessboardCorners( const Mat& image, Size patternSize,
CV_OUT vector<Point2f>& corners,
int flags=CALIB_CB_ADAPTIVE_THRESH+
CALIB_CB_NORMALIZE_IMAGE );
//! draws the checkerboard pattern (found or partly found) in the image
@ -558,7 +552,7 @@ enum
};
//! finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
CV_EXPORTS double calibrateCamera( const vector<vector<Point3f> >& objectPoints,
CV_EXPORTS_W double calibrateCamera( const vector<vector<Point3f> >& objectPoints,
const vector<vector<Point2f> >& imagePoints,
Size imageSize,
CV_IN_OUT Mat& cameraMatrix,
@ -578,7 +572,7 @@ CV_EXPORTS_W void calibrationMatrixValues( const Mat& cameraMatrix,
CV_OUT double& aspectRatio );
//! finds intrinsic and extrinsic parameters of a stereo camera
CV_EXPORTS double stereoCalibrate( const vector<vector<Point3f> >& objectPoints,
CV_EXPORTS_W double stereoCalibrate( const vector<vector<Point3f> >& objectPoints,
const vector<vector<Point2f> >& imagePoints1,
const vector<vector<Point2f> >& imagePoints2,
CV_IN_OUT Mat& cameraMatrix1, CV_IN_OUT Mat& distCoeffs1,
@ -615,7 +609,7 @@ CV_EXPORTS_W bool stereoRectifyUncalibrated( const Mat& points1, const Mat& poin
double threshold=5 );
//! computes the rectification transformations for 3-head camera, where all the heads are on the same line.
CV_EXPORTS float rectify3Collinear( const Mat& cameraMatrix1, const Mat& distCoeffs1,
CV_EXPORTS_W float rectify3Collinear( const Mat& cameraMatrix1, const Mat& distCoeffs1,
const Mat& cameraMatrix2, const Mat& distCoeffs2,
const Mat& cameraMatrix3, const Mat& distCoeffs3,
const vector<vector<Point2f> >& imgpt1,
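
Usage note: the functions marked CV_EXPORTS_W above keep their C++ signatures unchanged. A minimal C++ sketch of the calibration flow they form (file names, board size and flags are placeholder assumptions):

#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Size boardSize(9, 6);                         // inner-corner count of the board (assumed)
    Size imageSize;
    vector<vector<Point2f> > imagePoints;
    vector<vector<Point3f> > objectPoints;

    for( int i = 0; i < 10; i++ )                 // hypothetical set of calibration views
    {
        char name[64]; sprintf(name, "view%02d.png", i);
        Mat view = imread(name, 0);
        if( view.empty() )
            continue;
        imageSize = view.size();

        vector<Point2f> corners;
        if( !findChessboardCorners(view, boardSize, corners) )
            continue;
        imagePoints.push_back(corners);

        vector<Point3f> obj;                      // planar board model with unit squares
        for( int y = 0; y < boardSize.height; y++ )
            for( int x = 0; x < boardSize.width; x++ )
                obj.push_back(Point3f((float)x, (float)y, 0.f));
        objectPoints.push_back(obj);
    }

    Mat cameraMatrix = initCameraMatrix2D(objectPoints, imagePoints, imageSize);
    Mat distCoeffs;
    vector<Mat> rvecs, tvecs;
    double rms = calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
                                 distCoeffs, rvecs, tvecs, CALIB_USE_INTRINSIC_GUESS);
    printf("RMS reprojection error: %g\n", rms);
    return 0;
}
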

View File

@ -972,7 +972,7 @@ public:
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 1, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -984,7 +984,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -996,7 +996,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1008,7 +1008,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1020,7 +1020,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1032,7 +1032,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1044,7 +1044,7 @@ public:
typedef int work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1056,7 +1056,7 @@ public:
typedef value_type work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1068,7 +1068,7 @@ public:
typedef value_type work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1080,7 +1080,7 @@ public:
typedef value_type work_type;
typedef value_type channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = 1,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 1,
fmt=DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1092,7 +1092,7 @@ public:
typedef Vec<typename DataType<_Tp>::work_type, cn> work_type;
typedef _Tp channel_type;
typedef value_type vec_type;
enum { depth = DataDepth<channel_type>::value, channels = cn,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = cn,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
};
@ -1103,7 +1103,7 @@ public:
typedef std::complex<_Tp> value_type;
typedef value_type work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 2,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 2,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1115,7 +1115,7 @@ public:
typedef Complex<_Tp> value_type;
typedef value_type work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 2,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 2,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1127,7 +1127,7 @@ public:
typedef Point_<_Tp> value_type;
typedef Point_<typename DataType<_Tp>::work_type> work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 2,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 2,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1139,7 +1139,7 @@ public:
typedef Point3_<_Tp> value_type;
typedef Point3_<typename DataType<_Tp>::work_type> work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 3,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 3,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1151,7 +1151,7 @@ public:
typedef Size_<_Tp> value_type;
typedef Size_<typename DataType<_Tp>::work_type> work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 2,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 2,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1163,7 +1163,7 @@ public:
typedef Rect_<_Tp> value_type;
typedef Rect_<typename DataType<_Tp>::work_type> work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 4,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 4,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1175,7 +1175,7 @@ public:
typedef Scalar_<_Tp> value_type;
typedef Scalar_<typename DataType<_Tp>::work_type> work_type;
typedef _Tp channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 4,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 4,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
@ -1187,7 +1187,7 @@ public:
typedef Range value_type;
typedef value_type work_type;
typedef int channel_type;
enum { depth = DataDepth<channel_type>::value, channels = 2,
enum { generic = 0, depth = DataDepth<channel_type>::value, channels = 2,
fmt = ((channels-1)<<8) + DataDepth<channel_type>::fmt,
type = CV_MAKETYPE(depth, channels) };
typedef Vec<channel_type, channels> vec_type;
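
A note on the new generic flag: the primary DataType template sets generic = 1 while every specialization above sets generic = 0, so (presumably) template code can tell at compile time whether a concrete specialization exists for a given element type. A minimal C++03-style sketch of such a guard (the helper name is made up):

#include "opencv2/core/core.hpp"

// compile-time guard: produces a negative-size array (and thus a compile error)
// for types that only match the primary, generic DataType template
template<typename _Tp> void requireKnownDataType()
{
    typedef char known_type_check[cv::DataType<_Tp>::generic == 0 ? 1 : -1];
    (void)sizeof(known_type_check);
}

int main()
{
    requireKnownDataType<float>();        // OK: concrete specialization, generic == 0
    requireKnownDataType<cv::Point2f>();  // OK: compound specialization, generic == 0
    // requireKnownDataType<void*>();     // would not compile: falls back to generic == 1
    return 0;
}
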
@ -2154,27 +2154,27 @@ CV_EXPORTS_W void calcCovarMatrix( const Mat& samples, CV_OUT Mat& covar, CV_OUT
}
\endcode
*/
class CV_EXPORTS_W PCA
class CV_EXPORTS PCA
{
public:
//! default constructor
CV_WRAP PCA();
PCA();
//! the constructor that performs PCA
CV_WRAP PCA(const Mat& data, const Mat& mean, int flags, int maxComponents=0);
PCA(const Mat& data, const Mat& mean, int flags, int maxComponents=0);
//! operator that performs PCA. The previously stored data, if any, is released
CV_WRAP_AS(compute) PCA& operator()(const Mat& data, const Mat& mean, int flags, int maxComponents=0);
PCA& operator()(const Mat& data, const Mat& mean, int flags, int maxComponents=0);
//! projects vector from the original space to the principal components subspace
Mat project(const Mat& vec) const;
//! projects vector from the original space to the principal components subspace
CV_WRAP void project(const Mat& vec, CV_OUT Mat& result) const;
void project(const Mat& vec, CV_OUT Mat& result) const;
//! reconstructs the original vector from the projection
Mat backProject(const Mat& vec) const;
//! reconstructs the original vector from the projection
CV_WRAP void backProject(const Mat& vec, CV_OUT Mat& result) const;
void backProject(const Mat& vec, CV_OUT Mat& result) const;
CV_PROP Mat eigenvectors; //!< eigenvectors of the covariation matrix
CV_PROP Mat eigenvalues; //!< eigenvalues of the covariation matrix
CV_PROP Mat mean; //!< mean value subtracted before the projection and added after the back projection
Mat eigenvectors; //!< eigenvectors of the covariation matrix
Mat eigenvalues; //!< eigenvalues of the covariation matrix
Mat mean; //!< mean value subtracted before the projection and added after the back projection
};
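
The wrapper markers are removed from PCA here; for context, the plain C++ usage of the interface above, as a small sketch with random placeholder data (0 is the CV_PCA_DATA_AS_ROW flag, i.e. samples are stored as rows):

#include "opencv2/core/core.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat data(100, 16, CV_32F);                    // 100 samples, 16-dimensional
    randu(data, Scalar::all(0), Scalar::all(1));

    PCA pca(data, Mat(), 0 /* CV_PCA_DATA_AS_ROW */, 4);  // keep 4 components

    Mat sample = data.row(0), coeffs, reconstructed;
    pca.project(sample, coeffs);                  // 16-D -> 4-D
    pca.backProject(coeffs, reconstructed);       // 4-D -> 16-D approximation
    printf("reconstruction error: %g\n", norm(sample, reconstructed, NORM_L2));
    return 0;
}
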
/*!
@ -2273,7 +2273,7 @@ CV_WRAP static inline void randn(CV_OUT Mat& dst, const Scalar& mean, const Scal
{ theRNG().fill(dst, RNG::NORMAL, mean, stddev); }
//! shuffles the input array elements
CV_EXPORTS_W void randShuffle(Mat& dst, double iterFactor=1., RNG* rng=0);
CV_EXPORTS void randShuffle(Mat& dst, double iterFactor=1., RNG* rng=0);
//! draws the line segment (pt1, pt2) in the image
CV_EXPORTS_W void line(Mat& img, Point pt1, Point pt2, const Scalar& color,
@ -3576,30 +3576,46 @@ public:
CV_WRAP KDTree();
//! the full constructor that builds the search tree
CV_WRAP KDTree(const Mat& _points, bool copyAndReorderPoints=false);
//! the full constructor that builds the search tree
CV_WRAP KDTree(const Mat& _points, const Mat& _labels, bool copyAndReorderPoints=false);
//! builds the search tree
CV_WRAP void build(const Mat& _points, bool copyAndReorderPoints=false);
//! builds the search tree
CV_WRAP void build(const Mat& _points, const Mat& _labels, bool copyAndReorderPoints=false);
//! finds the K nearest neighbors of "vec" while looking at Emax (at most) leaves
int findNearest(const float* vec,
int K, int Emax, int* neighborsIdx,
Mat* neighbors=0, float* dist=0) const;
Mat* neighbors=0, float* dist=0, int* labels=0) const;
//! finds the K nearest neighbors while looking at Emax (at most) leaves
int findNearest(const float* vec, int K, int Emax,
vector<int>* neighborsIdx,
Mat* neighbors=0, vector<float>* dist=0) const;
Mat* neighbors=0,
vector<float>* dist=0,
vector<int>* labels=0) const;
CV_WRAP int findNearest(const vector<float>& vec, int K, int Emax,
CV_OUT vector<int>* neighborsIdx,
CV_OUT Mat* neighbors=0,
CV_OUT vector<float>* dist=0,
CV_OUT vector<int>* labels=0) const;
//! finds all the points from the initial set that belong to the specified box
void findOrthoRange(const float* minBounds, const float* maxBounds,
vector<int>* neighborsIdx, Mat* neighbors=0) const;
vector<int>* neighborsIdx, Mat* neighbors=0,
vector<int>* labels=0) const;
CV_WRAP void findOrthoRange(const vector<float>& minBounds, const vector<float>& maxBounds,
CV_OUT vector<int>* neighborsIdx, CV_OUT Mat* neighbors=0,
CV_OUT vector<int>* labels=0) const;
//! returns vectors with the specified indices
void getPoints(const int* idx, size_t nidx, Mat& pts) const;
void getPoints(const int* idx, size_t nidx, Mat& pts, vector<int>* labels=0) const;
//! returns vectors with the specified indices
void getPoints(const Mat& idxs, Mat& pts) const;
CV_WRAP void getPoints(const vector<int>& idxs, Mat& pts, CV_OUT vector<int>* labels=0) const;
//! return a vector with the specified index
const float* getPoint(int ptidx) const;
const float* getPoint(int ptidx, int* label=0) const;
//! returns the search space dimensionality
CV_WRAP int dims() const;
vector<Node> nodes; //!< all the tree nodes
CV_PROP Mat points; //!< all the points. It can be a reordered copy of the input vector set or the original vector set.
CV_PROP vector<int> labels; //!< the parallel array of labels.
CV_PROP int maxDepth; //!< maximum depth of the search tree. Do not modify it
CV_PROP_RW int normType; //!< type of the distance (cv::NORM_L1 or cv::NORM_L2) used for search. Initially set to cv::NORM_L2, but you can modify it
};
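
A minimal C++ sketch of the extended KDTree interface above, attaching an integer label to every point and reading the labels of the nearest neighbours back (dimensions, K and Emax are arbitrary):

#include "opencv2/core/core.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat points(1000, 8, CV_32F);                  // 1000 points in 8-D
    randu(points, Scalar::all(0), Scalar::all(1));
    Mat classIds(1000, 1, CV_32S);                // one label per point
    randu(classIds, Scalar::all(0), Scalar::all(10));

    KDTree tree(points, classIds, false);         // build without copying/reordering

    vector<float> query(8, 0.5f);
    vector<int> idx, labels;
    vector<float> dist;
    tree.findNearest(query, 5, 32, &idx, 0, &dist, &labels);   // K=5, Emax=32
    for( size_t i = 0; i < idx.size(); i++ )
        printf("neighbor %d: dist=%g label=%d\n", idx[i], dist[i], labels[i]);
    return 0;
}
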
@ -3743,7 +3759,7 @@ public:
//! returns the top-level mapping. YAML supports multiple streams
CV_WRAP FileNode root(int streamidx=0) const;
//! returns the specified element of the top-level mapping
CV_WRAP FileNode operator[](const string& nodename) const;
FileNode operator[](const string& nodename) const;
//! returns the specified element of the top-level mapping
CV_WRAP FileNode operator[](const char* nodename) const;
@ -3777,7 +3793,7 @@ class CV_EXPORTS FileNodeIterator;
Note that file nodes are only used for navigating file storages opened for reading.
When a file storage is opened for writing, no data is stored in memory after it is written.
*/
class CV_EXPORTS_W FileNode
class CV_EXPORTS_W_SIMPLE FileNode
{
public:
//! type of the file storage node
@ -3813,7 +3829,6 @@ public:
//! returns type of the node
CV_WRAP int type() const;
CV_WRAP int rawDataSize(const string& fmt) const;
//! returns true if the node is empty
CV_WRAP bool empty() const;
//! returns true if the node is a "none" object
@ -3835,13 +3850,13 @@ public:
//! returns the number of elements in the node, if it is a sequence or mapping, or 1 otherwise.
CV_WRAP size_t size() const;
//! returns the node content as an integer. If the node stores floating-point number, it is rounded.
CV_WRAP operator int() const;
operator int() const;
//! returns the node content as float
CV_WRAP operator float() const;
operator float() const;
//! returns the node content as double
CV_WRAP operator double() const;
operator double() const;
//! returns the node content as text string
CV_WRAP operator string() const;
operator string() const;
//! returns pointer to the underlying file node
CvFileNode* operator *();
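
For reference, a small C++ sketch of reading typed values through the FileStorage/FileNode conversion operators listed above (the file name and keys are hypothetical):

#include "opencv2/core/core.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    FileStorage fs("params.yml", FileStorage::READ);   // hypothetical settings file
    if( !fs.isOpened() )
        return 1;
    int frames   = (int)fs["frame_count"];             // operator int()
    double fps   = (double)fs["fps"];                  // operator double()
    string codec = (string)fs["codec"];                // operator string()
    printf("%d frames at %g fps, codec %s\n", frames, fps, codec.c_str());
    return 0;
}
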

View File

@ -2990,8 +2990,7 @@ template<typename _Tp, class _LT> void sort( vector<_Tp>& vec, _LT LT=_LT() )
{
_Tp *lb;
_Tp *ub;
}
stack[48];
} stack[48];
size_t total = vec.size();

View File

@ -161,8 +161,9 @@ typedef signed char schar;
#define CV_CARRAY(counter)
#define CV_CUSTOM_CARRAY(args)
#define CV_EXPORTS_W CV_EXPORTS
#define CV_EXPORTS_W_SIMPLE CV_EXPORTS
#define CV_EXPORTS_AS(synonym) CV_EXPORTS
#define CV_EXPORTS_AS_MAP CV_EXPORTS
#define CV_EXPORTS_W_MAP CV_EXPORTS
#define CV_IN_OUT
#define CV_OUT
#define CV_PROP
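
These markers are no-ops for the C++ compiler, as the defines above show (the *_W/*_AS variants collapse to CV_EXPORTS, and CV_IN_OUT/CV_OUT/CV_PROP to nothing); they are presumably only consumed by the wrapper generator's header parser. A hypothetical class annotated for it could look like this:

#include "opencv2/core/core.hpp"

// hypothetical example; in a pure C++ build this is just a CV_EXPORTS class
class CV_EXPORTS_W_SIMPLE MyBox
{
public:
    CV_WRAP MyBox() : x(0), y(0), width(0), height(0) {}
    CV_WRAP MyBox(float _x, float _y, float _w, float _h)
        : x(_x), y(_y), width(_w), height(_h) {}
    CV_WRAP float area() const { return width*height; }

    CV_PROP_RW float x, y, width, height;   // intended as read-write attributes in Python
};

// exported to Python under the name "boxArea"; the result goes through CV_OUT
CV_EXPORTS_AS(boxArea) void computeBoxArea( const MyBox& box, CV_OUT float& area );
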

View File

@ -3555,6 +3555,13 @@ KDTree::KDTree(const Mat& _points, bool _copyData)
build(_points, _copyData);
}
KDTree::KDTree(const Mat& _points, const Mat& _labels, bool _copyData)
{
maxDepth = -1;
normType = NORM_L2;
build(_points, _labels, _copyData);
}
struct SubTree
{
SubTree() : first(0), last(0), nodeIdx(0), depth(0) {}
@ -3630,10 +3637,16 @@ computeSums( const Mat& points, const size_t* ofs, int a, int b, double* sums )
}
}
void KDTree::build(const Mat& _points, bool _copyData)
{
CV_Assert(_points.type() == CV_32F);
build(_points, Mat(), _copyData);
}
void KDTree::build(const Mat& _points, const Mat& _labels, bool _copyData)
{
CV_Assert(_points.type() == CV_32F && !_points.empty());
vector<KDTree::Node>().swap(nodes);
if( !_copyData )
@ -3643,13 +3656,22 @@ void KDTree::build(const Mat& _points, bool _copyData)
points.release();
points.create(_points.size(), _points.type());
}
int i, j, n = _points.rows, dims = _points.cols, top = 0;
const float* data = _points.ptr<float>(0);
float* dstdata = points.ptr<float>(0);
size_t step = _points.step1();
size_t dstep = points.step1();
int ptpos = 0;
labels.resize(n);
const int* _labels_data = 0;
if( !_labels.empty() )
{
int nlabels = _labels.checkVector(1, CV_32S, true);
CV_Assert(nlabels == n);
_labels_data = (const int*)_labels.data;
}
Mat sumstack(MAX_TREE_DEPTH*2, dims*2, CV_64F);
SubTree stack[MAX_TREE_DEPTH*2];
@ -3675,7 +3697,8 @@ void KDTree::build(const Mat& _points, bool _copyData)
if( count == 1 )
{
int idx = _copyData ? ptpos++ : (int)(ptofs[first]/step);
int idx0 = (int)(ptofs[first]/step);
int idx = _copyData ? ptpos++ : idx0;
nodes[nidx].idx = ~idx;
if( _copyData )
{
@ -3684,6 +3707,7 @@ void KDTree::build(const Mat& _points, bool _copyData)
for( j = 0; j < dims; j++ )
dst[j] = src[j];
}
labels[idx] = _labels_data ? _labels_data[idx0] : idx0;
_maxDepth = std::max(_maxDepth, depth);
continue;
}
@ -3723,7 +3747,8 @@ void KDTree::build(const Mat& _points, bool _copyData)
int KDTree::findNearest(const float* vec, int K, int emax,
vector<int>* neighborsIdx,
Mat* neighbors,
vector<float>* dist) const
vector<float>* dist,
vector<int>* labels) const
{
K = std::min(K, points.rows);
CV_Assert(K > 0);
@ -3731,15 +3756,44 @@ int KDTree::findNearest(const float* vec, int K, int emax,
neighborsIdx->resize(K);
if(dist)
dist->resize(K);
if(labels)
labels->resize(K);
K = findNearest(vec, K, emax, neighborsIdx ? &(*neighborsIdx)[0] : 0,
neighbors, dist ? &(*dist)[0] : 0);
neighbors, dist ? &(*dist)[0] : 0, labels ? &(*labels)[0] : 0);
if(neighborsIdx)
neighborsIdx->resize(K);
if(dist)
dist->resize(K);
if(labels)
labels->resize(K);
return K;
}
int KDTree::findNearest(const vector<float>& vec, int K, int emax,
vector<int>* neighborsIdx,
Mat* neighbors,
vector<float>* dist,
vector<int>* labels) const
{
CV_Assert((int)vec.size() == points.cols);
K = std::min(K, points.rows);
CV_Assert(K > 0);
if(neighborsIdx)
neighborsIdx->resize(K);
if(dist)
dist->resize(K);
if(labels)
labels->resize(K);
K = findNearest(&vec[0], K, emax, neighborsIdx ? &(*neighborsIdx)[0] : 0,
neighbors, dist ? &(*dist)[0] : 0, labels ? &(*labels)[0] : 0);
if(neighborsIdx)
neighborsIdx->resize(K);
if(dist)
dist->resize(K);
if(labels)
labels->resize(K);
return K;
}
struct PQueueElem
{
@ -3752,7 +3806,7 @@ struct PQueueElem
int KDTree::findNearest(const float* vec, int K, int emax,
int* _neighborsIdx, Mat* _neighbors,
float* _dist) const
float* _dist, int* _labels) const
{
K = std::min(K, points.rows);
@ -3885,6 +3939,11 @@ int KDTree::findNearest(const float* vec, int K, int emax,
for( i = 0; i < K; i++ )
_dist[i] = std::sqrt(dist[i]);
}
if( _labels )
{
for( i = 0; i < K; i++ )
_labels[i] = labels[idx[i]];
}
if( _neighbors )
getPoints(idx, K, *_neighbors);
@ -3893,7 +3952,8 @@ int KDTree::findNearest(const float* vec, int K, int emax,
void KDTree::findOrthoRange(const float* L, const float* R,
vector<int>* neighborsIdx, Mat* neighbors) const
vector<int>* neighborsIdx,
Mat* neighbors, vector<int>* _labels) const
{
int dims = points.cols;
@ -3931,48 +3991,73 @@ void KDTree::findOrthoRange(const float* L, const float* R,
}
if( neighbors )
getPoints( &(*idx)[0], idx->size(), *neighbors );
getPoints( &(*idx)[0], idx->size(), *neighbors, _labels );
}
void KDTree::getPoints(const int* idx, size_t nidx, Mat& pts) const
void KDTree::findOrthoRange(const vector<float>& L, const vector<float>& R,
vector<int>* neighborsIdx, Mat* neighbors, vector<int>* _labels) const
{
size_t dims = points.cols;
CV_Assert(L.size() == dims && R.size() == dims);
findOrthoRange(&L[0], &R[0], neighborsIdx, neighbors, _labels);
}
void KDTree::getPoints(const int* idx, size_t nidx, Mat& pts, vector<int>* _labels) const
{
int dims = points.cols, n = (int)nidx;
pts.create( n, dims, points.type());
if(_labels)
_labels->resize(nidx);
for( int i = 0; i < n; i++ )
{
int k = idx[i];
CV_Assert( (unsigned)k < (unsigned)points.rows );
const float* src = points.ptr<float>(k);
std::copy(src, src + dims, pts.ptr<float>(i));
if(_labels)
(*_labels)[i] = labels[k];
}
}
void KDTree::getPoints(const Mat& idx, Mat& pts) const
void KDTree::getPoints(const vector<int>& idx, Mat& pts, vector<int>* _labels) const
{
CV_Assert(idx.type() == CV_32S && idx.isContinuous() &&
(idx.cols == 1 || idx.rows == 1));
int dims = points.cols;
int i, nidx = idx.cols + idx.rows - 1;
int i, nidx = (int)idx.size();
pts.create( nidx, dims, points.type());
const int* _idx = idx.ptr<int>();
if(_labels)
_labels->resize(nidx);
for( i = 0; i < nidx; i++ )
{
int k = _idx[i];
int k = idx[i];
CV_Assert( (unsigned)k < (unsigned)points.rows );
const float* src = points.ptr<float>(k);
std::copy(src, src + dims, pts.ptr<float>(i));
if(_labels) (*_labels)[i] = labels[k];
}
}
const float* KDTree::getPoint(int ptidx) const
const float* KDTree::getPoint(int ptidx, int* label) const
{
CV_Assert( (unsigned)ptidx < (unsigned)points.rows);
if(label)
*label = labels[ptidx];
return points.ptr<float>(ptidx);
}
int KDTree::dims() const
{
return !points.empty() ? points.cols : 0;
}
////////////////////////////////////////////////////////////////////////////////
schar* seqPush( CvSeq* seq, const void* element )
{

View File

@ -5130,7 +5130,6 @@ void* FileNode::readObj() const
return cvRead( (CvFileStorage*)fs, (CvFileNode*)node );
}
FileNodeIterator::FileNodeIterator()
{
fs = 0;

View File

@ -216,18 +216,18 @@ public:
(usually represented as a feature vector). The keypoints representing the same object in different images can then be matched using
cv::KDTree or another method.
*/
class CV_EXPORTS KeyPoint
class CV_EXPORTS_W_SIMPLE KeyPoint
{
public:
//! the default constructor
KeyPoint() : pt(0,0), size(0), angle(-1), response(0), octave(0), class_id(-1) {}
CV_WRAP KeyPoint() : pt(0,0), size(0), angle(-1), response(0), octave(0), class_id(-1) {}
//! the full constructor
KeyPoint(Point2f _pt, float _size, float _angle=-1,
float _response=0, int _octave=0, int _class_id=-1)
: pt(_pt), size(_size), angle(_angle),
response(_response), octave(_octave), class_id(_class_id) {}
//! another form of the full constructor
KeyPoint(float x, float y, float _size, float _angle=-1,
CV_WRAP KeyPoint(float x, float y, float _size, float _angle=-1,
float _response=0, int _octave=0, int _class_id=-1)
: pt(x, y), size(_size), angle(_angle),
response(_response), octave(_octave), class_id(_class_id) {}
@ -245,18 +245,18 @@ public:
//! area of keypoint regions union (now keypoint region is circle)
static float overlap(const KeyPoint& kp1, const KeyPoint& kp2);
Point2f pt; //!< coordinates of the keypoints
float size; //!< diameter of the meaningful keypoint neighborhood
float angle; //!< computed orientation of the keypoint (-1 if not applicable)
float response; //!< the response by which the strongest keypoints have been selected. Can be used for further sorting or subsampling
int octave; //!< octave (pyramid layer) from which the keypoint has been extracted
int class_id; //!< object class (if the keypoints need to be clustered by an object they belong to)
CV_PROP_RW Point2f pt; //!< coordinates of the keypoints
CV_PROP_RW float size; //!< diameter of the meaningful keypoint neighborhood
CV_PROP_RW float angle; //!< computed orientation of the keypoint (-1 if not applicable)
CV_PROP_RW float response; //!< the response by which the strongest keypoints have been selected. Can be used for further sorting or subsampling
CV_PROP_RW int octave; //!< octave (pyramid layer) from which the keypoint has been extracted
CV_PROP_RW int class_id; //!< object class (if the keypoints need to be clustered by an object they belong to)
};
//! writes vector of keypoints to the file storage
CV_EXPORTS_W void write(FileStorage& fs, const string& name, const vector<KeyPoint>& keypoints);
CV_EXPORTS void write(FileStorage& fs, const string& name, const vector<KeyPoint>& keypoints);
//! reads vector of keypoints from the specified file storage node
CV_EXPORTS_W void read(const FileNode& node, CV_OUT vector<KeyPoint>& keypoints);
CV_EXPORTS void read(const FileNode& node, CV_OUT vector<KeyPoint>& keypoints);
/*!
SIFT implementation.

View File

@ -88,7 +88,7 @@ CV_EXPORTS_W void imshow( const string& winname, const Mat& mat );
typedef void (CV_CDECL *TrackbarCallback)(int pos, void* userdata);
CV_EXPORTS_W int createTrackbar( const string& trackbarname, const string& winname,
CV_EXPORTS int createTrackbar( const string& trackbarname, const string& winname,
int* value, int count,
TrackbarCallback onChange CV_DEFAULT(0),
void* userdata CV_DEFAULT(0));
@ -99,7 +99,7 @@ CV_EXPORTS_W void setTrackbarPos( const string& trackbarname, const string& winn
typedef void (*MouseCallback )(int event, int x, int y, int flags, void* param);
//! assigns callback for mouse events
CV_EXPORTS_W void setMouseCallback( const string& windowName, MouseCallback onMouse, void* param=0);
CV_EXPORTS void setMouseCallback( const string& windowName, MouseCallback onMouse, void* param=0);
CV_EXPORTS_W Mat imread( const string& filename, int flags=1 );
CV_EXPORTS_W bool imwrite( const string& filename, const Mat& img,
@ -131,7 +131,8 @@ public:
CV_WRAP virtual bool grab();
CV_WRAP virtual bool retrieve(CV_OUT Mat& image, int channel=0);
CV_WRAP_AS(read) virtual VideoCapture& operator >> (CV_OUT Mat& image);
virtual VideoCapture& operator >> (CV_OUT Mat& image);
CV_WRAP virtual bool read(CV_OUT Mat& image);
CV_WRAP virtual bool set(int propId, double value);
CV_WRAP virtual double get(int propId);
@ -152,7 +153,8 @@ public:
CV_WRAP virtual bool open(const string& filename, int fourcc, double fps,
Size frameSize, bool isColor=true);
CV_WRAP virtual bool isOpened() const;
CV_WRAP_AS(write) virtual VideoWriter& operator << (const Mat& image);
virtual VideoWriter& operator << (const Mat& image);
CV_WRAP virtual void write(const Mat& image);
protected:
Ptr<CvVideoWriter> writer;

View File

@ -408,6 +408,15 @@ bool VideoCapture::retrieve(Mat& image, int channel)
}
return true;
}
bool VideoCapture::read(Mat& image)
{
if(!grab())
image.release();
else
retrieve(image);
return !image.empty();
}
VideoCapture& VideoCapture::operator >> (Mat& image)
{
@ -451,11 +460,16 @@ bool VideoWriter::isOpened() const
{
return !writer.empty();
}
VideoWriter& VideoWriter::operator << (const Mat& image)
void VideoWriter::write(const Mat& image)
{
IplImage _img = image;
cvWriteFrame(writer, &_img);
}
VideoWriter& VideoWriter::operator << (const Mat& image)
{
write(image);
return *this;
}
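
The new read()/write() methods shown above are plain-function twins of operator>> and operator<< (read() is grab()+retrieve() and returns false once no frame is left), which makes them easy to wrap. A minimal C++ copy loop using them (file names, codec and fps are placeholders):

#include "opencv2/highgui/highgui.hpp"
using namespace cv;

int main()
{
    VideoCapture cap("input.avi");                          // placeholder input
    if( !cap.isOpened() )
        return 1;

    Mat frame;
    if( !cap.read(frame) )                                  // grab() + retrieve()
        return 1;

    int fourcc = 'M' | ('J'<<8) | ('P'<<16) | ('G'<<24);    // CV_FOURCC('M','J','P','G')
    VideoWriter out("output.avi", fourcc, 30, frame.size());
    if( !out.isOpened() )
        return 1;

    do
        out.write(frame);                                   // same as: out << frame
    while( cap.read(frame) );                               // false => end of stream

    return 0;
}
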

View File

@ -459,23 +459,23 @@ CV_EXPORTS void cornerSubPix( const Mat& image, vector<Point2f>& corners,
TermCriteria criteria );
//! finds the strong enough corners where the cornerMinEigenVal() or cornerHarris() report the local maxima
CV_EXPORTS void goodFeaturesToTrack( const Mat& image, CV_OUT vector<Point2f>& corners,
CV_EXPORTS_W void goodFeaturesToTrack( const Mat& image, CV_OUT vector<Point2f>& corners,
int maxCorners, double qualityLevel, double minDistance,
const Mat& mask=Mat(), int blockSize=3,
bool useHarrisDetector=false, double k=0.04 );
//! finds lines in the black-n-white image using the standard or pyramid Hough transform
CV_EXPORTS void HoughLines( const Mat& image, CV_OUT vector<Vec2f>& lines,
CV_EXPORTS_W void HoughLines( const Mat& image, CV_OUT vector<Vec2f>& lines,
double rho, double theta, int threshold,
double srn=0, double stn=0 );
//! finds line segments in the black-n-white image using probabilistic Hough transform
CV_EXPORTS void HoughLinesP( Mat& image, CV_OUT vector<Vec4i>& lines,
CV_EXPORTS_W void HoughLinesP( Mat& image, CV_OUT vector<Vec4i>& lines,
double rho, double theta, int threshold,
double minLineLength=0, double maxLineGap=0 );
//! finds circles in the grayscale image using 2+1 gradient Hough transform
CV_EXPORTS void HoughCircles( const Mat& image, CV_OUT vector<Vec3f>& circles,
CV_EXPORTS_W void HoughCircles( const Mat& image, CV_OUT vector<Vec3f>& circles,
int method, double dp, double minDist,
double param1=100, double param2=100,
int minRadius=0, int maxRadius=0 );
@ -716,13 +716,13 @@ enum { FLOODFILL_FIXED_RANGE = 1 << 16,
//! fills the semi-uniform image region starting from the specified seed point
CV_EXPORTS_W int floodFill( Mat& image,
Point seedPoint, Scalar newVal, Rect* rect=0,
Point seedPoint, Scalar newVal, CV_OUT Rect* rect=0,
Scalar loDiff=Scalar(), Scalar upDiff=Scalar(),
int flags=4 );
//! fills the semi-uniform image region and/or the mask starting from the specified seed point
CV_EXPORTS_AS(floodFillMask) int floodFill( Mat& image, Mat& mask,
Point seedPoint, Scalar newVal, Rect* rect=0,
Point seedPoint, Scalar newVal, CV_OUT Rect* rect=0,
Scalar loDiff=Scalar(), Scalar upDiff=Scalar(),
int flags=4 );
@ -730,7 +730,7 @@ CV_EXPORTS_AS(floodFillMask) int floodFill( Mat& image, Mat& mask,
CV_EXPORTS_W void cvtColor( const Mat& src, CV_OUT Mat& dst, int code, int dstCn=0 );
//! raster image moments
class CV_EXPORTS Moments
class CV_EXPORTS_W_MAP Moments
{
public:
//! the default constructor
@ -744,11 +744,11 @@ public:
operator CvMoments() const;
//! spatial moments
double m00, m10, m01, m20, m11, m02, m30, m21, m12, m03;
CV_PROP_RW double m00, m10, m01, m20, m11, m02, m30, m21, m12, m03;
//! central moments
double mu20, mu11, mu02, mu30, mu21, mu12, mu03;
CV_PROP_RW double mu20, mu11, mu02, mu30, mu21, mu12, mu03;
//! central normalized moments
double nu20, nu11, nu02, nu30, nu21, nu12, nu03;
CV_PROP_RW double nu20, nu11, nu02, nu30, nu21, nu12, nu03;
};
//! computes moments of the rasterized shape or a vector of points
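
A short C++ sketch exercising two of the functions touched above, HoughLinesP on an edge map and floodFill with the (now CV_OUT) bounding rectangle; the input file and thresholds are arbitrary:

#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat gray = imread("building.jpg", 0);          // placeholder grayscale image
    Mat color = imread("building.jpg", 1);
    if( gray.empty() || color.empty() )
        return 1;

    Mat edges;
    Canny(gray, edges, 50, 150);
    vector<Vec4i> lines;
    HoughLinesP(edges, lines, 1, CV_PI/180, 80, 30, 10);
    printf("found %d line segments\n", (int)lines.size());

    Rect filled;                                   // reported back through the CV_OUT Rect*
    int area = floodFill(color, Point(10, 10), Scalar(0, 0, 255), &filled,
                         Scalar::all(20), Scalar::all(20), 4);
    printf("flood-filled %d pixels inside a %dx%d rectangle\n",
           area, filled.width, filled.height);
    return 0;
}
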

View File

@ -188,7 +188,7 @@ CV_INLINE CvParamLattice cvDefaultParamLattice( void )
#define CV_TRAIN_ERROR 0
#define CV_TEST_ERROR 1
class CV_EXPORTS_AS(StatModel) CvStatModel
class CV_EXPORTS_W CvStatModel
{
public:
CvStatModel();
@ -217,7 +217,7 @@ protected:
class CvMLData;
struct CV_EXPORTS CvParamGrid
struct CV_EXPORTS_W_MAP CvParamGrid
{
// SVM params type
enum { SVM_C=0, SVM_GAMMA=1, SVM_P=2, SVM_NU=3, SVM_COEF=4, SVM_DEGREE=5 };
@ -236,12 +236,12 @@ struct CV_EXPORTS CvParamGrid
//CvParamGrid( int param_id );
bool check() const;
double min_val;
double max_val;
double step;
CV_PROP_RW double min_val;
CV_PROP_RW double max_val;
CV_PROP_RW double step;
};
class CV_EXPORTS_AS(NormalBayesClassifier) CvNormalBayesClassifier : public CvStatModel
class CV_EXPORTS_W CvNormalBayesClassifier : public CvStatModel
{
public:
CV_WRAP CvNormalBayesClassifier();
@ -262,7 +262,7 @@ public:
CV_WRAP virtual bool train( const cv::Mat& trainData, const cv::Mat& responses,
const cv::Mat& varIdx = cv::Mat(), const cv::Mat& sampleIdx=cv::Mat(),
bool update=false );
CV_WRAP virtual float predict( const cv::Mat& samples, cv::Mat* results=0 ) const;
CV_WRAP virtual float predict( const cv::Mat& samples, CV_OUT cv::Mat* results=0 ) const;
#endif
virtual void write( CvFileStorage* storage, const char* name ) const;
@ -287,7 +287,7 @@ protected:
\****************************************************************************************/
// k Nearest Neighbors
class CV_EXPORTS_AS(KNearest) CvKNearest : public CvStatModel
class CV_EXPORTS_W CvKNearest : public CvStatModel
{
public:
@ -312,9 +312,11 @@ public:
const cv::Mat& sampleIdx=cv::Mat(), bool isRegression=false,
int maxK=32, bool updateBase=false );
CV_WRAP virtual float find_nearest( const cv::Mat& samples, int k, cv::Mat* results=0,
virtual float find_nearest( const cv::Mat& samples, int k, cv::Mat* results=0,
const float** neighbors=0, cv::Mat* neighborResponses=0,
cv::Mat* dist=0 ) const;
CV_WRAP virtual float find_nearest( const cv::Mat& samples, int k, CV_OUT cv::Mat& results,
CV_OUT cv::Mat& neighborResponses, CV_OUT cv::Mat& dists) const;
#endif
virtual void clear();
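
The extra find_nearest() overload takes Mat references instead of optional pointers, which is what the generator can wrap. A small sketch training on synthetic data and calling the reference form (cluster centres and K are arbitrary):

#include "opencv2/ml/ml.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    // two 2-D gaussian blobs labelled 0 and 1 (synthetic data)
    Mat samples(200, 2, CV_32F), responses(200, 1, CV_32F);
    Mat s0 = samples.rowRange(0, 100), s1 = samples.rowRange(100, 200);
    randn(s0, Scalar::all(0), Scalar::all(1));
    randn(s1, Scalar::all(5), Scalar::all(1));
    responses.rowRange(0, 100).setTo(Scalar::all(0));
    responses.rowRange(100, 200).setTo(Scalar::all(1));

    CvKNearest knn;
    knn.train(samples, responses);                        // Mat-based train

    Mat query = (Mat_<float>(1, 2) << 4.5f, 5.2f);
    Mat results, neighborResponses, dists;
    float cls = knn.find_nearest(query, 5, results, neighborResponses, dists);
    printf("predicted class: %g\n", cls);
    return 0;
}
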
@ -344,7 +346,7 @@ protected:
\****************************************************************************************/
// SVM training parameters
struct CV_EXPORTS_AS_MAP CvSVMParams
struct CV_EXPORTS_W_MAP CvSVMParams
{
CvSVMParams();
CvSVMParams( int _svm_type, int _kernel_type,
@ -352,17 +354,17 @@ struct CV_EXPORTS_AS_MAP CvSVMParams
double Cvalue, double _nu, double _p,
CvMat* _class_weights, CvTermCriteria _term_crit );
int svm_type;
int kernel_type;
double degree; // for poly
double gamma; // for poly/rbf/sigmoid
double coef0; // for poly/sigmoid
CV_PROP_RW int svm_type;
CV_PROP_RW int kernel_type;
CV_PROP_RW double degree; // for poly
CV_PROP_RW double gamma; // for poly/rbf/sigmoid
CV_PROP_RW double coef0; // for poly/sigmoid
double C; // for CV_SVM_C_SVC, CV_SVM_EPS_SVR and CV_SVM_NU_SVR
double nu; // for CV_SVM_NU_SVC, CV_SVM_ONE_CLASS, and CV_SVM_NU_SVR
double p; // for CV_SVM_EPS_SVR
CV_PROP_RW double C; // for CV_SVM_C_SVC, CV_SVM_EPS_SVR and CV_SVM_NU_SVR
CV_PROP_RW double nu; // for CV_SVM_NU_SVC, CV_SVM_ONE_CLASS, and CV_SVM_NU_SVR
CV_PROP_RW double p; // for CV_SVM_EPS_SVR
CvMat* class_weights; // for CV_SVM_C_SVC
CvTermCriteria term_crit; // termination criteria
CV_PROP_RW CvTermCriteria term_crit; // termination criteria
};
@ -507,7 +509,7 @@ struct CvSVMDecisionFunc
// SVM model
class CV_EXPORTS_AS(SVM) CvSVM : public CvStatModel
class CV_EXPORTS_W CvSVM : public CvStatModel
{
public:
// SVM type
@ -554,18 +556,18 @@ public:
CV_WRAP virtual bool train_auto( const cv::Mat& trainData, const cv::Mat& responses,
const cv::Mat& varIdx, const cv::Mat& sampleIdx, CvSVMParams params,
int k_fold = 10,
CvParamGrid Cgrid = get_default_grid(CvSVM::C),
CvParamGrid gammaGrid = get_default_grid(CvSVM::GAMMA),
CvParamGrid pGrid = get_default_grid(CvSVM::P),
CvParamGrid nuGrid = get_default_grid(CvSVM::NU),
CvParamGrid coeffGrid = get_default_grid(CvSVM::COEF),
CvParamGrid degreeGrid = get_default_grid(CvSVM::DEGREE) );
CvParamGrid Cgrid = CvSVM::get_default_grid(CvSVM::C),
CvParamGrid gammaGrid = CvSVM::get_default_grid(CvSVM::GAMMA),
CvParamGrid pGrid = CvSVM::get_default_grid(CvSVM::P),
CvParamGrid nuGrid = CvSVM::get_default_grid(CvSVM::NU),
CvParamGrid coeffGrid = CvSVM::get_default_grid(CvSVM::COEF),
CvParamGrid degreeGrid = CvSVM::get_default_grid(CvSVM::DEGREE) );
CV_WRAP virtual float predict( const cv::Mat& sample, bool returnDFVal=false ) const;
#endif
CV_WRAP virtual int get_support_vector_count() const;
virtual const float* get_support_vector(int i) const;
CV_WRAP virtual CvSVMParams get_params() const { return params; };
virtual CvSVMParams get_params() const { return params; };
CV_WRAP virtual void clear();
static CvParamGrid get_default_grid( int param_id );
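
The default-grid arguments now spell out the CvSVM:: scope, presumably so the header parser can resolve them; the C++ call itself is unchanged. A short sketch of train_auto() on synthetic data (all parameters are arbitrary):

#include "opencv2/ml/ml.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat samples(200, 2, CV_32F), responses(200, 1, CV_32F);
    Mat s0 = samples.rowRange(0, 100), s1 = samples.rowRange(100, 200);
    randn(s0, Scalar::all(0), Scalar::all(1));
    randn(s1, Scalar::all(4), Scalar::all(1));
    responses.rowRange(0, 100).setTo(Scalar::all(-1));
    responses.rowRange(100, 200).setTo(Scalar::all(1));

    CvSVMParams params;
    params.svm_type = CvSVM::C_SVC;
    params.kernel_type = CvSVM::RBF;

    CvSVM svm;
    // 10-fold cross-validation over the default C/gamma/... grids
    svm.train_auto(samples, responses, Mat(), Mat(), params);

    Mat testSample = (Mat_<float>(1, 2) << 3.5f, 4.2f);
    printf("predicted label: %g\n", svm.predict(testSample));
    return 0;
}
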
@ -608,7 +610,7 @@ protected:
* Expectation - Maximization *
\****************************************************************************************/
struct CV_EXPORTS_AS_MAP CvEMParams
struct CV_EXPORTS_W_MAP CvEMParams
{
CvEMParams() : nclusters(10), cov_mat_type(1/*CvEM::COV_MAT_DIAGONAL*/),
start_step(0/*CvEM::START_AUTO_STEP*/), probs(0), weights(0), means(0), covs(0)
@ -624,18 +626,18 @@ struct CV_EXPORTS_AS_MAP CvEMParams
probs(_probs), weights(_weights), means(_means), covs(_covs), term_crit(_term_crit)
{}
int nclusters;
int cov_mat_type;
int start_step;
CV_PROP_RW int nclusters;
CV_PROP_RW int cov_mat_type;
CV_PROP_RW int start_step;
const CvMat* probs;
const CvMat* weights;
const CvMat* means;
const CvMat** covs;
CvTermCriteria term_crit;
CV_PROP_RW CvTermCriteria term_crit;
};
class CV_EXPORTS_AS(EM) CvEM : public CvStatModel
class CV_EXPORTS_W CvEM : public CvStatModel
{
public:
// Type of covariation matrices
@ -659,23 +661,33 @@ public:
#ifndef SWIG
CV_WRAP CvEM( const cv::Mat& samples, const cv::Mat& sampleIdx=cv::Mat(),
CvEMParams params=CvEMParams(), cv::Mat* labels=0 );
CvEMParams params=CvEMParams() );
CV_WRAP virtual bool train( const cv::Mat& samples, const cv::Mat& sampleIdx=cv::Mat(),
CvEMParams params=CvEMParams(), cv::Mat* labels=0 );
CV_WRAP virtual bool train( const cv::Mat& samples,
const cv::Mat& sampleIdx=cv::Mat(),
CvEMParams params=CvEMParams(),
CV_OUT cv::Mat* labels=0 );
CV_WRAP virtual float predict( const cv::Mat& sample, cv::Mat* probs ) const;
CV_WRAP virtual float predict( const cv::Mat& sample, CV_OUT cv::Mat* probs=0 ) const;
CV_WRAP int getNClusters() const;
CV_WRAP cv::Mat getMeans() const;
CV_WRAP void getCovs(CV_OUT std::vector<cv::Mat>& covs) const;
CV_WRAP cv::Mat getWeights() const;
CV_WRAP cv::Mat getProbs() const;
CV_WRAP inline double getLikelihood() const { return log_likelihood; };
#endif
CV_WRAP virtual void clear();
CV_WRAP int get_nclusters() const;
CV_WRAP const CvMat* get_means() const;
CV_WRAP const CvMat** get_covs() const;
CV_WRAP const CvMat* get_weights() const;
CV_WRAP const CvMat* get_probs() const;
int get_nclusters() const;
const CvMat* get_means() const;
const CvMat** get_covs() const;
const CvMat* get_weights() const;
const CvMat* get_probs() const;
CV_WRAP inline double get_log_likelihood () const { return log_likelihood; };
inline double get_log_likelihood () const { return log_likelihood; };
// inline const CvMat * get_log_weight_div_det () const { return log_weight_div_det; };
// inline const CvMat * get_inv_eigen_values () const { return inv_eigen_values; };
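
The old get_* accessors return CvMat pointers, which do not map to Python, so Mat-returning getters are added next to them. A small C++ sketch using the new ones (data and cluster count are arbitrary):

#include "opencv2/ml/ml.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat samples(300, 2, CV_32F);
    Mat s0 = samples.rowRange(0, 150), s1 = samples.rowRange(150, 300);
    randn(s0, Scalar::all(0), Scalar::all(1));
    randn(s1, Scalar::all(6), Scalar::all(1));

    CvEMParams params;
    params.nclusters = 2;

    CvEM em;
    Mat labels;
    em.train(samples, Mat(), params, &labels);    // new-style train with CV_OUT labels

    printf("clusters: %d, log-likelihood: %g\n", em.getNClusters(), em.getLikelihood());
    Mat means = em.getMeans();                    // one row per cluster centre
    for( int i = 0; i < means.rows; i++ )
        printf("mean %d: (%g, %g)\n", i, means.at<double>(i,0), means.at<double>(i,1));
    return 0;
}
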
@ -771,16 +783,16 @@ struct CvDTreeNode
};
struct CV_EXPORTS_AS_MAP CvDTreeParams
struct CV_EXPORTS_W_MAP CvDTreeParams
{
int max_categories;
int max_depth;
int min_sample_count;
int cv_folds;
bool use_surrogates;
bool use_1se_rule;
bool truncate_pruned_tree;
float regression_accuracy;
CV_PROP_RW int max_categories;
CV_PROP_RW int max_depth;
CV_PROP_RW int min_sample_count;
CV_PROP_RW int cv_folds;
CV_PROP_RW bool use_surrogates;
CV_PROP_RW bool use_1se_rule;
CV_PROP_RW bool truncate_pruned_tree;
CV_PROP_RW float regression_accuracy;
const float* priors;
CvDTreeParams() : max_categories(10), max_depth(INT_MAX), min_sample_count(10),
@ -914,7 +926,7 @@ namespace cv
struct ForestTreeBestSplitFinder;
}
class CV_EXPORTS_AS(DTree) CvDTree : public CvStatModel
class CV_EXPORTS_W CvDTree : public CvStatModel
{
public:
CV_WRAP CvDTree();
@ -945,9 +957,10 @@ public:
CV_WRAP virtual CvDTreeNode* predict( const cv::Mat& sample, const cv::Mat& missingDataMask=cv::Mat(),
bool preprocessedInput=false ) const;
CV_WRAP virtual cv::Mat getVarImportance();
#endif
CV_WRAP virtual const CvMat* get_var_importance();
virtual const CvMat* get_var_importance();
CV_WRAP virtual void clear();
virtual void read( CvFileStorage* fs, CvFileNode* node );
@ -1047,12 +1060,12 @@ protected:
};
struct CV_EXPORTS_AS_MAP CvRTParams : public CvDTreeParams
struct CV_EXPORTS_W_MAP CvRTParams : public CvDTreeParams
{
//Parameters for the forest
bool calc_var_importance; // true <=> RF processes variable importance
int nactive_vars;
CvTermCriteria term_crit;
CV_PROP_RW bool calc_var_importance; // true <=> RF processes variable importance
CV_PROP_RW int nactive_vars;
CV_PROP_RW CvTermCriteria term_crit;
CvRTParams() : CvDTreeParams( 5, 10, 0, false, 10, 0, false, false, 0 ),
calc_var_importance(false), nactive_vars(0)
@ -1077,7 +1090,7 @@ struct CV_EXPORTS_AS_MAP CvRTParams : public CvDTreeParams
};
class CV_EXPORTS_AS(RTrees) CvRTrees : public CvStatModel
class CV_EXPORTS_W CvRTrees : public CvStatModel
{
public:
CV_WRAP CvRTrees();
@ -1100,11 +1113,12 @@ public:
CvRTParams params=CvRTParams() );
CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing = cv::Mat() ) const;
CV_WRAP virtual float predict_prob( const cv::Mat& sample, const cv::Mat& missing = cv::Mat() ) const;
CV_WRAP virtual cv::Mat getVarImportance();
#endif
CV_WRAP virtual void clear();
CV_WRAP virtual const CvMat* get_var_importance();
virtual const CvMat* get_var_importance();
virtual float get_proximity( const CvMat* sample1, const CvMat* sample2,
const CvMat* missing1 = 0, const CvMat* missing2 = 0 ) const;
@ -1176,7 +1190,7 @@ protected:
virtual void split_node_data( CvDTreeNode* n );
};
class CV_EXPORTS_AS(ERTrees) CvERTrees : public CvRTrees
class CV_EXPORTS_W CvERTrees : public CvRTrees
{
public:
CV_WRAP CvERTrees();
@ -1203,12 +1217,12 @@ protected:
* Boosted tree classifier *
\****************************************************************************************/
struct CV_EXPORTS_AS_MAP CvBoostParams : public CvDTreeParams
struct CV_EXPORTS_W_MAP CvBoostParams : public CvDTreeParams
{
int boost_type;
int weak_count;
int split_criteria;
double weight_trim_rate;
CV_PROP_RW int boost_type;
CV_PROP_RW int weak_count;
CV_PROP_RW int split_criteria;
CV_PROP_RW double weight_trim_rate;
CvBoostParams();
CvBoostParams( int boost_type, int weak_count, double weight_trim_rate,
@ -1265,7 +1279,7 @@ protected:
};
class CV_EXPORTS_AS(Boost) CvBoost : public CvStatModel
class CV_EXPORTS_W CvBoost : public CvStatModel
{
public:
// Boosting type
@ -1313,8 +1327,8 @@ public:
bool update=false );
CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing=cv::Mat(),
cv::Mat* weak_responses=0, CvSlice slice=CV_WHOLE_SEQ,
bool rawMode=false, bool returnSum=false ) const;
const cv::Range& slice=cv::Range::all(), bool rawMode=false,
bool returnSum=false ) const;
#endif
virtual float calc_error( CvMLData* _data, int type , std::vector<float> *resp = 0 ); // type in {CV_TRAIN_ERROR, CV_TEST_ERROR}
@ -1382,12 +1396,12 @@ protected:
// Each tree prediction is multiplied by the shrinkage value.
struct CV_EXPORTS_AS_MAP CvGBTreesParams : public CvDTreeParams
struct CV_EXPORTS_W_MAP CvGBTreesParams : public CvDTreeParams
{
int weak_count;
int loss_function_type;
float subsample_portion;
float shrinkage;
CV_PROP_RW int weak_count;
CV_PROP_RW int loss_function_type;
CV_PROP_RW float subsample_portion;
CV_PROP_RW float shrinkage;
CvGBTreesParams();
CvGBTreesParams( int loss_function_type, int weak_count, float shrinkage,
@ -1442,7 +1456,7 @@ struct CV_EXPORTS_AS_MAP CvGBTreesParams : public CvDTreeParams
class CV_EXPORTS_AS(GBTrees) CvGBTrees : public CvStatModel
class CV_EXPORTS_W CvGBTrees : public CvStatModel
{
public:
@ -1523,7 +1537,7 @@ public:
// OUTPUT
// RESULT
*/
CV_WRAP CvGBTrees( const CvMat* trainData, int tflag,
CvGBTrees( const CvMat* trainData, int tflag,
const CvMat* responses, const CvMat* varIdx=0,
const CvMat* sampleIdx=0, const CvMat* varType=0,
const CvMat* missingDataMask=0,
@ -1575,7 +1589,7 @@ public:
// RESULT
// Error state.
*/
CV_WRAP virtual bool train( const CvMat* trainData, int tflag,
virtual bool train( const CvMat* trainData, int tflag,
const CvMat* responses, const CvMat* varIdx=0,
const CvMat* sampleIdx=0, const CvMat* varType=0,
const CvMat* missingDataMask=0,
@ -1631,7 +1645,7 @@ public:
// RESULT
// Predicted value.
*/
CV_WRAP virtual float predict( const CvMat* sample, const CvMat* missing=0,
virtual float predict( const CvMat* sample, const CvMat* missing=0,
CvMat* weakResponses=0, CvSlice slice = CV_WHOLE_SEQ,
int k=-1 ) const;
@ -1702,7 +1716,25 @@ public:
*/
virtual void read( CvFileStorage* fs, CvFileNode* node );
// new-style C++ interface
CV_WRAP CvGBTrees( const cv::Mat& trainData, int tflag,
const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
const cv::Mat& missingDataMask=cv::Mat(),
CvGBTreesParams params=CvGBTreesParams() );
CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
const cv::Mat& missingDataMask=cv::Mat(),
CvGBTreesParams params=CvGBTreesParams(),
bool update=false );
CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing=cv::Mat(),
const cv::Range& slice = cv::Range::all(),
int k=-1 ) const;
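
A minimal sketch of the new-style Mat interface declared just above, fitting a 1-D regression on synthetic data (data and parameters are arbitrary):

#include "opencv2/ml/ml.hpp"
#include <cstdio>
#include <cmath>
using namespace cv;

int main()
{
    // y = sin(x) plus a little noise, 200 one-dimensional samples
    Mat x(200, 1, CV_32F), y(200, 1, CV_32F);
    for( int i = 0; i < 200; i++ )
    {
        x.at<float>(i) = i * 0.05f;
        y.at<float>(i) = std::sin(x.at<float>(i)) + (float)theRNG().gaussian(0.05);
    }

    CvGBTreesParams params;                       // squared-loss defaults
    CvGBTrees gbt;
    gbt.train(x, CV_ROW_SAMPLE, y, Mat(), Mat(), Mat(), Mat(), params);

    Mat q = (Mat_<float>(1, 1) << 1.0f);
    printf("prediction at x=1: %g (sin(1)=%g)\n", gbt.predict(q), std::sin(1.0));
    return 0;
}
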
protected:
/*
@ -1893,7 +1925,7 @@ protected:
/////////////////////////////////// Multi-Layer Perceptrons //////////////////////////////
struct CV_EXPORTS_AS_MAP CvANN_MLP_TrainParams
struct CV_EXPORTS_W_MAP CvANN_MLP_TrainParams
{
CvANN_MLP_TrainParams();
CvANN_MLP_TrainParams( CvTermCriteria term_crit, int train_method,
@ -1902,29 +1934,29 @@ struct CV_EXPORTS_AS_MAP CvANN_MLP_TrainParams
enum { BACKPROP=0, RPROP=1 };
CvTermCriteria term_crit;
int train_method;
CV_PROP_RW CvTermCriteria term_crit;
CV_PROP_RW int train_method;
// backpropagation parameters
double bp_dw_scale, bp_moment_scale;
CV_PROP_RW double bp_dw_scale, bp_moment_scale;
// rprop parameters
double rp_dw0, rp_dw_plus, rp_dw_minus, rp_dw_min, rp_dw_max;
CV_PROP_RW double rp_dw0, rp_dw_plus, rp_dw_minus, rp_dw_min, rp_dw_max;
};
class CV_EXPORTS_AS(ANN_MLP) CvANN_MLP : public CvStatModel
class CV_EXPORTS_W CvANN_MLP : public CvStatModel
{
public:
CV_WRAP CvANN_MLP();
CvANN_MLP( const CvMat* layerSizes,
int activateFunc=SIGMOID_SYM,
int activateFunc=CvANN_MLP::SIGMOID_SYM,
double fparam1=0, double fparam2=0 );
virtual ~CvANN_MLP();
virtual void create( const CvMat* layerSizes,
int activateFunc=SIGMOID_SYM,
int activateFunc=CvANN_MLP::SIGMOID_SYM,
double fparam1=0, double fparam2=0 );
virtual int train( const CvMat* inputs, const CvMat* outputs,
@ -1935,11 +1967,11 @@ public:
#ifndef SWIG
CV_WRAP CvANN_MLP( const cv::Mat& layerSizes,
int activateFunc=SIGMOID_SYM,
int activateFunc=CvANN_MLP::SIGMOID_SYM,
double fparam1=0, double fparam2=0 );
CV_WRAP virtual void create( const cv::Mat& layerSizes,
int activateFunc=SIGMOID_SYM,
int activateFunc=CvANN_MLP::SIGMOID_SYM,
double fparam1=0, double fparam2=0 );
CV_WRAP virtual int train( const cv::Mat& inputs, const cv::Mat& outputs,
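
The SIGMOID_SYM defaults now carry the CvANN_MLP:: qualifier for the parser; in C++ nothing changes. A short sketch of the Mat-based constructor/train/predict path (network shape and training parameters are arbitrary):

#include "opencv2/ml/ml.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    // learn XOR: 4 samples, 2 inputs, 1 output
    Mat inputs  = (Mat_<float>(4, 2) << 0,0, 0,1, 1,0, 1,1);
    Mat outputs = (Mat_<float>(4, 1) << 0, 1, 1, 0);

    Mat layers = (Mat_<int>(3, 1) << 2, 5, 1);    // 2 inputs, 5 hidden units, 1 output
    CvANN_MLP mlp(layers, CvANN_MLP::SIGMOID_SYM, 1, 1);

    CvANN_MLP_TrainParams tp;
    tp.term_crit = cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 1000, 1e-6);
    mlp.train(inputs, outputs, Mat(), Mat(), tp);

    Mat result(4, 1, CV_32F);                     // pre-allocated output buffer
    mlp.predict(inputs, result);
    for( int i = 0; i < 4; i++ )
        printf("%g xor %g -> %g\n", inputs.at<float>(i,0),
               inputs.at<float>(i,1), result.at<float>(i,0));
    return 0;
}
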

View File

@ -1501,6 +1501,17 @@ void CvANN_MLP::read( CvFileStorage* fs, CvFileNode* node )
using namespace cv;
CvANN_MLP::CvANN_MLP( const Mat& _layer_sizes, int _activ_func,
double _f_param1, double _f_param2 )
{
layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0;
rng = cvRNG(-1);
default_model_name = "my_nn";
create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
}
void CvANN_MLP::create( const Mat& _layer_sizes, int _activ_func,
double _f_param1, double _f_param2 )
{

View File

@ -2138,11 +2138,10 @@ CvBoost::train( const Mat& _train_data, int _tflag,
float
CvBoost::predict( const Mat& _sample, const Mat& _missing,
Mat* weak_responses, CvSlice slice,
bool raw_mode, bool return_sum ) const
const Range& slice, bool raw_mode, bool return_sum ) const
{
CvMat sample = _sample, mmask = _missing, wr, *pwr = 0;
if( weak_responses )
CvMat sample = _sample, mmask = _missing;
/*if( weak_responses )
{
int weak_count = cvSliceLength( slice, weak );
if( weak_count >= weak->total )
@ -2156,8 +2155,10 @@ CvBoost::predict( const Mat& _sample, const Mat& _missing,
weak_responses->cols + weak_responses->rows - 1 == weak_count) )
weak_responses->create(weak_count, 1, CV_32FC1);
pwr = &(wr = *weak_responses);
}
return predict(&sample, &mmask, pwr, slice, raw_mode, return_sum);
}*/
return predict(&sample, _missing.empty() ? 0 : &mmask, 0,
slice == Range::all() ? CV_WHOLE_SEQ : cvSlice(slice.start, slice.end),
raw_mode, return_sum);
}
/* End of file. */

View File

@ -1109,14 +1109,13 @@ const CvMat* CvEM::get_probs() const
using namespace cv;
CvEM::CvEM( const Mat& samples, const Mat& sample_idx,
CvEMParams params, Mat* labels )
CvEM::CvEM( const Mat& samples, const Mat& sample_idx, CvEMParams params )
{
means = weights = probs = inv_eigen_values = log_weight_div_det = 0;
covs = cov_rotate_mats = 0;
// just invoke the train() method
train(samples, sample_idx, params, labels);
train(samples, sample_idx, params);
}
bool CvEM::train( const Mat& _samples, const Mat& _sample_idx,
@ -1154,5 +1153,33 @@ CvEM::predict( const Mat& _sample, Mat* _probs ) const
return predict(&sample, pprobs);
}
int CvEM::getNClusters() const
{
return params.nclusters;
}
Mat CvEM::getMeans() const
{
return Mat(means);
}
void CvEM::getCovs(vector<Mat>& _covs) const
{
int i, n = params.nclusters;
_covs.resize(n);
for( i = 0; i < n; i++ )
_covs[i] = Mat(covs[i]);
}
Mat CvEM::getWeights() const
{
return Mat(weights);
}
Mat CvEM::getProbs() const
{
return Mat(probs);
}
/* End of file. */

View File

@ -1040,3 +1040,49 @@ CvGBTrees::calc_error( CvMLData* _data, int type, std::vector<float> *resp )
return err;
}
CvGBTrees::CvGBTrees( const cv::Mat& trainData, int tflag,
const cv::Mat& responses, const cv::Mat& varIdx,
const cv::Mat& sampleIdx, const cv::Mat& varType,
const cv::Mat& missingDataMask,
CvGBTreesParams params )
{
data = 0;
weak = 0;
default_model_name = "my_boost_tree";
orig_response = sum_response = sum_response_tmp = 0;
weak_eval = subsample_train = subsample_test = 0;
missing = sample_idx = 0;
class_labels = 0;
class_count = 1;
delta = 0.0f;
clear();
train(trainData, tflag, responses, varIdx, sampleIdx, varType, missingDataMask, params, false);
}
bool CvGBTrees::train( const cv::Mat& trainData, int tflag,
const cv::Mat& responses, const cv::Mat& varIdx,
const cv::Mat& sampleIdx, const cv::Mat& varType,
const cv::Mat& missingDataMask,
CvGBTreesParams params,
bool update )
{
CvMat _trainData = trainData, _responses = responses;
CvMat _varIdx = varIdx, _sampleIdx = sampleIdx, _varType = varType;
CvMat _missingDataMask = missingDataMask;
return train(&_trainData, tflag, &_responses, varIdx.empty() ? 0 : &_varIdx,
sampleIdx.empty() ? 0 : &_sampleIdx, varType.empty() ? 0 : &_varType,
missingDataMask.empty() ? 0 : &_missingDataMask, params, update);
}
float CvGBTrees::predict( const cv::Mat& sample, const cv::Mat& missing,
const cv::Range& slice, int k ) const
{
CvMat _sample = sample, _missing = missing;
return predict(&_sample, missing.empty() ? 0 : &_missing, 0,
slice==cv::Range::all() ? CV_WHOLE_SEQ : cvSlice(slice.start, slice.end), k);
}

View File

@ -452,5 +452,12 @@ float CvKNearest::find_nearest( const Mat& _samples, int k, Mat* _results,
return find_nearest(&s, k, presults, _neighbors, pnresponses, pdist );
}
float CvKNearest::find_nearest( const cv::Mat& samples, int k, CV_OUT cv::Mat& results,
CV_OUT cv::Mat& neighborResponses, CV_OUT cv::Mat& dists) const
{
return find_nearest(samples, k, &results, 0, &neighborResponses, &dists);
}
/* End of file */

View File

@ -567,6 +567,26 @@ void CvNormalBayesClassifier::read( CvFileStorage* fs, CvFileNode* root_node )
using namespace cv;
CvNormalBayesClassifier::CvNormalBayesClassifier( const Mat& _train_data, const Mat& _responses,
const Mat& _var_idx, const Mat& _sample_idx )
{
var_count = var_all = 0;
var_idx = 0;
cls_labels = 0;
count = 0;
sum = 0;
productsum = 0;
avg = 0;
inv_eigen_values = 0;
cov_rotate_mats = 0;
c = 0;
default_model_name = "my_nb";
CvMat tdata = _train_data, responses = _responses, vidx = _var_idx, sidx = _sample_idx;
train(&tdata, &responses, vidx.data.ptr ? &vidx : 0,
sidx.data.ptr ? &sidx : 0);
}
bool CvNormalBayesClassifier::train( const Mat& _train_data, const Mat& _responses,
const Mat& _var_idx, const Mat& _sample_idx, bool update )
{

View File

@ -837,5 +837,9 @@ float CvRTrees::predict_prob( const Mat& _sample, const Mat& _missing) const
return predict_prob(&sample, mmask.data.ptr ? &mmask : 0);
}
Mat CvRTrees::getVarImportance()
{
return Mat(get_var_importance());
}
// End of file.

View File

@ -1985,6 +1985,21 @@ float CvSVM::predict( const CvMat* sample, bool returnDFVal ) const
}
CvSVM::CvSVM( const Mat& _train_data, const Mat& _responses,
const Mat& _var_idx, const Mat& _sample_idx, CvSVMParams _params )
{
decision_func = 0;
class_labels = 0;
class_weights = 0;
storage = 0;
var_idx = 0;
kernel = 0;
solver = 0;
default_model_name = "my_svm";
train( _train_data, _responses, _var_idx, _sample_idx, _params );
}
bool CvSVM::train( const Mat& _train_data, const Mat& _responses,
const Mat& _var_idx, const Mat& _sample_idx, CvSVMParams _params )
{

View File

@ -4074,4 +4074,9 @@ void CvDTree::read( CvFileStorage* fs, CvFileNode* node, CvDTreeTrainData* _data
__END__;
}
Mat CvDTree::getVarImportance()
{
return Mat(get_var_importance());
}
/* End of file. */

View File

@ -363,12 +363,13 @@ public:
CV_WRAP HOGDescriptor() : winSize(64,128), blockSize(16,16), blockStride(8,8),
cellSize(8,8), nbins(9), derivAperture(1), winSigma(-1),
histogramNormType(L2Hys), L2HysThreshold(0.2), gammaCorrection(true)
histogramNormType(HOGDescriptor::L2Hys), L2HysThreshold(0.2), gammaCorrection(true)
{}
CV_WRAP HOGDescriptor(Size _winSize, Size _blockSize, Size _blockStride,
Size _cellSize, int _nbins, int _derivAperture=1, double _winSigma=-1,
int _histogramNormType=L2Hys, double _L2HysThreshold=0.2, bool _gammaCorrection=false)
int _histogramNormType=HOGDescriptor::L2Hys,
double _L2HysThreshold=0.2, bool _gammaCorrection=false)
: winSize(_winSize), blockSize(_blockSize), blockStride(_blockStride), cellSize(_cellSize),
nbins(_nbins), derivAperture(_derivAperture), winSigma(_winSigma),
histogramNormType(_histogramNormType), L2HysThreshold(_L2HysThreshold),

View File

@ -18,14 +18,33 @@ include_directories(
"${CMAKE_SOURCE_DIR}/modules/legacy/include"
"${CMAKE_SOURCE_DIR}/modules/contrib/include"
)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
set(opencv_hdrs "${CMAKE_SOURCE_DIR}/modules/core/include/opencv2/core/core.hpp"
"${CMAKE_SOURCE_DIR}/modules/imgproc/include/opencv2/imgproc/imgproc.hpp"
"${CMAKE_SOURCE_DIR}/modules/video/include/opencv2/video/background_segm.hpp"
"${CMAKE_SOURCE_DIR}/modules/video/include/opencv2/video/tracking.hpp"
"${CMAKE_SOURCE_DIR}/modules/highgui/include/opencv2/highgui/highgui.hpp"
"${CMAKE_SOURCE_DIR}/modules/ml/include/opencv2/ml/ml.hpp"
"${CMAKE_SOURCE_DIR}/modules/features2d/include/opencv2/features2d/features2d.hpp"
"${CMAKE_SOURCE_DIR}/modules/calib3d/include/opencv2/calib3d/calib3d.hpp"
"${CMAKE_SOURCE_DIR}/modules/objdetect/include/opencv2/objdetect/objdetect.hpp"
"${CMAKE_SOURCE_DIR}/modules/python/opencv_extra_api.hpp")
set(generated_hdrs
"${CMAKE_CURRENT_BINARY_DIR}/pyopencv_generated_funcs.h"
"${CMAKE_CURRENT_BINARY_DIR}/pyopencv_generated_func_tab.h"
"${CMAKE_CURRENT_BINARY_DIR}/pyopencv_generated_types.h"
"${CMAKE_CURRENT_BINARY_DIR}/pyopencv_generated_type_reg.h"
"${CMAKE_CURRENT_BINARY_DIR}/pyopencv_generated_const_reg.h")
if(MSVC)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W3")
endif()
file(GLOB lib_srcs "*.cpp")
file(GLOB lib_hdrs "*.h")
add_custom_command(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/generated0.i
@ -35,8 +54,16 @@ add_custom_command(
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/gen.py
)
add_custom_command(
OUTPUT ${generated_hdrs}
COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/gen2.py ${CMAKE_CURRENT_BINARY_DIR} ${opencv_hdrs}
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/gen2.py
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/hdr_parser.py
DEPENDS ${opencv_hdrs}
)
set(the_target "opencv_python")
add_library(${the_target} ${lib_srcs} ${lib_hdrs} ${lib_int_hdrs} ${CMAKE_CURRENT_BINARY_DIR}/generated0.i)
add_library(${the_target} ${lib_srcs} ${lib_hdrs} ${lib_int_hdrs} ${CMAKE_CURRENT_BINARY_DIR}/generated0.i opencv2x.h opencv_extra_api.hpp ${generated_hdrs})
target_link_libraries(${the_target} ${PYTHON_LIBRARIES} opencv_core opencv_imgproc opencv_video opencv_ml opencv_features2d opencv_highgui opencv_calib3d opencv_objdetect opencv_legacy opencv_contrib)
set_target_properties(${the_target} PROPERTIES PREFIX "")
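
The custom command above amounts to a single run of gen2.py over the wrapped headers. A minimal sketch of reproducing that step by hand from the modules/python source directory (header list shortened, output directory illustrative):

# equivalent of: ${PYTHON_EXECUTABLE} gen2.py <binary_dir> <opencv_hdrs...>
import gen2

hdrs = ["../core/include/opencv2/core/core.hpp",
        "../imgproc/include/opencv2/imgproc/imgproc.hpp",
        "opencv_extra_api.hpp"]                        # a subset of the opencv_hdrs list above
gen2.PythonWrapperGenerator().gen(hdrs, "./generated") # writes the five pyopencv_generated_*.h files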

View File

@ -3848,20 +3848,13 @@ static int zero = 0;
#include "generated0.i"
#include "opencv2x.h"
#include "pyopencv_generated_types.h"
#include "pyopencv_generated_funcs.h"
static PyMethodDef methods[] = {
#if PYTHON_USE_NUMPY
{"fromarray", (PyCFunction)pycvfromarray, METH_KEYWORDS, "fromarray(array) -> cvmatnd"},
{"absdiff", (PyCFunction)cv::pyopencv_absdiff, METH_KEYWORDS, "absdiff(src1,src2,dst=None) -> dst"},
{"add", (PyCFunction)cv::pyopencv_add, METH_KEYWORDS, "add(src1,src2,dst=None,mask=None) -> dst"},
{"bitwise_and", (PyCFunction)cv::pyopencv_and, METH_KEYWORDS, "bitwise_and(src1,src2,dst=None,mask=None) -> dst"},
{"bitwise_or", (PyCFunction)cv::pyopencv_or, METH_KEYWORDS, "bitwise_or(src1,src2,dst=None,mask=None) -> dst"},
{"bitwise_xor", (PyCFunction)cv::pyopencv_xor, METH_KEYWORDS, "bitwise_xor(src1,src2,dst=None,mask=None) -> dst"},
{"max", (PyCFunction)cv::pyopencv_max, METH_KEYWORDS, "max(src1,src2,dst=None) -> dst"},
{"min", (PyCFunction)cv::pyopencv_min, METH_KEYWORDS, "min(src1,src2,dst=None) -> dst"},
{"subtract", (PyCFunction)cv::pyopencv_subtract, METH_KEYWORDS, "subtract(src1,src2,dst=None,mask=None) -> dst"},
#endif
//{"CalcOpticalFlowFarneback", (PyCFunction)pycvCalcOpticalFlowFarneback, METH_KEYWORDS, "CalcOpticalFlowFarneback(prev, next, flow, pyr_scale=0.5, levels=3, win_size=15, iterations=3, poly_n=7, poly_sigma=1.5, flags=0) -> None"},
@ -3872,6 +3865,7 @@ static PyMethodDef methods[] = {
{"temp_test", temp_test, METH_VARARGS},
#include "generated1.i"
#include "pyopencv_generated_func_tab.h"
{NULL, NULL},
};
@ -3887,7 +3881,8 @@ static int to_ok(PyTypeObject *to)
return (PyType_Ready(to) == 0);
}
#define MKTYPE(NAME) do { NAME##_specials(); if (!to_ok(&NAME##_Type)) return; } while (0)
#define MKTYPE(NAME) NAME##_specials(); if (!to_ok(&NAME##_Type)) return
#define MKTYPE2(NAME) pyopencv_##NAME##_specials(); if (!to_ok(&pyopencv_##NAME##_Type)) return
using namespace cv;
@ -3923,6 +3918,7 @@ void initcv()
MKTYPE(memtrack);
#include "generated4.i"
#include "pyopencv_generated_type_reg.h"
m = Py_InitModule(MODULESTR"", methods);
d = PyModule_GetDict(m);
@ -3944,6 +3940,7 @@ void initcv()
#define PUBLISH(I) PyDict_SetItemString(d, #I, PyInt_FromLong(I))
#define PUBLISHU(I) PyDict_SetItemString(d, #I, PyLong_FromUnsignedLong(I))
#define PUBLISH2(I, value) PyDict_SetItemString(d, #I, PyLong_FromLong(value))
PUBLISHU(IPL_DEPTH_8U);
PUBLISHU(IPL_DEPTH_8S);
@ -4018,6 +4015,7 @@ void initcv()
PUBLISH(GC_EVAL);
#include "generated2.i"
#include "pyopencv_generated_const_reg.h"
#if 0
{
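
The net effect in this file: the hand-written arithmetic wrappers are gone and their table entries now come from pyopencv_generated_func_tab.h, while the calling convention stays what the old docstrings advertised. A hedged sketch, assuming cv::add is exported with CV_EXPORTS_W in core.hpp:

import cv
import numpy as np

a = np.zeros((3, 3), np.uint8)
b = np.ones((3, 3), np.uint8)
c = cv.add(a, b)              # generated wrapper; the CV_OUT dst is allocated by NumpyAllocator and returned
c = cv.add(a, b, mask=None)   # optional arguments can still be passed by keyword, as before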

View File

@ -1,13 +1,13 @@
import opencv_parser, sys, re, cStringIO
import hdr_parser, sys, re, os, cStringIO
from string import Template
gen_template_check_self = Template(""" if(!PyObject_TypeCheck(self, &pyopencv_${name}_Type))
return failmsg("Incorrect type of self (must be '${name}' or its derivative)");
$cname* _self_ = ((pyopencv_${name}_t*)self)->v;
return failmsgp("Incorrect type of self (must be '${name}' or its derivative)");
$cname* _self_ = ${amp}((pyopencv_${name}_t*)self)->v;
""")
gen_template_call_constructor = Template("""self = PyObject_NEW(pyopencv_${name}_t, &pyopencv_${name}_Type);
if(self) ERRWRAP2(self->v = new $cname""")
if(self) ERRWRAP2(self->v = $op$cname""")
gen_template_parse_args = Template("""const char* keywords[] = { $kw_list, NULL };
if( PyArg_ParseTupleAndKeywords(args, kw, "$fmtspec", (char**)keywords, $parse_arglist)$code_cvt )""")
@ -16,37 +16,88 @@ gen_template_func_body = Template("""$code_decl
$code_parse
{
$code_fcall;
return $code_retval;
$code_ret;
}
""")
gen_template_set_prop_from_map = Template("""
if( PyMapping_HasKeyString(src, "$propname") )
gen_template_simple_type_decl = Template("""
struct pyopencv_${name}_t
{
PyObject_HEAD
${cname} v;
};
static PyTypeObject pyopencv_${name}_Type =
{
PyObject_HEAD_INIT(&PyType_Type)
0,
MODULESTR".$wname",
sizeof(pyopencv_${name}_t),
};
static void pyopencv_${name}_dealloc(PyObject* self)
{
PyObject_Del(self);
}
static PyObject* pyopencv_from(const ${cname}& r)
{
pyopencv_${name}_t *m = PyObject_NEW(pyopencv_${name}_t, &pyopencv_${name}_Type);
m->v = r;
return (PyObject*)m;
}
static bool pyopencv_to(PyObject* src, ${cname}& dst, const char* name="<unknown>")
{
if( src == NULL or src == Py_None )
return true;
if(!PyObject_TypeCheck(src, &pyopencv_${name}_Type))
{
tmp = PyMapping_GetItemString(src, "$propname");
ok = tmp && pyopencv_to_$proptype(tmp, dst.$propname);
Py_DECREF(tmp);
if(!ok) return false;
}""")
failmsg("Expected ${cname} for argument '%s'", name);
return false;
}
dst = ((pyopencv_${name}_t*)src)->v;
return true;
}
""")
gen_template_decl_type = Template("""
/*
$cname is the OpenCV C struct
pyopencv_${name}_t is the Python object
*/
gen_template_type_decl = Template("""
struct pyopencv_${name}_t
{
PyObject_HEAD
${cname}* v;
};
static PyTypeObject pyopencv_${name}_Type =
{
PyObject_HEAD_INIT(&PyType_Type)
0,
MODULESTR".$wname",
sizeof(pyopencv_${name}_t),
};
static void pyopencv_${name}_dealloc(PyObject* self)
{
delete ((pyopencv_${name}_t*)self)->v;
PyObject_Del(self);
}
""")
gen_template_map_type_cvt = Template("""
static bool pyopencv_to(PyObject* src, ${cname}& dst, const char* name="<unknown>");
""")
gen_template_set_prop_from_map = Template("""
if( PyMapping_HasKeyString(src, (char*)"$propname") )
{
tmp = PyMapping_GetItemString(src, (char*)"$propname");
ok = tmp && pyopencv_to(tmp, dst.$propname);
Py_DECREF(tmp);
if(!ok) return false;
}""")
gen_template_type_impl = Template("""
static PyObject* pyopencv_${name}_repr(PyObject* self)
{
char str[1000];
@ -69,14 +120,6 @@ ${methods_inits}
{NULL, NULL}
};
static PyTypeObject pyopencv_${name}_Type =
{
PyObject_HEAD_INIT(&PyType_Type)
0,
MODULESTR".$wname",
sizeof(pyopencv_${name}_t),
};
static void pyopencv_${name}_specials(void)
{
pyopencv_${name}_Type.tp_base = ${baseptr};
@ -85,32 +128,13 @@ static void pyopencv_${name}_specials(void)
pyopencv_${name}_Type.tp_getset = pyopencv_${name}_getseters;
pyopencv_${name}_Type.tp_methods = pyopencv_${name}_methods;${extra_specials}
}
static PyObject* pyopencv_from_${name}_ptr(<$cname>* r)
{
pyopencv_${name}_t *m = PyObject_NEW(pyopencv_${name}_t, &pyopencv_${name}_Type);
m->v = r;
return (PyObject*)m;
}
static bool pyopencv_to_${name}_ptr(PyObject* src, <$cname>*& dst, const char* name="")
{
if( src == NULL or src == Py_None )
{
dst = 0;
return true;
}
if(!PyObject_TypeCheck(src, &pyopencv_${name}_Type))
return failmsg("Expected ${cname} for argument '%s'", name);
dst = ((pyopencv_${name}_t*)src)->v;
return true;
}
""")
gen_template_get_prop = Template("""
static PyObject* pyopencv_${name}_get_${member}(pyopencv_${name}_t* p, void *closure)
{
return pyopencv_from_${membertype}(p->v->${member});
return pyopencv_from(p->v${access}${member});
}
""")
@ -122,7 +146,7 @@ static int pyopencv_${name}_set_${member}(pyopencv_${name}_t* p, PyObject *value
PyErr_SetString(PyExc_TypeError, "Cannot delete the ${member} attribute");
return -1;
}
return pyopencv_to_${membertype}(value, p->v->${member}) ? 0 : -1;
return pyopencv_to(value, p->v${access}${member}) ? 0 : -1;
}
""")
@ -133,16 +157,16 @@ gen_template_rw_prop_init = Template("""
{(char*)"${member}", (getter)pyopencv_${name}_get_${member}, (setter)pyopencv_${name}_set_${member}, (char*)"${member}", NULL},""")
simple_argtype_mapping = {
"bool": ("bool", "b", "pyopencv_from_bool", "0"),
"int": ("int", "i", "pyopencv_from_int", "0"),
"float": ("float", "f", "pyopencv_from_float", "0.f"),
"double": ("double", "d", "pyopencv_from_double", "0"),
"c_string": ("char*", "s", "pyopencv_from_c_string", '""')
"bool": ("bool", "b", "0"),
"int": ("int", "i", "0"),
"float": ("float", "f", "0.f"),
"double": ("double", "d", "0"),
"c_string": ("char*", "s", '""')
}
class ClassProp(object):
def __init__(self, decl):
self.tp = decl[0]
self.tp = decl[0].replace("*", "_ptr")
self.name = decl[1]
self.readonly = True
if "/RW" in decl[3]:
@ -153,6 +177,7 @@ class ClassInfo(object):
self.cname = name.replace(".", "::")
self.name = self.wname = re.sub(r"^cv\.", "", name)
self.ismap = False
self.issimple = False
self.methods = {}
self.props = []
self.consts = {}
@ -170,52 +195,61 @@ class ClassInfo(object):
customname = True
elif m == "/Map":
self.ismap = True
elif m == "/Simple":
self.issimple = True
self.props = [ClassProp(p) for p in decl[3]]
if not customname and self.wname.startswith("Cv"):
self.wname = self.wname[2:]
def gen_map_code(self):
code = "static bool pyopencv_to_%s(PyObject* src, %s& dst)\n{\n PyObject* tmp;\n bool ok;\n" % (self.name, self.cname)
def gen_map_code(self, all_classes):
code = "static bool pyopencv_to(PyObject* src, %s& dst, const char* name)\n{\n PyObject* tmp;\n bool ok;\n" % (self.cname)
code += "".join([gen_template_set_prop_from_map.substitute(propname=p.name,proptype=p.tp) for p in self.props])
code += "\n return true;\n}"
if self.bases:
code += "\n return pyopencv_to(src, (%s&)dst, name);\n}\n" % all_classes[self.bases[0]].cname
else:
code += "\n return true;\n}\n"
return code
def gen_code(self, all_classes):
if self.ismap:
return self.gen_map_code()
return self.gen_map_code(all_classes)
getset_code = ""
getset_inits = ""
getset_code = cStringIO.StringIO()
getset_inits = cStringIO.StringIO()
sorted_props = [(p.name, p) for p in self.props]
sorted_props.sort()
access_op = "->"
if self.issimple:
access_op = "."
for pname, p in sorted_props:
getset_code += gen_template_get_prop.substitute(name=self.name, member=pname, membertype=p.tp)
getset_code.write(gen_template_get_prop.substitute(name=self.name, member=pname, membertype=p.tp, access=access_op))
if p.readonly:
getset_inits += gen_template_prop_init.substitute(name=self.name, member=pname)
getset_inits.write(gen_template_prop_init.substitute(name=self.name, member=pname))
else:
getset_code += gen_template_set_prop.substitute(name=self.name, member=pname, membertype=p.tp)
getset_inits += gen_template_rw_prop_init.substitute(name=self.name, member=pname)
getset_code.write(gen_template_set_prop.substitute(name=self.name, member=pname, membertype=p.tp, access=access_op))
getset_inits.write(gen_template_rw_prop_init.substitute(name=self.name, member=pname))
methods_code = ""
methods_inits = ""
methods_code = cStringIO.StringIO()
methods_inits = cStringIO.StringIO()
sorted_methods = self.methods.items()
sorted_methods.sort()
for mname, m in sorted_methods:
methods_code += m.gen_code(all_classes)
methods_inits += m.get_tab_entry()
methods_code.write(m.gen_code(all_classes))
methods_inits.write(m.get_tab_entry())
baseptr = "NULL"
if self.bases and all_classes.has_key(self.bases[0]):
baseptr = "&pyopencv_" + all_classes[self.bases[0]].name + "_Type"
code = gen_template_decl_type.substitute(name=self.name, wname=self.wname, cname=self.cname,
getset_code=getset_code, getset_inits=getset_inits,
methods_code=methods_code, methods_inits=methods_inits,
code = gen_template_type_impl.substitute(name=self.name, wname=self.wname, cname=self.cname,
getset_code=getset_code.getvalue(), getset_inits=getset_inits.getvalue(),
methods_code=methods_code.getvalue(), methods_inits=methods_inits.getvalue(),
baseptr=baseptr, extra_specials="")
return code
@ -258,13 +292,16 @@ class ArgInfo(object):
self.py_outputarg = False
def isbig(self):
return self.tp == "Mat" or self.tp.startswith("vector")
return self.tp == "Mat" or self.tp == "vector_Mat"# or self.tp.startswith("vector")
class FuncVariant(object):
def __init__(self, name, decl, isconstructor):
self.name = name
def __init__(self, classname, name, decl, isconstructor):
self.classname = classname
self.name = self.wname = name
self.isconstructor = isconstructor
if self.isconstructor and self.wname.startswith("Cv"):
self.wname = self.wname[2:]
self.rettype = decl[1]
if self.rettype == "void":
self.rettype = ""
@ -303,7 +340,7 @@ class FuncVariant(object):
# the list of output parameters. Also includes input/output parameters.
outlist = []
firstoptarg = 0
firstoptarg = 1000000
argno = -1
for a in self.args:
argno += 1
@ -317,8 +354,8 @@ class FuncVariant(object):
continue
if not a.defval:
arglist.append((a.name, argno))
firstoptarg = argno+1
else:
firstoptarg = min(firstoptarg, len(arglist))
# if there are some array output parameters before the first default parameter, they
# are added as optional parameters before the first optional parameter
if outarr_list:
@ -327,7 +364,10 @@ class FuncVariant(object):
arglist.append((a.name, argno))
if outarr_list:
firstoptarg = min(firstoptarg, len(arglist))
arglist += outarr_list
firstoptarg = min(firstoptarg, len(arglist))
noptargs = len(arglist) - firstoptarg
argnamelist = [aname for aname, argno in arglist]
argstr = ", ".join(argnamelist[:firstoptarg])
@ -338,13 +378,17 @@ class FuncVariant(object):
elif self.isconstructor:
assert outlist == []
outlist = [("self", -1)]
if outlist:
if self.isconstructor:
classname = self.classname
if classname.startswith("Cv"):
classname=classname[2:]
outstr = "<%s object>" % (classname,)
elif outlist:
outstr = ", ".join([o[0] for o in outlist])
elif self.isconstructor:
outstr = self.classname + " object"
else:
outstr = "None"
self.py_docstring = "%s(%s) -> %s" % (self.name, argstr, outstr)
self.py_docstring = "%s(%s) -> %s" % (self.wname, argstr, outstr)
self.py_noptargs = noptargs
self.py_arglist = arglist
for aname, argno in arglist:
@ -364,14 +408,17 @@ class FuncInfo(object):
self.variants = []
def add_variant(self, decl):
self.variants.append(FuncVariant(self.name, decl, self.isconstructor))
self.variants.append(FuncVariant(self.classname, self.name, decl, self.isconstructor))
def get_wrapper_name(self):
name = self.name
if self.classname:
cn = self.classname + "_"
classname = self.classname + "_"
if "[" in name:
name = "getelem"
else:
cn = ""
return "pyopencv_" + cn + self.name
classname = ""
return "pyopencv_" + classname + name
def get_wrapper_prototype(self):
full_fname = self.get_wrapper_name()
@ -395,11 +442,11 @@ class FuncInfo(object):
# Instead of ClassName(args ...) -> object or ClassName() -> object
# we write ClassName([args ...]) -> object
if have_empty_constructor and len(self.variants) == 2:
idx = self.variants[1].arglist != []
idx = self.variants[1].py_arglist != []
docstring_list = ["[" + self.variants[idx].py_docstring + "]"]
return Template(' {"$py_funcname", (PyCFunction)$wrap_funcname, METH_KEYWORDS, "$py_docstring"},\n'
).substitute(py_funcname = self.name, wrap_funcname=self.get_wrapper_name(),
).substitute(py_funcname = self.variants[0].wname, wrap_funcname=self.get_wrapper_name(),
py_docstring = " or ".join(docstring_list))
def gen_code(self, all_classes):
@ -414,7 +461,10 @@ class FuncInfo(object):
if self.classname:
selfinfo = all_classes[self.classname]
if not self.isconstructor:
code += gen_template_check_self.substitute(name=selfinfo.name, cname=selfinfo.cname)
amp = ""
if selfinfo.issimple:
amp = "&"
code += gen_template_check_self.substitute(name=selfinfo.name, cname=selfinfo.cname, amp=amp)
fullname = selfinfo.wname + "." + fullname
all_code_variants = []
@ -427,7 +477,10 @@ class FuncInfo(object):
if self.isconstructor:
code_decl += " pyopencv_%s_t* self = 0;\n" % selfinfo.name
code_fcall = gen_template_call_constructor.substitute(name=selfinfo.name, cname=selfinfo.cname)
op = "new "
if selfinfo.issimple:
op = ""
code_fcall = gen_template_call_constructor.substitute(name=selfinfo.name, cname=selfinfo.cname, op=op)
else:
code_fcall = "ERRWRAP2( "
if v.rettype:
@ -459,22 +512,24 @@ class FuncInfo(object):
print "Error: type with star: a.tp=%s, tp=%s, tp1=%s" % (a.tp, tp, tp1)
sys.exit(-1)
amapping = simple_argtype_mapping.get(tp, (tp, "O", "pyopencv_from_" + tp1, defval0))
amapping = simple_argtype_mapping.get(tp, (tp, "O", defval0))
all_cargs.append(amapping)
if a.py_inputarg:
if amapping[1] == "O":
code_decl += " PyObject* pyobj_%s = NULL;\n" % (a.name,)
parse_arglist.append("pyobj_" + a.name)
code_cvt_list.append("pyopencv_to_%s(pyobj_%s, %s)" % (tp1, a.name, a.name))
code_cvt_list.append("pyopencv_to(pyobj_%s, %s)" % (a.name, a.name))
else:
parse_arglist.append(a.name)
defval = a.defval
if not defval:
defval = amapping[3]
defval = amapping[2]
# "tp arg = tp();" is equivalent to "tp arg;" in the case of complex types
if defval == tp + "()" and amapping[1] == "O":
defval = ""
if a.outputarg and not a.inputarg:
defval = ""
if defval:
code_decl += " %s %s=%s;\n" % (amapping[0], a.name, defval)
else:
@ -496,7 +551,7 @@ class FuncInfo(object):
if v.rettype:
tp = v.rettype
tp1 = tp.replace("*", "_ptr")
amapping = simple_argtype_mapping.get(tp, (tp, "O", "pyopencv_from_" + tp1, "0"))
amapping = simple_argtype_mapping.get(tp, (tp, "O", "0"))
all_cargs.append(amapping)
if v.args:
@ -519,13 +574,13 @@ class FuncInfo(object):
code_parse = "if(PyObject_Size(args) == 0 && PyObject_Size(kw) == 0)"
if len(v.py_outlist) == 0:
code_retval = "Py_RETURN_NONE"
code_ret = "Py_RETURN_NONE"
elif len(v.py_outlist) == 1:
if self.isconstructor:
code_retval = "self"
code_ret = "return (PyObject*)self"
else:
aname, argno = v.py_outlist[0]
code_retval = "%s(%s)" % (all_cargs[argno][2], aname)
code_ret = "return pyopencv_from(%s)" % (aname,)
else:
# there is more than 1 return parameter; form the tuple out of them
fmtspec = "N"*len(v.py_outlist)
@ -533,11 +588,11 @@ class FuncInfo(object):
for aname, argno in v.py_outlist:
amapping = all_cargs[argno]
backcvt_arg_list.append("%s(%s)" % (amapping[2], aname))
code_retval = "Py_BuildTuple(\"(%s)\", %s)" % \
(fmtspec, ", ".join([all_cargs[argno][2] + "(" + aname + ")" for aname, argno in v.py_outlist]))
code_ret = "return Py_BuildValue(\"(%s)\", %s)" % \
(fmtspec, ", ".join(["pyopencv_from(" + aname + ")" for aname, argno in v.py_outlist]))
all_code_variants.append(gen_template_func_body.substitute(code_decl=code_decl,
code_parse=code_parse, code_fcall=code_fcall, code_retval=code_retval))
code_parse=code_parse, code_fcall=code_fcall, code_ret=code_ret))
if len(all_code_variants)==1:
# if the function/method has only 1 signature, then just put it
@ -559,7 +614,7 @@ class PythonWrapperGenerator(object):
self.consts = {}
self.code_types = cStringIO.StringIO()
self.code_funcs = cStringIO.StringIO()
self.code_functab = cStringIO.StringIO()
self.code_func_tab = cStringIO.StringIO()
self.code_type_reg = cStringIO.StringIO()
self.code_const_reg = cStringIO.StringIO()
@ -629,28 +684,41 @@ class PythonWrapperGenerator(object):
f.write(buf.getvalue())
f.close()
def gen(self, api_list, output_path):
def gen(self, srcfiles, output_path):
self.clear()
parser = hdr_parser.CppHeaderParser()
# step 1: scan the list of declarations and build more descriptive maps of classes, consts, functions
for decl in api_list:
name = decl[0]
if name.startswith("struct") or name.startswith("class"):
# class/struct
p = name.find(" ")
stype = name[:p]
name = name[p+1:].strip()
self.add_class(stype, name, decl)
elif name.startswith("const"):
# constant
self.add_const(name.replace("const ", "").strip(), decl)
else:
# function
self.add_func(decl)
# step 1: scan the headers and build more descriptive maps of classes, consts, functions
for hdr in srcfiles:
decls = parser.parse(hdr)
for decl in decls:
name = decl[0]
if name.startswith("struct") or name.startswith("class"):
# class/struct
p = name.find(" ")
stype = name[:p]
name = name[p+1:].strip()
self.add_class(stype, name, decl)
elif name.startswith("const"):
# constant
self.add_const(name.replace("const ", "").strip(), decl)
else:
# function
self.add_func(decl)
# step 2: generate code for the classes and their methods
classlist = self.classes.items()
classlist.sort()
for name, classinfo in classlist:
if classinfo.ismap:
self.code_types.write(gen_template_map_type_cvt.substitute(name=name, cname=classinfo.cname))
else:
if classinfo.issimple:
templ = gen_template_simple_type_decl
else:
templ = gen_template_type_decl
self.code_types.write(templ.substitute(name=name, wname=classinfo.wname, cname=classinfo.cname))
for name, classinfo in classlist:
code = classinfo.gen_code(self.classes)
self.code_types.write(code)
@ -663,6 +731,7 @@ class PythonWrapperGenerator(object):
for name, func in funclist:
code = func.gen_code(self.classes)
self.code_funcs.write(code)
self.code_func_tab.write(func.get_tab_entry())
# step 4: generate the code for constants
constlist = self.consts.items()
@ -672,18 +741,19 @@ class PythonWrapperGenerator(object):
# That's it. Now save all the files
self.save(output_path, "pyopencv_generated_funcs.h", self.code_funcs)
self.save(output_path, "pyopencv_generated_func_tab.h", self.code_functab)
self.save(output_path, "pyopencv_generated_func_tab.h", self.code_func_tab)
self.save(output_path, "pyopencv_generated_const_reg.h", self.code_const_reg)
self.save(output_path, "pyopencv_generated_types.h", self.code_types)
self.save(output_path, "pyopencv_generated_type_reg.h", self.code_type_reg)
def generate_all():
decls = opencv_parser.parse_all()
generator = PythonWrapperGenerator()
generator.gen(decls, "/Users/vp/tmp")
if __name__ == "__main__":
generate_all()
srcfiles = hdr_parser.opencv_hdr_list
dstdir = "/Users/vp/tmp"
if len(sys.argv) > 2:
dstdir = sys.argv[1]
srcfiles = sys.argv[2:]
generator = PythonWrapperGenerator()
generator.gen(srcfiles, dstdir)
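
To make the argument/docstring machinery concrete: for a declaration such as the SVDecomp helper added in opencv_extra_api.hpp further below, the CV_OUT Mats are moved into the optional tail of the argument list and also form the return tuple, so the generated wrapper behaves roughly like this (a sketch; the exact docstring text depends on how the return value is rendered):

import cv
import numpy as np

src = np.random.rand(4, 4).astype(np.float32)    # illustrative input
w, u, vt = cv.SVDecomp(src)                      # the three CV_OUT Mats come back as a tuple
help(cv.SVDecomp)                                # docstring ~ "SVDecomp(src[, w[, u[, vt[, flags]]]]) -> w, u, vt"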

View File

@ -1,19 +1,17 @@
import os, sys, re
# the list is only for debugging. The real list, used in the real OpenCV build, is specified in CMakeLists.txt
opencv_hdr_list = [
"../core/include/opencv2/core/core.hpp",
#"../core/include/opencv2/core/core_c.h",
"../ml/include/opencv2/ml/ml.hpp",
"../imgproc/include/opencv2/imgproc/imgproc.hpp",
#"../imgproc/include/opencv2/imgproc/imgproc_c.h",
"../calib3d/include/opencv2/calib3d/calib3d.hpp",
"../features2d/include/opencv2/features2d/features2d.hpp",
"../video/include/opencv2/video/tracking.hpp",
"../video/include/opencv2/video/background_segm.hpp",
"../objdetect/include/opencv2/objdetect/objdetect.hpp",
"../highgui/include/opencv2/highgui/highgui.hpp",
#"../highgui/include/opencv2/highgui/highgui_c.h",
"opencv_api_extra.hpp",
"opencv_extra_api.hpp",
]
"""
@ -40,7 +38,6 @@ class CppHeaderParser(object):
return s
def get_macro_arg(self, arg_str, npos):
npos2 = npos3 = arg_str.find("(", npos)
if npos2 < 0:
print "Error: no arguments for the macro at %d" % (self.lineno,)
@ -226,9 +223,12 @@ class CppHeaderParser(object):
"""
l = decl_str
modlist = []
if "CV_EXPORTS_AS_MAP" in l:
l = l.replace("CV_EXPORTS_AS_MAP", "")
if "CV_EXPORTS_W_MAP" in l:
l = l.replace("CV_EXPORTS_W_MAP", "")
modlist.append("/Map")
if "CV_EXPORTS_W_SIMPLE" in l:
l = l.replace("CV_EXPORTS_W_SIMPLE", "")
modlist.append("/Simple")
npos = l.find("CV_EXPORTS_AS")
if npos >= 0:
macro_arg, npos3 = self.get_macro_arg(l, npos)
@ -258,6 +258,7 @@ class CppHeaderParser(object):
("CV_WRAP" in decl_str) or ("CV_WRAP_AS" in decl_str)):
return []
top = self.block_stack[-1]
func_modlist = []
npos = decl_str.find("CV_EXPORTS_AS")
@ -270,6 +271,7 @@ class CppHeaderParser(object):
arg, npos3 = self.get_macro_arg(decl_str, npos)
func_modlist.append("="+arg)
decl_str = decl_str[:npos] + decl_str[npos3+1:]
# filter off some common prefixes, which are meaningless for Python wrappers.
# note that we do not strip "static" prefix, which does matter;
# it means class methods, not instance methods
@ -277,7 +279,7 @@ class CppHeaderParser(object):
("CV_EXPORTS_W", ""), ("CV_EXPORTS", ""), ("CV_WRAP ", " "), ("static CV_INLINE", ""), ("CV_INLINE", "")]).strip()
static_method = False
context = self.block_stack[-1][0]
context = top[0]
if decl_str.startswith("static") and (context == "class" or context == "struct"):
decl_str = decl_str[len("static"):].lstrip()
static_method = True
@ -306,7 +308,7 @@ class CppHeaderParser(object):
rettype, funcname, modlist, argno = self.parse_arg(decl_start, -1)
if argno >= 0:
classname = self.block_stack[-1][1]
classname = top[1]
if rettype == classname or rettype == "~" + classname:
rettype, funcname = "", rettype
else:
@ -492,7 +494,7 @@ class CppHeaderParser(object):
if (context == "struct" or context == "class") and end_token == ";" and stmt:
# looks like it's a member declaration; append the members to the class declaration
class_decl = stack_top[self.CLASS_DECL]
if ("CV_PROP" in stmt) or (class_decl and ("/Map" in class_decl[2])):
if ("CV_PROP" in stmt): # or (class_decl and ("/Map" in class_decl[2])):
var_modlist = []
if "CV_PROP_RW" in stmt:
var_modlist.append("/RW")
@ -656,28 +658,10 @@ class CppHeaderParser(object):
else:
print
def parse_all():
if __name__ == '__main__':
parser = CppHeaderParser()
decls = []
for hname in opencv_hdr_list:
decls += parser.parse(hname)
return decls
if __name__ == '__main__':
parser = CppHeaderParser()
decls = parse_all()
if 0:
pass
elif 1:
CppHeaderParser().print_decls(decls)
print len(decls)
else:
fcounts = {}
for f in decls:
fname = f[0]
fcounts[fname] = fcounts.get(fname, []) + [f]
items = fcounts.items()
items.sort()
for fname, flist in items:
if len(flist) > 1:
parser.print_decls(flist)
parser.print_decls(decls)
print len(decls)
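
The parser is also handy on its own for checking what the generator will see; a minimal sketch of running it on a single header, the same thing the __main__ block above does for the whole opencv_hdr_list (path is illustrative):

import hdr_parser

parser = hdr_parser.CppHeaderParser()
decls = parser.parse("../core/include/opencv2/core/core.hpp")
parser.print_decls(decls[:5])    # dump the first few declarations in readable form
print len(decls)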

View File

@ -2,9 +2,15 @@
#define OPENCV2X_PYTHON_WRAPPERS
#include "opencv2/core/core.hpp"
namespace cv
{
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/ml/ml.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/video/tracking.hpp"
#include "opencv2/video/background_segm.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv_extra_api.hpp"
#define ERRWRAP2(expr) \
try \
@ -17,6 +23,37 @@ catch (const cv::Exception &e) \
return 0; \
}
using namespace cv;
typedef vector<uchar> vector_uchar;
typedef vector<int> vector_int;
typedef vector<float> vector_float;
typedef vector<double> vector_double;
typedef vector<Point> vector_Point;
typedef vector<Point2f> vector_Point2f;
typedef vector<Vec2f> vector_Vec2f;
typedef vector<Vec3f> vector_Vec3f;
typedef vector<Vec4i> vector_Vec4i;
typedef vector<Rect> vector_Rect;
typedef vector<KeyPoint> vector_KeyPoint;
typedef vector<Mat> vector_Mat;
typedef vector<vector<Point> > vector_vector_Point;
typedef vector<vector<Point2f> > vector_vector_Point2f;
typedef vector<vector<Point3f> > vector_vector_Point3f;
static PyObject* failmsgp(const char *fmt, ...)
{
char str[1000];
va_list ap;
va_start(ap, fmt);
vsnprintf(str, sizeof(str), fmt, ap);
va_end(ap);
PyErr_SetString(PyExc_TypeError, str);
return 0;
}
static size_t REFCOUNT_OFFSET = (size_t)&(((PyObject*)0)->ob_refcnt) +
(0x12345678 != *(const size_t*)"\x78\x56\x34\x12\0\0\0\0\0")*sizeof(int);
@ -40,7 +77,6 @@ public:
uchar*& datastart, uchar*& data, size_t* step)
{
static int ncalls = 0;
printf("NumpyAllocator::allocate: %d\n", ncalls++);
int depth = CV_MAT_DEPTH(type);
int cn = CV_MAT_CN(type);
@ -65,7 +101,7 @@ public:
CV_Error_(CV_StsError, ("The numpy array of typenum=%d, ndims=%d can not be created", typenum, dims));
refcount = refcountFromPyObject(o);
npy_intp* _strides = PyArray_STRIDES(o);
for( i = 0; i < dims-1; i++ )
for( i = 0; i < dims - (cn > 1); i++ )
step[i] = (size_t)_strides[i];
datastart = data = (uchar*)PyArray_DATA(o);
}
@ -73,11 +109,11 @@ public:
void deallocate(int* refcount, uchar* datastart, uchar* data)
{
static int ncalls = 0;
printf("NumpyAllocator::deallocate: %d\n", ncalls++);
if( !refcount )
return;
PyObject* o = pyObjectFromRefcount(refcount);
Py_INCREF(o);
Py_DECREF(o);
}
};
@ -86,16 +122,21 @@ NumpyAllocator g_numpyAllocator;
enum { ARG_NONE = 0, ARG_MAT = 1, ARG_SCALAR = 2 };
static int pyobjToMat(const PyObject* o, Mat& m, const char* name = "<unknown>", bool allowND=true)
static int pyopencv_to(const PyObject* o, Mat& m, const char* name = "<unknown>", bool allowND=true)
{
static int call_idx = 0;
printf("pyobjToMatND: %d\n", call_idx++);
if( !PyArray_Check(o) ) {
if( o == Py_None )
return ARG_NONE;
if(!o || o == Py_None)
{
if( !m.data )
m.allocator = &g_numpyAllocator;
return true;
}
if( !PyArray_Check(o) )
{
failmsg("%s is not a numpy array", name);
return -1;
return false;
}
int typenum = PyArray_TYPE(o);
@ -108,18 +149,18 @@ static int pyobjToMat(const PyObject* o, Mat& m, const char* name = "<unknown>",
if( type < 0 )
{
failmsg("%s data type = %d is not supported", name, typenum);
return -1;
return false;
}
int ndims = PyArray_NDIM(o);
if(ndims >= CV_MAX_DIM)
{
failmsg("%s dimensionality (=%d) is too high", name, ndims);
return -1;
return false;
}
int size[CV_MAX_DIM+1];
size_t step[CV_MAX_DIM+1], elemsize = CV_ELEM_SIZE(type);
size_t step[CV_MAX_DIM+1], elemsize = CV_ELEM_SIZE1(type);
const npy_intp* _sizes = PyArray_DIMS(o);
const npy_intp* _strides = PyArray_STRIDES(o);
@ -135,64 +176,55 @@ static int pyobjToMat(const PyObject* o, Mat& m, const char* name = "<unknown>",
ndims++;
}
m = Mat(ndims, size, type, PyArray_DATA(o), step);
if (!allowND)
if( ndims == 3 && size[2] <= CV_CN_MAX && step[1] == elemsize*size[2] )
{
if( ndims <= 2 )
;
else if( ndims == 3 )
{
if( size[2] > CV_CN_MAX || step[1] != elemsize*size[2] )
{
failmsg("%s is not contiguous, thus it can not be interpreted as image", name);
return -1;
}
m.dims--;
m.flags = (m.flags & ~CV_MAT_TYPE_MASK) | CV_MAKETYPE(type, size[2]);
}
else
{
failmsg("%s is not contiguous or has more than 3 dimensions, thus it can not be interpreted as image", name);
return -1;
}
ndims--;
type |= CV_MAKETYPE(0, size[2]);
}
if( ndims > 2 && !allowND )
{
failmsg("%s has more than 2 dimensions", name);
return false;
}
m = Mat(ndims, size, type, PyArray_DATA(o), step);
if( m.data )
{
m.refcount = refcountFromPyObject(o);
++*m.refcount; // protect the original numpy array from deallocation
// (since Mat destructor will decrement the reference counter)
m.addref(); // protect the original numpy array from deallocation
// (since Mat destructor will decrement the reference counter)
};
m.allocator = &g_numpyAllocator;
return ARG_MAT;
return true;
}
static void makeEmptyMat(Mat& m)
static PyObject* pyopencv_from(const Mat& m)
{
m = Mat();
m.allocator = &g_numpyAllocator;
Mat temp, *p = (Mat*)&m;
if(!p->refcount || p->allocator != &g_numpyAllocator)
{
pyopencv_to(Py_None, temp);
m.copyTo(temp);
p = &temp;
}
p->addref();
return pyObjectFromRefcount(p->refcount);
}
static int pyobjToMat(const PyObject* o, Mat& m, const char* name = "<unknown>")
{
Mat temp;
int code = pyobjToMat(o, temp, name, false);
if(code > 0)
m = Mat(temp);
return code;
}
static int pyobjToScalar(PyObject *o, Scalar& s, const char *name = "<unknown>")
static bool pyopencv_to(PyObject *o, Scalar& s, const char *name = "<unknown>")
{
if(!o || o == Py_None)
return true;
if (PySequence_Check(o)) {
PyObject *fi = PySequence_Fast(o, name);
if (fi == NULL)
return -1;
return false;
if (4 < PySequence_Fast_GET_SIZE(fi))
{
failmsg("Scalar value for argument '%s' is longer than 4", name);
return -1;
return false;
}
for (Py_ssize_t i = 0; i < PySequence_Fast_GET_SIZE(fi); i++) {
PyObject *item = PySequence_Fast_GET_ITEM(fi, i);
@ -200,7 +232,7 @@ static int pyobjToScalar(PyObject *o, Scalar& s, const char *name = "<unknown>")
s[i] = PyFloat_AsDouble(item);
} else {
failmsg("Scalar value for argument '%s' is not numeric", name);
return -1;
return false;
}
}
Py_DECREF(fi);
@ -209,148 +241,456 @@ static int pyobjToScalar(PyObject *o, Scalar& s, const char *name = "<unknown>")
s[0] = PyFloat_AsDouble(o);
} else {
failmsg("Scalar value for argument '%s' is not numeric", name);
return -1;
return false;
}
}
return ARG_SCALAR;
return true;
}
static int pyobjToMatOrScalar(PyObject* obj, Mat& m, Scalar& s, const char* name, bool allowND)
static inline PyObject* pyopencv_from(const Scalar& src)
{
if( PyArray_Check(obj) || (obj == Py_None))
return pyobjToMat(obj, m, name, allowND);
return pyobjToScalar(obj, s, name);
return Py_BuildValue("(dddd)", src[0], src[1], src[2], src[3]);
}
static void pyc_add_mm(const Mat& a, const Mat& b, Mat& c, const Mat& mask) { add(a, b, c, mask); }
static void pyc_add_ms(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool) { add(a, s, c, mask); }
static void pyc_subtract_mm(const Mat& a, const Mat& b, Mat& c, const Mat& mask) { subtract(a, b, c, mask); }
static void pyc_subtract_ms(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool rev)
static PyObject* pyopencv_from(bool value)
{
if( !rev )
subtract(a, s, c, mask);
return PyBool_FromLong(value);
}
static bool pyopencv_to(PyObject* obj, bool& value, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
int _val = PyObject_IsTrue(obj);
if(_val < 0)
return false;
value = _val > 0;
return true;
}
static PyObject* pyopencv_from(size_t value)
{
return PyLong_FromUnsignedLong((unsigned long)value);
}
static PyObject* pyopencv_from(int value)
{
return PyInt_FromLong(value);
}
static bool pyopencv_to(PyObject* obj, int& value, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
value = (int)PyInt_AsLong(obj);
return value != -1 || !PyErr_Occurred();
}
static PyObject* pyopencv_from(double value)
{
return PyFloat_FromDouble(value);
}
static bool pyopencv_to(PyObject* obj, double& value, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
if(PyInt_CheckExact(obj))
value = (double)PyInt_AS_LONG(obj);
else
subtract(s, a, c, mask);
value = PyFloat_AsDouble(obj);
return !PyErr_Occurred();
}
static void pyc_and_mm(const Mat& a, const Mat& b, Mat& c, const Mat& mask) { bitwise_and(a, b, c, mask); }
static void pyc_and_ms(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool) { bitwise_and(a, s, c, mask); }
static void pyc_or_mm(const Mat& a, const Mat& b, Mat& c, const Mat& mask) { bitwise_or(a, b, c, mask); }
static void pyc_or_ms(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool) { bitwise_or(a, s, c, mask); }
static void pyc_xor_mm(const Mat& a, const Mat& b, Mat& c, const Mat& mask) { bitwise_xor(a, b, c, mask); }
static void pyc_xor_ms(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool) { bitwise_xor(a, s, c, mask); }
static void pyc_absdiff_mm(const Mat& a, const Mat& b, Mat& c, const Mat&) { absdiff(a, b, c); }
static void pyc_absdiff_ms(const Mat& a, const Scalar& s, Mat& c, const Mat&, bool) { absdiff(a, s, c); }
static void pyc_min_mm(const Mat& a, const Mat& b, Mat& c, const Mat&) { min(a, b, c); }
static void pyc_min_ms(const Mat& a, const Scalar& s, Mat& c, const Mat&, bool)
static PyObject* pyopencv_from(float value)
{
CV_Assert( s.isReal() );
min(a, s[0], c);
}
static void pyc_max_mm(const Mat& a, const Mat& b, Mat& c, const Mat&) { max(a, b, c); }
static void pyc_max_ms(const Mat& a, const Scalar& s, Mat& c, const Mat&, bool)
{
CV_Assert( s.isReal() );
max(a, s[0], c);
return PyFloat_FromDouble(value);
}
typedef void (*BinaryOp)(const Mat& a, const Mat& b, Mat& c, const Mat& mask);
typedef void (*BinaryOpS)(const Mat& a, const Scalar& s, Mat& c, const Mat& mask, bool rev);
static PyObject *pyopencv_binary_op(PyObject* args, PyObject* kw, BinaryOp binOp, BinaryOpS binOpS, bool supportMask)
static bool pyopencv_to(PyObject* obj, float& value, const char* name = "<unknown>")
{
PyObject *pysrc1 = 0, *pysrc2 = 0, *pydst = 0, *pymask = 0;
Mat src1, src2, dst, mask;
Scalar alpha, beta;
if( supportMask )
if(!obj || obj == Py_None)
return true;
if(PyInt_CheckExact(obj))
value = (float)PyInt_AS_LONG(obj);
else
value = (float)PyFloat_AsDouble(obj);
return !PyErr_Occurred();
}
static PyObject* pyopencv_from(const string& value)
{
return PyString_FromString(value.empty() ? "" : value.c_str());
}
static bool pyopencv_to(PyObject* obj, string& value, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
char* str = PyString_AsString(obj);
if(!str)
return false;
value = string(str);
return true;
}
static inline bool pyopencv_to(PyObject* obj, Size& sz, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
return PyArg_Parse(obj, "ii", &sz.width, &sz.height) > 0;
}
static inline PyObject* pyopencv_from(const Size& sz)
{
return Py_BuildValue("(ii)", sz.width, sz.height);
}
static inline bool pyopencv_to(PyObject* obj, Rect& r, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
return PyArg_Parse(obj, "iiii", &r.x, &r.y, &r.width, &r.height) > 0;
}
static inline PyObject* pyopencv_from(const Rect& r)
{
return Py_BuildValue("(iiii)", r.x, r.y, r.width, r.height);
}
static inline bool pyopencv_to(PyObject* obj, Range& r, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
if(PyObject_Size(obj) == 0)
{
const char *keywords[] = { "src1", "src2", "dst", "mask", 0 };
if( !PyArg_ParseTupleAndKeywords(args, kw, "OO|OO", (char**)keywords,
&pysrc1, &pysrc2, &pydst, &pymask))
return 0;
r = Range::all();
return true;
}
else
return PyArg_Parse(obj, "ii", &r.start, &r.end) > 0;
}
static inline PyObject* pyopencv_from(const Range& r)
{
return Py_BuildValue("(ii)", r.start, r.end);
}
static inline bool pyopencv_to(PyObject* obj, CvSlice& r, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
if(PyObject_Size(obj) == 0)
{
const char *keywords[] = { "src1", "src2", "dst", 0 };
if( !PyArg_ParseTupleAndKeywords(args, kw, "OO|O", (char**)keywords,
&pysrc1, &pysrc2, &pydst))
return 0;
r = CV_WHOLE_SEQ;
return true;
}
return PyArg_Parse(obj, "ii", &r.start_index, &r.end_index) > 0;
}
static inline PyObject* pyopencv_from(const CvSlice& r)
{
return Py_BuildValue("(ii)", r.start_index, r.end_index);
}
static inline bool pyopencv_to(PyObject* obj, Point& p, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
if(PyComplex_CheckExact(obj))
{
Py_complex c = PyComplex_AsCComplex(obj);
p.x = saturate_cast<int>(c.real);
p.y = saturate_cast<int>(c.imag);
return true;
}
return PyArg_Parse(obj, "ii", &p.x, &p.y) > 0;
}
static inline bool pyopencv_to(PyObject* obj, Point2f& p, const char* name = "<unknown>")
{
if(!obj || obj == Py_None)
return true;
if(PyComplex_CheckExact(obj))
{
Py_complex c = PyComplex_AsCComplex(obj);
p.x = saturate_cast<float>(c.real);
p.y = saturate_cast<float>(c.imag);
return true;
}
return PyArg_Parse(obj, "ff", &p.x, &p.y) > 0;
}
static inline PyObject* pyopencv_from(const Point& p)
{
return Py_BuildValue("(ii)", p.x, p.y);
}
static inline PyObject* pyopencv_from(const Point2f& p)
{
return Py_BuildValue("(dd)", p.x, p.y);
}
static inline bool pyopencv_to(PyObject* obj, Vec3d& v, const char* name = "<unknown>")
{
if(!obj)
return true;
return PyArg_Parse(obj, "ddd", &v[0], &v[1], &v[2]) > 0;
}
static inline PyObject* pyopencv_from(const Vec3d& v)
{
return Py_BuildValue("(ddd)", v[0], v[1], v[2]);
}
static inline PyObject* pyopencv_from(const Point2d& p)
{
return Py_BuildValue("(dd)", p.x, p.y);
}
template<typename _Tp> struct pyopencvVecConverter
{
static bool to(PyObject* obj, vector<_Tp>& value, const char* name="<unknown>")
{
typedef typename DataType<_Tp>::channel_type _Cp;
if(!obj)
return true;
if (PyArray_Check(obj))
{
Mat m;
pyopencv_to(obj, m, name);
m.copyTo(value);
}
if (!PySequence_Check(obj))
return false;
PyObject *seq = PySequence_Fast(obj, name);
if (seq == NULL)
return false;
int i, j, n = (int)PySequence_Fast_GET_SIZE(seq);
value.resize(n);
int type = DataType<_Tp>::type;
int depth = CV_MAT_DEPTH(type), channels = CV_MAT_CN(type);
PyObject** items = PySequence_Fast_ITEMS(seq);
for( i = 0; i < n; i++ )
{
PyObject* item = items[i];
PyObject* seq_i = 0;
PyObject** items_i = &item;
_Cp* data = (_Cp*)&value[i];
if( channels == 2 && PyComplex_CheckExact(item) )
{
Py_complex c = PyComplex_AsCComplex(item);
data[0] = saturate_cast<_Cp>(c.real);
data[1] = saturate_cast<_Cp>(c.imag);
continue;
}
if( channels > 1 )
{
if( PyArray_Check(item))
{
Mat src;
pyopencv_to(item, src, name);
if( src.dims != 2 || src.channels() != 1 ||
((src.cols != 1 || src.rows != channels) &&
(src.cols != channels || src.rows != 1)))
break;
Mat dst(src.rows, src.cols, depth, data);
src.convertTo(dst, type);
if( dst.data != (uchar*)data )
break;
continue;
}
seq_i = PySequence_Fast(item, name);
if( !seq_i || (int)PySequence_Fast_GET_SIZE(seq_i) != channels )
{
Py_XDECREF(seq_i);
break;
}
items_i = PySequence_Fast_ITEMS(seq_i);
}
for( j = 0; j < channels; j++ )
{
PyObject* item_ij = items_i[j];
if( PyInt_Check(item_ij))
{
int v = PyInt_AsLong(item_ij);
if( v == -1 && PyErr_Occurred() )
break;
data[j] = saturate_cast<_Cp>(v);
}
else if( PyFloat_Check(item_ij))
{
double v = PyFloat_AsDouble(item_ij);
if( PyErr_Occurred() )
break;
data[j] = saturate_cast<_Cp>(v);
}
else
break;
}
Py_XDECREF(seq_i);
if( j < channels )
break;
}
Py_DECREF(seq);
return i == n;
}
int code_src1 = pysrc1 ? pyobjToMatOrScalar(pysrc1, src1, alpha, "src1", true) : -1;
int code_src2 = pysrc2 ? pyobjToMatOrScalar(pysrc2, src2, beta, "src2", true) : -1;
int code_dst = pydst ? pyobjToMat(pydst, dst, "dst", true) : 0;
int code_mask = pymask ? pyobjToMat(pymask, mask, "mask", true) : 0;
if( code_src1 < 0 || code_src2 < 0 || code_dst < 0 || code_mask < 0 )
return 0;
if( code_src1 == ARG_SCALAR && code_src2 == ARG_SCALAR )
static PyObject* from(const vector<_Tp>& value)
{
failmsg("Both %s and %s are scalars", "src1", "src2");
if(value.empty())
return PyTuple_New(0);
Mat src((int)value.size(), DataType<_Tp>::channels, DataType<_Tp>::depth, (uchar*)&value[0]);
return pyopencv_from(src);
}
};
template<typename _Tp> static inline bool pyopencv_to(PyObject* obj, vector<_Tp>& value, const char* name="<unknown>")
{
return pyopencvVecConverter<_Tp>::to(obj, value, name);
}
template<typename _Tp> static inline PyObject* pyopencv_from(const vector<_Tp>& value)
{
return pyopencvVecConverter<_Tp>::from(value);
}
static PyObject* pyopencv_from(const KeyPoint&);
static bool pyopencv_to(PyObject*, KeyPoint&, const char*);
template<typename _Tp> static inline bool pyopencv_to_generic_vec(PyObject* obj, vector<_Tp>& value, const char* name="<unknown>")
{
if (!PySequence_Check(obj))
return false;
PyObject *seq = PySequence_Fast(obj, name);
if (seq == NULL)
return false;
int i, n = (int)PySequence_Fast_GET_SIZE(seq);
value.resize(n);
PyObject** items = PySequence_Fast_ITEMS(seq);
for( i = 0; i < n; i++ )
{
PyObject* item = items[i];
if(!pyopencv_to(item, value[i], name))
break;
}
Py_DECREF(seq);
return i == n;
}
template<typename _Tp> static inline PyObject* pyopencv_from_generic_vec(const vector<_Tp>& value)
{
int i, n = (int)value.size();
PyObject* seq = PyTuple_New(n);
for( i = 0; i < n; i++ )
{
PyObject* item = pyopencv_from(value[i]);
if(!item)
break;
PyTuple_SET_ITEM(seq, i, item);
}
if( i < n )
{
Py_DECREF(seq);
return 0;
}
return seq;
}
template<typename _Tp> struct pyopencvVecConverter<vector<_Tp> >
{
static bool to(PyObject* obj, vector<vector<_Tp> >& value, const char* name="<unknown>")
{
return pyopencv_to_generic_vec(obj, value, name);
}
if( code_dst == 0 )
makeEmptyMat(dst);
static PyObject* from(const vector<vector<_Tp> >& value)
{
return pyopencv_from_generic_vec(value);
}
};
template<> struct pyopencvVecConverter<Mat>
{
static bool to(PyObject* obj, vector<Mat>& value, const char* name="<unknown>")
{
return pyopencv_to_generic_vec(obj, value, name);
}
ERRWRAP2(code_src1 != ARG_SCALAR && code_src2 != ARG_SCALAR ? binOp(src1, src2, dst, mask) :
code_src1 != ARG_SCALAR ? binOpS(src2, alpha, dst, mask, true) : binOpS(src1, beta, dst, mask, false));
static PyObject* from(const vector<Mat>& value)
{
return pyopencv_from_generic_vec(value);
}
};
template<> struct pyopencvVecConverter<KeyPoint>
{
static bool to(PyObject* obj, vector<KeyPoint>& value, const char* name="<unknown>")
{
return pyopencv_to_generic_vec(obj, value, name);
}
PyObject* result = pyObjectFromRefcount(dst.refcount);
int D = PyArray_NDIM(result);
const npy_intp* sz = PyArray_DIMS(result);
printf("Result: check = %d, ndims = %d, size = (%d x %d), typenum=%d\n", PyArray_Check(result),
D, (int)sz[0], D >= 2 ? (int)sz[1] : 1, PyArray_TYPE(result));
//Py_INCREF(result);
return result;
}
static PyObject* from(const vector<KeyPoint>& value)
{
return pyopencv_from_generic_vec(value);
}
};
static PyObject* pyopencv_add(PyObject* self, PyObject* args, PyObject* kw)
static inline bool pyopencv_to(PyObject *obj, CvTermCriteria& dst, const char *name="<unknown>")
{
return pyopencv_binary_op(args, kw, pyc_add_mm, pyc_add_ms, true);
if(!obj)
return true;
return PyArg_ParseTuple(obj, "iid", &dst.type, &dst.max_iter, &dst.epsilon) > 0;
}
static PyObject* pyopencv_subtract(PyObject* self, PyObject* args, PyObject* kw)
static inline PyObject* pyopencv_from(const CvTermCriteria& src)
{
return pyopencv_binary_op(args, kw, pyc_subtract_mm, pyc_subtract_ms, true);
return Py_BuildValue("(iid)", src.type, src.max_iter, src.epsilon);
}
static PyObject* pyopencv_and(PyObject* self, PyObject* args, PyObject* kw)
static inline bool pyopencv_to(PyObject *obj, TermCriteria& dst, const char *name="<unknown>")
{
return pyopencv_binary_op(args, kw, pyc_and_mm, pyc_and_ms, true);
if(!obj)
return true;
return PyArg_ParseTuple(obj, "iid", &dst.type, &dst.maxCount, &dst.epsilon) > 0;
}
static PyObject* pyopencv_or(PyObject* self, PyObject* args, PyObject* kw)
static inline PyObject* pyopencv_from(const TermCriteria& src)
{
return pyopencv_binary_op(args, kw, pyc_or_mm, pyc_or_ms, true);
return Py_BuildValue("(iid)", src.type, src.maxCount, src.epsilon);
}
static PyObject* pyopencv_xor(PyObject* self, PyObject* args, PyObject* kw)
static inline bool pyopencv_to(PyObject *obj, RotatedRect& dst, const char *name="<unknown>")
{
return pyopencv_binary_op(args, kw, pyc_xor_mm, pyc_xor_ms, true);
if(!obj)
return true;
return PyArg_ParseTuple(obj, "(ff)(ff)f", &dst.center.x, &dst.center.y, &dst.size.width, &dst.size.height, &dst.angle) > 0;
}
static PyObject* pyopencv_absdiff(PyObject* self, PyObject* args, PyObject* kw)
static inline PyObject* pyopencv_from(const RotatedRect& src)
{
return pyopencv_binary_op(args, kw, pyc_absdiff_mm, pyc_absdiff_ms, true);
return Py_BuildValue("((ff)(ff)f)", src.center.x, src.center.y, src.size.width, src.size.height, src.angle);
}
static PyObject* pyopencv_min(PyObject* self, PyObject* args, PyObject* kw)
static inline PyObject* pyopencv_from(const Moments& m)
{
return pyopencv_binary_op(args, kw, pyc_min_mm, pyc_min_ms, true);
return Py_BuildValue("{s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d,s:d}",
"m00", m.m00, "m10", m.m10, "m01", m.m01,
"m20", m.m20, "m11", m.m11, "m02", m.m02,
"m30", m.m30, "m21", m.m21, "m12", m.m12, "m03", m.m03,
"mu20", m.mu20, "mu11", m.mu11, "mu02", m.mu02,
"mu30", m.mu30, "mu21", m.mu21, "mu12", m.mu12, "mu03", m.mu03,
"nu20", m.nu20, "nu11", m.nu11, "nu02", m.nu02,
"nu30", m.nu30, "nu21", m.nu21, "nu12", m.nu12, "mu03", m.nu03);
}
static PyObject* pyopencv_max(PyObject* self, PyObject* args, PyObject* kw)
{
return pyopencv_binary_op(args, kw, pyc_max_mm, pyc_max_ms, true);
}
}
#endif
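
Taken together, the converters above define the Python-side calling convention: the small geometric types travel as plain tuples, Scalar as a short numeric sequence, and vector<> arguments accept sequences (or numpy arrays for element types that map onto Mat). A quick reference sketch (values are illustrative):

# Python-side forms accepted by the pyopencv_to() overloads above
size  = (640, 480)            # Size          <- "ii" (width, height)
rect  = (10, 20, 100, 80)     # Rect          <- "iiii" (x, y, width, height)
rng   = (0, 10)               # Range         <- "ii"; an empty tuple means Range::all()
pt    = (15, 25)              # Point/Point2f <- "ii"/"ff", or a complex number 15+25j
color = (0, 255, 0, 0)        # Scalar        <- a sequence of up to 4 numbers, or a single number
crit  = (1, 30, 0.1)          # TermCriteria  <- "iid" (type, maxCount, epsilon)
rrect = ((50.0, 50.0), (20.0, 10.0), 30.0)   # RotatedRect <- "(ff)(ff)f" (center, size, angle)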

View File

@ -3,6 +3,7 @@
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/calib3d/calib3d.hpp"
namespace cv
@ -29,13 +30,19 @@ CV_WRAP_AS(getCPUTickCount) static inline double getCPUTickCount_()
return (double)getCPUTickCount();
}
CV_WRAP void randShuffle(const Mat& src, CV_OUT Mat& dst, double iterFactor=1.)
{
src.copyTo(dst);
randShuffle(dst, iterFactor, 0);
}
CV_WRAP static inline void SVDecomp(const Mat& src, CV_OUT Mat& w, CV_OUT Mat& u, CV_OUT Mat& vt, int flags=0 )
{
SVD::compute(src, w, u, vt, flags);
}
CV_WRAP static inline void SVDBackSubst( const Mat& w, const Mat& u, const Mat& vt,
const Mat& rhs, CV_OUT Mat& dst )
CV_WRAP static inline void SVBackSubst( const Mat& w, const Mat& u, const Mat& vt,
const Mat& rhs, CV_OUT Mat& dst )
{
SVD::backSubst(w, u, vt, rhs, dst);
}
@ -53,10 +60,8 @@ CV_WRAP static inline bool eigen(const Mat& src, bool computeEigenvectors,
CV_OUT Mat& eigenvalues, CV_OUT Mat& eigenvectors,
int lowindex=-1, int highindex=-1)
{
if(computeEigenvectors)
eigen(src, eigenvalues, eigenvectors, lowindex, highindex);
else
eigen(src, eigenvalues, lowindex, highindex);
return computeEigenvectors ? eigen(src, eigenvalues, eigenvectors, lowindex, highindex) :
eigen(src, eigenvalues, lowindex, highindex);
}
CV_WRAP static inline void fillConvexPoly(Mat& img, const Mat& points,
@ -82,10 +87,10 @@ CV_WRAP static inline void fillPoly(Mat& img, const vector<Mat>& pts,
{
const Mat& p = pts[i];
CV_Assert(p.checkVector(2, CV_32S) >= 0);
ptsptr[i] = (const Point*)p.data;
ptsptr[i] = (Point*)p.data;
npts[i] = p.rows*p.cols*p.channels()/2;
}
fillPoly(img, ptsptr, npts, (int)pts.size(), color, lineType, shift, offset);
fillPoly(img, (const Point**)ptsptr, npts, (int)pts.size(), color, lineType, shift, offset);
}
CV_WRAP static inline void polylines(Mat& img, const vector<Mat>& pts,
@ -103,12 +108,41 @@ CV_WRAP static inline void polylines(Mat& img, const vector<Mat>& pts,
{
const Mat& p = pts[i];
CV_Assert(p.checkVector(2, CV_32S) >= 0);
ptsptr[i] = (const Point*)p.data;
ptsptr[i] = (Point*)p.data;
npts[i] = p.rows*p.cols*p.channels()/2;
}
polylines(img, ptsptr, npts, (int)pts.size(), isClosed, color, thickness, lineType, shift);
polylines(img, (const Point**)ptsptr, npts, (int)pts.size(), isClosed, color, thickness, lineType, shift);
}
CV_WRAP static inline void PCACompute(const Mat& data, CV_OUT Mat& mean,
CV_OUT Mat& eigenvectors, int maxComponents=0)
{
PCA pca;
pca.mean = mean;
pca.eigenvectors = eigenvectors;
pca(data, Mat(), 0, maxComponents);
pca.mean.copyTo(mean);
pca.eigenvectors.copyTo(eigenvectors);
}
CV_WRAP static inline void PCAProject(const Mat& data, const Mat& mean,
const Mat& eigenvectors, CV_OUT Mat& result)
{
PCA pca;
pca.mean = mean;
pca.eigenvectors = eigenvectors;
pca.project(data, result);
}
CV_WRAP static inline void PCABackProject(const Mat& data, const Mat& mean,
const Mat& eigenvectors, CV_OUT Mat& result)
{
PCA pca;
pca.mean = mean;
pca.eigenvectors = eigenvectors;
pca.backProject(data, result);
}
/////////////////////////// imgproc /////////////////////////////////
CV_WRAP static inline void HuMoments(const Moments& m, CV_OUT vector<double>& hu)
@ -142,9 +176,8 @@ CV_WRAP static inline void calcHist( const vector<Mat>& images, const vector<int
float* _ranges[CV_MAX_DIM];
if( rsz > 0 )
{
_all_ranges = _ranges;
for( int i = 0; i < rsz/2; i++ )
_ranges[i] = &ranges[i*2];
for( i = 0; i < rsz/2; i++ )
_ranges[i] = (float*)&ranges[i*2];
}
calcHist(&images[0], (int)images.size(), csz ? &channels[0] : 0,
mask, hist, dims, &histSize[0], rsz ? (const float**)_ranges : 0,
@ -164,11 +197,10 @@ CV_WRAP void calcBackProject( const vector<Mat>& images, const vector<int>& chan
float* _ranges[CV_MAX_DIM];
if( rsz > 0 )
{
_all_ranges = _ranges;
for( int i = 0; i < rsz/2; i++ )
_ranges[i] = &ranges[i*2];
for( i = 0; i < rsz/2; i++ )
_ranges[i] = (float*)&ranges[i*2];
}
calcHist(&images[0], (int)images.size(), csz ? &channels[0] : 0,
calcBackProject(&images[0], (int)images.size(), csz ? &channels[0] : 0,
hist, dst, rsz ? (const float**)_ranges : 0, scale, true);
}
@ -283,17 +315,17 @@ CV_WRAP static inline void approxPolyDP( const Mat& curve,
if( curve.depth() == CV_32S )
{
vector<Point> result;
approxyPolyDP(curve, result, epsilon, closed);
approxPolyDP(curve, result, epsilon, closed);
Mat(result).copyTo(approxCurve);
}
else if( curve.depth() == CV_32F )
{
vector<Point2f> result;
approxyPolyDP(curve, result, epsilon, closed);
approxPolyDP(curve, result, epsilon, closed);
Mat(result).copyTo(approxCurve);
}
else
CV_Error(CV_StsUnsupportedFormat, "")
CV_Error(CV_StsUnsupportedFormat, "");
}
@ -319,22 +351,28 @@ CV_WRAP static inline void convexHull( const Mat& points, CV_OUT Mat& hull, bool
}
}
CV_WRAP static inline void fitLine( const Mat& points, CV_OUT CV_CUSTOM_CARRAY(points) float* line,
CV_WRAP static inline void fitLine( const Mat& points, CV_OUT vector<float>& line,
int distType, double param, double reps, double aeps )
{
if(points.channels() == 2 || points.cols == 2)
fitLine(points, *(Vec4f*)line, distType, param, reps, aeps);
{
line.resize(4);
fitLine(points, *(Vec4f*)&line[0], distType, param, reps, aeps);
}
else
fitLine(points, *(Vec6f*)line, distType, param, reps, aeps);
{
line.resize(6);
fitLine(points, *(Vec6f*)&line[0], distType, param, reps, aeps);
}
}
CV_WRAP static inline int estimateAffine3D( const Mat& from, const Mat& to,
CV_OUT Mat& dst, CV_OUT Mat& outliers,
double param1 = 3.0, double param2 = 0.99 )
{
vector<int> outliers_vec;
vector<uchar> outliers_vec;
int res = estimateAffine3D(from, to, dst, outliers_vec, param1, param2);
outliers_vec.copyTo(outliers);
Mat(outliers_vec).copyTo(outliers);
return res;
}
@ -350,89 +388,11 @@ CV_WRAP static inline void cornerSubPix( const Mat& image, Mat& corners,
return;
CvMat _image = image;
cvCornerSubPix(&_image, (CvPoint2D32f*)corners.data, n, winSize, zeroZone, criteria);
cvFindCornerSubPix(&_image, (CvPoint2D32f*)corners.data, n, winSize, zeroZone, criteria);
}
CV_WRAP static inline void goodFeaturesToTrack( const Mat& image, CV_OUT Mat& corners,
int maxCorners, double qualityLevel, double minDistance,
const Mat& mask=Mat(), int blockSize=3,
bool useHarrisDetector=false, double k=0.04 )
{
vector<Point2f> corners_vec;
goodFeaturesToTrack(image, corners_vec, maxCorners, qualityLevel,
minDistance, mask, blockSize, useHarrisDetector, k);
Mat(corners_vec).copyTo(corners);
}
CV_WRAP static inline void HoughLines( const Mat& image, CV_OUT Mat& lines,
double rho, double theta, int threshold,
double srn=0, double stn=0 )
{
vector<Vec2f>& lines_vec;
HoughLines(image, lines_vec, rho, theta, threshold, srn, stn);
Mat(lines_vec).copyTo(lines);
}
CV_WRAP static inline void HoughLinesP( Mat& image, CV_OUT Mat& lines,
double rho, double theta, int threshold,
double minLineLength=0, double maxLineGap=0 )
{
vector<Vec4i>& lines_vec;
HoughLinesP(image, lines_vec, rho, theta, threshold, minLineLength, maxLineGap);
Mat(lines_vec).copyTo(lines);
}
CV_WRAP static inline void HoughCircles( const Mat& image, CV_OUT vector<Vec3f>& circles,
int method, double dp, double minDist,
double param1=100, double param2=100,
int minRadius=0, int maxRadius=0 )
{
vector<Vec3f>& circles_vec;
HoughCircles(image, circles_vec, method, dp, minDist, param1, param2, minRadius, maxRadius);
Mat(circles_vec).copyTo(circles);
}
/////////////////////////////// calib3d ///////////////////////////////////////////
CV_WRAP static inline bool findChessboardCorners( const Mat& image, Size patternSize,
CV_OUT Mat& corners,
int flags=CALIB_CB_ADAPTIVE_THRESH+
CALIB_CB_NORMALIZE_IMAGE )
{
vector<Point2f> corners_vec;
bool res = findChessboardCorners(image, patternSize, corners_vec, flags);
corners_vec.copyTo(corners);
return res;
}
CV_WRAP static inline void projectPoints( const Mat& objectPoints,
const Mat& rvec, const Mat& tvec,
const Mat& cameraMatrix,
const Mat& distCoeffs,
CV_OUT Mat& imagePoints )
{
vector<Point2f> imgpt;
projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, imgpt);
Mat(imgpt).copyTo(imagePoints);
}
CV_WRAP static inline void projectPointsJ( const Mat& objectPoints,
const Mat& rvec, const Mat& tvec,
const Mat& cameraMatrix,
const Mat& distCoeffs,
CV_OUT Mat& imagePoints,
CV_OUT Mat& dpdrot, CV_OUT Mat& dpdt, CV_OUT Mat& dpdf,
CV_OUT Mat& dpdc, CV_OUT Mat& dpddist,
double aspectRatio=0 )
{
vector<Point2f> imgpt;
projectPoints(objectPoints, rvec, tvec, cameraMatri, distCoeffs, imgpt,
dpdrot, dpdt, dpdf, dpdc, dpddist, aspectRatio);
Mat(imgpt).copyTo(imagePoints);
}
CV_WRAP static inline void convertPointsHomogeneous( const Mat& src, CV_OUT Mat& dst )
{
int n;
@ -441,12 +401,12 @@ CV_WRAP static inline void convertPointsHomogeneous( const Mat& src, CV_OUT Mat&
else if( (n = src.checkVector(3)) >= 0 )
dst.create(n, 3, src.depth());
else
CV_Assert(CV_StsBadSize, "");
CV_Error(CV_StsBadSize, "");
CvMat _src = src, _dst = dst;
cvConvertHomogeneous(&_src, &_dst);
cvConvertPointsHomogeneous(&_src, &_dst);
}
/*
//! initializes camera matrix from a few 3D points and the corresponding projections.
CV_WRAP static inline Mat initCameraMatrix2D( const vector<Mat>& objectPoints,
const vector<Mat>& imagePoints,
@ -472,7 +432,7 @@ CV_WRAP static inline double calibrateCamera( const vector<Mat>& objectPoints,
vector<vector<Point2f> > _imagePoints;
mv2vv(objectPoints, _objectPoints);
mv2vv(imagePoints, _imagePoints);
return calibrateCamera(_objectPoints, _imagePoints, imageSize, cameraMatrix, distCoeffs, rvec, tvecs, flags);
return calibrateCamera(_objectPoints, _imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
}
@ -493,12 +453,11 @@ CV_WRAP static inline double stereoCalibrate( const vector<Mat>& objectPoints,
mv2vv(objectPoints, _objectPoints);
mv2vv(imagePoints1, _imagePoints1);
mv2vv(imagePoints2, _imagePoints2);
return stereoCalibrate(_objectPoints, _imagePoints1, _imagePoints2, cameraMatrix1, cameraMatrix2,
imageSize, R, T, E, F, criteria, flags);
return stereoCalibrate(_objectPoints, _imagePoints1, _imagePoints2, cameraMatrix1, distCoeffs1,
cameraMatrix2, distCoeffs2, imageSize, R, T, E, F, criteria, flags);
}
CV_WRAP static float rectify3Collinear( const Mat& cameraMatrix1, const Mat& distCoeffs1,
CV_WRAP static inline float rectify3Collinear( const Mat& cameraMatrix1, const Mat& distCoeffs1,
const Mat& cameraMatrix2, const Mat& distCoeffs2,
const Mat& cameraMatrix3, const Mat& distCoeffs3,
const vector<Mat>& imgpt1, const vector<Mat>& imgpt3,
@ -520,6 +479,8 @@ CV_WRAP static float rectify3Collinear( const Mat& cameraMatrix1, const Mat& dis
R12, T12, R13, T13, R1, R2, R3, P1, P2, P3,
Q, alpha, newImgSize, roi1, roi2, flags);
}
*/
}
}
#endif
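
A sketch of how the new PCA helpers above are meant to be called from Python once this header is fed through gen2.py (array contents are illustrative; the module name follows MODULESTR in cv.cpp):

import cv
import numpy as np

data = np.random.randn(100, 5).astype(np.float32)   # 100 observations of 5 variables
mean, eigvecs = cv.PCACompute(data)                  # the two CV_OUT Mats are returned as a tuple
projected = cv.PCAProject(data, mean, eigvecs)       # CV_OUT result is returned
restored  = cv.PCABackProject(projected, mean, eigvecs)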