Merge pull request #6453 from sovrasov:extend_calibrateCamera

Vadim Pisarevsky 2016-06-16 18:28:49 +00:00
commit 4142e737b0
5 changed files with 267 additions and 28 deletions


@ -767,6 +767,14 @@ k-th translation vector (see the next output parameter description) brings the c
from the model coordinate space (in which object points are specified) to the world coordinate
space, that is, a real position of the calibration pattern in the k-th pattern view (k=0.. *M* -1).
@param tvecs Output vector of translation vectors estimated for each pattern view.
@param stdDeviationsIntrinsics Output vector of standard deviations estimated for intrinsic parameters.
Order of deviation values:
\f$(f_x, f_y, c_x, c_y, k_1, k_2, p_1, p_2, k_3, k_4, k_5, k_6, s_1, s_2, s_3,
s_4, \tau_x, \tau_y)\f$. If one of the parameters is not estimated, its deviation equals zero.
@param stdDeviationsExtrinsics Output vector of standard deviations estimated for extrinsic parameters.
Order of deviation values: \f$(R_1, T_1, \dotsc , R_M, T_M)\f$, where M is the number of pattern views
and \f$R_i, T_i\f$ are concatenated 1x3 vectors.
@param perViewErrors Output vector of average re-projection errors estimated for each pattern view.
@param flags Different flags that may be zero or a combination of the following values:
- **CV_CALIB_USE_INTRINSIC_GUESS** cameraMatrix contains valid initial values of
fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
@ -841,6 +849,24 @@ The function returns the final re-projection error.
@sa
findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort
*/
CV_EXPORTS_AS(calibrateCameraExtended) double calibrateCamera( InputArrayOfArrays objectPoints,
InputArrayOfArrays imagePoints, Size imageSize,
InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
OutputArray stdDeviationsIntrinsics,
OutputArray stdDeviationsExtrinsics,
OutputArray perViewErrors,
int flags = 0, TermCriteria criteria = TermCriteria(
TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) );
/** @overload double calibrateCamera( InputArrayOfArrays objectPoints,
InputArrayOfArrays imagePoints, Size imageSize,
InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
OutputArray stdDeviations, OutputArray perViewErrors,
int flags = 0, TermCriteria criteria = TermCriteria(
TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) )
*/
CV_EXPORTS_W double calibrateCamera( InputArrayOfArrays objectPoints,
InputArrayOfArrays imagePoints, Size imageSize,
InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
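For context, a minimal self-contained sketch of driving the new overload end to end. The synthetic board, ground-truth camera and poses below are illustrative assumptions made up for this example; only the calibrateCamera call and its three extra output arrays reflect the API added by this patch.

#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>
#include <cstdio>
#include <vector>

int main()
{
    const int nViews = 5;
    cv::Size imageSize(640, 480), boardSize(7, 5);

    // Ground-truth camera, used only to synthesize observations.
    cv::Mat K = (cv::Mat_<double>(3, 3) << 600, 0, 320, 0, 600, 240, 0, 0, 1);
    cv::Mat dist = cv::Mat::zeros(5, 1, CV_64F);

    std::vector<std::vector<cv::Point3f> > objectPoints(nViews);
    std::vector<std::vector<cv::Point2f> > imagePoints(nViews);
    for (int v = 0; v < nViews; v++)
    {
        for (int y = 0; y < boardSize.height; y++)
            for (int x = 0; x < boardSize.width; x++)
                objectPoints[v].push_back(cv::Point3f(0.03f*x, 0.03f*y, 0.f));

        // a slightly different pose for every view
        cv::Mat rvec = (cv::Mat_<double>(3, 1) << 0.15*v, -0.1*v, 0.02*v);
        cv::Mat tvec = (cv::Mat_<double>(3, 1) << 0.01*v - 0.1, 0.02*v - 0.05, 0.5 + 0.05*v);
        cv::projectPoints(objectPoints[v], rvec, tvec, K, dist, imagePoints[v]);
    }

    cv::Mat cameraMatrix, distCoeffs, stdDevInt, stdDevExt, perViewErr;
    std::vector<cv::Mat> rvecs, tvecs;
    double rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize,
                                     cameraMatrix, distCoeffs, rvecs, tvecs,
                                     stdDevInt, stdDevExt, perViewErr);

    // 18 intrinsic deviations, 6 extrinsic deviations per view, one error per view
    std::printf("RMS %g, sigma(f_x) %g, view 0 error %g\n",
                rms, stdDevInt.at<double>(0), perViewErr.at<double>(0));
    return 0;
}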


@ -246,6 +246,7 @@ CVAPI(void) cvDrawChessboardCorners( CvArr* image, CvSize pattern_size,
#define CV_CALIB_TILTED_MODEL 262144
#define CV_CALIB_FIX_TAUX_TAUY 524288
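/* total number of intrinsic parameters handled by the calibration:
   fx, fy, cx, cy plus the 14 distortion coefficients k1-k6, p1, p2, s1-s4, taux, tauy */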
#define CV_CALIB_NINTRINSIC 18
/* Finds intrinsic and extrinsic camera parameters
from a few views of known calibration pattern */


@ -1181,7 +1181,6 @@ CV_IMPL void cvFindExtrinsicCameraParams2( const CvMat* objectPoints,
cvConvert( &_t, tvec );
}
CV_IMPL void cvInitIntrinsicParams2D( const CvMat* objectPoints,
const CvMat* imagePoints, const CvMat* npoints,
CvSize imageSize, CvMat* cameraMatrix,
@ -1270,15 +1269,37 @@ CV_IMPL void cvInitIntrinsicParams2D( const CvMat* objectPoints,
cvConvert( &_a, cameraMatrix );
}
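/* Copies into dst only those rows and columns of src that are selected by
   non-zero entries of the cols/rows masks; used below to shrink JtJ to the
   parameters that are actually optimized before it is inverted. */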
static void subMatrix(const cv::Mat& src, cv::Mat& dst, const std::vector<uchar>& cols,
const std::vector<uchar>& rows) {
int nonzeros_cols = cv::countNonZero(cols);
cv::Mat tmp(src.rows, nonzeros_cols, CV_64FC1);
for (int i = 0, j = 0; i < (int)cols.size(); i++)
{
if (cols[i])
{
src.col(i).copyTo(tmp.col(j++));
}
}
int nonzeros_rows = cv::countNonZero(rows);
dst.create(nonzeros_rows, nonzeros_cols, CV_64FC1);
for (int i = 0, j = 0; i < (int)rows.size(); i++)
{
if (rows[i])
{
tmp.row(i).copyTo(dst.row(j++));
}
}
}
/* finds intrinsic and extrinsic camera parameters
from a few views of known calibration pattern */
CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
static double cvCalibrateCamera2Internal( const CvMat* objectPoints,
const CvMat* imagePoints, const CvMat* npoints,
CvSize imageSize, CvMat* cameraMatrix, CvMat* distCoeffs,
CvMat* rvecs, CvMat* tvecs, int flags, CvTermCriteria termCrit )
CvMat* rvecs, CvMat* tvecs, CvMat* stdDevs,
CvMat* perViewErrors, int flags, CvTermCriteria termCrit )
{
const int NINTRINSIC = 18;
const int NINTRINSIC = CV_CALIB_NINTRINSIC;
double reprojErr = 0;
Matx33d A;
@ -1338,6 +1359,20 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
"1xn or nx1 array or 1-channel nx3 array, where n is the number of views" );
}
if( stdDevs )
{
cn = CV_MAT_CN(stdDevs->type);
if( !CV_IS_MAT(stdDevs) ||
(CV_MAT_DEPTH(stdDevs->type) != CV_32F && CV_MAT_DEPTH(stdDevs->type) != CV_64F) ||
((stdDevs->rows != (nimages*6 + NINTRINSIC) || stdDevs->cols*cn != 1) &&
(stdDevs->rows != 1 || stdDevs->cols != (nimages*6 + NINTRINSIC) || cn != 1)) )
#define STR__(x) #x
#define STR_(x) STR__(x)
CV_Error( CV_StsBadArg, "the output array of standard deviations vectors must be 1-channel "
"1x(n*6 + NINTRINSIC) or (n*6 + NINTRINSIC)x1 array, where n is the number of views,"
" NINTRINSIC = " STR_(CV_CALIB_NINTRINSIC));
}
if( (CV_MAT_TYPE(cameraMatrix->type) != CV_32FC1 &&
CV_MAT_TYPE(cameraMatrix->type) != CV_64FC1) ||
cameraMatrix->rows != 3 || cameraMatrix->cols != 3 )
@ -1367,6 +1402,7 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
Mat matM( 1, total, CV_64FC3 );
Mat _m( 1, total, CV_64FC2 );
Mat allErrors(1, total, CV_64FC2);
if(CV_MAT_CN(objectPoints->type) == 3) {
cvarrToMat(objectPoints).convertTo(matM, CV_64F);
@ -1518,6 +1554,7 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
double* _errNorm = 0;
bool proceed = solver.updateAlt( _param, _JtJ, _JtErr, _errNorm );
double *param = solver.param->data.db, *pparam = solver.prevParam->data.db;
bool calcJ = solver.state == CvLevMarq::CALC_J || (!proceed && stdDevs);
if( flags & CALIB_FIX_ASPECT_RATIO )
{
@ -1528,8 +1565,10 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
A(0, 0) = param[0]; A(1, 1) = param[1]; A(0, 2) = param[2]; A(1, 2) = param[3];
std::copy(param + 4, param + 4 + 14, k);
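// On the final (converged) iteration the loop stops immediately unless standard
// deviations or per-view errors were requested; if standard deviations are needed,
// JtJ is cleared and the Jacobian is re-accumulated (calcJ) at the converged
// parameters, and in either case the residuals are stored for the per-view errors.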
if( !proceed )
if ( !proceed && !stdDevs && !perViewErrors )
break;
else if ( !proceed && stdDevs )
cvZero(_JtJ);
reprojErr = 0;
@ -1543,6 +1582,7 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
CvMat _Mi(matM.colRange(pos, pos + ni));
CvMat _mi(_m.colRange(pos, pos + ni));
CvMat _me(allErrors.colRange(pos, pos + ni));
_Je.resize(ni*2); _Ji.resize(ni*2); _err.resize(ni*2);
CvMat _dpdr(_Je.colRange(0, 3));
@ -1552,7 +1592,7 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
CvMat _dpdk(_Ji.colRange(4, NINTRINSIC));
CvMat _mp(_err.reshape(2, 1));
if( solver.state == CvLevMarq::CALC_J )
if( calcJ )
{
cvProjectPoints2( &_Mi, &_ri, &_ti, &matA, &_k, &_mp, &_dpdr, &_dpdt,
(flags & CALIB_FIX_FOCAL_LENGTH) ? 0 : &_dpdf,
@ -1563,8 +1603,10 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
cvProjectPoints2( &_Mi, &_ri, &_ti, &matA, &_k, &_mp );
cvSub( &_mp, &_mi, &_mp );
if (perViewErrors || stdDevs)
cvCopy(&_mp, &_me);
if( solver.state == CvLevMarq::CALC_J )
if( calcJ )
{
Mat JtJ(cvarrToMat(_JtJ)), JtErr(cvarrToMat(_JtErr));
@ -1581,15 +1623,52 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
}
if( _errNorm )
*_errNorm = reprojErr;
if( !proceed )
{
if( stdDevs )
{
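// Standard deviations follow the usual covariance estimate for a non-linear
// least-squares solution: with the residual vector r (the stacked x/y
// re-projection errors in allErrors) and p = nparams_nz free parameters,
//   sigma^2 = ||r||^2 / (total - p)   and   Cov = sigma^2 * (JtJ)^-1,
// each deviation being the square root of the corresponding diagonal entry
// of Cov; parameters fixed by the mask get a zero deviation.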
Mat mask = cvarrToMat(solver.mask);
int nparams_nz = countNonZero(mask);
Mat JtJinv, JtJN;
JtJN.create(nparams_nz, nparams_nz, CV_64F);
subMatrix(cvarrToMat(_JtJ), JtJN, mask, mask);
completeSymm(JtJN, false);
cv::invert(JtJN, JtJinv, DECOMP_SVD);
//sigma2 is an estimate of the noise variance
//see any paper on the variance of the least-squares estimator for
//a detailed description of the variance estimation methods
double sigma2 = norm(allErrors, NORM_L2SQR) / (total - nparams_nz);
Mat stdDevsM = cvarrToMat(stdDevs);
int j = 0;
for ( int s = 0; s < nparams; s++ )
if( mask.data[s] )
{
stdDevsM.at<double>(s) = std::sqrt(JtJinv.at<double>(j,j) * sigma2);
j++;
}
else
stdDevsM.at<double>(s) = 0.;
}
break;
}
}
// 4. store the results
cvConvert( &matA, cameraMatrix );
cvConvert( &_k, distCoeffs );
for( i = 0; i < nimages; i++ )
for( i = 0, pos = 0; i < nimages; i++ )
{
CvMat src, dst;
if( perViewErrors )
{
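// per-view error = RMS of the re-projection residuals over the ni points of view i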
ni = npoints->data.i[i*npstep];
perViewErrors->data.db[i] = std::sqrt(cv::norm(allErrors.colRange(pos, pos + ni),
NORM_L2SQR) / ni);
pos+=ni;
}
if( rvecs )
{
src = cvMat( 3, 1, CV_64F, solver.param->data.db + NINTRINSIC + i*6 );
@ -1622,6 +1701,17 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
}
/* finds intrinsic and extrinsic camera parameters
from a few views of known calibration pattern */
CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
const CvMat* imagePoints, const CvMat* npoints,
CvSize imageSize, CvMat* cameraMatrix, CvMat* distCoeffs,
CvMat* rvecs, CvMat* tvecs, int flags, CvTermCriteria termCrit )
{
return cvCalibrateCamera2Internal(objectPoints, imagePoints, npoints, imageSize, cameraMatrix,
distCoeffs, rvecs, tvecs, NULL, NULL, flags, termCrit);
}
void cvCalibrationMatrixValues( const CvMat *calibMatr, CvSize imgSize,
double apertureWidth, double apertureHeight, double *fovx, double *fovy,
double *focalLength, CvPoint2D64f *principalPoint, double *pasp )
@ -1772,7 +1862,7 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
if( !(flags & (CV_CALIB_FIX_INTRINSIC|CV_CALIB_USE_INTRINSIC_GUESS)))
{
cvCalibrateCamera2( objectPoints, imagePoints[k],
npoints, imageSize, &K[k], &Dist[k], 0, 0, flags );
npoints, imageSize, &K[k], &Dist[k], NULL, NULL, flags );
}
}
@ -3091,7 +3181,6 @@ static void collectCalibrationData( InputArrayOfArrays objectPoints,
}
}
static Mat prepareCameraMatrix(Mat& cameraMatrix0, int rtype)
{
Mat cameraMatrix = Mat::eye(3, 3, rtype);
@ -3287,10 +3376,23 @@ cv::Mat cv::initCameraMatrix2D( InputArrayOfArrays objectPoints,
}
double cv::calibrateCamera( InputArrayOfArrays _objectPoints,
InputArrayOfArrays _imagePoints,
Size imageSize, InputOutputArray _cameraMatrix, InputOutputArray _distCoeffs,
OutputArrayOfArrays _rvecs, OutputArrayOfArrays _tvecs, int flags, TermCriteria criteria )
{
return calibrateCamera(_objectPoints, _imagePoints, imageSize, _cameraMatrix, _distCoeffs,
_rvecs, _tvecs, noArray(), noArray(), noArray(), flags, criteria);
}
double cv::calibrateCamera(InputArrayOfArrays _objectPoints,
InputArrayOfArrays _imagePoints,
Size imageSize, InputOutputArray _cameraMatrix, InputOutputArray _distCoeffs,
OutputArrayOfArrays _rvecs, OutputArrayOfArrays _tvecs,
OutputArray stdDeviationsIntrinsics,
OutputArray stdDeviationsExtrinsics,
OutputArray _perViewErrors, int flags, TermCriteria criteria )
{
int rtype = CV_64F;
Mat cameraMatrix = _cameraMatrix.getMat();
@ -3304,14 +3406,17 @@ double cv::calibrateCamera( InputArrayOfArrays _objectPoints,
int nimages = int(_objectPoints.total());
CV_Assert( nimages > 0 );
Mat objPt, imgPt, npoints, rvecM, tvecM;
Mat objPt, imgPt, npoints, rvecM, tvecM, stdDeviationsM, errorsM;
bool rvecs_needed = _rvecs.needed(), tvecs_needed = _tvecs.needed();
bool rvecs_needed = _rvecs.needed(), tvecs_needed = _tvecs.needed(),
stddev_needed = stdDeviationsIntrinsics.needed(), errors_needed = _perViewErrors.needed(),
stddev_ext_needed = stdDeviationsExtrinsics.needed();
bool rvecs_mat_vec = _rvecs.isMatVector();
bool tvecs_mat_vec = _tvecs.isMatVector();
if( rvecs_needed ) {
if( rvecs_needed )
{
_rvecs.create(nimages, 1, CV_64FC3);
if(rvecs_mat_vec)
@ -3320,7 +3425,8 @@ double cv::calibrateCamera( InputArrayOfArrays _objectPoints,
rvecM = _rvecs.getMat();
}
if( tvecs_needed ) {
if( tvecs_needed )
{
_tvecs.create(nimages, 1, CV_64FC3);
if(tvecs_mat_vec)
@ -3329,16 +3435,46 @@ double cv::calibrateCamera( InputArrayOfArrays _objectPoints,
tvecM = _tvecs.getMat();
}
if( stddev_needed || stddev_ext_needed )
{
stdDeviationsM.create(nimages*6 + CV_CALIB_NINTRINSIC, 1, CV_64F);
}
if( errors_needed )
{
_perViewErrors.create(nimages, 1, CV_64F);
errorsM = _perViewErrors.getMat();
}
collectCalibrationData( _objectPoints, _imagePoints, noArray(),
objPt, imgPt, 0, npoints );
CvMat c_objPt = objPt, c_imgPt = imgPt, c_npoints = npoints;
CvMat c_cameraMatrix = cameraMatrix, c_distCoeffs = distCoeffs;
CvMat c_rvecM = rvecM, c_tvecM = tvecM;
CvMat c_rvecM = rvecM, c_tvecM = tvecM, c_stdDev = stdDeviationsM, c_errors = errorsM;
double reprojErr = cvCalibrateCamera2(&c_objPt, &c_imgPt, &c_npoints, imageSize,
double reprojErr = cvCalibrateCamera2Internal(&c_objPt, &c_imgPt, &c_npoints, imageSize,
&c_cameraMatrix, &c_distCoeffs,
rvecs_needed ? &c_rvecM : NULL,
tvecs_needed ? &c_tvecM : NULL, flags, criteria );
tvecs_needed ? &c_tvecM : NULL,
stddev_needed ? &c_stdDev : NULL,
errors_needed ? &c_errors : NULL, flags, criteria );
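// stdDeviationsM is laid out as [CV_CALIB_NINTRINSIC intrinsic deviations | 6 extrinsic deviations per view];
// the two blocks are copied into the separate user-facing output arrays below.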
if( stddev_needed )
{
stdDeviationsIntrinsics.create(CV_CALIB_NINTRINSIC, 1, CV_64F);
Mat stdDeviationsIntrinsicsMat = stdDeviationsIntrinsics.getMat();
std::memcpy(stdDeviationsIntrinsicsMat.ptr(), stdDeviationsM.ptr(),
CV_CALIB_NINTRINSIC*sizeof(double));
}
if ( stddev_ext_needed )
{
stdDeviationsExtrinsics.create(nimages*6, 1, CV_64F);
Mat stdDeviationsExtrinsicsMat = stdDeviationsExtrinsics.getMat();
std::memcpy(stdDeviationsExtrinsicsMat.ptr(),
stdDeviationsM.ptr() + CV_CALIB_NINTRINSIC*sizeof(double),
nimages*6*sizeof(double));
}
// overly complicated and inefficient rvec/tvec handling to support vector<Mat>
for(int i = 0; i < nimages; i++ )


@ -241,6 +241,8 @@ bool CvLevMarq::updateAlt( const CvMat*& _param, CvMat*& _JtJ, CvMat*& _JtErr, d
cvNorm(param, prevParam, CV_RELATIVE_L2) < criteria.epsilon )
{
_param = param;
_JtJ = JtJ;
_JtErr = JtErr;
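// also expose JtJ and JtErr on the final iteration so that the caller
// (cvCalibrateCamera2Internal) can derive the parameter covariance for the
// standard deviation estimates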
state = DONE;
return false;
}


@ -259,7 +259,7 @@ protected:
virtual void calibrate( int imageCount, int* pointCounts,
CvSize imageSize, CvPoint2D64f* imagePoints, CvPoint3D64f* objectPoints,
double* distortionCoeffs, double* cameraMatrix, double* translationVectors,
double* rotationMatrices, int flags ) = 0;
double* rotationMatrices, double *stdDevs, double* perViewErrors, int flags ) = 0;
virtual void project( int pointCount, CvPoint3D64f* objectPoints,
double* rotationMatrix, double* translationVector,
double* cameraMatrix, double* distortion, CvPoint2D64f* imagePoints ) = 0;
@ -303,9 +303,13 @@ void CV_CameraCalibrationTest::run( int start_from )
double* transVects;
double* rotMatrs;
double* stdDevs;
double* perViewErrors;
double* goodTransVects;
double* goodRotMatrs;
double* goodPerViewErrors;
double* goodStdDevs;
double cameraMatrix[3*3];
double distortion[5]={0,0,0,0,0};
@ -424,9 +428,13 @@ void CV_CameraCalibrationTest::run( int start_from )
/* Allocate memory for translate vectors and rotmatrixs*/
transVects = (double*)cvAlloc(3 * 1 * numImages * sizeof(double));
rotMatrs = (double*)cvAlloc(3 * 3 * numImages * sizeof(double));
stdDevs = (double*)cvAlloc((CV_CALIB_NINTRINSIC + 6*numImages) * sizeof(double));
perViewErrors = (double*)cvAlloc(numImages * sizeof(double));
goodTransVects = (double*)cvAlloc(3 * 1 * numImages * sizeof(double));
goodRotMatrs = (double*)cvAlloc(3 * 3 * numImages * sizeof(double));
goodPerViewErrors = (double*)cvAlloc(numImages * sizeof(double));
goodStdDevs = (double*)cvAlloc((CV_CALIB_NINTRINSIC + 6*numImages) * sizeof(double));
/* Read object points */
i = 0;/* shift for current point */
@ -501,6 +509,13 @@ void CV_CameraCalibrationTest::run( int start_from )
}
}
/* Read good stdDeviations */
for (i = 0; i < CV_CALIB_NINTRINSIC + numImages*6; i++)
{
values_read = fscanf(file, "%lf", goodStdDevs + i);
CV_Assert(values_read == 1);
}
calibFlags = 0
// + CV_CALIB_FIX_PRINCIPAL_POINT
// + CV_CALIB_ZERO_TANGENT_DIST
@ -526,6 +541,8 @@ void CV_CameraCalibrationTest::run( int start_from )
cameraMatrix,
transVects,
rotMatrs,
stdDevs,
perViewErrors,
calibFlags );
/* ---- Reproject points to the image ---- */
@ -553,6 +570,8 @@ void CV_CameraCalibrationTest::run( int start_from )
meanDy = 0;
for( currImage = 0; currImage < numImages; currImage++ )
{
double imageMeanDx = 0;
double imageMeanDy = 0;
for( currPoint = 0; currPoint < etalonSize.width * etalonSize.height; currPoint++ )
{
rx = reprojectPoints[i].x;
@ -563,6 +582,9 @@ void CV_CameraCalibrationTest::run( int start_from )
meanDx += dx;
meanDy += dy;
imageMeanDx += dx*dx;
imageMeanDy += dy*dy;
dx = fabs(dx);
dy = fabs(dy);
@ -573,6 +595,13 @@ void CV_CameraCalibrationTest::run( int start_from )
maxDy = dy;
i++;
}
goodPerViewErrors[currImage] = sqrt( (imageMeanDx + imageMeanDy) /
(etalonSize.width * etalonSize.height));
//only for the C version of the test (it does not provide evaluation of perViewErrors
//and returns zeros)
if(perViewErrors[currImage] == 0.0)
perViewErrors[currImage] = goodPerViewErrors[currImage];
}
meanDx /= numImages * etalonSize.width * etalonSize.height;
@ -613,6 +642,23 @@ void CV_CameraCalibrationTest::run( int start_from )
if( code < 0 )
goto _exit_;
/* ----- Compare per view re-projection errors ----- */
code = compare(perViewErrors,goodPerViewErrors, numImages,0.1,"per view errors vector");
if( code < 0 )
goto _exit_;
/* ----- Compare standard deviations of parameters ----- */
//only for the C version of the test (it does not provide evaluation of stdDevs
//and returns zeros)
for ( i = 0; i < CV_CALIB_NINTRINSIC + 6*numImages; i++)
{
if(stdDevs[i] == 0.0)
stdDevs[i] = goodStdDevs[i];
}
code = compare(stdDevs,goodStdDevs, CV_CALIB_NINTRINSIC + 6*numImages,.5,"stdDevs vector");
if( code < 0 )
goto _exit_;
if( maxDx > 1.0 )
{
ts->printf( cvtest::TS::LOG,
@ -636,8 +682,12 @@ void CV_CameraCalibrationTest::run( int start_from )
cvFree(&transVects);
cvFree(&rotMatrs);
cvFree(&stdDevs);
cvFree(&perViewErrors);
cvFree(&goodTransVects);
cvFree(&goodRotMatrs);
cvFree(&goodPerViewErrors);
cvFree(&goodStdDevs);
fclose(file);
file = 0;
@ -676,7 +726,7 @@ protected:
virtual void calibrate( int imageCount, int* pointCounts,
CvSize imageSize, CvPoint2D64f* imagePoints, CvPoint3D64f* objectPoints,
double* distortionCoeffs, double* cameraMatrix, double* translationVectors,
double* rotationMatrices, int flags );
double* rotationMatrices, double *stdDevs, double* perViewErrors, int flags );
virtual void project( int pointCount, CvPoint3D64f* objectPoints,
double* rotationMatrix, double* translationVector,
double* cameraMatrix, double* distortion, CvPoint2D64f* imagePoints );
@ -685,11 +735,19 @@ protected:
void CV_CameraCalibrationTest_C::calibrate(int imageCount, int* pointCounts,
CvSize imageSize, CvPoint2D64f* imagePoints, CvPoint3D64f* objectPoints,
double* distortionCoeffs, double* cameraMatrix, double* translationVectors,
double* rotationMatrices, int flags )
double* rotationMatrices, double *stdDevs, double *perViewErrors, int flags )
{
int i, total = 0;
for( i = 0; i < imageCount; i++ )
{
perViewErrors[i] = 0.0;
total += pointCounts[i];
}
for( i = 0; i < CV_CALIB_NINTRINSIC + imageCount*6; i++)
{
stdDevs[i] = 0.0;
}
CvMat _objectPoints = cvMat(1, total, CV_64FC3, objectPoints);
CvMat _imagePoints = cvMat(1, total, CV_64FC2, imagePoints);
@ -700,8 +758,7 @@ void CV_CameraCalibrationTest_C::calibrate( int imageCount, int* pointCounts,
CvMat _translationVectors = cvMat(imageCount, 3, CV_64F, translationVectors);
cvCalibrateCamera2(&_objectPoints, &_imagePoints, &_pointCounts, imageSize,
&_cameraMatrix, &_distCoeffs, &_rotationMatrices, &_translationVectors,
flags);
&_cameraMatrix, &_distCoeffs, &_rotationMatrices, &_translationVectors, flags);
}
void CV_CameraCalibrationTest_C::project( int pointCount, CvPoint3D64f* objectPoints,
@ -728,7 +785,7 @@ protected:
virtual void calibrate( int imageCount, int* pointCounts,
CvSize imageSize, CvPoint2D64f* imagePoints, CvPoint3D64f* objectPoints,
double* distortionCoeffs, double* cameraMatrix, double* translationVectors,
double* rotationMatrices, int flags );
double* rotationMatrices, double *stdDevs, double* perViewErrors, int flags );
virtual void project( int pointCount, CvPoint3D64f* objectPoints,
double* rotationMatrix, double* translationVector,
double* cameraMatrix, double* distortion, CvPoint2D64f* imagePoints );
@ -737,13 +794,15 @@ protected:
void CV_CameraCalibrationTest_CPP::calibrate(int imageCount, int* pointCounts,
CvSize _imageSize, CvPoint2D64f* _imagePoints, CvPoint3D64f* _objectPoints,
double* _distortionCoeffs, double* _cameraMatrix, double* translationVectors,
double* rotationMatrices, int flags )
double* rotationMatrices, double *stdDevs, double *perViewErrors, int flags )
{
vector<vector<Point3f> > objectPoints( imageCount );
vector<vector<Point2f> > imagePoints( imageCount );
Size imageSize = _imageSize;
Mat cameraMatrix, distCoeffs(1,4,CV_64F,Scalar::all(0));
vector<Mat> rvecs, tvecs;
Mat stdDevsMatInt, stdDevsMatExt;
Mat perViewErrorsMat;
CvPoint3D64f* op = _objectPoints;
CvPoint2D64f* ip = _imagePoints;
@ -770,8 +829,23 @@ void CV_CameraCalibrationTest_CPP::calibrate( int imageCount, int* pointCounts,
distCoeffs,
rvecs,
tvecs,
stdDevsMatInt,
stdDevsMatExt,
perViewErrorsMat,
flags );
assert( stdDevsMatInt.type() == CV_64F );
assert( stdDevsMatInt.total() == static_cast<size_t>(CV_CALIB_NINTRINSIC) );
memcpy( stdDevs, stdDevsMatInt.ptr(), CV_CALIB_NINTRINSIC*sizeof(double) );
assert( stdDevsMatExt.type() == CV_64F );
assert( stdDevsMatExt.total() == static_cast<size_t>(6*imageCount) );
memcpy( stdDevs + CV_CALIB_NINTRINSIC, stdDevsMatExt.ptr(), 6*imageCount*sizeof(double) );
assert( perViewErrorsMat.type() == CV_64F);
assert( perViewErrorsMat.total() == static_cast<size_t>(imageCount) );
memcpy( perViewErrors, perViewErrorsMat.ptr(), imageCount*sizeof(double) );
assert( cameraMatrix.type() == CV_64FC1 );
memcpy( _cameraMatrix, cameraMatrix.ptr(), 9*sizeof(double) );