commit 1286c1db45
parent 7e5c11a920
Author: Vadim Pisarevsky
Date:   2010-11-25 16:55:46 +00:00

    fixed multiple warnings from VS2010.

36 changed files with 178 additions and 177 deletions


@@ -2158,7 +2158,7 @@ inline void BruteForceMatcher<Distance>::commonKnnMatchImpl( BruteForceMatcher<D
 Point minLoc;
 minMaxLoc( allDists[iIdx], &minVal, 0, &minLoc, 0 );
 if( minVal < bestMatch.distance )
-bestMatch = DMatch( qIdx, minLoc.x, iIdx, (float)minVal );
+bestMatch = DMatch( qIdx, minLoc.x, (int)iIdx, (float)minVal );
 }
 }
 if( bestMatch.trainIdx == -1 )
@@ -2214,7 +2214,7 @@ inline void BruteForceMatcher<Distance>::commonRadiusMatchImpl( BruteForceMatche
 matcher.trainDescCollection[iIdx].step*tIdx);
 DistanceType d = matcher.distance(d1, d2, dimension);
 if( d < maxDistance )
-curMatches->push_back( DMatch( qIdx, tIdx, iIdx, (float)d ) );
+curMatches->push_back( DMatch( qIdx, tIdx, (int)iIdx, (float)d ) );
 }
 }
 }
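
The pattern above recurs throughout the commit: cv::DMatch stores its indices as int, while the loop variable walking the training collection is a size_t, and it is that implicit narrowing that VS2010 flags (typically as C4267, "conversion from 'size_t' to 'int', possible loss of data", in 64-bit builds). The fix is to make the narrowing explicit. A minimal standalone sketch of the pattern, not the OpenCV code itself (Match is a hypothetical stand-in for cv::DMatch):

#include <cstdio>
#include <vector>

struct Match { int imgIdx; float dist; };     // hypothetical stand-in for cv::DMatch

int main()
{
    std::vector<float> dists(3, 0.5f);
    std::vector<Match> matches;
    for( size_t iIdx = 0; iIdx < dists.size(); iIdx++ )
    {
        Match m;
        // m.imgIdx = iIdx;       // VS2010 (x64): C4267, size_t -> int narrowing
        m.imgIdx = (int)iIdx;     // explicit cast documents the intent and silences the warning
        m.dist = dists[iIdx];
        matches.push_back(m);
    }
    std::printf("%d\n", matches.back().imgIdx);
    return 0;
}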


@@ -100,7 +100,7 @@ Mat BOWKMeansTrainer::cluster() const
 Mat mergedDescriptors( descCount, descriptors[0].cols, descriptors[0].type() );
 for( size_t i = 0, start = 0; i < descriptors.size(); i++ )
 {
-Mat submut = mergedDescriptors.rowRange(start, start + descriptors[i].rows);
+Mat submut = mergedDescriptors.rowRange((int)start, (int)(start + descriptors[i].rows));
 descriptors[i].copyTo(submut);
 start += descriptors[i].rows;
 }
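
The cast is needed because cv::Mat::rowRange() takes int arguments while the running offset start is a size_t. The merged-descriptor loop after the change, shown as a self-contained helper (helper name is mine; assumes descriptors is non-empty and descCount equals the total row count):

#include <opencv2/core/core.hpp>
#include <vector>

cv::Mat mergeDescriptorRows( const std::vector<cv::Mat>& descriptors, int descCount )
{
    cv::Mat merged( descCount, descriptors[0].cols, descriptors[0].type() );
    size_t start = 0;
    for( size_t i = 0; i < descriptors.size(); i++ )
    {
        // rowRange(int, int): the size_t offsets are narrowed explicitly at the call site
        cv::Mat sub = merged.rowRange( (int)start, (int)(start + descriptors[i].rows) );
        descriptors[i].copyTo( sub );
        start += descriptors[i].rows;
    }
    return merged;
}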


@@ -166,7 +166,7 @@ void BriefDescriptorExtractor::computeImpl(const Mat& image, std::vector<KeyPoin
 //Remove keypoints very close to the border
 removeBorderKeypoints(keypoints, image.size(), PATCH_SIZE/2 + KERNEL_SIZE/2);
-descriptors = Mat::zeros(keypoints.size(), bytes_, CV_8U);
+descriptors = Mat::zeros((int)keypoints.size(), bytes_, CV_8U);
 test_fn_(sum, keypoints, descriptors);
 }


@@ -307,7 +307,7 @@ int RandomizedTree::getIndex(uchar* patch_data) const
 int child_offset = nodes_[index](patch_data);
 index = 2*index + 1 + child_offset;
 }
-return index - nodes_.size();
+return (int)(index - nodes_.size());
 }
 void RandomizedTree::train(std::vector<BaseKeypoint> const& base_set,
@@ -323,7 +323,7 @@ void RandomizedTree::train(std::vector<BaseKeypoint> const& base_set,
 int depth, int views, size_t reduced_num_dim,
 int num_quant_bits)
 {
-init(base_set.size(), depth, rng);
+init((int)base_set.size(), depth, rng);
 Mat patch;
@@ -466,24 +466,24 @@ void RandomizedTree::compressLeaves(size_t reduced_num_dim)
 }
 // DO NOT FREE RETURNED POINTER
-float *cs_phi = CSMatrixGenerator::getCSMatrix(reduced_num_dim, classes_, CSMatrixGenerator::PDT_BERNOULLI);
+float *cs_phi = CSMatrixGenerator::getCSMatrix((int)reduced_num_dim, classes_, CSMatrixGenerator::PDT_BERNOULLI);
 float *cs_posteriors = new float[num_leaves_ * reduced_num_dim]; // temp, num_leaves_ x reduced_num_dim
 for (int i=0; i<num_leaves_; ++i) {
 float *post = getPosteriorByIndex(i);
 float *prod = &cs_posteriors[i*reduced_num_dim];
-Mat A( reduced_num_dim, classes_, CV_32FC1, cs_phi );
+Mat A( (int)reduced_num_dim, classes_, CV_32FC1, cs_phi );
 Mat X( classes_, 1, CV_32FC1, post );
-Mat Y( reduced_num_dim, 1, CV_32FC1, prod );
+Mat Y( (int)reduced_num_dim, 1, CV_32FC1, prod );
 Y = A*X;
 }
 // copy new posteriors
 freePosteriors(3);
-allocPosteriorsAligned(num_leaves_, reduced_num_dim);
+allocPosteriorsAligned(num_leaves_, (int)reduced_num_dim);
 for (int i=0; i<num_leaves_; ++i)
 memcpy(posteriors_[i], &cs_posteriors[i*reduced_num_dim], reduced_num_dim*sizeof(float));
-classes_ = reduced_num_dim;
+classes_ = (int)reduced_num_dim;
 delete [] cs_posteriors;
 }
@@ -682,8 +682,8 @@ void RTreeClassifier::train(std::vector<BaseKeypoint> const& base_set,
 }
 num_quant_bits_ = num_quant_bits;
-classes_ = reduced_num_dim; // base_set.size();
-original_num_classes_ = base_set.size();
+classes_ = (int)reduced_num_dim; // base_set.size();
+original_num_classes_ = (int)base_set.size();
 trees_.resize(num_trees);
 printf("[OK] Training trees: base size=%i, reduced size=%i\n", (int)base_set.size(), (int)reduced_num_dim);
@@ -899,7 +899,7 @@ void RTreeClassifier::write(const char* file_name) const
 void RTreeClassifier::write(std::ostream &os) const
 {
-int num_trees = trees_.size();
+int num_trees = (int)trees_.size();
 os.write((char*)(&num_trees), sizeof(num_trees));
 os.write((char*)(&classes_), sizeof(classes_));
 os.write((char*)(&original_num_classes_), sizeof(original_num_classes_));
@@ -953,9 +953,9 @@ void RTreeClassifier::setFloatPosteriorsFromTextfile_176(std::string url)
 float RTreeClassifier::countZeroElements()
 {
-int flt_zeros = 0;
-int ui8_zeros = 0;
-int num_elem = trees_[0].classes();
+size_t flt_zeros = 0;
+size_t ui8_zeros = 0;
+size_t num_elem = trees_[0].classes();
 for (int i=0; i<(int)trees_.size(); ++i)
 for (int k=0; k<(int)trees_[i].num_leaves_; ++k) {
 float *p = trees_[i].getPosteriorByIndex(k);
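
Two different cases are mixed in this file. In write(), the count has to stay an int even on 64-bit builds, because os.write() dumps exactly sizeof(num_trees) bytes and the on-disk layout should not change with the build; casting trees_.size() once keeps both the format and the compiler quiet (assuming the matching read side reads a 4-byte int, which this hunk does not show). In countZeroElements(), the counters go the other way and are widened to size_t so the accumulation itself never narrows. A sketch of the serialization half:

#include <ostream>
#include <vector>

void writeTreeCount( std::ostream& os, const std::vector<double>& trees )
{
    // element type of the vector is irrelevant here; only the count is written
    int num_trees = (int)trees.size();                    // explicit narrowing: format stores a 32-bit count
    os.write( (char*)&num_trees, sizeof(num_trees) );     // writes exactly 4 bytes on common platforms
}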


@@ -113,7 +113,7 @@ Ptr<DescriptorExtractor> DescriptorExtractor::create(const string& descriptorExt
 {
 DescriptorExtractor* de = 0;
-int pos = 0;
+size_t pos = 0;
 if (!descriptorExtractorType.compare("SIFT"))
 {
 de = new SiftDescriptorExtractor();
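
The switch from int to size_t for pos is presumably because pos later receives the result of std::string::find(), which returns std::string::size_type (a size_t) and uses std::string::npos as its not-found value; the surrounding factory code is not shown in this hunk, so that is an assumption. The warning-free idiom:

#include <cstdio>
#include <string>

int main()
{
    std::string type = "OpponentSIFT";
    size_t pos = type.find("Opponent");                  // size_t, not int: matches find()'s return type
    if( pos == 0 )
        std::printf("color-opponent variant of %s\n", type.substr(8).c_str());
    else if( pos == std::string::npos )
        std::printf("plain descriptor type: %s\n", type.c_str());
    return 0;
}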


@@ -100,7 +100,7 @@ void FeatureDetector::write( FileStorage& ) const
 Ptr<FeatureDetector> FeatureDetector::create( const string& detectorType )
 {
 FeatureDetector* fd = 0;
-int pos = 0;
+size_t pos = 0;
 if( !detectorType.compare( "FAST" ) )
 {


@@ -74,12 +74,12 @@ void DynamicAdaptedFeatureDetector::detectImpl(const Mat& image, vector<KeyPoint
 if (int(keypoints.size()) < min_features_)
 {
 down = true;
-adjuster.tooFew(min_features_, keypoints.size());
+adjuster.tooFew(min_features_, (int)keypoints.size());
 }
 else if (int(keypoints.size()) > max_features_)
 {
 up = true;
-adjuster.tooMany(max_features_, keypoints.size());
+adjuster.tooMany(max_features_, (int)keypoints.size());
 }
 else
 thresh_good = true;
@@ -96,13 +96,13 @@ void FastAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, con
 FastFeatureDetector(thresh_, nonmax_).detect(image, keypoints, mask);
 }
-void FastAdjuster::tooFew(int min, int n_detected)
+void FastAdjuster::tooFew(int, int)
 {
 //fast is easy to adjust
 thresh_--;
 }
-void FastAdjuster::tooMany(int max, int n_detected)
+void FastAdjuster::tooMany(int, int)
 {
 //fast is easy to adjust
 thresh_++;
@@ -121,18 +121,18 @@ StarAdjuster::StarAdjuster(double initial_thresh) :
 void StarAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
 {
-StarFeatureDetector detector_tmp(16, thresh_, 10, 8, 3);
+StarFeatureDetector detector_tmp(16, cvRound(thresh_), 10, 8, 3);
 detector_tmp.detect(image, keypoints, mask);
 }
-void StarAdjuster::tooFew(int min, int n_detected)
+void StarAdjuster::tooFew(int, int)
 {
 thresh_ *= 0.9;
 if (thresh_ < 1.1)
 thresh_ = 1.1;
 }
-void StarAdjuster::tooMany(int max, int n_detected)
+void StarAdjuster::tooMany(int, int)
 {
 thresh_ *= 1.1;
 }
@@ -152,14 +152,14 @@ void SurfAdjuster::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, con
 detector_tmp.detect(image, keypoints, mask);
 }
-void SurfAdjuster::tooFew(int min, int n_detected)
+void SurfAdjuster::tooFew(int, int)
 {
 thresh_ *= 0.9;
 if (thresh_ < 1.1)
 thresh_ = 1.1;
 }
-void SurfAdjuster::tooMany(int max, int n_detected)
+void SurfAdjuster::tooMany(int, int)
 {
 thresh_ *= 1.1;
 }
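
Two idioms carry this file (warning numbers below are my reading of typical VS2010 output, not taken from the commit): leaving a parameter unnamed tells the compiler it is intentionally unused, which avoids C4100 ("unreferenced formal parameter") at /W4 without changing the interface, and cvRound() makes the double-to-int conversion explicit where a detector expects an integer threshold, instead of leaving an implicit truncation for C4244 to complain about. A hypothetical adjuster sketching both:

#include <opencv2/core/core.hpp>   // cvRound

struct StarLikeAdjuster
{
    double thresh_;

    // parameters stay in the signature for interface compatibility, but are unnamed
    void tooFew( int /*min*/, int /*n_detected*/ )  { thresh_ *= 0.9; }
    void tooMany( int /*min*/, int /*n_detected*/ ) { thresh_ *= 1.1; }

    // the detector constructor takes an int threshold; round explicitly rather than truncate implicitly
    int intThreshold() const { return cvRound( thresh_ ); }
};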


@@ -333,7 +333,7 @@ static void computeOneToOneMatchedOverlaps( const vector<EllipticKeyPoint>& keyp
 {
 float ov = (float)ac.bna / (float)ac.bua;
 if( ov >= minOverlap )
-overlaps.push_back(SIdx(ov, i1, i2));
+overlaps.push_back(SIdx(ov, (int)i1, (int)i2));
 }
 }
 }
@@ -385,10 +385,10 @@ static void calculateRepeatability( const Mat& img1, const Mat& img2, const Mat&
 {
 overlapThreshold = 1.f - 0.5f;
-thresholdedOverlapMask->create( keypoints1.size(), keypoints2t.size(), CV_8UC1 );
+thresholdedOverlapMask->create( (int)keypoints1.size(), (int)keypoints2t.size(), CV_8UC1 );
 thresholdedOverlapMask->setTo( Scalar::all(0) );
 }
-int minCount = min( keypoints1.size(), keypoints2t.size() );
+size_t minCount = min( keypoints1.size(), keypoints2t.size() );
 // calculate overlap errors
 vector<SIdx> overlaps;
@@ -402,7 +402,7 @@ static void calculateRepeatability( const Mat& img1, const Mat& img2, const Mat&
 if( ifEvaluateDetectors )
 {
 // regions one-to-one matching
-correspondencesCount = overlaps.size();
+correspondencesCount = (int)overlaps.size();
 repeatability = minCount ? (float)correspondencesCount / minCount : -1;
 }
 else
@@ -502,7 +502,7 @@ float cv::getRecall( const vector<Point2f>& recallPrecisionCurve, float l_precis
 float curDiff = std::fabs(l_precision - recallPrecisionCurve[i].x);
 if( curDiff <= minDiff )
 {
-bestIdx = i;
+bestIdx = (int)i;
 minDiff = curDiff;
 }
 }
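
The type change for minCount follows from std::min itself: both keypoint counts are size_t, so the result is size_t, and storing it in an int was the narrowing VS2010 objected to. Keeping the count wide and converting only where the float ratio is formed is the quieter arrangement. A standalone sketch with made-up numbers:

#include <algorithm>
#include <cstdio>
#include <vector>

int main()
{
    std::vector<int> kp1(120), kp2(95);                          // pretend keypoint sets
    size_t minCount = std::min( kp1.size(), kp2.size() );        // size_t, as min() of two size_t values
    int correspondencesCount = 80;                               // e.g. one-to-one matched regions
    float repeatability = minCount ? (float)correspondencesCount / minCount : -1.f;
    std::printf("repeatability = %.2f\n", repeatability);
    return 0;
}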


@@ -56,10 +56,11 @@ Mat windowedMatchingMask( const vector<KeyPoint>& keypoints1, const vector<KeyPo
 if( keypoints1.empty() || keypoints2.empty() )
 return Mat();
-Mat mask( keypoints1.size(), keypoints2.size(), CV_8UC1 );
-for( size_t i = 0; i < keypoints1.size(); i++ )
+int n1 = (int)keypoints1.size(), n2 = (int)keypoints2.size();
+Mat mask( n1, n2, CV_8UC1 );
+for( int i = 0; i < n1; i++ )
 {
-for( size_t j = 0; j < keypoints2.size(); j++ )
+for( int j = 0; j < n2; j++ )
 {
 Point2f diff = keypoints2[j].pt - keypoints1[i].pt;
 mask.at<uchar>(i, j) = std::abs(diff.x) < maxDeltaX && std::abs(diff.y) < maxDeltaY;
@@ -166,11 +167,11 @@ void DescriptorMatcher::DescriptorCollection::getLocalIdx( int globalDescIdx, in
 {
 if( globalDescIdx < startIdxs[i] )
 {
-imgIdx = i - 1;
+imgIdx = (int)(i - 1);
 break;
 }
 }
-imgIdx = imgIdx == -1 ? startIdxs.size() -1 : imgIdx;
+imgIdx = imgIdx == -1 ? (int)(startIdxs.size() - 1) : imgIdx;
 localDescIdx = globalDescIdx - startIdxs[imgIdx];
 }
@@ -648,7 +649,7 @@ void GenericDescriptorMatcher::KeyPointCollection::add( const vector<Mat>& _imag
 images.insert( images.end(), _images.begin(), _images.end() );
 keypoints.insert( keypoints.end(), _points.begin(), _points.end() );
 for( size_t i = 0; i < _points.size(); i++ )
-pointCount += _points[i].size();
+pointCount += (int)_points[i].size();
 size_t prevSize = startIndices.size(), addSize = _images.size();
 startIndices.resize( prevSize + addSize );
@@ -656,11 +657,11 @@ void GenericDescriptorMatcher::KeyPointCollection::add( const vector<Mat>& _imag
 if( prevSize == 0 )
 startIndices[prevSize] = 0; //first
 else
-startIndices[prevSize] = startIndices[prevSize-1] + keypoints[prevSize-1].size();
+startIndices[prevSize] = (int)(startIndices[prevSize-1] + keypoints[prevSize-1].size());
 for( size_t i = prevSize + 1; i < prevSize + addSize; i++ )
 {
-startIndices[i] = startIndices[i - 1] + keypoints[i - 1].size();
+startIndices[i] = (int)(startIndices[i - 1] + keypoints[i - 1].size());
 }
 }
@@ -712,11 +713,11 @@ void GenericDescriptorMatcher::KeyPointCollection::getLocalIdx( int globalPointI
 {
 if( globalPointIdx < startIndices[i] )
 {
-imgIdx = i - 1;
+imgIdx = (int)(i - 1);
 break;
 }
 }
-imgIdx = imgIdx == -1 ? startIndices.size() -1 : imgIdx;
+imgIdx = imgIdx == -1 ? (int)(startIndices.size() - 1) : imgIdx;
 localPointIdx = globalPointIdx - startIndices[imgIdx];
 }
@@ -923,14 +924,14 @@ void OneWayDescriptorMatcher::train()
 base = new OneWayDescriptorObject( params.patchSize, params.poseCount, params.pcaFilename,
 params.trainPath, params.trainImagesList, params.minScale, params.maxScale, params.stepScale );
-base->Allocate( trainPointCollection.keypointCount() );
-prevTrainCount = trainPointCollection.keypointCount();
+base->Allocate( (int)trainPointCollection.keypointCount() );
+prevTrainCount = (int)trainPointCollection.keypointCount();
 const vector<vector<KeyPoint> >& points = trainPointCollection.getKeypoints();
 int count = 0;
 for( size_t i = 0; i < points.size(); i++ )
 {
-IplImage _image = trainPointCollection.getImage(i);
+IplImage _image = trainPointCollection.getImage((int)i);
 for( size_t j = 0; j < points[i].size(); j++ )
 base->InitializeDescriptor( count++, &_image, points[i][j], "" );
 }
@@ -961,7 +962,7 @@ void OneWayDescriptorMatcher::knnMatchImpl( const Mat& queryImage, vector<KeyPoi
 int descIdx = -1, poseIdx = -1;
 float distance;
 base->FindDescriptor( &_qimage, queryKeypoints[i].pt, descIdx, poseIdx, distance );
-matches[i].push_back( DMatch(i, descIdx, distance) );
+matches[i].push_back( DMatch((int)i, descIdx, distance) );
 }
 }
@@ -979,7 +980,7 @@ void OneWayDescriptorMatcher::radiusMatchImpl( const Mat& queryImage, vector<Key
 float distance;
 base->FindDescriptor( &_qimage, queryKeypoints[i].pt, descIdx, poseIdx, distance );
 if( distance < maxDistance )
-matches[i].push_back( DMatch(i, descIdx, distance) );
+matches[i].push_back( DMatch((int)i, descIdx, distance) );
 }
 }
@@ -1060,7 +1061,7 @@ void FernDescriptorMatcher::train()
 vector<vector<Point2f> > points( trainPointCollection.imageCount() );
 for( size_t imgIdx = 0; imgIdx < trainPointCollection.imageCount(); imgIdx++ )
-KeyPoint::convert( trainPointCollection.getKeypoints(imgIdx), points[imgIdx] );
+KeyPoint::convert( trainPointCollection.getKeypoints((int)imgIdx), points[imgIdx] );
 classifier = new FernClassifier( points, trainPointCollection.getImages(), vector<vector<int> >(), 0, // each points is a class
 params.patchSize, params.signatureSize, params.nstructs, params.structSize,
@@ -1112,8 +1113,8 @@ void FernDescriptorMatcher::knnMatchImpl( const Mat& queryImage, vector<KeyPoint
 if( -signature[ci] < bestMatch.distance )
 {
 int imgIdx = -1, trainIdx = -1;
-trainPointCollection.getLocalIdx( ci , imgIdx, trainIdx );
-bestMatch = DMatch( queryIdx, trainIdx, imgIdx, -signature[ci] );
+trainPointCollection.getLocalIdx( (int)ci , imgIdx, trainIdx );
+bestMatch = DMatch( (int)queryIdx, trainIdx, imgIdx, -signature[ci] );
 }
 }
@@ -1143,7 +1144,7 @@ void FernDescriptorMatcher::radiusMatchImpl( const Mat& queryImage, vector<KeyPo
 {
 int imgIdx = -1, trainIdx = -1;
 trainPointCollection.getLocalIdx( ci , imgIdx, trainIdx );
-matches[i].push_back( DMatch( i, trainIdx, imgIdx, -signature[ci] ) );
+matches[i].push_back( DMatch( (int)i, trainIdx, imgIdx, -signature[ci] ) );
 }
 }
 }
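
The windowedMatchingMask hunk at the top of this file is the one place where the fix is more than a cast: cv::Mat dimensions and Mat::at<>(row, col) are int-based, so the keypoint counts are converted once up front and the loops run on int indices, rather than casting a size_t inside the inner loop. The function as it reads after the change (name and signature abbreviated here):

#include <opencv2/features2d/features2d.hpp>
#include <cmath>
#include <vector>

cv::Mat windowedMask( const std::vector<cv::KeyPoint>& keypoints1,
                      const std::vector<cv::KeyPoint>& keypoints2,
                      float maxDeltaX, float maxDeltaY )
{
    if( keypoints1.empty() || keypoints2.empty() )
        return cv::Mat();

    int n1 = (int)keypoints1.size(), n2 = (int)keypoints2.size();   // narrow once, up front
    cv::Mat mask( n1, n2, CV_8UC1 );
    for( int i = 0; i < n1; i++ )
        for( int j = 0; j < n2; j++ )
        {
            cv::Point2f diff = keypoints2[j].pt - keypoints1[i].pt;
            mask.at<uchar>(i, j) = std::abs(diff.x) < maxDeltaX && std::abs(diff.y) < maxDeltaY;
        }
    return mask;
}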


@@ -831,10 +831,10 @@ void FernClassifier::prepare(int _nclasses, int _patchSize, int _signatureSize,
 static int calcNumPoints( const vector<vector<Point2f> >& points )
 {
-int count = 0;
+size_t count = 0;
 for( size_t i = 0; i < points.size(); i++ )
 count += points[i].size();
-return count;
+return (int)count;
 }
 void FernClassifier::train(const vector<vector<Point2f> >& points,
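
calcNumPoints shows the accumulate-wide, narrow-once pattern: each points[i].size() is a size_t, so summing into an int would narrow on every iteration, while summing into a size_t and casting a single time at the return leaves exactly one explicit conversion. A self-contained sketch (vector<float> stands in for vector<Point2f>):

#include <cstdio>
#include <vector>

static int calcNumPoints( const std::vector<std::vector<float> >& points )
{
    size_t count = 0;                            // accumulate in the wide type
    for( size_t i = 0; i < points.size(); i++ )
        count += points[i].size();
    return (int)count;                           // narrow once, at the boundary
}

int main()
{
    std::vector<std::vector<float> > pts( 3, std::vector<float>(4) );
    std::printf( "%d points\n", calcNumPoints(pts) );
    return 0;
}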


@@ -44,7 +44,7 @@
 #define __OPENCV_PRECOMP_H__
 #if _MSC_VER >= 1200
-#pragma warning( disable: 4251 4710 4711 4514 4996 )
+#pragma warning( disable: 4251 4512 4710 4711 4514 4996 )
 #endif
 #ifdef HAVE_CONFIG_H
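
The number added to the pragma, 4512, is VS2010's "assignment operator could not be generated" warning, typically raised for classes with const or reference members. Disabling it in the precompiled header silences it module-wide; the per-class alternative is to declare (without defining) a private operator=. A sketch of the kind of class that triggers it (the class itself is a made-up example):

#if defined(_MSC_VER) && _MSC_VER >= 1200
#pragma warning( disable: 4251 4512 4710 4711 4514 4996 )
#endif

class DistanceAdaptor
{
public:
    explicit DistanceAdaptor( const float& scale ) : scale_(scale) {}
    float apply( float d ) const { return d * scale_; }
private:
    const float& scale_;   // reference member: no implicit operator=, hence C4512 at /W4
};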


@@ -1404,7 +1404,7 @@ Sift::detectKeypoints(VL::float_t threshold, VL::float_t edgeThreshold)
 VL::float_t Dx=0,Dy=0,Ds=0,Dxx=0,Dyy=0,Dss=0,Dxy=0,Dxs=0,Dys=0 ;
 VL::float_t b [3] ;
-pixel_t* pt ;
+pixel_t* pt = 0;
 int dx = 0 ;
 int dy = 0 ;
@@ -1697,7 +1697,7 @@ Sift::computeKeypointOrientations(VL::float_t angles [4], Keypoint keypoint)
 prepareGrad(o) ;
 // clear the SIFT histogram
-std::fill(hist, hist + nbins, 0) ;
+std::fill(hist, hist + nbins, 0.f) ;
 // fill the SIFT histogram
 pixel_t* pt = temp + xi * xo + yi * yo + (si - smin -1) * so ;
@@ -1896,7 +1896,7 @@ Sift::computeKeypointDescriptor
 // make sure gradient buffer is up-to-date
 prepareGrad(o) ;
-std::fill( descr_pt, descr_pt + NBO*NBP*NBP, 0 ) ;
+std::fill( descr_pt, descr_pt + NBO*NBP*NBP, 0.f ) ;
 /* Center the scale space and the descriptor on the current keypoint.
 * Note that dpt is pointing to the bin of center (SBP/2,SBP/2,0).
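
The two sift.cpp changes address different warnings (the numbers here are my reading, not stated in the commit): initializing pt avoids VS2010's "potentially uninitialized local variable" diagnostic (C4701) for a pointer that is only assigned on some control-flow paths, and passing 0.f instead of the int literal 0 to std::fill makes the fill value match the float element type, so no implicit int-to-float conversion is reported inside the fill. A standalone sketch of both:

#include <algorithm>
#include <cstdio>

int main()
{
    float hist[36];
    std::fill( hist, hist + 36, 0.f );   // fill value has the element type

    float* pt = 0;                       // definite initial value, even though a branch assigns it below
    if( hist[0] == 0.f )
        pt = hist;
    std::printf( "%p %f\n", (void*)pt, hist[0] );
    return 0;
}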