Fixed warnings.
This commit is contained in:
parent 0307dd19fd
commit 5496dedd6a
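The warnings being silenced are mostly implicit narrowing conversions: cv::Mat::dot() and cv::norm() return double, cv::mean() returns a cv::Scalar (four doubles), RNG::uniform(int, int) returns int, and integer literals were used where float values are stored, so strict builds (e.g. -Wconversion on GCC/Clang, C4244 on MSVC) report possible loss of data. The first hunk additionally fixes Doxygen @see/@copybrief references that pointed at non-existent getAlgorithmType/setAlgorithmType methods. A minimal sketch of the cast pattern used throughout the hunks below (this example is mine, not part of the commit):

#include <opencv2/core.hpp>

int main()
{
    cv::Mat weights = cv::Mat::ones(1, 3, CV_32F);
    cv::Mat sample  = cv::Mat::ones(1, 3, CV_32F);

    // Implicit double -> float narrowing would warn here:
    //     float d = sample.dot(weights);
    // The explicit cast used throughout the commit keeps the build quiet.
    float d = static_cast<float>(sample.dot(weights));

    // Same idea for cv::norm() (returns double) and for the float literals
    // (1.f / -1.f instead of 1 / -1) used in the ternaries further down.
    float n    = static_cast<float>(cv::norm(weights));
    float sign = (d >= 0) ? 1.f : -1.f;

    return (n > 0.f && sign == 1.f) ? 0 : 1;
}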
@@ -1640,9 +1640,9 @@ public:
     CV_WRAP virtual void setOptimalParameters(int svmsgdType = SVMSGD::ASGD, int marginType = SVMSGD::SOFT_MARGIN) = 0;

     /** @brief %Algorithm type, one of SVMSGD::SvmsgdType. */
-    /** @see setAlgorithmType */
+    /** @see setSvmsgdType */
     CV_WRAP virtual int getSvmsgdType() const = 0;
-    /** @copybrief getAlgorithmType @see getAlgorithmType */
+    /** @copybrief getSvmsgdType @see getSvmsgdType */
     CV_WRAP virtual void setSvmsgdType(int svmsgdType) = 0;

     /** @brief %Margin type, one of SVMSGD::MarginType. */
@@ -172,7 +172,8 @@ void SVMSGDImpl::normalizeSamples(Mat &samples, Mat &average, float &multiplier)
     average = Mat(1, featuresCount, samples.type());
     for (int featureIndex = 0; featureIndex < featuresCount; featureIndex++)
     {
-        average.at<float>(featureIndex) = mean(samples.col(featureIndex))[0];
+        Scalar scalAverage = mean(samples.col(featureIndex))[0];
+        average.at<float>(featureIndex) = static_cast<float>(scalAverage[0]);
     }

     for (int sampleIndex = 0; sampleIndex < samplesCount; sampleIndex++)
@@ -182,7 +183,7 @@ void SVMSGDImpl::normalizeSamples(Mat &samples, Mat &average, float &multiplier)

     double normValue = norm(samples);

-    multiplier = sqrt(samples.total()) / normValue;
+    multiplier = static_cast<float>(sqrt(samples.total()) / normValue);

     samples *= multiplier;
 }
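For the normalizeSamples() hunk above, the temporary Scalar exists because cv::mean() returns a cv::Scalar (a Vec<double, 4>), so element 0 still has to be narrowed to float explicitly. A hedged, stand-alone illustration (variable names are mine, not from the commit):

#include <opencv2/core.hpp>

int main()
{
    cv::Mat samples = (cv::Mat_<float>(2, 2) << 1.f, 2.f, 3.f, 4.f);
    cv::Mat average(1, samples.cols, samples.type());

    for (int featureIndex = 0; featureIndex < samples.cols; featureIndex++)
    {
        // cv::mean() returns cv::Scalar (four doubles); channel 0 holds the mean
        // of the single-channel column and still needs an explicit narrowing cast.
        cv::Scalar columnMean = cv::mean(samples.col(featureIndex));
        average.at<float>(featureIndex) = static_cast<float>(columnMean[0]);
    }
    return average.at<float>(0) == 2.f ? 0 : 1;
}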
@@ -228,11 +229,11 @@ float SVMSGDImpl::calcShift(InputArray _samples, InputArray _responses) const
     for (int samplesIndex = 0; samplesIndex < trainSamplesCount; samplesIndex++)
     {
         Mat currentSample = trainSamples.row(samplesIndex);
-        float dotProduct = currentSample.dot(weights_);
+        float dotProduct = static_cast<float>(currentSample.dot(weights_));

         bool firstClass = isFirstClass(trainResponses.at<float>(samplesIndex));
-        int index = firstClass ? 0:1;
-        float signToMul = firstClass ? 1 : -1;
+        int index = firstClass ? 0 : 1;
+        float signToMul = firstClass ? 1.f : -1.f;
         float curDistance = dotProduct * signToMul;

         if (curDistance < distanceToClasses[index])
@@ -263,7 +264,7 @@ bool SVMSGDImpl::train(const Ptr<TrainData>& data, int)
     if ( areEmpty.first || areEmpty.second )
     {
         weights_ = Mat::zeros(1, featureCount, CV_32F);
-        shift_ = areEmpty.first ? -1 : 1;
+        shift_ = areEmpty.first ? -1.f : 1.f;
         return true;
     }

@@ -329,7 +330,7 @@ bool SVMSGDImpl::train(const Ptr<TrainData>& data, int)

     if (params.marginType == SOFT_MARGIN)
     {
-        shift_ = extendedWeights.at<float>(featureCount) - weights_.dot(average);
+        shift_ = extendedWeights.at<float>(featureCount) - static_cast<float>(weights_.dot(average));
     }
     else
     {
@@ -363,8 +364,8 @@ float SVMSGDImpl::predict( InputArray _samples, OutputArray _results, int ) const
     for (int sampleIndex = 0; sampleIndex < nSamples; sampleIndex++)
     {
         Mat currentSample = samples.row(sampleIndex);
-        float criterion = currentSample.dot(weights_) + shift_;
-        results.at<float>(sampleIndex) = (criterion >= 0) ? 1 : -1;
+        float criterion = static_cast<float>(currentSample.dot(weights_)) + shift_;
+        results.at<float>(sampleIndex) = (criterion >= 0) ? 1.f : -1.f;
     }

     return result;
@@ -530,9 +531,9 @@ void SVMSGDImpl::setOptimalParameters(int svmsgdType, int marginType)
         params.svmsgdType = SGD;
         params.marginType = (marginType == SOFT_MARGIN) ? SOFT_MARGIN :
                             (marginType == HARD_MARGIN) ? HARD_MARGIN : ILLEGAL_MARGIN_TYPE;
-        params.lambda = 0.0001;
-        params.gamma0 = 0.05;
-        params.c = 1;
+        params.lambda = 0.0001f;
+        params.gamma0 = 0.05f;
+        params.c = 1.f;
         params.termCrit = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100000, 0.00001);
         break;

@@ -540,9 +541,9 @@ void SVMSGDImpl::setOptimalParameters(int svmsgdType, int marginType)
         params.svmsgdType = ASGD;
         params.marginType = (marginType == SOFT_MARGIN) ? SOFT_MARGIN :
                             (marginType == HARD_MARGIN) ? HARD_MARGIN : ILLEGAL_MARGIN_TYPE;
-        params.lambda = 0.00001;
-        params.gamma0 = 0.05;
-        params.c = 0.75;
+        params.lambda = 0.00001f;
+        params.gamma0 = 0.05f;
+        params.c = 0.75f;
         params.termCrit = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100000, 0.00001);
         break;

@@ -124,7 +124,7 @@ void CV_SVMSGDTrainTest::makeTrainData(Mat weights, float shift)
     cv::Mat responses = cv::Mat::zeros(datasize, 1, CV_32FC1);
     for (int sampleIndex = 0; sampleIndex < datasize; sampleIndex++)
    {
-        responses.at<float>(sampleIndex) = decisionFunction(samples.row(sampleIndex), weights, shift) > 0 ? 1 : -1;
+        responses.at<float>(sampleIndex) = decisionFunction(samples.row(sampleIndex), weights, shift) > 0 ? 1.f : -1.f;
     }

     data = TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);
@@ -146,7 +146,7 @@ void CV_SVMSGDTrainTest::makeTestData(Mat weights, float shift)

     for (int i = 0 ; i < testSamplesCount; i++)
     {
-        testResponses.at<float>(i) = decisionFunction(testSamples.row(i), weights, shift) > 0 ? 1 : -1;
+        testResponses.at<float>(i) = decisionFunction(testSamples.row(i), weights, shift) > 0 ? 1.f : -1.f;
     }
 }

@@ -175,7 +175,7 @@ CV_SVMSGDTrainTest::CV_SVMSGDTrainTest(const Mat &weights, float shift, TrainDat

 float CV_SVMSGDTrainTest::decisionFunction(const Mat &sample, const Mat &weights, float shift)
 {
-    return sample.dot(weights) + shift;
+    return static_cast<float>(sample.dot(weights)) + shift;
 }

 void CV_SVMSGDTrainTest::run( int /*start_from*/ )
@@ -217,7 +217,7 @@ void makeWeightsAndShift(int featureCount, Mat &weights, float &shift)
     double upperLimit = 1;

     rng.fill(weights, RNG::UNIFORM, lowerLimit, upperLimit);
-    shift = rng.uniform(-featureCount, featureCount);
+    shift = static_cast<float>(rng.uniform(-featureCount, featureCount));
 }


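The makeWeightsAndShift() change above is the int-to-float direction of the same problem: cv::RNG::uniform(int, int) returns an int that is then stored in a float shift. A small sketch of that case (my own example, not the test's code):

#include <opencv2/core.hpp>

int main()
{
    cv::RNG rng(0);
    int featureCount = 4;

    // RNG::uniform(int, int) returns int; assigning it to a float without a cast
    // can trigger conversion warnings on strict builds, hence the explicit cast.
    float shift = static_cast<float>(rng.uniform(-featureCount, featureCount));

    return (shift >= -4.f && shift < 4.f) ? 0 : 1;
}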
@@ -319,7 +319,7 @@ TEST(ML_SVMSGD, twoPoints)

     float realShift = -500000.5;

-    float normRealWeights = norm(realWeights);
+    float normRealWeights = static_cast<float>(norm(realWeights));
     realWeights /= normRealWeights;
     realShift /= normRealWeights;

@@ -330,7 +330,7 @@ TEST(ML_SVMSGD, twoPoints)
     Mat foundWeights = svmsgd->getWeights();
     float foundShift = svmsgd->getShift();

-    float normFoundWeights = norm(foundWeights);
+    float normFoundWeights = static_cast<float>(norm(foundWeights));
     foundWeights /= normFoundWeights;
     foundShift /= normFoundWeights;
     CV_Assert((norm(foundWeights - realWeights) < 0.001) && (abs((foundShift - realShift) / realShift) < 0.05));
@@ -97,7 +97,7 @@ bool findCrossPointWithBorders(const Mat &weights, float shift, const std::pair<
     if (xMin == xMax && weights.at<float>(1) != 0)
     {
         x = xMin;
-        y = std::floor( - (weights.at<float>(0) * x + shift) / weights.at<float>(1));
+        y = static_cast<int>(std::floor( - (weights.at<float>(0) * x + shift) / weights.at<float>(1)));
         if (y >= yMin && y <= yMax)
         {
             crossPoint.x = x;
@@ -108,7 +108,7 @@ bool findCrossPointWithBorders(const Mat &weights, float shift, const std::pair<
     else if (yMin == yMax && weights.at<float>(0) != 0)
     {
         y = yMin;
-        x = std::floor( - (weights.at<float>(1) * y + shift) / weights.at<float>(0));
+        x = static_cast<int>(std::floor( - (weights.at<float>(1) * y + shift) / weights.at<float>(0)));
         if (x >= xMin && x <= xMax)
         {
             crossPoint.x = x;
@@ -149,8 +149,8 @@ void redraw(Data data, const Point points[2])
     Scalar color;
     for (int i = 0; i < data.samples.rows; i++)
     {
-        center.x = data.samples.at<float>(i,0);
-        center.y = data.samples.at<float>(i,1);
+        center.x = static_cast<int>(data.samples.at<float>(i,0));
+        center.y = static_cast<int>(data.samples.at<float>(i,1));
         color = (data.responses.at<float>(i) > 0) ? Scalar(128,128,0) : Scalar(0,128,128);
         circle(data.img, center, radius, color, 5);
     }
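In the sample's redraw() hunk just above, the casts go the other way: cv::Point stores int coordinates (it is cv::Point2i), so assigning a float Mat element to center.x or center.y would warn about float-to-int truncation. A minimal sketch of that case (my own example, not the sample's code):

#include <opencv2/core.hpp>

int main()
{
    cv::Mat samples = (cv::Mat_<float>(1, 2) << 10.7f, 20.2f);

    cv::Point center;                                      // cv::Point == cv::Point2i (int coordinates)
    center.x = static_cast<int>(samples.at<float>(0, 0));  // explicit float -> int truncation
    center.y = static_cast<int>(samples.at<float>(0, 1));

    return (center.x == 10 && center.y == 20) ? 0 : 1;
}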
@@ -163,8 +163,8 @@ void addPointRetrainAndRedraw(Data &data, int x, int y, int response)
 {
     Mat currentSample(1, 2, CV_32F);

-    currentSample.at<float>(0,0) = x;
-    currentSample.at<float>(0,1) = y;
+    currentSample.at<float>(0,0) = (float)x;
+    currentSample.at<float>(0,1) = (float)y;
     data.samples.push_back(currentSample);
     data.responses.push_back(response);
