gpu classifier default values made the same as in the cpu implementation
commit 60b73e7471
parent e7f5978768
@@ -1435,7 +1435,7 @@ public:
     bool load(const std::string& filename);
     void release();

-    int detectMultiScale(const GpuMat& image, GpuMat& scaledImageBuffer, GpuMat& objectsBuf, double scaleFactor = 1.2, int minNeighbors = 4/*, Size minSize = Size()*/);
+    int detectMultiScale(const GpuMat& image, GpuMat& scaledImageBuffer, GpuMat& objectsBuf, double scaleFactor = 1.1, int minNeighbors = 4/*, Size minSize = Size()*/);
     void preallocateIntegralBuffer(cv::Size desired);

     bool findLargestObject;
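
For reference, a minimal sketch of how the changed method might be called once the new default (scaleFactor = 1.1, matching the CPU cascade) applies. Only the detectMultiScale signature and its defaults come from this diff; the class name CascadeClassifier_GPU, the cascade file, the input image, and the result-download step are assumptions for illustration.

// Minimal usage sketch; everything outside the signature shown in the
// diff above is an assumption, not taken from this commit.
#include <opencv2/opencv.hpp>
#include <opencv2/gpu/gpu.hpp>

int main()
{
    cv::gpu::CascadeClassifier_GPU classifier;               // assumed class name
    if (!classifier.load("haarcascade_frontalface_alt.xml")) // hypothetical cascade file
        return 1;

    cv::Mat frame = cv::imread("frame.png", 0);              // hypothetical grayscale input
    cv::gpu::GpuMat image(frame);                            // upload to the device

    cv::gpu::GpuMat scaledImageBuffer;                       // reusable scaling work buffer
    cv::gpu::GpuMat objectsBuf;                              // receives detected rectangles

    // Relying on the defaults is now equivalent to passing 1.1 and 4
    // explicitly, so the GPU path scans scales as finely as the CPU path.
    int numDetections = classifier.detectMultiScale(image, scaledImageBuffer, objectsBuf);

    // Assumed download step: results come back as a row of cv::Rect.
    if (numDetections > 0)
    {
        cv::Mat objectsHost;
        objectsBuf.colRange(0, numDetections).download(objectsHost);
    }
    return 0;
}

Note that a smaller scaleFactor means more pyramid levels per detection, so callers who relied on the old 1.2 default trade some speed for the CPU-equivalent accuracy unless they pass 1.2 explicitly.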