Merged the trunk r8408:8457 (inclusive)

Author: Andrey Kamaev
Date:   2012-05-30 08:47:34 +00:00
parent 7b5a45eac4
commit 949c4edf41
42 changed files with 828 additions and 266 deletions

@@ -5,20 +5,31 @@ endif()
 project(tbb)
+# 4.0 update 4 - works fine
+set(tbb_ver "tbb40_20120408oss")
+set(tbb_url "http://threadingbuildingblocks.org/uploads/77/185/4.0%20update%204/tbb40_20120408oss_src.tgz")
+set(tbb_md5 "734b356da7fe0ed308741f3e6018251e")
+set(tbb_version_file "version_string.ver")
 # 4.0 update 3 - build broken
 #set(tbb_ver "tbb40_20120201oss")
 #set(tbb_url "http://threadingbuildingblocks.org/uploads/77/182/4.0%20update%203/tbb40_20120201oss_src.tgz")
 #set(tbb_md5 "4669e7d4adee018de7a7b8b972987218")
+#set(tbb_version_file "version_string.tmp")
 # 4.0 update 2 - works fine
-set(tbb_ver "tbb40_20111130oss")
-set(tbb_url "http://threadingbuildingblocks.org/uploads/77/180/4.0%20update%202/tbb40_20111130oss_src.tgz")
-set(tbb_md5 "1e6926b21e865e79772119cd44fc3ad8")
+#set(tbb_ver "tbb40_20111130oss")
+#set(tbb_url "http://threadingbuildingblocks.org/uploads/77/180/4.0%20update%202/tbb40_20111130oss_src.tgz")
+#set(tbb_md5 "1e6926b21e865e79772119cd44fc3ad8")
+#set(tbb_version_file "version_string.tmp")
+#set(tbb_need_GENERIC_DWORD_LOAD_STORE TRUE)
 # 4.0 update 1 - works fine
 #set(tbb_ver "tbb40_20111003oss")
 #set(tbb_url "http://threadingbuildingblocks.org/uploads/77/177/4.0%20update%201/tbb40_20111003oss_src.tgz")
 #set(tbb_md5 "7b5d94eb35a563b29ef402e0fd8f15c9")
+#set(tbb_version_file "version_string.tmp")
+#set(tbb_need_GENERIC_DWORD_LOAD_STORE TRUE)

 set(tbb_tarball "${CMAKE_CURRENT_SOURCE_DIR}/${tbb_ver}_src.tgz")
 set(tbb_src_dir "${CMAKE_CURRENT_BINARY_DIR}/${tbb_ver}")

@@ -92,16 +103,20 @@ list(APPEND lib_srcs "${tbb_src_dir}/src/rml/client/rml_tbb.cpp")
 add_definitions(-D__TBB_DYNAMIC_LOAD_ENABLED=0 #required
                 -D__TBB_BUILD=1 #required
-                -D__TBB_SURVIVE_THREAD_SWITCH=0 #no cilk on Android
-                -D__TBB_USE_GENERIC_DWORD_LOAD_STORE=1 #needed by TBB 4.0 update 1,2; fixed in TBB 4.0 update 3 but it has 2 new problems
+                -D__TBB_SURVIVE_THREAD_SWITCH=0 #no cilk on Android ?
                 -DUSE_PTHREAD #required
                 -DTBB_USE_GCC_BUILTINS=1 #required
-                -DTBB_USE_DEBUG=0 #just ot be sure
+                -DTBB_USE_DEBUG=0 #just to be sure
                 -DTBB_NO_LEGACY=1 #don't need backward compatibility
                 -DDO_ITT_NOTIFY=0 #it seems that we don't need these notifications
                )

-add_library(tbb STATIC ${lib_srcs} ${lib_hdrs} "${CMAKE_CURRENT_SOURCE_DIR}/android_additional.h" "${CMAKE_CURRENT_SOURCE_DIR}/version_string.tmp")
+if(tbb_need_GENERIC_DWORD_LOAD_STORE)
+  #needed by TBB 4.0 update 1,2; fixed in TBB 4.0 update 3 but it has 2 new problems
+  add_definitions(-D__TBB_USE_GENERIC_DWORD_LOAD_STORE=1)
+endif()
+
+add_library(tbb STATIC ${lib_srcs} ${lib_hdrs} "${CMAKE_CURRENT_SOURCE_DIR}/android_additional.h" "${CMAKE_CURRENT_SOURCE_DIR}/${tbb_version_file}")
 target_link_libraries(tbb c m dl)
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w -include \"${CMAKE_CURRENT_SOURCE_DIR}/android_additional.h\"")

@@ -1,11 +1,9 @@
 #define __TBB_VERSION_STRINGS \
-"TBB: BUILD_HOST buit as part of OpenCV" ENDL \
-"TBB: BUILD_OS crosscompiling" ENDL \
-"TBB: BUILD_KERNEL UNKNOWN" ENDL \
+"TBB: BUILD_HOST Unknown" ENDL \
+"TBB: BUILD_OS Android" ENDL \
+"TBB: BUILD_KERNEL Unknown" ENDL \
 "TBB: BUILD_GCC gcc version 4.4.3" ENDL \
-"TBB: BUILD_GLIBC 2.11.1" ENDL \
-"TBB: BUILD_LD " ENDL \
-"TBB: BUILD_TARGET on " ENDL \
-"TBB: BUILD_COMMAND TBD" ENDL \
-#define __TBB_DATETIME "Sun Jan 01 00:00:01 UTC 2012"
+"TBB: BUILD_GLIBC Unknown" ENDL \
+"TBB: BUILD_LD Unknown" ENDL \
+"TBB: BUILD_TARGET Unknown" ENDL \
+"TBB: BUILD_COMMAND use cv::getBuildInformation() for details" ENDL \

3rdparty/tbb/version_string.ver (new vendored file)

@@ -0,0 +1,9 @@
#define __TBB_VERSION_STRINGS(N) \
#N": BUILD_HOST Unknown" ENDL \
#N": BUILD_OS Android" ENDL \
#N": BUILD_KERNEL Unknown" ENDL \
#N": BUILD_GCC gcc version 4.4.3" ENDL \
#N": BUILD_GLIBC Unknown" ENDL \
#N": BUILD_LD Unknown" ENDL \
#N": BUILD_TARGET Unknown" ENDL \
#N": BUILD_COMMAND use cv::getBuildInformation() for details" ENDL \

@@ -170,7 +170,7 @@ elseif(MSVC_IDE)
   set(ENABLE_SOLUTION_FOLDERS0 ON)
 else()
   set(ENABLE_SOLUTION_FOLDERS0 OFF)
 endif()

 # OpenCV build options
 # ===================================================

@@ -311,7 +311,7 @@ if(UNIX)
   include(cmake/OpenCVFindPkgConfig.cmake OPTIONAL)
   include(CheckFunctionExists)
   include(CheckIncludeFile)

   if(NOT APPLE)
     CHECK_INCLUDE_FILE(alloca.h HAVE_ALLOCA_H)
     CHECK_FUNCTION_EXISTS(alloca HAVE_ALLOCA)

@@ -407,11 +407,32 @@ endif(WITH_UNICAP)
 ocv_clear_vars(HAVE_PVAPI)
 if(WITH_PVAPI)
   find_path(PVAPI_INCLUDE_PATH "PvApi.h"
-            PATHS "/usr/local/include" "/usr/include"
+            PATHS /usr/local /opt /usr ENV ProgramFiles ENV ProgramW6432
+            PATH_SUFFIXES include "Allied Vision Technologies/GigESDK/inc-pc" "AVT GigE SDK/inc-pc" "GigESDK/inc-pc"
             DOC "The path to PvAPI header")

   if(PVAPI_INCLUDE_PATH)
-    set(HAVE_PVAPI TRUE)
-  endif()
+    if(X86 AND NOT WIN32)
+      set(PVAPI_SDK_SUBDIR x86)
+    elseif(X86_64)
+      set(PVAPI_SDK_SUBDIR x64)
+    elseif(CMAKE_SYSTEM_PROCESSOR MATCHES arm)
+      set(PVAPI_SDK_SUBDIR arm)
+    endif()
+
+    get_filename_component(_PVAPI_LIBRARY "${PVAPI_INCLUDE_PATH}/../lib-pc" ABSOLUTE)
+    if(PVAPI_SDK_SUBDIR)
+      set(_PVAPI_LIBRARY "${_PVAPI_LIBRARY}/${PVAPI_SDK_SUBDIR}")
+    endif()
+    if(NOT WIN32 AND CMAKE_COMPILER_IS_GNUCXX)
+      set(_PVAPI_LIBRARY "${_PVAPI_LIBRARY}/${CMAKE_OPENCV_GCC_VERSION_MAJOR}.${CMAKE_OPENCV_GCC_VERSION_MINOR}")
+    endif()
+
+    set(PVAPI_LIBRARY "${_PVAPI_LIBRARY}/${CMAKE_STATIC_LIBRARY_PREFIX}PvAPI${CMAKE_STATIC_LIBRARY_SUFFIX}" CACHE PATH "The PvAPI library")
+    if(EXISTS "${PVAPI_LIBRARY}")
+      set(HAVE_PVAPI TRUE)
+    endif()
+  endif(PVAPI_INCLUDE_PATH)
 endif(WITH_PVAPI)

 # --- Dc1394 ---

@@ -462,7 +483,7 @@ if(WITH_FFMPEG)
   CHECK_MODULE(libavformat HAVE_FFMPEG_FORMAT)
   CHECK_MODULE(libavutil HAVE_FFMPEG_UTIL)
   CHECK_MODULE(libswscale HAVE_FFMPEG_SWSCALE)

   CHECK_INCLUDE_FILE(libavformat/avformat.h HAVE_GENTOO_FFMPEG)
   CHECK_INCLUDE_FILE(ffmpeg/avformat.h HAVE_FFMPEG_FFMPEG)
   if(NOT HAVE_GENTOO_FFMPEG AND NOT HAVE_FFMPEG_FFMPEG)

@@ -589,7 +610,7 @@ include(cmake/OpenCVDetectPython.cmake REQUIRED)
 if(ANDROID)
   include(cmake/OpenCVDetectApacheAnt.cmake REQUIRED)
   include(cmake/OpenCVDetectAndroidSDK.cmake REQUIRED)

   if(NOT ANDROID_TOOLS_Pkg_Revision GREATER 13)
     message(WARNING "OpenCV requires Android SDK tools revision 14 or newer. Otherwise tests and samples will no be compiled.")
   endif()

@@ -32,15 +32,15 @@ if(MINGW)
   # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=40838
   # here we are trying to workaround the problem
   include(CheckCXXCompilerFlag)
-  # CHECK_CXX_COMPILER_FLAG(-mstackrealign HAVE_STACKREALIGN_FLAG)
-  # if(HAVE_STACKREALIGN_FLAG)
-  #   set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -mstackrealign")
-  #else()
+  CHECK_CXX_COMPILER_FLAG(-mstackrealign HAVE_STACKREALIGN_FLAG)
+  if(HAVE_STACKREALIGN_FLAG)
+    set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -mstackrealign")
+  else()
     CHECK_CXX_COMPILER_FLAG(-mpreferred-stack-boundary=2 HAVE_PREFERRED_STACKBOUNDARY_FLAG)
     if(HAVE_PREFERRED_STACKBOUNDARY_FLAG)
       set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -mstackrealign")
     endif()
-  #endif()
+  endif()
 endif()

 if(CMAKE_COMPILER_IS_GNUCXX)

@@ -8,11 +8,11 @@ endif()
 if(NOT APPLE)
   if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
     set(CMAKE_COMPILER_IS_GNUCXX 1)
-    unset(ENABLE_PRECOMPILED_HEADERS CACHE)
+    set(ENABLE_PRECOMPILED_HEADERS OFF CACHE BOOL "" FORCE)
   endif()
   if(CMAKE_C_COMPILER_ID STREQUAL "Clang")
     set(CMAKE_COMPILER_IS_GNUCC 1)
-    unset(ENABLE_PRECOMPILED_HEADERS CACHE)
+    set(ENABLE_PRECOMPILED_HEADERS OFF CACHE BOOL "" FORCE)
   endif()
 endif()

@@ -970,6 +970,8 @@ namespace cv
     };

     CV_EXPORTS void applyColorMap(InputArray src, OutputArray dst, int colormap);

+    CV_EXPORTS bool initModule_contrib();
+
 }

@@ -241,6 +241,11 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
         CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
         pthread_mutex_lock(&mutex);
+        if (!isWorking()) {//it is a rare case, but may cause a crash
+            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle from inner part of lock just before waiting");
+            pthread_mutex_unlock(&mutex);
+            break;
+        }
         CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
         pthread_cond_wait(&objectDetectorRun, &mutex);
         if (isWorking()) {
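
The added guard re-checks ``isWorking()`` while the mutex is already held, so a stop request arriving between the previous check and ``pthread_cond_wait()`` can no longer strand the worker on the condition variable. A minimal sketch of this check-before-wait pattern (names are illustrative, not the tracker's actual members)::

    pthread_mutex_lock(&mutex);
    if (!isWorking()) {               // re-check under the lock
        pthread_mutex_unlock(&mutex);
        return;                       // leave the work cycle instead of waiting
    }
    pthread_cond_wait(&cond, &mutex); // atomically releases the mutex while waiting
    /* ... mutex re-acquired here; consume the new state ... */
    pthread_mutex_unlock(&mutex);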

@@ -713,4 +713,9 @@ Ptr<FaceRecognizer> createLBPHFaceRecognizer(int radius, int neighbors,
     return new LBPH(radius, neighbors, grid_x, grid_y);
 }

+bool initModule_contrib()
+{
+    return true;
+}
+
 }
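
``initModule_contrib()`` exists so that client code can force the contrib module, and with it the module's static ``Algorithm`` registrations, to be linked in before ``Algorithm::create()`` is used. A minimal sketch of the intended use::

    #include <opencv2/contrib/contrib.hpp>

    int main()
    {
        cv::initModule_contrib();        // pull in contrib's registrations
        std::vector<std::string> names;
        cv::Algorithm::getList(names);   // contrib algorithms are now listed
        return (int)names.size();
    }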

@@ -2357,7 +2357,7 @@ Algorithm::get
 --------------
 Returns the algorithm parameter

-.. ocv:function:: template<typename _Tp> typename ParamType<_Tp>::member_type get(const string& name) const
+.. ocv:function:: template<typename _Tp> typename ParamType<_Tp>::member_type Algorithm::get(const string& name) const

     :param name: The parameter name.

@@ -2378,13 +2378,13 @@ Algorithm::set
 --------------
 Sets the algorithm parameter

-.. ocv:function:: void set(const string& name, int value)
-.. ocv:function:: void set(const string& name, double value)
-.. ocv:function:: void set(const string& name, bool value)
-.. ocv:function:: void set(const string& name, const string& value)
-.. ocv:function:: void set(const string& name, const Mat& value)
-.. ocv:function:: void set(const string& name, const vector<Mat>& value)
-.. ocv:function:: void set(const string& name, const Ptr<Algorithm>& value)
+.. ocv:function:: void Algorithm::set(const string& name, int value)
+.. ocv:function:: void Algorithm::set(const string& name, double value)
+.. ocv:function:: void Algorithm::set(const string& name, bool value)
+.. ocv:function:: void Algorithm::set(const string& name, const string& value)
+.. ocv:function:: void Algorithm::set(const string& name, const Mat& value)
+.. ocv:function:: void Algorithm::set(const string& name, const vector<Mat>& value)
+.. ocv:function:: void Algorithm::set(const string& name, const Ptr<Algorithm>& value)

     :param name: The parameter name.
     :param value: The parameter value.

@@ -2396,7 +2396,7 @@ Algorithm::write
 ----------------
 Stores algorithm parameters in a file storage

-.. ocv:function:: void write(FileStorage& fs) const
+.. ocv:function:: void Algorithm::write(FileStorage& fs) const

     :param fs: File storage.

@@ -2413,7 +2413,7 @@ Algorithm::read
 ---------------
 Reads algorithm parameters from a file storage

-.. ocv:function:: void read(const FileNode& fn)
+.. ocv:function:: void Algorithm::read(const FileNode& fn)

     :param fn: File node of the file storage.

@@ -2423,29 +2423,24 @@ Algorithm::getList
 ------------------
 Returns the list of registered algorithms

-.. ocv:function:: void read(vector<string>& algorithms)
+.. ocv:function:: void Algorithm::getList(vector<string>& algorithms)

     :param algorithms: The output vector of algorithm names.

-This static method returns the list of registered algorithms in alphabetical order.
-
-Algorithm::getList
-------------------
-Returns the list of registered algorithms
-
-.. ocv:function:: void read(vector<string>& algorithms)
-
-    :param algorithms: The output vector of algorithm names.
-
-This static method returns the list of registered algorithms in alphabetical order.
+This static method returns the list of registered algorithms in alphabetical order. Here is how to use it ::
+
+    vector<string> algorithms;
+    Algorithm::getList(algorithms);
+    cout << "Algorithms: " << algorithms.size() << endl;
+    for (size_t i=0; i < algorithms.size(); i++)
+        cout << algorithms[i] << endl;

 Algorithm::create
 -----------------
 Creates algorithm instance by name

-.. ocv:function:: template<typename _Tp> Ptr<_Tp> create(const string& name)
+.. ocv:function:: template<typename _Tp> Ptr<_Tp> Algorithm::create(const string& name)

     :param name: The algorithm name, one of the names returned by ``Algorithm::getList()``.
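
Taken together, the corrected signatures read as in this short sketch (the name "Feature2D.SIFT" is an assumption that requires the corresponding module to be linked and initialized)::

    Ptr<Algorithm> a = Algorithm::create<Algorithm>("Feature2D.SIFT");
    a->set("contrastThreshold", 0.01);              // Algorithm::set
    double t = a->get<double>("contrastThreshold"); // Algorithm::get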

@@ -1434,7 +1434,7 @@ Finds the inverse or pseudo-inverse of a matrix.
 The function ``invert`` inverts the matrix ``src`` and stores the result in ``dst`` .
 When the matrix ``src`` is singular or non-square, the function computes the pseudo-inverse matrix (the ``dst`` matrix) so that ``norm(src*dst - I)`` is minimal, where I is an identity matrix.

-In case of the ``DECOMP_LU`` method, the function returns the ``src`` determinant ( ``src`` must be square). If it is 0, the matrix is not inverted and ``dst`` is filled with zeros.
+In case of the ``DECOMP_LU`` method, the function returns non-zero value if the inverse has been successfully computed and 0 if ``src`` is singular.

 In case of the ``DECOMP_SVD`` method, the function returns the inverse condition number of ``src`` (the ratio of the smallest singular value to the largest singular value) and 0 if ``src`` is singular. The SVD method calculates a pseudo-inverse matrix if ``src`` is singular.
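
Under the corrected ``DECOMP_LU`` contract the return value is simply a success/singularity flag, e.g.::

    Mat A = (Mat_<double>(2,2) << 2, 0,
                                  0, 4);
    Mat Ainv;
    double ok = invert(A, Ainv, DECOMP_LU);
    if (ok == 0)
        cout << "A is singular" << endl;  // non-zero means success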

@@ -947,6 +947,9 @@ double cv::invert( InputArray _src, OutputArray _dst, int method )
     bool result = false;
     Mat src = _src.getMat();
     int type = src.type();
+
+    CV_Assert(type == CV_32F || type == CV_64F);
+
     size_t esz = CV_ELEM_SIZE(type);
     int m = src.rows, n = src.cols;

@@ -969,7 +972,7 @@ double cv::invert( InputArray _src, OutputArray _dst, int method )
                 ((double*)w.data)[n-1]/((double*)w.data)[0] : 0);
     }

-    CV_Assert( m == n && (type == CV_32F || type == CV_64F));
+    CV_Assert( m == n );

     if( method == DECOMP_EIG )
     {

@@ -324,13 +324,13 @@ class CV_EXPORTS_W MSER : public FeatureDetector
 {
 public:
     //! the full constructor
-    explicit MSER( int _delta=5, int _min_area=60, int _max_area=14400,
+    CV_WRAP explicit MSER( int _delta=5, int _min_area=60, int _max_area=14400,
           double _max_variation=0.25, double _min_diversity=.2,
           int _max_evolution=200, double _area_threshold=1.01,
           double _min_margin=0.003, int _edge_blur_size=5 );

     //! the operator that extracts the MSERs from the image or the specific part of it
-    CV_WRAP_AS(detect) void operator()( const Mat& image, vector<vector<Point> >& msers,
+    CV_WRAP_AS(detect) void operator()( const Mat& image, CV_OUT vector<vector<Point> >& msers,
                                         const Mat& mask=Mat() ) const;
     AlgorithmInfo* info() const;
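
With ``CV_WRAP``/``CV_OUT`` the constructor and the ``detect`` alias become visible to the wrapper generators; from C++ the usage is unchanged, e.g. for an 8-bit grayscale ``grayImage`` (an assumed input)::

    MSER mser;                    // default parameters
    vector<vector<Point> > regions;
    mser(grayImage, regions);     // exposed as detect() in the wrappers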

@@ -96,7 +96,7 @@ elseif(APPLE)
     list(APPEND HIGHGUI_LIBRARIES "-framework Cocoa")
   endif()
 endif()

 if(WIN32)
   list(APPEND highgui_srcs src/cap_vfw.cpp src/cap_cmu.cpp src/cap_dshow.cpp)
 endif(WIN32)

@@ -157,19 +157,8 @@ endif(HAVE_FFMPEG)
 if(HAVE_PVAPI)
   add_definitions(-DHAVE_PVAPI)
   ocv_include_directories(${PVAPI_INCLUDE_PATH})
-  if(X86)
-    set(PVAPI_SDK_SUBDIR x86)
-  elseif(X86_64)
-    set(PVAPI_SDK_SUBDIR x64)
-  elseif(CMAKE_SYSTEM_PROCESSOR MATCHES arm)
-    set(PVAPI_SDK_SUBDIR arm)
-  endif()
-  if(PVAPI_SDK_SUBDIR AND CMAKE_COMPILER_IS_GNUCXX)
-    get_filename_component(PVAPI_EXPECTED_LIB_PATH "${PVAPI_INCLUDE_PATH}/../lib-pc/${PVAPI_SDK_SUBDIR}/${CMAKE_OPENCV_GCC_VERSION_MAJOR}.${CMAKE_OPENCV_GCC_VERSION_MINOR}" ABSOLUTE)
-    link_directories(${PVAPI_EXPECTED_LIB_PATH})
-  endif()
   set(highgui_srcs src/cap_pvapi.cpp ${highgui_srcs})
-  set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} PvAPI)
+  list(APPEND HIGHGUI_LIBRARIES ${PVAPI_LIBRARY})
 endif()

 if(WITH_IMAGEIO)

@@ -241,11 +230,11 @@ if(WIN32 AND WITH_FFMPEG)
   if(MSVC64 OR MINGW64)
     set(FFMPEG_SUFFIX _64)
   endif()

   set(ffmpeg_bare_name "opencv_ffmpeg${FFMPEG_SUFFIX}.dll")
   set(ffmpeg_bare_name_ver "opencv_ffmpeg${OPENCV_DLLVERSION}${FFMPEG_SUFFIX}.dll")
   set(ffmpeg_path "${OpenCV_SOURCE_DIR}/3rdparty/ffmpeg/${ffmpeg_bare_name}")

   #if(MSVC AND CMAKE_VERSION VERSION_GREATER "2.8.2")
   #  add_custom_command(TARGET ${the_module} POST_BUILD
   #                     COMMAND ${CMAKE_COMMAND} -E copy "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/$<CONFIGURATION>/${ffmpeg_bare_name_ver}"

@@ -265,7 +254,7 @@ if(WIN32 AND WITH_FFMPEG)
                        COMMAND ${CMAKE_COMMAND} -E copy "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/${ffmpeg_bare_name_ver}"
                        COMMENT "Copying ${ffmpeg_path} to the output directory")
   endif()

   install(FILES "${ffmpeg_path}" DESTINATION bin COMPONENT main RENAME "${ffmpeg_bare_name_ver}")
 endif()

@@ -57,7 +57,12 @@
 #endif

 #include <PvApi.h>
-#include <unistd.h>
+#ifdef WIN32
+#  include <io.h>
+#else
+#  include <unistd.h>
+#endif
 #include <string>

 //#include <arpa/inet.h>

@@ -69,7 +74,7 @@ class CvCaptureCAM_PvAPI : public CvCapture
 {
 public:
     CvCaptureCAM_PvAPI();
     virtual ~CvCaptureCAM_PvAPI()
     {
         close();
     }

@@ -80,52 +85,55 @@ public:
     virtual bool setProperty(int, double);
     virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int);
     virtual int getCaptureDomain()
     {
         return CV_CAP_PVAPI;
     }

 protected:
-    virtual void Sleep(unsigned int time);
+#ifndef WIN32
+    virtual void Sleep(unsigned int time);
+#endif

     typedef struct
     {
         unsigned long UID;
         tPvHandle Handle;
         tPvFrame Frame;
     } tCamera;

     IplImage *frame;
     IplImage *grayframe;

     tCamera Camera;
     tPvErr Errcode;
     bool monocrome;
 };

 CvCaptureCAM_PvAPI::CvCaptureCAM_PvAPI()
 {
     monocrome=false;
     memset(&this->Camera, 0, sizeof(this->Camera));
 }

+#ifndef WIN32
 void CvCaptureCAM_PvAPI::Sleep(unsigned int time)
 {
     struct timespec t,r;

     t.tv_sec = time / 1000;
     t.tv_nsec = (time % 1000) * 1000000;

     while(nanosleep(&t,&r)==-1)
         t = r;
 }
+#endif

 void CvCaptureCAM_PvAPI::close()
 {
     // Stop the acquisition & free the camera
     PvCommandRun(Camera.Handle, "AcquisitionStop");
     PvCaptureEnd(Camera.Handle);
     PvCameraClose(Camera.Handle);
     PvUnInitialize();
 }

@@ -134,47 +142,47 @@ void CvCaptureCAM_PvAPI::close()
 bool CvCaptureCAM_PvAPI::open( int index )
 {
     tPvCameraInfo cameraList[MAX_CAMERAS];

     tPvCameraInfo camInfo;
     tPvIpSettings ipSettings;

     if (PvInitialize()) {
     }
     //return false;

     Sleep(1000);

    //close();

     int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

     if (numCameras <= 0 || index >= numCameras)
         return false;

     Camera.UID = cameraList[index].UniqueId;

     if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings)) {
         /*
         struct in_addr addr;
         addr.s_addr = ipSettings.CurrentIpAddress;
         printf("Current address:\t%s\n",inet_ntoa(addr));
         addr.s_addr = ipSettings.CurrentIpSubnet;
         printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
         addr.s_addr = ipSettings.CurrentIpGateway;
         printf("Current gateway:\t%s\n",inet_ntoa(addr));
         */
     }
     else {
         fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
         return false;
     }

     if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
     {
         //Set Pixel Format to BRG24 to follow conventions
         /*Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
         if (Errcode != ePvErrSuccess)
         {

@@ -184,7 +192,7 @@ bool CvCaptureCAM_PvAPI::open( int index )
         */
         tPvUint32 frameWidth, frameHeight, frameSize;
         unsigned long maxSize;
         char pixelFormat[256];
         PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
         PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
         PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);

@@ -192,31 +200,31 @@ bool CvCaptureCAM_PvAPI::open( int index )
         maxSize = 8228;
         //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
         if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
             return false;
         if (strcmp(pixelFormat, "Mono8")==0) {
             grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
             grayframe->widthStep = (int)frameWidth;
             frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
             frame->widthStep = (int)frameWidth*3;
             Camera.Frame.ImageBufferSize = frameSize;
             Camera.Frame.ImageBuffer = grayframe->imageData;
         }
         else if (strcmp(pixelFormat, "Mono16")==0) {
             grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
             grayframe->widthStep = (int)frameWidth;
             frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
             frame->widthStep = (int)frameWidth*3;
             Camera.Frame.ImageBufferSize = frameSize;
             Camera.Frame.ImageBuffer = grayframe->imageData;
         }
         else if (strcmp(pixelFormat, "Bgr24")==0) {
             frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
             frame->widthStep = (int)frameWidth*3;
             Camera.Frame.ImageBufferSize = frameSize;
             Camera.Frame.ImageBuffer = frame->imageData;
         }
         else
             return false;

         // Start the camera
         PvCaptureStart(Camera.Handle);

@@ -226,19 +234,19 @@ bool CvCaptureCAM_PvAPI::open( int index )
             fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
             return false;
         }

         if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
         {
             fprintf(stderr,"Could not start Prosilica acquisition\n");
             return false;
         }

         if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
         {
             fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
             return false;
         }

         return true;
     }
     fprintf(stderr,"Error cannot open camera\n");

@@ -248,8 +256,8 @@ bool CvCaptureCAM_PvAPI::open( int index )
 bool CvCaptureCAM_PvAPI::grabFrame()
 {
     //if(Camera.Frame.Status != ePvErrUnplugged && Camera.Frame.Status != ePvErrCancelled)
     return PvCaptureQueueFrame(Camera.Handle, &(Camera.Frame), NULL) == ePvErrSuccess;
 }

@@ -257,13 +265,13 @@ IplImage* CvCaptureCAM_PvAPI::retrieveFrame(int)
 {
     if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) == ePvErrSuccess) {
         if (!monocrome) {
             cvMerge(grayframe,grayframe,grayframe,NULL,frame);
             return frame;
         }
         return grayframe;
     }
     else return NULL;
 }

 double CvCaptureCAM_PvAPI::getProperty( int property_id )

@@ -279,26 +287,26 @@ double CvCaptureCAM_PvAPI::getProperty( int property_id )
         PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
         return (double)nTemp;
     case CV_CAP_PROP_EXPOSURE:
         PvAttrUint32Get(Camera.Handle,"ExposureValue",&nTemp);
         return (double)nTemp;
     case CV_CAP_PROP_FPS:
         tPvFloat32 nfTemp;
         PvAttrFloat32Get(Camera.Handle, "StatFrameRate", &nfTemp);
         return (double)nfTemp;
     case CV_CAP_PROP_PVAPI_MULTICASTIP:
         char mEnable[2];
         char mIp[11];
         PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
         if (strcmp(mEnable, "Off") == 0) {
             return -1;
         }
         else {
             long int ip;
             int a,b,c,d;
             PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
             sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
             return (double)ip;
         }
     }
     return -1.0;
 }

@@ -317,38 +325,38 @@ bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
     */
     case CV_CAP_PROP_MONOCROME:
         if (value==1) {
             char pixelFormat[256];
             PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
             if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0) {
                 monocrome=true;
             }
             else
                 return false;
         }
         else
             monocrome=false;
         break;
     case CV_CAP_PROP_EXPOSURE:
         if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
             break;
         else
             return false;
     case CV_CAP_PROP_PVAPI_MULTICASTIP:
         if (value==-1) {
             if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                 break;
             else
                 return false;
         }
         else {
             std::string ip=cv::format("%d.%d.%d.%d", ((int)value>>24)&255, ((int)value>>16)&255, ((int)value>>8)&255, (int)value&255);
             if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                 (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                 break;
             else
                 return false;
         }
     default:
         return false;
     }

@@ -366,9 +374,4 @@ CvCapture* cvCreateCameraCapture_PvAPI( int index )
     delete capture;
     return NULL;
 }

-#ifdef _MSC_VER
-#pragma comment(lib, "PvAPI.lib")
-#endif
-
 #endif

@@ -511,7 +511,7 @@ Unnormalized box filter is useful for computing various integral characteristics
 .. seealso::

-    :ocv:func:`boxFilter`,
+    :ocv:func:`blur`,
     :ocv:func:`bilateralFilter`,
     :ocv:func:`GaussianBlur`,
     :ocv:func:`medianBlur`,

@@ -665,7 +665,7 @@ Computes the ideal point coordinates from the observed point coordinates.
     :param src: Observed point coordinates, 1xN or Nx1 2-channel (CV_32FC2 or CV_64FC2).

-    :param dst: Output ideal point coordinates after undistortion and reverse perspective transformation.
+    :param dst: Output ideal point coordinates after undistortion and reverse perspective transformation. If matrix ``P`` is identity or omitted, ``dst`` will contain normalized point coordinates.

     :param cameraMatrix: Camera matrix :math:`\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}` .

@@ -688,6 +688,7 @@ The function is similar to
     (x',y') = undistort(x",y",dist_coeffs)
     [X,Y,W]T = R*[x' y' 1]T
     x = X/W, y = Y/W
+    // only performed if P=[fx' 0 cx' [tx]; 0 fy' cy' [ty]; 0 0 1 [tz]] is specified
     u' = x*fx' + cx'
     v' = y*fy' + cy',

@@ -92,6 +92,56 @@ PERF_TEST_P(Size_MatType_BorderType3x3, blur3x3,
     SANITY_CHECK(dst, 1e-3);
 }

+PERF_TEST_P(Size_MatType_BorderType3x3, box3x3,
+            testing::Combine(
+                testing::Values(szODD, szQVGA, szVGA, sz720p),
+                testing::Values(CV_8UC1, CV_16SC1, CV_32SC1, CV_32FC1, CV_32FC3),
+                testing::ValuesIn(BorderType3x3::all())
+                )
+            )
+{
+    Size size = get<0>(GetParam());
+    int type = get<1>(GetParam());
+    BorderType3x3 btype = get<2>(GetParam());
+
+    Mat src(size, type);
+    Mat dst(size, type);
+
+    declare.in(src, WARMUP_RNG).out(dst);
+
+    TEST_CYCLE() boxFilter(src, dst, -1, Size(3,3), Point(-1,-1), false, btype);
+
+    SANITY_CHECK(dst, 1e-6, ERROR_RELATIVE);
+}
+
+PERF_TEST_P(Size_MatType_BorderType3x3, box3x3_inplace,
+            testing::Combine(
+                testing::Values(szODD, szQVGA, szVGA, sz720p),
+                testing::Values(CV_8UC1, CV_16SC1, CV_32SC1, CV_32FC1, CV_32FC3),
+                testing::ValuesIn(BorderType3x3::all())
+                )
+            )
+{
+    Size size = get<0>(GetParam());
+    int type = get<1>(GetParam());
+    BorderType3x3 btype = get<2>(GetParam());
+
+    Mat src(size, type);
+    Mat dst(size, type);
+
+    declare.in(src, WARMUP_RNG).out(dst);
+
+    while(next())
+    {
+        src.copyTo(dst);
+        startTimer();
+        boxFilter(dst, dst, -1, Size(3,3), Point(-1,-1), false, btype);
+        stopTimer();
+    }
+
+    SANITY_CHECK(dst, 1e-6, ERROR_RELATIVE);
+}
+
 PERF_TEST_P(Size_MatType_BorderType, gaussianBlur5x5,
             testing::Combine(
                 testing::Values(szODD, szQVGA, szVGA, sz720p),

@@ -117,7 +167,7 @@ PERF_TEST_P(Size_MatType_BorderType, gaussianBlur5x5,
 PERF_TEST_P(Size_MatType_BorderType, blur5x5,
             testing::Combine(
                 testing::Values(szODD, szQVGA, szVGA, sz720p),
-                testing::Values(CV_8UC1, CV_8UC4, CV_16UC1, CV_16SC1, CV_32FC1),
+                testing::Values(CV_8UC1, CV_8UC4, CV_16UC1, CV_16SC1, CV_32FC1, CV_32FC3),
                 testing::ValuesIn(BorderType::all())
                 )
             )

@@ -33,5 +33,8 @@ PERF_TEST_P(Img_BlockSize_ApertureSize_BorderType, cornerEigenValsAndVecs,
     TEST_CYCLE() cornerEigenValsAndVecs(src, dst, blockSize, apertureSize, borderType);

-    SANITY_CHECK(dst, 2e-5);
+    Mat l1;
+    extractChannel(dst, l1, 0);
+    SANITY_CHECK(l1, 2e-5);
 }

@@ -16,7 +16,7 @@ PERF_TEST_P(Img_BlockSize_ApertureSize_k_BorderType, cornerHarris,
                 testing::Values( "stitching/a1.jpg", "cv/shared/pic5.png"),
                 testing::Values( 3, 5 ),
                 testing::Values( 3, 5 ),
-                testing::Values( 1, 0.1 ),
+                testing::Values( 0.04, 0.1 ),
                 testing::ValuesIn(BorderType::all())
                 )
             )

@@ -35,5 +35,5 @@ PERF_TEST_P(Img_BlockSize_ApertureSize_k_BorderType, cornerHarris,
     TEST_CYCLE() cornerHarris(src, dst, blockSize, apertureSize, k, borderType);

-    SANITY_CHECK(dst, 2e-6);
+    SANITY_CHECK(dst, 2e-5);
 }

@@ -29,10 +29,10 @@ PERF_TEST_P(Image_MaxCorners_QualityLevel_MinDistance_BlockSize_UseHarris, goodF
     if (image.empty())
         FAIL() << "Unable to load source image" << filename;

-    Mat corners;
+    std::vector<Point2f> corners;

     double minDistance = 1;
     TEST_CYCLE() goodFeaturesToTrack(image, corners, maxCorners, qualityLevel, minDistance, noArray(), blockSize, useHarrisDetector);

-    SANITY_CHECK(corners);
+    //SANITY_CHECK(corners);
 }

@@ -247,6 +247,11 @@ cornerEigenValsVecs( const Mat& src, Mat& eigenv, int block_size,
                      int aperture_size, int op_type, double k=0.,
                      int borderType=BORDER_DEFAULT )
 {
+#ifdef HAVE_TEGRA_OPTIMIZATION
+    if (tegra::cornerEigenValsVecs(src, eigenv, block_size, aperture_size, op_type, k, borderType))
+        return;
+#endif
+
     int depth = src.depth();
     double scale = (double)(1 << ((aperture_size > 0 ? aperture_size : 3) - 1)) * block_size;
     if( aperture_size < 0 )

@@ -279,7 +279,7 @@ cv::Ptr<cv::FilterEngine> cv::createBoxFilter( int srcType, int dstType, Size ks
 {
     int sdepth = CV_MAT_DEPTH(srcType);
     int cn = CV_MAT_CN(srcType), sumType = CV_64F;
-    if( sdepth < CV_32S && (!normalize ||
+    if( sdepth <= CV_32S && (!normalize ||
         ksize.width*ksize.height <= (sdepth == CV_8U ? (1<<23) :
         sdepth == CV_16U ? (1 << 15) : (1 << 16))) )
         sumType = CV_32S;
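
Relaxing the condition from ``<`` to ``<=`` lets ``CV_32S`` sources use the 32-bit accumulator as well, when normalization is off or the kernel is small enough that the sum cannot overflow. For example, an unnormalized box sum over a ``CV_32S`` image now takes the ``CV_32S`` branch::

    Mat src(480, 640, CV_32SC1), dst;
    boxFilter(src, dst, -1, Size(3,3), Point(-1,-1),
              false /*normalize*/, BORDER_DEFAULT);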

@@ -177,12 +177,13 @@ int CvMLData::read_csv(const char* filename)
         ptr++;
     }

+    cols_count++;
+
     if ( cols_count == 0)
     {
         fclose(file);
         return -1;
     }
-    cols_count++;

     // create temporary memory storage to store the whole database
     el_ptr = new float[cols_count];

@@ -259,12 +259,12 @@ groupRectangles
 Groups the object candidate rectangles.

 .. ocv:function:: void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps=0.2)
+.. ocv:function:: void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps=0.2)

-.. ocv:pyfunction:: cv2.groupRectangles(rectList, groupThreshold[, eps]) -> None
-.. ocv:pyfunction:: cv2.groupRectangles(rectList, groupThreshold[, eps]) -> weights
-.. ocv:pyfunction:: cv2.groupRectangles(rectList, groupThreshold, eps, weights, levelWeights) -> None
+.. ocv:pyfunction:: cv2.groupRectangles(rectList, groupThreshold[, eps]) -> rectList, weights

-    :param rectList: Input/output vector of rectangles. Output vector includes retained and grouped rectangles.
+    :param rectList: Input/output vector of rectangles. Output vector includes retained and grouped rectangles. (The Python list is not modified in place.)

     :param groupThreshold: Minimum possible number of rectangles minus 1. The threshold is used in a group of rectangles to retain it.
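
A short C++ sketch of the overload documented above (the ``detections`` input is assumed to come from some detector)::

    vector<Rect> detections = /* e.g. cascade.detectMultiScale(...) results */;
    vector<int> weights;
    groupRectangles(detections, weights, 2 /*groupThreshold*/, 0.2 /*eps*/);
    // detections now holds the merged boxes; weights[i] is the cluster size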

@@ -57,6 +57,18 @@ private:
     PyThreadState* _state;
 };

+class PyEnsureGIL
+{
+public:
+    PyEnsureGIL() : _state(PyGILState_Ensure()) {}
+    ~PyEnsureGIL()
+    {
+        PyGILState_Release(_state);
+    }
+private:
+    PyGILState_STATE _state;
+};
+
 #define ERRWRAP2(expr) \
 try \
 { \

@@ -139,6 +151,8 @@ public:
     void allocate(int dims, const int* sizes, int type, int*& refcount,
                   uchar*& datastart, uchar*& data, size_t* step)
     {
+        PyEnsureGIL gil;
+
         int depth = CV_MAT_DEPTH(type);
         int cn = CV_MAT_CN(type);
         const int f = (int)(sizeof(size_t)/8);

@@ -169,6 +183,7 @@ public:
     void deallocate(int* refcount, uchar* datastart, uchar* data)
     {
+        PyEnsureGIL gil;
         if( !refcount )
             return;
         PyObject* o = pyObjectFromRefcount(refcount);
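
``PyEnsureGIL`` is the RAII complement of the thread-state helper visible above it: the allocator callbacks can fire on arbitrary worker threads, and every Python C API call they make must hold the GIL. Any other C++ helper that touches Python objects can reuse the same guard, e.g.::

    void decref_on_any_thread(PyObject* obj)
    {
        PyEnsureGIL gil;   // PyGILState_Ensure() in the constructor
        Py_DECREF(obj);    // safe: the GIL is held here
    }                      // PyGILState_Release() in the destructor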

@@ -15,9 +15,9 @@ Calculates an optical flow for a sparse feature set using the iterative Lucas-Ka
 .. ocv:cfunction:: void cvCalcOpticalFlowPyrLK( const CvArr* prev, const CvArr* curr, CvArr* prevPyr, CvArr* currPyr, const CvPoint2D32f* prevFeatures, CvPoint2D32f* currFeatures, int count, CvSize winSize, int level, char* status, float* trackError, CvTermCriteria criteria, int flags )
 .. ocv:pyoldfunction:: cv.CalcOpticalFlowPyrLK( prev, curr, prevPyr, currPyr, prevFeatures, winSize, level, criteria, flags, guesses=None) -> (currFeatures, status, trackError)

-    :param prevImg: First 8-bit single-channel or 3-channel input image.
+    :param prevImg: First 8-bit input image or pyramid constructed by :ocv:func:`buildOpticalFlowPyramid`.

-    :param nextImg: Second input image of the same size and the same type as ``prevImg`` .
+    :param nextImg: Second input image or pyramid of the same size and the same type as ``prevImg``.

     :param prevPts: Vector of 2D points for which the flow needs to be found. The point coordinates must be single-precision floating-point numbers.

@@ -29,27 +29,51 @@ Calculates an optical flow for a sparse feature set using the iterative Lucas-Ka
     :param winSize: Size of the search window at each pyramid level.

-    :param maxLevel: 0-based maximal pyramid level number. If set to 0, pyramids are not used (single level). If set to 1, two levels are used, and so on.
+    :param maxLevel: 0-based maximal pyramid level number. If set to 0, pyramids are not used (single level). If set to 1, two levels are used, and so on. If pyramids are passed to input then algorithm will use as many levels as pyramids have but no more than ``maxLevel``.

     :param criteria: Parameter specifying the termination criteria of the iterative search algorithm (after the specified maximum number of iterations ``criteria.maxCount`` or when the search window moves by less than ``criteria.epsilon`` .

     :param flags: Operation flags:

        * **OPTFLOW_USE_INITIAL_FLOW** Use initial estimations stored in ``nextPts`` . If the flag is not set, then ``prevPts`` is copied to ``nextPts`` and is considered as the initial estimate.
        * **OPTFLOW_LK_GET_MIN_EIGENVALS** Use minimum eigen values as a error measure (see ``minEigThreshold`` description). If the flag is not set, then L1 distance between patches around the original and a moved point divided by number of pixels in a window is used as a error measure.

     :param minEigThreshold: The algorithm computes a minimum eigen value of a 2x2 normal matrix of optical flow equations (this matrix is called a spatial gradient matrix in [Bouguet00]_) divided by number of pixels in a window. If this value is less then ``minEigThreshold`` then a corresponding feature is filtered out and its flow is not computed. So it allows to remove bad points earlier and speed up the computation.

 The function implements a sparse iterative version of the Lucas-Kanade optical flow in pyramids. See [Bouguet00]_. The function is parallelized with the TBB library.

+buildOpticalFlowPyramid
+-----------------------
+Constructs the image pyramid which can be passed to :ocv:func:`calcOpticalFlowPyrLK`.
+
+.. ocv:function:: int buildOpticalFlowPyramid(InputArray img, OutputArrayOfArrays pyramid, Size winSize, int maxLevel, bool withDerivatives = true, int pyrBorder = BORDER_REFLECT_101, int derivBorder = BORDER_CONSTANT, bool tryReuseInputImage = true)
+
+.. ocv:pyfunction:: cv2.buildOpticalFlowPyramid(img, winSize, maxLevel[, pyramid[, withDerivatives[, pyrBorder[, derivBorder[, tryReuseInputImage]]]]]) -> retval, pyramid
+
+    :param img: 8-bit input image.
+
+    :param pyramid: output pyramid.
+
+    :param winSize: window size of optical flow algorithm. Must be not less than ``winSize`` argument of :ocv:func:`calcOpticalFlowPyrLK`. It is needed to calculate required padding for pyramid levels.
+
+    :param maxLevel: 0-based maximal pyramid level number.
+
+    :param withDerivatives: set to precompute gradients for the every pyramid level. If pyramid is constructed without the gradients then :ocv:func:`calcOpticalFlowPyrLK` will calculate them internally.
+
+    :param pyrBorder: the border mode for pyramid layers.
+
+    :param derivBorder: the border mode for gradients.
+
+    :param tryReuseInputImage: put ROI of input image into the pyramid if possible. You can pass ``false`` to force data copying.
+
+    :return: number of levels in constructed pyramid. Can be less than ``maxLevel``.
+
 calcOpticalFlowFarneback
 ----------------------------
 Computes a dense optical flow using the Gunnar Farneback's algorithm.

 .. ocv:function:: void calcOpticalFlowFarneback( InputArray prevImg, InputArray nextImg, InputOutputArray flow, double pyrScale, int levels, int winsize, int iterations, int polyN, double polySigma, int flags )

 .. ocv:cfunction:: void cvCalcOpticalFlowFarneback( const CvArr* prevImg, const CvArr* nextImg, CvArr* flow, double pyrScale, int levels, int winsize, int iterations, int polyN, double polySigma, int flags )
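
The two functions above are meant to be used together: building the pyramids once and reusing them across calls avoids recomputing them for every tracked point set. A minimal sketch (``prevGray`` and ``nextGray`` are assumed 8-bit grayscale frames)::

    vector<Mat> pyr1, pyr2;
    Size win(21,21);
    int maxLevel = 3;
    buildOpticalFlowPyramid(prevGray, pyr1, win, maxLevel);
    buildOpticalFlowPyramid(nextGray, pyr2, win, maxLevel);

    vector<Point2f> prevPts, nextPts;
    goodFeaturesToTrack(prevGray, prevPts, 500, 0.01, 10);

    vector<uchar> status;
    vector<float> err;
    // the pyramids, not the raw images, are passed to the tracker
    calcOpticalFlowPyrLK(pyr1, pyr2, prevPts, nextPts, status, err, win, maxLevel);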

@@ -53,7 +53,7 @@ static void calcSharrDeriv(const cv::Mat& src, cv::Mat& dst)
     CV_Assert(depth == CV_8U);
     dst.create(rows, cols, CV_MAKETYPE(DataType<deriv_type>::depth, cn*2));

 #ifdef HAVE_TEGRA_OPTIMIZATION
     if (tegra::calcSharrDeriv(src, dst))
         return;
 #endif

@@ -655,6 +655,9 @@ void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
                       && ofs.x + prevPyr[lvlStep1].cols + winSize.width <= fullSize.width
                       && ofs.y + prevPyr[lvlStep1].rows + winSize.height <= fullSize.height);
         }
+
+        if(levels1 < maxLevel)
+            maxLevel = levels1;
     }

     if(_nextImg.kind() == _InputArray::STD_VECTOR_MAT)

@@ -680,19 +683,16 @@ void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
                       && ofs.x + nextPyr[lvlStep2].cols + winSize.width <= fullSize.width
                       && ofs.y + nextPyr[lvlStep2].rows + winSize.height <= fullSize.height);
         }
+
+        if(levels2 < maxLevel)
+            maxLevel = levels2;
     }

-    if(levels1 >= 0 || levels2 >= 0)
-        maxLevel = std::max(levels1, levels2);
     if (levels1 < 0)
-        maxLevel = levels1 = buildOpticalFlowPyramid(_prevImg, prevPyr, winSize, maxLevel, false);
+        maxLevel = buildOpticalFlowPyramid(_prevImg, prevPyr, winSize, maxLevel, false);

     if (levels2 < 0)
-        levels2 = buildOpticalFlowPyramid(_nextImg, nextPyr, winSize, maxLevel, false);
+        maxLevel = buildOpticalFlowPyramid(_nextImg, nextPyr, winSize, maxLevel, false);
-
-    CV_Assert(levels1 == levels2);

     if( (criteria.type & TermCriteria::COUNT) == 0 )
         criteria.maxCount = 30;

@@ -1,6 +1,14 @@
 set(sample example-face-detection)

+if(BUILD_FAT_JAVA_LIB)
+  set(native_deps opencv_java)
+  ocv_include_modules(opencv_contrib)
+else()
+  set(native_deps opencv_contrib)
+endif()
+
-add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS ${OpenCV_BINARY_DIR} SDK_TARGET 11 ${ANDROID_SDK_TARGET})
+add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS ${OpenCV_BINARY_DIR} SDK_TARGET 11 ${ANDROID_SDK_TARGET} NATIVE_DEPS ${native_deps})
 if(TARGET ${sample})
   add_dependencies(opencv_android_examples ${sample})
 endif()

@@ -0,0 +1,21 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off
include ../includeOpenCV.mk
ifeq ("$(wildcard $(OPENCV_MK_PATH))","")
#try to load OpenCV.mk from default install location
include $(TOOLCHAIN_PREBUILT_ROOT)/user/share/OpenCV/OpenCV.mk
else
include $(OPENCV_MK_PATH)
endif
LOCAL_SRC_FILES := DetectionBaseTracker.cpp
LOCAL_C_INCLUDES := $(LOCAL_PATH)
LOCAL_LDLIBS += -llog -ldl
LOCAL_MODULE := detection_base_tacker
include $(BUILD_SHARED_LIBRARY)

@@ -0,0 +1,3 @@
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a

@@ -0,0 +1,144 @@
#include <DetectionBaseTracker.h>
#include <opencv2/core/core.hpp>
#include <opencv2/contrib/detection_based_tracker.hpp>
#include <string>
#include <vector>
#include <android/log.h>
#define LOG_TAG "FaceDetection/DetectionBasedTracker"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
using namespace std;
using namespace cv;
vector<Rect> RectFaces;
inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeCreateObject
(JNIEnv * jenv, jclass jobj, jstring jFileName, jint faceSize)
{
const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
string stdFileName(jnamestr);
jlong result = 0;
try
{
DetectionBasedTracker::Parameters DetectorParams;
if (faceSize > 0)
DetectorParams.minObjectSize = faceSize;
result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
}
catch(cv::Exception e)
{
LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
return result;
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDestroyObject
(JNIEnv * jenv, jclass jobj, jlong thiz)
{
try
{
((DetectionBasedTracker*)thiz)->stop();
delete (DetectionBasedTracker*)thiz;
}
catch(cv::Exception e)
{
LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStart
(JNIEnv * jenv, jclass jobj, jlong thiz)
{
try
{
((DetectionBasedTracker*)thiz)->run();
}
catch(cv::Exception& e)
{
LOGD("nativeStart caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStop
(JNIEnv * jenv, jclass jobj, jlong thiz)
{
try
{
((DetectionBasedTracker*)thiz)->stop();
}
catch(cv::Exception& e)
{
LOGD("nativeStop caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSetFaceSize
(JNIEnv * jenv, jclass jobj, jlong thiz, jint faceSize)
{
try
{
if (faceSize > 0)
{
DetectionBasedTracker::Parameters DetectorParams = \
((DetectionBasedTracker*)thiz)->getParameters();
DetectorParams.minObjectSize = faceSize;
((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
}
}
catch(cv::Exception& e)
{
LOGD("nativeSetFaceSize caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDetect
(JNIEnv * jenv, jclass jobj, jlong thiz, jlong imageGray, jlong faces)
{
try
{
((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
}
catch(cv::Exception& e)
{
LOGD("nativeDetect caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
}
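A note on the pattern used throughout this wrapper: nativeCreateObject returns the address of a heap-allocated DetectionBasedTracker cast to jlong, and every other native method casts that jlong back to the object pointer, so the Java peer owns the native lifetime and must call release() (nativeDestroyObject) exactly once. The shared RectFaces vector also means nativeDetect is not safe to call from multiple threads at once.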

View File

@@ -0,0 +1,61 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_opencv_samples_fd_DetectionBaseTracker */
#ifndef _Included_org_opencv_samples_fd_DetectionBaseTracker
#define _Included_org_opencv_samples_fd_DetectionBaseTracker
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeCreateObject
 * Signature: (Ljava/lang/String;I)J
*/
JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeCreateObject
(JNIEnv *, jclass, jstring, jint);
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeDestroyObject
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDestroyObject
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeStart
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStart
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeStop
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStop
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeSetFaceSize
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSetFaceSize
(JNIEnv *, jclass, jlong, jint);
/*
* Class: org_opencv_samples_fd_DetectionBaseTracker
* Method: nativeDetect
* Signature: (JJJ)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDetect
(JNIEnv *, jclass, jlong, jlong, jlong);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,52 @@
package org.opencv.samples.fd;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
public class DetectionBaseTracker
{
public DetectionBaseTracker(String filename, int faceSize)
{
mNativeObj = nativeCreateObject(filename, faceSize);
}
public void start()
{
nativeStart(mNativeObj);
}
public void stop()
{
nativeStop(mNativeObj);
}
public void setMinFaceSize(int faceSize)
{
nativeSetFaceSize(mNativeObj, faceSize);
}
public void detect(Mat imageGray, MatOfRect faces)
{
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
}
public void release()
{
nativeDestroyObject(mNativeObj);
mNativeObj = 0;
}
protected long mNativeObj = 0;
protected static native long nativeCreateObject(String filename, int faceSize);
protected static native void nativeDestroyObject(long thiz);
protected static native void nativeStart(long thiz);
protected static native void nativeStop(long thiz);
protected static native void nativeSetFaceSize(long thiz, int faceSize);
protected static native void nativeDetect(long thiz, long inputImage, long resultMat);
static
{
System.loadLibrary("detection_base_tracker");
}
}

View File

@@ -16,13 +16,18 @@ public class FdActivity extends Activity {
     private MenuItem            mItemFace40;
     private MenuItem            mItemFace30;
     private MenuItem            mItemFace20;
+    private MenuItem            mItemType;

     private FdView              mView;
-    public static float         minFaceSize = 0.5f;
+    private int                 mDetectorType = 0;
+    private String[]            mDetectorName;

     public FdActivity() {
         Log.i(TAG, "Instantiated new " + this.getClass());
+        mDetectorName = new String[2];
+        mDetectorName[0] = "Cascade";
+        mDetectorName[1] = "DBT";
     }

     @Override
@@ -57,6 +62,7 @@ public class FdActivity extends Activity {
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
         mView = new FdView(this);
+        mView.setDetectorType(mDetectorType);
         setContentView(mView);
     }

@@ -67,6 +73,8 @@ public class FdActivity extends Activity {
         mItemFace40 = menu.add("Face size 40%");
         mItemFace30 = menu.add("Face size 30%");
         mItemFace20 = menu.add("Face size 20%");
+        mItemType = menu.add(mDetectorName[mDetectorType]);
+
         return true;
     }

@@ -74,13 +82,19 @@ public class FdActivity extends Activity {
     public boolean onOptionsItemSelected(MenuItem item) {
         Log.i(TAG, "Menu Item selected " + item);
         if (item == mItemFace50)
-            minFaceSize = 0.5f;
+            mView.setMinFaceSize(0.5f);
         else if (item == mItemFace40)
-            minFaceSize = 0.4f;
+            mView.setMinFaceSize(0.4f);
         else if (item == mItemFace30)
-            minFaceSize = 0.3f;
+            mView.setMinFaceSize(0.3f);
         else if (item == mItemFace20)
-            minFaceSize = 0.2f;
+            mView.setMinFaceSize(0.2f);
+        else if (item == mItemType)
+        {
+            mDetectorType = (mDetectorType + 1) % mDetectorName.length;
+            item.setTitle(mDetectorName[mDetectorType]);
+            mView.setDetectorType(mDetectorType);
+        }
         return true;
     }
 }

View File

@@ -23,10 +23,47 @@ import android.view.SurfaceHolder;

 class FdView extends SampleCvViewBase {
     private static final String   TAG = "Sample::FdView";
     private Mat                   mRgba;
     private Mat                   mGray;
+    private File                  mCascadeFile;
     private CascadeClassifier     mCascade;
+    private DetectionBaseTracker  mTracker;
+
+    public final int              CASCADE_DETECTOR = 0;
+    public final int              DBT_DETECTOR = 1;
+    private int                   mDetectorType = CASCADE_DETECTOR;
+
+    public static int             mFaceSize = 200;
+
+    public void setMinFaceSize(float faceSize)
+    {
+        int height = mGray.rows();
+        if (Math.round(height * faceSize) > 0)
+        {
+            mFaceSize = Math.round(height * faceSize);
+        }
+        mTracker.setMinFaceSize(mFaceSize);
+    }
+
+    public void setDetectorType(int type)
+    {
+        if (mDetectorType != type)
+        {
+            mDetectorType = type;
+
+            if (type == DBT_DETECTOR)
+            {
+                Log.i(TAG, "Detection Base Tracker enabled");
+                mTracker.start();
+            }
+            else
+            {
+                Log.i(TAG, "Cascade detector enabled");
+                mTracker.stop();
+            }
+        }
+    }

     public FdView(Context context) {
         super(context);
@@ -34,8 +71,8 @@ class FdView extends SampleCvViewBase {
         try {
             InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
             File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
-            File cascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
-            FileOutputStream os = new FileOutputStream(cascadeFile);
+            mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
+            FileOutputStream os = new FileOutputStream(mCascadeFile);

             byte[] buffer = new byte[4096];
             int bytesRead;
@@ -45,14 +82,15 @@ class FdView extends SampleCvViewBase {
             is.close();
             os.close();

-            mCascade = new CascadeClassifier(cascadeFile.getAbsolutePath());
+            mCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
             if (mCascade.empty()) {
                 Log.e(TAG, "Failed to load cascade classifier");
                 mCascade = null;
             } else
-                Log.i(TAG, "Loaded cascade classifier from " + cascadeFile.getAbsolutePath());
+                Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

-            cascadeFile.delete();
+            mTracker = new DetectionBaseTracker(mCascadeFile.getAbsolutePath(), 0);
             cascadeDir.delete();
         } catch (IOException e) {
@@ -77,16 +115,26 @@ class FdView extends SampleCvViewBase {
         capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
         capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

-        if (mCascade != null) {
-            int height = mGray.rows();
-            int faceSize = Math.round(height * FdActivity.minFaceSize);
-            MatOfRect faces = new MatOfRect();
-            mCascade.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
-                    , new Size(faceSize, faceSize), new Size());
-
-            for (Rect r : faces.toArray())
-                Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
+        MatOfRect faces = new MatOfRect();
+
+        if (mDetectorType == CASCADE_DETECTOR)
+        {
+            if (mCascade != null)
+                mCascade.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
+                        , new Size(mFaceSize, mFaceSize), new Size());
         }
+        else if (mDetectorType == DBT_DETECTOR)
+        {
+            if (mTracker != null)
+                mTracker.detect(mGray, faces);
+        }
+        else
+        {
+            Log.e(TAG, "Detection method is not selected!");
+        }
+
+        for (Rect r : faces.toArray())
+            Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

         Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.RGB_565/*.ARGB_8888*/);
@@ -110,9 +158,14 @@ class FdView extends SampleCvViewBase {
             mRgba.release();
         if (mGray != null)
             mGray.release();
+        if (mCascadeFile != null)
+            mCascadeFile.delete();
+        if (mTracker != null)
+            mTracker.release();

         mRgba = null;
         mGray = null;
+        mCascadeFile = null;
     }
 }

View File

@@ -56,8 +56,8 @@ def mtx2rvec(R):
     return axis * np.arctan2(s, c)

 def draw_str(dst, (x, y), s):
-    cv2.putText(dst, s, (x+1, y+1), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 0, 0), thickness = 2, linetype=cv2.CV_AA)
-    cv2.putText(dst, s, (x, y), cv2.FONT_HERSHEY_PLAIN, 1.0, (255, 255, 255), linetype=cv2.CV_AA)
+    cv2.putText(dst, s, (x+1, y+1), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 0, 0), thickness = 2, lineType=cv2.CV_AA)
+    cv2.putText(dst, s, (x, y), cv2.FONT_HERSHEY_PLAIN, 1.0, (255, 255, 255), lineType=cv2.CV_AA)

 class Sketcher:
     def __init__(self, windowname, dests, colors_func):
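The fix is just the keyword's case: the cv2.putText parameter is lineType, and with the old linetype spelling the call failed with a TypeError for an unexpected keyword argument. A quick smoke test for the patched helper (run from the samples/python2 directory so common.py is importable):

import numpy as np
import cv2
from common import draw_str   # the helper patched above

vis = np.zeros((120, 320, 3), np.uint8)
draw_str(vis, (20, 20), 'shadowed, antialiased text')
cv2.imshow('draw_str test', vis)
cv2.waitKey()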

View File

@@ -35,9 +35,10 @@ if __name__ == '__main__':
     points, ref_distrs = make_gaussians(cluster_n, img_size)

     print 'EM (opencv) ...'
-    em = cv2.EM(points, params = dict( nclusters = cluster_n, cov_mat_type = cv2.EM_COV_MAT_GENERIC) )
-    means = em.getMeans()
-    covs = em.getCovs()
+    em = cv2.EM(cluster_n, cv2.EM_COV_MAT_GENERIC)
+    em.train(points)
+    means = em.getMat('means')
+    covs = em.getMatVector('covs')
     found_distrs = zip(means, covs)
     print 'ready!\n'
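The 2.4 bindings expose EM as a class: construct with the cluster count and covariance model, call train, then read the fitted model back through the generic cv2.Algorithm getters, exactly as the new sample lines do. A self-contained sketch (the two-blob input here is made up for illustration):

import numpy as np
import cv2

# synthetic samples from two well-separated blobs
points = np.float32(np.vstack([np.random.randn(100, 2) + (5, 5),
                               np.random.randn(100, 2) - (5, 5)]))
em = cv2.EM(2, cv2.EM_COV_MAT_GENERIC)   # nclusters, covariance matrix type
em.train(points)
means = em.getMat('means')               # generic Algorithm property getters
covs  = em.getMatVector('covs')
print means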

View File

@@ -25,8 +25,7 @@ from common import draw_str
 lk_params = dict( winSize  = (19, 19),
                   maxLevel = 2,
-                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03),
-                  derivLambda = 0.0 )
+                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))

 feature_params = dict( maxCorners = 1000,
                        qualityLevel = 0.01,

View File

@@ -24,8 +24,7 @@ from time import clock
 lk_params = dict( winSize  = (15, 15),
                   maxLevel = 2,
-                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03),
-                  derivLambda = 0.0 )
+                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))

 feature_params = dict( maxCorners = 500,
                        qualityLevel = 0.3,
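The derivLambda parameter was dropped from calcOpticalFlowPyrLK in 2.4, and an unknown keyword now fails at call time with a TypeError; that is why both samples delete the key from lk_params while their cv2.calcOpticalFlowPyrLK(img0, img1, p0, None, **lk_params) call sites stay unchanged.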

View File

@@ -1,4 +1,4 @@
-help='''
+'''
 Data matrix detector sample.
 Usage:
    video_dmtx {<video device number>|<video file name>}
@@ -52,7 +52,7 @@ def data_matrix_demo(cap):

 if __name__ == '__main__':
-    print help
+    print __doc__

     if len(sys.argv) == 1:
         cap = cv2.VideoCapture(0)
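This brings the sample in line with the convention used across the cv2 samples: the triple-quoted usage text at the top of the file becomes the module docstring, so it can be printed as __doc__ instead of living in a help variable. The pattern in isolation (file name and text here are generic placeholders):

'''
Some sample.

Usage:
    some_sample.py [<arg>]
'''

if __name__ == '__main__':
    print __doc__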

View File

@@ -0,0 +1,63 @@
import numpy as np
import cv2
from Queue import Queue
from threading import Thread
from collections import deque
class Worker(Thread):
def __init__(self, tasks):
Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
func, args, kargs = self.tasks.get()
try: func(*args, **kargs)
except Exception, e: print e
self.tasks.task_done()
class ThreadPool:
def __init__(self, num_threads):
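        # bounded queue: once num_threads tasks are pending, add_task blocks, throttling the producer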
self.tasks = Queue(num_threads)
for _ in range(num_threads): Worker(self.tasks)
def add_task(self, func, *args, **kargs):
self.tasks.put((func, args, kargs))
def wait_completion(self):
self.tasks.join()
if __name__ == '__main__':
results = deque()
def process_frame(i, frame):
global results
res = cv2.medianBlur(frame, 15)
results.append((i, res))
pool = ThreadPool(4)
cap = cv2.VideoCapture(0)
frame_count = 0
last_frame = None
last_count = -1
    while True:
        ret, frame = cap.read()
        if not ret:   # camera read failed / stream ended
            break
        pool.add_task(process_frame, frame_count, frame.copy())
        frame_count += 1
while len(results) > 0:
i, frame = results.popleft()
if i > last_count:
last_count, last_frame = i, frame
if last_frame is not None:
cv2.imshow('res', last_frame)
if cv2.waitKey(1) == 27:
break
pool.wait_completion()
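Design note: worker threads finish out of order, so process_frame appends (index, result) pairs to a shared deque and the display loop keeps only frames newer than last_count, dropping late arrivals instead of showing them out of sequence; pool.wait_completion() then blocks until the queue drains before the program exits.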