diff --git a/3rdparty/tbb/CMakeLists.txt b/3rdparty/tbb/CMakeLists.txt index 3c6ae98f4..af1581349 100644 --- a/3rdparty/tbb/CMakeLists.txt +++ b/3rdparty/tbb/CMakeLists.txt @@ -122,15 +122,32 @@ file(GLOB lib_srcs "${tbb_src_dir}/src/tbb/*.cpp") file(GLOB lib_hdrs "${tbb_src_dir}/src/tbb/*.h") list(APPEND lib_srcs "${tbb_src_dir}/src/rml/client/rml_tbb.cpp") -add_definitions(-D__TBB_DYNAMIC_LOAD_ENABLED=0 #required - -D__TBB_BUILD=1 #required - -D__TBB_SURVIVE_THREAD_SWITCH=0 #no cilk support - -DUSE_PTHREAD #required for Unix - -DTBB_USE_GCC_BUILTINS=1 #required for ARM GCC - -DTBB_USE_DEBUG=0 #just to be sure - -DTBB_NO_LEGACY=1 #don't need backward compatibility - -DDO_ITT_NOTIFY=0 #it seems that we don't need these notifications - ) +if (WIN32) + add_definitions(-D__TBB_DYNAMIC_LOAD_ENABLED=0 + -D__TBB_BUILD=1 + -D_UNICODE + -DUNICODE + -DWINAPI_FAMILY=WINAPI_FAMILY_APP + -DDO_ITT_NOTIFY=0 + ) # defines were copied from windows.cl.inc +set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} /APPCONTAINER") +else() + add_definitions(-D__TBB_DYNAMIC_LOAD_ENABLED=0 #required + -D__TBB_BUILD=1 #required + -D__TBB_SURVIVE_THREAD_SWITCH=0 #no cilk support + -DTBB_USE_DEBUG=0 #just to be sure + -DTBB_NO_LEGACY=1 #don't need backward compatibility + -DDO_ITT_NOTIFY=0 #it seems that we don't need these notifications + ) +endif() + +if (HAVE_LIBPTHREAD) + add_definitions(-DUSE_PTHREAD) #required for Unix +endif() + +if (CMAKE_COMPILER_IS_GNUCXX) + add_definitions(-DTBB_USE_GCC_BUILTINS=1) #required for ARM GCC +endif() if(ANDROID_COMPILER_IS_CLANG) add_definitions(-D__TBB_GCC_BUILTIN_ATOMICS_PRESENT=1) @@ -145,7 +162,7 @@ endif() set(TBB_SOURCE_FILES ${lib_srcs} ${lib_hdrs}) -if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") +if (ARM AND NOT WIN32) if (NOT ANDROID) set(TBB_SOURCE_FILES ${TBB_SOURCE_FILES} "${CMAKE_CURRENT_SOURCE_DIR}/arm_linux_stub.cpp") endif() diff --git a/CMakeLists.txt b/CMakeLists.txt index e3c8259a1..b9823b372 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -118,6 +118,7 @@ OCV_OPTION(WITH_CUFFT "Include NVidia Cuda Fast Fourier Transform (FFT) OCV_OPTION(WITH_CUBLAS "Include NVidia Cuda Basic Linear Algebra Subprograms (BLAS) library support" OFF IF (CMAKE_VERSION VERSION_GREATER "2.8" AND NOT ANDROID AND NOT IOS) ) OCV_OPTION(WITH_NVCUVID "Include NVidia Video Decoding library support" OFF IF (CMAKE_VERSION VERSION_GREATER "2.8" AND NOT ANDROID AND NOT IOS AND NOT APPLE) ) OCV_OPTION(WITH_EIGEN "Include Eigen2/Eigen3 support" ON) +OCV_OPTION(WITH_VFW "Include Video for Windows support" ON IF (WIN32)) OCV_OPTION(WITH_FFMPEG "Include FFMPEG support" ON IF (NOT ANDROID AND NOT IOS)) OCV_OPTION(WITH_GSTREAMER "Include Gstreamer support" ON IF (UNIX AND NOT APPLE AND NOT ANDROID) ) OCV_OPTION(WITH_GTK "Include GTK support" ON IF (UNIX AND NOT APPLE AND NOT ANDROID) ) @@ -132,13 +133,15 @@ OCV_OPTION(WITH_PNG "Include PNG support" ON) OCV_OPTION(WITH_PVAPI "Include Prosilica GigE support" ON IF (NOT ANDROID AND NOT IOS) ) OCV_OPTION(WITH_GIGEAPI "Include Smartek GigE support" ON IF (NOT ANDROID AND NOT IOS) ) OCV_OPTION(WITH_QT "Build with Qt Backend support" OFF IF (NOT ANDROID AND NOT IOS) ) +OCV_OPTION(WITH_WIN32UI "Build with Win32 UI Backend support" ON IF (WIN32) ) OCV_OPTION(WITH_QUICKTIME "Use QuickTime for Video I/O insted of QTKit" OFF IF APPLE ) OCV_OPTION(WITH_TBB "Include Intel TBB support" OFF IF (NOT IOS) ) OCV_OPTION(WITH_CSTRIPES "Include C= support" OFF IF WIN32 ) OCV_OPTION(WITH_TIFF "Include TIFF support" ON IF (NOT IOS) ) OCV_OPTION(WITH_UNICAP "Include Unicap 
support (GPL)" OFF IF (UNIX AND NOT APPLE AND NOT ANDROID) ) OCV_OPTION(WITH_V4L "Include Video 4 Linux support" ON IF (UNIX AND NOT ANDROID) ) -OCV_OPTION(WITH_VIDEOINPUT "Build HighGUI with DirectShow support" ON IF WIN32 ) +OCV_OPTION(WITH_DSHOW "Build HighGUI with DirectShow support" ON IF (WIN32 AND NOT ARM) ) +OCV_OPTION(WITH_MSMF "Build HighGUI with Media Foundation support" OFF IF WIN32 ) OCV_OPTION(WITH_XIMEA "Include XIMEA cameras support" OFF IF (NOT ANDROID AND NOT APPLE) ) OCV_OPTION(WITH_XINE "Include Xine support (GPL)" OFF IF (UNIX AND NOT APPLE AND NOT ANDROID) ) OCV_OPTION(WITH_OPENCL "Include OpenCL Runtime support" ON IF (NOT ANDROID AND NOT IOS) ) @@ -169,7 +172,7 @@ OCV_OPTION(BUILD_JASPER "Build libjasper from source" WIN32 O OCV_OPTION(BUILD_JPEG "Build libjpeg from source" WIN32 OR ANDROID OR APPLE ) OCV_OPTION(BUILD_PNG "Build libpng from source" WIN32 OR ANDROID OR APPLE ) OCV_OPTION(BUILD_OPENEXR "Build openexr from source" WIN32 OR ANDROID OR APPLE ) -OCV_OPTION(BUILD_TBB "Download and build TBB from source" ANDROID IF CMAKE_COMPILER_IS_GNUCXX ) +OCV_OPTION(BUILD_TBB "Download and build TBB from source" ANDROID ) # OpenCV installation options # =================================================== @@ -598,8 +601,8 @@ else() if(DEFINED WITH_QT) status(" QT 4.x:" NO) endif() - if(WIN32) - status(" Win32 UI:" YES) + if(DEFINED WITH_WIN32UI) + status(" Win32 UI:" HAVE_WIN32UI THEN YES ELSE NO) else() if(APPLE) if(WITH_CARBON) @@ -656,6 +659,10 @@ endif() status("") status(" Video I/O:") +if (DEFINED WITH_VFW) + status(" Video for Windows:" HAVE_VFW THEN YES ELSE NO) +endif(DEFINED WITH_VFW) + if(DEFINED WITH_1394) status(" DC1394 1.x:" HAVE_DC1394 THEN "YES (ver ${ALIASOF_libdc1394_VERSION})" ELSE NO) status(" DC1394 2.x:" HAVE_DC1394_2 THEN "YES (ver ${ALIASOF_libdc1394-2_VERSION})" ELSE NO) @@ -738,9 +745,13 @@ if(DEFINED WITH_V4L) ELSE "${HAVE_CAMV4L_STR}/${HAVE_CAMV4L2_STR}") endif(DEFINED WITH_V4L) -if(DEFINED WITH_VIDEOINPUT) - status(" DirectShow:" HAVE_VIDEOINPUT THEN YES ELSE NO) -endif(DEFINED WITH_VIDEOINPUT) +if(DEFINED WITH_DSHOW) + status(" DirectShow:" HAVE_DSHOW THEN YES ELSE NO) +endif(DEFINED WITH_DSHOW) + +if(DEFINED WITH_MSMF) + status(" Media Foundation:" HAVE_MSMF THEN YES ELSE NO) +endif(DEFINED WITH_MSMF) if(DEFINED WITH_XIMEA) status(" XIMEA:" HAVE_XIMEA THEN YES ELSE NO) diff --git a/cmake/OpenCVDetectCUDA.cmake b/cmake/OpenCVDetectCUDA.cmake index e853a8d0a..91cf1504b 100644 --- a/cmake/OpenCVDetectCUDA.cmake +++ b/cmake/OpenCVDetectCUDA.cmake @@ -57,7 +57,7 @@ if(CUDA_FOUND) elseif(CUDA_GENERATION STREQUAL "Kepler") set(__cuda_arch_bin "3.0") elseif(CUDA_GENERATION STREQUAL "Auto") - execute_process( COMMAND "${CUDA_NVCC_EXECUTABLE}" "${OpenCV_SOURCE_DIR}/cmake/OpenCVDetectCudaArch.cu" "--run" + execute_process( COMMAND "${CUDA_NVCC_EXECUTABLE}" "${OpenCV_SOURCE_DIR}/cmake/checks/OpenCVDetectCudaArch.cu" "--run" WORKING_DIRECTORY "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/" RESULT_VARIABLE _nvcc_res OUTPUT_VARIABLE _nvcc_out ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) diff --git a/cmake/OpenCVDetectCXXCompiler.cmake b/cmake/OpenCVDetectCXXCompiler.cmake index 6e0278000..504004bce 100644 --- a/cmake/OpenCVDetectCXXCompiler.cmake +++ b/cmake/OpenCVDetectCXXCompiler.cmake @@ -103,4 +103,6 @@ if(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64.*|x86_64.*|AMD64.*" OR CMAKE_GENERATOR set(X86_64 1) elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "i686.*|i386.*|x86.*|amd64.*|AMD64.*") set(X86 1) +elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "arm.*|ARM.*") + 
set(ARM 1) endif() diff --git a/cmake/OpenCVFindLibsGUI.cmake b/cmake/OpenCVFindLibsGUI.cmake index c883a80ce..14095442d 100644 --- a/cmake/OpenCVFindLibsGUI.cmake +++ b/cmake/OpenCVFindLibsGUI.cmake @@ -2,6 +2,16 @@ # Detect 3rd-party GUI libraries # ---------------------------------------------------------------------------- +#--- Win32 UI --- +ocv_clear_vars(HAVE_WIN32UI) +if(WITH_WIN32UI) + TRY_COMPILE(HAVE_WIN32UI + "${OPENCV_BINARY_DIR}/CMakeFiles/CMakeTmp" + "${OpenCV_SOURCE_DIR}/cmake/checks/win32uitest.cpp" + CMAKE_FLAGS "\"user32.lib\" \"gdi32.lib\"" + OUTPUT_VARIABLE OUTPUT) +endif(WITH_WIN32UI) + # --- QT4 --- ocv_clear_vars(HAVE_QT) if(WITH_QT) diff --git a/cmake/OpenCVFindLibsVideo.cmake b/cmake/OpenCVFindLibsVideo.cmake index 414918527..fbb47d486 100644 --- a/cmake/OpenCVFindLibsVideo.cmake +++ b/cmake/OpenCVFindLibsVideo.cmake @@ -2,6 +2,15 @@ # Detect 3rd-party video IO libraries # ---------------------------------------------------------------------------- +ocv_clear_vars(HAVE_VFW) +if (WITH_VFW) + TRY_COMPILE(HAVE_VFW + "${OPENCV_BINARY_DIR}/CMakeFiles/CMakeTmp" + "${OpenCV_SOURCE_DIR}/cmake/checks/vfwtest.cpp" + CMAKE_FLAGS "-DLINK_LIBRARIES:STRING=vfw32" + OUTPUT_VARIABLE OUTPUT) + endif(WITH_VFW) + # --- GStreamer --- ocv_clear_vars(HAVE_GSTREAMER) if(WITH_GSTREAMER) @@ -37,7 +46,7 @@ if(WITH_PVAPI) set(PVAPI_SDK_SUBDIR x86) elseif(X86_64) set(PVAPI_SDK_SUBDIR x64) - elseif(CMAKE_SYSTEM_PROCESSOR MATCHES arm) + elseif(ARM) set(PVAPI_SDK_SUBDIR arm) endif() @@ -111,7 +120,7 @@ endif(WITH_XIMEA) # --- FFMPEG --- ocv_clear_vars(HAVE_FFMPEG HAVE_FFMPEG_CODEC HAVE_FFMPEG_FORMAT HAVE_FFMPEG_UTIL HAVE_FFMPEG_SWSCALE HAVE_GENTOO_FFMPEG HAVE_FFMPEG_FFMPEG) if(WITH_FFMPEG) - if(WIN32) + if(WIN32 AND NOT ARM) include("${OpenCV_SOURCE_DIR}/3rdparty/ffmpeg/ffmpeg_version.cmake") elseif(UNIX) CHECK_MODULE(libavcodec HAVE_FFMPEG_CODEC) @@ -175,11 +184,16 @@ if(WITH_FFMPEG) endif(APPLE) endif(WITH_FFMPEG) -# --- VideoInput --- -if(WITH_VIDEOINPUT) +# --- VideoInput/DirectShow --- +if(WITH_DSHOW) # always have VideoInput on Windows - set(HAVE_VIDEOINPUT 1) -endif(WITH_VIDEOINPUT) + set(HAVE_DSHOW 1) +endif(WITH_DSHOW) + +# --- VideoInput/Microsoft Media Foundation --- +if(WITH_MSMF) + check_include_file(Mfapi.h HAVE_MSMF) +endif(WITH_MSMF) # --- Extra HighGUI libs on Windows --- if(WIN32) diff --git a/cmake/OpenCVLegacyOptions.cmake b/cmake/OpenCVLegacyOptions.cmake index a34c9e5ab..e05ad4c48 100644 --- a/cmake/OpenCVLegacyOptions.cmake +++ b/cmake/OpenCVLegacyOptions.cmake @@ -12,6 +12,7 @@ endmacro() ocv_legacy_option(BUILD_NEW_PYTHON_SUPPORT BUILD_opencv_python) ocv_legacy_option(BUILD_JAVA_SUPPORT BUILD_opencv_java) ocv_legacy_option(WITH_ANDROID_CAMERA BUILD_opencv_androidcamera) +ocv_legacy_option(WITH_VIDEOINPUT WITH_DSHOW) if(DEFINED OPENCV_BUILD_3RDPARTY_LIBS) set(BUILD_ZLIB ${OPENCV_BUILD_3RDPARTY_LIBS} CACHE BOOL "Set via depricated OPENCV_BUILD_3RDPARTY_LIBS" FORCE) diff --git a/cmake/OpenCVDetectCudaArch.cu b/cmake/checks/OpenCVDetectCudaArch.cu similarity index 100% rename from cmake/OpenCVDetectCudaArch.cu rename to cmake/checks/OpenCVDetectCudaArch.cu diff --git a/cmake/checks/vfwtest.cpp b/cmake/checks/vfwtest.cpp new file mode 100644 index 000000000..63d545788 --- /dev/null +++ b/cmake/checks/vfwtest.cpp @@ -0,0 +1,10 @@ + +#include <windows.h> +#include <vfw.h> + +int main() +{ + AVIFileInit(); + AVIFileExit(); + return 0; +} \ No newline at end of file diff --git a/cmake/checks/win32uitest.cpp b/cmake/checks/win32uitest.cpp new file mode 100644 index 000000000..6f13a09cc --- /dev/null 
+++ b/cmake/checks/win32uitest.cpp @@ -0,0 +1,11 @@ +#include <windows.h> + +int main(int argc, char** argv) +{ + CreateWindow(NULL /*lpClassName*/, NULL /*lpWindowName*/, 0 /*dwStyle*/, 0 /*x*/, + 0 /*y*/, 0 /*nWidth*/, 0 /*nHeight*/, NULL /*hWndParent*/, NULL /*hMenu*/, + NULL /*hInstance*/, NULL /*lpParam*/); + DeleteDC(NULL); + + return 0; +} diff --git a/cmake/templates/cvconfig.h.cmake b/cmake/templates/cvconfig.h.cmake index 85522072e..db46af4b6 100644 --- a/cmake/templates/cvconfig.h.cmake +++ b/cmake/templates/cvconfig.h.cmake @@ -13,6 +13,9 @@ */ #cmakedefine HAVE_ALLOCA_H 1 +/* Video for Windows support */ +#cmakedefine HAVE_VFW + /* V4L capturing support */ #cmakedefine HAVE_CAMV4L @@ -55,6 +58,9 @@ /* GTK+ 2.0 Thread support */ #cmakedefine HAVE_GTHREAD +/* Win32 UI */ +#cmakedefine HAVE_WIN32UI + /* GTK+ 2.x toolkit */ #cmakedefine HAVE_GTK @@ -208,8 +214,11 @@ /* AMD's Basic Linear Algebra Subprograms Library*/ #cmakedefine HAVE_CLAMDBLAS -/* VideoInput library */ -#cmakedefine HAVE_VIDEOINPUT +/* DirectShow Video Capture library */ +#cmakedefine HAVE_DSHOW + +/* Microsoft Media Foundation Capture library */ +#cmakedefine HAVE_MSMF /* XIMEA camera support */ #cmakedefine HAVE_XIMEA diff --git a/modules/core/include/opencv2/core/types_c.h b/modules/core/include/opencv2/core/types_c.h index 33e7fe993..3a0830463 100644 --- a/modules/core/include/opencv2/core/types_c.h +++ b/modules/core/include/opencv2/core/types_c.h @@ -322,8 +322,12 @@ CV_INLINE int cvRound( double value ) return (int)lrint(value); # endif #else - // while this is not IEEE754-compliant rounding, it's usually a good enough approximation - return (int)(value + (value >= 0 ? 0.5 : -0.5)); + double intpart, fractpart; + fractpart = modf(value, &intpart); + if ((fabs(fractpart) != 0.5) || ((((int)intpart) % 2) != 0)) + return (int)(value + (value >= 0 ? 
0.5 : -0.5)); + else + return (int)intpart; #endif } diff --git a/modules/core/src/stat.cpp b/modules/core/src/stat.cpp index b62f10a2a..e069e5298 100644 --- a/modules/core/src/stat.cpp +++ b/modules/core/src/stat.cpp @@ -999,25 +999,22 @@ static int normHamming(const uchar* a, int n) { int i = 0, result = 0; #if CV_NEON - if (CPU_HAS_NEON_FEATURE) - { - uint32x4_t bits = vmovq_n_u32(0); - for (; i <= n - 16; i += 16) { - uint8x16_t A_vec = vld1q_u8 (a + i); - uint8x16_t bitsSet = vcntq_u8 (A_vec); - uint16x8_t bitSet8 = vpaddlq_u8 (bitsSet); - uint32x4_t bitSet4 = vpaddlq_u16 (bitSet8); - bits = vaddq_u32(bits, bitSet4); - } - uint64x2_t bitSet2 = vpaddlq_u32 (bits); - result = vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),0); - result += vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),2); + uint32x4_t bits = vmovq_n_u32(0); + for (; i <= n - 16; i += 16) { + uint8x16_t A_vec = vld1q_u8 (a + i); + uint8x16_t bitsSet = vcntq_u8 (A_vec); + uint16x8_t bitSet8 = vpaddlq_u8 (bitsSet); + uint32x4_t bitSet4 = vpaddlq_u16 (bitSet8); + bits = vaddq_u32(bits, bitSet4); } - else -#endif - for( ; i <= n - 4; i += 4 ) + uint64x2_t bitSet2 = vpaddlq_u32 (bits); + result = vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),0); + result += vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),2); +#else + for( ; i <= n - 4; i += 4 ) result += popCountTable[a[i]] + popCountTable[a[i+1]] + popCountTable[a[i+2]] + popCountTable[a[i+3]]; +#endif for( ; i < n; i++ ) result += popCountTable[a[i]]; return result; @@ -1027,27 +1024,24 @@ int normHamming(const uchar* a, const uchar* b, int n) { int i = 0, result = 0; #if CV_NEON - if (CPU_HAS_NEON_FEATURE) - { - uint32x4_t bits = vmovq_n_u32(0); - for (; i <= n - 16; i += 16) { - uint8x16_t A_vec = vld1q_u8 (a + i); - uint8x16_t B_vec = vld1q_u8 (b + i); - uint8x16_t AxorB = veorq_u8 (A_vec, B_vec); - uint8x16_t bitsSet = vcntq_u8 (AxorB); - uint16x8_t bitSet8 = vpaddlq_u8 (bitsSet); - uint32x4_t bitSet4 = vpaddlq_u16 (bitSet8); - bits = vaddq_u32(bits, bitSet4); - } - uint64x2_t bitSet2 = vpaddlq_u32 (bits); - result = vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),0); - result += vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),2); + uint32x4_t bits = vmovq_n_u32(0); + for (; i <= n - 16; i += 16) { + uint8x16_t A_vec = vld1q_u8 (a + i); + uint8x16_t B_vec = vld1q_u8 (b + i); + uint8x16_t AxorB = veorq_u8 (A_vec, B_vec); + uint8x16_t bitsSet = vcntq_u8 (AxorB); + uint16x8_t bitSet8 = vpaddlq_u8 (bitsSet); + uint32x4_t bitSet4 = vpaddlq_u16 (bitSet8); + bits = vaddq_u32(bits, bitSet4); } - else + uint64x2_t bitSet2 = vpaddlq_u32 (bits); + result = vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),0); + result += vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),2); +#else + for( ; i <= n - 4; i += 4 ) + result += popCountTable[a[i] ^ b[i]] + popCountTable[a[i+1] ^ b[i+1]] + + popCountTable[a[i+2] ^ b[i+2]] + popCountTable[a[i+3] ^ b[i+3]]; #endif - for( ; i <= n - 4; i += 4 ) - result += popCountTable[a[i] ^ b[i]] + popCountTable[a[i+1] ^ b[i+1]] + - popCountTable[a[i+2] ^ b[i+2]] + popCountTable[a[i+3] ^ b[i+3]]; for( ; i < n; i++ ) result += popCountTable[a[i] ^ b[i]]; return result; diff --git a/modules/core/test/test_arithm.cpp b/modules/core/test/test_arithm.cpp index ebc9eae64..a3e61f22a 100644 --- a/modules/core/test/test_arithm.cpp +++ b/modules/core/test/test_arithm.cpp @@ -1551,3 +1551,16 @@ TEST(Core_Add, AddToColumnWhen4Rows) ASSERT_EQ(0, countNonZero(m1 - m2)); } + +TEST(Core_round, CvRound) +{ + ASSERT_EQ(2, cvRound(2.0)); + ASSERT_EQ(2, cvRound(2.1)); + 
ASSERT_EQ(-2, cvRound(-2.1)); + ASSERT_EQ(3, cvRound(2.8)); + ASSERT_EQ(-3, cvRound(-2.8)); + ASSERT_EQ(2, cvRound(2.5)); + ASSERT_EQ(4, cvRound(3.5)); + ASSERT_EQ(-2, cvRound(-2.5)); + ASSERT_EQ(-4, cvRound(-3.5)); +} \ No newline at end of file diff --git a/modules/flann/include/opencv2/flann/dist.h b/modules/flann/include/opencv2/flann/dist.h index d2674305c..7380d0c5d 100644 --- a/modules/flann/include/opencv2/flann/dist.h +++ b/modules/flann/include/opencv2/flann/dist.h @@ -456,7 +456,6 @@ struct Hamming ResultType operator()(Iterator1 a, Iterator2 b, size_t size, ResultType /*worst_dist*/ = -1) const { ResultType result = 0; -#ifdef __GNUC__ #ifdef __ARM_NEON__ { uint32x4_t bits = vmovq_n_u32(0); @@ -473,7 +472,7 @@ struct Hamming result = vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),0); result += vgetq_lane_s32 (vreinterpretq_s32_u64(bitSet2),2); } -#else +#elif __GNUC__ { //for portability just use unsigned long -- and use the __builtin_popcountll (see docs for __builtin_popcountll) typedef unsigned long long pop_t; @@ -493,8 +492,8 @@ struct Hamming result += __builtin_popcountll(a_final ^ b_final); } } -#endif //NEON -#else +#else // NO NEON and NOT GNUC + typedef unsigned long long pop_t; HammingLUT lut; result = lut(reinterpret_cast (a), reinterpret_cast (b), size * sizeof(pop_t)); diff --git a/modules/highgui/CMakeLists.txt b/modules/highgui/CMakeLists.txt index 7e5fae39b..3eec81d11 100644 --- a/modules/highgui/CMakeLists.txt +++ b/modules/highgui/CMakeLists.txt @@ -89,7 +89,7 @@ if(HAVE_QT) if(${_have_flag}) set_source_files_properties(${_RCC_OUTFILES} PROPERTIES COMPILE_FLAGS -Wno-missing-declarations) endif() -elseif(WIN32) +elseif(HAVE_WIN32UI) list(APPEND highgui_srcs src/window_w32.cpp) elseif(HAVE_GTK) list(APPEND highgui_srcs src/window_gtk.cpp) @@ -105,9 +105,21 @@ elseif(APPLE) endif() endif() -if(WIN32) - list(APPEND highgui_srcs src/cap_vfw.cpp src/cap_cmu.cpp src/cap_dshow.cpp) -endif(WIN32) +if(WIN32 AND NOT ARM) + list(APPEND highgui_srcs src/cap_cmu.cpp) +endif() + +if (WIN32 AND HAVE_DSHOW) + list(APPEND highgui_srcs src/cap_dshow.cpp) +endif() + +if (WIN32 AND HAVE_MSMF) + list(APPEND highgui_srcs src/cap_msmf.cpp) +endif() + +if (WIN32 AND HAVE_VFW) + list(APPEND highgui_srcs src/cap_vfw.cpp) +endif() if(HAVE_XINE) list(APPEND highgui_srcs src/cap_xine.cpp) diff --git a/modules/highgui/include/opencv2/highgui/highgui_c.h b/modules/highgui/include/opencv2/highgui/highgui_c.h index 9c7166fc9..58840cbd3 100644 --- a/modules/highgui/include/opencv2/highgui/highgui_c.h +++ b/modules/highgui/include/opencv2/highgui/highgui_c.h @@ -297,6 +297,7 @@ enum CV_CAP_UNICAP =600, // Unicap drivers CV_CAP_DSHOW =700, // DirectShow (via videoInput) + CV_CAP_MSMF =1400, // Microsoft Media Foundation (via videoInput) CV_CAP_PVAPI =800, // PvAPI, Prosilica GigE SDK diff --git a/modules/highgui/perf/perf_precomp.hpp b/modules/highgui/perf/perf_precomp.hpp index ec8a447fa..529187d3b 100644 --- a/modules/highgui/perf/perf_precomp.hpp +++ b/modules/highgui/perf/perf_precomp.hpp @@ -20,9 +20,9 @@ defined(HAVE_GSTREAMER) || \ defined(HAVE_QUICKTIME) || \ defined(HAVE_AVFOUNDATION) || \ - /*defined(HAVE_OPENNI) || too specialized */ \ defined(HAVE_FFMPEG) || \ - defined(WIN32) /* assume that we have ffmpeg */ + defined(HAVE_VFW) + /*defined(HAVE_OPENNI) too specialized */ \ # define BUILD_WITH_VIDEO_INPUT_SUPPORT 1 #else @@ -34,7 +34,7 @@ defined(HAVE_QUICKTIME) || \ defined(HAVE_AVFOUNDATION) || \ defined(HAVE_FFMPEG) || \ - defined(WIN32) /* assume that we have ffmpeg */ + 
defined(HAVE_VFW) # define BUILD_WITH_VIDEO_OUTPUT_SUPPORT 1 #else # define BUILD_WITH_VIDEO_OUTPUT_SUPPORT 0 diff --git a/modules/highgui/src/cap.cpp b/modules/highgui/src/cap.cpp index 13475f263..2c3b3a94c 100644 --- a/modules/highgui/src/cap.cpp +++ b/modules/highgui/src/cap.cpp @@ -114,7 +114,7 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index) { int domains[] = { -#ifdef HAVE_VIDEOINPUT +#ifdef HAVE_DSHOW CV_CAP_DSHOW, #endif #if 1 @@ -168,7 +168,8 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index) // try every possibly installed camera API for (int i = 0; domains[i] >= 0; i++) { -#if defined(HAVE_VIDEOINPUT) || \ +#if defined(HAVE_DSHOW) || \ + defined(HAVE_MSMF) || \ defined(HAVE_TYZX) || \ defined(HAVE_VFW) || \ defined(HAVE_LIBV4L) || \ @@ -195,11 +196,18 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index) switch (domains[i]) { -#ifdef HAVE_VIDEOINPUT +#ifdef HAVE_MSMF + case CV_CAP_MSMF: + capture = cvCreateCameraCapture_MSMF (index); + if (capture) + return capture; + break; +#endif +#ifdef HAVE_DSHOW case CV_CAP_DSHOW: - capture = cvCreateCameraCapture_DShow (index); - if (capture) - return capture; + capture = cvCreateCameraCapture_DShow (index); + if (capture) + return capture; break; #endif diff --git a/modules/highgui/src/cap_dshow.cpp b/modules/highgui/src/cap_dshow.cpp index c2513d788..21fb947b1 100644 --- a/modules/highgui/src/cap_dshow.cpp +++ b/modules/highgui/src/cap_dshow.cpp @@ -41,7 +41,7 @@ #include "precomp.hpp" -#if (defined WIN32 || defined _WIN32) && defined HAVE_VIDEOINPUT +#if (defined WIN32 || defined _WIN32) && defined HAVE_DSHOW /* DirectShow-based Video Capturing module is based on @@ -3100,6 +3100,7 @@ HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter * return hr; } + /********************* Capturing video from camera via DirectShow *********************/ class CvCaptureCAM_DShow : public CvCapture diff --git a/modules/highgui/src/cap_ffmpeg.cpp b/modules/highgui/src/cap_ffmpeg.cpp index 657502acf..669ebda12 100644 --- a/modules/highgui/src/cap_ffmpeg.cpp +++ b/modules/highgui/src/cap_ffmpeg.cpp @@ -209,7 +209,7 @@ CvCapture* cvCreateFileCapture_FFMPEG_proxy(const char * filename) if( result->open( filename )) return result; delete result; -#if defined WIN32 || defined _WIN32 +#ifdef HAVE_VFW return cvCreateFileCapture_VFW(filename); #else return 0; @@ -263,9 +263,9 @@ CvVideoWriter* cvCreateVideoWriter_FFMPEG_proxy( const char* filename, int fourc if( result->open( filename, fourcc, fps, frameSize, isColor != 0 )) return result; delete result; -#if defined WIN32 || defined _WIN32 - return cvCreateVideoWriter_VFW(filename, fourcc, fps, frameSize, isColor); -#else +#ifdef HAVE_VFW + return cvCreateVideoWriter_VFW(filename, fourcc, fps, frameSize, isColor); + #else return 0; #endif } diff --git a/modules/highgui/src/cap_msmf.cpp b/modules/highgui/src/cap_msmf.cpp new file mode 100644 index 000000000..52b780463 --- /dev/null +++ b/modules/highgui/src/cap_msmf.cpp @@ -0,0 +1,2810 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// +// +// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. +// +// By downloading, copying, installing or using the software you agree to this license. +// If you do not agree to this license, do not download, install, +// copy or use the software. +// +// +// Intel License Agreement +// For Open Source Computer Vision Library +// +// Copyright (C) 2000, Intel Corporation, all rights reserved. 
+// Third party copyrights are property of their respective owners. +// +// Redistribution and use in source and binary forms, with or without modification, +// are permitted provided that the following conditions are met: +// +// * Redistribution's of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// * Redistribution's in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// * The name of Intel Corporation may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// +// This software is provided by the copyright holders and contributors "as is" and +// any express or implied warranties, including, but not limited to, the implied +// warranties of merchantability and fitness for a particular purpose are disclaimed. +// In no event shall the Intel Corporation or contributors be liable for any direct, +// indirect, incidental, special, exemplary, or consequential damages +// (including, but not limited to, procurement of substitute goods or services; +// loss of use, data, or profits; or business interruption) however caused +// and on any theory of liability, whether in contract, strict liability, +// or tort (including negligence or otherwise) arising in any way out of +// the use of this software, even if advised of the possibility of such damage. +// +//M*/ +#include "precomp.hpp" +#if (defined WIN32 || defined _WIN32) && defined HAVE_MSMF +/* + Media Foundation-based Video Capturing module is based on + videoInput library by Evgeny Pereguda: + http://www.codeproject.com/Articles/559437/Capturing-of-video-from-web-camera-on-Windows-7-an + Originaly licensed under The Code Project Open License (CPOL) 1.02: + http://www.codeproject.com/info/cpol10.aspx +*/ +#include +#include +#include +#include +#include +#include +#include "Strsafe.h" +#include +#include +#include +#include +#include +#include +#include +#pragma warning(disable:4503) +#pragma comment(lib, "mfplat") +#pragma comment(lib, "mf") +#pragma comment(lib, "mfuuid") +#pragma comment(lib, "Strmiids") +#pragma comment(lib, "MinCore_Downlevel") +struct IMFMediaType; +struct IMFActivate; +struct IMFMediaSource; +struct IMFAttributes; +namespace +{ +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + /// Class for printing info into consol +class DebugPrintOut +{ +public: + ~DebugPrintOut(void); + static DebugPrintOut& getInstance(); + void printOut(const wchar_t *format, ...); + void setVerbose(bool state); + bool verbose; +private: + DebugPrintOut(void); +}; +// Structure for collecting info about types of video, which are supported by current video device +struct MediaType +{ + unsigned int MF_MT_FRAME_SIZE; + unsigned int height; + unsigned int width; + unsigned int MF_MT_YUV_MATRIX; + unsigned int MF_MT_VIDEO_LIGHTING; + unsigned int MF_MT_DEFAULT_STRIDE; + unsigned int MF_MT_VIDEO_CHROMA_SITING; + GUID MF_MT_AM_FORMAT_TYPE; + wchar_t *pMF_MT_AM_FORMAT_TYPEName; + unsigned int MF_MT_FIXED_SIZE_SAMPLES; + unsigned int MF_MT_VIDEO_NOMINAL_RANGE; + unsigned int MF_MT_FRAME_RATE; + unsigned int MF_MT_FRAME_RATE_low; + unsigned int MF_MT_PIXEL_ASPECT_RATIO; + unsigned int MF_MT_PIXEL_ASPECT_RATIO_low; + unsigned int MF_MT_ALL_SAMPLES_INDEPENDENT; + unsigned int MF_MT_FRAME_RATE_RANGE_MIN; + unsigned int 
MF_MT_FRAME_RATE_RANGE_MIN_low; + unsigned int MF_MT_SAMPLE_SIZE; + unsigned int MF_MT_VIDEO_PRIMARIES; + unsigned int MF_MT_INTERLACE_MODE; + unsigned int MF_MT_FRAME_RATE_RANGE_MAX; + unsigned int MF_MT_FRAME_RATE_RANGE_MAX_low; + GUID MF_MT_MAJOR_TYPE; + GUID MF_MT_SUBTYPE; + wchar_t *pMF_MT_MAJOR_TYPEName; + wchar_t *pMF_MT_SUBTYPEName; + MediaType(); + ~MediaType(); + void Clear(); +}; +/// Class for parsing info from IMFMediaType into the local MediaType +class FormatReader +{ +public: + static MediaType Read(IMFMediaType *pType); + ~FormatReader(void); +private: + FormatReader(void); +}; +DWORD WINAPI MainThreadFunction( LPVOID lpParam ); +typedef void(*emergensyStopEventCallback)(int, void *); +typedef unsigned char BYTE; +class RawImage +{ +public: + ~RawImage(void); + // Function of creation of the instance of the class + static long CreateInstance(RawImage **ppRImage,unsigned int size); + void setCopy(const BYTE * pSampleBuffer); + void fastCopy(const BYTE * pSampleBuffer); + unsigned char * getpPixels(); + bool isNew(); + unsigned int getSize(); +private: + bool ri_new; + unsigned int ri_size; + unsigned char *ri_pixels; + RawImage(unsigned int size); +}; +// Class for grabbing image from video stream +class ImageGrabber : public IMFSampleGrabberSinkCallback +{ +public: + ~ImageGrabber(void); + HRESULT initImageGrabber(IMFMediaSource *pSource, GUID VideoFormat); + HRESULT startGrabbing(void); + void stopGrabbing(); + RawImage *getRawImage(); + // Function of creation of the instance of the class + static HRESULT CreateInstance(ImageGrabber **ppIG,unsigned int deviceID); +private: + bool ig_RIE; + bool ig_Close; + long m_cRef; + unsigned int ig_DeviceID; + IMFMediaSource *ig_pSource; + IMFMediaSession *ig_pSession; + IMFTopology *ig_pTopology; + RawImage *ig_RIFirst; + RawImage *ig_RISecond; + RawImage *ig_RIOut; + ImageGrabber(unsigned int deviceID); + HRESULT CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivate, IMFTopology **ppTopo); + HRESULT AddSourceNode( + IMFTopology *pTopology, + IMFMediaSource *pSource, + IMFPresentationDescriptor *pPD, + IMFStreamDescriptor *pSD, + IMFTopologyNode **ppNode); + HRESULT AddOutputNode( + IMFTopology *pTopology, + IMFActivate *pActivate, + DWORD dwId, + IMFTopologyNode **ppNode); + // IUnknown methods + STDMETHODIMP QueryInterface(REFIID iid, void** ppv); + STDMETHODIMP_(ULONG) AddRef(); + STDMETHODIMP_(ULONG) Release(); + // IMFClockStateSink methods + STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset); + STDMETHODIMP OnClockStop(MFTIME hnsSystemTime); + STDMETHODIMP OnClockPause(MFTIME hnsSystemTime); + STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime); + STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate); + // IMFSampleGrabberSinkCallback methods + STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock); + STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags, + LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer, + DWORD dwSampleSize); + STDMETHODIMP OnShutdown(); +}; +/// Class for controlling of thread of the grabbing raw data from video device +class ImageGrabberThread +{ + friend DWORD WINAPI MainThreadFunction( LPVOID lpParam ); +public: + ~ImageGrabberThread(void); + static HRESULT CreateInstance(ImageGrabberThread **ppIGT, IMFMediaSource *pSource, unsigned int deviceID); + void start(); + void stop(); + void setEmergencyStopEvent(void *userData, void(*func)(int, void *)); + ImageGrabber *getImageGrabber(); +protected: 
+ virtual void run(); +private: + ImageGrabberThread(IMFMediaSource *pSource, unsigned int deviceID); + HANDLE igt_Handle; + DWORD igt_ThreadIdArray; + ImageGrabber *igt_pImageGrabber; + emergensyStopEventCallback igt_func; + void *igt_userData; + bool igt_stop; + unsigned int igt_DeviceID; +}; +// Structure for collecting info about one parametr of current video device +struct Parametr +{ + long CurrentValue; + long Min; + long Max; + long Step; + long Default; + long Flag; + Parametr(); +}; +// Structure for collecting info about 17 parametrs of current video device +struct CamParametrs +{ + Parametr Brightness; + Parametr Contrast; + Parametr Hue; + Parametr Saturation; + Parametr Sharpness; + Parametr Gamma; + Parametr ColorEnable; + Parametr WhiteBalance; + Parametr BacklightCompensation; + Parametr Gain; + Parametr Pan; + Parametr Tilt; + Parametr Roll; + Parametr Zoom; + Parametr Exposure; + Parametr Iris; + Parametr Focus; +}; +typedef std::wstring String; +typedef std::vector vectorNum; +typedef std::map SUBTYPEMap; +typedef std::map FrameRateMap; +typedef void(*emergensyStopEventCallback)(int, void *); +/// Class for controlling of video device +class videoDevice +{ +public: + videoDevice(void); + ~videoDevice(void); + void closeDevice(); + CamParametrs getParametrs(); + void setParametrs(CamParametrs parametrs); + void setEmergencyStopEvent(void *userData, void(*func)(int, void *)); + long readInfoOfDevice(IMFActivate *pActivate, unsigned int Num); + wchar_t *getName(); + int getCountFormats(); + unsigned int getWidth(); + unsigned int getHeight(); + MediaType getFormat(unsigned int id); + bool setupDevice(unsigned int w, unsigned int h, unsigned int idealFramerate = 0); + bool setupDevice(unsigned int id); + bool isDeviceSetup(); + bool isDeviceMediaSource(); + bool isDeviceRawDataSource(); + bool isFrameNew(); + IMFMediaSource *getMediaSource(); + RawImage *getRawImageOut(); +private: + enum typeLock + { + MediaSourceLock, + RawDataLock, + OpenLock + } vd_LockOut; + wchar_t *vd_pFriendlyName; + ImageGrabberThread *vd_pImGrTh; + CamParametrs vd_PrevParametrs; + unsigned int vd_Width; + unsigned int vd_Height; + unsigned int vd_CurrentNumber; + bool vd_IsSetuped; + std::map vd_CaptureFormats; + std::vector vd_CurrentFormats; + IMFMediaSource *vd_pSource; + emergensyStopEventCallback vd_func; + void *vd_userData; + long enumerateCaptureFormats(IMFMediaSource *pSource); + long setDeviceFormat(IMFMediaSource *pSource, unsigned long dwFormatIndex); + void buildLibraryofTypes(); + int findType(unsigned int size, unsigned int frameRate = 0); + long resetDevice(IMFActivate *pActivate); + long initDevice(); + long checkDevice(IMFAttributes *pAttributes, IMFActivate **pDevice); +}; +/// Class for managing of list of video devices +class videoDevices +{ +public: + ~videoDevices(void); + long initDevices(IMFAttributes *pAttributes); + static videoDevices& getInstance(); + videoDevice *getDevice(unsigned int i); + unsigned int getCount(); + void clearDevices(); +private: + UINT32 count; + std::vector vds_Devices; + videoDevices(void); +}; +// Class for creating of Media Foundation context +class Media_Foundation +{ +public: + virtual ~Media_Foundation(void); + static Media_Foundation& getInstance(); + bool buildListOfDevices(); +private: + Media_Foundation(void); +}; +/// The only visiable class for controlling of video devices in format singelton +class videoInput +{ +public: + virtual ~videoInput(void); + // Getting of static instance of videoInput class + static videoInput& 
getInstance(); + // Closing video device with deviceID + void closeDevice(int deviceID); + // Setting callback function for emergency events(for example: removing video device with deviceID) with userData + void setEmergencyStopEvent(int deviceID, void *userData, void(*func)(int, void *)); + // Closing all devices + void closeAllDevices(); + // Getting of parametrs of video device with deviceID + CamParametrs getParametrs(int deviceID); + // Setting of parametrs of video device with deviceID + void setParametrs(int deviceID, CamParametrs parametrs); + // Getting numbers of existence videodevices with listing in consol + unsigned int listDevices(bool silent = false); + // Getting numbers of formats, which are supported by videodevice with deviceID + unsigned int getCountFormats(int deviceID); + // Getting width of image, which is getting from videodevice with deviceID + unsigned int getWidth(int deviceID); + // Getting height of image, which is getting from videodevice with deviceID + unsigned int getHeight(int deviceID); + // Getting name of videodevice with deviceID + wchar_t *getNameVideoDevice(int deviceID); + // Getting interface MediaSource for Media Foundation from videodevice with deviceID + IMFMediaSource *getMediaSource(int deviceID); + // Getting format with id, which is supported by videodevice with deviceID + MediaType getFormat(int deviceID, int unsigned id); + // Checking of existence of the suitable video devices + bool isDevicesAcceable(); + // Checking of using the videodevice with deviceID + bool isDeviceSetup(int deviceID); + // Checking of using MediaSource from videodevice with deviceID + bool isDeviceMediaSource(int deviceID); + // Checking of using Raw Data of pixels from videodevice with deviceID + bool isDeviceRawDataSource(int deviceID); + // Setting of the state of outprinting info in console + static void setVerbose(bool state); + // Initialization of video device with deviceID by media type with id + bool setupDevice(int deviceID, unsigned int id = 0); + // Initialization of video device with deviceID by wisth w, height h and fps idealFramerate + bool setupDevice(int deviceID, unsigned int w, unsigned int h, unsigned int idealFramerate = 30); + // Checking of recivig of new frame from video device with deviceID + bool isFrameNew(int deviceID); + // Writing of Raw Data pixels from video device with deviceID with correction of RedAndBlue flipping flipRedAndBlue and vertical flipping flipImage + bool getPixels(int deviceID, unsigned char * pixels, bool flipRedAndBlue = false, bool flipImage = false); +private: + bool accessToDevices; + videoInput(void); + void processPixels(unsigned char * src, unsigned char * dst, unsigned int width, unsigned int height, unsigned int bpp, bool bRGB, bool bFlip); + void updateListOfDevices(); +}; +DebugPrintOut::DebugPrintOut(void):verbose(true) +{ +} +DebugPrintOut::~DebugPrintOut(void) +{ +} +DebugPrintOut& DebugPrintOut::getInstance() +{ + static DebugPrintOut instance; + return instance; +} +void DebugPrintOut::printOut(const wchar_t *format, ...) 
+{ + if(verbose) + { + int i = 0; + wchar_t *p = NULL; + va_list args; + va_start(args, format); + if(wcscmp(format, L"%i")) + { + i = va_arg (args, int); + } + if(wcscmp(format, L"%s")) + { + p = va_arg (args, wchar_t *); + } + wprintf(format, i,p); + va_end (args); + } +} +void DebugPrintOut::setVerbose(bool state) +{ + verbose = state; +} +LPCWSTR GetGUIDNameConstNew(const GUID& guid); +HRESULT GetGUIDNameNew(const GUID& guid, WCHAR **ppwsz); +HRESULT LogAttributeValueByIndexNew(IMFAttributes *pAttr, DWORD index); +HRESULT SpecialCaseAttributeValueNew(GUID guid, const PROPVARIANT& var, MediaType &out); +unsigned int *GetParametr(GUID guid, MediaType &out) +{ + if(guid == MF_MT_YUV_MATRIX) + return &(out.MF_MT_YUV_MATRIX); + if(guid == MF_MT_VIDEO_LIGHTING) + return &(out.MF_MT_VIDEO_LIGHTING); + if(guid == MF_MT_DEFAULT_STRIDE) + return &(out.MF_MT_DEFAULT_STRIDE); + if(guid == MF_MT_VIDEO_CHROMA_SITING) + return &(out.MF_MT_VIDEO_CHROMA_SITING); + if(guid == MF_MT_VIDEO_NOMINAL_RANGE) + return &(out.MF_MT_VIDEO_NOMINAL_RANGE); + if(guid == MF_MT_ALL_SAMPLES_INDEPENDENT) + return &(out.MF_MT_ALL_SAMPLES_INDEPENDENT); + if(guid == MF_MT_FIXED_SIZE_SAMPLES) + return &(out.MF_MT_FIXED_SIZE_SAMPLES); + if(guid == MF_MT_SAMPLE_SIZE) + return &(out.MF_MT_SAMPLE_SIZE); + if(guid == MF_MT_VIDEO_PRIMARIES) + return &(out.MF_MT_VIDEO_PRIMARIES); + if(guid == MF_MT_INTERLACE_MODE) + return &(out.MF_MT_INTERLACE_MODE); + return NULL; +} +HRESULT LogAttributeValueByIndexNew(IMFAttributes *pAttr, DWORD index, MediaType &out) +{ + WCHAR *pGuidName = NULL; + WCHAR *pGuidValName = NULL; + GUID guid = { 0 }; + PROPVARIANT var; + PropVariantInit(&var); + HRESULT hr = pAttr->GetItemByIndex(index, &guid, &var); + if (FAILED(hr)) + { + goto done; + } + hr = GetGUIDNameNew(guid, &pGuidName); + if (FAILED(hr)) + { + goto done; + } + hr = SpecialCaseAttributeValueNew(guid, var, out); + unsigned int *p; + if (FAILED(hr)) + { + goto done; + } + if (hr == S_FALSE) + { + switch (var.vt) + { + case VT_UI4: + p = GetParametr(guid, out); + if(p) + { + *p = var.ulVal; + } + break; + case VT_UI8: + break; + case VT_R8: + break; + case VT_CLSID: + if(guid == MF_MT_AM_FORMAT_TYPE) + { + hr = GetGUIDNameNew(*var.puuid, &pGuidValName); + if (SUCCEEDED(hr)) + { + out.MF_MT_AM_FORMAT_TYPE = MF_MT_AM_FORMAT_TYPE; + out.pMF_MT_AM_FORMAT_TYPEName = pGuidValName; + pGuidValName = NULL; + } + } + if(guid == MF_MT_MAJOR_TYPE) + { + hr = GetGUIDNameNew(*var.puuid, &pGuidValName); + if (SUCCEEDED(hr)) + { + out.MF_MT_MAJOR_TYPE = MF_MT_MAJOR_TYPE; + out.pMF_MT_MAJOR_TYPEName = pGuidValName; + pGuidValName = NULL; + } + } + if(guid == MF_MT_SUBTYPE) + { + hr = GetGUIDNameNew(*var.puuid, &pGuidValName); + if (SUCCEEDED(hr)) + { + out.MF_MT_SUBTYPE = MF_MT_SUBTYPE; + out.pMF_MT_SUBTYPEName = pGuidValName; + pGuidValName = NULL; + } + } + break; + case VT_LPWSTR: + break; + case VT_VECTOR | VT_UI1: + break; + case VT_UNKNOWN: + break; + default: + break; + } + } +done: + CoTaskMemFree(pGuidName); + CoTaskMemFree(pGuidValName); + PropVariantClear(&var); + return hr; +} +HRESULT GetGUIDNameNew(const GUID& guid, WCHAR **ppwsz) +{ + HRESULT hr = S_OK; + WCHAR *pName = NULL; + LPCWSTR pcwsz = GetGUIDNameConstNew(guid); + if (pcwsz) + { + size_t cchLength = 0; + hr = StringCchLengthW(pcwsz, STRSAFE_MAX_CCH, &cchLength); + if (FAILED(hr)) + { + goto done; + } + pName = (WCHAR*)CoTaskMemAlloc((cchLength + 1) * sizeof(WCHAR)); + if (pName == NULL) + { + hr = E_OUTOFMEMORY; + goto done; + } + hr = StringCchCopyW(pName, cchLength + 1, pcwsz); + if 
(FAILED(hr)) + { + goto done; + } + } + else + { + hr = StringFromCLSID(guid, &pName); + } +done: + if (FAILED(hr)) + { + *ppwsz = NULL; + CoTaskMemFree(pName); + } + else + { + *ppwsz = pName; + } + return hr; +} +void LogUINT32AsUINT64New(const PROPVARIANT& var, UINT32 &uHigh, UINT32 &uLow) +{ + Unpack2UINT32AsUINT64(var.uhVal.QuadPart, &uHigh, &uLow); +} +float OffsetToFloatNew(const MFOffset& offset) +{ + return offset.value + (static_cast(offset.fract) / 65536.0f); +} +HRESULT LogVideoAreaNew(const PROPVARIANT& var) +{ + if (var.caub.cElems < sizeof(MFVideoArea)) + { + return S_OK; + } + return S_OK; +} +HRESULT SpecialCaseAttributeValueNew(GUID guid, const PROPVARIANT& var, MediaType &out) +{ + if (guid == MF_MT_FRAME_SIZE) + { + UINT32 uHigh = 0, uLow = 0; + LogUINT32AsUINT64New(var, uHigh, uLow); + out.width = uHigh; + out.height = uLow; + out.MF_MT_FRAME_SIZE = out.width * out.height; + } + else + if (guid == MF_MT_FRAME_RATE) + { + UINT32 uHigh = 0, uLow = 0; + LogUINT32AsUINT64New(var, uHigh, uLow); + out.MF_MT_FRAME_RATE = uHigh; + out.MF_MT_FRAME_RATE_low = uLow; + } + else + if (guid == MF_MT_FRAME_RATE_RANGE_MAX) + { + UINT32 uHigh = 0, uLow = 0; + LogUINT32AsUINT64New(var, uHigh, uLow); + out.MF_MT_FRAME_RATE_RANGE_MAX = uHigh; + out.MF_MT_FRAME_RATE_RANGE_MAX_low = uLow; + } + else + if (guid == MF_MT_FRAME_RATE_RANGE_MIN) + { + UINT32 uHigh = 0, uLow = 0; + LogUINT32AsUINT64New(var, uHigh, uLow); + out.MF_MT_FRAME_RATE_RANGE_MIN = uHigh; + out.MF_MT_FRAME_RATE_RANGE_MIN_low = uLow; + } + else + if (guid == MF_MT_PIXEL_ASPECT_RATIO) + { + UINT32 uHigh = 0, uLow = 0; + LogUINT32AsUINT64New(var, uHigh, uLow); + out.MF_MT_PIXEL_ASPECT_RATIO = uHigh; + out.MF_MT_PIXEL_ASPECT_RATIO_low = uLow; + } + else + { + return S_FALSE; + } + return S_OK; +} +#ifndef IF_EQUAL_RETURN +#define IF_EQUAL_RETURN(param, val) if(val == param) return L#val +#endif +LPCWSTR GetGUIDNameConstNew(const GUID& guid) +{ + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE); + IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT); + IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES); + IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED); + IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX); + IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN); + IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO); + IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS); + 
IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING); + IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE); + IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE); + IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE); + IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING); + IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE); + IF_EQUAL_RETURN(guid, MF_MT_PALETTE); + IF_EQUAL_RETURN(guid, MF_MT_USER_DATA); + IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT); + IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG); + // Media types + IF_EQUAL_RETURN(guid, MFMediaType_Audio); + IF_EQUAL_RETURN(guid, MFMediaType_Video); + IF_EQUAL_RETURN(guid, MFMediaType_Protected); + IF_EQUAL_RETURN(guid, MFMediaType_SAMI); + IF_EQUAL_RETURN(guid, MFMediaType_Script); + IF_EQUAL_RETURN(guid, MFMediaType_Image); + IF_EQUAL_RETURN(guid, MFMediaType_HTML); + IF_EQUAL_RETURN(guid, MFMediaType_Binary); + IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer); + IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44') + IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl') + IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264') + IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420') + IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2') + IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG); + IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V') + IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11') + 
IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010') + IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016') + IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210') + IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216') + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8); + IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY') + IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210') + IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3') + IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P); + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T); + IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2') + IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU); + IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM + IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT + IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS + IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // WAVE_FORMAT_DOLBY_AC3_SPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS + IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9 + IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3 + IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG + IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC + IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // WAVE_FORMAT_MPEG_ADTS_AAC + return NULL; +} +FormatReader::FormatReader(void) +{ +} +MediaType FormatReader::Read(IMFMediaType *pType) +{ + UINT32 count = 0; + HRESULT hr = S_OK; + MediaType out; + hr = pType->LockStore(); + if (FAILED(hr)) + { + return out; + } + hr = pType->GetCount(&count); + if (FAILED(hr)) + { + return out; + } + for (UINT32 i = 0; i < count; i++) + { + hr = LogAttributeValueByIndexNew(pType, i, out); + if (FAILED(hr)) + { + break; + } + } + hr = pType->UnlockStore(); + if (FAILED(hr)) + { + return out; + } + return out; +} +FormatReader::~FormatReader(void) +{ +} +#define CHECK_HR(x) if (FAILED(x)) { goto done; } +ImageGrabber::ImageGrabber(unsigned int deviceID): m_cRef(1), ig_DeviceID(deviceID), ig_pSource(NULL), ig_pSession(NULL), ig_pTopology(NULL), ig_RIE(true), ig_Close(false) +{ +} +ImageGrabber::~ImageGrabber(void) +{ + if (ig_pSession) + { + ig_pSession->Shutdown(); + } + //SafeRelease(&ig_pSession); + //SafeRelease(&ig_pTopology); + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + 
DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: Destroing instance of the ImageGrabber class \n", ig_DeviceID); +} +HRESULT ImageGrabber::initImageGrabber(IMFMediaSource *pSource, GUID VideoFormat) +{ + IMFActivate *pSinkActivate = NULL; + IMFMediaType *pType = NULL; + IMFPresentationDescriptor *pPD = NULL; + IMFStreamDescriptor *pSD = NULL; + IMFMediaTypeHandler *pHandler = NULL; + IMFMediaType *pCurrentType = NULL; + HRESULT hr = S_OK; + MediaType MT; + // Clean up. + if (ig_pSession) + { + ig_pSession->Shutdown(); + } + SafeRelease(&ig_pSession); + SafeRelease(&ig_pTopology); + ig_pSource = pSource; + hr = pSource->CreatePresentationDescriptor(&pPD); + if (FAILED(hr)) + goto err; + BOOL fSelected; + hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD); + if (FAILED(hr)) + goto err; + hr = pSD->GetMediaTypeHandler(&pHandler); + if (FAILED(hr)) + goto err; + DWORD cTypes = 0; + hr = pHandler->GetMediaTypeCount(&cTypes); + if (FAILED(hr)) + goto err; + if(cTypes > 0) + { + hr = pHandler->GetCurrentMediaType(&pCurrentType); + if (FAILED(hr)) + goto err; + MT = FormatReader::Read(pCurrentType); + } +err: + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + SafeRelease(&pCurrentType); + unsigned int sizeRawImage = 0; + if(VideoFormat == MFVideoFormat_RGB24) + { + sizeRawImage = MT.MF_MT_FRAME_SIZE * 3; + } + else if(VideoFormat == MFVideoFormat_RGB32) + { + sizeRawImage = MT.MF_MT_FRAME_SIZE * 4; + } + CHECK_HR(hr = RawImage::CreateInstance(&ig_RIFirst, sizeRawImage)); + CHECK_HR(hr = RawImage::CreateInstance(&ig_RISecond, sizeRawImage)); + ig_RIOut = ig_RISecond; + // Configure the media type that the Sample Grabber will receive. + // Setting the major and subtype is usually enough for the topology loader + // to resolve the topology. + CHECK_HR(hr = MFCreateMediaType(&pType)); + CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)); + CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, VideoFormat)); + // Create the sample grabber sink. + CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pType, this, &pSinkActivate)); + // To run as fast as possible, set this attribute (requires Windows 7): + CHECK_HR(hr = pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE)); + // Create the Media Session. + CHECK_HR(hr = MFCreateMediaSession(NULL, &ig_pSession)); + // Create the topology. + CHECK_HR(hr = CreateTopology(pSource, pSinkActivate, &ig_pTopology)); +done: + // Clean up. 
+ if (FAILED(hr)) + { + if (ig_pSession) + { + ig_pSession->Shutdown(); + } + SafeRelease(&ig_pSession); + SafeRelease(&ig_pTopology); + } + SafeRelease(&pSinkActivate); + SafeRelease(&pType); + return hr; +} +void ImageGrabber::stopGrabbing() +{ + if(ig_pSession) + ig_pSession->Stop(); + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: Stopping of of grabbing of images\n", ig_DeviceID); +} +HRESULT ImageGrabber::startGrabbing(void) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + IMFMediaEvent *pEvent = NULL; + PROPVARIANT var; + PropVariantInit(&var); + HRESULT hr = S_OK; + CHECK_HR(hr = ig_pSession->SetTopology(0, ig_pTopology)); + CHECK_HR(hr = ig_pSession->Start(&GUID_NULL, &var)); + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: Start Grabbing of the images\n", ig_DeviceID); + for(;;) + { + HRESULT hrStatus = S_OK; + MediaEventType met; + if(!ig_pSession) break; + hr = ig_pSession->GetEvent(0, &pEvent); + if(!SUCCEEDED(hr)) + { + hr = S_OK; + goto done; + } + hr = pEvent->GetStatus(&hrStatus); + if(!SUCCEEDED(hr)) + { + hr = S_OK; + goto done; + } + hr = pEvent->GetType(&met); + if(!SUCCEEDED(hr)) + { + hr = S_OK; + goto done; + } + if (met == MESessionEnded) + { + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: MESessionEnded \n", ig_DeviceID); + ig_pSession->Stop(); + break; + } + if (met == MESessionStopped) + { + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: MESessionStopped \n", ig_DeviceID); + break; + } + if (met == MEVideoCaptureDeviceRemoved) + { + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: MEVideoCaptureDeviceRemoved \n", ig_DeviceID); + break; + } + SafeRelease(&pEvent); + } + DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: Finish startGrabbing \n", ig_DeviceID); +done: + SafeRelease(&pEvent); + SafeRelease(&ig_pSession); + SafeRelease(&ig_pTopology); + return hr; +} +HRESULT ImageGrabber::CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivate, IMFTopology **ppTopo) +{ + IMFTopology *pTopology = NULL; + IMFPresentationDescriptor *pPD = NULL; + IMFStreamDescriptor *pSD = NULL; + IMFMediaTypeHandler *pHandler = NULL; + IMFTopologyNode *pNode1 = NULL; + IMFTopologyNode *pNode2 = NULL; + HRESULT hr = S_OK; + DWORD cStreams = 0; + CHECK_HR(hr = MFCreateTopology(&pTopology)); + CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD)); + CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams)); + for (DWORD i = 0; i < cStreams; i++) + { + // In this example, we look for audio streams and connect them to the sink. + BOOL fSelected = FALSE; + GUID majorType; + CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD)); + CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler)); + CHECK_HR(hr = pHandler->GetMajorType(&majorType)); + if (majorType == MFMediaType_Video && fSelected) + { + CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNode1)); + CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pNode2)); + CHECK_HR(hr = pNode1->ConnectOutput(0, pNode2, 0)); + break; + } + else + { + CHECK_HR(hr = pPD->DeselectStream(i)); + } + SafeRelease(&pSD); + SafeRelease(&pHandler); + } + *ppTopo = pTopology; + (*ppTopo)->AddRef(); +done: + SafeRelease(&pTopology); + SafeRelease(&pNode1); + SafeRelease(&pNode2); + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + return hr; +} +HRESULT ImageGrabber::AddSourceNode( + IMFTopology *pTopology, // Topology. + IMFMediaSource *pSource, // Media source. + IMFPresentationDescriptor *pPD, // Presentation descriptor. 
+    IMFStreamDescriptor *pSD,           // Stream descriptor.
+    IMFTopologyNode **ppNode)           // Receives the node pointer.
+{
+    IMFTopologyNode *pNode = NULL;
+    HRESULT hr = S_OK;
+    CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
+    CHECK_HR(hr = pTopology->AddNode(pNode));
+    // Return the pointer to the caller.
+    *ppNode = pNode;
+    (*ppNode)->AddRef();
+done:
+    SafeRelease(&pNode);
+    return hr;
+}
+HRESULT ImageGrabber::AddOutputNode(
+    IMFTopology *pTopology,             // Topology.
+    IMFActivate *pActivate,             // Media sink activation object.
+    DWORD dwId,                         // Identifier of the stream sink.
+    IMFTopologyNode **ppNode)           // Receives the node pointer.
+{
+    IMFTopologyNode *pNode = NULL;
+    HRESULT hr = S_OK;
+    CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
+    CHECK_HR(hr = pNode->SetObject(pActivate));
+    CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+    CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+    CHECK_HR(hr = pTopology->AddNode(pNode));
+    // Return the pointer to the caller.
+    *ppNode = pNode;
+    (*ppNode)->AddRef();
+done:
+    SafeRelease(&pNode);
+    return hr;
+}
+HRESULT ImageGrabber::CreateInstance(ImageGrabber **ppIG, unsigned int deviceID)
+{
+    *ppIG = new (std::nothrow) ImageGrabber(deviceID);
+    if (*ppIG == NULL) // new (std::nothrow) reports allocation failure through a null result
+    {
+        return E_OUTOFMEMORY;
+    }
+    DebugPrintOut *DPO = &DebugPrintOut::getInstance();
+    DPO->printOut(L"IMAGEGRABBER VIDEODEVICE %i: Creating instance of ImageGrabber\n", deviceID);
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::QueryInterface(REFIID riid, void** ppv)
+{
+    HRESULT hr = E_NOINTERFACE;
+    *ppv = NULL;
+    if(riid == IID_IUnknown || riid == IID_IMFSampleGrabberSinkCallback)
+    {
+        *ppv = static_cast<IMFSampleGrabberSinkCallback *>(this);
+        hr = S_OK;
+    }
+    if(riid == IID_IMFClockStateSink)
+    {
+        *ppv = static_cast<IMFClockStateSink *>(this);
+        hr = S_OK;
+    }
+    if(SUCCEEDED(hr))
+    {
+        reinterpret_cast<IUnknown *>(*ppv)->AddRef();
+    }
+    return hr;
+}
+STDMETHODIMP_(ULONG) ImageGrabber::AddRef()
+{
+    return InterlockedIncrement(&m_cRef);
+}
+STDMETHODIMP_(ULONG) ImageGrabber::Release()
+{
+    ULONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0)
+    {
+        delete this;
+    }
+    return cRef;
+}
+STDMETHODIMP ImageGrabber::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
+{
+    (void)hnsSystemTime;
+    (void)llClockStartOffset;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnClockStop(MFTIME hnsSystemTime)
+{
+    (void)hnsSystemTime;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnClockPause(MFTIME hnsSystemTime)
+{
+    (void)hnsSystemTime;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnClockRestart(MFTIME hnsSystemTime)
+{
+    (void)hnsSystemTime;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
+{
+    (void)flRate;
+    (void)hnsSystemTime;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnSetPresentationClock(IMFPresentationClock* pClock)
+{
+    (void)pClock;
+    return S_OK;
+}
+STDMETHODIMP ImageGrabber::OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+    LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+    DWORD dwSampleSize)
+{
+    (void)guidMajorMediaType;
+    (void)llSampleTime;
+    (void)dwSampleFlags;
+    (void)llSampleDuration;
+    (void)dwSampleSize;
+    // Ping-pong between the two RawImage buffers so the reader always sees a complete frame.
+    if(ig_RIE)
+    {
+        ig_RIFirst->fastCopy(pSampleBuffer);
+        ig_RIOut = ig_RIFirst;
+    }
+    else
+    {
ig_RISecond->fastCopy(pSampleBuffer); + ig_RIOut = ig_RISecond; + } + ig_RIE = !ig_RIE; + return S_OK; +} +STDMETHODIMP ImageGrabber::OnShutdown() +{ + return S_OK; +} +RawImage *ImageGrabber::getRawImage() +{ + return ig_RIOut; +} +DWORD WINAPI MainThreadFunction( LPVOID lpParam ) +{ + ImageGrabberThread *pIGT = (ImageGrabberThread *)lpParam; + pIGT->run(); + return 0; +} +HRESULT ImageGrabberThread::CreateInstance(ImageGrabberThread **ppIGT, IMFMediaSource *pSource, unsigned int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + *ppIGT = new (std::nothrow) ImageGrabberThread(pSource, deviceID); + if (ppIGT == NULL) + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Memory cannot be allocated\n", deviceID); + return E_OUTOFMEMORY; + } + else + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Creating of the instance of ImageGrabberThread\n", deviceID); + return S_OK; +} +ImageGrabberThread::ImageGrabberThread(IMFMediaSource *pSource, unsigned int deviceID): igt_Handle(NULL), igt_stop(false) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + HRESULT hr = ImageGrabber::CreateInstance(&igt_pImageGrabber, deviceID); + igt_DeviceID = deviceID; + if(SUCCEEDED(hr)) + { + hr = igt_pImageGrabber->initImageGrabber(pSource, MFVideoFormat_RGB24); + if(!SUCCEEDED(hr)) + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: There is a problem with initialization of the instance of the ImageGrabber class\n", deviceID); + } + else + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Initialization of instance of the ImageGrabber class\n", deviceID); + } + } + else + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i There is a problem with creation of the instance of the ImageGrabber class\n", deviceID); + } +} +void ImageGrabberThread::setEmergencyStopEvent(void *userData, void(*func)(int, void *)) +{ + if(func) + { + igt_func = func; + igt_userData = userData; + } +} +ImageGrabberThread::~ImageGrabberThread(void) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Destroing ImageGrabberThread\n", igt_DeviceID); + delete igt_pImageGrabber; +} +void ImageGrabberThread::stop() +{ + igt_stop = true; + if(igt_pImageGrabber) + { + igt_pImageGrabber->stopGrabbing(); + } +} +void ImageGrabberThread::start() +{ + igt_Handle = CreateThread( + NULL, // default security attributes + 0, // use default stack size + MainThreadFunction, // thread function name + this, // argument to thread function + 0, // use default creation flags + &igt_ThreadIdArray); // returns the thread identifier +} +void ImageGrabberThread::run() +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if(igt_pImageGrabber) + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Thread for grabbing images is started\n", igt_DeviceID); + HRESULT hr = igt_pImageGrabber->startGrabbing(); + if(!SUCCEEDED(hr)) + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: There is a problem with starting the process of grabbing\n", igt_DeviceID); + } + } + else + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i The thread is finished without execution of grabbing\n", igt_DeviceID); + } + if(!igt_stop) + { + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Emergency Stop thread\n", igt_DeviceID); + if(igt_func) + { + igt_func(igt_DeviceID, igt_userData); + } + } + else + DPO->printOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Finish thread\n", igt_DeviceID); +} +ImageGrabber *ImageGrabberThread::getImageGrabber() +{ + return igt_pImageGrabber; +} 
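The grabber classes added above fit together as follows: ImageGrabber receives decoded frames in OnProcessSample() and ping-pongs them between two RawImage buffers, while ImageGrabberThread runs the blocking startGrabbing() event loop on a dedicated Win32 thread. A minimal usage sketch, not part of the patch: it assumes an already activated IMFMediaSource named pSource and a device index deviceID, relies only on the members visible in this hunk, and omits error handling and the emergency-stop callback.

    // Hypothetical driver code; videoDevice::isFrameNew()/getRawImageOut() below do essentially this.
    ImageGrabberThread *pThread = NULL;
    if (SUCCEEDED(ImageGrabberThread::CreateInstance(&pThread, pSource, deviceID)))
    {
        pThread->start();                                 // spawns MainThreadFunction -> run() -> startGrabbing()
        RawImage *ri = pThread->getImageGrabber()->getRawImage();
        if (ri && ri->isNew())
        {
            unsigned char *pixels = ri->getpPixels();     // RGB24 data, sized in initImageGrabber()
            (void)pixels;                                 // hand the buffer to the consumer
        }
        pThread->stop();                                  // Stop()s the session; run() then returns
        delete pThread;                                   // destructor also deletes the ImageGrabber
    }

This mirrors the lock handling in videoDevice further down, where RawDataLock marks the device as owned by exactly one grabber thread.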
+Media_Foundation::Media_Foundation(void) +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"MEDIA FOUNDATION: It cannot be created!!!\n"); + } +} +Media_Foundation::~Media_Foundation(void) +{ + HRESULT hr = MFShutdown(); + if(!SUCCEEDED(hr)) + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"MEDIA FOUNDATION: Resources cannot be released\n"); + } +} +bool Media_Foundation::buildListOfDevices() +{ + HRESULT hr = S_OK; + IMFAttributes *pAttributes = NULL; + CoInitialize(NULL); + hr = MFCreateAttributes(&pAttributes, 1); + if (SUCCEEDED(hr)) + { + hr = pAttributes->SetGUID( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID + ); + } + if (SUCCEEDED(hr)) + { + videoDevices *vDs = &videoDevices::getInstance(); + hr = vDs->initDevices(pAttributes); + } + else + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"MEDIA FOUNDATION: The access to the video cameras denied\n"); + } + SafeRelease(&pAttributes); + return (SUCCEEDED(hr)); +} +Media_Foundation& Media_Foundation::getInstance() +{ + static Media_Foundation instance; + return instance; +} +RawImage::RawImage(unsigned int size): ri_new(false), ri_pixels(NULL) +{ + ri_size = size; + ri_pixels = new unsigned char[size]; + memset((void *)ri_pixels,0,ri_size); +} +bool RawImage::isNew() +{ + return ri_new; +} +unsigned int RawImage::getSize() +{ + return ri_size; +} +RawImage::~RawImage(void) +{ + delete []ri_pixels; + ri_pixels = NULL; +} +long RawImage::CreateInstance(RawImage **ppRImage,unsigned int size) +{ + *ppRImage = new (std::nothrow) RawImage(size); + if (ppRImage == NULL) + { + return E_OUTOFMEMORY; + } + return S_OK; +} +void RawImage::setCopy(const BYTE * pSampleBuffer) +{ + memcpy(ri_pixels, pSampleBuffer, ri_size); + ri_new = true; +} +void RawImage::fastCopy(const BYTE * pSampleBuffer) +{ + memcpy(ri_pixels, pSampleBuffer, ri_size); + ri_new = true; +} +unsigned char * RawImage::getpPixels() +{ + ri_new = false; + return ri_pixels; +} +videoDevice::videoDevice(void): vd_IsSetuped(false), vd_LockOut(OpenLock), vd_pFriendlyName(NULL), + vd_Width(0), vd_Height(0), vd_pSource(NULL), vd_func(NULL), vd_userData(NULL) +{ +} +void videoDevice::setParametrs(CamParametrs parametrs) +{ + if(vd_IsSetuped) + { + if(vd_pSource) + { + Parametr *pParametr = (Parametr *)(¶metrs); + Parametr *pPrevParametr = (Parametr *)(&vd_PrevParametrs); + IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if (SUCCEEDED(hr)) + { + for(unsigned int i = 0; i < 10; i++) + { + if(pPrevParametr[i].CurrentValue != pParametr[i].CurrentValue || pPrevParametr[i].Flag != pParametr[i].Flag) + hr = pProcAmp->Set(VideoProcAmp_Brightness + i, pParametr[i].CurrentValue, pParametr[i].Flag); + } + pProcAmp->Release(); + } + IAMCameraControl *pProcControl = NULL; + hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if (SUCCEEDED(hr)) + { + for(unsigned int i = 0; i < 7; i++) + { + if(pPrevParametr[10 + i].CurrentValue != pParametr[10 + i].CurrentValue || pPrevParametr[10 + i].Flag != pParametr[10 + i].Flag) + hr = pProcControl->Set(CameraControl_Pan+i, pParametr[10 + i].CurrentValue, pParametr[10 + i].Flag); + } + pProcControl->Release(); + } + vd_PrevParametrs = parametrs; + } + } +} +CamParametrs videoDevice::getParametrs() +{ + CamParametrs out; + if(vd_IsSetuped) + { + if(vd_pSource) + { + Parametr *pParametr = (Parametr *)(&out); + 
IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if (SUCCEEDED(hr)) + { + for(unsigned int i = 0; i < 10; i++) + { + Parametr temp; + hr = pProcAmp->GetRange(VideoProcAmp_Brightness+i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag); + if (SUCCEEDED(hr)) + { + temp.CurrentValue = temp.Default; + pParametr[i] = temp; + } + } + pProcAmp->Release(); + } + IAMCameraControl *pProcControl = NULL; + hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if (SUCCEEDED(hr)) + { + for(unsigned int i = 0; i < 7; i++) + { + Parametr temp; + hr = pProcControl->GetRange(CameraControl_Pan+i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag); + if (SUCCEEDED(hr)) + { + temp.CurrentValue = temp.Default; + pParametr[10 + i] = temp; + } + } + pProcControl->Release(); + } + } + } + return out; +} +long videoDevice::resetDevice(IMFActivate *pActivate) +{ + HRESULT hr = -1; + vd_CurrentFormats.clear(); + if(vd_pFriendlyName) + CoTaskMemFree(vd_pFriendlyName); + vd_pFriendlyName = NULL; + if(pActivate) + { + IMFMediaSource *pSource = NULL; + hr = pActivate->GetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, + &vd_pFriendlyName, + NULL + ); + hr = pActivate->ActivateObject( + __uuidof(IMFMediaSource), + (void**)&pSource + ); + enumerateCaptureFormats(pSource); + buildLibraryofTypes(); + SafeRelease(&pSource); + if(FAILED(hr)) + { + vd_pFriendlyName = NULL; + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"VIDEODEVICE %i: IMFMediaSource interface cannot be created \n", vd_CurrentNumber); + } + } + return hr; +} +long videoDevice::readInfoOfDevice(IMFActivate *pActivate, unsigned int Num) +{ + HRESULT hr = -1; + vd_CurrentNumber = Num; + hr = resetDevice(pActivate); + return hr; +} +long videoDevice::checkDevice(IMFAttributes *pAttributes, IMFActivate **pDevice) +{ + HRESULT hr = S_OK; + IMFActivate **ppDevices = NULL; + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + UINT32 count; + wchar_t *newFriendlyName = NULL; + hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count); + if (SUCCEEDED(hr)) + { + if(count > 0) + { + if(count > vd_CurrentNumber) + { + hr = ppDevices[vd_CurrentNumber]->GetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, + &newFriendlyName, + NULL + ); + if (SUCCEEDED(hr)) + { + if(wcscmp(newFriendlyName, vd_pFriendlyName) != 0) + { + DPO->printOut(L"VIDEODEVICE %i: Chosen device cannot be found \n", vd_CurrentNumber); + hr = -1; + pDevice = NULL; + } + else + { + *pDevice = ppDevices[vd_CurrentNumber]; + (*pDevice)->AddRef(); + } + } + else + { + DPO->printOut(L"VIDEODEVICE %i: Name of device cannot be gotten \n", vd_CurrentNumber); + } + } + else + { + DPO->printOut(L"VIDEODEVICE %i: Number of devices more than corrent number of the device \n", vd_CurrentNumber); + hr = -1; + } + for(UINT32 i = 0; i < count; i++) + { + SafeRelease(&ppDevices[i]); + } + SafeRelease(ppDevices); + } + else + hr = -1; + } + else + { + DPO->printOut(L"VIDEODEVICE %i: List of DeviceSources cannot be enumerated \n", vd_CurrentNumber); + } + return hr; +} +long videoDevice::initDevice() +{ + HRESULT hr = -1; + IMFAttributes *pAttributes = NULL; + IMFActivate * vd_pActivate= NULL; + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + CoInitialize(NULL); + hr = MFCreateAttributes(&pAttributes, 1); + if (SUCCEEDED(hr)) + { + hr = pAttributes->SetGUID( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID + ); + } + if (SUCCEEDED(hr)) + { + hr = 
checkDevice(pAttributes, &vd_pActivate);
+        if (SUCCEEDED(hr) && vd_pActivate)
+        {
+            SafeRelease(&vd_pSource);
+            hr = vd_pActivate->ActivateObject(
+                __uuidof(IMFMediaSource),
+                (void**)&vd_pSource
+                );
+            if (SUCCEEDED(hr))
+            {
+            }
+            SafeRelease(&vd_pActivate);
+        }
+        else
+        {
+            DPO->printOut(L"VIDEODEVICE %i: Device is not found \n", vd_CurrentNumber);
+        }
+    }
+    else
+    {
+        DPO->printOut(L"VIDEODEVICE %i: The attributes of the video camera cannot be obtained \n", vd_CurrentNumber);
+    }
+    SafeRelease(&pAttributes);
+    return hr;
+}
+MediaType videoDevice::getFormat(unsigned int id)
+{
+    if(id < vd_CurrentFormats.size())
+    {
+        return vd_CurrentFormats[id];
+    }
+    else return MediaType();
+}
+int videoDevice::getCountFormats()
+{
+    return vd_CurrentFormats.size();
+}
+void videoDevice::setEmergencyStopEvent(void *userData, void(*func)(int, void *))
+{
+    vd_func = func;
+    vd_userData = userData;
+}
+void videoDevice::closeDevice()
+{
+    if(vd_IsSetuped)
+    {
+        vd_IsSetuped = false;
+        vd_pSource->Stop();
+        SafeRelease(&vd_pSource);
+        if(vd_LockOut == RawDataLock)
+        {
+            vd_pImGrTh->stop();
+            Sleep(500);
+            delete vd_pImGrTh;
+        }
+        vd_pImGrTh = NULL;
+        vd_LockOut = OpenLock;
+        DebugPrintOut *DPO = &DebugPrintOut::getInstance();
+        DPO->printOut(L"VIDEODEVICE %i: Device is stopped \n", vd_CurrentNumber);
+    }
+}
+unsigned int videoDevice::getWidth()
+{
+    if(vd_IsSetuped)
+        return vd_Width;
+    else
+        return 0;
+}
+unsigned int videoDevice::getHeight()
+{
+    if(vd_IsSetuped)
+        return vd_Height;
+    else
+        return 0;
+}
+IMFMediaSource *videoDevice::getMediaSource()
+{
+    IMFMediaSource *out = NULL;
+    if(vd_LockOut == OpenLock)
+    {
+        vd_LockOut = MediaSourceLock;
+        out = vd_pSource;
+    }
+    return out;
+}
+int videoDevice::findType(unsigned int size, unsigned int frameRate)
+{
+    if(vd_CaptureFormats.size() == 0)
+        return 0;
+    FrameRateMap FRM = vd_CaptureFormats[size];
+    if(FRM.size() == 0)
+        return 0;
+    UINT64 frameRateMax = 0;  SUBTYPEMap STMMax;
+    if(frameRate == 0)
+    {
+        // No preferred frame rate: pick the highest rate available for this frame size.
+        std::map<UINT64, SUBTYPEMap>::iterator f = FRM.begin();
+        for(; f != FRM.end(); f++)
+        {
+            if((*f).first >= frameRateMax)
+            {
+                frameRateMax = (*f).first;
+                STMMax = (*f).second;
+            }
+        }
+    }
+    else
+    {
+        // Preferred frame rate given: pick the highest rate that stays below it.
+        std::map<UINT64, SUBTYPEMap>::iterator f = FRM.begin();
+        for(; f != FRM.end(); f++)
+        {
+            if((*f).first >= frameRateMax)
+            {
+                if(frameRate > (*f).first)
+                {
+                    frameRateMax = (*f).first;
+                    STMMax = (*f).second;
+                }
+            }
+        }
+    }
+    if(STMMax.size() == 0)
+        return 0;
+    std::map<String, vectorNum>::iterator S = STMMax.begin();
+    vectorNum VN = (*S).second;
+    if(VN.size() == 0)
+        return 0;
+    return VN[0];
+}
+void videoDevice::buildLibraryofTypes()
+{
+    unsigned int size;
+    unsigned int framerate;
+    std::vector<MediaType>::iterator i = vd_CurrentFormats.begin();
+    int count = 0;
+    for(; i != vd_CurrentFormats.end(); i++)
+    {
+        size = (*i).MF_MT_FRAME_SIZE;
+        framerate = (*i).MF_MT_FRAME_RATE;
+        FrameRateMap FRM = vd_CaptureFormats[size];
+        SUBTYPEMap STM = FRM[framerate];
+        String subType((*i).pMF_MT_SUBTYPEName);
+        vectorNum VN = STM[subType];
+        VN.push_back(count);
+        STM[subType] = VN;
+        FRM[framerate] = STM;
+        vd_CaptureFormats[size] = FRM;
+        count++;
+    }
+}
+long videoDevice::setDeviceFormat(IMFMediaSource *pSource, unsigned long dwFormatIndex)
+{
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFMediaType *pType = NULL;
+    HRESULT hr = pSource->CreatePresentationDescriptor(&pPD);
+    if (FAILED(hr))
+    {
+        goto done;
+    }
+    BOOL fSelected;
+    hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
+    if (FAILED(hr))
+    {
+        goto done;
+    }
+    hr =
pSD->GetMediaTypeHandler(&pHandler); + if (FAILED(hr)) + { + goto done; + } + hr = pHandler->GetMediaTypeByIndex((DWORD)dwFormatIndex, &pType); + if (FAILED(hr)) + { + goto done; + } + hr = pHandler->SetCurrentMediaType(pType); +done: + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + SafeRelease(&pType); + return hr; +} +bool videoDevice::isDeviceSetup() +{ + return vd_IsSetuped; +} +RawImage * videoDevice::getRawImageOut() +{ + if(!vd_IsSetuped) return NULL; + if(vd_pImGrTh) + return vd_pImGrTh->getImageGrabber()->getRawImage(); + else + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"VIDEODEVICE %i: The instance of ImageGrabberThread class does not exist \n", vd_CurrentNumber); + } + return NULL; +} +bool videoDevice::isFrameNew() +{ + if(!vd_IsSetuped) return false; + if(vd_LockOut == RawDataLock || vd_LockOut == OpenLock) + { + if(vd_LockOut == OpenLock) + { + vd_LockOut = RawDataLock; + HRESULT hr = ImageGrabberThread::CreateInstance(&vd_pImGrTh, vd_pSource, vd_CurrentNumber); + if(FAILED(hr)) + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"VIDEODEVICE %i: The instance of ImageGrabberThread class cannot be created.\n", vd_CurrentNumber); + return false; + } + vd_pImGrTh->setEmergencyStopEvent(vd_userData, vd_func); + vd_pImGrTh->start(); + return true; + } + if(vd_pImGrTh) + return vd_pImGrTh->getImageGrabber()->getRawImage()->isNew(); + } + return false; +} +bool videoDevice::isDeviceMediaSource() +{ + if(vd_LockOut == MediaSourceLock) return true; + return false; +} +bool videoDevice::isDeviceRawDataSource() +{ + if(vd_LockOut == RawDataLock) return true; + return false; +} +bool videoDevice::setupDevice(unsigned int id) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if(!vd_IsSetuped) + { + HRESULT hr = -1; + hr = initDevice(); + if(SUCCEEDED(hr)) + { + vd_Width = vd_CurrentFormats[id].width; + vd_Height = vd_CurrentFormats[id].height; + hr = setDeviceFormat(vd_pSource, (DWORD) id); + vd_IsSetuped = (SUCCEEDED(hr)); + if(vd_IsSetuped) + DPO->printOut(L"\n\nVIDEODEVICE %i: Device is setuped \n", vd_CurrentNumber); + vd_PrevParametrs = getParametrs(); + return vd_IsSetuped; + } + else + { + DPO->printOut(L"VIDEODEVICE %i: Interface IMFMediaSource cannot be got \n", vd_CurrentNumber); + return false; + } + } + else + { + DPO->printOut(L"VIDEODEVICE %i: Device is setuped already \n", vd_CurrentNumber); + return false; + } +} +bool videoDevice::setupDevice(unsigned int w, unsigned int h, unsigned int idealFramerate) +{ + unsigned int id = findType(w * h, idealFramerate); + return setupDevice(id); +} +wchar_t *videoDevice::getName() +{ + return vd_pFriendlyName; +} +videoDevice::~videoDevice(void) +{ + closeDevice(); + SafeRelease(&vd_pSource); + if(vd_pFriendlyName) + CoTaskMemFree(vd_pFriendlyName); +} +long videoDevice::enumerateCaptureFormats(IMFMediaSource *pSource) +{ + IMFPresentationDescriptor *pPD = NULL; + IMFStreamDescriptor *pSD = NULL; + IMFMediaTypeHandler *pHandler = NULL; + IMFMediaType *pType = NULL; + HRESULT hr = pSource->CreatePresentationDescriptor(&pPD); + if (FAILED(hr)) + { + goto done; + } + BOOL fSelected; + hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD); + if (FAILED(hr)) + { + goto done; + } + hr = pSD->GetMediaTypeHandler(&pHandler); + if (FAILED(hr)) + { + goto done; + } + DWORD cTypes = 0; + hr = pHandler->GetMediaTypeCount(&cTypes); + if (FAILED(hr)) + { + goto done; + } + for (DWORD i = 0; i < cTypes; i++) + { + hr = pHandler->GetMediaTypeByIndex(i, &pType); + 
if (FAILED(hr)) + { + goto done; + } + MediaType MT = FormatReader::Read(pType); + vd_CurrentFormats.push_back(MT); + SafeRelease(&pType); + } +done: + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + SafeRelease(&pType); + return hr; +} +videoDevices::videoDevices(void): count(0) +{} +void videoDevices::clearDevices() +{ + std::vector::iterator i = vds_Devices.begin(); + for(; i != vds_Devices.end(); ++i) + delete (*i); + vds_Devices.clear(); +} +videoDevices::~videoDevices(void) +{ + clearDevices(); +} +videoDevice * videoDevices::getDevice(unsigned int i) +{ + if(i >= vds_Devices.size()) + { + return NULL; + } + if(i < 0) + { + return NULL; + } + return vds_Devices[i]; +} +long videoDevices::initDevices(IMFAttributes *pAttributes) +{ + HRESULT hr = S_OK; + IMFActivate **ppDevices = NULL; + clearDevices(); + hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count); + if (SUCCEEDED(hr)) + { + if(count > 0) + { + for(UINT32 i = 0; i < count; i++) + { + videoDevice *vd = new videoDevice; + vd->readInfoOfDevice(ppDevices[i], i); + vds_Devices.push_back(vd); + SafeRelease(&ppDevices[i]); + } + SafeRelease(ppDevices); + } + else + hr = -1; + } + else + { + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"VIDEODEVICES: The instances of the videoDevice class cannot be created\n"); + } + return hr; +} +size_t videoDevices::getCount() +{ + return vds_Devices.size(); +} +videoDevices& videoDevices::getInstance() +{ + static videoDevices instance; + return instance; +} +Parametr::Parametr() +{ + CurrentValue = 0; + Min = 0; + Max = 0; + Step = 0; + Default = 0; + Flag = 0; +} +MediaType::MediaType() +{ + pMF_MT_AM_FORMAT_TYPEName = NULL; + pMF_MT_MAJOR_TYPEName = NULL; + pMF_MT_SUBTYPEName = NULL; + Clear(); +} +MediaType::~MediaType() +{ + Clear(); +} +void MediaType::Clear() +{ + MF_MT_FRAME_SIZE = 0; + height = 0; + width = 0; + MF_MT_YUV_MATRIX = 0; + MF_MT_VIDEO_LIGHTING = 0; + MF_MT_DEFAULT_STRIDE = 0; + MF_MT_VIDEO_CHROMA_SITING = 0; + MF_MT_FIXED_SIZE_SAMPLES = 0; + MF_MT_VIDEO_NOMINAL_RANGE = 0; + MF_MT_FRAME_RATE = 0; + MF_MT_FRAME_RATE_low = 0; + MF_MT_PIXEL_ASPECT_RATIO = 0; + MF_MT_PIXEL_ASPECT_RATIO_low = 0; + MF_MT_ALL_SAMPLES_INDEPENDENT = 0; + MF_MT_FRAME_RATE_RANGE_MIN = 0; + MF_MT_FRAME_RATE_RANGE_MIN_low = 0; + MF_MT_SAMPLE_SIZE = 0; + MF_MT_VIDEO_PRIMARIES = 0; + MF_MT_INTERLACE_MODE = 0; + MF_MT_FRAME_RATE_RANGE_MAX = 0; + MF_MT_FRAME_RATE_RANGE_MAX_low = 0; + memset(&MF_MT_MAJOR_TYPE, 0, sizeof(GUID)); + memset(&MF_MT_AM_FORMAT_TYPE, 0, sizeof(GUID)); + memset(&MF_MT_SUBTYPE, 0, sizeof(GUID)); +} +videoInput::videoInput(void): accessToDevices(false) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"\n***** VIDEOINPUT LIBRARY - 2013 (Author: Evgeny Pereguda) *****\n\n"); + updateListOfDevices(); + if(!accessToDevices) + DPO->printOut(L"INITIALIZATION: Ther is not any suitable video device\n"); +} +void videoInput::updateListOfDevices() +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + Media_Foundation *MF = &Media_Foundation::getInstance(); + accessToDevices = MF->buildListOfDevices(); + if(!accessToDevices) + DPO->printOut(L"UPDATING: Ther is not any suitable video device\n"); +} +videoInput::~videoInput(void) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->printOut(L"\n***** CLOSE VIDEOINPUT LIBRARY - 2013 *****\n\n"); +} +IMFMediaSource *videoInput::getMediaSource(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if(accessToDevices) + { + videoDevices *VDS = 
&videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + { + IMFMediaSource *out = VD->getMediaSource(); + if(!out) + DPO->printOut(L"VideoDevice %i: There is not any suitable IMFMediaSource interface\n", deviceID); + return out; + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return NULL; +} +bool videoInput::setupDevice(int deviceID, unsigned int id) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0 ) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + { + bool out = VD->setupDevice(id); + if(!out) + DPO->printOut(L"VIDEODEVICE %i: This device cannot be started\n", deviceID); + return out; + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return false; +} +bool videoInput::setupDevice(int deviceID, unsigned int w, unsigned int h, unsigned int idealFramerate) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0 ) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + { + bool out = VD->setupDevice(w, h, idealFramerate); + if(!out) + DPO->printOut(L"VIDEODEVICE %i: this device cannot be started\n", deviceID); + return out; + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n", deviceID); + } + return false; +} +MediaType videoInput::getFormat(int deviceID, unsigned int id) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return MediaType(); + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->getFormat(id); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return MediaType(); +} +bool videoInput::isDeviceSetup(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->isDeviceSetup(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return false; +} +bool videoInput::isDeviceMediaSource(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->isDeviceMediaSource(); + } + else + { + DPO->printOut(L"Device(s): There is not any suitable video device\n"); + } + return false; +} +bool videoInput::isDeviceRawDataSource(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = 
VDS->getDevice(deviceID); + if(VD) + { + bool isRaw = VD->isDeviceRawDataSource(); + return isRaw; + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return false; +} +bool videoInput::isFrameNew(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return false; + } + if(accessToDevices) + { + if(!isDeviceSetup(deviceID)) + { + if(isDeviceMediaSource(deviceID)) + return false; + } + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + { + return VD->isFrameNew(); + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return false; +} +unsigned int videoInput::getCountFormats(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return 0; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->getCountFormats(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return 0; +} +void videoInput::closeAllDevices() +{ + videoDevices *VDS = &videoDevices::getInstance(); + for(unsigned int i = 0; i < VDS->getCount(); i++) + closeDevice(i); +} +void videoInput::setParametrs(int deviceID, CamParametrs parametrs) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice *VD = VDS->getDevice(deviceID); + if(VD) + VD->setParametrs(parametrs); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } +} +CamParametrs videoInput::getParametrs(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + CamParametrs out; + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return out; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice *VD = VDS->getDevice(deviceID); + if(VD) + out = VD->getParametrs(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return out; +} +void videoInput::closeDevice(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice *VD = VDS->getDevice(deviceID); + if(VD) + VD->closeDevice(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } +} +unsigned int videoInput::getWidth(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return 0; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->getWidth(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return 0; +} +unsigned int videoInput::getHeight(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + 
DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return 0; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->getHeight(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return 0; +} +wchar_t *videoInput::getNameVideoDevice(int deviceID) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return NULL; + } + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + return VD->getName(); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return L"Empty"; +} +unsigned int videoInput::listDevices(bool silent) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + int out = 0; + if(accessToDevices) + { + videoDevices *VDS = &videoDevices::getInstance(); + out = VDS->getCount(); + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if(!silent)DPO->printOut(L"\nVIDEOINPUT SPY MODE!\n\n"); + if(!silent)DPO->printOut(L"SETUP: Looking For Capture Devices\n"); + for(int i = 0; i < out; i++) + { + if(!silent)DPO->printOut(L"SETUP: %i) %s \n",i, getNameVideoDevice(i)); + } + if(!silent)DPO->printOut(L"SETUP: %i Device(s) found\n\n", out); + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return out; +} +videoInput& videoInput::getInstance() +{ + static videoInput instance; + return instance; +} +bool videoInput::isDevicesAcceable() +{ + return accessToDevices; +} +void videoInput::setVerbose(bool state) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + DPO->setVerbose(state); +} +void videoInput::setEmergencyStopEvent(int deviceID, void *userData, void(*func)(int, void *)) +{ + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return; + } + if(accessToDevices) + { + if(func) + { + videoDevices *VDS = &videoDevices::getInstance(); + videoDevice * VD = VDS->getDevice(deviceID); + if(VD) + VD->setEmergencyStopEvent(userData, func); + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } +} +bool videoInput::getPixels(int deviceID, unsigned char * dstBuffer, bool flipRedAndBlue, bool flipImage) +{ + bool success = false; + unsigned int bytes = 3; + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + if (deviceID < 0) + { + DPO->printOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID); + return success; + } + if(accessToDevices) + { + bool isRaw = isDeviceRawDataSource(deviceID); + if(isRaw) + { + videoDevices *VDS = &videoDevices::getInstance(); + DebugPrintOut *DPO = &DebugPrintOut::getInstance(); + RawImage *RIOut = VDS->getDevice(deviceID)->getRawImageOut(); + if(RIOut) + { + unsigned int height = VDS->getDevice(deviceID)->getHeight(); + unsigned int width = VDS->getDevice(deviceID)->getWidth(); + unsigned int size = bytes * width * height; + if(size == RIOut->getSize()) + { + processPixels(RIOut->getpPixels(), dstBuffer, width, height, bytes, flipRedAndBlue, flipImage); + success = true; + } + else + { + DPO->printOut(L"ERROR: GetPixels() - bufferSizes do not match!\n"); + } + } + else + { + DPO->printOut(L"ERROR: GetPixels() - Unable to grab frame for device %i\n", deviceID); + } + } + else + { + 
DPO->printOut(L"ERROR: GetPixels() - Not raw data source device %i\n", deviceID); + } + } + else + { + DPO->printOut(L"VIDEODEVICE(s): There is not any suitable video device\n"); + } + return success; +} +void videoInput::processPixels(unsigned char * src, unsigned char * dst, unsigned int width, + unsigned int height, unsigned int bpp, bool bRGB, bool bFlip) +{ + unsigned int widthInBytes = width * bpp; + unsigned int numBytes = widthInBytes * height; + int *dstInt, *srcInt; + if(!bRGB) + { + if(bFlip) + { + for(unsigned int y = 0; y < height; y++) + { + dstInt = (int *)(dst + (y * widthInBytes)); + srcInt = (int *)(src + ( (height -y -1) * widthInBytes)); + memcpy(dstInt, srcInt, widthInBytes); + } + } + else + { + memcpy(dst, src, numBytes); + } + } + else + { + if(bFlip) + { + unsigned int x = 0; + unsigned int y = (height - 1) * widthInBytes; + src += y; + for(unsigned int i = 0; i < numBytes; i+=3) + { + if(x >= width) + { + x = 0; + src -= widthInBytes*2; + } + *dst = *(src+2); + dst++; + *dst = *(src+1); + dst++; + *dst = *src; + dst++; + src+=3; + x++; + } + } + else + { + for(unsigned int i = 0; i < numBytes; i+=3) + { + *dst = *(src+2); + dst++; + *dst = *(src+1); + dst++; + *dst = *src; + dst++; + src+=3; + } + } + } +} +} +/******* Capturing video from camera via Microsoft Media Foundation **********/ +class CvCaptureCAM_MSMF : public CvCapture +{ +public: + CvCaptureCAM_MSMF(); + virtual ~CvCaptureCAM_MSMF(); + virtual bool open( int index ); + virtual void close(); + virtual double getProperty(int); + virtual bool setProperty(int, double); + virtual bool grabFrame(); + virtual IplImage* retrieveFrame(int); + virtual int getCaptureDomain() { return CV_CAP_MSMF; } // Return the type of the capture object: CV_CAP_VFW, etc... +protected: + void init(); + int index, width, height,fourcc; + int widthSet, heightSet; + IplImage* frame; + videoInput VI; +}; +struct SuppressVideoInputMessages +{ + SuppressVideoInputMessages() { videoInput::setVerbose(true); } +}; +static SuppressVideoInputMessages do_it; +CvCaptureCAM_MSMF::CvCaptureCAM_MSMF(): + index(-1), + width(-1), + height(-1), + fourcc(-1), + widthSet(-1), + heightSet(-1), + frame(0), + VI(videoInput::getInstance()) +{ + CoInitialize(0); +} +CvCaptureCAM_MSMF::~CvCaptureCAM_MSMF() +{ + close(); + CoUninitialize(); +} +void CvCaptureCAM_MSMF::close() +{ + if( index >= 0 ) + { + VI.closeDevice(index); + index = -1; + cvReleaseImage(&frame); + } + widthSet = heightSet = width = height = -1; +} +// Initialize camera input +bool CvCaptureCAM_MSMF::open( int _index ) +{ + int try_index = _index; + int devices = 0; + close(); + devices = VI.listDevices(true); + if (devices == 0) + return false; + try_index = try_index < 0 ? 0 : (try_index > devices-1 ? 
devices-1 : try_index); + VI.setupDevice(try_index); + if( !VI.isFrameNew(try_index) ) + return false; + index = try_index; + return true; +} +bool CvCaptureCAM_MSMF::grabFrame() +{ + return true; +} +IplImage* CvCaptureCAM_MSMF::retrieveFrame(int) +{ + if( !frame || (int)VI.getWidth(index) != frame->width || (int)VI.getHeight(index) != frame->height ) + { + if (frame) + cvReleaseImage( &frame ); + unsigned int w = VI.getWidth(index), h = VI.getHeight(index); + frame = cvCreateImage( cvSize(w,h), 8, 3 ); + } + VI.getPixels( index, (uchar*)frame->imageData, false, true ); + return frame; +} +double CvCaptureCAM_MSMF::getProperty( int property_id ) +{ + // image format proprrties + switch( property_id ) + { + case CV_CAP_PROP_FRAME_WIDTH: + return VI.getWidth(index); + case CV_CAP_PROP_FRAME_HEIGHT: + return VI.getHeight(index); + case CV_CAP_PROP_FOURCC: + // FIXME: implement method in VideoInput back end + //return VI.getFourcc(index); + ; + case CV_CAP_PROP_FPS: + // FIXME: implement method in VideoInput back end + //return VI.getFPS(index); + ; + } + // video filter properties + switch( property_id ) + { + case CV_CAP_PROP_BRIGHTNESS: + case CV_CAP_PROP_CONTRAST: + case CV_CAP_PROP_HUE: + case CV_CAP_PROP_SATURATION: + case CV_CAP_PROP_SHARPNESS: + case CV_CAP_PROP_GAMMA: + case CV_CAP_PROP_MONOCROME: + case CV_CAP_PROP_WHITE_BALANCE_BLUE_U: + case CV_CAP_PROP_BACKLIGHT: + case CV_CAP_PROP_GAIN: + // FIXME: implement method in VideoInput back end + // if ( VI.getVideoSettingFilter(index, VI.getVideoPropertyFromCV(property_id), min_value, + // max_value, stepping_delta, current_value, flags,defaultValue) ) + // return (double)current_value; + return 0.; + } + // camera properties + switch( property_id ) + { + case CV_CAP_PROP_PAN: + case CV_CAP_PROP_TILT: + case CV_CAP_PROP_ROLL: + case CV_CAP_PROP_ZOOM: + case CV_CAP_PROP_EXPOSURE: + case CV_CAP_PROP_IRIS: + case CV_CAP_PROP_FOCUS: + // FIXME: implement method in VideoInput back end + // if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(property_id),min_value, + // max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value; + return 0.; + } + // unknown parameter or value not available + return -1; +} +bool CvCaptureCAM_MSMF::setProperty( int property_id, double value ) +{ + // image capture properties + bool handled = false; + switch( property_id ) + { + case CV_CAP_PROP_FRAME_WIDTH: + width = cvRound(value); + handled = true; + break; + case CV_CAP_PROP_FRAME_HEIGHT: + height = cvRound(value); + handled = true; + break; + case CV_CAP_PROP_FOURCC: + fourcc = (int)(unsigned long)(value); + if ( fourcc == -1 ) { + // following cvCreateVideo usage will pop up caprturepindialog here if fourcc=-1 + // TODO - how to create a capture pin dialog + } + handled = true; + break; + case CV_CAP_PROP_FPS: + // FIXME: implement method in VideoInput back end + // int fps = cvRound(value); + // if (fps != VI.getFPS(index)) + // { + // VI.stopDevice(index); + // VI.setIdealFramerate(index,fps); + // if (widthSet > 0 && heightSet > 0) + // VI.setupDevice(index, widthSet, heightSet); + // else + // VI.setupDevice(index); + // } + // return VI.isDeviceSetup(index); + ; + } + if ( handled ) { + // a stream setting + if( width > 0 && height > 0 ) + { + if( width != (int)VI.getWidth(index) || height != (int)VI.getHeight(index) )//|| fourcc != VI.getFourcc(index) ) + { + // FIXME: implement method in VideoInput back end + // int fps = static_cast(VI.getFPS(index)); + // VI.stopDevice(index); + // 
VI.setIdealFramerate(index, fps); + // VI.setupDeviceFourcc(index, width, height, fourcc); + } + bool success = VI.isDeviceSetup(index); + if (success) + { + widthSet = width; + heightSet = height; + width = height = fourcc = -1; + } + return success; + } + return true; + } + // show video/camera filter dialog + // FIXME: implement method in VideoInput back end + // if ( property_id == CV_CAP_PROP_SETTINGS ) { + // VI.showSettingsWindow(index); + // return true; + // } + //video Filter properties + switch( property_id ) + { + case CV_CAP_PROP_BRIGHTNESS: + case CV_CAP_PROP_CONTRAST: + case CV_CAP_PROP_HUE: + case CV_CAP_PROP_SATURATION: + case CV_CAP_PROP_SHARPNESS: + case CV_CAP_PROP_GAMMA: + case CV_CAP_PROP_MONOCROME: + case CV_CAP_PROP_WHITE_BALANCE_BLUE_U: + case CV_CAP_PROP_BACKLIGHT: + case CV_CAP_PROP_GAIN: + // FIXME: implement method in VideoInput back end + //return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(property_id),(long)value); + ; + } + //camera properties + switch( property_id ) + { + case CV_CAP_PROP_PAN: + case CV_CAP_PROP_TILT: + case CV_CAP_PROP_ROLL: + case CV_CAP_PROP_ZOOM: + case CV_CAP_PROP_EXPOSURE: + case CV_CAP_PROP_IRIS: + case CV_CAP_PROP_FOCUS: + // FIXME: implement method in VideoInput back end + //return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(property_id),(long)value); + ; + } + return false; +} +CvCapture* cvCreateCameraCapture_MSMF( int index ) +{ + CvCaptureCAM_MSMF* capture = new CvCaptureCAM_MSMF; + try + { + if( capture->open( index )) + return capture; + } + catch(...) + { + delete capture; + throw; + } + delete capture; + return 0; +} +#endif \ No newline at end of file diff --git a/modules/highgui/src/precomp.hpp b/modules/highgui/src/precomp.hpp index 9572e3010..aa327d6d7 100644 --- a/modules/highgui/src/precomp.hpp +++ b/modules/highgui/src/precomp.hpp @@ -103,14 +103,6 @@ struct CvVideoWriter virtual bool writeFrame(const IplImage*) { return false; } }; -#if defined WIN32 || defined _WIN32 -#define HAVE_VFW 1 - -/* uncomment to enable CMUCamera1394 fireware camera module */ -//#define HAVE_CMU1394 1 -#endif - - CvCapture * cvCreateCameraCapture_V4L( int index ); CvCapture * cvCreateCameraCapture_DC1394( int index ); CvCapture * cvCreateCameraCapture_DC1394_2( int index ); @@ -126,6 +118,7 @@ CvVideoWriter* cvCreateVideoWriter_Win32( const char* filename, int fourcc, CvVideoWriter* cvCreateVideoWriter_VFW( const char* filename, int fourcc, double fps, CvSize frameSize, int is_color ); CvCapture* cvCreateCameraCapture_DShow( int index ); +CvCapture* cvCreateCameraCapture_MSMF( int index ); CvCapture* cvCreateCameraCapture_OpenNI( int index ); CvCapture* cvCreateFileCapture_OpenNI( const char* filename ); CvCapture* cvCreateCameraCapture_Android( int index ); diff --git a/modules/highgui/src/window.cpp b/modules/highgui/src/window.cpp index b6086076c..6d2953464 100644 --- a/modules/highgui/src/window.cpp +++ b/modules/highgui/src/window.cpp @@ -57,7 +57,7 @@ CV_IMPL void cvSetWindowProperty(const char* name, int prop_id, double prop_valu #if defined (HAVE_QT) cvSetModeWindow_QT(name,prop_value); - #elif defined WIN32 || defined _WIN32 + #elif defined(HAVE_WIN32UI) cvSetModeWindow_W32(name,prop_value); #elif defined (HAVE_GTK) cvSetModeWindow_GTK(name,prop_value); @@ -96,7 +96,7 @@ CV_IMPL double cvGetWindowProperty(const char* name, int prop_id) #if defined (HAVE_QT) return cvGetModeWindow_QT(name); - #elif defined WIN32 || defined _WIN32 + #elif defined(HAVE_WIN32UI) return cvGetModeWindow_W32(name); #elif 
defined (HAVE_GTK) return cvGetModeWindow_GTK(name); @@ -113,7 +113,7 @@ CV_IMPL double cvGetWindowProperty(const char* name, int prop_id) #if defined (HAVE_QT) return cvGetPropWindow_QT(name); - #elif defined WIN32 || defined _WIN32 + #elif defined(HAVE_WIN32UI) return cvGetPropWindowAutoSize_W32(name); #elif defined (HAVE_GTK) return cvGetPropWindowAutoSize_GTK(name); @@ -126,7 +126,7 @@ CV_IMPL double cvGetWindowProperty(const char* name, int prop_id) #if defined (HAVE_QT) return cvGetRatioWindow_QT(name); - #elif defined WIN32 || defined _WIN32 + #elif defined(HAVE_WIN32UI) return cvGetRatioWindow_W32(name); #elif defined (HAVE_GTK) return cvGetRatioWindow_GTK(name); @@ -139,7 +139,7 @@ CV_IMPL double cvGetWindowProperty(const char* name, int prop_id) #if defined (HAVE_QT) return cvGetOpenGlProp_QT(name); - #elif defined WIN32 || defined _WIN32 + #elif defined(HAVE_WIN32UI) return cvGetOpenGlProp_W32(name); #elif defined (HAVE_GTK) return cvGetOpenGlProp_GTK(name); @@ -450,11 +450,11 @@ int cv::createButton(const string&, ButtonCallback, void*, int , bool ) #endif -#if defined WIN32 || defined _WIN32 // see window_w32.cpp +#if defined(HAVE_WIN32UI) // see window_w32.cpp #elif defined (HAVE_GTK) // see window_gtk.cpp -#elif defined (HAVE_COCOA) // see window_carbon.cpp +#elif defined (HAVE_COCOA) // see window_carbon.cpp #elif defined (HAVE_CARBON) -#elif defined (HAVE_QT) //YV see window_QT.cpp +#elif defined (HAVE_QT) //YV see window_QT.cpp #else diff --git a/modules/highgui/test/test_ffmpeg.cpp b/modules/highgui/test/test_ffmpeg.cpp index 53065462a..2bfd52723 100644 --- a/modules/highgui/test/test_ffmpeg.cpp +++ b/modules/highgui/test/test_ffmpeg.cpp @@ -176,7 +176,7 @@ TEST(Highgui_Video, ffmpeg_image) { CV_FFmpegReadImageTest test; test.safe_run() #endif -#if defined(HAVE_FFMPEG) || defined(WIN32) || defined(_WIN32) +#if defined(HAVE_FFMPEG) //////////////////////////////// Parallel VideoWriters and VideoCaptures //////////////////////////////////// diff --git a/modules/highgui/test/test_gui.cpp b/modules/highgui/test/test_gui.cpp index 285203cb0..c2726a43c 100644 --- a/modules/highgui/test/test_gui.cpp +++ b/modules/highgui/test/test_gui.cpp @@ -43,7 +43,7 @@ #include "test_precomp.hpp" #include "opencv2/highgui/highgui.hpp" -#if defined HAVE_GTK || defined HAVE_QT || defined WIN32 || defined _WIN32 || defined HAVE_CARBON || defined HAVE_COCOA +#if defined HAVE_GTK || defined HAVE_QT || defined HAVE_WIN32UI || defined HAVE_CARBON || defined HAVE_COCOA using namespace cv; using namespace std; diff --git a/modules/highgui/test/test_precomp.hpp b/modules/highgui/test/test_precomp.hpp index 3286c0f59..0d0bd8022 100644 --- a/modules/highgui/test/test_precomp.hpp +++ b/modules/highgui/test/test_precomp.hpp @@ -18,7 +18,7 @@ #include "opencv2/imgproc/imgproc_c.h" #include -#if defined(HAVE_VIDEOINPUT) || \ +#if defined(HAVE_DSHOW) || \ defined(HAVE_TYZX) || \ defined(HAVE_VFW) || \ defined(HAVE_LIBV4L) || \ @@ -34,7 +34,7 @@ defined(HAVE_OPENNI) || \ defined(HAVE_XIMEA) || \ defined(HAVE_AVFOUNDATION) || \ - defined(HAVE_GIGE_API) || \ + defined(HAVE_GIGE_API) || \ (0) //defined(HAVE_ANDROID_NATIVE_CAMERA) || - enable after #1193 # define BUILD_WITH_CAMERA_SUPPORT 1 @@ -47,9 +47,7 @@ defined(HAVE_QUICKTIME) || \ defined(HAVE_AVFOUNDATION) || \ /*defined(HAVE_OPENNI) || too specialized */ \ - defined(HAVE_FFMPEG) || \ - defined(WIN32) /* assume that we have ffmpeg */ - + defined(HAVE_FFMPEG) # define BUILD_WITH_VIDEO_INPUT_SUPPORT 1 #else # define BUILD_WITH_VIDEO_INPUT_SUPPORT 0 @@ -59,8 
+57,7 @@ defined(HAVE_GSTREAMER) || \
     defined(HAVE_QUICKTIME) || \
     defined(HAVE_AVFOUNDATION) || \
-    defined(HAVE_FFMPEG) || \
-    defined(WIN32) /* assume that we have ffmpeg */
+    defined(HAVE_FFMPEG)
 # define BUILD_WITH_VIDEO_OUTPUT_SUPPORT 1
 #else
 # define BUILD_WITH_VIDEO_OUTPUT_SUPPORT 0
diff --git a/modules/stitching/include/opencv2/stitching/detail/warpers.hpp b/modules/stitching/include/opencv2/stitching/detail/warpers.hpp
index 4a7cc4e8f..2bd46f75a 100644
--- a/modules/stitching/include/opencv2/stitching/detail/warpers.hpp
+++ b/modules/stitching/include/opencv2/stitching/detail/warpers.hpp
@@ -460,7 +460,7 @@ struct SphericalPortraitProjector : ProjectorBase
 // Projects image onto unit sphere with origin at (0, 0, 0).
 // Poles are located NOT at (0, -1, 0) and (0, 1, 0) points, BUT at (1, 0, 0) and (-1, 0, 0) points.
-class SphericalPortraitWarper : public RotationWarperBase<SphericalPortraitProjector>
+class CV_EXPORTS SphericalPortraitWarper : public RotationWarperBase<SphericalPortraitProjector>
 {
 public:
     SphericalPortraitWarper(float scale) { projector_.scale = scale; }
@@ -476,7 +476,7 @@ struct CylindricalPortraitProjector : ProjectorBase
 };
-class CylindricalPortraitWarper : public RotationWarperBase<CylindricalPortraitProjector>
+class CV_EXPORTS CylindricalPortraitWarper : public RotationWarperBase<CylindricalPortraitProjector>
 {
 public:
     CylindricalPortraitWarper(float scale) { projector_.scale = scale; }
@@ -495,7 +495,7 @@ struct PlanePortraitProjector : ProjectorBase
 };
-class PlanePortraitWarper : public RotationWarperBase<PlanePortraitProjector>
+class CV_EXPORTS PlanePortraitWarper : public RotationWarperBase<PlanePortraitProjector>
 {
 public:
     PlanePortraitWarper(float scale) { projector_.scale = scale; }
diff --git a/platforms/winrt/arm.winrt.toolchain.cmake b/platforms/winrt/arm.winrt.toolchain.cmake
new file mode 100644
index 000000000..b34056cd5
--- /dev/null
+++ b/platforms/winrt/arm.winrt.toolchain.cmake
@@ -0,0 +1,6 @@
+set(CMAKE_SYSTEM_NAME Windows)
+set(CMAKE_SYSTEM_PROCESSOR "arm-v7a")
+
+set(CMAKE_FIND_ROOT_PATH "${CMAKE_SOURCE_DIR}/platforms/winrt")
+set(CMAKE_REQUIRED_DEFINITIONS -D_ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE)
+add_definitions(-D_ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE)
\ No newline at end of file
diff --git a/platforms/winrt/scripts/cmake_winrt.cmd b/platforms/winrt/scripts/cmake_winrt.cmd
new file mode 100644
index 000000000..aafed7d09
--- /dev/null
+++ b/platforms/winrt/scripts/cmake_winrt.cmd
@@ -0,0 +1,6 @@
+mkdir build
+cd build
+
+rem call "C:\Program Files\Microsoft Visual Studio 11.0\VC\bin\x86_arm\vcvarsx86_arm.bat"
+
+cmake.exe -GNinja -DCMAKE_BUILD_TYPE=Release -DWITH_FFMPEG=OFF -DBUILD_opencv_gpu=OFF -DBUILD_opencv_python=OFF -DCMAKE_TOOLCHAIN_FILE=..\..\winrt\arm.winrt.toolchain.cmake ..\..\..