Merge pull request #2524 from SpecLad:merge-2.4
Commit 85cf2d9671
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r2.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r2.3.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r3.0.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.0.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.0.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.1.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.3.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi-v7a/libnative_camera_r4.4.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r2.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r2.3.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r3.0.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.0.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.0.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.1.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.3.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/armeabi/libnative_camera_r4.4.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/mips/libnative_camera_r4.0.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/mips/libnative_camera_r4.1.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/mips/libnative_camera_r4.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/mips/libnative_camera_r4.3.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/mips/libnative_camera_r4.4.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r2.3.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r3.0.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r4.0.3.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r4.1.1.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r4.2.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r4.3.0.so (vendored; binary file not shown)
BIN 3rdparty/lib/x86/libnative_camera_r4.4.0.so (vendored; binary file not shown)
@@ -438,7 +438,6 @@ include(cmake/OpenCVFindLibsGUI.cmake)
include(cmake/OpenCVFindLibsVideo.cmake)
include(cmake/OpenCVFindLibsPerf.cmake)

# ----------------------------------------------------------------------------
# Detect other 3rd-party libraries/tools
# ----------------------------------------------------------------------------

@@ -217,3 +217,42 @@ else()
unset(CUDA_ARCH_BIN CACHE)
unset(CUDA_ARCH_PTX CACHE)
endif()

if(HAVE_CUDA)
set(CUDA_LIBS_PATH "")
foreach(p ${CUDA_LIBRARIES} ${CUDA_npp_LIBRARY})
get_filename_component(_tmp ${p} PATH)
list(APPEND CUDA_LIBS_PATH ${_tmp})
endforeach()

if(HAVE_CUBLAS)
foreach(p ${CUDA_cublas_LIBRARY})
get_filename_component(_tmp ${p} PATH)
list(APPEND CUDA_LIBS_PATH ${_tmp})
endforeach()
endif()

if(HAVE_CUFFT)
foreach(p ${CUDA_cufft_LIBRARY})
get_filename_component(_tmp ${p} PATH)
list(APPEND CUDA_LIBS_PATH ${_tmp})
endforeach()
endif()

list(REMOVE_DUPLICATES CUDA_LIBS_PATH)
link_directories(${CUDA_LIBS_PATH})

set(CUDA_LIBRARIES_ABS ${CUDA_LIBRARIES})
ocv_convert_to_lib_name(CUDA_LIBRARIES ${CUDA_LIBRARIES})
set(CUDA_npp_LIBRARY_ABS ${CUDA_npp_LIBRARY})
ocv_convert_to_lib_name(CUDA_npp_LIBRARY ${CUDA_npp_LIBRARY})
if(HAVE_CUBLAS)
set(CUDA_cublas_LIBRARY_ABS ${CUDA_cublas_LIBRARY})
ocv_convert_to_lib_name(CUDA_cublas_LIBRARY ${CUDA_cublas_LIBRARY})
endif()

if(HAVE_CUFFT)
set(CUDA_cufft_LIBRARY_ABS ${CUDA_cufft_LIBRARY})
ocv_convert_to_lib_name(CUDA_cufft_LIBRARY ${CUDA_cufft_LIBRARY})
endif()
endif()
@@ -27,7 +27,8 @@
# The verbose template for OpenCV module:
#
# ocv_add_module(modname <dependencies>)
# ocv_glob_module_sources() or glob them manually and ocv_set_module_sources(...)
# ocv_glob_module_sources(([EXCLUDE_CUDA] <extra sources&headers>)
# or glob them manually and ocv_set_module_sources(...)
# ocv_module_include_directories(<extra include directories>)
# ocv_create_module()
# <add extra link dependencies, compiler options, etc>

@@ -478,9 +479,15 @@ endmacro()

# finds and sets headers and sources for the standard OpenCV module
# Usage:
# ocv_glob_module_sources(<extra sources&headers in the same format as used in ocv_set_module_sources>)
# ocv_glob_module_sources([EXCLUDE_CUDA] <extra sources&headers in the same format as used in ocv_set_module_sources>)
macro(ocv_glob_module_sources)
file(GLOB_RECURSE lib_srcs "src/*.cpp")
set(_argn ${ARGN})
list(FIND _argn "EXCLUDE_CUDA" exclude_cuda)
if(NOT exclude_cuda EQUAL -1)
list(REMOVE_AT _argn ${exclude_cuda})
endif()

file(GLOB_RECURSE lib_srcs "src/*.cpp")
file(GLOB_RECURSE lib_int_hdrs "src/*.hpp" "src/*.h")
file(GLOB lib_hdrs "include/opencv2/*.hpp" "include/opencv2/${name}/*.hpp" "include/opencv2/${name}/*.h")
file(GLOB lib_hdrs_detail "include/opencv2/${name}/detail/*.hpp" "include/opencv2/${name}/detail/*.h")

@@ -492,15 +499,21 @@ macro(ocv_glob_module_sources)
ocv_source_group("Src" DIRBASE "${CMAKE_CURRENT_SOURCE_DIR}/src" FILES ${lib_srcs} ${lib_int_hdrs})
ocv_source_group("Include" DIRBASE "${CMAKE_CURRENT_SOURCE_DIR}/include" FILES ${lib_hdrs} ${lib_hdrs_detail})

file(GLOB lib_cuda_srcs "src/cuda/*.cu")
set(cuda_objs "")
set(lib_cuda_hdrs "")
if(HAVE_CUDA AND lib_cuda_srcs)
ocv_include_directories(${CUDA_INCLUDE_DIRS})
file(GLOB lib_cuda_hdrs "src/cuda/*.hpp")
if (exclude_cuda EQUAL -1)
file(GLOB lib_cuda_srcs "src/cuda/*.cu")
set(cuda_objs "")
set(lib_cuda_hdrs "")
if(HAVE_CUDA)
ocv_include_directories(${CUDA_INCLUDE_DIRS})
file(GLOB lib_cuda_hdrs "src/cuda/*.hpp")

ocv_cuda_compile(cuda_objs ${lib_cuda_srcs} ${lib_cuda_hdrs})
source_group("Src\\Cuda" FILES ${lib_cuda_srcs} ${lib_cuda_hdrs})
ocv_cuda_compile(cuda_objs ${lib_cuda_srcs} ${lib_cuda_hdrs})
source_group("Src\\Cuda" FILES ${lib_cuda_srcs} ${lib_cuda_hdrs})
endif()
else()
set(cuda_objs "")
set(lib_cuda_srcs "")
set(lib_cuda_hdrs "")
endif()

file(GLOB cl_kernels "src/opencl/*.cl")

@@ -516,8 +529,8 @@ macro(ocv_glob_module_sources)
list(APPEND lib_srcs ${cl_kernels} "${CMAKE_CURRENT_BINARY_DIR}/opencl_kernels.cpp" "${CMAKE_CURRENT_BINARY_DIR}/opencl_kernels.hpp")
endif()

ocv_set_module_sources(${ARGN} HEADERS ${lib_hdrs} ${lib_hdrs_detail}
SOURCES ${lib_srcs} ${lib_int_hdrs} ${cuda_objs} ${lib_cuda_srcs} ${lib_cuda_hdrs})
ocv_set_module_sources(${_argn} HEADERS ${lib_hdrs} ${lib_hdrs_detail}
SOURCES ${lib_srcs} ${lib_int_hdrs} ${cuda_objs} ${lib_cuda_srcs} ${lib_cuda_hdrs})
endmacro()

# creates OpenCV module in current folder

@@ -622,11 +635,20 @@ endmacro()
# short command for adding simple OpenCV module
# see ocv_add_module for argument details
# Usage:
# ocv_define_module(module_name [INTERNAL] [REQUIRED] [<list of dependencies>] [OPTIONAL <list of optional dependencies>])
# ocv_define_module(module_name [INTERNAL] [EXCLUDE_CUDA] [REQUIRED] [<list of dependencies>] [OPTIONAL <list of optional dependencies>])
macro(ocv_define_module module_name)
ocv_add_module(${module_name} ${ARGN})
set(_argn ${ARGN})
set(exclude_cuda "")
foreach(arg ${_argn})
if("${arg}" STREQUAL "EXCLUDE_CUDA")
set(exclude_cuda "${arg}")
list(REMOVE_ITEM _argn ${arg})
endif()
endforeach()

ocv_add_module(${module_name} ${_argn})
ocv_module_include_directories()
ocv_glob_module_sources()
ocv_glob_module_sources(${exclude_cuda})
ocv_create_module()
ocv_add_precompiled_headers(${the_module})
@@ -19,8 +19,8 @@
# This file will define the following variables:
# - OpenCV_LIBS : The list of all imported targets for OpenCV modules.
# - OpenCV_INCLUDE_DIRS : The OpenCV include directories.
# - OpenCV_COMPUTE_CAPABILITIES : The version of compute capability
# - OpenCV_ANDROID_NATIVE_API_LEVEL : Minimum required level of Android API
# - OpenCV_COMPUTE_CAPABILITIES : The version of compute capability.
# - OpenCV_ANDROID_NATIVE_API_LEVEL : Minimum required level of Android API.
# - OpenCV_VERSION : The version of this OpenCV build: "@OPENCV_VERSION_PLAIN@"
# - OpenCV_VERSION_MAJOR : Major version part of OpenCV_VERSION: "@OPENCV_VERSION_MAJOR@"
# - OpenCV_VERSION_MINOR : Minor version part of OpenCV_VERSION: "@OPENCV_VERSION_MINOR@"

@@ -28,25 +28,29 @@
# - OpenCV_VERSION_STATUS : Development status of this build: "@OPENCV_VERSION_STATUS@"
#
# Advanced variables:
# - OpenCV_SHARED
# - OpenCV_CONFIG_PATH
# - OpenCV_INSTALL_PATH (not set on Windows)
# - OpenCV_LIB_COMPONENTS
# - OpenCV_USE_MANGLED_PATHS
# - OpenCV_HAVE_ANDROID_CAMERA
# - OpenCV_SHARED : Use OpenCV as shared library
# - OpenCV_CONFIG_PATH : Path to this OpenCVConfig.cmake
# - OpenCV_INSTALL_PATH : OpenCV location (not set on Windows)
# - OpenCV_LIB_COMPONENTS : Present OpenCV modules list
# - OpenCV_USE_MANGLED_PATHS : Mangled OpenCV path flag
# - OpenCV_MODULES_SUFFIX : The suffix for OpenCVModules-XXX.cmake file
# - OpenCV_HAVE_ANDROID_CAMERA : Presence of Android native camera wrappers
#
# Deprecated variables:
# - OpenCV_VERSION_TWEAK : Always "0"
#
# ===================================================================================

set(modules_file_suffix "")
if(ANDROID)
string(REPLACE - _ modules_file_suffix "_${ANDROID_NDK_ABI_NAME}")
if(NOT DEFINED OpenCV_MODULES_SUFFIX)
if(ANDROID)
string(REPLACE - _ OpenCV_MODULES_SUFFIX "_${ANDROID_NDK_ABI_NAME}")
else()
set(OpenCV_MODULES_SUFFIX "")
endif()
endif()

if(NOT TARGET opencv_core)
include(${CMAKE_CURRENT_LIST_DIR}/OpenCVModules${modules_file_suffix}.cmake)
include(${CMAKE_CURRENT_LIST_DIR}/OpenCVModules${OpenCV_MODULES_SUFFIX}.cmake)
endif()

# TODO All things below should be reviewed. What is about of moving this code into related modules (special vars/hooks/files)
@@ -6,12 +6,12 @@ Adding (blending) two images using OpenCV
Goal
=====

In this tutorial you will learn how to:
In this tutorial you will learn:

.. container:: enumeratevisibleitemswithsquare

   * What is *linear blending* and why it is useful.
   * Add two images using :add_weighted:`addWeighted <>`
   * what is *linear blending* and why it is useful;
   * how to add two images using :add_weighted:`addWeighted <>`

Theory
=======
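The blend the tutorial describes is g(x) = (1 - alpha) f0(x) + alpha f1(x), which is exactly what addWeighted computes. A minimal sketch of that call, assuming two hypothetical input files of the same size and type:

    #include <opencv2/core.hpp>
    #include <opencv2/highgui.hpp>

    int main()
    {
        double alpha = 0.5;                            // weight of the second image
        cv::Mat src1 = cv::imread("input1.jpg");       // hypothetical file names; both images
        cv::Mat src2 = cv::imread("input2.jpg");       // must have the same size and type
        if (src1.empty() || src2.empty())
            return -1;

        cv::Mat dst;
        // dst = (1 - alpha)*src1 + alpha*src2 + 0
        cv::addWeighted(src1, 1.0 - alpha, src2, alpha, 0.0, dst);

        cv::imshow("Linear Blend", dst);
        cv::waitKey(0);
        return 0;
    }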
@@ -18,7 +18,7 @@ We'll seek answers for the following questions:
Our test case
=============

Let us consider a simple color reduction method. Using the unsigned char C and C++ type for matrix item storing a channel of pixel may have up to 256 different values. For a three channel image this can allow the formation of way too many colors (16 million to be exact). Working with so many color shades may give a heavy blow to our algorithm performance. However, sometimes it is enough to work with a lot less of them to get the same final result.
Let us consider a simple color reduction method. By using the unsigned char C and C++ type for matrix item storing, a channel of pixel may have up to 256 different values. For a three channel image this can allow the formation of way too many colors (16 million to be exact). Working with so many color shades may give a heavy blow to our algorithm performance. However, sometimes it is enough to work with a lot less of them to get the same final result.

In this cases it's common that we make a *color space reduction*. This means that we divide the color space current value with a new input value to end up with fewer colors. For instance every value between zero and nine takes the new value zero, every value between ten and nineteen the value ten and so on.
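As a rough sketch of the reduction just described (values 0..9 map to 0, 10..19 map to 10, and so on), a 256-entry lookup table can be built once and applied per pixel; the divisor of 10 is simply the example value from the text:

    #include <opencv2/core.hpp>

    // Snap every 8-bit intensity down to the nearest lower multiple of divideWith,
    // then remap all channels of an 8-bit image in place.
    void reduceColors(cv::Mat& img, int divideWith = 10)
    {
        uchar table[256];
        for (int i = 0; i < 256; ++i)
            table[i] = (uchar)(divideWith * (i / divideWith));

        for (int r = 0; r < img.rows; ++r)
        {
            uchar* p = img.ptr<uchar>(r);
            for (int c = 0; c < img.cols * img.channels(); ++c)
                p[c] = table[p[c]];
        }
    }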
@@ -84,88 +84,10 @@ Code

* **Code at glance:**

.. code-block:: cpp
.. literalinclude:: ../../../../../samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp
   :language: cpp
   :tab-width: 4

#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include <iostream>
#include <stdio.h>

using namespace std;
using namespace cv;

/** @function main */
int main( int argc, char** argv )
{
Mat src_base, hsv_base;
Mat src_test1, hsv_test1;
Mat src_test2, hsv_test2;
Mat hsv_half_down;

/// Load three images with different environment settings
if( argc < 4 )
{ printf("** Error. Usage: ./compareHist_Demo <image_settings0> <image_setting1> <image_settings2>\n");
return -1;
}

src_base = imread( argv[1], 1 );
src_test1 = imread( argv[2], 1 );
src_test2 = imread( argv[3], 1 );

/// Convert to HSV
cvtColor( src_base, hsv_base, CV_BGR2HSV );
cvtColor( src_test1, hsv_test1, CV_BGR2HSV );
cvtColor( src_test2, hsv_test2, CV_BGR2HSV );

hsv_half_down = hsv_base( Range( hsv_base.rows/2, hsv_base.rows - 1 ), Range( 0, hsv_base.cols - 1 ) );

/// Using 30 bins for hue and 32 for saturation
int h_bins = 50; int s_bins = 60;
int histSize[] = { h_bins, s_bins };

// hue varies from 0 to 256, saturation from 0 to 180
float h_ranges[] = { 0, 256 };
float s_ranges[] = { 0, 180 };

const float* ranges[] = { h_ranges, s_ranges };

// Use the o-th and 1-st channels
int channels[] = { 0, 1 };

/// Histograms
MatND hist_base;
MatND hist_half_down;
MatND hist_test1;
MatND hist_test2;

/// Calculate the histograms for the HSV images
calcHist( &hsv_base, 1, channels, Mat(), hist_base, 2, histSize, ranges, true, false );
normalize( hist_base, hist_base, 0, 1, NORM_MINMAX, -1, Mat() );

calcHist( &hsv_half_down, 1, channels, Mat(), hist_half_down, 2, histSize, ranges, true, false );
normalize( hist_half_down, hist_half_down, 0, 1, NORM_MINMAX, -1, Mat() );

calcHist( &hsv_test1, 1, channels, Mat(), hist_test1, 2, histSize, ranges, true, false );
normalize( hist_test1, hist_test1, 0, 1, NORM_MINMAX, -1, Mat() );

calcHist( &hsv_test2, 1, channels, Mat(), hist_test2, 2, histSize, ranges, true, false );
normalize( hist_test2, hist_test2, 0, 1, NORM_MINMAX, -1, Mat() );

/// Apply the histogram comparison methods
for( int i = 0; i < 4; i++ )
{ int compare_method = i;
double base_base = compareHist( hist_base, hist_base, compare_method );
double base_half = compareHist( hist_base, hist_half_down, compare_method );
double base_test1 = compareHist( hist_base, hist_test1, compare_method );
double base_test2 = compareHist( hist_base, hist_test2, compare_method );

printf( " Method [%d] Perfect, Base-Half, Base-Test(1), Base-Test(2) : %f, %f, %f, %f \n", i, base_base, base_half , base_test1, base_test2 );
}

printf( "Done \n" );

return 0;
}

Explanation
@@ -211,11 +133,11 @@ Explanation

.. code-block:: cpp

   int h_bins = 50; int s_bins = 32;
   int h_bins = 50; int s_bins = 60;
   int histSize[] = { h_bins, s_bins };

   float h_ranges[] = { 0, 256 };
   float s_ranges[] = { 0, 180 };
   float h_ranges[] = { 0, 180 };
   float s_ranges[] = { 0, 256 };

   const float* ranges[] = { h_ranges, s_ranges };
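The corrected bounds matter because, in OpenCV's 8-bit HSV representation, hue runs 0..179 and saturation 0..255, and the upper bound passed to calcHist is exclusive. A condensed, self-contained sketch of the corrected setup (function name and parameters are illustrative only):

    #include <opencv2/core.hpp>
    #include <opencv2/imgproc.hpp>

    // Compute a normalized 2-D Hue/Saturation histogram of an HSV image.
    cv::MatND hueSatHistogram(const cv::Mat& hsv)
    {
        int h_bins = 50, s_bins = 60;          // bin counts used by the tutorial
        int histSize[] = { h_bins, s_bins };
        float h_ranges[] = { 0, 180 };         // hue is 0..179 in 8-bit HSV
        float s_ranges[] = { 0, 256 };         // saturation is 0..255
        const float* ranges[] = { h_ranges, s_ranges };
        int channels[] = { 0, 1 };             // H and S planes

        cv::MatND hist;
        cv::calcHist(&hsv, 1, channels, cv::Mat(), hist, 2, histSize, ranges, true, false);
        cv::normalize(hist, hist, 0, 1, cv::NORM_MINMAX, -1, cv::Mat());
        return hist;
    }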
@@ -58,7 +58,7 @@ SET_TARGET_PROPERTIES(${the_target} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}
)

if (NOT (CMAKE_BUILD_TYPE MATCHES "debug"))
if (NOT (CMAKE_BUILD_TYPE MATCHES "Debug"))
ADD_CUSTOM_COMMAND( TARGET ${the_target} POST_BUILD COMMAND ${CMAKE_STRIP} --strip-unneeded "${LIBRARY_OUTPUT_PATH}/lib${the_target}.so" )
endif()
@@ -61,6 +61,12 @@

using namespace android;

// non-public camera related classes are not binary compatible
// objects of these classes have different sizeof on different platforms
// additional memory tail to all system objects to overcome sizeof issue
#define MAGIC_TAIL 4096

void debugShowFPS();

#if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
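The idea behind MAGIC_TAIL is to allocate sizeof(T) plus a slack tail and construct the object with placement new, so a wrapper compiled against one Android release does not write past an object whose real size differs at runtime; the price is that teardown must be an explicit destructor call plus operator delete. A minimal, self-contained sketch of that pattern (PlatformObject is a hypothetical stand-in for the non-public Android classes):

    #include <new>       // placement new
    #include <cstddef>

    #define MAGIC_TAIL 4096  // extra slack after the object, as in the wrapper above

    // Hypothetical stand-in for a class whose sizeof may differ between releases.
    struct PlatformObject { int state; PlatformObject() : state(0) {} };

    int main()
    {
        // Allocate the compile-time size plus a safety tail, then construct in place.
        void* buffer = operator new(sizeof(PlatformObject) + MAGIC_TAIL);
        PlatformObject* obj = new (buffer) PlatformObject();

        obj->state = 42;

        // Placement new requires a matching manual teardown.
        obj->~PlatformObject();
        operator delete(buffer);
        return 0;
    }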
@@ -90,6 +96,7 @@ public:
};
#endif

std::string getProcessName()
{
std::string result;
@@ -142,12 +149,22 @@ class CameraHandler: public CameraListener
protected:
int cameraId;
sp<Camera> camera;
CameraParameters params;
#if defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
sp<SurfaceTexture> surface;
#endif
#if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
sp<BufferQueue> queue;
sp<ConsumerListenerStub> listener;
#endif
CameraParameters* params;
CameraCallback cameraCallback;
void* userData;

int emptyCameraCallbackReported;

int width;
int height;

static const char* flashModesNames[ANDROID_CAMERA_FLASH_MODES_NUM];
static const char* focusModesNames[ANDROID_CAMERA_FOCUS_MODES_NUM];
static const char* whiteBalanceModesNames[ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM];
@@ -258,7 +275,7 @@ protected:

int is_supported(const char* supp_modes_key, const char* mode)
{
const char* supported_modes = params.get(supp_modes_key);
const char* supported_modes = params->get(supp_modes_key);
return (supported_modes && mode && (strstr(supported_modes, mode) > 0));
}

@@ -268,7 +285,7 @@ protected:
if (focus_distance_type >= 0 && focus_distance_type < 3)
{
float focus_distances[3];
const char* output = params.get(CameraParameters::KEY_FOCUS_DISTANCES);
const char* output = params->get(CameraParameters::KEY_FOCUS_DISTANCES);
int val_num = CameraHandler::split_float(output, focus_distances, ',', 3);
if(val_num == 3)
{
@@ -300,10 +317,15 @@ public:
emptyCameraCallbackReported(0)
{
LOGD("Instantiated new CameraHandler (%p, %p)", callback, _userData);
void* params_buffer = operator new(sizeof(CameraParameters) + MAGIC_TAIL);
params = new(params_buffer) CameraParameters();
}

virtual ~CameraHandler()
{
if (params)
params->~CameraParameters();
operator delete(params);
LOGD("CameraHandler destructor is called");
}
@@ -371,10 +393,18 @@ const char* CameraHandler::focusModesNames[ANDROID_CAMERA_FOCUS_MODES_NUM] =
CameraParameters::FOCUS_MODE_AUTO,
#if !defined(ANDROID_r2_2_0)
CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO,
#else
CameraParameters::FOCUS_MODE_AUTO,
#endif
CameraParameters::FOCUS_MODE_EDOF,
CameraParameters::FOCUS_MODE_FIXED,
CameraParameters::FOCUS_MODE_INFINITY
CameraParameters::FOCUS_MODE_INFINITY,
CameraParameters::FOCUS_MODE_MACRO,
#if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE
#else
CameraParameters::FOCUS_MODE_AUTO
#endif
};

const char* CameraHandler::whiteBalanceModesNames[ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM] =
@@ -534,39 +564,39 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
{
LOGI("initCameraConnect: Setting paramers from previous camera handler");
camera->setParameters(prevCameraParameters->flatten());
handler->params.unflatten(prevCameraParameters->flatten());
handler->params->unflatten(prevCameraParameters->flatten());
}
else
{
android::String8 params_str = camera->getParameters();
LOGI("initCameraConnect: [%s]", params_str.string());

handler->params.unflatten(params_str);
handler->params->unflatten(params_str);

LOGD("Supported Cameras: %s", handler->params.get("camera-indexes"));
LOGD("Supported Picture Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
LOGD("Supported Picture Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
LOGD("Supported Preview Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
LOGD("Supported Preview Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
LOGD("Supported Preview Frame Rates: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
LOGD("Supported Thumbnail Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
LOGD("Supported Whitebalance Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
LOGD("Supported Effects: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_EFFECTS));
LOGD("Supported Scene Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
LOGD("Supported Focus Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
LOGD("Supported Antibanding Options: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
LOGD("Supported Flash Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));
LOGD("Supported Cameras: %s", handler->params->get("camera-indexes"));
LOGD("Supported Picture Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
LOGD("Supported Picture Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
LOGD("Supported Preview Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
LOGD("Supported Preview Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
LOGD("Supported Preview Frame Rates: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
LOGD("Supported Thumbnail Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
LOGD("Supported Whitebalance Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
LOGD("Supported Effects: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_EFFECTS));
LOGD("Supported Scene Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
LOGD("Supported Focus Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
LOGD("Supported Antibanding Options: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
LOGD("Supported Flash Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

#if !defined(ANDROID_r2_2_0)
// Set focus mode to continuous-video if supported
const char* available_focus_modes = handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
const char* available_focus_modes = handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if (available_focus_modes != 0)
{
if (strstr(available_focus_modes, "continuous-video") != NULL)
{
handler->params.set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
handler->params->set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);

status_t resParams = handler->camera->setParameters(handler->params.flatten());
status_t resParams = handler->camera->setParameters(handler->params->flatten());

if (resParams != 0)
{

@@ -581,7 +611,7 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
#endif

//check if yuv420sp format available. Set this format as preview format.
const char* available_formats = handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
const char* available_formats = handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
if (available_formats != 0)
{
const char* format_to_set = 0;

@@ -607,9 +637,9 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,

if (0 != format_to_set)
{
handler->params.setPreviewFormat(format_to_set);
handler->params->setPreviewFormat(format_to_set);

status_t resParams = handler->camera->setParameters(handler->params.flatten());
status_t resParams = handler->camera->setParameters(handler->params->flatten());

if (resParams != 0)
LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
@@ -617,6 +647,13 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
LOGD("initCameraConnect: preview format is set to %s", format_to_set);
}
}

handler->params->setPreviewSize(640, 480);
status_t resParams = handler->camera->setParameters(handler->params->flatten());
if (resParams != 0)
LOGE("initCameraConnect: failed to set preview resolution to 640x480");
else
LOGD("initCameraConnect: preview format is set to 640x480");
}

status_t bufferStatus;
@@ -627,22 +664,27 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
#elif defined(ANDROID_r2_3_3)
/* Do nothing in case of 2.3 for now */
#elif defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
sp<SurfaceTexture> surfaceTexture = new SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
bufferStatus = camera->setPreviewTexture(surfaceTexture);
void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
bufferStatus = camera->setPreviewTexture(handler->surface);
if (bufferStatus != 0)
LOGE("initCameraConnect: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
#elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
sp<BufferQueue> bufferQueue = new BufferQueue();
sp<BufferQueue::ConsumerListener> queueListener = new ConsumerListenerStub();
bufferQueue->consumerConnect(queueListener);
bufferStatus = camera->setPreviewTexture(bufferQueue);
void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
handler->queue = new(buffer_queue_obj) BufferQueue();
void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
handler->queue->consumerConnect(handler->listener);
bufferStatus = camera->setPreviewTexture(handler->queue);
if (bufferStatus != 0)
LOGE("initCameraConnect: failed setPreviewTexture call; camera might not work correctly");
# elif defined(ANDROID_r4_4_0)
sp<BufferQueue> bufferQueue = new BufferQueue();
sp<IConsumerListener> queueListener = new ConsumerListenerStub();
bufferQueue->consumerConnect(queueListener, true);
bufferStatus = handler->camera->setPreviewTarget(bufferQueue);
void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
handler->queue = new(buffer_queue_obj) BufferQueue();
void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
handler->queue->consumerConnect(handler->listener, true);
bufferStatus = handler->camera->setPreviewTarget(handler->queue);
if (bufferStatus != 0)
LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# endif
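Part of the point of storing the queue, listener and surface in the handler rather than in stack-local sp<> temporaries is lifetime: the strong references now outlive initCameraConnect instead of potentially dropping to zero when the local goes out of scope. A loose analogy in standard C++, using shared_ptr as a stand-in for android::sp (the types and names here are illustrative only):

    #include <memory>
    #include <cstdio>

    struct PreviewTarget { ~PreviewTarget() { std::puts("preview target destroyed"); } };

    struct Handler
    {
        std::shared_ptr<PreviewTarget> target;   // member keeps the object alive

        void initWithMember()
        {
            target = std::make_shared<PreviewTarget>();
            // ... hand `target` to the consumer; it stays valid after this returns
        }

        static void initWithLocalOnly()
        {
            std::shared_ptr<PreviewTarget> local = std::make_shared<PreviewTarget>();
            // ... hand `local` to the consumer ...
        }   // if the consumer took no strong reference of its own, the object dies here
    };

    int main()
    {
        Handler h;
        h.initWithMember();            // destroyed only when `h` goes away
        Handler::initWithLocalOnly();  // prints immediately at scope exit
        return 0;
    }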
@@ -723,18 +765,18 @@ double CameraHandler::getProperty(int propIdx)
case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
{
int w,h;
params.getPreviewSize(&w, &h);
params->getPreviewSize(&w, &h);
return w;
}
case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
{
int w,h;
params.getPreviewSize(&w, &h);
params->getPreviewSize(&w, &h);
return h;
}
case ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING:
{
cameraPropertySupportedPreviewSizesString = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
cameraPropertySupportedPreviewSizesString = params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
union {const char* str;double res;} u;
memset(&u.res, 0, sizeof(u.res));
u.str = cameraPropertySupportedPreviewSizesString.c_str();

@@ -742,7 +784,7 @@ double CameraHandler::getProperty(int propIdx)
}
case ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING:
{
const char* fmt = params.get(CameraParameters::KEY_PREVIEW_FORMAT);
const char* fmt = params->get(CameraParameters::KEY_PREVIEW_FORMAT);
if (fmt == CameraParameters::PIXEL_FORMAT_YUV422SP)
fmt = "yuv422sp";
else if (fmt == CameraParameters::PIXEL_FORMAT_YUV420SP)

@@ -762,44 +804,44 @@ double CameraHandler::getProperty(int propIdx)
}
case ANDROID_CAMERA_PROPERTY_EXPOSURE:
{
int exposure = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
int exposure = params->getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
return exposure;
}
case ANDROID_CAMERA_PROPERTY_FPS:
{
return params.getPreviewFrameRate();
return params->getPreviewFrameRate();
}
case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
{
int flash_mode = getModeNum(CameraHandler::flashModesNames,
ANDROID_CAMERA_FLASH_MODES_NUM,
params.get(CameraParameters::KEY_FLASH_MODE));
params->get(CameraParameters::KEY_FLASH_MODE));
return flash_mode;
}
case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
{
int focus_mode = getModeNum(CameraHandler::focusModesNames,
ANDROID_CAMERA_FOCUS_MODES_NUM,
params.get(CameraParameters::KEY_FOCUS_MODE));
params->get(CameraParameters::KEY_FOCUS_MODE));
return focus_mode;
}
case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
{
int white_balance = getModeNum(CameraHandler::whiteBalanceModesNames,
ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM,
params.get(CameraParameters::KEY_WHITE_BALANCE));
params->get(CameraParameters::KEY_WHITE_BALANCE));
return white_balance;
}
case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
{
int antibanding = getModeNum(CameraHandler::antibandingModesNames,
ANDROID_CAMERA_ANTIBANDING_MODES_NUM,
params.get(CameraParameters::KEY_ANTIBANDING));
params->get(CameraParameters::KEY_ANTIBANDING));
return antibanding;
}
case ANDROID_CAMERA_PROPERTY_FOCAL_LENGTH:
{
float focal_length = params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
float focal_length = params->getFloat(CameraParameters::KEY_FOCAL_LENGTH);
return focal_length;
}
case ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_NEAR:
@@ -814,6 +856,24 @@ double CameraHandler::getProperty(int propIdx)
{
return getFocusDistance(ANDROID_CAMERA_FOCUS_DISTANCE_FAR_INDEX);
}
#if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
case ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK:
{
const char* status = params->get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
if (status == CameraParameters::TRUE)
return 1.;
else
return 0.;
}
case ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK:
{
const char* status = params->get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
if (status == CameraParameters::TRUE)
return 1.;
else
return 0.;
}
#endif
default:
LOGW("CameraHandler::getProperty - Unsupported property.");
};
@@ -824,99 +884,151 @@ void CameraHandler::setProperty(int propIdx, double value)
{
LOGD("CameraHandler::setProperty(%d, %f)", propIdx, value);

android::String8 params_str;
params_str = camera->getParameters();
LOGI("Params before set: [%s]", params_str.string());

switch (propIdx)
{
case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
{
int w,h;
params.getPreviewSize(&w, &h);
w = (int)value;
params.setPreviewSize(w, h);
params->getPreviewSize(&w, &h);
width = (int)value;
}
break;
case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
{
int w,h;
params.getPreviewSize(&w, &h);
h = (int)value;
params.setPreviewSize(w, h);
params->getPreviewSize(&w, &h);
height = (int)value;
}
break;
case ANDROID_CAMERA_PROPERTY_EXPOSURE:
{
int max_exposure = params.getInt("max-exposure-compensation");
int min_exposure = params.getInt("min-exposure-compensation");
if(max_exposure && min_exposure){
int max_exposure = params->getInt("max-exposure-compensation");
int min_exposure = params->getInt("min-exposure-compensation");
if(max_exposure && min_exposure)
{
int exposure = (int)value;
if(exposure >= min_exposure && exposure <= max_exposure){
params.set("exposure-compensation", exposure);
} else {
if(exposure >= min_exposure && exposure <= max_exposure)
params->set("exposure-compensation", exposure);
else
LOGE("Exposure compensation not in valid range (%i,%i).", min_exposure, max_exposure);
}
} else {
} else
LOGE("Exposure compensation adjust is not supported.");
}

camera->setParameters(params->flatten());
}
break;
case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
{
int new_val = (int)value;
if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM){
if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM)
{
const char* mode_name = flashModesNames[new_val];
if(is_supported(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mode_name))
params.set(CameraParameters::KEY_FLASH_MODE, mode_name);
params->set(CameraParameters::KEY_FLASH_MODE, mode_name);
else
LOGE("Flash mode %s is not supported.", mode_name);
} else {
LOGE("Flash mode value not in valid range.");
}
else
LOGE("Flash mode value not in valid range.");

camera->setParameters(params->flatten());
}
break;
case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
{
int new_val = (int)value;
if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM){
if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM)
{
const char* mode_name = focusModesNames[new_val];
if(is_supported(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mode_name))
params.set(CameraParameters::KEY_FOCUS_MODE, mode_name);
params->set(CameraParameters::KEY_FOCUS_MODE, mode_name);
else
LOGE("Focus mode %s is not supported.", mode_name);
} else {
LOGE("Focus mode value not in valid range.");
}
else
LOGE("Focus mode value not in valid range.");

camera->setParameters(params->flatten());
}
break;
case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
{
int new_val = (int)value;
if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM){
if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM)
{
const char* mode_name = whiteBalanceModesNames[new_val];
if(is_supported(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mode_name))
params.set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
params->set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
else
LOGE("White balance mode %s is not supported.", mode_name);
} else {
LOGE("White balance mode value not in valid range.");
}
else
LOGE("White balance mode value not in valid range.");

camera->setParameters(params->flatten());
}
break;
case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
{
int new_val = (int)value;
if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM){
if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM)
{
const char* mode_name = antibandingModesNames[new_val];
if(is_supported(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mode_name))
params.set(CameraParameters::KEY_ANTIBANDING, mode_name);
params->set(CameraParameters::KEY_ANTIBANDING, mode_name);
else
LOGE("Antibanding mode %s is not supported.", mode_name);
} else {
LOGE("Antibanding mode value not in valid range.");
}
else
LOGE("Antibanding mode value not in valid range.");

camera->setParameters(params->flatten());
}
break;
#if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
case ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK:
{
if (is_supported(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true"))
{
if (value != 0)
params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::TRUE);
else
params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::FALSE);
LOGE("Expose lock is set");
}
else
LOGE("Expose lock is not supported");

camera->setParameters(params->flatten());
}
break;
case ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK:
{
if (is_supported(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true"))
{
if (value != 0)
params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::TRUE);
else
params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::FALSE);
LOGE("White balance lock is set");
}
else
LOGE("White balance lock is not supported");

camera->setParameters(params->flatten());
}
break;
#endif
default:
LOGW("CameraHandler::setProperty - Unsupported property.");
};

params_str = camera->getParameters();
LOGI("Params after set: [%s]", params_str.string());
}

void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
@@ -935,7 +1047,10 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
return;
}

CameraParameters curCameraParameters((*ppcameraHandler)->params.flatten());
// delayed resolution setup to exclude errors during other parameres setup on the fly
// without camera restart
if (((*ppcameraHandler)->width != 0) && ((*ppcameraHandler)->height != 0))
(*ppcameraHandler)->params->setPreviewSize((*ppcameraHandler)->width, (*ppcameraHandler)->height);

#if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) \
|| defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)

@@ -951,27 +1066,27 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
return;
}

handler->camera->setParameters(curCameraParameters.flatten());
handler->params.unflatten(curCameraParameters.flatten());
handler->camera->setParameters((*ppcameraHandler)->params->flatten());

status_t bufferStatus;
# if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
sp<SurfaceTexture> surfaceTexture = new SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
bufferStatus = handler->camera->setPreviewTexture(surfaceTexture);
void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
bufferStatus = handler->camera->setPreviewTexture(handler->surface);
if (bufferStatus != 0)
LOGE("applyProperties: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
# elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
sp<BufferQueue> bufferQueue = new BufferQueue();
sp<BufferQueue::ConsumerListener> queueListener = new ConsumerListenerStub();
bufferQueue->consumerConnect(queueListener);
bufferStatus = handler->camera->setPreviewTexture(bufferQueue);
void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
handler->queue = new(buffer_queue_obj) BufferQueue();
handler->queue->consumerConnect(handler->listener);
bufferStatus = handler->camera->setPreviewTexture(handler->queue);
if (bufferStatus != 0)
LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# elif defined(ANDROID_r4_4_0)
sp<BufferQueue> bufferQueue = new BufferQueue();
sp<IConsumerListener> queueListener = new ConsumerListenerStub();
bufferQueue->consumerConnect(queueListener, true);
bufferStatus = handler->camera->setPreviewTarget(bufferQueue);
void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
handler->queue = new(buffer_queue_obj) BufferQueue();
handler->queue->consumerConnect(handler->listener, true);
bufferStatus = handler->camera->setPreviewTarget(handler->queue);
if (bufferStatus != 0)
LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# endif

@@ -1002,7 +1117,7 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
LOGD("CameraHandler::applyProperties(): after previousCameraHandler->closeCameraConnect");

LOGD("CameraHandler::applyProperties(): before initCameraConnect");
CameraHandler* handler=initCameraConnect(cameraCallback, cameraId, userData, &curCameraParameters);
CameraHandler* handler=initCameraConnect(cameraCallback, cameraId, userData, (*ppcameraHandler)->params);
LOGD("CameraHandler::applyProperties(): after initCameraConnect, handler=0x%x", (int)handler);
if (handler == NULL) {
LOGE("ERROR in applyProperties --- cannot reinit camera");
@@ -15,7 +15,9 @@ enum {
ANDROID_CAMERA_PROPERTY_FOCAL_LENGTH = 105,
ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_NEAR = 106,
ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_OPTIMAL = 107,
ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_FAR = 108
ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_FAR = 108,
ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK = 109,
ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK = 110
};

@@ -30,12 +32,12 @@ enum {

enum {
ANDROID_CAMERA_FOCUS_MODE_AUTO = 0,
ANDROID_CAMERA_FOCUS_MODE_CONTINUOUS_PICTURE,
ANDROID_CAMERA_FOCUS_MODE_CONTINUOUS_VIDEO,
ANDROID_CAMERA_FOCUS_MODE_EDOF,
ANDROID_CAMERA_FOCUS_MODE_FIXED,
ANDROID_CAMERA_FOCUS_MODE_INFINITY,
ANDROID_CAMERA_FOCUS_MODE_MACRO,
ANDROID_CAMERA_FOCUS_MODE_CONTINUOUS_PICTURE,
ANDROID_CAMERA_FOCUS_MODES_NUM
};
@@ -4824,7 +4824,7 @@ cvRegisterType( const CvTypeInfo* _info )
"Type name should contain only letters, digits, - and _" );
}

info = (CvTypeInfo*)malloc( sizeof(*info) + len + 1 );
info = (CvTypeInfo*)cvAlloc( sizeof(*info) + len + 1 );

*info = *_info;
info->type_name = (char*)(info + 1);

@@ -4862,7 +4862,7 @@ cvUnregisterType( const char* type_name )
if( !CvType::first || !CvType::last )
CvType::first = CvType::last = 0;

free( info );
cvFree( &info );
}
}
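The point of this change is that memory must be released by the deallocator matching its allocator: the block is now both obtained with cvAlloc and returned with cvFree, instead of mixing malloc with cvFree. A small sketch of why mixing is dangerous, using a hypothetical allocator pair (myAlloc/myFree are illustrative, not OpenCV functions) that, like many custom allocators, hides a bookkeeping header in front of the payload:

    #include <cstdlib>
    #include <cstdio>

    // Allocation carries a size header, so releasing the payload pointer with
    // plain free() would pass the wrong address to the heap.
    void* myAlloc(size_t size)
    {
        size_t* block = (size_t*)std::malloc(sizeof(size_t) + size);
        if (!block) return NULL;
        block[0] = size;       // bookkeeping header
        return block + 1;      // caller sees only the payload
    }

    void myFree(void** ptr)    // mirrors cvFree's pointer-to-pointer style
    {
        if (ptr && *ptr)
        {
            std::free((size_t*)*ptr - 1);   // step back over the header
            *ptr = NULL;
        }
    }

    int main()
    {
        char* buf = (char*)myAlloc(64);     // must be released with myFree, not free()
        std::snprintf(buf, 64, "allocated through the matching pair");
        std::puts(buf);
        myFree((void**)&buf);
        return 0;
    }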
@@ -163,7 +163,7 @@ namespace cv { namespace cuda { namespace device
{
//need only weight if fit is found
float weight = alpha1 * gmm_weight(mode * frame.rows + y, x) + prune;

int swap_count = 0;
//fit not found yet
if (!fitsPDF)
{

@@ -214,6 +214,7 @@ namespace cv { namespace cuda { namespace device
if (weight < gmm_weight((i - 1) * frame.rows + y, x))
break;

swap_count++;
//swap one up
swap(gmm_weight, x, y, i - 1, frame.rows);
swap(gmm_variance, x, y, i - 1, frame.rows);

@@ -231,7 +232,7 @@ namespace cv { namespace cuda { namespace device
nmodes--;
}

gmm_weight(mode * frame.rows + y, x) = weight; //update weight by the calculated value
gmm_weight((mode - swap_count) * frame.rows + y, x) = weight; //update weight by the calculated value
totalWeight += weight;
}
@@ -758,10 +758,13 @@ private:

for (int k=0; k<indices_length; ++k) {
if (belongs_to[k]==j) {
belongs_to[k] = i;
count[j]--;
count[i]++;
break;
// for cluster j, we move the furthest element from the center to the empty cluster i
if ( distance_(dataset_[indices[k]], dcenters[j], veclen_) == radiuses[j] ) {
belongs_to[k] = i;
count[j]--;
count[i]++;
break;
}
}
}
converged = false;
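The fix seeds an empty cluster i with the member of cluster j that lies furthest from j's center (the point at the cluster radius) instead of whichever member happens to be found first. A generic, self-contained sketch of the same idea with plain Euclidean distance (names and containers are illustrative, not the FLANN API):

    #include <vector>
    #include <cstddef>

    // Return the index of the member of cluster j that is furthest from its
    // center; that point is a good seed for an empty cluster.
    size_t furthestMember(const std::vector<std::vector<double> >& points,
                          const std::vector<int>& belongs_to, int j,
                          const std::vector<double>& center_j)
    {
        size_t best = points.size();     // == points.size() if j has no members
        double bestDist = -1.0;
        for (size_t k = 0; k < points.size(); ++k)
        {
            if (belongs_to[k] != j) continue;
            double d = 0.0;
            for (size_t c = 0; c < center_j.size(); ++c)
            {
                double diff = points[k][c] - center_j[c];
                d += diff * diff;
            }
            if (d > bestDist) { bestDist = d; best = k; }
        }
        return best;
        // Caller then reassigns: belongs_to[best] = i; --count[j]; ++count[i];
    }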
@@ -483,7 +483,7 @@ VideoWriter constructors

:param filename: Name of the output video file.

:param fourcc: 4-character code of codec used to compress the frames. For example, ``CV_FOURCC('P','I','M,'1')`` is a MPEG-1 codec, ``CV_FOURCC('M','J','P','G')`` is a motion-jpeg codec etc. List of codes can be obtained at `Video Codecs by FOURCC <http://www.fourcc.org/codecs.php>`_ page.
:param fourcc: 4-character code of codec used to compress the frames. For example, ``CV_FOURCC('P','I','M','1')`` is a MPEG-1 codec, ``CV_FOURCC('M','J','P','G')`` is a motion-jpeg codec etc. List of codes can be obtained at `Video Codecs by FOURCC <http://www.fourcc.org/codecs.php>`_ page.

:param fps: Framerate of the created video stream.
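A short usage sketch of the corrected FOURCC spelling, following the documented constructor; the output file name and frame contents are placeholders, and the header layout assumed here is the pre-videoio one where VideoWriter still lives in highgui:

    #include <opencv2/core.hpp>
    #include <opencv2/highgui.hpp>

    int main()
    {
        // Motion-JPEG output at 25 fps, 640x480, colour frames.
        cv::VideoWriter writer("out.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0,
                               cv::Size(640, 480), true);
        if (!writer.isOpened())
            return -1;

        cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 128, 255));
        for (int i = 0; i < 25; ++i)
            writer << frame;      // write one second of identical frames
        return 0;
    }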
@@ -463,6 +463,8 @@ enum
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_NEAR = 8006,
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_OPTIMAL = 8007,
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_FAR = 8008,
CV_CAP_PROP_ANDROID_EXPOSE_LOCK = 8009,
CV_CAP_PROP_ANDROID_WHITEBALANCE_LOCK = 8010,

// Properties of cameras available through AVFOUNDATION interface
CV_CAP_PROP_IOS_DEVICE_FOCUS = 9001,

@@ -543,6 +545,7 @@ enum
enum
{
CV_CAP_ANDROID_FOCUS_MODE_AUTO = 0,
CV_CAP_ANDROID_FOCUS_MODE_CONTINUOUS_PICTURE,
CV_CAP_ANDROID_FOCUS_MODE_CONTINUOUS_VIDEO,
CV_CAP_ANDROID_FOCUS_MODE_EDOF,
CV_CAP_ANDROID_FOCUS_MODE_FIXED,
@@ -289,6 +289,10 @@ double CvCapture_Android::getProperty( int propIdx )
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_OPTIMAL);
case CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_FAR:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_FAR);
case CV_CAP_PROP_ANDROID_EXPOSE_LOCK:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK);
case CV_CAP_PROP_ANDROID_WHITEBALANCE_LOCK:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK);
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to GET unsupported camera property." );
break;

@@ -327,14 +331,23 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
case CV_CAP_PROP_ANDROID_ANTIBANDING:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_ANTIBANDING, propValue);
break;
case CV_CAP_PROP_ANDROID_EXPOSE_LOCK:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK, propValue);
break;
case CV_CAP_PROP_ANDROID_WHITEBALANCE_LOCK:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK, propValue);
break;
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." );
return false;
}

if (propIdx != CV_CAP_PROP_AUTOGRAB) {// property for highgui class CvCapture_Android only
// Only changes in frame size require camera restart
if ((propIdx == CV_CAP_PROP_FRAME_WIDTH) || (propIdx == CV_CAP_PROP_FRAME_HEIGHT))
{ // property for highgui class CvCapture_Android only
m_CameraParamsChanged = true;
}

res = true;
}
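With the new property IDs routed through CvCapture_Android, application code can toggle the locks through the usual VideoCapture property interface. A hedged sketch, meaningful only on an Android build with the native camera backend and the pre-videoio header layout assumed here:

    #include <opencv2/highgui.hpp>
    #include <cstdio>

    int main()
    {
        cv::VideoCapture cap(CV_CAP_ANDROID + 0);   // first Android camera
        if (!cap.isOpened())
            return -1;

        // Freeze auto-exposure and auto-white-balance once the scene is set up.
        cap.set(CV_CAP_PROP_ANDROID_EXPOSE_LOCK, 1);
        cap.set(CV_CAP_PROP_ANDROID_WHITEBALANCE_LOCK, 1);

        std::printf("expose lock: %f, wb lock: %f\n",
                    cap.get(CV_CAP_PROP_ANDROID_EXPOSE_LOCK),
                    cap.get(CV_CAP_PROP_ANDROID_WHITEBALANCE_LOCK));
        return 0;
    }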
@@ -175,7 +175,6 @@ public class NativeCameraView extends CameraBridgeViewBase {
}

deliverAndDrawFrame(mFrame);

} while (!mStopThread);
}
}
@@ -9,12 +9,13 @@
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__

#include "cvconfig.h"

#include "opencv2/ts.hpp"
#include "opencv2/nonfree.hpp"
#include "opencv2/highgui.hpp"

#include "opencv2/opencv_modules.hpp"
#include "cvconfig.h"

#ifdef HAVE_OPENCV_OCL
# include "opencv2/nonfree/ocl.hpp"
@@ -1,4 +1,4 @@
if(ANDROID OR IOS)
if(IOS)
ocv_module_disable(superres)
endif()
@@ -578,7 +578,7 @@ public:
for( int mode = 0; mode < nmodes; mode++, mean_m += nchannels )
{
float weight = alpha1*gmm[mode].weight + prune;//need only weight if fit is found

int swap_count = 0;
////
//fit not found yet
if( !fitsPDF )

@@ -643,6 +643,7 @@ public:
if( weight < gmm[i-1].weight )
break;

swap_count++;
//swap one up
std::swap(gmm[i], gmm[i-1]);
for( int c = 0; c < nchannels; c++ )

@@ -660,7 +661,7 @@ public:
nmodes--;
}

gmm[mode].weight = weight;//update weight by the calculated value
gmm[mode-swap_count].weight = weight;//update weight by the calculated value
totalWeight += weight;
}
//go through all modes

@@ -918,4 +919,4 @@ Ptr<BackgroundSubtractorMOG2> createBackgroundSubtractorMOG2(int _history, doubl

}

/* End of file. */
/* End of file. */
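In all three MOG2 implementations touched by this merge (CPU, CUDA and OpenCL), the just-updated mode is bubbled up so the mode list stays sorted by weight; after swap_count swaps the mode lives at index mode - swap_count, and that is the slot the recomputed weight has to be written to. A standalone sketch of that bookkeeping on a plain array:

    #include <algorithm>
    #include <cstdio>

    int main()
    {
        // Weights kept sorted in descending order, like the MOG2 mode list.
        float weight[5] = { 0.40f, 0.25f, 0.20f, 0.10f, 0.05f };
        int mode = 3;              // the mode that was just matched
        weight[mode] += 0.30f;     // its new, larger weight (0.40)

        int swap_count = 0;
        for (int i = mode; i > 0; --i)
        {
            if (weight[i] < weight[i - 1])
                break;                           // already in order
            std::swap(weight[i], weight[i - 1]); // bubble the mode one slot up
            ++swap_count;
        }

        // Any further write for this mode (as in the patched code) must use
        // index mode - swap_count, not the original index `mode`.
        std::printf("mode moved from index %d to index %d\n", mode, mode - swap_count);
        return 0;
    }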
@@ -102,7 +102,7 @@ __kernel void mog2_kernel(__global const uchar* frame, int frame_step, int frame
{

float c_weight = alpha1 * _weight[(mode * frame_row + y) * weight_step + x] + prune;

int swap_count = 0;
if (!fitsPDF)
{
float c_var = _variance[(mode * frame_row + y) * var_step + x];

@@ -132,6 +132,7 @@ __kernel void mog2_kernel(__global const uchar* frame, int frame_step, int frame
{
if (c_weight < _weight[((i - 1) * frame_row + y) * weight_step + x])
break;
swap_count++;
swap(_weight, x, y, i - 1, frame_row, weight_step);
swap(_variance, x, y, i - 1, frame_row, var_step);
#if (CN==1)

@@ -149,7 +150,7 @@ __kernel void mog2_kernel(__global const uchar* frame, int frame_step, int frame
nmodes--;
}

_weight[(mode * frame_row + y) * weight_step + x] = c_weight; //update weight by the calculated value
_weight[((mode - swap_count) * frame_row + y) * weight_step + x] = c_weight; //update weight by the calculated value
totalWeight += c_weight;
}
@@ -1050,7 +1050,7 @@ viz::WWidgetMerger::WWidgetMerger
---------------------------------------
Constructs a WWidgetMerger.

.. ocv:WWidgetMerger:: WWidgetMerger()
.. ocv:function:: WWidgetMerger()

viz::WWidgetMerger::addCloud
-------------------------------
@@ -40,13 +40,13 @@ int main( int argc, char** argv )

hsv_half_down = hsv_base( Range( hsv_base.rows/2, hsv_base.rows - 1 ), Range( 0, hsv_base.cols - 1 ) );

/// Using 30 bins for hue and 32 for saturation
/// Using 50 bins for hue and 60 for saturation
int h_bins = 50; int s_bins = 60;
int histSize[] = { h_bins, s_bins };

// hue varies from 0 to 256, saturation from 0 to 180
float s_ranges[] = { 0, 256 };
// hue varies from 0 to 179, saturation from 0 to 255
float h_ranges[] = { 0, 180 };
float s_ranges[] = { 0, 256 };

const float* ranges[] = { h_ranges, s_ranges };
@@ -49,8 +49,8 @@ if(BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND)

target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${OPENCV_CUDA_SAMPLES_REQUIRED_DEPS})

if(HAVE_CUDA)
target_link_libraries(${the_target} ${CUDA_CUDA_LIBRARY})
if(HAVE_CUDA AND NOT ANDROID)
target_link_libraries(${the_target} ${CUDA_CUDA_LIBRARY})
endif()

if(HAVE_opencv_nonfree)

@@ -1,7 +1,7 @@
#include <iostream>
#include <iomanip>
#include <string>
#include <cctype>
#include <ctype.h>

#include "opencv2/core.hpp"
#include "opencv2/core/utility.hpp"

@@ -7,6 +7,7 @@
#include <memory>
#include <exception>
#include <ctime>
#include <ctype.h>

#include "cvconfig.h"
#include <iostream>

@@ -1,6 +1,8 @@
#include <iostream>
#include <iomanip>
#include <string>
#include <ctype.h>

#include "opencv2/core.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/highgui.hpp"