merged all the latest changes from 2.4 to trunk

Vadim Pisarevsky
2012-04-13 21:50:59 +00:00
parent 020f9a6047
commit 2fd1e2ea57
416 changed files with 12852 additions and 6070 deletions

View File

@@ -52,19 +52,12 @@ set(grfmt_srcs src/bitstrm.cpp ${grfmt_srcs})
source_group("Src\\grfmts" FILES ${grfmt_hdrs} ${grfmt_srcs})
set(highgui_hdrs src/precomp.hpp src/utils.hpp)
if(NEW_FFMPEG)
set(highgui_srcs
src/cap.cpp
src/cap_images.cpp
src/cap_ffmpeg_v2.cpp
src/loadsave.cpp
src/precomp.cpp
src/utils.cpp
src/window.cpp
)
set(highgui_hdrs src/precomp.hpp src/utils.hpp src/cap_ffmpeg_impl_v2.hpp)
else()
set(highgui_hdrs src/precomp.hpp src/utils.hpp src/cap_ffmpeg_impl.hpp)
endif()
set(highgui_srcs
src/cap.cpp
src/cap_images.cpp
@@ -74,129 +67,144 @@ set(highgui_srcs
src/utils.cpp
src/window.cpp
)
endif()
file(GLOB highgui_ext_hdrs "include/opencv2/${name}/*.hpp" "include/opencv2/${name}/*.h")
#YV
if (HAVE_QT)
if (HAVE_QT_OPENGL)
set(QT_USE_QTOPENGL TRUE)
endif()
INCLUDE(${QT_USE_FILE})
SET(_RCCS_FILES src/window_QT.qrc)
QT4_ADD_RESOURCES(_RCC_OUTFILES ${_RCCS_FILES})
if (HAVE_QT_OPENGL)
set(QT_USE_QTOPENGL TRUE)
endif()
INCLUDE(${QT_USE_FILE})
SET(_MOC_HEADERS src/window_QT.h )
QT4_WRAP_CPP(_MOC_OUTFILES ${_MOC_HEADERS})
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} ${QT_LIBRARIES} ${QT_QTTEST_LIBRARY})
set(highgui_srcs ${highgui_srcs} src/window_QT.cpp ${_MOC_OUTFILES} ${_RCC_OUTFILES} )
SET(_RCCS_FILES src/window_QT.qrc)
QT4_ADD_RESOURCES(_RCC_OUTFILES ${_RCCS_FILES})
SET(_MOC_HEADERS src/window_QT.h )
QT4_WRAP_CPP(_MOC_OUTFILES ${_MOC_HEADERS})
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} ${QT_LIBRARIES} ${QT_QTTEST_LIBRARY})
set(highgui_srcs ${highgui_srcs} src/window_QT.cpp ${_MOC_OUTFILES} ${_RCC_OUTFILES} )
endif()
if(WIN32)
if(NOT HAVE_QT)
set(highgui_srcs ${highgui_srcs} src/window_w32.cpp)
endif()
set(highgui_srcs ${highgui_srcs} src/cap_vfw.cpp src/cap_cmu.cpp src/cap_dshow.cpp)
if(HAVE_MIL)
set(highgui_srcs ${highgui_srcs} src/cap_mil.cpp)
endif()
if(NOT HAVE_QT)
set(highgui_srcs ${highgui_srcs} src/window_w32.cpp)
endif()
set(highgui_srcs ${highgui_srcs} src/cap_vfw.cpp src/cap_cmu.cpp src/cap_dshow.cpp)
if(HAVE_MIL)
set(highgui_srcs ${highgui_srcs} src/cap_mil.cpp)
endif()
endif()
if(UNIX)
if(NOT HAVE_QT)
if(HAVE_GTK)
set(highgui_srcs ${highgui_srcs} src/window_gtk.cpp)
endif()
if(NOT HAVE_QT)
if(HAVE_GTK)
set(highgui_srcs ${highgui_srcs} src/window_gtk.cpp)
endif()
endif()
if(HAVE_XINE)
set(highgui_srcs ${highgui_srcs} src/cap_xine.cpp)
endif()
if(HAVE_XINE)
set(highgui_srcs ${highgui_srcs} src/cap_xine.cpp)
endif()
if(HAVE_DC1394_2)
set(highgui_srcs ${highgui_srcs} src/cap_dc1394_v2.cpp)
endif()
if(HAVE_DC1394_2)
set(highgui_srcs ${highgui_srcs} src/cap_dc1394_v2.cpp)
endif()
if(HAVE_DC1394)
set(highgui_srcs ${highgui_srcs} src/cap_dc1394.cpp)
endif()
if(HAVE_DC1394)
set(highgui_srcs ${highgui_srcs} src/cap_dc1394.cpp)
endif()
if(HAVE_FFMPEG)
if(BZIP2_LIBRARIES)
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} ${BZIP2_LIBRARIES})
endif()
if(HAVE_FFMPEG)
if(BZIP2_LIBRARIES)
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} ${BZIP2_LIBRARIES})
endif()
endif()
if(HAVE_PVAPI)
add_definitions(-DHAVE_PVAPI)
set(highgui_srcs src/cap_pvapi.cpp ${highgui_srcs})
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} PvAPI)
if(HAVE_PVAPI)
add_definitions(-DHAVE_PVAPI)
ocv_include_directories(${PVAPI_INCLUDE_PATH})
if(X86)
set(PVAPI_SDK_SUBDIR x86)
elseif(X86_64)
set(PVAPI_SDK_SUBDIR x64)
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES arm)
set(PVAPI_SDK_SUBDIR arm)
endif()
if(HAVE_GSTREAMER)
set(highgui_srcs ${highgui_srcs} src/cap_gstreamer.cpp)
if(PVAPI_SDK_SUBDIR AND CMAKE_COMPILER_IS_GNUCXX)
get_filename_component(PVAPI_EXPECTED_LIB_PATH "${PVAPI_INCLUDE_PATH}/../lib-pc/${PVAPI_SDK_SUBDIR}/${CMAKE_OPENCV_GCC_VERSION_MAJOR}.${CMAKE_OPENCV_GCC_VERSION_MINOR}" ABSOLUTE)
link_directories(${PVAPI_EXPECTED_LIB_PATH})
endif()
set(highgui_srcs src/cap_pvapi.cpp ${highgui_srcs})
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} PvAPI)
endif()
if(HAVE_UNICAP)
set(highgui_srcs ${highgui_srcs} src/cap_unicap.cpp)
if(HAVE_GSTREAMER)
set(highgui_srcs ${highgui_srcs} src/cap_gstreamer.cpp)
endif()
if(HAVE_UNICAP)
set(highgui_srcs ${highgui_srcs} src/cap_unicap.cpp)
endif()
if(HAVE_LIBV4L)
set(highgui_srcs ${highgui_srcs} src/cap_libv4l.cpp)
else()
if(HAVE_CAMV4L OR HAVE_CAMV4L2)
set(highgui_srcs ${highgui_srcs} src/cap_v4l.cpp)
endif()
endif()
if(HAVE_LIBV4L)
set(highgui_srcs ${highgui_srcs} src/cap_libv4l.cpp)
else()
if(HAVE_CAMV4L OR HAVE_CAMV4L2)
set(highgui_srcs ${highgui_srcs} src/cap_v4l.cpp)
endif()
endif()
foreach(P ${HIGHGUI_INCLUDE_DIRS})
ocv_include_directories(${P})
endforeach()
foreach(P ${HIGHGUI_INCLUDE_DIRS})
ocv_include_directories(${P})
endforeach()
foreach(P ${HIGHGUI_LIBRARY_DIRS})
link_directories(${P})
endforeach()
foreach(P ${HIGHGUI_LIBRARY_DIRS})
link_directories(${P})
endforeach()
endif()
#OpenNI
if(WITH_OPENNI AND HAVE_OPENNI)
set(highgui_srcs ${highgui_srcs} src/cap_openni.cpp)
ocv_include_directories(${OPENNI_INCLUDE_DIR})
set(highgui_srcs ${highgui_srcs} src/cap_openni.cpp)
ocv_include_directories(${OPENNI_INCLUDE_DIR})
endif()
#YV
if(APPLE)
if (NOT IOS)
add_definitions(-DHAVE_QUICKTIME=1)
endif()
if (NOT IOS)
add_definitions(-DHAVE_QUICKTIME=1)
endif()
if(NOT OPENCV_BUILD_3RDPARTY_LIBS)
add_definitions(-DHAVE_IMAGEIO=1)
endif()
if(NOT OPENCV_BUILD_3RDPARTY_LIBS)
add_definitions(-DHAVE_IMAGEIO=1)
endif()
if (NOT HAVE_QT)
if(WITH_CARBON)
add_definitions(-DHAVE_CARBON=1)
set(highgui_srcs ${highgui_srcs} src/window_carbon.cpp)
else()
add_definitions(-DHAVE_COCOA=1)
set(highgui_srcs ${highgui_srcs} src/window_cocoa.mm)
endif()
endif()
if(WITH_QUICKTIME)
set(highgui_srcs ${highgui_srcs} src/cap_qt.cpp)
if (NOT HAVE_QT)
if(WITH_CARBON)
add_definitions(-DHAVE_CARBON=1)
set(highgui_srcs ${highgui_srcs} src/window_carbon.cpp)
else()
if (WITH_AVFOUNDATION)
add_definitions(-DHAVE_AVFOUNDATION=1)
set(highgui_srcs ${highgui_srcs} src/cap_avfoundation.mm)
else()
set(highgui_srcs ${highgui_srcs} src/cap_qtkit.mm)
endif()
add_definitions(-DHAVE_COCOA=1)
set(highgui_srcs ${highgui_srcs} src/window_cocoa.mm)
endif()
endif()
if(WITH_QUICKTIME)
set(highgui_srcs ${highgui_srcs} src/cap_qt.cpp)
else()
if(WITH_AVFOUNDATION)
add_definitions(-DHAVE_AVFOUNDATION=1)
set(highgui_srcs ${highgui_srcs} src/cap_avfoundation.mm)
else()
set(highgui_srcs ${highgui_srcs} src/cap_qtkit.mm)
endif()
endif()
if(HAVE_FFMPEG)
set(HIGHGUI_LIBRARIES ${HIGHGUI_LIBRARIES} "-framework VideoDecodeAcceleration")
endif()
endif(APPLE)
if(HAVE_opencv_androidcamera)
@@ -234,14 +242,13 @@ if(IOS)
endif()
if(WIN32)
link_directories("${CMAKE_CURRENT_SOURCE_DIR}/../../3rdparty/lib")
include_directories(AFTER "${CMAKE_CURRENT_SOURCE_DIR}/../../3rdparty/include") #for directshow
link_directories("${OpenCV_SOURCE_DIR}/3rdparty/lib")
include_directories(AFTER "${OpenCV_SOURCE_DIR}/3rdparty/include") #for directshow
endif()
source_group("Src" FILES ${highgui_srcs} ${highgui_hdrs})
source_group("Include" FILES ${highgui_ext_hdrs})
ocv_set_module_sources(HEADERS ${highgui_ext_hdrs} SOURCES ${highgui_srcs} ${highgui_hdrs} ${grfmt_srcs} ${grfmt_hdrs})
ocv_module_include_directories()
ocv_create_module(${GRFMT_LIBS} ${HIGHGUI_LIBRARIES})
@@ -259,11 +266,11 @@ set_target_properties(${the_module} PROPERTIES LINK_INTERFACE_LIBRARIES "")
ocv_add_precompiled_headers(${the_module})
if(CMAKE_COMPILER_IS_GNUCXX)
if(CMAKE_COMPILER_IS_GNUCXX AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-declarations")
endif()
if(WIN32)
if(WIN32 AND WITH_FFMPEG)
#copy ffmpeg dll to the output folder
if(MSVC64 OR MINGW64)
set(FFMPEG_SUFFIX _64)

View File

@@ -8,7 +8,7 @@ applications and can be used within functionally rich UI frameworks (such as Qt*
It provides easy interface to:
* Create and manipulate windows that can display images and "remember" their content (no need to handle repaint events from OS).
* Add trackbars to the windows, handle simple mouse events as well as keyboard commmands.
* Add trackbars to the windows, handle simple mouse events as well as keyboard commands.
* Read and write images to/from disk or memory.
* Read video from camera or file and write video to a file.
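For orientation, here is a minimal sketch (not part of this commit) exercising the calls listed above; the window name and image path are placeholders.

#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::Mat img = cv::imread("example.png");   // read an image from disk
    if (img.empty())
        return 1;
    cv::namedWindow("preview");                // window "remembers" its content, no repaint handling needed
    cv::imshow("preview", img);                // display the image
    cv::waitKey(0);                            // process events until a key is pressed
    return 0;
}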

View File

@@ -105,7 +105,7 @@ Provides parameters of a window.
:param name: Name of the window.
:param prop_id: Window property to retrive. The following operation flags are available:
:param prop_id: Window property to retrieve. The following operation flags are available:
* **CV_WND_PROP_FULLSCREEN** Change if the window is fullscreen ( ``CV_WINDOW_NORMAL`` or ``CV_WINDOW_FULLSCREEN`` ).
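As an illustration only (this snippet is not in the diff), querying and toggling the fullscreen property might look like the following; the window name "view" is an assumption.

#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::namedWindow("view", CV_WINDOW_NORMAL);
    double mode = cv::getWindowProperty("view", CV_WND_PROP_FULLSCREEN);
    if (mode == CV_WINDOW_NORMAL)              // not fullscreen yet
        cv::setWindowProperty("view", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
    cv::waitKey(0);
    return 0;
}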
@@ -224,11 +224,11 @@ Displays a text on the window statusbar during the specified period of time.
The function ``displayOverlay`` displays useful information/tips on top of the window for a certain amount of time
*delayms*
. This information is displayed on the window statubar (the window must be created with the ``CV_GUI_EXPANDED`` flags).
. This information is displayed on the window statusbar (the window must be created with the ``CV_GUI_EXPANDED`` flags).
createOpenGLCallback
------------------------
Creates a callback function called to draw OpenGL on top the the image display by ``windowname``.
Creates a callback function called to draw OpenGL on top the image display by ``windowname``.
.. ocv:function:: void createOpenGLCallback( const string& window_name, OpenGLCallback callbackOpenGL, void* userdata =NULL, double angle=-1, double zmin=-1, double zmax=-1)

View File

@@ -128,6 +128,51 @@ The function ``imwrite`` saves the image to the specified file. The image format
:ocv:func:`Mat::convertTo` , and
:ocv:func:`cvtColor` to convert it before saving. Or, use the universal XML I/O functions to save the image to XML or YAML format.
It is possible to store PNG images with an alpha channel using this function. To do this, create 8-bit (or 16-bit) 4-channel image BGRA, where the alpha channel goes last. Fully transparent pixels should have alpha set to 0, fully opaque pixels should have alpha set to 255/65535. The sample below shows how to create such a BGRA image and store to PNG file. It also demonstrates how to set custom compression parameters ::
#include <vector>
#include <stdio.h>
#include <opencv2/opencv.hpp>
using namespace cv;
using namespace std;
void createAlphaMat(Mat &mat)
{
for (int i = 0; i < mat.rows; ++i) {
for (int j = 0; j < mat.cols; ++j) {
Vec4b& rgba = mat.at<Vec4b>(i, j);
rgba[0] = UCHAR_MAX;
rgba[1] = saturate_cast<uchar>((float (mat.cols - j)) / ((float)mat.cols) * UCHAR_MAX);
rgba[2] = saturate_cast<uchar>((float (mat.rows - i)) / ((float)mat.rows) * UCHAR_MAX);
rgba[3] = saturate_cast<uchar>(0.5 * (rgba[1] + rgba[2]));
}
}
}
int main(int argv, char **argc)
{
// Create mat with alpha channel
Mat mat(480, 640, CV_8UC4);
createAlphaMat(mat);
vector<int> compression_params;
compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
compression_params.push_back(9);
try {
imwrite("alpha.png", mat, compression_params);
}
catch (runtime_error& ex) {
fprintf(stderr, "Exception converting image to PNG format: %s\n", ex.what());
return 1;
}
fprintf(stdout, "Saved PNG file with alpha data.\n");
return 0;
}
VideoCapture
------------
.. ocv:class:: VideoCapture
@@ -264,7 +309,7 @@ Decodes and returns the grabbed video frame.
.. ocv:pyoldfunction:: cv.RetrieveFrame(capture) -> iplimage
The methods/functions decode and retruen the just grabbed frame. If no frames has been grabbed (camera has been disconnected, or there are no more frames in video file), the methods return false and the functions return NULL pointer.
The methods/functions decode and return the just grabbed frame. If no frames has been grabbed (camera has been disconnected, or there are no more frames in video file), the methods return false and the functions return NULL pointer.
.. note:: OpenCV 1.x functions ``cvRetrieveFrame`` and ``cv.RetrieveFrame`` return image stored inside the video capturing structure. It is not allowed to modify or release the image! You can copy the frame using :ocv:cfunc:`cvCloneImage` and then do whatever you want with the copy.
@@ -283,7 +328,7 @@ Grabs, decodes and returns the next video frame.
.. ocv:pyoldfunction:: cv.QueryFrame(capture) -> iplimage
The methods/functions combine :ocv:func:`VideoCapture::grab` and :ocv:func:`VideoCapture::retrieve` in one call. This is the most convenient method for reading video files or capturing data from decode and retruen the just grabbed frame. If no frames has been grabbed (camera has been disconnected, or there are no more frames in video file), the methods return false and the functions return NULL pointer.
The methods/functions combine :ocv:func:`VideoCapture::grab` and :ocv:func:`VideoCapture::retrieve` in one call. This is the most convenient method for reading video files or capturing data from decode and return the just grabbed frame. If no frames has been grabbed (camera has been disconnected, or there are no more frames in video file), the methods return false and the functions return NULL pointer.
.. note:: OpenCV 1.x functions ``cvRetrieveFrame`` and ``cv.RetrieveFrame`` return image stored inside the video capturing structure. It is not allowed to modify or release the image! You can copy the frame using :ocv:cfunc:`cvCloneImage` and then do whatever you want with the copy.
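The grab/decode behaviour described above boils down to a loop like this sketch (illustrative only, not part of the commit; the file name is a placeholder):

#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::VideoCapture cap("video.avi");
    if (!cap.isOpened())
        return 1;
    cv::Mat frame;
    while (cap.read(frame))        // grab() + retrieve(); returns false when no frame is available
    {
        cv::imshow("frame", frame);
        if (cv::waitKey(30) >= 0)
            break;
    }
    return 0;
}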

View File

@@ -24,7 +24,7 @@ Creates a trackbar and attaches it to the specified window.
:param userdata: User data that is passed as is to the callback. It can be used to handle trackbar events without using global variables.
The function ``createTrackbar`` creates a trackbar (a slider or range control) with the specified name and range, assigns a variable ``value`` to be a position syncronized with the trackbar and specifies the callback function ``onChange`` to be called on the trackbar position change. The created trackbar is displayed in the specified window ``winname``.
The function ``createTrackbar`` creates a trackbar (a slider or range control) with the specified name and range, assigns a variable ``value`` to be a position synchronized with the trackbar and specifies the callback function ``onChange`` to be called on the trackbar position change. The created trackbar is displayed in the specified window ``winname``.
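A hedged sketch of the wiring described above (not part of this commit; the callback, window, and trackbar names are illustrative):

#include <opencv2/highgui/highgui.hpp>

static void onThreshold(int pos, void*)
{
    // react to the new slider position here
    (void)pos;
}

int main()
{
    int value = 128;               // variable kept in sync with the trackbar position
    cv::namedWindow("controls");
    cv::createTrackbar("thresh", "controls", &value, 255, onThreshold);
    cv::waitKey(0);
    return 0;
}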
.. note::

View File

@@ -232,8 +232,9 @@ public:
virtual ~VideoWriter();
CV_WRAP virtual bool open(const string& filename, int fourcc, double fps,
Size frameSize, bool isColor=true);
Size frameSize, bool isColor=true);
CV_WRAP virtual bool isOpened() const;
CV_WRAP virtual void release();
virtual VideoWriter& operator << (const Mat& image);
CV_WRAP virtual void write(const Mat& image);
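For context, a minimal VideoWriter round trip might look like the sketch below (assumptions: an XVID encoder is available, and the file name and frame size are arbitrary; this code is not part of the diff):

#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::VideoWriter writer("out.avi", CV_FOURCC('X','V','I','D'), 25, cv::Size(640, 480));
    if (!writer.isOpened())
        return 1;
    cv::Mat frame(480, 640, CV_8UC3, cv::Scalar::all(0));
    for (int i = 0; i < 25; ++i)
        writer << frame;           // equivalent to writer.write(frame)
    writer.release();              // flush and close; also invoked by the destructor
    return 0;
}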

View File

@@ -436,6 +436,16 @@ enum
CV_CAP_PROP_XI_AEAG_LEVEL = 419, // Average intensity of output signal AEAG should achieve(in %)
CV_CAP_PROP_XI_TIMEOUT = 420, // Image capture timeout in milliseconds
// Properties for Android cameras
CV_CAP_PROP_ANDROID_FLASH_MODE = 8001,
CV_CAP_PROP_ANDROID_FOCUS_MODE = 8002,
CV_CAP_PROP_ANDROID_WHITE_BALANCE = 8003,
CV_CAP_PROP_ANDROID_ANTIBANDING = 8004,
CV_CAP_PROP_ANDROID_FOCAL_LENGTH = 8005,
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_NEAR = 8006,
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_OPTIMAL = 8007,
CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_FAR = 8008,
// Properties of cameras available through AVFOUNDATION interface
CV_CAP_PROP_IOS_DEVICE_FOCUS = 9001,
CV_CAP_PROP_IOS_DEVICE_EXPOSURE = 9002,
@@ -477,6 +487,45 @@ enum
CV_CAP_ANDROID_COLOR_FRAME_RGBA = 4
};
// supported Android camera flash modes
enum {
CV_CAP_ANDROID_FLASH_MODE_AUTO = 0,
CV_CAP_ANDROID_FLASH_MODE_OFF,
CV_CAP_ANDROID_FLASH_MODE_ON,
CV_CAP_ANDROID_FLASH_MODE_RED_EYE,
CV_CAP_ANDROID_FLASH_MODE_TORCH
};
// supported Android camera focus modes
enum {
CV_CAP_ANDROID_FOCUS_MODE_AUTO = 0,
CV_CAP_ANDROID_FOCUS_MODE_CONTINUOUS_VIDEO,
CV_CAP_ANDROID_FOCUS_MODE_EDOF,
CV_CAP_ANDROID_FOCUS_MODE_FIXED,
CV_CAP_ANDROID_FOCUS_MODE_INFINITY,
CV_CAP_ANDROID_FOCUS_MODE_MACRO
};
// supported Android camera white balance modes
enum {
CV_CAP_ANDROID_WHITE_BALANCE_AUTO = 0,
CV_CAP_ANDROID_WHITE_BALANCE_CLOUDY_DAYLIGHT,
CV_CAP_ANDROID_WHITE_BALANCE_DAYLIGHT,
CV_CAP_ANDROID_WHITE_BALANCE_FLUORESCENT,
CV_CAP_ANDROID_WHITE_BALANCE_INCANDESCENT,
CV_CAP_ANDROID_WHITE_BALANCE_SHADE,
CV_CAP_ANDROID_WHITE_BALANCE_TWILIGHT,
CV_CAP_ANDROID_WHITE_BALANCE_WARM_FLUORESCENT
};
// supported Android camera antibanding modes
enum {
CV_CAP_ANDROID_ANTIBANDING_50HZ = 0,
CV_CAP_ANDROID_ANTIBANDING_60HZ,
CV_CAP_ANDROID_ANTIBANDING_AUTO,
CV_CAP_ANDROID_ANTIBANDING_OFF
};
/* retrieve or set capture properties */
CVAPI(double) cvGetCaptureProperty( CvCapture* capture, int property_id );
CVAPI(int) cvSetCaptureProperty( CvCapture* capture, int property_id, double value );
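Purely as an illustration (not in this commit), the new Android properties would be driven through the C capture API roughly like this; the chosen modes are arbitrary and error handling is omitted:

#include <opencv2/highgui/highgui.hpp>

int main()
{
    CvCapture* cap = cvCreateCameraCapture(CV_CAP_ANDROID);   // first Android camera
    if (!cap)
        return 1;
    cvSetCaptureProperty(cap, CV_CAP_PROP_ANDROID_FLASH_MODE,
                         CV_CAP_ANDROID_FLASH_MODE_TORCH);
    double focus = cvGetCaptureProperty(cap, CV_CAP_PROP_ANDROID_FOCUS_MODE);
    (void)focus;
    cvReleaseCapture(&cap);
    return 0;
}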

View File

@@ -0,0 +1,27 @@
#include "perf_precomp.hpp"
using namespace std;
using namespace cv;
using namespace perf;
using std::tr1::make_tuple;
using std::tr1::get;
typedef std::tr1::tuple<String, bool> VideoCapture_Reading_t;
typedef perf::TestBaseWithParam<VideoCapture_Reading_t> VideoCapture_Reading;
PERF_TEST_P(VideoCapture_Reading, ReadFile,
testing::Combine( testing::Values( "highgui/video/big_buck_bunny.avi",
"highgui/video/big_buck_bunny.mov",
"highgui/video/big_buck_bunny.mp4",
"highgui/video/big_buck_bunny.mpg",
"highgui/video/big_buck_bunny.wmv" ),
testing::Values(true, true, true, true, true) ))
{
string filename = getDataPath(get<0>(GetParam()));
VideoCapture cap;
TEST_CYCLE() cap.open(filename);
SANITY_CHECK(cap.isOpened());
}

View File

@@ -0,0 +1,3 @@
#include "perf_precomp.hpp"
CV_PERF_TEST_MAIN(highgui)

View File

@@ -0,0 +1,29 @@
#include "perf_precomp.hpp"
using namespace std;
using namespace cv;
using namespace perf;
using std::tr1::make_tuple;
using std::tr1::get;
typedef std::tr1::tuple<String, bool> VideoWriter_Writing_t;
typedef perf::TestBaseWithParam<VideoWriter_Writing_t> VideoWriter_Writing;
PERF_TEST_P(VideoWriter_Writing, WriteFrame,
testing::Combine( testing::Values( "python/images/QCIF_00.bmp",
"python/images/QCIF_01.bmp",
"python/images/QCIF_02.bmp",
"python/images/QCIF_03.bmp",
"python/images/QCIF_04.bmp",
"python/images/QCIF_05.bmp" ),
testing::Bool()))
{
string filename = getDataPath(get<0>(GetParam()));
bool isColor = get<1>(GetParam());
VideoWriter writer("perf_writer.avi", CV_FOURCC('X', 'V', 'I', 'D'), 25, cv::Size(640, 480), isColor);
TEST_CYCLE() { Mat image = imread(filename, 1); writer << image; }
SANITY_CHECK(writer.isOpened());
}

View File

@@ -0,0 +1 @@
#include "perf_precomp.hpp"

View File

@@ -0,0 +1,11 @@
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__
#include "opencv2/ts/ts.hpp"
#include "opencv2/highgui/highgui.hpp"
#if GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif
#endif

View File

@@ -493,9 +493,14 @@ VideoWriter::VideoWriter(const string& filename, int fourcc, double fps, Size fr
open(filename, fourcc, fps, frameSize, isColor);
}
VideoWriter::~VideoWriter()
void VideoWriter::release()
{
writer.release();
}
VideoWriter::~VideoWriter()
{
release();
}
bool VideoWriter::open(const string& filename, int fourcc, double fps, Size frameSize, bool isColor)

View File

@@ -263,11 +263,30 @@ double CvCapture_Android::getProperty( int propIdx )
return (double)m_activity->getFrameWidth();
case CV_CAP_PROP_FRAME_HEIGHT:
return (double)m_activity->getFrameHeight();
case CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING);
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING);
case CV_CAP_PROP_PREVIEW_FORMAT:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING);
case CV_CAP_PROP_FPS:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FPS);
case CV_CAP_PROP_EXPOSURE:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_EXPOSURE);
case CV_CAP_PROP_ANDROID_FLASH_MODE:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FLASH_MODE);
case CV_CAP_PROP_ANDROID_FOCUS_MODE:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_MODE);
case CV_CAP_PROP_ANDROID_WHITE_BALANCE:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_WHITE_BALANCE);
case CV_CAP_PROP_ANDROID_ANTIBANDING:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_ANTIBANDING);
case CV_CAP_PROP_ANDROID_FOCAL_LENGTH:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCAL_LENGTH);
case CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_NEAR:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_NEAR);
case CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_OPTIMAL:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_OPTIMAL);
case CV_CAP_PROP_ANDROID_FOCUS_DISTANCE_FAR:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_FAR);
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to GET unsupported camera property." );
break;
@@ -288,11 +307,24 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
case CV_CAP_PROP_FRAME_HEIGHT:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT, propValue);
break;
case CV_CAP_PROP_AUTOGRAB:
m_shouldAutoGrab=(propValue != 0);
break;
case CV_CAP_PROP_EXPOSURE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_EXPOSURE, propValue);
break;
case CV_CAP_PROP_ANDROID_FLASH_MODE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FLASH_MODE, propValue);
break;
case CV_CAP_PROP_ANDROID_FOCUS_MODE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FOCUS_MODE, propValue);
break;
case CV_CAP_PROP_ANDROID_WHITE_BALANCE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_WHITE_BALANCE, propValue);
break;
case CV_CAP_PROP_ANDROID_ANTIBANDING:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_ANTIBANDING, propValue);
break;
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." );
return false;

View File

@@ -207,6 +207,7 @@ DEFINE_GUID(MEDIASUBTYPE_RGB24,0xe436eb7d,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf
DEFINE_GUID(MEDIASUBTYPE_RGB32,0xe436eb7e,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_RGB555,0xe436eb7c,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_RGB565,0xe436eb7b,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_I420,0x49343230,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_UYVY,0x59565955,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_Y211,0x31313259,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_Y411,0x31313459,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
@@ -339,7 +340,7 @@ static bool verbose = true;
//videoInput defines
#define VI_VERSION 0.1995
#define VI_MAX_CAMERAS 20
#define VI_NUM_TYPES 19 //MGB
#define VI_NUM_TYPES 20 //MGB
#define VI_NUM_FORMATS 18 //DON'T TOUCH
//defines for setPhyCon - tuner is not as well supported as composite and s-video
@@ -1066,6 +1067,7 @@ videoInput::videoInput(){
mediaSubtypes[16] = MEDIASUBTYPE_Y800;
mediaSubtypes[17] = MEDIASUBTYPE_Y8;
mediaSubtypes[18] = MEDIASUBTYPE_GREY;
mediaSubtypes[19] = MEDIASUBTYPE_I420;
//The video formats we support
formatTypes[VI_NTSC_M] = AnalogVideo_NTSC_M;
@@ -2171,6 +2173,7 @@ void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){
else if(type == MEDIASUBTYPE_Y800) sprintf(tmpStr, "Y800");
else if(type == MEDIASUBTYPE_Y8) sprintf(tmpStr, "Y8");
else if(type == MEDIASUBTYPE_GREY) sprintf(tmpStr, "GREY");
else if(type == MEDIASUBTYPE_I420) sprintf(tmpStr, "I420");
else sprintf(tmpStr, "OTHER");
memcpy(typeAsString, tmpStr, sizeof(char)*8);
@@ -3279,8 +3282,7 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
{
// image capture properties
bool handled = false;
switch( property_id )
switch( property_id )
{
case CV_CAP_PROP_FRAME_WIDTH:
width = cvRound(value);
@@ -3302,8 +3304,13 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
break;
case CV_CAP_PROP_FPS:
VI.setIdealFramerate(index,cvRound(value));
handled = true;
int fps = cvRound(value);
if (fps != VI.getFPS(0))
{
VI.stopDevice(index);
VI.setIdealFramerate(index,fps);
VI.setupDevice(index);
}
break;
}
@@ -3311,16 +3318,18 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
if ( handled ) {
// a stream setting
if( width > 0 && height > 0 )
{
if( width != VI.getWidth(index) || height != VI.getHeight(index) )//|| fourcc != VI.getFourcc(index) )
{
if( width != VI.getWidth(index) || height != VI.getHeight(index) ) //|| fourcc != VI.getFourcc(index) )
{
int fps = static_cast<int>(VI.getFPS(index));
VI.stopDevice(index);
VI.setupDeviceFourcc(index, width, height,fourcc);
VI.setIdealFramerate(index, fps);
VI.setupDeviceFourcc(index, width, height, fourcc);
}
width = height = fourcc = -1;
return VI.isDeviceSetup(index);
}
return true;
return true;
}
// show video/camera filter dialog

View File

@@ -42,7 +42,11 @@
#include "precomp.hpp"
#ifdef HAVE_FFMPEG
#ifdef NEW_FFMPEG
#include "cap_ffmpeg_impl_v2.hpp"
#else
#include "cap_ffmpeg_impl.hpp"
#endif
#else
#include "cap_ffmpeg_api.hpp"
#endif

View File

@@ -245,7 +245,7 @@ void CvCapture_FFMPEG::init()
void CvCapture_FFMPEG::close()
{
if( picture )
av_free(picture);
av_free(picture);
if( video_st )
{
@@ -316,13 +316,13 @@ bool CvCapture_FFMPEG::reopen()
}
#ifndef AVSEEK_FLAG_FRAME
#define AVSEEK_FLAG_FRAME 0
#define AVSEEK_FLAG_FRAME 0
#endif
#ifndef AVSEEK_FLAG_ANY
#define AVSEEK_FLAG_ANY 1
#define AVSEEK_FLAG_ANY 1
#endif
#ifndef SHORTER_DISTANCE_FOR_SEEK_TO_MAKE_IT_FASTER
#define SHORTER_DISTANCE_FOR_SEEK_TO_MAKE_IT_FASTER 25
#define SHORTER_DISTANCE_FOR_SEEK_TO_MAKE_IT_FASTER 25
#endif
bool CvCapture_FFMPEG::open( const char* _filename )
@@ -358,24 +358,24 @@ bool CvCapture_FFMPEG::open( const char* _filename )
avcodec_thread_init(enc, get_number_of_cpus());
#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(53, 2, 0)
#define AVMEDIA_TYPE_VIDEO CODEC_TYPE_VIDEO
#endif
#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(53, 2, 0)
#define AVMEDIA_TYPE_VIDEO CODEC_TYPE_VIDEO
#endif
if( AVMEDIA_TYPE_VIDEO == enc->codec_type && video_stream < 0) {
AVCodec *codec = avcodec_find_decoder(enc->codec_id);
if (!codec ||
avcodec_open(enc, codec) < 0)
goto exit_func;
avcodec_open(enc, codec) < 0)
goto exit_func;
video_stream = i;
video_st = ic->streams[i];
picture = avcodec_alloc_frame();
rgb_picture.data[0] = (uint8_t*)malloc(
avpicture_get_size( PIX_FMT_BGR24,
enc->width, enc->height ));
avpicture_get_size( PIX_FMT_BGR24,
enc->width, enc->height ));
avpicture_fill( (AVPicture*)&rgb_picture, rgb_picture.data[0],
PIX_FMT_BGR24, enc->width, enc->height );
PIX_FMT_BGR24, enc->width, enc->height );
frame.width = enc->width;
frame.height = enc->height;
@@ -406,7 +406,7 @@ bool CvCapture_FFMPEG::open( const char* _filename )
int flags = AVSEEK_FLAG_FRAME | AVSEEK_FLAG_BACKWARD;
av_seek_frame(ic, video_stream, ts, flags);
}
exit_func:
exit_func:
if( !valid )
close();
@@ -445,22 +445,22 @@ bool CvCapture_FFMPEG::grabFrame()
break;
if( packet.stream_index != video_stream ) {
av_free_packet (&packet);
continue;
}
av_free_packet (&packet);
continue;
}
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 2, 0)
avcodec_decode_video2(video_st->codec, picture, &got_picture, &packet);
avcodec_decode_video2(video_st->codec, picture, &got_picture, &packet);
#else
#if LIBAVFORMAT_BUILD > 4628
avcodec_decode_video(video_st->codec,
picture, &got_picture,
packet.data, packet.size);
#else
avcodec_decode_video(&video_st->codec,
picture, &got_picture,
packet.data, packet.size);
#endif
#if LIBAVFORMAT_BUILD > 4628
avcodec_decode_video(video_st->codec,
picture, &got_picture,
packet.data, packet.size);
#else
avcodec_decode_video(&video_st->codec,
picture, &got_picture,
packet.data, packet.size);
#endif
#endif
if (got_picture) {
@@ -496,18 +496,18 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
#endif
#else
img_convert_ctx = sws_getContext(video_st->codec->width,
video_st->codec->height,
video_st->codec->pix_fmt,
video_st->codec->width,
video_st->codec->height,
PIX_FMT_BGR24,
SWS_BICUBIC,
NULL, NULL, NULL);
video_st->codec->height,
video_st->codec->pix_fmt,
video_st->codec->width,
video_st->codec->height,
PIX_FMT_BGR24,
SWS_BICUBIC,
NULL, NULL, NULL);
sws_scale(img_convert_ctx, picture->data,
picture->linesize, 0,
video_st->codec->height,
rgb_picture.data, rgb_picture.linesize);
sws_scale(img_convert_ctx, picture->data,
picture->linesize, 0,
video_st->codec->height,
rgb_picture.data, rgb_picture.linesize);
sws_freeContext(img_convert_ctx);
#endif
*data = frame.data;
@@ -554,31 +554,43 @@ double CvCapture_FFMPEG::getProperty( int property_id )
if(video_st->cur_dts != AV_NOPTS_VALUE_ && video_st->duration != AV_NOPTS_VALUE_)
return(((video_st->cur_dts-video_st->first_dts)+(1.0/frameScale)) / (double)video_st->duration);
break;
case CV_FFMPEG_CAP_PROP_FRAME_COUNT:
if(video_st->duration != AV_NOPTS_VALUE_)
return (double)ceil(ic->duration * av_q2d(video_st->r_frame_rate) / AV_TIME_BASE);
break;
case CV_FFMPEG_CAP_PROP_FRAME_COUNT:
{
int64_t nbf = ic->streams[video_stream]->nb_frames;
double eps = 0.000025;
if (nbf == 0)
{
double fps = static_cast<double>(ic->streams[video_stream]->r_frame_rate.num) / static_cast<double>(ic->streams[video_stream]->r_frame_rate.den);
if (fps < eps)
{
fps = 1.0 / (static_cast<double>(ic->streams[video_stream]->codec->time_base.num) / static_cast<double>(ic->streams[video_stream]->codec->time_base.den));
}
nbf = static_cast<int64_t>(round(ic->duration * fps) / AV_TIME_BASE);
}
return nbf;
}
break;
case CV_FFMPEG_CAP_PROP_FRAME_WIDTH:
return (double)frame.width;
break;
break;
case CV_FFMPEG_CAP_PROP_FRAME_HEIGHT:
return (double)frame.height;
break;
break;
case CV_FFMPEG_CAP_PROP_FPS:
#if LIBAVCODEC_BUILD > 4753
return av_q2d (video_st->r_frame_rate);
#else
return (double)video_st->codec.frame_rate
/ (double)video_st->codec.frame_rate_base;
/ (double)video_st->codec.frame_rate_base;
#endif
break;
break;
case CV_FFMPEG_CAP_PROP_FOURCC:
#if LIBAVFORMAT_BUILD > 4628
return (double)video_st->codec->codec_tag;
#else
return (double)video_st->codec.codec_tag;
#endif
break;
break;
}
return 0;
@@ -605,24 +617,24 @@ bool CvCapture_FFMPEG::seekKeyAndRunOverFrames(int framenumber)
{
int ret;
if (framenumber > video_st->cur_dts-1) {
if (framenumber-(video_st->cur_dts-1) > SHORTER_DISTANCE_FOR_SEEK_TO_MAKE_IT_FASTER) {
ret = av_seek_frame(ic, video_stream, framenumber, 1);
assert(ret >= 0);
if( ret < 0 )
return false;
}
grabFrame();
while ((video_st->cur_dts-1) < framenumber)
if ( !grabFrame() ) return false;
if (framenumber-(video_st->cur_dts-1) > SHORTER_DISTANCE_FOR_SEEK_TO_MAKE_IT_FASTER) {
ret = av_seek_frame(ic, video_stream, framenumber, 1);
assert(ret >= 0);
if( ret < 0 )
return false;
}
grabFrame();
while ((video_st->cur_dts-1) < framenumber)
if ( !grabFrame() ) return false;
}
else if ( framenumber < (video_st->cur_dts-1) ) {
ret=av_seek_frame(ic, video_stream, framenumber, 1);
assert( ret >= 0 );
if( ret < 0 )
return false;
grabFrame();
while ((video_st->cur_dts-1) < framenumber )
if ( !grabFrame() ) return false;
ret=av_seek_frame(ic, video_stream, framenumber, 1);
assert( ret >= 0 );
if( ret < 0 )
return false;
grabFrame();
while ((video_st->cur_dts-1) < framenumber )
if ( !grabFrame() ) return false;
}
return true;
}
@@ -666,7 +678,7 @@ bool CvCapture_FFMPEG::setProperty( int property_id, double value )
if (!slowSeek((int)timestamp))
{
fprintf(stderr, "HIGHGUI ERROR: AVI: could not (slow) seek to position %0.3f\n",
(double)timestamp / AV_TIME_BASE);
(double)timestamp / AV_TIME_BASE);
return false;
}
}
@@ -674,7 +686,7 @@ bool CvCapture_FFMPEG::setProperty( int property_id, double value )
{
int flags = AVSEEK_FLAG_ANY;
if (timestamp < ic->streams[video_stream]->cur_dts)
flags |= AVSEEK_FLAG_BACKWARD;
flags |= AVSEEK_FLAG_BACKWARD;
int ret = av_seek_frame(ic, video_stream, timestamp, flags);
if (ret < 0)
{
@@ -698,7 +710,7 @@ bool CvCapture_FFMPEG::setProperty( int property_id, double value )
struct CvVideoWriter_FFMPEG
{
bool open( const char* filename, int fourcc,
double fps, int width, int height, bool isColor );
double fps, int width, int height, bool isColor );
void close();
bool writeFrame( const unsigned char* data, int step, int width, int height, int cn, int origin );
@@ -724,34 +736,34 @@ static const char * icvFFMPEGErrStr(int err)
{
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 2, 0)
switch(err) {
case AVERROR_BSF_NOT_FOUND:
return "Bitstream filter not found";
case AVERROR_DECODER_NOT_FOUND:
return "Decoder not found";
case AVERROR_DEMUXER_NOT_FOUND:
return "Demuxer not found";
case AVERROR_ENCODER_NOT_FOUND:
return "Encoder not found";
case AVERROR_EOF:
return "End of file";
case AVERROR_EXIT:
return "Immediate exit was requested; the called function should not be restarted";
case AVERROR_FILTER_NOT_FOUND:
return "Filter not found";
case AVERROR_INVALIDDATA:
return "Invalid data found when processing input";
case AVERROR_MUXER_NOT_FOUND:
return "Muxer not found";
case AVERROR_OPTION_NOT_FOUND:
return "Option not found";
case AVERROR_PATCHWELCOME:
return "Not yet implemented in FFmpeg, patches welcome";
case AVERROR_PROTOCOL_NOT_FOUND:
return "Protocol not found";
case AVERROR_STREAM_NOT_FOUND:
return "Stream not found";
default:
break;
case AVERROR_BSF_NOT_FOUND:
return "Bitstream filter not found";
case AVERROR_DECODER_NOT_FOUND:
return "Decoder not found";
case AVERROR_DEMUXER_NOT_FOUND:
return "Demuxer not found";
case AVERROR_ENCODER_NOT_FOUND:
return "Encoder not found";
case AVERROR_EOF:
return "End of file";
case AVERROR_EXIT:
return "Immediate exit was requested; the called function should not be restarted";
case AVERROR_FILTER_NOT_FOUND:
return "Filter not found";
case AVERROR_INVALIDDATA:
return "Invalid data found when processing input";
case AVERROR_MUXER_NOT_FOUND:
return "Muxer not found";
case AVERROR_OPTION_NOT_FOUND:
return "Option not found";
case AVERROR_PATCHWELCOME:
return "Not yet implemented in FFmpeg, patches welcome";
case AVERROR_PROTOCOL_NOT_FOUND:
return "Protocol not found";
case AVERROR_STREAM_NOT_FOUND:
return "Stream not found";
default:
break;
}
#else
switch(err) {
@@ -818,7 +830,7 @@ static AVFrame * icv_alloc_picture_FFMPEG(int pix_fmt, int width, int height, bo
return NULL;
}
avpicture_fill((AVPicture *)picture, picture_buf,
(PixelFormat) pix_fmt, width, height);
(PixelFormat) pix_fmt, width, height);
}
else {
}
@@ -875,11 +887,11 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc,
of which frame timestamps are represented. for fixed-fps content,
timebase should be 1/framerate and timestamp increments should be
identically 1. */
frame_rate = static_cast<int>(fps+0.5);
frame_rate_base = 1;
while (fabs(static_cast<double>(frame_rate)/frame_rate_base) - fps > 0.001){
frame_rate_base *= 10;
frame_rate = static_cast<int>(fps*frame_rate_base + 0.5);
frame_rate = static_cast<int>(fps+0.5);
frame_rate_base = 1;
while (fabs(static_cast<double>(frame_rate)/frame_rate_base) - fps > 0.001){
frame_rate_base *= 10;
frame_rate = static_cast<int>(fps*frame_rate_base + 0.5);
}
#if LIBAVFORMAT_BUILD > 4752
c->time_base.den = frame_rate;
@@ -947,9 +959,9 @@ int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st, uint8_
AVPacket pkt;
av_init_packet(&pkt);
#ifndef PKT_FLAG_KEY
#define PKT_FLAG_KEY AV_PKT_FLAG_KEY
#endif
#ifndef PKT_FLAG_KEY
#define PKT_FLAG_KEY AV_PKT_FLAG_KEY
#endif
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
@@ -966,7 +978,8 @@ int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st, uint8_
av_init_packet(&pkt);
#if LIBAVFORMAT_BUILD > 4752
pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, video_st->time_base);
if(c->coded_frame->pts != (int64_t)AV_NOPTS_VALUE)
pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, video_st->time_base);
#else
pkt.pts = c->coded_frame->pts;
#endif
@@ -1054,7 +1067,7 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
}
// check if buffer sizes match, i.e. image has expected format (size, channels, bitdepth, alignment)
/*#if LIBAVCODEC_VERSION_INT >= ((52<<16)+(37<<8)+0)
/*#if LIBAVCODEC_VERSION_INT >= ((52<<16)+(37<<8)+0)
assert (image->imageSize == avpicture_get_size( (PixelFormat)input_pix_fmt, image->width, image->height ));
#else
assert (image->imageSize == avpicture_get_size( input_pix_fmt, image->width, image->height ));
@@ -1064,38 +1077,38 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
assert( input_picture );
// let input_picture point to the raw data buffer of 'image'
avpicture_fill((AVPicture *)input_picture, (uint8_t *) data,
(PixelFormat)input_pix_fmt, width, height);
(PixelFormat)input_pix_fmt, width, height);
#if !defined(HAVE_FFMPEG_SWSCALE)
// convert to the color format needed by the codec
if( img_convert((AVPicture *)picture, c->pix_fmt,
(AVPicture *)input_picture, (PixelFormat)input_pix_fmt,
width, height) < 0){
(AVPicture *)input_picture, (PixelFormat)input_pix_fmt,
width, height) < 0){
return false;
}
#else
img_convert_ctx = sws_getContext(width,
height,
(PixelFormat)input_pix_fmt,
c->width,
c->height,
c->pix_fmt,
SWS_BICUBIC,
NULL, NULL, NULL);
height,
(PixelFormat)input_pix_fmt,
c->width,
c->height,
c->pix_fmt,
SWS_BICUBIC,
NULL, NULL, NULL);
if ( sws_scale(img_convert_ctx, input_picture->data,
input_picture->linesize, 0,
height,
picture->data, picture->linesize) < 0 )
{
return false;
}
if ( sws_scale(img_convert_ctx, input_picture->data,
input_picture->linesize, 0,
height,
picture->data, picture->linesize) < 0 )
{
return false;
}
sws_freeContext(img_convert_ctx);
#endif
}
else{
avpicture_fill((AVPicture *)picture, (uint8_t *) data,
(PixelFormat)input_pix_fmt, width, height);
(PixelFormat)input_pix_fmt, width, height);
}
ret = icv_av_write_frame_FFMPEG( oc, video_st, outbuf, outbuf_size, picture) >= 0;
@@ -1124,306 +1137,312 @@ void CvVideoWriter_FFMPEG::close()
#if LIBAVFORMAT_BUILD > 4628
if( video_st->codec->pix_fmt != input_pix_fmt){
#else
if( video_st->codec.pix_fmt != input_pix_fmt){
if( video_st->codec.pix_fmt != input_pix_fmt){
#endif
if(picture->data[0])
free(picture->data[0]);
picture->data[0] = 0;
}
av_free(picture);
if(picture->data[0])
free(picture->data[0]);
picture->data[0] = 0;
}
av_free(picture);
if (input_picture) {
av_free(input_picture);
}
if (input_picture) {
av_free(input_picture);
}
/* close codec */
/* close codec */
#if LIBAVFORMAT_BUILD > 4628
avcodec_close(video_st->codec);
avcodec_close(video_st->codec);
#else
avcodec_close(&(video_st->codec));
avcodec_close(&(video_st->codec));
#endif
av_free(outbuf);
av_free(outbuf);
/* free the streams */
for(i = 0; i < oc->nb_streams; i++) {
av_freep(&oc->streams[i]->codec);
av_freep(&oc->streams[i]);
}
/* free the streams */
for(i = 0; i < oc->nb_streams; i++) {
av_freep(&oc->streams[i]->codec);
av_freep(&oc->streams[i]);
}
if (!(fmt->flags & AVFMT_NOFILE)) {
/* close the output file */
if (!(fmt->flags & AVFMT_NOFILE)) {
/* close the output file */
#if LIBAVCODEC_VERSION_INT >= ((51<<16)+(49<<8)+0)
url_fclose(oc->pb);
url_fclose(oc->pb);
#else
url_fclose(&oc->pb);
url_fclose(&oc->pb);
#endif
}
}
/* free the stream */
av_free(oc);
/* free the stream */
av_free(oc);
if( temp_image.data )
{
free(temp_image.data);
temp_image.data = 0;
if( temp_image.data )
{
free(temp_image.data);
temp_image.data = 0;
}
init();
}
init();
}
/// Create a video writer object that uses FFMPEG
bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
double fps, int width, int height, bool is_color )
{
CodecID codec_id = CODEC_ID_NONE;
/// Create a video writer object that uses FFMPEG
bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
double fps, int width, int height, bool is_color )
{
CodecID codec_id = CODEC_ID_NONE;
int err, codec_pix_fmt, bitrate_scale = 64;
close();
close();
// check arguments
// check arguments
assert(filename);
assert(fps > 0);
assert(width > 0 && height > 0);
// tell FFMPEG to register codecs
// tell FFMPEG to register codecs
av_register_all();
/* auto detect the output format from the name and fourcc code. */
/* auto detect the output format from the name and fourcc code. */
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 2, 0)
fmt = av_guess_format(NULL, filename, NULL);
fmt = av_guess_format(NULL, filename, NULL);
#else
fmt = guess_format(NULL, filename, NULL);
fmt = guess_format(NULL, filename, NULL);
#endif
if (!fmt)
return false;
/* determine optimal pixel format */
if (is_color) {
input_pix_fmt = PIX_FMT_BGR24;
}
else {
input_pix_fmt = PIX_FMT_GRAY8;
}
if (!fmt)
return false;
/* Lookup codec_id for given fourcc */
/* determine optimal pixel format */
if (is_color) {
input_pix_fmt = PIX_FMT_BGR24;
}
else {
input_pix_fmt = PIX_FMT_GRAY8;
}
/* Lookup codec_id for given fourcc */
#if LIBAVCODEC_VERSION_INT<((51<<16)+(49<<8)+0)
if( (codec_id = codec_get_bmp_id( fourcc )) == CODEC_ID_NONE )
return false;
if( (codec_id = codec_get_bmp_id( fourcc )) == CODEC_ID_NONE )
return false;
#else
const struct AVCodecTag * tags[] = { codec_bmp_tags, NULL};
if( (codec_id = av_codec_get_id(tags, fourcc)) == CODEC_ID_NONE )
return false;
const struct AVCodecTag * tags[] = { codec_bmp_tags, NULL};
if( (codec_id = av_codec_get_id(tags, fourcc)) == CODEC_ID_NONE )
return false;
#endif
// alloc memory for context
// alloc memory for context
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 2, 0)
oc = avformat_alloc_context();
oc = avformat_alloc_context();
#else
oc = av_alloc_format_context();
oc = av_alloc_format_context();
#endif
assert (oc);
assert (oc);
/* set file name */
oc->oformat = fmt;
snprintf(oc->filename, sizeof(oc->filename), "%s", filename);
/* set file name */
oc->oformat = fmt;
snprintf(oc->filename, sizeof(oc->filename), "%s", filename);
/* set some options */
oc->max_delay = (int)(0.7*AV_TIME_BASE); /* This reduces buffer underrun warnings with MPEG */
/* set some options */
oc->max_delay = (int)(0.7*AV_TIME_BASE); /* This reduces buffer underrun warnings with MPEG */
// set a few optimal pixel formats for lossless codecs of interest..
switch (codec_id) {
// set a few optimal pixel formats for lossless codecs of interest..
switch (codec_id) {
#if LIBAVCODEC_VERSION_INT>((50<<16)+(1<<8)+0)
case CODEC_ID_JPEGLS:
// BGR24 or GRAY8 depending on is_color...
codec_pix_fmt = input_pix_fmt;
break;
case CODEC_ID_JPEGLS:
// BGR24 or GRAY8 depending on is_color...
codec_pix_fmt = input_pix_fmt;
break;
#endif
case CODEC_ID_HUFFYUV:
codec_pix_fmt = PIX_FMT_YUV422P;
break;
case CODEC_ID_MJPEG:
case CODEC_ID_LJPEG:
codec_pix_fmt = PIX_FMT_YUVJ420P;
bitrate_scale = 128;
break;
case CODEC_ID_RAWVIDEO:
codec_pix_fmt = input_pix_fmt == PIX_FMT_GRAY8 ||
input_pix_fmt == PIX_FMT_GRAY16LE ||
input_pix_fmt == PIX_FMT_GRAY16BE ? input_pix_fmt : PIX_FMT_YUV420P;
break;
default:
// good for lossy formats, MPEG, etc.
codec_pix_fmt = PIX_FMT_YUV420P;
break;
}
case CODEC_ID_HUFFYUV:
codec_pix_fmt = PIX_FMT_YUV422P;
break;
case CODEC_ID_MJPEG:
case CODEC_ID_LJPEG:
codec_pix_fmt = PIX_FMT_YUVJ420P;
bitrate_scale = 128;
break;
case CODEC_ID_RAWVIDEO:
codec_pix_fmt = input_pix_fmt == PIX_FMT_GRAY8 ||
input_pix_fmt == PIX_FMT_GRAY16LE ||
input_pix_fmt == PIX_FMT_GRAY16BE ? input_pix_fmt : PIX_FMT_YUV420P;
break;
default:
// good for lossy formats, MPEG, etc.
codec_pix_fmt = PIX_FMT_YUV420P;
break;
}
// TODO -- safe to ignore output audio stream?
video_st = icv_add_video_stream_FFMPEG(oc, codec_id,
width, height, width*height*bitrate_scale,
fps, codec_pix_fmt);
// TODO -- safe to ignore output audio stream?
video_st = icv_add_video_stream_FFMPEG(oc, codec_id,
width, height, width*height*bitrate_scale,
fps, codec_pix_fmt);
/* set the output parameters (must be done even if no
/* set the output parameters (must be done even if no
parameters). */
if (av_set_parameters(oc, NULL) < 0) {
return false;
}
if (av_set_parameters(oc, NULL) < 0) {
return false;
}
dump_format(oc, 0, filename, 1);
dump_format(oc, 0, filename, 1);
/* now that all the parameters are set, we can open the audio and
/* now that all the parameters are set, we can open the audio and
video codecs and allocate the necessary encode buffers */
if (!video_st){
return false;
}
if (!video_st){
return false;
}
AVCodec *codec;
AVCodecContext *c;
AVCodec *codec;
AVCodecContext *c;
#if LIBAVFORMAT_BUILD > 4628
c = (video_st->codec);
c = (video_st->codec);
#else
c = &(video_st->codec);
c = &(video_st->codec);
#endif
c->codec_tag = fourcc;
/* find the video encoder */
codec = avcodec_find_encoder(c->codec_id);
if (!codec) {
return false;
}
c->codec_tag = fourcc;
/* find the video encoder */
codec = avcodec_find_encoder(c->codec_id);
if (!codec) {
fprintf(stderr, "Could not find encoder for codec id %d: %s", c->codec_id, icvFFMPEGErrStr(
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 2, 0)
AVERROR_ENCODER_NOT_FOUND
#else
-1
#endif
));
return false;
}
c->bit_rate_tolerance = c->bit_rate;
c->bit_rate_tolerance = c->bit_rate;
/* open the codec */
if ( (err=avcodec_open(c, codec)) < 0) {
char errtext[256];
sprintf(errtext, "Could not open codec '%s': %s", codec->name, icvFFMPEGErrStr(err));
return false;
}
/* open the codec */
if ( (err=avcodec_open(c, codec)) < 0 ) {
fprintf(stderr, "Could not open codec '%s': %s", codec->name, icvFFMPEGErrStr(err));
return false;
}
outbuf = NULL;
outbuf = NULL;
if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
/* allocate output buffer */
/* assume we will never get codec output with more than 4 bytes per pixel... */
outbuf_size = width*height*4;
outbuf = (uint8_t *) av_malloc(outbuf_size);
}
if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
/* allocate output buffer */
/* assume we will never get codec output with more than 4 bytes per pixel... */
outbuf_size = width*height*4;
outbuf = (uint8_t *) av_malloc(outbuf_size);
}
bool need_color_convert;
need_color_convert = (c->pix_fmt != input_pix_fmt);
bool need_color_convert;
need_color_convert = (c->pix_fmt != input_pix_fmt);
/* allocate the encoded raw picture */
picture = icv_alloc_picture_FFMPEG(c->pix_fmt, c->width, c->height, need_color_convert);
if (!picture) {
return false;
}
/* allocate the encoded raw picture */
picture = icv_alloc_picture_FFMPEG(c->pix_fmt, c->width, c->height, need_color_convert);
if (!picture) {
return false;
}
/* if the output format is not our input format, then a temporary
/* if the output format is not our input format, then a temporary
picture of the input format is needed too. It is then converted
to the required output format */
input_picture = NULL;
if ( need_color_convert ) {
input_picture = icv_alloc_picture_FFMPEG(input_pix_fmt, c->width, c->height, false);
if (!input_picture) {
return false;
input_picture = NULL;
if ( need_color_convert ) {
input_picture = icv_alloc_picture_FFMPEG(input_pix_fmt, c->width, c->height, false);
if (!input_picture) {
return false;
}
}
/* open the output file, if needed */
if (!(fmt->flags & AVFMT_NOFILE)) {
if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
return false;
}
}
/* write the stream header, if any */
av_write_header( oc );
return true;
}
CvCapture_FFMPEG* cvCreateFileCapture_FFMPEG( const char* filename )
{
CvCapture_FFMPEG* capture = (CvCapture_FFMPEG*)malloc(sizeof(*capture));
capture->init();
if( capture->open( filename ))
return capture;
capture->close();
free(capture);
return 0;
}
void cvReleaseCapture_FFMPEG(CvCapture_FFMPEG** capture)
{
if( capture && *capture )
{
(*capture)->close();
free(*capture);
*capture = 0;
}
}
/* open the output file, if needed */
if (!(fmt->flags & AVFMT_NOFILE)) {
if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
return false;
int cvSetCaptureProperty_FFMPEG(CvCapture_FFMPEG* capture, int prop_id, double value)
{
return capture->setProperty(prop_id, value);
}
double cvGetCaptureProperty_FFMPEG(CvCapture_FFMPEG* capture, int prop_id)
{
return capture->getProperty(prop_id);
}
int cvGrabFrame_FFMPEG(CvCapture_FFMPEG* capture)
{
return capture->grabFrame();
}
int cvRetrieveFrame_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn)
{
return capture->retrieveFrame(0, data, step, width, height, cn);
}
CvVideoWriter_FFMPEG* cvCreateVideoWriter_FFMPEG( const char* filename, int fourcc, double fps,
int width, int height, int isColor )
{
CvVideoWriter_FFMPEG* writer = (CvVideoWriter_FFMPEG*)malloc(sizeof(*writer));
writer->init();
if( writer->open( filename, fourcc, fps, width, height, isColor != 0 ))
return writer;
writer->close();
free(writer);
return 0;
}
void cvReleaseVideoWriter_FFMPEG( CvVideoWriter_FFMPEG** writer )
{
if( writer && *writer )
{
(*writer)->close();
free(*writer);
*writer = 0;
}
}
/* write the stream header, if any */
av_write_header( oc );
return true;
}
CvCapture_FFMPEG* cvCreateFileCapture_FFMPEG( const char* filename )
{
CvCapture_FFMPEG* capture = (CvCapture_FFMPEG*)malloc(sizeof(*capture));
capture->init();
if( capture->open( filename ))
return capture;
capture->close();
free(capture);
return 0;
}
void cvReleaseCapture_FFMPEG(CvCapture_FFMPEG** capture)
{
if( capture && *capture )
int cvWriteFrame_FFMPEG( CvVideoWriter_FFMPEG* writer,
const unsigned char* data, int step,
int width, int height, int cn, int origin)
{
(*capture)->close();
free(*capture);
*capture = 0;
return writer->writeFrame(data, step, width, height, cn, origin);
}
}
int cvSetCaptureProperty_FFMPEG(CvCapture_FFMPEG* capture, int prop_id, double value)
{
return capture->setProperty(prop_id, value);
}
double cvGetCaptureProperty_FFMPEG(CvCapture_FFMPEG* capture, int prop_id)
{
return capture->getProperty(prop_id);
}
int cvGrabFrame_FFMPEG(CvCapture_FFMPEG* capture)
{
return capture->grabFrame();
}
int cvRetrieveFrame_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn)
{
return capture->retrieveFrame(0, data, step, width, height, cn);
}
CvVideoWriter_FFMPEG* cvCreateVideoWriter_FFMPEG( const char* filename, int fourcc, double fps,
int width, int height, int isColor )
{
CvVideoWriter_FFMPEG* writer = (CvVideoWriter_FFMPEG*)malloc(sizeof(*writer));
writer->init();
if( writer->open( filename, fourcc, fps, width, height, isColor != 0 ))
return writer;
writer->close();
free(writer);
return 0;
}
void cvReleaseVideoWriter_FFMPEG( CvVideoWriter_FFMPEG** writer )
{
if( writer && *writer )
{
(*writer)->close();
free(*writer);
*writer = 0;
}
}
int cvWriteFrame_FFMPEG( CvVideoWriter_FFMPEG* writer,
const unsigned char* data, int step,
int width, int height, int cn, int origin)
{
return writer->writeFrame(data, step, width, height, cn, origin);
}

File diff suppressed because it is too large

View File

@@ -1,251 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#ifdef HAVE_FFMPEG
#include "cap_ffmpeg_impl_v2.hpp"
#else
#include "cap_ffmpeg_api.hpp"
#endif
static CvCreateFileCapture_Plugin icvCreateFileCapture_FFMPEG_p = 0;
static CvReleaseCapture_Plugin icvReleaseCapture_FFMPEG_p = 0;
static CvGrabFrame_Plugin icvGrabFrame_FFMPEG_p = 0;
static CvRetrieveFrame_Plugin icvRetrieveFrame_FFMPEG_p = 0;
static CvSetCaptureProperty_Plugin icvSetCaptureProperty_FFMPEG_p = 0;
static CvGetCaptureProperty_Plugin icvGetCaptureProperty_FFMPEG_p = 0;
static CvCreateVideoWriter_Plugin icvCreateVideoWriter_FFMPEG_p = 0;
static CvReleaseVideoWriter_Plugin icvReleaseVideoWriter_FFMPEG_p = 0;
static CvWriteFrame_Plugin icvWriteFrame_FFMPEG_p = 0;
static void
icvInitFFMPEG(void)
{
static int ffmpegInitialized = 0;
if( !ffmpegInitialized )
{
#if defined WIN32 || defined _WIN32
const char* module_name = "opencv_ffmpeg"
#if (defined _MSC_VER && defined _M_X64) || (defined __GNUC__ && defined __x86_64__)
"_64"
#endif
".dll";
static HMODULE icvFFOpenCV = LoadLibrary( module_name );
if( icvFFOpenCV )
{
icvCreateFileCapture_FFMPEG_p =
(CvCreateFileCapture_Plugin)GetProcAddress(icvFFOpenCV, "cvCreateFileCapture_FFMPEG");
icvReleaseCapture_FFMPEG_p =
(CvReleaseCapture_Plugin)GetProcAddress(icvFFOpenCV, "cvReleaseCapture_FFMPEG");
icvGrabFrame_FFMPEG_p =
(CvGrabFrame_Plugin)GetProcAddress(icvFFOpenCV, "cvGrabFrame_FFMPEG");
icvRetrieveFrame_FFMPEG_p =
(CvRetrieveFrame_Plugin)GetProcAddress(icvFFOpenCV, "cvRetrieveFrame_FFMPEG");
icvSetCaptureProperty_FFMPEG_p =
(CvSetCaptureProperty_Plugin)GetProcAddress(icvFFOpenCV, "cvSetCaptureProperty_FFMPEG");
icvGetCaptureProperty_FFMPEG_p =
(CvGetCaptureProperty_Plugin)GetProcAddress(icvFFOpenCV, "cvGetCaptureProperty_FFMPEG");
icvCreateVideoWriter_FFMPEG_p =
(CvCreateVideoWriter_Plugin)GetProcAddress(icvFFOpenCV, "cvCreateVideoWriter_FFMPEG");
icvReleaseVideoWriter_FFMPEG_p =
(CvReleaseVideoWriter_Plugin)GetProcAddress(icvFFOpenCV, "cvReleaseVideoWriter_FFMPEG");
icvWriteFrame_FFMPEG_p =
(CvWriteFrame_Plugin)GetProcAddress(icvFFOpenCV, "cvWriteFrame_FFMPEG");
#if 0
if( icvCreateFileCapture_FFMPEG_p != 0 &&
icvReleaseCapture_FFMPEG_p != 0 &&
icvGrabFrame_FFMPEG_p != 0 &&
icvRetrieveFrame_FFMPEG_p != 0 &&
icvSetCaptureProperty_FFMPEG_p != 0 &&
icvGetCaptureProperty_FFMPEG_p != 0 &&
icvCreateVideoWriter_FFMPEG_p != 0 &&
icvReleaseVideoWriter_FFMPEG_p != 0 &&
icvWriteFrame_FFMPEG_p != 0 )
{
printf("Successfully initialized ffmpeg plugin!\n");
}
else
{
printf("Failed to load FFMPEG plugin: module handle=%p\n", icvFFOpenCV);
}
#endif
}
#elif defined HAVE_FFMPEG
icvCreateFileCapture_FFMPEG_p = (CvCreateFileCapture_Plugin)cvCreateFileCapture_FFMPEG;
icvReleaseCapture_FFMPEG_p = (CvReleaseCapture_Plugin)cvReleaseCapture_FFMPEG;
icvGrabFrame_FFMPEG_p = (CvGrabFrame_Plugin)cvGrabFrame_FFMPEG;
icvRetrieveFrame_FFMPEG_p = (CvRetrieveFrame_Plugin)cvRetrieveFrame_FFMPEG;
icvSetCaptureProperty_FFMPEG_p = (CvSetCaptureProperty_Plugin)cvSetCaptureProperty_FFMPEG;
icvGetCaptureProperty_FFMPEG_p = (CvGetCaptureProperty_Plugin)cvGetCaptureProperty_FFMPEG;
icvCreateVideoWriter_FFMPEG_p = (CvCreateVideoWriter_Plugin)cvCreateVideoWriter_FFMPEG;
icvReleaseVideoWriter_FFMPEG_p = (CvReleaseVideoWriter_Plugin)cvReleaseVideoWriter_FFMPEG;
icvWriteFrame_FFMPEG_p = (CvWriteFrame_Plugin)cvWriteFrame_FFMPEG;
#endif
ffmpegInitialized = 1;
}
}
class CvCapture_FFMPEG_proxy : public CvCapture
{
public:
    CvCapture_FFMPEG_proxy() { ffmpegCapture = 0; }
    virtual ~CvCapture_FFMPEG_proxy() { close(); }

    virtual double getProperty(int propId)
    {
        return ffmpegCapture ? icvGetCaptureProperty_FFMPEG_p(ffmpegCapture, propId) : 0;
    }
    virtual bool setProperty(int propId, double value)
    {
        return ffmpegCapture ? icvSetCaptureProperty_FFMPEG_p(ffmpegCapture, propId, value)!=0 : false;
    }
    virtual bool grabFrame()
    {
        return ffmpegCapture ? icvGrabFrame_FFMPEG_p(ffmpegCapture)!=0 : false;
    }
    virtual IplImage* retrieveFrame(int)
    {
        unsigned char* data = 0;
        int step=0, width=0, height=0, cn=0;

        if( !ffmpegCapture ||
            !icvRetrieveFrame_FFMPEG_p(ffmpegCapture, &data, &step, &width, &height, &cn))
            return 0;
        cvInitImageHeader(&frame, cvSize(width, height), 8, cn);
        cvSetData(&frame, data, step);
        return &frame;
    }
    virtual bool open( const char* filename )
    {
        close();

        icvInitFFMPEG();
        if( !icvCreateFileCapture_FFMPEG_p )
            return false;
        ffmpegCapture = icvCreateFileCapture_FFMPEG_p( filename );
        return ffmpegCapture != 0;
    }
    virtual void close()
    {
        if( ffmpegCapture && icvReleaseCapture_FFMPEG_p )
            icvReleaseCapture_FFMPEG_p( &ffmpegCapture );
        assert( ffmpegCapture == 0 );
        ffmpegCapture = 0;
    }

protected:
    void* ffmpegCapture;
    IplImage frame;
};
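
// Factory used by the generic capture dispatch: it tries the FFMPEG proxy first and,
// on Windows builds, falls back to the VFW backend if FFMPEG cannot open the file.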
CvCapture* cvCreateFileCapture_FFMPEG_proxy(const char * filename)
{
    CvCapture_FFMPEG_proxy* result = new CvCapture_FFMPEG_proxy;
    if( result->open( filename ))
        return result;
    delete result;
#if defined WIN32 || defined _WIN32
    return cvCreateFileCapture_VFW(filename);
#else
    return 0;
#endif
}
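
// CvVideoWriter_FFMPEG_proxy: the writer-side counterpart; frames are handed to the
// plugin as raw 8-bit interleaved buffers (widthStep/width/height/nChannels/origin).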
class CvVideoWriter_FFMPEG_proxy : public CvVideoWriter
{
public:
    CvVideoWriter_FFMPEG_proxy() { ffmpegWriter = 0; }
    virtual ~CvVideoWriter_FFMPEG_proxy() { close(); }

    virtual bool writeFrame( const IplImage* image )
    {
        if(!ffmpegWriter)
            return false;
        CV_Assert(image->depth == 8);

        return icvWriteFrame_FFMPEG_p(ffmpegWriter, (const uchar*)image->imageData,
            image->widthStep, image->width, image->height, image->nChannels, image->origin) != 0;
    }
    virtual bool open( const char* filename, int fourcc, double fps, CvSize frameSize, bool isColor )
    {
        close();

        icvInitFFMPEG();
        if( !icvCreateVideoWriter_FFMPEG_p )
            return false;
        ffmpegWriter = icvCreateVideoWriter_FFMPEG_p( filename, fourcc, fps, frameSize.width, frameSize.height, isColor );
        return ffmpegWriter != 0;
    }
    virtual void close()
    {
        if( ffmpegWriter && icvReleaseVideoWriter_FFMPEG_p )
            icvReleaseVideoWriter_FFMPEG_p( &ffmpegWriter );
        assert( ffmpegWriter == 0 );
        ffmpegWriter = 0;
    }

protected:
    void* ffmpegWriter;
};

CvVideoWriter* cvCreateVideoWriter_FFMPEG_proxy( const char* filename, int fourcc,
                                                 double fps, CvSize frameSize, int isColor )
{
    CvVideoWriter_FFMPEG_proxy* result = new CvVideoWriter_FFMPEG_proxy;

    if( result->open( filename, fourcc, fps, frameSize, isColor != 0 ))
        return result;
    delete result;
#if defined WIN32 || defined _WIN32
    return cvCreateVideoWriter_VFW(filename, fourcc, fps, frameSize, isColor);
#else
    return 0;
#endif
}
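
// A minimal usage sketch (illustrative only; the file name is a placeholder): the
// proxy factories are normally reached through the generic cvCreateFileCapture()/
// cvCreateVideoWriter() dispatch, but the capture proxy behaves like any CvCapture:
//
//     CvCapture* cap = cvCreateFileCapture_FFMPEG_proxy("clip.avi");
//     if( cap )
//     {
//         IplImage* frame;
//         while( (frame = cvQueryFrame(cap)) != 0 )
//             ;   // process the decoded frame here
//         cvReleaseCapture(&cap);
//     }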

View File

@@ -44,6 +44,10 @@
#ifdef HAVE_OPENNI
#if TBB_INTERFACE_VERSION < 5000
# undef HAVE_TBB
#endif
#include <iostream>
#include <queue>
#include "XnCppWrapper.h"

View File

@@ -109,6 +109,7 @@ protected:
CvCaptureCAM_PvAPI::CvCaptureCAM_PvAPI()
{
monocrome=false;
memset(&this->Camera, 0, sizeof(this->Camera));
}
void CvCaptureCAM_PvAPI::Sleep(unsigned int time)
{

View File

@@ -59,6 +59,11 @@
#include <zlib.h>
#include "grfmt_png.hpp"
#if defined _MSC_VER && _MSC_VER >= 1200
// disable warnings related to _setjmp
#pragma warning( disable: 4611 )
#endif
namespace cv
{

View File

@@ -57,11 +57,9 @@ public:
{
const int img_r = 4096;
const int img_c = 4096;
Size frame_s = Size(img_c, img_r);
const double fps = 30;
const double time_sec = 2;
const int coeff = static_cast<int>(static_cast<double>(cv::min(img_c, img_r)) / (fps * time_sec));
const double fps0 = 15;
const double time_sec = 1;
const size_t n = sizeof(codec_bmp_tags)/sizeof(codec_bmp_tags[0]);
bool created = false;
@@ -69,25 +67,59 @@ public:
for (size_t j = 0; j < n; ++j)
{
stringstream s; s << codec_bmp_tags[j].tag;
int tag = codec_bmp_tags[j].tag;
if( tag != MKTAG('H', '2', '6', '3') &&
tag != MKTAG('H', '2', '6', '1') &&
tag != MKTAG('D', 'I', 'V', 'X') &&
tag != MKTAG('D', 'X', '5', '0') &&
tag != MKTAG('X', 'V', 'I', 'D') &&
tag != MKTAG('m', 'p', '4', 'v') &&
tag != MKTAG('D', 'I', 'V', '3') &&
tag != MKTAG('W', 'M', 'V', '1') &&
tag != MKTAG('W', 'M', 'V', '2') &&
tag != MKTAG('M', 'P', 'E', 'G') &&
tag != MKTAG('M', 'J', 'P', 'G') &&
tag != MKTAG('j', 'p', 'e', 'g') &&
tag != 0 &&
tag != MKTAG('I', '4', '2', '0') &&
tag != MKTAG('Y', 'U', 'Y', '2') &&
tag != MKTAG('F', 'L', 'V', '1') )
continue;
const string filename = "output_"+s.str()+".avi";
Mat img(img_r, img_c, CV_8UC3, Scalar::all(0));
try
{
VideoWriter writer(filename, codec_bmp_tags[j].tag, fps, frame_s);
double fps = fps0;
Size frame_s = Size(img_c, img_r);
if( tag == CV_FOURCC('H', '2', '6', '1') )
frame_s = Size(352, 288);
else if( tag == CV_FOURCC('H', '2', '6', '3') )
frame_s = Size(704, 576);
/*else if( tag == CV_FOURCC('M', 'J', 'P', 'G') ||
tag == CV_FOURCC('j', 'p', 'e', 'g') )
frame_s = Size(1920, 1080);*/
if( tag == CV_FOURCC('M', 'P', 'E', 'G') )
fps = 25;
VideoWriter writer(filename, tag, fps, frame_s);
if (writer.isOpened() == false)
{
ts->printf(ts->LOG, "\n\nFile name: %s\n", filename.c_str());
ts->printf(ts->LOG, "Codec id: %d Codec tag: %d\n", j, codec_bmp_tags[j].tag);
ts->printf(ts->LOG, "Codec id: %d Codec tag: %c%c%c%c\n", j,
tag & 255, (tag >> 8) & 255, (tag >> 16) & 255, (tag >> 24) & 255);
ts->printf(ts->LOG, "Error: cannot create video file.");
ts->set_failed_test_info(ts->FAIL_INVALID_OUTPUT);
}
else
{
Mat img(frame_s, CV_8UC3, Scalar::all(0));
const int coeff = cvRound(cv::min(frame_s.width, frame_s.height)/(fps0 * time_sec));
for (int i = 0 ; i < static_cast<int>(fps * time_sec); i++ )
{
//circle(img, Point2i(img_c / 2, img_r / 2), cv::min(img_r, img_c) / 2 * (i + 1), Scalar(255, 0, 0, 0), 2);

View File

@@ -55,7 +55,7 @@ public:
void CV_FramecountTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
const int time_sec = 5, fps = 25;

View File

@@ -122,7 +122,7 @@ void CV_VideoPositioningTest::run_test(int method)
ts->printf(cvtest::TS::LOG, "\n\nSource files directory: %s\n", (src_dir+"video/").c_str());
const string ext[] = {"mov", "avi", "mp4", "mpg", "wmv"};
const string ext[] = {"avi", "mp4", "wmv"};
size_t n = sizeof(ext)/sizeof(ext[0]);
@@ -209,7 +209,7 @@ void CV_VideoPositioningTest::run_test(int method)
void CV_VideoProgressivePositioningTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
run_test(PROGRESSIVE);
@@ -220,7 +220,7 @@ void CV_VideoProgressivePositioningTest::run(int)
void CV_VideoRandomPositioningTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
run_test(RANDOM);

View File

@@ -377,29 +377,40 @@ void CV_HighGuiTest::SpecificImageTest(const string& dir)
void CV_HighGuiTest::SpecificVideoFileTest(const string& dir, const char codecchars[4])
{
const string ext[] = {"avi", "mov", "mp4", "mpg", "wmv"};
const size_t n = sizeof(ext)/sizeof(ext[0]);
const string exts[] = {"avi", "mov", "mpg", "wmv"};
const size_t n = sizeof(exts)/sizeof(exts[0]);
int fourcc0 = CV_FOURCC(codecchars[0], codecchars[1], codecchars[2], codecchars[3]);
for (size_t j = 0; j < n; ++j)
if ((ext[j]!="mp4")||(string(&codecchars[0], 4)!="IYUV"))
#if defined WIN32 || defined _WIN32
if (((ext[j]!="mov")||(string(&codecchars[0], 4)=="XVID"))&&(ext[j]!="mp4"))
#endif
{
const string video_file = "video_" + string(&codecchars[0], 4) + "." + ext[j];
string ext = exts[j];
int fourcc = fourcc0;
if( (ext == "mov" && fourcc != CV_FOURCC('M', 'J', 'P', 'G')) ||
(ext == "mpg" && fourcc != CV_FOURCC('m', 'p', 'e', 'g')) ||
(ext == "wmv" && fourcc != CV_FOURCC('M', 'J', 'P', 'G')))
continue;
if( ext == "mov" )
fourcc = CV_FOURCC('m', 'p', '4', 'v');
string fourcc_str = format("%c%c%c%c", fourcc & 255, (fourcc >> 8) & 255, (fourcc >> 16) & 255, (fourcc >> 24) & 255);
const string video_file = "video_" + fourcc_str + "." + ext;
VideoWriter writer = cv::VideoWriter(video_file, CV_FOURCC(codecchars[0], codecchars[1], codecchars[2], codecchars[3]), 25, cv::Size(968, 757), true);
Size frame_size(968 & -2, 757 & -2);
//Size frame_size(968 & -16, 757 & -16);
//Size frame_size(640, 480);
VideoWriter writer(video_file, fourcc, 25, frame_size, true);
if (!writer.isOpened())
{
VideoWriter writer(video_file, fourcc, 25, frame_size, true);
ts->printf(ts->LOG, "Creating a video in %s...\n", video_file.c_str());
ts->printf(ts->LOG, "Cannot create VideoWriter object with codec %s.\n", string(&codecchars[0], 4).c_str());
ts->printf(ts->LOG, "Cannot create VideoWriter object with codec %s.\n", fourcc_str.c_str());
ts->set_failed_test_info(ts->FAIL_MISMATCH);
continue;
}
const size_t IMAGE_COUNT = 30;
const size_t IMAGE_COUNT = 30;
for(size_t i = 0; i < IMAGE_COUNT; ++i)
{
@@ -417,10 +428,10 @@ void CV_HighGuiTest::SpecificVideoFileTest(const string& dir, const char codecch
ts->printf(ts->LOG, "Error: cannot read frame from %s.\n", (ts->get_data_path()+"../python/images/QCIF_"+s_digit.str()+".bmp").c_str());
ts->printf(ts->LOG, "Continue creating the video file...\n");
ts->set_failed_test_info(ts->FAIL_INVALID_TEST_DATA);
continue;
break;//continue;
}
cv::resize(img, img, Size(968, 757), 0.0, 0.0, cv::INTER_CUBIC);
cv::resize(img, img, frame_size, 0.0, 0.0, cv::INTER_CUBIC);
for (int k = 0; k < img.rows; ++k)
for (int l = 0; l < img.cols; ++l)
@@ -433,32 +444,31 @@ void CV_HighGuiTest::SpecificVideoFileTest(const string& dir, const char codecch
writer << img;
}
writer.~VideoWriter();
writer.release();
cv::VideoCapture cap(video_file);
size_t FRAME_COUNT = (size_t)cap.get(CV_CAP_PROP_FRAME_COUNT);
if (FRAME_COUNT != IMAGE_COUNT)
if (FRAME_COUNT != IMAGE_COUNT && ext != "mpg" )
{
ts->printf(ts->LOG, "\nFrame count checking for video_%s.%s...\n", string(&codecchars[0], 4).c_str(), ext[j].c_str());
ts->printf(ts->LOG, "Video codec: %s\n", string(&codecchars[0], 4).c_str());
ts->printf(ts->LOG, "\nFrame count checking for video_%s.%s...\n", fourcc_str.c_str(), ext.c_str());
ts->printf(ts->LOG, "Video codec: %s\n", fourcc_str.c_str());
ts->printf(ts->LOG, "Required frame count: %d; Returned frame count: %d\n", IMAGE_COUNT, FRAME_COUNT);
ts->printf(ts->LOG, "Error: Incorrect frame count in the video.\n");
ts->printf(ts->LOG, "Continue checking...\n");
ts->set_failed_test_info(ts->FAIL_BAD_ACCURACY);
}
cap.set(CV_CAP_PROP_POS_FRAMES, -1);
//cap.set(CV_CAP_PROP_POS_FRAMES, -1);
for (int i = -1; i < (int)std::min<size_t>(FRAME_COUNT, IMAGE_COUNT)-1; i++)
for (int i = 0; i < (int)std::min<size_t>(FRAME_COUNT, IMAGE_COUNT)-1; i++)
{
cv::Mat frame; cap >> frame;
if (frame.empty())
{
ts->printf(ts->LOG, "\nVideo file directory: %s\n", ".");
ts->printf(ts->LOG, "File name: video_%s.%s\n", string(&codecchars[0], 4).c_str(), ext[j].c_str());
ts->printf(ts->LOG, "Video codec: %s\n", string(&codecchars[0], 4).c_str());
ts->printf(ts->LOG, "File name: video_%s.%s\n", fourcc_str.c_str(), ext.c_str());
ts->printf(ts->LOG, "Video codec: %s\n", fourcc_str.c_str());
ts->printf(ts->LOG, "Error: cannot read the next frame with index %d.\n", i+1);
ts->set_failed_test_info(ts->FAIL_MISSING_TEST_DATA);
break;
@@ -477,23 +487,20 @@ void CV_HighGuiTest::SpecificVideoFileTest(const string& dir, const char codecch
continue;
}
const double thresDbell = 20;
const double thresDbell = 40;
double psnr = PSNR(img, frame);
if (psnr > thresDbell)
{
ts->printf(ts->LOG, "\nReading frame from the file video_%s.%s...\n", string(&codecchars[0], 4).c_str(), ext[j].c_str());
ts->printf(ts->LOG, "\nReading frame from the file video_%s.%s...\n", fourcc_str.c_str(), ext.c_str());
ts->printf(ts->LOG, "Frame index: %d\n", i+1);
ts->printf(ts->LOG, "Difference between saved and original images: %g\n", psnr);
ts->printf(ts->LOG, "Maximum allowed difference: %g\n", thresDbell);
ts->printf(ts->LOG, "Error: too big difference between saved and original images.\n");
continue;
break;
}
}
cap.~VideoCapture();
}
}
@@ -556,9 +563,6 @@ void CV_HighGuiTest::SpecificVideoCameraTest(const string& dir, const char codec
if (framecount == IMAGE_COUNT) break;
}
frame.~Mat();
writer.~VideoWriter();
cv::VideoCapture vcap(dir+"video_"+string(&codecchars[0], 4)+"."+ext[i]);
if (!vcap.isOpened())
@@ -613,12 +617,7 @@ void CV_HighGuiTest::SpecificVideoCameraTest(const string& dir, const char codec
continue;
}
}
img.~Mat();
vcap.~VideoCapture();
}
cap.~VideoCapture();
}
void CV_ImageTest::run(int)
@@ -633,12 +632,12 @@ void CV_SpecificImageTest::run(int)
void CV_VideoTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
const char codecs[][4] = { {'I', 'Y', 'U', 'V'},
{'X', 'V', 'I', 'D'},
{'M', 'P', 'G', '2'},
{'m', 'p', 'e', 'g'},
{'M', 'J', 'P', 'G'} };
printf("%s", ts->get_data_path().c_str());
@@ -656,10 +655,10 @@ void CV_VideoTest::run(int)
void CV_SpecificVideoFileTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
const char codecs[][4] = { {'M', 'P', 'G', '2'},
const char codecs[][4] = { {'m', 'p', 'e', 'g'},
{'X', 'V', 'I', 'D'},
{'M', 'J', 'P', 'G'},
{'I', 'Y', 'U', 'V'} };
@@ -680,7 +679,7 @@ void CV_SpecificVideoCameraTest::run(int)
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
const char codecs[][4] = { {'M', 'P', 'G', '2'},
const char codecs[][4] = { {'m', 'p', 'e', 'g'},
{'X', 'V', 'I', 'D'},
{'M', 'J', 'P', 'G'},
{'I', 'Y', 'U', 'V'} };

View File

@@ -61,6 +61,9 @@ void CV_PositioningTest::CreateTestVideo(const string& format, int codec, int fr
{
stringstream s; s << codec;
//if( format == "mov" && codec == CV_FOURCC('m', 'p', 'e', 'g')
// putchar('$');
cv::VideoWriter writer("test_video_"+s.str()+"."+format, codec, 25, cv::Size(640, 480), false);
for (int i = 0; i < framecount; ++i)
@@ -95,19 +98,17 @@ void CV_PositioningTest::CreateTestVideo(const string& format, int codec, int fr
writer << mat;
}
writer.~VideoWriter();
}
void CV_PositioningTest::run(int)
{
#if defined WIN32 || (defined __linux__ && !defined ANDROID)
#if defined WIN32 || (defined __linux__ && !defined ANDROID) || (defined __APPLE__ && defined HAVE_FFMPEG)
#if !defined HAVE_GSTREAMER || defined HAVE_GSTREAMER_APP
const string format[] = {"avi", "mov", "mp4", "mpg", "wmv", "3gp"};
const char codec[][4] = { {'X', 'V', 'I', 'D'},
{'M', 'P', 'G', '2'},
{'m', 'p', 'e', 'g'},
{'M', 'J', 'P', 'G'} };
size_t n_format = sizeof(format)/sizeof(format[0]),