Merge pull request #2003 from vbystricky:intelperc_camera_2.4
Commit: 6e22be4100
@ -163,7 +163,7 @@ OCV_OPTION(WITH_XINE "Include Xine support (GPL)" OFF
OCV_OPTION(WITH_OPENCL        "Include OpenCL Runtime support"              ON  IF (NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDFFT  "Include AMD OpenCL FFT library support"      ON  IF (NOT ANDROID AND NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDBLAS "Include AMD OpenCL BLAS library support"     ON  IF (NOT ANDROID AND NOT IOS) )

OCV_OPTION(WITH_INTELPERC     "Include Intel Perceptual Computing support"  OFF IF WIN32 )

# OpenCV build components
# ===================================================
@ -840,6 +840,11 @@ if(DEFINED WITH_XINE)
    status("    Xine:" HAVE_XINE THEN "YES (ver ${ALIASOF_libxine_VERSION})" ELSE NO)
  endif(DEFINED WITH_XINE)

  if(DEFINED WITH_INTELPERC)
    status("    Intel PerC:" HAVE_INTELPERC THEN "YES" ELSE NO)
  endif(DEFINED WITH_INTELPERC)

# ========================== Other third-party libraries ==========================
status("")
status("  Other third-party libraries:")
cmake/OpenCVFindIntelPerCSDK.cmake (new file, 20 lines)
@ -0,0 +1,20 @@
# Main variables:
# INTELPERC_LIBRARIES and INTELPERC_INCLUDE_DIR to link against the Intel Perceptual Computing SDK modules
# HAVE_INTELPERC for conditional compilation of OpenCV with/without the Intel Perceptual Computing SDK

if(X86_64)
    find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers")
    find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/x64" DOC "Path to Intel Perceptual Computing SDK interface libraries")
else()
    find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers")
    find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/Win32" DOC "Path to Intel Perceptual Computing SDK interface libraries")
endif()

if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES)
    set(HAVE_INTELPERC TRUE)
else()
    set(HAVE_INTELPERC FALSE)
    message(WARNING "Intel Perceptual Computing SDK library directory (set by INTELPERC_LIB_DIR variable) is not found or does not have Intel Perceptual Computing SDK libraries.")
endif() #if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES)

mark_as_advanced(FORCE INTELPERC_LIBRARIES INTELPERC_INCLUDE_DIR)
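The HAVE_INTELPERC result of this probe ends up in cvconfig.h (see the #cmakedefine hunk below) and is what the C++ sources test. As orientation only, a minimal sketch of that pattern, mirroring how cap.cpp and cap_intelperc.cpp use the define later in this diff; the helper name is hypothetical and not part of the change:

    #include "precomp.hpp"   // pulls in cvconfig.h; HAVE_INTELPERC is defined there when the SDK was found

    static CvCapture* tryCreateIntelPerC(int index)   // hypothetical helper, for illustration only
    {
    #ifdef HAVE_INTELPERC
        return cvCreateCameraCapture_IntelPerC(index);   // backend compiled in
    #else
        (void)index;
        return 0;                                        // backend left out of the build
    #endif
    }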
@ -250,3 +250,8 @@ if (NOT IOS)
    set(HAVE_QTKIT YES)
  endif()
endif()

# --- Intel Perceptual Computing SDK ---
if(WITH_INTELPERC)
  include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindIntelPerCSDK.cmake")
endif(WITH_INTELPERC)
@ -85,6 +85,9 @@
/* Apple ImageIO Framework */
#cmakedefine HAVE_IMAGEIO

/* Intel Perceptual Computing SDK library */
#cmakedefine HAVE_INTELPERC

/* Intel Integrated Performance Primitives */
#cmakedefine HAVE_IPP
doc/user_guide/ug_intelperc.rst (new file, 79 lines)
@ -0,0 +1,79 @@
*******
HighGUI
*******

.. highlight:: cpp

Using Creative Senz3D and other Intel Perceptual Computing SDK compatible depth sensors
========================================================================================

Depth sensors compatible with the Intel Perceptual Computing SDK are supported through the ``VideoCapture`` class. Depth maps, RGB images and some other output formats can be retrieved through the familiar ``VideoCapture`` interface.

In order to use a depth sensor with OpenCV you should do the following preliminary steps:

#. Install the Intel Perceptual Computing SDK (from http://www.intel.com/software/perceptual).

#. Configure OpenCV with Intel Perceptual Computing SDK support by setting the ``WITH_INTELPERC`` flag in CMake. If the SDK is found in its install folders, OpenCV will be built with the Intel Perceptual Computing SDK library (see the ``Intel PerC`` status line in the CMake log). If CMake does not find the SDK installation folder automatically, set the CMake variables ``INTELPERC_LIB_DIR`` and ``INTELPERC_INCLUDE_DIR`` to the proper values.

#. Build OpenCV.

VideoCapture can retrieve the following data:

#. data given from the depth generator:

   * ``CV_CAP_INTELPERC_DEPTH_MAP`` - each pixel is a 16-bit integer. The value indicates the distance from an object to the camera's XY plane or the Cartesian depth. (CV_16UC1)

   * ``CV_CAP_INTELPERC_UVDEPTH_MAP`` - each pixel contains two 32-bit floating point values in the range of 0-1, representing the mapping of depth coordinates to the color coordinates. (CV_32FC2)

   * ``CV_CAP_INTELPERC_IR_MAP`` - each pixel is a 16-bit integer. The value indicates the intensity of the reflected laser beam. (CV_16UC1)

#. data given from the RGB image generator:

   * ``CV_CAP_INTELPERC_IMAGE`` - color image. (CV_8UC3)

To get a depth map from the depth sensor, use ``VideoCapture::operator >>``, e.g. ::

    VideoCapture capture( CV_CAP_INTELPERC );
    for(;;)
    {
        Mat depthMap;
        capture >> depthMap;

        if( waitKey( 30 ) >= 0 )
            break;
    }

To retrieve several data maps at once, use ``VideoCapture::grab`` and ``VideoCapture::retrieve``, e.g. ::

    VideoCapture capture(CV_CAP_INTELPERC);
    for(;;)
    {
        Mat depthMap;
        Mat image;
        Mat irImage;

        capture.grab();

        capture.retrieve( depthMap, CV_CAP_INTELPERC_DEPTH_MAP );
        capture.retrieve( image, CV_CAP_INTELPERC_IMAGE );
        capture.retrieve( irImage, CV_CAP_INTELPERC_IR_MAP );

        if( waitKey( 30 ) >= 0 )
            break;
    }

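The ``CV_32FC2`` UV map can be used to look up, for each depth pixel, the matching position in the color image. The following fragment is only an illustration added for this write-up: it assumes ``capture`` is an opened ``CV_CAP_INTELPERC`` capture, a successful ``grab``, and a valid depth-map position ``(row, col)``; the lookup mirrors what the bundled ``intelperc_capture.cpp`` sample does ::

    Mat uv, color;
    capture.retrieve( uv, CV_CAP_INTELPERC_UVDEPTH_MAP ); // CV_32FC2, values in the range 0-1
    capture.retrieve( color, CV_CAP_INTELPERC_IMAGE );    // CV_8UC3

    Vec2f m = uv.at<Vec2f>( row, col );                   // normalized color-image coordinates
    int colorX = cvRound( m[0] * color.cols );
    int colorY = cvRound( m[1] * color.rows );
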
To set and get properties of the sensor's data generators, use the ``VideoCapture::set`` and ``VideoCapture::get`` methods respectively, e.g. ::

    VideoCapture capture( CV_CAP_INTELPERC );
    capture.set( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0 );
    cout << "FPS    " << capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR+CV_CAP_PROP_FPS ) << endl;

Since two types of sensor data generators are supported (an image generator and a depth generator), there are two flags that should be used to set/get a property of the needed generator:

* CV_CAP_INTELPERC_IMAGE_GENERATOR -- a flag for access to the image generator properties.

* CV_CAP_INTELPERC_DEPTH_GENERATOR -- a flag for access to the depth generator properties. This flag value is assumed by default if neither of the two possible values of the property is set.

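For illustration, a short sketch (not part of the original text) that touches one property of each generator; the particular properties are arbitrary examples chosen from the constants introduced by this change ::

    VideoCapture capture( CV_CAP_INTELPERC );

    // image (RGB) generator property
    double brightness = capture.get( CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_BRIGHTNESS );
    capture.set( CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_BRIGHTNESS, brightness );

    // depth generator property; the flag may be omitted, since the depth generator is the default
    double threshold = capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD );
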
For more information please refer to the intelperc_capture.cpp_ usage example in the ``opencv/samples/cpp`` folder.

.. _intelperc_capture.cpp: https://github.com/Itseez/opencv/tree/master/samples/cpp/intelperc_capture.cpp
@ -9,3 +9,4 @@ OpenCV User Guide
    ug_features2d.rst
    ug_highgui.rst
    ug_traincascade.rst
    ug_intelperc.rst
@ -218,6 +218,12 @@ elseif(HAVE_QTKIT)
  list(APPEND HIGHGUI_LIBRARIES "-framework QTKit" "-framework QuartzCore" "-framework AppKit")
endif()

if(HAVE_INTELPERC)
  list(APPEND highgui_srcs src/cap_intelperc.cpp)
  ocv_include_directories(${INTELPERC_INCLUDE_DIR})
  list(APPEND HIGHGUI_LIBRARIES ${INTELPERC_LIBRARIES})
endif(HAVE_INTELPERC)

if(IOS)
  add_definitions(-DHAVE_IOS=1)
  list(APPEND highgui_srcs src/ios_conversions.mm src/cap_ios_abstract_camera.mm src/cap_ios_photo_camera.mm src/cap_ios_video_camera.mm)
@ -312,7 +312,9 @@ enum
    CV_CAP_AVFOUNDATION = 1200,   // AVFoundation framework for iOS (OS X Lion will have the same API)

    CV_CAP_GIGANETIX = 1300,      // Smartek Giganetix GigEVisionSDK

    CV_CAP_INTELPERC = 1500       // Intel Perceptual Computing SDK
};

/* start capturing frames from camera: index = camera_index + domain_offset (CV_CAP_*) */
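Per the comment above, a camera domain is selected by passing its base constant (plus an optional device index) to the capture constructors. A minimal usage sketch, assuming OpenCV was built with WITH_INTELPERC; only constants and functions already present in this diff or in the existing highgui API are used:

    #include "opencv2/highgui/highgui.hpp"

    int main()
    {
        // C++ API: the domain constant alone opens the first Intel PerC device
        cv::VideoCapture capture( CV_CAP_INTELPERC );

        // C API equivalent
        CvCapture* cap = cvCreateCameraCapture( CV_CAP_INTELPERC );
        cvReleaseCapture( &cap );

        return capture.isOpened() ? 0 : 1;
    }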
@ -458,16 +460,29 @@ enum
    CV_CAP_PROP_IOS_DEVICE_EXPOSURE = 9002,
    CV_CAP_PROP_IOS_DEVICE_FLASH = 9003,
    CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE = 9004,
    CV_CAP_PROP_IOS_DEVICE_TORCH = 9005,

    // Properties of cameras available through Smartek Giganetix Ethernet Vision interface
    /* --- Vladimir Litvinenko (litvinenko.vladimir@gmail.com) --- */
    CV_CAP_PROP_GIGA_FRAME_OFFSET_X   = 10001,
    CV_CAP_PROP_GIGA_FRAME_OFFSET_Y   = 10002,
    CV_CAP_PROP_GIGA_FRAME_WIDTH_MAX  = 10003,
    CV_CAP_PROP_GIGA_FRAME_HEIGH_MAX  = 10004,
    CV_CAP_PROP_GIGA_FRAME_SENS_WIDTH = 10005,
    CV_CAP_PROP_GIGA_FRAME_SENS_HEIGH = 10006,

    CV_CAP_PROP_INTELPERC_PROFILE_COUNT               = 11001,
    CV_CAP_PROP_INTELPERC_PROFILE_IDX                 = 11002,
    CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE  = 11003,
    CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE      = 11004,
    CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD  = 11005,
    CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ     = 11006,
    CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT     = 11007,

    // Intel PerC streams
    CV_CAP_INTELPERC_DEPTH_GENERATOR = 1 << 29,
    CV_CAP_INTELPERC_IMAGE_GENERATOR = 1 << 28,
    CV_CAP_INTELPERC_GENERATORS_MASK = CV_CAP_INTELPERC_DEPTH_GENERATOR + CV_CAP_INTELPERC_IMAGE_GENERATOR
};

enum
@ -548,6 +563,14 @@ enum
    CV_CAP_ANDROID_ANTIBANDING_OFF
};

enum
{
    CV_CAP_INTELPERC_DEPTH_MAP   = 0, // Each pixel is a 16-bit integer. The value indicates the distance from an object to the camera's XY plane or the Cartesian depth.
    CV_CAP_INTELPERC_UVDEPTH_MAP = 1, // Each pixel contains two 32-bit floating point values in the range of 0-1, representing the mapping of depth coordinates to the color coordinates.
    CV_CAP_INTELPERC_IR_MAP      = 2, // Each pixel is a 16-bit integer. The value indicates the intensity of the reflected laser beam.
    CV_CAP_INTELPERC_IMAGE       = 3
};
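These are the values that VideoCapture::retrieve expects as its second argument. A rough display-oriented sketch, added here for illustration only; it assumes an opened CV_CAP_INTELPERC capture, and the 1/4 scaling simply mimics the (value >> 2) rescaling used by the bundled intelperc_capture.cpp sample rather than any fixed rule:

    #include "opencv2/highgui/highgui.hpp"

    // illustration only: grab one frame and show the depth map scaled down to 8 bit
    static void showOneDepthFrame(cv::VideoCapture& capture)
    {
        cv::Mat depth, depth8u;
        if (capture.grab() && capture.retrieve(depth, CV_CAP_INTELPERC_DEPTH_MAP))
        {
            depth.convertTo(depth8u, CV_8U, 1.0 / 4.0);   // crude rescaling for display
            cv::imshow("depth", depth8u);
            cv::waitKey(30);
        }
    }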
/* retrieve or set capture properties */
CVAPI(double) cvGetCaptureProperty( CvCapture* capture, int property_id );
CVAPI(int)    cvSetCaptureProperty( CvCapture* capture, int property_id, double value );
@ -155,6 +155,9 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
#endif
#ifdef HAVE_GIGE_API
        CV_CAP_GIGANETIX,
#endif
#ifdef HAVE_INTELPERC
        CV_CAP_INTELPERC,
#endif
        -1
    };

@ -193,6 +196,7 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
    defined(HAVE_AVFOUNDATION) || \
    defined(HAVE_ANDROID_NATIVE_CAMERA) || \
    defined(HAVE_GIGE_API) || \
    defined(HAVE_INTELPERC) || \
    (0)
        // local variable to memorize the captured device
        CvCapture *capture;

@ -341,6 +345,14 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
                return capture;
            break; // CV_CAP_GIGANETIX
#endif

#ifdef HAVE_INTELPERC
        case CV_CAP_INTELPERC:
            capture = cvCreateCameraCapture_IntelPerC(index);
            if (capture)
                return capture;
            break; // CV_CAP_INTELPERC
#endif
        }
    }
modules/highgui/src/cap_intelperc.cpp (new file, 714 lines)
@ -0,0 +1,714 @@
#include "precomp.hpp"

#ifdef HAVE_INTELPERC

#include "pxcsession.h"
#include "pxcsmartptr.h"
#include "pxccapture.h"

class CvIntelPerCStreamBase
{
protected:
    struct FrameInternal
    {
        IplImage* retrieveFrame()
        {
            if (m_mat.empty())
                return NULL;
            m_iplHeader = IplImage(m_mat);
            return &m_iplHeader;
        }
        cv::Mat m_mat;
    private:
        IplImage m_iplHeader;
    };
public:
    CvIntelPerCStreamBase()
        : m_profileIdx(-1)
        , m_frameIdx(0)
        , m_timeStampStartNS(0)
    {
    }
    virtual ~CvIntelPerCStreamBase()
    {
    }

    bool isValid()
    {
        return (m_device.IsValid() && m_stream.IsValid());
    }
    bool grabFrame()
    {
        if (!m_stream.IsValid())
            return false;
        if (-1 == m_profileIdx)
        {
            if (!setProperty(CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0))
                return false;
        }
        PXCSmartPtr<PXCImage> pxcImage; PXCSmartSP sp;
        if (PXC_STATUS_NO_ERROR > m_stream->ReadStreamAsync(&pxcImage, &sp))
            return false;
        if (PXC_STATUS_NO_ERROR > sp->Synchronize())
            return false;
        if (0 == m_timeStampStartNS)
            m_timeStampStartNS = pxcImage->QueryTimeStamp();
        m_timeStamp = (double)((pxcImage->QueryTimeStamp() - m_timeStampStartNS) / 10000);
        m_frameIdx++;
        return prepareIplImage(pxcImage);
    }
    int getProfileIDX() const
    {
        return m_profileIdx;
    }
public:
    virtual bool initStream(PXCSession *session) = 0;
    virtual double getProperty(int propIdx)
    {
        double ret = 0.0;
        switch (propIdx)
        {
        case CV_CAP_PROP_INTELPERC_PROFILE_COUNT:
            ret = (double)m_profiles.size();
            break;
        case CV_CAP_PROP_FRAME_WIDTH :
            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
                ret = (double)m_profiles[m_profileIdx].imageInfo.width;
            break;
        case CV_CAP_PROP_FRAME_HEIGHT :
            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
                ret = (double)m_profiles[m_profileIdx].imageInfo.height;
            break;
        case CV_CAP_PROP_FPS :
            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
            {
                ret = ((double)m_profiles[m_profileIdx].frameRateMin.numerator / (double)m_profiles[m_profileIdx].frameRateMin.denominator
                       + (double)m_profiles[m_profileIdx].frameRateMax.numerator / (double)m_profiles[m_profileIdx].frameRateMax.denominator) / 2.0;
            }
            break;
        case CV_CAP_PROP_POS_FRAMES:
            ret = (double)m_frameIdx;
            break;
        case CV_CAP_PROP_POS_MSEC:
            ret = m_timeStamp;
            break;
        };
        return ret;
    }
    virtual bool setProperty(int propIdx, double propVal)
    {
        bool isSet = false;
        switch (propIdx)
        {
        case CV_CAP_PROP_INTELPERC_PROFILE_IDX:
            {
                int propValInt = (int)propVal;
                if ((0 <= propValInt) && (propValInt < m_profiles.size()))
                {
                    if (m_profileIdx != propValInt)
                    {
                        m_profileIdx = propValInt;
                        if (m_stream.IsValid())
                            m_stream->SetProfile(&m_profiles[m_profileIdx]);
                        m_frameIdx = 0;
                        m_timeStampStartNS = 0;
                    }
                    isSet = true;
                }
            }
            break;
        };
        return isSet;
    }
protected:
    PXCSmartPtr<PXCCapture::Device> m_device;
    bool initDevice(PXCSession *session)
    {
        if (NULL == session)
            return false;

        pxcStatus sts = PXC_STATUS_NO_ERROR;
        PXCSession::ImplDesc templat;
        memset(&templat,0,sizeof(templat));
        templat.group   = PXCSession::IMPL_GROUP_SENSOR;
        templat.subgroup= PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE;

        for (int modidx = 0; PXC_STATUS_NO_ERROR <= sts; modidx++)
        {
            PXCSession::ImplDesc desc;
            sts = session->QueryImpl(&templat, modidx, &desc);
            if (PXC_STATUS_NO_ERROR > sts)
                break;

            PXCSmartPtr<PXCCapture> capture;
            sts = session->CreateImpl<PXCCapture>(&desc, &capture);
            if (!capture.IsValid())
                continue;

            /* enumerate devices */
            for (int devidx = 0; PXC_STATUS_NO_ERROR <= sts; devidx++)
            {
                PXCSmartPtr<PXCCapture::Device> device;
                sts = capture->CreateDevice(devidx, &device);
                if (PXC_STATUS_NO_ERROR <= sts)
                {
                    m_device = device.ReleasePtr();
                    return true;
                }
            }
        }
        return false;
    }

    PXCSmartPtr<PXCCapture::VideoStream> m_stream;
    void initStreamImpl(PXCImage::ImageType type)
    {
        if (!m_device.IsValid())
            return;

        pxcStatus sts = PXC_STATUS_NO_ERROR;
        /* enumerate streams */
        for (int streamidx = 0; PXC_STATUS_NO_ERROR <= sts; streamidx++)
        {
            PXCCapture::Device::StreamInfo sinfo;
            sts = m_device->QueryStream(streamidx, &sinfo);
            if (PXC_STATUS_NO_ERROR > sts)
                break;
            if (PXCCapture::VideoStream::CUID != sinfo.cuid)
                continue;
            if (type != sinfo.imageType)
                continue;

            sts = m_device->CreateStream<PXCCapture::VideoStream>(streamidx, &m_stream);
            if (PXC_STATUS_NO_ERROR == sts)
                break;
            m_stream.ReleaseRef();
        }
    }
protected:
    std::vector<PXCCapture::VideoStream::ProfileInfo> m_profiles;
    int m_profileIdx;
    int m_frameIdx;
    pxcU64 m_timeStampStartNS;
    double m_timeStamp;

    virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& /*pinfo*/)
    {
        return true;
    }
    void enumProfiles()
    {
        m_profiles.clear();
        if (!m_stream.IsValid())
            return;
        pxcStatus sts = PXC_STATUS_NO_ERROR;
        for (int profidx = 0; PXC_STATUS_NO_ERROR <= sts; profidx++)
        {
            PXCCapture::VideoStream::ProfileInfo pinfo;
            sts = m_stream->QueryProfile(profidx, &pinfo);
            if (PXC_STATUS_NO_ERROR > sts)
                break;
            if (validProfile(pinfo))
                m_profiles.push_back(pinfo);
        }
    }
    virtual bool prepareIplImage(PXCImage *pxcImage) = 0;
};

class CvIntelPerCStreamImage
    : public CvIntelPerCStreamBase
{
public:
    CvIntelPerCStreamImage()
    {
    }
    virtual ~CvIntelPerCStreamImage()
    {
    }

    virtual bool initStream(PXCSession *session)
    {
        if (!initDevice(session))
            return false;
        initStreamImpl(PXCImage::IMAGE_TYPE_COLOR);
        if (!m_stream.IsValid())
            return false;
        enumProfiles();
        return true;
    }
    virtual double getProperty(int propIdx)
    {
        switch (propIdx)
        {
        case CV_CAP_PROP_BRIGHTNESS:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_CONTRAST:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_SATURATION:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_HUE:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_GAMMA:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_SHARPNESS:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_GAIN:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_BACKLIGHT:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_EXPOSURE:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        //Add image stream specific properties
        }
        return CvIntelPerCStreamBase::getProperty(propIdx);
    }
    virtual bool setProperty(int propIdx, double propVal)
    {
        switch (propIdx)
        {
        case CV_CAP_PROP_BRIGHTNESS:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, (float)propVal));
            }
            break;
        case CV_CAP_PROP_CONTRAST:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, (float)propVal));
            }
            break;
        case CV_CAP_PROP_SATURATION:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, (float)propVal));
            }
            break;
        case CV_CAP_PROP_HUE:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, (float)propVal));
            }
            break;
        case CV_CAP_PROP_GAMMA:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, (float)propVal));
            }
            break;
        case CV_CAP_PROP_SHARPNESS:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, (float)propVal));
            }
            break;
        case CV_CAP_PROP_GAIN:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, (float)propVal));
            }
            break;
        case CV_CAP_PROP_BACKLIGHT:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, (float)propVal));
            }
            break;
        case CV_CAP_PROP_EXPOSURE:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, (float)propVal));
            }
            break;
        //Add image stream specific properties
        }
        return CvIntelPerCStreamBase::setProperty(propIdx, propVal);
    }
public:
    IplImage* retrieveFrame()
    {
        return m_frame.retrieveFrame();
    }
protected:
    FrameInternal m_frame;
    bool prepareIplImage(PXCImage *pxcImage)
    {
        if (NULL == pxcImage)
            return false;
        PXCImage::ImageInfo info;
        pxcImage->QueryInfo(&info);

        PXCImage::ImageData data;
        pxcImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_RGB24, &data);

        if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type)
            return false;

        cv::Mat temp(info.height, info.width, CV_8UC3, data.planes[0], data.pitches[0]);
        temp.copyTo(m_frame.m_mat);

        pxcImage->ReleaseAccess(&data);
        return true;
    }
};

class CvIntelPerCStreamDepth
    : public CvIntelPerCStreamBase
{
public:
    CvIntelPerCStreamDepth()
    {
    }
    virtual ~CvIntelPerCStreamDepth()
    {
    }

    virtual bool initStream(PXCSession *session)
    {
        if (!initDevice(session))
            return false;
        initStreamImpl(PXCImage::IMAGE_TYPE_DEPTH);
        if (!m_stream.IsValid())
            return false;
        enumProfiles();
        return true;
    }
    virtual double getProperty(int propIdx)
    {
        switch (propIdx)
        {
        case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD:
            {
                if (!m_device.IsValid())
                    return 0.0;
                float fret = 0.0f;
                if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, &fret))
                    return (double)fret;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ:
            {
                if (!m_device.IsValid())
                    return 0.0f;
                PXCPointF32 ptf;
                if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf))
                    return (double)ptf.x;
                return 0.0;
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT:
            {
                if (!m_device.IsValid())
                    return 0.0f;
                PXCPointF32 ptf;
                if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf))
                    return (double)ptf.y;
                return 0.0;
            }
            break;
        //Add depth stream specific properties
        }
        return CvIntelPerCStreamBase::getProperty(propIdx);
    }
    virtual bool setProperty(int propIdx, double propVal)
    {
        switch (propIdx)
        {
        case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, (float)propVal));
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, (float)propVal));
            }
            break;
        case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD:
            {
                if (!m_device.IsValid())
                    return false;
                return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, (float)propVal));
            }
            break;
        //Add depth stream specific properties
        }
        return CvIntelPerCStreamBase::setProperty(propIdx, propVal);
    }
public:
    IplImage* retrieveDepthFrame()
    {
        return m_frameDepth.retrieveFrame();
    }
    IplImage* retrieveIRFrame()
    {
        return m_frameIR.retrieveFrame();
    }
    IplImage* retrieveUVFrame()
    {
        return m_frameUV.retrieveFrame();
    }
protected:
    virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo)
    {
        return (PXCImage::COLOR_FORMAT_DEPTH == pinfo.imageInfo.format);
    }
protected:
    FrameInternal m_frameDepth;
    FrameInternal m_frameIR;
    FrameInternal m_frameUV;

    bool prepareIplImage(PXCImage *pxcImage)
    {
        if (NULL == pxcImage)
            return false;
        PXCImage::ImageInfo info;
        pxcImage->QueryInfo(&info);

        PXCImage::ImageData data;
        pxcImage->AcquireAccess(PXCImage::ACCESS_READ, &data);

        if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type)
            return false;

        if (PXCImage::COLOR_FORMAT_DEPTH != data.format)
            return false;

        {
            cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[0], data.pitches[0]);
            temp.copyTo(m_frameDepth.m_mat);
        }
        {
            cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[1], data.pitches[1]);
            temp.copyTo(m_frameIR.m_mat);
        }
        {
            cv::Mat temp(info.height, info.width, CV_32FC2, data.planes[2], data.pitches[2]);
            temp.copyTo(m_frameUV.m_mat);
        }

        pxcImage->ReleaseAccess(&data);
        return true;
    }
};

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CvCapture_IntelPerC : public CvCapture
{
public:
    CvCapture_IntelPerC(int /*index*/)
        : m_contextOpened(false)
    {
        pxcStatus sts = PXCSession_Create(&m_session);
        if (PXC_STATUS_NO_ERROR > sts)
            return;
        m_contextOpened = m_imageStream.initStream(m_session);
        m_contextOpened &= m_depthStream.initStream(m_session);
    }
    virtual ~CvCapture_IntelPerC(){}

    virtual double getProperty(int propIdx)
    {
        double propValue = 0;
        int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK;
        if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK))
        {
            propValue = m_imageStream.getProperty(purePropIdx);
        }
        else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK))
        {
            propValue = m_depthStream.getProperty(purePropIdx);
        }
        else
        {
            propValue = m_depthStream.getProperty(purePropIdx);
        }
        return propValue;
    }
    virtual bool setProperty(int propIdx, double propVal)
    {
        bool isSet = false;
        int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK;
        if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK))
        {
            isSet = m_imageStream.setProperty(purePropIdx, propVal);
        }
        else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK))
        {
            isSet = m_depthStream.setProperty(purePropIdx, propVal);
        }
        else
        {
            isSet = m_depthStream.setProperty(purePropIdx, propVal);
        }
        return isSet;
    }

    bool grabFrame()
    {
        if (!isOpened())
            return false;

        bool isGrabbed = false;
        if (m_depthStream.isValid())
            isGrabbed = m_depthStream.grabFrame();
        if ((m_imageStream.isValid()) && (-1 != m_imageStream.getProfileIDX()))
            isGrabbed &= m_imageStream.grabFrame();

        return isGrabbed;
    }

    virtual IplImage* retrieveFrame(int outputType)
    {
        IplImage* image = 0;
        switch (outputType)
        {
        case CV_CAP_INTELPERC_DEPTH_MAP:
            image = m_depthStream.retrieveDepthFrame();
            break;
        case CV_CAP_INTELPERC_UVDEPTH_MAP:
            image = m_depthStream.retrieveUVFrame();
            break;
        case CV_CAP_INTELPERC_IR_MAP:
            image = m_depthStream.retrieveIRFrame();
            break;
        case CV_CAP_INTELPERC_IMAGE:
            image = m_imageStream.retrieveFrame();
            break;
        }
        CV_Assert(NULL != image);
        return image;
    }

    bool isOpened() const
    {
        return m_contextOpened;
    }
protected:
    bool m_contextOpened;

    PXCSmartPtr<PXCSession> m_session;
    CvIntelPerCStreamImage m_imageStream;
    CvIntelPerCStreamDepth m_depthStream;
};


CvCapture* cvCreateCameraCapture_IntelPerC(int index)
{
    CvCapture_IntelPerC* capture = new CvCapture_IntelPerC(index);

    if( capture->isOpened() )
        return capture;

    delete capture;
    return 0;
}


#endif //HAVE_INTELPERC
@ -127,6 +127,7 @@ CvCapture* cvCreateFileCapture_OpenNI( const char* filename );
CvCapture* cvCreateCameraCapture_Android( int index );
CvCapture* cvCreateCameraCapture_XIMEA( int index );
CvCapture* cvCreateCameraCapture_AVFoundation(int index);
CvCapture* cvCreateCameraCapture_IntelPerC(int index);

CVAPI(int) cvHaveImageReader(const char* filename);
@ -34,6 +34,7 @@
    defined(HAVE_XIMEA) || \
    defined(HAVE_AVFOUNDATION) || \
    defined(HAVE_GIGE_API) || \
    defined(HAVE_INTELPERC) || \
    (0)
    //defined(HAVE_ANDROID_NATIVE_CAMERA) || - enable after #1193
#  define BUILD_WITH_CAMERA_SUPPORT 1
@ -18,6 +18,8 @@ class_ignore_list = (
const_ignore_list = (
    "CV_CAP_OPENNI",
    "CV_CAP_PROP_OPENNI_",
    "CV_CAP_INTELPERC",
    "CV_CAP_PROP_INTELPERC_",
    "WINDOW_AUTOSIZE",
    "CV_WND_PROP_",
    "CV_WINDOW_",
samples/cpp/intelperc_capture.cpp (new file, 376 lines)
@ -0,0 +1,376 @@
// intelperc_capture.cpp : Defines the entry point for the console application.
//

#include "opencv2/highgui/highgui.hpp"

#include <iostream>
#include <cstring>
#include <cstdlib>
#include <cfloat>

using namespace cv;
using namespace std;

static bool g_printStreamSetting = false;
static int g_imageStreamProfileIdx = -1;
static int g_depthStreamProfileIdx = -1;
static bool g_irStreamShow = false;
static double g_imageBrightness = -DBL_MAX;
static double g_imageContrast = -DBL_MAX;
static bool g_printTiming = false;
static bool g_showClosedPoint = false;


static int g_closedDepthPoint[2];

static void printUsage(const char *arg0)
{
    const char *filename = arg0;
    while (*filename)
        filename++;
    while ((arg0 <= filename) && ('\\' != *filename) && ('/' != *filename))
        filename--;
    filename++;

    cout << "This program demonstrates usage of a camera supported\nby the Intel Perceptual Computing SDK." << endl << endl;
    cout << "usage: " << filename << " [-ps] [-isp IDX] [-dsp IDX]\n [-ir] [-imb VAL] [-imc VAL]" << endl << endl;
    cout << "   -ps,            print streams setting and profiles" << endl;
    cout << "   -isp IDX,       set profile index of the image stream" << endl;
    cout << "   -dsp IDX,       set profile index of the depth stream" << endl;
    cout << "   -ir,            show data from IR stream" << endl;
    cout << "   -imb VAL,       set brightness value for the image stream" << endl;
    cout << "   -imc VAL,       set contrast value for the image stream" << endl;
    cout << "   -pts,           print frame index and frame time" << endl;
    cout << "   --show-closed,  mark the closest depth point on the displayed images" << endl;
    cout << endl;
}

static void parseCMDLine(int argc, char* argv[])
{
    if( argc == 1 )
    {
        printUsage(argv[0]);
    }
    else
    {
        for( int i = 1; i < argc; i++ )
        {
            if ((0 == strcmp(argv[i], "--help")) || (0 == strcmp( argv[i], "-h")))
            {
                printUsage(argv[0]);
                exit(0);
            }
            else if ((0 == strcmp( argv[i], "--print-streams")) || (0 == strcmp( argv[i], "-ps")))
            {
                g_printStreamSetting = true;
            }
            else if ((0 == strcmp( argv[i], "--image-stream-prof")) || (0 == strcmp( argv[i], "-isp")))
            {
                g_imageStreamProfileIdx = atoi(argv[++i]);
            }
            else if ((0 == strcmp( argv[i], "--depth-stream-prof")) || (0 == strcmp( argv[i], "-dsp")))
            {
                g_depthStreamProfileIdx = atoi(argv[++i]);
            }
            else if (0 == strcmp( argv[i], "-ir"))
            {
                g_irStreamShow = true;
            }
            else if (0 == strcmp( argv[i], "-imb"))
            {
                g_imageBrightness = atof(argv[++i]);
            }
            else if (0 == strcmp( argv[i], "-imc"))
            {
                g_imageContrast = atof(argv[++i]);
            }
            else if (0 == strcmp(argv[i], "-pts"))
            {
                g_printTiming = true;
            }
            else if (0 == strcmp(argv[i], "--show-closed"))
            {
                g_showClosedPoint = true;
            }
            else
            {
                cout << "Unsupported command line argument: " << argv[i] << "." << endl;
                exit(-1);
            }
        }
        if (g_showClosedPoint && (-1 == g_depthStreamProfileIdx))
        {
            cerr << "For --show-closed a depth profile has to be selected" << endl;
            exit(-1);
        }
    }
}

static void printStreamProperties(VideoCapture &capture)
{
    size_t profilesCount = (size_t)capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_COUNT);
    cout << "Image stream." << endl;
    cout << "  Brightness = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_BRIGHTNESS) << endl;
    cout << "  Contrast = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_CONTRAST) << endl;
    cout << "  Saturation = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_SATURATION) << endl;
    cout << "  Hue = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_HUE) << endl;
    cout << "  Gamma = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_GAMMA) << endl;
    cout << "  Sharpness = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_SHARPNESS) << endl;
    cout << "  Gain = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_GAIN) << endl;
    cout << "  Backlight = " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_BACKLIGHT) << endl;
    cout << "Image streams profiles:" << endl;
    for (size_t i = 0; i < profilesCount; i++)
    {
        capture.set(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)i);
        cout << "  Profile[" << i << "]: ";
        cout << "width = " <<
            (int)capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_FRAME_WIDTH);
        cout << ", height = " <<
            (int)capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_FRAME_HEIGHT);
        cout << ", fps = " <<
            capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_FPS);
        cout << endl;
    }

    profilesCount = (size_t)capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_COUNT);
    cout << "Depth stream." << endl;
    cout << "  Low confidence value = " << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE) << endl;
    cout << "  Saturation value = " << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE) << endl;
    cout << "  Confidence threshold = " << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD) << endl;
    cout << "  Focal length = (" << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ) << ", "
        << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT) << ")" << endl;
    cout << "Depth streams profiles:" << endl;
    for (size_t i = 0; i < profilesCount; i++)
    {
        capture.set(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)i);
        cout << "  Profile[" << i << "]: ";
        cout << "width = " <<
            (int)capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FRAME_WIDTH);
        cout << ", height = " <<
            (int)capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FRAME_HEIGHT);
        cout << ", fps = " <<
            capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FPS);
        cout << endl;
    }
}

static void imshowImage(const char *winname, Mat &image, VideoCapture &capture)
{
    if (g_showClosedPoint)
    {
        Mat uvMap;
        if (capture.retrieve(uvMap, CV_CAP_INTELPERC_UVDEPTH_MAP))
        {
            float *uvmap = (float *)uvMap.ptr() + 2 * (g_closedDepthPoint[0] * uvMap.cols + g_closedDepthPoint[1]);
            int x = (int)((*uvmap) * image.cols); uvmap++;
            int y = (int)((*uvmap) * image.rows);

            if ((0 <= x) && (0 <= y))
            {
                static const int pointSize = 4;
                for (int row = y; row < min(y + pointSize, image.rows); row++)
                {
                    uchar* ptrDst = image.ptr(row) + x * 3 + 2;//+2 -> Red
                    for (int col = 0; col < min(pointSize, image.cols - x); col++, ptrDst+=3)
                    {
                        *ptrDst = 255;
                    }
                }
            }
        }
    }
    imshow(winname, image);
}
static void imshowIR(const char *winname, Mat &ir)
{
    Mat image;
    if (g_showClosedPoint)
    {
        image.create(ir.rows, ir.cols, CV_8UC3);
        for (int row = 0; row < ir.rows; row++)
        {
            uchar* ptrDst = image.ptr(row);
            short* ptrSrc = (short*)ir.ptr(row);
            for (int col = 0; col < ir.cols; col++, ptrSrc++)
            {
                uchar val = (uchar) ((*ptrSrc) >> 2);
                *ptrDst = val; ptrDst++;
                *ptrDst = val; ptrDst++;
                *ptrDst = val; ptrDst++;
            }
        }

        static const int pointSize = 4;
        for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++)
        {
            uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
            for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3)
            {
                *ptrDst = 255;
            }
        }
    }
    else
    {
        image.create(ir.rows, ir.cols, CV_8UC1);
        for (int row = 0; row < ir.rows; row++)
        {
            uchar* ptrDst = image.ptr(row);
            short* ptrSrc = (short*)ir.ptr(row);
            for (int col = 0; col < ir.cols; col++, ptrSrc++, ptrDst++)
            {
                *ptrDst = (uchar) ((*ptrSrc) >> 2);
            }
        }
    }

    imshow(winname, image);
}
static void imshowDepth(const char *winname, Mat &depth, VideoCapture &capture)
{
    short lowValue = (short)capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE);
    short saturationValue = (short)capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE);

    Mat image;
    if (g_showClosedPoint)
    {
        image.create(depth.rows, depth.cols, CV_8UC3);
        for (int row = 0; row < depth.rows; row++)
        {
            uchar* ptrDst = image.ptr(row);
            short* ptrSrc = (short*)depth.ptr(row);
            for (int col = 0; col < depth.cols; col++, ptrSrc++)
            {
                if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc)))
                {
                    *ptrDst = 0; ptrDst++;
                    *ptrDst = 0; ptrDst++;
                    *ptrDst = 0; ptrDst++;
                }
                else
                {
                    uchar val = (uchar) ((*ptrSrc) >> 2);
                    *ptrDst = val; ptrDst++;
                    *ptrDst = val; ptrDst++;
                    *ptrDst = val; ptrDst++;
                }
            }
        }

        static const int pointSize = 4;
        for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++)
        {
            uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
            for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3)
            {
                *ptrDst = 255;
            }
        }
    }
    else
    {
        image.create(depth.rows, depth.cols, CV_8UC1);
        for (int row = 0; row < depth.rows; row++)
        {
            uchar* ptrDst = image.ptr(row);
            short* ptrSrc = (short*)depth.ptr(row);
            for (int col = 0; col < depth.cols; col++, ptrSrc++, ptrDst++)
            {
                if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc)))
                    *ptrDst = 0;
                else
                    *ptrDst = (uchar) ((*ptrSrc) >> 2);
            }
        }
    }
    imshow(winname, image);
}

int main(int argc, char* argv[])
{
    parseCMDLine(argc, argv);

    VideoCapture capture;
    capture.open(CV_CAP_INTELPERC);
    if (!capture.isOpened())
    {
        cerr << "Cannot open a capture object." << endl;
        return -1;
    }

    if (g_printStreamSetting)
        printStreamProperties(capture);

    if (-1 != g_imageStreamProfileIdx)
    {
        if (!capture.set(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_imageStreamProfileIdx))
        {
            cerr << "Cannot set up the image stream." << endl;
            return -1;
        }
    }
    if (-1 != g_depthStreamProfileIdx)
    {
        if (!capture.set(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_depthStreamProfileIdx))
        {
            cerr << "Cannot set up the depth stream." << endl;
            return -1;
        }
    }
    else if (g_irStreamShow)
    {
        if (!capture.set(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0.0))
        {
            cerr << "Cannot set up the IR stream." << endl;
            return -1;
        }
    }
    else
    {
        cout << "Streams not selected" << endl;
        return 0;
    }

    //Set up additional properties only after the stream profile has been set
    if ( (-10000.0 < g_imageBrightness) && (g_imageBrightness < 10000.0))
        capture.set(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_BRIGHTNESS, g_imageBrightness);
    if ( (0 < g_imageContrast) && (g_imageContrast < 10000.0))
        capture.set(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_CONTRAST, g_imageContrast);

    int frame = 0;
    for(;;frame++)
    {
        Mat bgrImage;
        Mat depthImage;
        Mat irImage;

        if (!capture.grab())
        {
            cout << "Cannot grab images." << endl;
            return -1;
        }

        if ((-1 != g_depthStreamProfileIdx) && (capture.retrieve(depthImage, CV_CAP_INTELPERC_DEPTH_MAP)))
        {
            if (g_showClosedPoint)
            {
                double minVal = 0.0; double maxVal = 0.0;
                minMaxIdx(depthImage, &minVal, &maxVal, g_closedDepthPoint);
            }
            imshowDepth("depth image", depthImage, capture);
        }
        if ((g_irStreamShow) && (capture.retrieve(irImage, CV_CAP_INTELPERC_IR_MAP)))
            imshowIR("ir image", irImage);
        if ((-1 != g_imageStreamProfileIdx) && (capture.retrieve(bgrImage, CV_CAP_INTELPERC_IMAGE)))
            imshowImage("color image", bgrImage, capture);

        if (g_printTiming)
        {
            cout << "Image frame: " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_POS_FRAMES)
                << ", Depth(IR) frame: " << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_POS_FRAMES) << endl;
            cout << "Image frame: " << capture.get(CV_CAP_INTELPERC_IMAGE_GENERATOR | CV_CAP_PROP_POS_MSEC)
                << ", Depth(IR) frame: " << capture.get(CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_POS_MSEC) << endl;
        }
        if( waitKey(30) >= 0 )
            break;
    }

    return 0;
}