Added better OpenNI2 support for the Asus Xtion and Occipital Structure sensors, which do not have image generators.
vincentweb 2014-12-14 20:52:24 +01:00
parent 50c9367d7a
commit 4d12beb723
2 changed files with 21 additions and 13 deletions


@@ -76,7 +76,8 @@ enum { CAP_ANY = 0, // autodetect
CAP_GIGANETIX = 1300, // Smartek Giganetix GigEVisionSDK
CAP_MSMF = 1400, // Microsoft Media Foundation (via videoInput)
CAP_INTELPERC = 1500, // Intel Perceptual Computing SDK
CAP_OPENNI2 = 1600 // OpenNI2 (for Kinect)
CAP_OPENNI2 = 1600, // OpenNI2 (for Kinect)
CAP_OPENNI2_ASUS = 1610 // OpenNI2 (for Asus Xtion and Occipital Structure sensors)
};
// generic properties (based on DC1394 properties)

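For context, a minimal usage sketch (not part of this commit) of how an application could open one of these depth-only sensors through the new CAP_OPENNI2_ASUS value; it assumes the existing OpenNI retrieve constant CAP_OPENNI_DEPTH_MAP from the same header.

    #include <opencv2/core.hpp>
    #include <opencv2/videoio.hpp>

    int main()
    {
        // Open an Asus Xtion / Occipital Structure Sensor through the OpenNI2 backend.
        cv::VideoCapture capture(cv::CAP_OPENNI2_ASUS);
        if (!capture.isOpened())
            return 1;

        cv::Mat depthMap;
        while (capture.grab())
        {
            // These sensors have no image generator, so only the depth-based maps are available.
            capture.retrieve(depthMap, cv::CAP_OPENNI_DEPTH_MAP);
        }
        return 0;
    }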

@@ -68,8 +68,6 @@
#define CV_DEPTH_STREAM 0
#define CV_COLOR_STREAM 1
#define CV_NUM_STREAMS 2
#include "OpenNI.h"
#include "PS1080.h"
@@ -161,6 +159,7 @@ protected:
int currentStream;
int numStream;
std::vector<OutputMap> outputMaps;
};
@@ -198,6 +197,7 @@ openni::VideoMode CvCapture_OpenNI2::defaultDepthOutputMode()
CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
{
numStream = 2;
const char* deviceURI = openni::ANY_DEVICE;
openni::Status status;
int deviceType = DEVICE_DEFAULT;
@@ -215,6 +215,10 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
index %= 10;
}
// Asus XTION and Occipital Structure Sensor do not have an image generator
if (deviceType == DEVICE_ASUS_XTION)
numStream = 1;
if( deviceType > DEVICE_MAX )
return;
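As a side note, a small standalone sketch of the index arithmetic this hunk relies on; the enum values and the divide-by-ten decoding are assumptions based on the surrounding backend code, which this diff does not show.

    // Hypothetical illustration only; DEVICE_* values are assumed, not taken from this diff.
    enum { DEVICE_DEFAULT = 0, DEVICE_ASUS_XTION = 1, DEVICE_MAX = 1 };

    int main()
    {
        int index = 1610 - 1600;             // CAP_OPENNI2_ASUS - CAP_OPENNI2
        int deviceType = DEVICE_DEFAULT;
        if (index >= 10)
        {
            deviceType = index / 10;         // 1 -> DEVICE_ASUS_XTION
            index %= 10;                     // camera number within that device type, here 0
        }
        int numStream = (deviceType == DEVICE_ASUS_XTION) ? 1 : 2;  // depth-only device: single stream
        return numStream == 1 ? 0 : 1;
    }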
@@ -259,6 +263,10 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find depth stream:: %s\n", openni::OpenNI::getExtendedError()));
return;
}
streams = new openni::VideoStream*[numStream];
streams[CV_DEPTH_STREAM] = &depth;
// create a color object
status = color.create(device, openni::SENSOR_COLOR);
if (status == openni::STATUS_OK)
@@ -275,13 +283,19 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
color.destroy();
return;
}
streams[CV_COLOR_STREAM] = &color;
}
else
else if (numStream == 2)
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find color stream: %s\n", openni::OpenNI::getExtendedError()));
return;
}
if( !readCamerasParams() )
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n"));
return;
}
// if( deviceType == DEVICE_ASUS_XTION )
// {
@@ -291,14 +305,6 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
// depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/);
// }
if( !readCamerasParams() )
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n"));
return;
}
streams = new openni::VideoStream*[CV_NUM_STREAMS];
streams[CV_DEPTH_STREAM] = &depth;
streams[CV_COLOR_STREAM] = &color;
outputMaps.resize( outputMapsTypesCount );
@@ -309,6 +315,7 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename)
{
numStream = 2;
openni::Status status;
isContextOpened = false;
@@ -695,7 +702,7 @@ bool CvCapture_OpenNI2::grabFrame()
bool isGrabbed = false;
openni::Status status = openni::OpenNI::waitForAnyStream(streams, CV_NUM_STREAMS, &currentStream, CV_STREAM_TIMEOUT);
openni::Status status = openni::OpenNI::waitForAnyStream(streams, numStream, &currentStream, CV_STREAM_TIMEOUT);
if( status != openni::STATUS_OK )
return false;
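For reference, a minimal standalone OpenNI2 sketch (not OpenCV code; the surrounding setup is assumed) of the wait call this last hunk adjusts: with a depth-only device the wait now covers a single stream instead of always two, so it cannot stall on a color stream that was never created.

    #include "OpenNI.h"

    // Assumes 'depth' is an already-opened openni::VideoStream on a depth-only device.
    bool grabDepthFrame(openni::VideoStream& depth, openni::VideoFrameRef& frame)
    {
        openni::VideoStream* streams[1] = { &depth };
        int readyIndex = -1;
        // Wait on exactly one stream; 2000 ms is an illustrative timeout
        // (the backend uses its own CV_STREAM_TIMEOUT constant).
        openni::Status rc = openni::OpenNI::waitForAnyStream(streams, 1, &readyIndex, 2000);
        if (rc != openni::STATUS_OK)
            return false;
        return depth.readFrame(&frame) == openni::STATUS_OK;
    }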