Changes to be committed:

(use "git reset HEAD <file>..." to unstage)

	modified:   modules/highgui/include/opencv2/highgui/highgui_c.h
	modified:   modules/highgui/src/cap_dshow.cpp
	modified:   modules/highgui/src/cap_pvapi.cpp
	modified:   modules/java/generator/gen_java.py

Added changes to the PvAPI.
Prof. Dr. Rudolf Haussmann 2015-03-20 01:42:16 +01:00
parent 9185b62a1d
commit a7bf1d53d8
4 changed files with 383 additions and 140 deletions

modules/highgui/include/opencv2/highgui/highgui_c.h

@@ -365,7 +365,7 @@ enum
     CV_CAP_PROP_CONVERT_RGB     =16,
     CV_CAP_PROP_WHITE_BALANCE_U =17,
     CV_CAP_PROP_RECTIFICATION   =18,
-    CV_CAP_PROP_MONOCROME       =19,
+    CV_CAP_PROP_MONOCHROME      =19,
     CV_CAP_PROP_SHARPNESS       =20,
     CV_CAP_PROP_AUTO_EXPOSURE   =21, // exposure control done by camera,
                                      // user can adjust refernce level
@@ -422,7 +422,15 @@ enum
     // Properties of cameras available through GStreamer interface
     CV_CAP_GSTREAMER_QUEUE_LENGTH           = 200, // default is 1
-    CV_CAP_PROP_PVAPI_MULTICASTIP           = 300, // ip for anable multicast master mode. 0 for disable multicast
+
+    // PVAPI
+    CV_CAP_PROP_PVAPI_MULTICASTIP           = 300, // ip for anable multicast master mode. 0 for disable multicast
+    CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE = 301, // FrameStartTriggerMode: Determines how a frame is initiated
+    CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL  = 302, // Horizontal sub-sampling of the image
+    CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL    = 303, // Vertical sub-sampling of the image
+    CV_CAP_PROP_PVAPI_BINNINGX              = 304, // Horizontal binning factor
+    CV_CAP_PROP_PVAPI_BINNINGY              = 305, // Vertical binning factor
+    CV_CAP_PROP_PVAPI_PIXELFORMAT           = 306, // Pixel format
+
     // Properties of cameras available through XIMEA SDK interface
     CV_CAP_PROP_XI_DOWNSAMPLING             = 400, // Change image resolution by binning or skipping.
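
As a usage illustration (not part of the commit): the new 301..306 property IDs are ordinary capture properties, so once this header change is in place they can be driven through cv::VideoCapture. A minimal sketch, assuming an OpenCV build configured WITH_PVAPI and an AVT GigE camera reachable as the first PvAPI device; the chosen values are only examples.

#include <opencv2/highgui/highgui_c.h>
#include <opencv2/highgui/highgui.hpp>
#include <cstdio>

int main()
{
    cv::VideoCapture cap(CV_CAP_PVAPI);   // PvAPI backend, first camera (assumed present)
    if (!cap.isOpened())
    {
        std::fprintf(stderr, "PvAPI camera not available\n");
        return 1;
    }

    cap.set(CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE, 0); // 0 = Freerun
    cap.set(CV_CAP_PROP_PVAPI_BINNINGX, 2);               // 2x horizontal binning
    cap.set(CV_CAP_PROP_PVAPI_BINNINGY, 2);               // 2x vertical binning
    cap.set(CV_CAP_PROP_PVAPI_PIXELFORMAT, 1);            // 1 = Mono8 (see cap_pvapi.cpp mapping)

    cv::Mat img;
    cap >> img;
    std::printf("captured %dx%d frame\n", img.cols, img.rows);
    return 0;
}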

modules/highgui/src/cap_dshow.cpp

@@ -2246,7 +2246,7 @@ int videoInput::getVideoPropertyFromCV(int cv_property){
         case CV_CAP_PROP_GAMMA:
             return VideoProcAmp_Gamma;
-        case CV_CAP_PROP_MONOCROME:
+        case CV_CAP_PROP_MONOCHROME:
             return VideoProcAmp_ColorEnable;
         case CV_CAP_PROP_WHITE_BALANCE_U:
@@ -3245,7 +3245,7 @@ double CvCaptureCAM_DShow::getProperty( int property_id )
     case CV_CAP_PROP_SATURATION:
     case CV_CAP_PROP_SHARPNESS:
     case CV_CAP_PROP_GAMMA:
-    case CV_CAP_PROP_MONOCROME:
+    case CV_CAP_PROP_MONOCHROME:
     case CV_CAP_PROP_WHITE_BALANCE_U:
     case CV_CAP_PROP_BACKLIGHT:
     case CV_CAP_PROP_GAIN:
@@ -3349,7 +3349,7 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
     case CV_CAP_PROP_SATURATION:
     case CV_CAP_PROP_SHARPNESS:
     case CV_CAP_PROP_GAMMA:
-    case CV_CAP_PROP_MONOCROME:
+    case CV_CAP_PROP_MONOCHROME:
     case CV_CAP_PROP_WHITE_BALANCE_U:
     case CV_CAP_PROP_BACKLIGHT:
     case CV_CAP_PROP_GAIN:
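
On the DirectShow side the change is only the renamed constant; for context, a small sketch of how it would be used through the C API. The device index is an assumption, and whether VideoProcAmp_ColorEnable actually yields a monochrome stream depends on the camera driver.

#include <opencv2/highgui/highgui_c.h>
#include <cstdio>

int main()
{
    // Assumes a Windows build with DirectShow support and a camera at index 0.
    CvCapture* cap = cvCreateCameraCapture(CV_CAP_DSHOW + 0);
    if (!cap)
        return 1;

    // CV_CAP_PROP_MONOCHROME is forwarded to the VideoProcAmp_ColorEnable control;
    // the value semantics are driver dependent.
    cvSetCaptureProperty(cap, CV_CAP_PROP_MONOCHROME, 1);
    std::printf("monochrome property: %f\n",
                cvGetCaptureProperty(cap, CV_CAP_PROP_MONOCHROME));

    cvReleaseCapture(&cap);
    return 0;
}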

modules/highgui/src/cap_pvapi.cpp

@@ -60,10 +60,10 @@
 #ifdef WIN32
 #  include <io.h>
 #else
-#  include <time.h>
 #  include <unistd.h>
 #endif
+#include <string>
 //#include <arpa/inet.h>

 #define MAX_CAMERAS 10
@@ -95,6 +95,10 @@ protected:
     virtual void Sleep(unsigned int time);
 #endif

+    void stopCapture();
+    bool startCapture();
+    bool resizeCaptureFrame (int frameWidth, int frameHeight);
+
     typedef struct
     {
         unsigned long UID;
@@ -103,16 +107,14 @@ protected:
     } tCamera;

     IplImage *frame;
-    IplImage *grayframe;

     tCamera Camera;
     tPvErr  Errcode;
-    bool monocrome;

 };

 CvCaptureCAM_PvAPI::CvCaptureCAM_PvAPI()
 {
-    monocrome=false;
+    frame = NULL;
     memset(&this->Camera, 0, sizeof(this->Camera));
 }
@@ -132,144 +134,94 @@ void CvCaptureCAM_PvAPI::Sleep(unsigned int time)
 void CvCaptureCAM_PvAPI::close()
 {
     // Stop the acquisition & free the camera
-    PvCommandRun(Camera.Handle, "AcquisitionStop");
-    PvCaptureEnd(Camera.Handle);
+    stopCapture();
     PvCameraClose(Camera.Handle);
     PvUnInitialize();
 }

 // Initialize camera input
-bool CvCaptureCAM_PvAPI::open( int )
+bool CvCaptureCAM_PvAPI::open( int index )
 {
     tPvCameraInfo cameraList[MAX_CAMERAS];
     tPvCameraInfo camInfo;
     tPvIpSettings ipSettings;

-    // Initialization parameters [500 x 10 ms = 5000 ms timeout]
-    int initializeTimeOut = 500;
-
-    // Disregard any errors, since this might be called several times and only needs to be called once or it will return an error
-    // Important when wanting to use more than 1 AVT camera at the same time
-    PvInitialize();
-    while((!PvCameraCount()) && (initializeTimeOut--))
-        Sleep(10);
-
-    if (!initializeTimeOut){
-        fprintf(stderr,"ERROR: camera intialisation timeout [5000ms].\n");
-        return false;
+    if (PvInitialize()) {
     }
+    //return false;

-    unsigned int numCameras = PvCameraList(cameraList, MAX_CAMERAS, NULL);
+    Sleep(1000);
+    //close();

-    // If no cameras are found
-    if(!numCameras)
+    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);
+
+    if (numCameras <= 0 || index >= numCameras)
+        return false;
+
+    Camera.UID = cameraList[index].UniqueId;
+
+    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings))
     {
-        fprintf(stderr, "ERROR: No cameras found.\n");
-        return false;
+        /*
+        struct in_addr addr;
+        addr.s_addr = ipSettings.CurrentIpAddress;
+        printf("Current address:\t%s\n",inet_ntoa(addr));
+        addr.s_addr = ipSettings.CurrentIpSubnet;
+        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
+        addr.s_addr = ipSettings.CurrentIpGateway;
+        printf("Current gateway:\t%s\n",inet_ntoa(addr));
+        */
     }
+    else
-
-    // Try opening the cameras in the list, one-by-one until a camera that is not used is found
-    unsigned int findNewCamera;
-    for(findNewCamera=0; findNewCamera<numCameras; findNewCamera++)
     {
-        Camera.UID = cameraList[findNewCamera].UniqueId;
-        if(PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
-            break;
+        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
+        return false;
     }

-    if(findNewCamera == numCameras)
+    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
     {
-        fprintf(stderr, "Could not find a new camera to connect to.\n");
-        return false;
-    }
-
-    if(PvCameraIpSettingsGet(Camera.UID,&ipSettings)==ePvErrNotFound)
-    {
-        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraIpSettingsGet().\n", Camera.UID);
-        return false;
-    }
-
-    if(PvCameraInfo(Camera.UID,&camInfo)==ePvErrNotFound)
-    {
-        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraInfo().\n", Camera.UID);
-        return false;
-    }
-
-    tPvUint32 frameWidth, frameHeight, frameSize;
-    char pixelFormat[256];
-    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
-    PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
-    PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
-    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
-
-    // Start the camera
-    PvCaptureStart(Camera.Handle);
-
-    // Set the camera explicitly to capture data frames continuously
-    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
-    {
-        fprintf(stderr,"Could not set Acquisition Mode\n");
-        return false;
-    }
-
-    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
-    {
-        fprintf(stderr,"Could not start acquisition\n");
-        return false;
+        tPvUint32 frameWidth, frameHeight;
+        unsigned long maxSize;
+
+        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
+        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
+
+        // Determine the maximum packet size supported by the system (ethernet adapter)
+        // and then configure the camera to use this value. If the system's NIC only supports
+        // an MTU of 1500 or lower, this will automatically configure an MTU of 1500.
+        // 8228 is the optimal size described by the API in order to enable jumbo frames
+
+        maxSize = 8228;
+        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
+        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
+            return false;
+
+        resizeCaptureFrame(frameWidth, frameHeight);
+
+        return startCapture();
     }
+    fprintf(stderr,"Error cannot open camera\n");
+    return false;
-
-    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
-    {
-        fprintf(stderr,"Error setting trigger to \"Freerun\"");
-        return false;
-    }
-
-    // Settings depending on the pixelformat
-    // This works for all AVT camera models that use the PvAPI interface
-    if (strcmp(pixelFormat, "Mono8")==0) {
-        monocrome = true;
-        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
-        grayframe->widthStep = (int)frameWidth;
-        Camera.Frame.ImageBufferSize = frameSize;
-        Camera.Frame.ImageBuffer = grayframe->imageData;
-    }
-    else if (strcmp(pixelFormat, "Mono16")==0) {
-        monocrome = true;
-        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
-        grayframe->widthStep = (int)frameWidth*2;
-        Camera.Frame.ImageBufferSize = frameSize;
-        Camera.Frame.ImageBuffer = grayframe->imageData;
-    }
-    else if (strcmp(pixelFormat, "Bgr24")==0) {
-        monocrome = false;
-        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
-        frame->widthStep = (int)frameWidth*3;
-        Camera.Frame.ImageBufferSize = frameSize;
-        Camera.Frame.ImageBuffer = frame->imageData;
-    }
-    else{
-        fprintf(stderr, "Pixel format %s not supported; only Mono8, Mono16 and Bgr24 are currently supported.\n", pixelFormat);
-        return false;
-    }
-
-    return true;
 }

 bool CvCaptureCAM_PvAPI::grabFrame()
 {
-    //if(Camera.Frame.Status != ePvErrUnplugged && Camera.Frame.Status != ePvErrCancelled)
     return PvCaptureQueueFrame(Camera.Handle, &(Camera.Frame), NULL) == ePvErrSuccess;
 }

 IplImage* CvCaptureCAM_PvAPI::retrieveFrame(int)
 {
-    if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) == ePvErrSuccess) {
-        if (!monocrome) {
-            return frame;
-        }
-        return grayframe;
+    if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) == ePvErrSuccess)
+    {
+        return frame;
     }
     else return NULL;
 }
@@ -297,65 +249,233 @@ double CvCaptureCAM_PvAPI::getProperty( int property_id )
         char mEnable[2];
         char mIp[11];
         PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
-        if (strcmp(mEnable, "Off") == 0) {
+        if (strcmp(mEnable, "Off") == 0)
+        {
             return -1;
         }
-        else {
+        else
+        {
             long int ip;
             int a,b,c,d;
             PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
             sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
             return (double)ip;
         }
+    case CV_CAP_PROP_GAIN:
+        PvAttrUint32Get(Camera.Handle, "GainValue", &nTemp);
+        return (double)nTemp;
+    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
+        char triggerMode[256];
+        PvAttrEnumGet(Camera.Handle, "FrameStartTriggerMode", triggerMode, 256, NULL);
+        if (strcmp(triggerMode, "Freerun")==0)
+            return 0.0;
+        else if (strcmp(triggerMode, "SyncIn1")==0)
+            return 1.0;
+        else if (strcmp(triggerMode, "SyncIn2")==0)
+            return 2.0;
+        else if (strcmp(triggerMode, "FixedRate")==0)
+            return 3.0;
+        else if (strcmp(triggerMode, "Software")==0)
+            return 4.0;
+        else
+            return -1.0;
+    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
+        PvAttrUint32Get(Camera.Handle, "DecimationHorizontal", &nTemp);
+        return (double)nTemp;
+    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
+        PvAttrUint32Get(Camera.Handle, "DecimationVertical", &nTemp);
+        return (double)nTemp;
+    case CV_CAP_PROP_PVAPI_BINNINGX:
+        PvAttrUint32Get(Camera.Handle,"BinningX",&nTemp);
+        return (double)nTemp;
+    case CV_CAP_PROP_PVAPI_BINNINGY:
+        PvAttrUint32Get(Camera.Handle,"BinningY",&nTemp);
+        return (double)nTemp;
+    case CV_CAP_PROP_PVAPI_PIXELFORMAT:
+        char pixelFormat[256];
+        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
+        if (strcmp(pixelFormat, "Mono8")==0)
+            return 1.0;
+        else if (strcmp(pixelFormat, "Mono16")==0)
+            return 2.0;
+        else if (strcmp(pixelFormat, "Bayer8")==0)
+            return 3.0;
+        else if (strcmp(pixelFormat, "Bayer16")==0)
+            return 4.0;
+        else if (strcmp(pixelFormat, "Rgb24")==0)
+            return 5.0;
+        else if (strcmp(pixelFormat, "Bgr24")==0)
+            return 6.0;
+        else if (strcmp(pixelFormat, "Rgba32")==0)
+            return 7.0;
+        else if (strcmp(pixelFormat, "Bgra32")==0)
+            return 8.0;
     }
     return -1.0;
 }

 bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
 {
+    tPvErr error;
+
     switch ( property_id )
     {
-    /* TODO: Camera works, but IplImage must be modified for the new size
     case CV_CAP_PROP_FRAME_WIDTH:
-        PvAttrUint32Set(Camera.Handle, "Width", (tPvUint32)value);
+    {
+        tPvUint32 currHeight;
+
+        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);
+
+        stopCapture();
+        // Reallocate Frames
+        if (!resizeCaptureFrame(value, currHeight))
+        {
+            startCapture();
+            return false;
+        }
+
+        startCapture();
+
         break;
+    }
     case CV_CAP_PROP_FRAME_HEIGHT:
-        PvAttrUint32Set(Camera.Handle, "Heigth", (tPvUint32)value);
+    {
+        tPvUint32 currWidth;
+
+        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);
+
+        stopCapture();
+        // Reallocate Frames
+        if (!resizeCaptureFrame(currWidth, value))
+        {
+            startCapture();
+            return false;
+        }
+
+        startCapture();
+
         break;
-    */
-    case CV_CAP_PROP_MONOCROME:
-        if (value==1) {
-            char pixelFormat[256];
-            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
-            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0) {
-                monocrome=true;
-            }
-            else
-                return false;
-        }
-        else
-            monocrome=false;
-        break;
+    }
     case CV_CAP_PROP_EXPOSURE:
         if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
             break;
         else
             return false;
     case CV_CAP_PROP_PVAPI_MULTICASTIP:
-        if (value==-1) {
+        if (value==-1)
+        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
-        else {
-            std::string ip=cv::format("%d.%d.%d.%d", ((int)value>>24)&255, ((int)value>>16)&255, ((int)value>>8)&255, (int)value&255);
+        else
+        {
+            cv::String ip=cv::format("%d.%d.%d.%d", ((unsigned int)value>>24)&255, ((unsigned int)value>>16)&255, ((unsigned int)value>>8)&255, (unsigned int)value&255);
             if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                 (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                 break;
             else
                 return false;
         }
+    case CV_CAP_PROP_GAIN:
+        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
+        {
+            return false;
+        }
+        break;
+    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
+        if (value==0)
+            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
+        else if (value==1)
+            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn1");
+        else if (value==2)
+            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn2");
+        else if (value==3)
+            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "FixedRate");
+        else if (value==4)
+            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software");
+        else
+            error = ePvErrOutOfRange;
+        if(error==ePvErrSuccess)
+            break;
+        else
+            return false;
+    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
+        if (value >= 1 && value <= 8)
+            error = PvAttrUint32Set(Camera.Handle, "DecimationHorizontal", value);
+        else
+            error = ePvErrOutOfRange;
+        if(error==ePvErrSuccess)
+            break;
+        else
+            return false;
+    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
+        if (value >= 1 && value <= 8)
+            error = PvAttrUint32Set(Camera.Handle, "DecimationVertical", value);
+        else
+            error = ePvErrOutOfRange;
+        if(error==ePvErrSuccess)
+            break;
+        else
+            return false;
+    case CV_CAP_PROP_PVAPI_BINNINGX:
+        error = PvAttrUint32Set(Camera.Handle, "BinningX", value);
+        if(error==ePvErrSuccess)
+            break;
+        else
+            return false;
+    case CV_CAP_PROP_PVAPI_BINNINGY:
+        error = PvAttrUint32Set(Camera.Handle, "BinningY", value);
+        if(error==ePvErrSuccess)
+            break;
+        else
+            return false;
+    case CV_CAP_PROP_PVAPI_PIXELFORMAT:
+    {
+        cv::String pixelFormat;
+
+        if (value==1)
+            pixelFormat = "Mono8";
+        else if (value==2)
+            pixelFormat = "Mono16";
+        else if (value==3)
+            pixelFormat = "Bayer8";
+        else if (value==4)
+            pixelFormat = "Bayer16";
+        else if (value==5)
+            pixelFormat = "Rgb24";
+        else if (value==6)
+            pixelFormat = "Bgr24";
+        else if (value==7)
+            pixelFormat = "Rgba32";
+        else if (value==8)
+            pixelFormat = "Bgra32";
+        else
+            return false;
+
+        if ((PvAttrEnumSet(Camera.Handle,"PixelFormat", pixelFormat.c_str())==ePvErrSuccess))
+        {
+            tPvUint32 currWidth;
+            tPvUint32 currHeight;
+
+            PvAttrUint32Get(Camera.Handle, "Width", &currWidth);
+            PvAttrUint32Get(Camera.Handle, "Height", &currHeight);
+
+            stopCapture();
+            // Reallocate Frames
+            if (!resizeCaptureFrame(currWidth, currHeight))
+            {
+                startCapture();
+                return false;
+            }
+
+            startCapture();
+            return true;
+        }
+        else
+            return false;
+    }
     default:
         return false;
@@ -363,6 +483,121 @@ bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
     return true;
 }

+void CvCaptureCAM_PvAPI::stopCapture()
+{
+    PvCommandRun(Camera.Handle, "AcquisitionStop");
+    PvCaptureEnd(Camera.Handle);
+}
+
+bool CvCaptureCAM_PvAPI::startCapture()
+{
+    // Start the camera
+    PvCaptureStart(Camera.Handle);
+
+    // Set the camera to capture continuously
+    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not set PvAPI Acquisition Mode\n");
+        return false;
+    }
+
+    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not start PvAPI acquisition\n");
+        return false;
+    }
+
+    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Error setting PvAPI trigger to \"Freerun\"");
+        return false;
+    }
+
+    return true;
+}
+
+bool CvCaptureCAM_PvAPI::resizeCaptureFrame (int frameWidth, int frameHeight)
+{
+    char pixelFormat[256];
+    tPvUint32 frameSize;
+    tPvUint32 sensorHeight;
+    tPvUint32 sensorWidth;
+
+    if (frame)
+    {
+        cvReleaseImage(&frame);
+        frame = NULL;
+    }
+
+    if (PvAttrUint32Get(Camera.Handle, "SensorWidth", &sensorWidth) != ePvErrSuccess)
+    {
+        return false;
+    }
+
+    if (PvAttrUint32Get(Camera.Handle, "SensorHeight", &sensorHeight) != ePvErrSuccess)
+    {
+        return false;
+    }
+
+    // Cap out of bounds widths to the max supported by the sensor
+    if ((frameWidth < 0) || ((tPvUint32)frameWidth > sensorWidth))
+    {
+        frameWidth = sensorWidth;
+    }
+
+    if ((frameHeight < 0) || ((tPvUint32)frameHeight > sensorHeight))
+    {
+        frameHeight = sensorHeight;
+    }
+
+    if (PvAttrUint32Set(Camera.Handle, "Height", frameHeight) != ePvErrSuccess)
+    {
+        return false;
+    }
+
+    if (PvAttrUint32Set(Camera.Handle, "Width", frameWidth) != ePvErrSuccess)
+    {
+        return false;
+    }
+
+    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
+    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
+
+    if ( (strcmp(pixelFormat, "Mono8")==0) || (strcmp(pixelFormat, "Bayer8")==0) )
+    {
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
+        frame->widthStep = (int)frameWidth;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else if ( (strcmp(pixelFormat, "Mono16")==0) || (strcmp(pixelFormat, "Bayer16")==0) )
+    {
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
+        frame->widthStep = (int)frameWidth*2;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else if ( (strcmp(pixelFormat, "Rgb24")==0) || (strcmp(pixelFormat, "Bgr24")==0) )
+    {
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
+        frame->widthStep = (int)frameWidth*3;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else if ( (strcmp(pixelFormat, "Rgba32")==0) || (strcmp(pixelFormat, "Bgra32")==0) )
+    {
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 4);
+        frame->widthStep = (int)frameWidth*4;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else
+        return false;
+
+    return true;
+}
+
 CvCapture* cvCreateCameraCapture_PvAPI( int index )
 {
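
A rough end-to-end sketch of how the reworked PvAPI path is meant to be driven from user code (not part of the commit); the camera index and the property values below are illustrative assumptions.

#include <opencv2/highgui/highgui_c.h>
#include <cstdio>

int main()
{
    // Assumes an AVT GigE camera visible to the PvAPI driver as device 0.
    CvCapture* cap = cvCreateCameraCapture(CV_CAP_PVAPI + 0);
    if (!cap)
    {
        std::fprintf(stderr, "could not open PvAPI camera\n");
        return 1;
    }

    // Changing the frame size now goes through stopCapture() ->
    // resizeCaptureFrame() -> startCapture(), so the IplImage buffer is
    // reallocated to match the new ROI.
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH,  640);
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 480);

    // Pixel format uses the numeric mapping from get/setProperty:
    // 1=Mono8, 2=Mono16, 3=Bayer8, 4=Bayer16, 5=Rgb24, 6=Bgr24, 7=Rgba32, 8=Bgra32.
    cvSetCaptureProperty(cap, CV_CAP_PROP_PVAPI_PIXELFORMAT, 1 /* Mono8 */);

    IplImage* img = cvQueryFrame(cap);  // grab + retrieve one frame
    if (img)
        std::printf("frame: %dx%d, %d channel(s)\n", img->width, img->height, img->nChannels);

    cvReleaseCapture(&cap);
    return 0;
}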

modules/java/generator/gen_java.py

@@ -73,7 +73,7 @@ const_ignore_list = (
     "CV_CAP_PROP_CONVERT_RGB",
     "CV_CAP_PROP_WHITE_BALANCE_U",
     "CV_CAP_PROP_RECTIFICATION",
-    "CV_CAP_PROP_MONOCROME",
+    "CV_CAP_PROP_MONOCHROME",
     "CV_CAP_PROP_SHARPNESS",
     "CV_CAP_PROP_AUTO_EXPOSURE",
     "CV_CAP_PROP_GAMMA",