Fixed property names in cap_dshow #1993

parent 6ca618277c
commit c492dc6a83
@@ -63,9 +63,9 @@
//Written by Theodore Watson - theo.watson@gmail.com //
//Do whatever you want with this code but if you find //
//a bug or make an improvement I would love to know! //
// //
//Warning This code is experimental //
//use at your own risk :) //
//////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////
/* Shoutouts
@@ -164,7 +164,7 @@ interface IMPEG2PIDMap : public IUnknown
/*
MEDIASUBTYPE_I420 : TGUID ='{30323449-0000-0010-8000-00AA00389B71}';
MEDIASUBTYPE_Y800 : TGUID ='{30303859-0000-0010-8000-00AA00389B71}';
MEDIASUBTYPE_Y8   : TGUID ='{20203859-0000-0010-8000-00AA00389B71}';
MEDIASUBTYPE_Y160 : TGUID ='{30363159-0000-0010-8000-00AA00389B71}';
MEDIASUBTYPE_YV16 : TGUID ='{32315659-0000-0010-8000-00AA00389B71}';
MEDIASUBTYPE_Y422 : TGUID ='{32323459-0000-0010-8000-00AA00389B71}';
@@ -285,13 +285,13 @@ interface ISampleGrabber : public IUnknown
//setup the first device - there are a number of options:

VI.setupDevice(device1);                            //setup the first device with the default settings
//VI.setupDevice(device1, VI_COMPOSITE);            //or setup device with specific connection type
//VI.setupDevice(device1, 320, 240);                //or setup device with specified video size
//VI.setupDevice(device1, 320, 240, VI_COMPOSITE);  //or setup device with video size and connection type

//VI.setFormat(device1, VI_NTSC_M);                 //if your card doesn't remember what format it should be
//call this with the appropriate format listed above
//NOTE: must be called after setupDevice!

//optionally setup a second (or third, fourth ...) device - same options as above
VI.setupDevice(device2);

@@ -299,8 +299,8 @@ interface ISampleGrabber : public IUnknown
//As requested width and height can not always be accomodated
//make sure to check the size once the device is setup

int width  = VI.getWidth(device1);
int height = VI.getHeight(device1);
int size   = VI.getSize(device1);

unsigned char * yourBuffer1 = new unsigned char[size];

@@ -308,7 +308,7 @@ interface ISampleGrabber : public IUnknown

//to get the data from the device first check if the data is new
if(VI.isFrameNew(device1)){
    VI.getPixels(device1, yourBuffer1, false, false); //fills pixels as a BGR (for openCV) unsigned char array - no flipping
    VI.getPixels(device1, yourBuffer2, true, true);   //fills pixels as a RGB (for openGL) unsigned char array - flipping!
}
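Taken together, the usage notes above describe the full videoInput capture cycle. A minimal sketch assembled from them follows; device index 0, the 320x240 request, and the cleanup at the end are illustrative assumptions, while every call is one of the videoInput methods referenced in this file:

    videoInput VI;
    int device1 = 0;

    VI.setupDevice(device1, 320, 240);        // the requested size may be adjusted by the driver,

    int width  = VI.getWidth(device1);        // so query what was actually set
    int height = VI.getHeight(device1);
    int size   = VI.getSize(device1);

    unsigned char * yourBuffer1 = new unsigned char[size];

    if(VI.isFrameNew(device1)){
        VI.getPixels(device1, yourBuffer1, false, false);   // BGR, not flipped - ready for OpenCV
    }

    VI.stopDevice(device1);                   // stopDevice() is used the same way later in this file
    delete [] yourBuffer1;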
@@ -338,7 +338,7 @@ static bool verbose = true;
//STUFF YOU DON'T CHANGE

//videoInput defines
#define VI_VERSION      0.1995
#define VI_MAX_CAMERAS  20
#define VI_NUM_TYPES    20 //MGB
#define VI_NUM_FORMATS  18 //DON'T TOUCH

@@ -348,10 +348,10 @@ static bool verbose = true;
#define VI_S_VIDEO      1
#define VI_TUNER        2
#define VI_USB          3
#define VI_1394         4

//defines for formats
#define VI_NTSC_M       0
#define VI_PAL_B        1
#define VI_PAL_D        2
#define VI_PAL_G        3

@@ -359,16 +359,16 @@ static bool verbose = true;
#define VI_PAL_I        5
#define VI_PAL_M        6
#define VI_PAL_N        7
#define VI_PAL_NC       8
#define VI_SECAM_B      9
#define VI_SECAM_D      10
#define VI_SECAM_G      11
#define VI_SECAM_H      12
#define VI_SECAM_K      13
#define VI_SECAM_K1     14
#define VI_SECAM_L      15
#define VI_NTSC_M_J     16
#define VI_NTSC_433     17


//allows us to directShow classes here with the includes in the cpp
@@ -731,48 +731,48 @@ public:

videoDevice::videoDevice(){

    pCaptureGraph       = NULL;     // Capture graph builder object
    pGraph              = NULL;     // Graph builder object
    pControl            = NULL;     // Media control object
    pVideoInputFilter   = NULL;     // Video Capture filter
    pGrabber            = NULL;     // Grabs frame
    pDestFilter         = NULL;     // Null Renderer Filter
    pGrabberF           = NULL;     // Grabber Filter
    pMediaEvent         = NULL;
    streamConf          = NULL;
    pAmMediaType        = NULL;

    //This is our callback class that processes the frame.
    sgCallback          = new SampleGrabberCallback();
    sgCallback->newFrame = false;

    //Default values for capture type
    videoType           = MEDIASUBTYPE_RGB24;
    connection          = PhysConn_Video_Composite;
    storeConn           = 0;

    videoSize           = 0;
    width               = 0;
    height              = 0;

    tryWidth            = 640;
    tryHeight           = 480;
    tryVideoType        = MEDIASUBTYPE_RGB24;
    nFramesForReconnect = 10000;
    nFramesRunning      = 0;
    myID                = -1;

    tryDiffSize         = true;
    useCrossbar         = false;
    readyToCapture      = false;
    sizeSet             = false;
    setupStarted        = false;
    specificFormat      = false;
    autoReconnect       = false;
    requestedFrameTime  = -1;

    memset(wDeviceName, 0, sizeof(WCHAR) * 255);
    memset(nDeviceName, 0, sizeof(char) * 255);

}
@@ -808,39 +808,39 @@ void videoDevice::setSize(int w, int h){
// ----------------------------------------------------------------------

void videoDevice::NukeDownstream(IBaseFilter *pBF){
    IPin *pP, *pTo;
    ULONG u;
    IEnumPins *pins = NULL;
    PIN_INFO pininfo;
    HRESULT hr = pBF->EnumPins(&pins);
    pins->Reset();
    while (hr == NOERROR)
    {
        hr = pins->Next(1, &pP, &u);
        if (hr == S_OK && pP)
        {
            pP->ConnectedTo(&pTo);
            if (pTo)
            {
                hr = pTo->QueryPinInfo(&pininfo);
                if (hr == NOERROR)
                {
                    if (pininfo.dir == PINDIR_INPUT)
                    {
                        NukeDownstream(pininfo.pFilter);
                        pGraph->Disconnect(pTo);
                        pGraph->Disconnect(pP);
                        pGraph->RemoveFilter(pininfo.pFilter);
                    }
                    pininfo.pFilter->Release();
                    pininfo.pFilter = NULL;
                }
                pTo->Release();
            }
            pP->Release();
        }
    }
    if (pins) pins->Release();
}

@@ -949,27 +949,27 @@ videoDevice::~videoDevice(){
    if( (pVideoInputFilter) )NukeDownstream(pVideoInputFilter);

    //Release and zero pointers to our filters etc
    if( (pDestFilter) ){ if(verbose)printf("SETUP: freeing Renderer \n");
        (pDestFilter)->Release();
        (pDestFilter) = 0;
    }
    if( (pVideoInputFilter) ){ if(verbose)printf("SETUP: freeing Capture Source \n");
        (pVideoInputFilter)->Release();
        (pVideoInputFilter) = 0;
    }
    if( (pGrabberF) ){ if(verbose)printf("SETUP: freeing Grabber Filter \n");
        (pGrabberF)->Release();
        (pGrabberF) = 0;
    }
    if( (pGrabber) ){ if(verbose)printf("SETUP: freeing Grabber \n");
        (pGrabber)->Release();
        (pGrabber) = 0;
    }
    if( (pControl) ){ if(verbose)printf("SETUP: freeing Control \n");
        (pControl)->Release();
        (pControl) = 0;
    }
    if( (pMediaEvent) ){ if(verbose)printf("SETUP: freeing Media Event \n");
        (pMediaEvent)->Release();
        (pMediaEvent) = 0;
    }

@@ -978,7 +978,7 @@ videoDevice::~videoDevice(){
        (streamConf) = 0;
    }

    if( (pAmMediaType) ){ if(verbose)printf("SETUP: freeing Media Type \n");
        MyDeleteMediaType(pAmMediaType);
    }

@@ -992,7 +992,7 @@ videoDevice::~videoDevice(){
    if( (pGraph) )destroyGraph();

    //Release and zero our capture graph and our main graph
    if( (pCaptureGraph) ){ if(verbose)printf("SETUP: freeing Capture Graph \n");
        (pCaptureGraph)->Release();
        (pCaptureGraph) = 0;
    }
@@ -1031,7 +1031,7 @@ videoInput::videoInput(){

    devicesFound        = 0;
    callbackSetCount    = 0;
    bCallback           = true;

    //setup a max no of device objects
    for(int i=0; i<VI_MAX_CAMERAS; i++)  VDList[i] = new videoDevice();

@@ -1057,9 +1057,9 @@ videoInput::videoInput(){
    mediaSubtypes[8]    = MEDIASUBTYPE_UYVY;
    mediaSubtypes[9]    = MEDIASUBTYPE_YV12;
    mediaSubtypes[10]   = MEDIASUBTYPE_YVU9;
    mediaSubtypes[11]   = MEDIASUBTYPE_Y411;
    mediaSubtypes[12]   = MEDIASUBTYPE_Y41P;
    mediaSubtypes[13]   = MEDIASUBTYPE_Y211;
    mediaSubtypes[14]   = MEDIASUBTYPE_AYUV;
    mediaSubtypes[15]   = MEDIASUBTYPE_MJPG; // MGB

@@ -1067,31 +1067,29 @@ videoInput::videoInput(){
    mediaSubtypes[16]   = MEDIASUBTYPE_Y800;
    mediaSubtypes[17]   = MEDIASUBTYPE_Y8;
    mediaSubtypes[18]   = MEDIASUBTYPE_GREY;
    mediaSubtypes[19]   = MEDIASUBTYPE_I420;

    //The video formats we support
    formatTypes[VI_NTSC_M]      = AnalogVideo_NTSC_M;
    formatTypes[VI_NTSC_M_J]    = AnalogVideo_NTSC_M_J;
    formatTypes[VI_NTSC_433]    = AnalogVideo_NTSC_433;

    formatTypes[VI_PAL_B]       = AnalogVideo_PAL_B;
    formatTypes[VI_PAL_D]       = AnalogVideo_PAL_D;
    formatTypes[VI_PAL_G]       = AnalogVideo_PAL_G;
    formatTypes[VI_PAL_H]       = AnalogVideo_PAL_H;
    formatTypes[VI_PAL_I]       = AnalogVideo_PAL_I;
    formatTypes[VI_PAL_M]       = AnalogVideo_PAL_M;
    formatTypes[VI_PAL_N]       = AnalogVideo_PAL_N;
    formatTypes[VI_PAL_NC]      = AnalogVideo_PAL_N_COMBO;

    formatTypes[VI_SECAM_B]     = AnalogVideo_SECAM_B;
    formatTypes[VI_SECAM_D]     = AnalogVideo_SECAM_D;
    formatTypes[VI_SECAM_G]     = AnalogVideo_SECAM_G;
    formatTypes[VI_SECAM_H]     = AnalogVideo_SECAM_H;
    formatTypes[VI_SECAM_K]     = AnalogVideo_SECAM_K;
    formatTypes[VI_SECAM_K1]    = AnalogVideo_SECAM_K1;
    formatTypes[VI_SECAM_L]     = AnalogVideo_SECAM_L;

}
@@ -2029,30 +2027,30 @@ void videoInput::setAttemptCaptureSize(int id, int w, int h,GUID mediaType){

void videoInput::setPhyCon(int id, int conn){

    switch(conn){

        case 0:
            VDList[id]->connection = PhysConn_Video_Composite;
            break;
        case 1:
            VDList[id]->connection = PhysConn_Video_SVideo;
            break;
        case 2:
            VDList[id]->connection = PhysConn_Video_Tuner;
            break;
        case 3:
            VDList[id]->connection = PhysConn_Video_USB;
            break;
        case 4:
            VDList[id]->connection = PhysConn_Video_1394;
            break;
        default:
            return; //if it is not these types don't set crossbar
            break;
    }

    VDList[id]->storeConn = conn;
    VDList[id]->useCrossbar = true;
}

@@ -2154,26 +2152,26 @@ void videoInput::processPixels(unsigned char * src, unsigned char * dst, int wid
void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){

    char tmpStr[8];
    if( type == MEDIASUBTYPE_RGB24)     sprintf(tmpStr, "RGB24");
    else if(type == MEDIASUBTYPE_RGB32) sprintf(tmpStr, "RGB32");
    else if(type == MEDIASUBTYPE_RGB555)sprintf(tmpStr, "RGB555");
    else if(type == MEDIASUBTYPE_RGB565)sprintf(tmpStr, "RGB565");
    else if(type == MEDIASUBTYPE_YUY2)  sprintf(tmpStr, "YUY2");
    else if(type == MEDIASUBTYPE_YVYU)  sprintf(tmpStr, "YVYU");
    else if(type == MEDIASUBTYPE_YUYV)  sprintf(tmpStr, "YUYV");
    else if(type == MEDIASUBTYPE_IYUV)  sprintf(tmpStr, "IYUV");
    else if(type == MEDIASUBTYPE_UYVY)  sprintf(tmpStr, "UYVY");
    else if(type == MEDIASUBTYPE_YV12)  sprintf(tmpStr, "YV12");
    else if(type == MEDIASUBTYPE_YVU9)  sprintf(tmpStr, "YVU9");
    else if(type == MEDIASUBTYPE_Y411)  sprintf(tmpStr, "Y411");
    else if(type == MEDIASUBTYPE_Y41P)  sprintf(tmpStr, "Y41P");
    else if(type == MEDIASUBTYPE_Y211)  sprintf(tmpStr, "Y211");
    else if(type == MEDIASUBTYPE_AYUV)  sprintf(tmpStr, "AYUV");
    else if(type == MEDIASUBTYPE_MJPG)  sprintf(tmpStr, "MJPG");
    else if(type == MEDIASUBTYPE_Y800)  sprintf(tmpStr, "Y800");
    else if(type == MEDIASUBTYPE_Y8)    sprintf(tmpStr, "Y8");
    else if(type == MEDIASUBTYPE_GREY)  sprintf(tmpStr, "GREY");
    else if(type == MEDIASUBTYPE_I420)  sprintf(tmpStr, "I420");
    else sprintf(tmpStr, "OTHER");

    memcpy(typeAsString, tmpStr, sizeof(char)*8);
@@ -2245,7 +2243,7 @@ int videoInput::getVideoPropertyFromCV(int cv_property){
        return VideoProcAmp_BacklightCompensation;

    case CV_CAP_PROP_GAIN:
        return VideoProcAmp_Gain;
    }
    return -1;
}

@@ -2254,7 +2252,6 @@ int videoInput::getCameraPropertyFromCV(int cv_property){

    // see CameraControlProperty in strmif.h
    switch (cv_property) {

    case CV_CAP_PROP_PAN:
        return CameraControl_Pan;
@@ -3075,7 +3072,7 @@ HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter *
        }
        Crossbar->Route(pOIndex,pIndex);
    }else{
-        if(verbose)printf("SETUP: Didn't find specified Physical Connection type. Using Defualt. \n");
+        if(verbose) printf("SETUP: Didn't find specified Physical Connection type. Using Defualt. \n");
    }

    //we only free the crossbar when we close or restart the device

@@ -3087,7 +3084,7 @@ HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter *
    if(pXBar1)pXBar1 = NULL;

    }else{
-        if(verbose)printf("SETUP: You are a webcam or snazzy firewire cam! No Crossbar needed\n");
+        if(verbose) printf("SETUP: You are a webcam or snazzy firewire cam! No Crossbar needed\n");
        return hr;
    }

@@ -3110,8 +3107,6 @@ public:
    virtual IplImage* retrieveFrame(int);
    virtual int getCaptureDomain() { return CV_CAP_DSHOW; } // Return the type of the capture object: CV_CAP_VFW, etc...

protected:
    void init();

@@ -3217,60 +3212,29 @@ double CvCaptureCAM_DShow::getProperty( int property_id )
    switch( property_id )
    {
    case CV_CAP_PROP_BRIGHTNESS:
-        if ( VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BRIGHTNESS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_CONTRAST:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_CONTRAST),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_HUE:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_HUE),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_SATURATION:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SATURATION),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_SHARPNESS:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SHARPNESS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_GAMMA:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAMMA),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_MONOCROME:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_MONOCROME),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_WHITE_BALANCE_BLUE_U),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_BACKLIGHT:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BACKLIGHT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_GAIN:
-        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAIN),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
+        if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(property_id),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    }

    // camera properties
    switch( property_id )
    {
-    case CV_CAP_PROP_BACKLIGHT:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_BACKLIGHT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_PAN:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_PAN),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_TILT:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_TILT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_ROLL:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ROLL),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_ZOOM:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_BACKLIGHT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
+    case CV_CAP_PROP_EXPOSURE:
    case CV_CAP_PROP_IRIS:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_IRIS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    case CV_CAP_PROP_FOCUS:
-        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_FOCUS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
+        if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(property_id),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
    }
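With this change a single getVideoSettingFilter / getVideoSettingCamera call receives whichever property_id the caller passed, instead of one hard-coded constant per case (the old ZOOM case, for instance, queried CV_CAP_PROP_BACKLIGHT). For context, a caller reaches this code through the highgui C API; a minimal sketch, where the header path and the CV_CAP_DSHOW index offset are assumptions about the OpenCV 2.x C API rather than part of this commit:

    #include <stdio.h>
    #include "opencv2/highgui/highgui_c.h"

    CvCapture* cap = cvCreateCameraCapture(CV_CAP_DSHOW + 0);   // camera 0 via the DirectShow backend
    if( cap )
    {
        double gain     = cvGetCaptureProperty(cap, CV_CAP_PROP_GAIN);     // filter property -> getVideoSettingFilter
        double exposure = cvGetCaptureProperty(cap, CV_CAP_PROP_EXPOSURE); // camera property -> getVideoSettingCamera
        printf("gain=%f exposure=%f\n", gain, exposure);
        cvReleaseCapture(&cap);
    }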
@@ -3282,36 +3246,36 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
{
    // image capture properties
    bool handled = false;
    switch( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        width = cvRound(value);
        handled = true;
        break;

    case CV_CAP_PROP_FRAME_HEIGHT:
        height = cvRound(value);
        handled = true;
        break;

    case CV_CAP_PROP_FOURCC:
        fourcc = cvRound(value);
        if ( fourcc < 0 ) {
            // following cvCreateVideo usage will pop up caprturepindialog here if fourcc=-1
            // TODO - how to create a capture pin dialog
        }
        handled = true;
        break;

    case CV_CAP_PROP_FPS:
        int fps = cvRound(value);
        if (fps != VI.getFPS(0))
        {
            VI.stopDevice(index);
            VI.setIdealFramerate(index,fps);
            VI.setupDevice(index);
        }
        break;

    }

@@ -3329,7 +3293,7 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
            width = height = fourcc = -1;
            return VI.isDeviceSetup(index);
        }
        return true;
    }

    // show video/camera filter dialog
@@ -3341,67 +3305,32 @@ bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
    //video Filter properties
    switch( property_id )
    {
    case CV_CAP_PROP_BRIGHTNESS:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BRIGHTNESS),(long)value);
    case CV_CAP_PROP_CONTRAST:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_CONTRAST),(long)value);
    case CV_CAP_PROP_HUE:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_HUE),(long)value);
    case CV_CAP_PROP_SATURATION:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SATURATION),(long)value);
    case CV_CAP_PROP_SHARPNESS:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SHARPNESS),(long)value);
    case CV_CAP_PROP_GAMMA:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAMMA),(long)value);
    case CV_CAP_PROP_MONOCROME:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_MONOCROME),(long)value);
    case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_WHITE_BALANCE_BLUE_U),(long)value);
    case CV_CAP_PROP_BACKLIGHT:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BACKLIGHT),(long)value);
    case CV_CAP_PROP_GAIN:
-        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAIN),(long)value);
-
-    default:
-        ;
+        return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(property_id),(long)value);
    }

    //camera properties
    switch( property_id )
    {
    case CV_CAP_PROP_PAN:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_PAN),(long)value);
    case CV_CAP_PROP_TILT:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_TILT),(long)value);
    case CV_CAP_PROP_ROLL:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ROLL),(long)value);
    case CV_CAP_PROP_ZOOM:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ZOOM),(long)value);
    case CV_CAP_PROP_EXPOSURE:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_EXPOSURE),(long)value);
    case CV_CAP_PROP_IRIS:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_IRIS),(long)value);
    case CV_CAP_PROP_FOCUS:
-        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_FOCUS),(long)value);
+        return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(property_id),(long)value);
    }

    return false;
}
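setProperty now follows the same fall-through pattern, so the value always lands on the property the caller actually named. A minimal caller-side sketch, with the same assumptions as above about the C API header and the CV_CAP_DSHOW offset:

    #include "opencv2/highgui/highgui_c.h"

    CvCapture* cap = cvCreateCameraCapture(CV_CAP_DSHOW + 0);
    if( cap )
    {
        cvSetCaptureProperty(cap, CV_CAP_PROP_BRIGHTNESS, 128);   // routed to setVideoSettingFilter
        cvSetCaptureProperty(cap, CV_CAP_PROP_ZOOM, 2);           // routed to setVideoSettingCamera
        cvReleaseCapture(&cap);
    }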
@@ -3410,8 +3339,16 @@ CvCapture* cvCreateCameraCapture_DShow( int index )
{
    CvCaptureCAM_DShow* capture = new CvCaptureCAM_DShow;

-    if( capture->open( index ))
-        return capture;
+    try
+    {
+        if( capture->open( index ))
+            return capture;
+    }
+    catch(...)
+    {
+        delete capture;
+        throw;
+    }

    delete capture;
    return 0;
@@ -9,7 +9,7 @@ legacy. Deprecated stuff

motion_analysis
expectation_maximization
histograms
planar_subdivisions
feature_detection_and_description
common_interfaces_of_descriptor_extractors
@@ -41,6 +41,8 @@ const cv::Size sznHD = cv::Size(640, 360);
const cv::Size szqHD = cv::Size(960, 540);
const cv::Size sz720p = cv::Size(1280, 720);
const cv::Size sz1080p = cv::Size(1920, 1080);
+const cv::Size sz2160p = cv::Size(3840, 2160);//UHDTV1 4K
+const cv::Size sz4320p = cv::Size(7680, 4320);//UHDTV2 8K

const cv::Size sz2K = cv::Size(2048, 2048);