Merge pull request #1199 from devernay:2.4-qtkit
commit 6057414a93
@@ -289,11 +289,17 @@ bool CvCaptureCAM::grabFrame(double timeOut) {
     double sleepTime = 0.005;
     double total = 0;
 
-    NSDate *loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
-    while (![capture updateImage] && (total += sleepTime)<=timeOut &&
-           [[NSRunLoop currentRunLoop] runMode: NSDefaultRunLoopMode
-                                    beforeDate:loopUntil])
-        loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
+    // If the capture is launched in a separate thread, then
+    // [NSRunLoop currentRunLoop] is not the same as in the main thread, and has no timer.
+    //see https://developer.apple.com/library/mac/#documentation/Cocoa/Reference/Foundation/Classes/nsrunloop_Class/Reference/Reference.html
+    // "If no input sources or timers are attached to the run loop, this
+    // method exits immediately"
+    // using usleep() is not a good alternative, because it may block the GUI.
+    // Create a dummy timer so that runUntilDate does not exit immediately:
+    [NSTimer scheduledTimerWithTimeInterval:100 target:nil selector:@selector(doFireTimer:) userInfo:nil repeats:YES];
+    while (![capture updateImage] && (total += sleepTime)<=timeOut) {
+        [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:sleepTime]];
+    }
 
     [localpool drain];
 
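Note on the grabFrame() hunk above: -[NSRunLoop runUntilDate:] (like runMode:beforeDate:) returns immediately when the current run loop has no input sources or timers attached, which is exactly the situation when the capture runs on a secondary thread; the patch therefore installs a dummy timer and polls updateImage in short run-loop slices instead of calling usleep(), which could block the GUI. A minimal standalone sketch of that pattern, assuming only a caller-supplied predicate (DummyTimerTarget and waitForCondition are illustrative names, not part of the patch):

    #import <Foundation/Foundation.h>

    // No-op timer target: the timer exists only to keep the run loop alive.
    @interface DummyTimerTarget : NSObject
    - (void)fire:(NSTimer *)timer;
    @end
    @implementation DummyTimerTarget
    - (void)fire:(NSTimer *)timer { /* never needs to do anything */ }
    @end

    // Poll `ready` every ~5 ms for at most `timeOut` seconds without busy-waiting.
    static bool waitForCondition(bool (*ready)(void), double timeOut) {
        double sleepTime = 0.005;
        double total = 0;
        // Without at least one timer or input source, runUntilDate: exits
        // immediately, so attach a dummy timer first (the patch itself uses a
        // dummy target/selector for the same purpose).
        [NSTimer scheduledTimerWithTimeInterval:100
                                         target:[[[DummyTimerTarget alloc] init] autorelease]
                                       selector:@selector(fire:)
                                       userInfo:nil
                                        repeats:YES];
        while (!ready() && (total += sleepTime) <= timeOut) {
            [[NSRunLoop currentRunLoop]
                runUntilDate:[NSDate dateWithTimeIntervalSinceNow:sleepTime]];
        }
        return ready();
    }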
@@ -338,9 +344,11 @@ int CvCaptureCAM::startCaptureDevice(int cameraNum) {
     }
 
     if (cameraNum >= 0) {
-        int nCameras = [devices count];
-        if( cameraNum < 0 || cameraNum >= nCameras )
+        NSUInteger nCameras = [devices count];
+        if( (NSUInteger)cameraNum >= nCameras ) {
+            [localpool drain];
             return 0;
+        }
         device = [devices objectAtIndex:cameraNum] ;
     } else {
         device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo] ;
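Note on the bounds check: -[NSArray count] returns NSUInteger, so the old `int nCameras` together with `cameraNum < 0 || cameraNum >= nCameras` mixed signed and unsigned operands (and the `< 0` half was already ruled out by the enclosing `if (cameraNum >= 0)`); the new form also drains the autorelease pool before the early return. The same idea in isolation, on a hypothetical helper (objectAtCheckedIndex is an illustrative name, not OpenCV API):

    #import <Foundation/Foundation.h>

    // Return the element at `index`, or nil when it is out of range.
    // `index` is checked for negativity first, so the cast to NSUInteger is
    // lossless and the comparison stays entirely unsigned.
    static id objectAtCheckedIndex(NSArray *items, int index) {
        NSUInteger count = [items count];   // -count is NSUInteger, not int
        if (index < 0 || (NSUInteger)index >= count)
            return nil;
        return [items objectAtIndex:(NSUInteger)index];
    }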
@@ -404,6 +412,7 @@ int CvCaptureCAM::startCaptureDevice(int cameraNum) {
 
     grabFrame(60);
 
+    [localpool drain];
     return 1;
 }
 
@@ -433,6 +442,7 @@ void CvCaptureCAM::setWidthHeight() {
 
 
 double CvCaptureCAM::getProperty(int property_id){
+    int retval;
     NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
 
     NSArray* connections = [mCaptureDeviceInput connections];
@@ -442,15 +452,18 @@ double CvCaptureCAM::getProperty(int property_id){
     int width=s1.width, height=s1.height;
     switch (property_id) {
         case CV_CAP_PROP_FRAME_WIDTH:
-            return width;
+            retval = width;
+            break;
         case CV_CAP_PROP_FRAME_HEIGHT:
-            return height;
+            retval = height;
+            break;
         default:
-            return 0;
+            retval = 0;
+            break;
     }
 
     [localpool drain];
+    return retval;
 }
 
 bool CvCaptureCAM::setProperty(int property_id, double value) {
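Note on the getProperty() hunk: the old early `return`s inside the switch skipped the final `[localpool drain]`, so every width/height query leaked an autorelease pool; funnelling all cases through `retval`/`break` guarantees the pool is drained on every path. A stripped-down sketch of that single-exit shape (compiled without ARC, as this file is; the numeric case values stand in for OpenCV's CV_CAP_PROP_FRAME_WIDTH/HEIGHT constants):

    #import <Foundation/Foundation.h>

    // Compute the result first, clean up once, return last: no code path can
    // skip the [localpool drain] this way.
    static double propertyValue(int property_id, int width, int height) {
        NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];
        int retval;
        switch (property_id) {
            case 3:  retval = width;  break;   // CV_CAP_PROP_FRAME_WIDTH
            case 4:  retval = height; break;   // CV_CAP_PROP_FRAME_HEIGHT
            default: retval = 0;      break;
        }
        [localpool drain];
        return retval;
    }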
@@ -498,13 +511,15 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
 @implementation CaptureDelegate
 
 - (id)init {
-    [super init];
-    newFrame = 0;
-    imagedata = NULL;
-    bgr_imagedata = NULL;
-    currSize = 0;
-    image = NULL;
-    bgr_image = NULL;
+    self = [super init];
+    if (self) {
+        newFrame = 0;
+        imagedata = NULL;
+        bgr_imagedata = NULL;
+        currSize = 0;
+        image = NULL;
+        bgr_image = NULL;
+    }
     return self;
 }
 
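Note on the -init hunk: ignoring the result of `[super init]` is the classic initializer mistake; the superclass is entitled to return nil or a different object, and the ivars should only be touched when initialization actually succeeded. The conventional shape the patch adopts, shown on a toy class (FrameState is an illustrative stand-in for CaptureDelegate):

    #import <Foundation/Foundation.h>

    @interface FrameState : NSObject {
        int   newFrame;
        char *imagedata;
    }
    @end

    @implementation FrameState
    - (id)init {
        self = [super init];   // super may return nil or substitute another object
        if (self) {            // initialize ivars only if we really got an instance
            newFrame  = 0;
            imagedata = NULL;
        }
        return self;
    }
    @end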
@@ -579,26 +594,26 @@ didDropVideoFrameWithSampleBuffer:(QTSampleBuffer *)sampleBuffer
     memcpy(imagedata, baseaddress, currSize);
 
     if (image == NULL) {
-        image = cvCreateImageHeader(cvSize(width,height), IPL_DEPTH_8U, 4);
+        image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
     }
-    image->width =width;
-    image->height = height;
+    image->width = (int)width;
+    image->height = (int)height;
     image->nChannels = 4;
     image->depth = IPL_DEPTH_8U;
-    image->widthStep = rowBytes;
+    image->widthStep = (int)rowBytes;
     image->imageData = imagedata;
-    image->imageSize = currSize;
+    image->imageSize = (int)currSize;
 
     if (bgr_image == NULL) {
-        bgr_image = cvCreateImageHeader(cvSize(width,height), IPL_DEPTH_8U, 3);
+        bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
     }
-    bgr_image->width =width;
-    bgr_image->height = height;
+    bgr_image->width = (int)width;
+    bgr_image->height = (int)height;
     bgr_image->nChannels = 3;
     bgr_image->depth = IPL_DEPTH_8U;
-    bgr_image->widthStep = rowBytes;
+    bgr_image->widthStep = (int)rowBytes;
     bgr_image->imageData = bgr_imagedata;
-    bgr_image->imageSize = currSize;
+    bgr_image->imageSize = (int)currSize;
 
     cvCvtColor(image, bgr_image, CV_BGRA2BGR);
 
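Note on the (int) casts here and in the retrieveFramePixelBuffer() hunk below: the width/height/rowBytes/currSize values come from CoreVideo pixel-buffer accessors and buffer sizes, which are size_t/NSUInteger typed, while the IplImage fields (width, height, widthStep, imageSize) are plain int, so 64-bit builds warn about the implicit narrowing; frame geometry is far below INT_MAX, so the explicit casts are lossless in practice. A trimmed sketch of wrapping an existing BGRA buffer this way (wrapBGRA is an illustrative helper, not part of the patch):

    #include <opencv2/core/core_c.h>   // cvCreateImageHeader, cvSize, IplImage
    #include <stddef.h>

    // Wrap a caller-owned BGRA buffer in an IplImage header without copying.
    // Geometry arrives as size_t; IplImage stores int, so narrow explicitly once.
    static IplImage *wrapBGRA(char *buffer, size_t width, size_t height, size_t rowBytes) {
        IplImage *img = cvCreateImageHeader(cvSize((int)width, (int)height),
                                            IPL_DEPTH_8U, 4);
        img->widthStep = (int)rowBytes;
        img->imageData = buffer;
        img->imageSize = (int)(rowBytes * height);
        return img;
    }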
@@ -752,29 +767,29 @@ IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
     }
 
     if (image == NULL) {
-        image = cvCreateImageHeader(cvSize(width,height), IPL_DEPTH_8U, 4);
+        image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
     }
 
-    image->width =width;
-    image->height = height;
+    image->width = (int)width;
+    image->height = (int)height;
     image->nChannels = 4;
     image->depth = IPL_DEPTH_8U;
-    image->widthStep = rowBytes;
+    image->widthStep = (int)rowBytes;
     image->imageData = imagedata;
-    image->imageSize = currSize;
+    image->imageSize = (int)currSize;
 
 
     if (bgr_image == NULL) {
-        bgr_image = cvCreateImageHeader(cvSize(width,height), IPL_DEPTH_8U, 3);
+        bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
     }
 
-    bgr_image->width =width;
-    bgr_image->height = height;
+    bgr_image->width = (int)width;
+    bgr_image->height = (int)height;
     bgr_image->nChannels = 3;
     bgr_image->depth = IPL_DEPTH_8U;
-    bgr_image->widthStep = rowBytes;
+    bgr_image->widthStep = (int)rowBytes;
     bgr_image->imageData = bgr_imagedata;
-    bgr_image->imageSize = currSize;
+    bgr_image->imageSize = (int)currSize;
 
     cvCvtColor(image, bgr_image,CV_BGRA2BGR);
 
|
Loading…
x
Reference in New Issue
Block a user