Normalize line endings and whitespace

OpenCV Buildbot
2012-10-17 03:18:30 +04:00
committed by Andrey Kamaev
parent 69020da607
commit 04384a71e4
1516 changed files with 258846 additions and 258162 deletions

View File

@@ -56,7 +56,7 @@ bool bsIsBigEndian( void )
///////////////////////// RBaseStream ////////////////////////////
bool RBaseStream::isOpened()
{
return m_is_opened;
}
@@ -219,7 +219,7 @@ int RLByteStream::getBytes( void* buffer, int count )
uchar* data = (uchar*)buffer;
int readed = 0;
assert( count >= 0 );
while( count > 0 )
{
int l;
@@ -350,7 +350,7 @@ WBaseStream::~WBaseStream()
bool WBaseStream::isOpened()
{
return m_is_opened;
}
@@ -368,7 +368,7 @@ void WBaseStream::allocate()
void WBaseStream::writeBlock()
{
int size = (int)(m_current - m_start);
assert( isOpened() );
if( size == 0 )
return;
@@ -392,7 +392,7 @@ bool WBaseStream::open( const string& filename )
{
close();
allocate();
m_file = fopen( filename.c_str(), "wb" );
if( m_file )
{
@@ -407,7 +407,7 @@ bool WBaseStream::open( vector<uchar>& buf )
{
close();
allocate();
m_buf = &buf;
m_is_opened = true;
m_block_pos = 0;
@@ -445,7 +445,7 @@ int WBaseStream::getPos()
}
///////////////////////////// WLByteStream ///////////////////////////////////
WLByteStream::~WLByteStream()
{
@@ -462,16 +462,16 @@ void WLByteStream::putByte( int val )
void WLByteStream::putBytes( const void* buffer, int count )
{
uchar* data = (uchar*)buffer;
assert( data && m_current && count >= 0 );
while( count )
{
int l = (int)(m_end - m_current);
if( l > count )
l = count;
if( l > 0 )
{
memcpy( m_current, data, l );
@@ -529,7 +529,7 @@ void WLByteStream::putDWord( int val )
}
///////////////////////////// WMByteStream ///////////////////////////////////
WMByteStream::~WMByteStream()
{

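For context, the L/M prefixes on these stream classes encode byte order: the L variants (RLByteStream, WLByteStream) move the least-significant byte first (Intel order), the M variants (RMByteStream, WMByteStream) the most-significant byte first (Motorola order). A minimal C++ sketch of the two putWord conventions (helper names are illustrative, not part of this file):

static void putWordLE(unsigned char* p, int val)  // WLByteStream convention
{
    p[0] = (unsigned char)val;          // least-significant byte first
    p[1] = (unsigned char)(val >> 8);
}

static void putWordBE(unsigned char* p, int val)  // WMByteStream convention
{
    p[0] = (unsigned char)(val >> 8);   // most-significant byte first
    p[1] = (unsigned char)val;
}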
View File

@@ -65,7 +65,7 @@ public:
//methods
RBaseStream();
virtual ~RBaseStream();
virtual bool open( const string& filename );
virtual bool open( const Mat& buf );
virtual void close();
@@ -73,9 +73,9 @@ public:
void setPos( int pos );
int getPos();
void skip( int bytes );
protected:
bool m_allocated;
uchar* m_start;
uchar* m_end;
@@ -97,11 +97,11 @@ class RLByteStream : public RBaseStream
{
public:
virtual ~RLByteStream();
int getByte();
int getBytes( void* buffer, int count );
int getWord();
int getDWord();
};
// class RMByteStream - uchar-oriented stream.
@@ -112,7 +112,7 @@ public:
virtual ~RMByteStream();
int getWord();
int getDWord();
};
// WBaseStream - base class for output streams
@@ -122,15 +122,15 @@ public:
//methods
WBaseStream();
virtual ~WBaseStream();
virtual bool open( const string& filename );
virtual bool open( vector<uchar>& buf );
virtual void close();
bool isOpened();
int getPos();
protected:
uchar* m_start;
uchar* m_end;
uchar* m_current;
@@ -139,7 +139,7 @@ protected:
FILE* m_file;
bool m_is_opened;
vector<uchar>* m_buf;
virtual void writeBlock();
virtual void release();
virtual void allocate();
@@ -156,7 +156,7 @@ public:
void putByte( int val );
void putBytes( const void* buffer, int count );
void putWord( int val );
void putDWord( int val );
};
@@ -167,7 +167,7 @@ class WMByteStream : public WLByteStream
public:
virtual ~WMByteStream();
void putWord( int val );
void putDWord( int val );
};
inline unsigned BSWAP(unsigned v)

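The hunk above ends at the BSWAP declaration. For reference, a typical 32-bit byte-swap body looks like the following sketch (the exact implementation in this header is not shown in the diff):

inline unsigned BSWAP(unsigned v)
{
    return (v << 24) | ((v & 0xff00) << 8) | ((v >> 8) & 0xff00) | (v >> 24);
}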
View File

@@ -461,14 +461,14 @@ VideoCapture::~VideoCapture()
bool VideoCapture::open(const string& filename)
{
if (!isOpened())
cap = cvCreateFileCapture(filename.c_str());
return isOpened();
}
bool VideoCapture::open(int device)
{
if (!isOpened())
cap = cvCreateCameraCapture(device);
return isOpened();
}

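The two open() overloads above are thin C++ wrappers over cvCreateFileCapture and cvCreateCameraCapture. A minimal usage sketch (the file name is a placeholder):

#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::VideoCapture cap("video.avi");  // file capture; fall back to a camera below
    if (!cap.isOpened())
        cap.open(0);                    // camera capture, device 0
    cv::Mat frame;
    while (cap.read(frame))
    {
        // process frame ...
    }
    return 0;
}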
File diff suppressed because it is too large

View File

@@ -398,17 +398,17 @@ bool CvCaptureCAM_CMU::grabFrame()
/*static void swapRedBlue(IplImage * im)
{
uchar * ptr = (uchar *) im->imageData;
uchar t;
for(int i=0; i<im->height; i++){
ptr = (uchar *) im->imageData+im->widthStep*i;
for(int j=0; j<im->width; j++){
t = ptr[0];
ptr[0] = ptr[2];
ptr[2] = t;
ptr+=3;
}
}
}*/
IplImage* CvCaptureCAM_CMU::retrieveFrame(int)

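The commented-out swapRedBlue() above exchanges the R and B channels by hand; elsewhere in this commit the same effect is obtained with cvConvertImage(..., CV_CVTIMG_SWAP_RB). A C++ sketch of the equivalent (the function name is illustrative):

#include <opencv2/imgproc/imgproc.hpp>

static void swapRedBlue(cv::Mat& img)  // BGR <-> RGB, in place
{
    cv::cvtColor(img, img, cv::COLOR_BGR2RGB);
}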
File diff suppressed because it is too large

View File

@@ -202,8 +202,8 @@ public:
virtual bool setProperty(int, double);
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual int getCaptureDomain() { return CV_CAP_DC1394; } // Return the type of the capture object: CV_CAP_VFW, etc...
protected:
virtual bool startCapture();
@@ -304,21 +304,21 @@ bool CvCaptureCAM_DC1394_v2_CPP::startCapture()
DC1394_ISO_SPEED_3200);
}
// should a specific mode be used
if (userMode >= 0)
{
dc1394video_mode_t wantedMode;
dc1394video_modes_t videoModes;
dc1394_video_get_supported_modes(dcCam, &videoModes);
//set mode from number, for example the second supported mode, i.e. userMode = 1
if (userMode < (int)videoModes.num)
{
wantedMode = videoModes.modes[userMode];
}
//set modes directly from DC1394 constants (from dc1394video_mode_t)
else if ((userMode >= DC1394_VIDEO_MODE_MIN) && (userMode <= DC1394_VIDEO_MODE_MAX ))
{
@@ -328,7 +328,7 @@ bool CvCaptureCAM_DC1394_v2_CPP::startCapture()
{
j++;
}
if ((int)videoModes.modes[j]==userMode)
{
wantedMode = videoModes.modes[j];
@@ -361,8 +361,8 @@ bool CvCaptureCAM_DC1394_v2_CPP::startCapture()
for (i = 0; i < (int)videoModes.num; i++)
{
dc1394video_mode_t mode = videoModes.modes[i];
if (mode >= DC1394_VIDEO_MODE_FORMAT7_MIN && mode <= DC1394_VIDEO_MODE_FORMAT7_MAX)
continue;
int pref = -1;
dc1394color_coding_t colorCoding;
dc1394_get_color_coding_from_video_mode(dcCam, mode, &colorCoding);
@@ -398,7 +398,7 @@ bool CvCaptureCAM_DC1394_v2_CPP::startCapture()
if ((int)bestMode >= 0)
code = dc1394_video_set_mode(dcCam, bestMode);
}
if (fps > 0)
{
dc1394video_mode_t mode;
@@ -588,9 +588,9 @@ bool CvCaptureCAM_DC1394_v2_CPP::grabFrame()
cvInitImageHeader(&fhdr, cvSize(fc->size[0], fc->size[1]), 8, nch);
cvSetData(&fhdr, fc->image, fc->size[0]*nch);
// Swap R&B channels:
if (nch==3)
cvConvertImage(&fhdr,&fhdr,CV_CVTIMG_SWAP_RB);
if( rectify && cameraId == VIDERE && nimages == 2 )
{
@@ -662,7 +662,7 @@ double CvCaptureCAM_DC1394_v2_CPP::getProperty(int propId)
&feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].RV_value) == DC1394_SUCCESS)
return feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].RV_value;
break;
case CV_CAP_PROP_GUID:
//the least 32 bits are enough to identify the camera
return (double) (guid & 0x00000000FFFFFFFF);
break;
@@ -688,19 +688,19 @@ bool CvCaptureCAM_DC1394_v2_CPP::setProperty(int propId, double value)
{
case CV_CAP_PROP_FRAME_WIDTH:
if(started)
return false;
frameWidth = cvRound(value);
frameHeight = 0;
break;
case CV_CAP_PROP_FRAME_HEIGHT:
if(started)
return false;
frameWidth = 0;
frameHeight = cvRound(value);
break;
case CV_CAP_PROP_FPS:
if(started)
return false;
fps = value;
break;
case CV_CAP_PROP_RECTIFICATION:
@@ -709,26 +709,26 @@ bool CvCaptureCAM_DC1394_v2_CPP::setProperty(int propId, double value)
rectify = fabs(value) > FLT_EPSILON;
break;
case CV_CAP_PROP_MODE:
if(started)
return false;
userMode = cvRound(value);
break;
case CV_CAP_PROP_ISO_SPEED:
if(started)
return false;
isoSpeed = cvRound(value);
break;
//The code below is based on coriander, callbacks.c:795, refer to case RANGE_MENU_MAN :
default:
if (propId<CV_CAP_PROP_MAX_DC1394 && dc1394properties[propId]!=-1
&& dcCam)
{
//get the corresponding feature from property-id
dc1394feature_info_t *act_feature = &feature_set.feature[dc1394properties[propId]-DC1394_FEATURE_MIN];
if (cvRound(value) == CV_CAP_PROP_DC1394_OFF)
{
if ( (act_feature->on_off_capable)
&& (dc1394_feature_set_power(dcCam, act_feature->id, DC1394_OFF) == DC1394_SUCCESS))
{
act_feature->is_on=DC1394_OFF;
@@ -793,8 +793,8 @@ bool CvCaptureCAM_DC1394_v2_CPP::setProperty(int propId, double value)
return true;
}
}
//first: check boundaries
if (value < act_feature->min)
{
value = act_feature->min;
@@ -803,7 +803,7 @@ bool CvCaptureCAM_DC1394_v2_CPP::setProperty(int propId, double value)
{
value = act_feature->max;
}
if (dc1394_feature_set_value(dcCam, act_feature->id, cvRound(value)) == DC1394_SUCCESS)
{
act_feature->value = value;

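Note the pattern in setProperty() above: frame size, FPS, mode, and ISO speed are rejected once started is true, so they must be configured before the first grab. A hypothetical C-API sequence (camera index and values are assumptions for illustration):

CvCapture* cap = cvCreateCameraCapture(CV_CAP_DC1394);     // first DC1394 camera
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH,  640);
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 480);
cvSetCaptureProperty(cap, CV_CAP_PROP_FPS,          30);
IplImage* frame = cvQueryFrame(cap);                       // capture starts here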
View File

@@ -68,7 +68,7 @@ typedef void (*CvReleaseVideoWriter_Plugin)( void** writer );
/*
* For CUDA encoder
*/
OPENCV_FFMPEG_API struct OutputMediaStream_FFMPEG* create_OutputMediaStream_FFMPEG(const char* fileName, int width, int height, double fps);
OPENCV_FFMPEG_API void release_OutputMediaStream_FFMPEG(struct OutputMediaStream_FFMPEG* stream);
OPENCV_FFMPEG_API void write_OutputMediaStream_FFMPEG(struct OutputMediaStream_FFMPEG* stream, unsigned char* data, int size, int keyFrame);

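The three entry points above form a create/write/release lifecycle. A sketch of a caller (the file name and dummy packet are placeholders; the header above is assumed to be included):

#include <vector>

static void writeOneChunk()
{
    OutputMediaStream_FFMPEG* stream =
        create_OutputMediaStream_FFMPEG("out.mp4", 1920, 1080, 30.0);
    if (!stream)
        return;
    std::vector<unsigned char> packet(4096, 0);  // placeholder encoded bitstream
    write_OutputMediaStream_FFMPEG(stream, &packet[0], (int)packet.size(), 1);
    release_OutputMediaStream_FFMPEG(stream);
}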
View File

@@ -81,74 +81,74 @@
- (id)init;
{
self = [super init];
if (self) {
// react to device orientation notifications
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
currentDeviceOrientation = [[UIDevice currentDevice] orientation];
// check if camera available
cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
NSLog(@"camera available: %@", (cameraAvailable == YES ? @"YES" : @"NO") );
running = NO;
// set camera default configuration
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
self.defaultFPS = 15;
self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
self.parentView = nil;
self.useAVCaptureVideoPreviewLayer = NO;
}
return self;
}
- (id)initWithParentView:(UIView*)parent;
{
self = [super init];
if (self) {
// react to device orientation notifications
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
currentDeviceOrientation = [[UIDevice currentDevice] orientation];
// check if camera available
cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
NSLog(@"camera available: %@", (cameraAvailable == YES ? @"YES" : @"NO") );
running = NO;
// set camera default configuration
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
self.defaultFPS = 15;
self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
self.parentView = parent;
self.useAVCaptureVideoPreviewLayer = YES;
}
return self;
}
- (void)dealloc;
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}
@@ -162,41 +162,41 @@
[self performSelectorOnMainThread:@selector(start) withObject:nil waitUntilDone:NO];
return;
}
if (running == YES) {
return;
}
running = YES;
// TODO update image size data before actually starting (needed for recording)
[self updateSize];
if (cameraAvailable) {
[self startCaptureSession];
}
}
- (void)pause;
{
running = NO;
[self.captureSession stopRunning];
}
- (void)stop;
{
running = NO;
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
[self.captureSession stopRunning];
self.captureSession = nil;
self.captureVideoPreviewLayer = nil;
self.videoCaptureConnection = nil;
captureSessionLoaded = NO;
}
@@ -204,18 +204,18 @@
// use front/back camera
- (void)switchCameras;
{
BOOL was_running = self.running;
if (was_running) {
[self stop];
}
if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
} else {
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
}
if (was_running) {
[self start];
}
}
@@ -225,25 +225,25 @@
- (void)deviceOrientationDidChange:(NSNotification*)notification
{
UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
switch (orientation)
{
case UIDeviceOrientationPortrait:
case UIDeviceOrientationPortraitUpsideDown:
case UIDeviceOrientationLandscapeLeft:
case UIDeviceOrientationLandscapeRight:
currentDeviceOrientation = orientation;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
default:
break;
}
NSLog(@"deviceOrientationDidChange: %d", orientation);
[self updateOrientation];
}
@@ -252,41 +252,41 @@
- (void)createCaptureSession;
{
// set an AV capture session preset
self.captureSession = [[AVCaptureSession alloc] init];
if ([self.captureSession canSetSessionPreset:self.defaultAVCaptureSessionPreset]) {
[self.captureSession setSessionPreset:self.defaultAVCaptureSessionPreset];
} else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
[self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
} else {
NSLog(@"[Camera] Error: could not set session preset");
}
}
- (void)createCaptureDevice;
{
// setup the device
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
}
- (void)createVideoPreviewLayer;
{
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
if ([self.captureVideoPreviewLayer isOrientationSupported]) {
[self.captureVideoPreviewLayer setOrientation:self.defaultAVCaptureVideoOrientation];
}
if (parentView != nil) {
self.captureVideoPreviewLayer.frame = self.parentView.bounds;
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.parentView.layer addSublayer:self.captureVideoPreviewLayer];
}
NSLog(@"[Camera] created AVCaptureVideoPreviewLayer");
}
@@ -294,81 +294,81 @@
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
{
for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([device position] == desiredPosition) {
[self.captureSession beginConfiguration];
NSError* error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
NSLog(@"error creating input %@", [error localizedDescription]);
}
// support for autofocus
if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for autofocus configuration %@", [error localizedDescription]);
}
}
[self.captureSession addInput:input];
for (AVCaptureInput *oldInput in self.captureSession.inputs) {
[self.captureSession removeInput:oldInput];
}
[self.captureSession addInput:input];
[self.captureSession commitConfiguration];
break;
}
}
}
- (void)startCaptureSession
{
if (!cameraAvailable) {
return;
}
if (self.captureSessionLoaded == NO) {
[self createCaptureSession];
[self createCaptureDevice];
[self createCaptureOutput];
// setup preview layer
if (self.useAVCaptureVideoPreviewLayer) {
[self createVideoPreviewLayer];
} else {
[self createCustomVideoPreview];
}
captureSessionLoaded = YES;
}
[self.captureSession startRunning];
}
- (void)createCaptureOutput;
{
[NSException raise:NSInternalInconsistencyException
format:@"You must override %@ in a subclass", NSStringFromSelector(_cmd)];
}
- (void)createCustomVideoPreview;
{
[NSException raise:NSInternalInconsistencyException
format:@"You must override %@ in a subclass", NSStringFromSelector(_cmd)];
}
- (void)updateOrientation;
{
// nothing to do here
}
@@ -385,7 +385,7 @@
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetMedium]) {
//TODO: find the correct resolution
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetLow]) {
//TODO: find the correct resolution
self.imageWidth = 640;

View File

@@ -61,67 +61,67 @@
- (void)takePicture
{
if (cameraAvailable == NO) {
return;
}
cameraAvailable = NO;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoCaptureConnection
completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
if (error == nil && imageSampleBuffer != NULL)
{
// TODO check
// NSNumber* imageOrientation = [UIImage cgImageOrientationForUIDeviceOrientation:currentDeviceOrientation];
// CMSetAttachment(imageSampleBuffer, kCGImagePropertyOrientation, imageOrientation, 1);
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
dispatch_async(dispatch_get_main_queue(), ^{
[self.captureSession stopRunning];
// Make sure we create objects on the main thread in the main context
UIImage* newImage = [UIImage imageWithData:jpegData];
//UIImageOrientation orientation = [newImage imageOrientation];
// TODO: only apply rotation, don't scale, since we can set this directly in the camera
/*
switch (orientation) {
case UIImageOrientationUp:
case UIImageOrientationDown:
newImage = [newImage imageWithAppliedRotationAndMaxSize:CGSizeMake(640.0, 480.0)];
break;
case UIImageOrientationLeft:
case UIImageOrientationRight:
newImage = [newImage imageWithMaxSize:CGSizeMake(640.0, 480.0)];
default:
break;
}
*/
// We have captured the image, we can allow the user to take another picture
cameraAvailable = YES;
NSLog(@"CvPhotoCamera captured image");
if (self.delegate) {
[self.delegate photoCamera:self capturedImage:newImage];
}
[self.captureSession startRunning];
});
}
}];
}
- (void)stop;
{
[super stop];
self.stillImageOutput = nil;
}
@@ -130,35 +130,35 @@
- (void)createStillImageOutput;
{
// setup still image output with jpeg codec
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.captureSession addOutput:self.stillImageOutput];
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([port.mediaType isEqual:AVMediaTypeVideo]) {
self.videoCaptureConnection = connection;
break;
}
}
if (self.videoCaptureConnection) {
break;
}
}
NSLog(@"[Camera] still image output created");
}
- (void)createCaptureOutput;
{
[self createStillImageOutput];
}
- (void)createCustomVideoPreview;
{
//do nothing, always use AVCaptureVideoPreviewLayer
}

View File

@@ -81,12 +81,12 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
- (id)initWithParentView:(UIView*)parent;
{
self = [super initWithParentView:parent];
if (self) {
self.useAVCaptureVideoPreviewLayer = NO;
self.recordVideo = NO;
}
return self;
}
@@ -97,176 +97,176 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
- (void)start;
{
[super start];
if (self.recordVideo == YES) {
NSError* error;
if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
[[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
}
if (error == nil) {
NSLog(@"[Camera] Delete file %@", [self videoFileString]);
}
}
}
- (void)stop;
{
[super stop];
self.videoDataOutput = nil;
if (videoDataOutputQueue) {
dispatch_release(videoDataOutputQueue);
}
if (self.recordVideo == YES) {
if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
[self.recordAssetWriter finishWriting];
NSLog(@"[Camera] recording stopped");
} else {
NSLog(@"[Camera] Recording Error: asset writer status is not writing");
}
self.recordAssetWriter = nil;
self.recordAssetWriterInput = nil;
self.recordPixelBufferAdaptor = nil;
}
[self.customPreviewLayer removeFromSuperlayer];
self.customPreviewLayer = nil;
}
// TODO fix
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
{
NSLog(@"layout preview layer");
if (self.parentView != nil) {
CALayer* layer = self.customPreviewLayer;
CGRect bounds = self.customPreviewLayer.bounds;
int rotation_angle = 0;
bool flip_bounds = false;
switch (interfaceOrientation) {
case UIInterfaceOrientationPortrait:
NSLog(@"to Portrait");
rotation_angle = 270;
break;
case UIInterfaceOrientationPortraitUpsideDown:
rotation_angle = 90;
NSLog(@"to UpsideDown");
break;
case UIInterfaceOrientationLandscapeLeft:
rotation_angle = 0;
NSLog(@"to LandscapeLeft");
break;
case UIInterfaceOrientationLandscapeRight:
rotation_angle = 180;
NSLog(@"to LandscapeRight");
break;
default:
break; // leave the layer in its last known orientation
}
switch (defaultAVCaptureVideoOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
rotation_angle += 180;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
rotation_angle += 270;
break;
case AVCaptureVideoOrientationPortrait:
rotation_angle += 90;
case AVCaptureVideoOrientationLandscapeLeft:
break;
default:
break;
}
rotation_angle = rotation_angle % 360;
if (rotation_angle == 90 || rotation_angle == 270) {
flip_bounds = true;
}
if (flip_bounds) {
NSLog(@"flip bounds");
bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
}
layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
layer.bounds = bounds;
}
}
// TODO fix
- (void)layoutPreviewLayer;
{
NSLog(@"layout preview layer");
if (self.parentView != nil) {
CALayer* layer = self.customPreviewLayer;
CGRect bounds = self.customPreviewLayer.bounds;
int rotation_angle = 0;
bool flip_bounds = false;
switch (currentDeviceOrientation) {
case UIDeviceOrientationPortrait:
rotation_angle = 270;
break;
case UIDeviceOrientationPortraitUpsideDown:
rotation_angle = 90;
break;
case UIDeviceOrientationLandscapeLeft:
NSLog(@"left");
rotation_angle = 180;
break;
case UIDeviceOrientationLandscapeRight:
NSLog(@"right");
rotation_angle = 0;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
default:
break; // leave the layer in its last known orientation
}
switch (defaultAVCaptureVideoOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
rotation_angle += 180;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
rotation_angle += 270;
break;
case AVCaptureVideoOrientationPortrait:
rotation_angle += 90;
case AVCaptureVideoOrientationLandscapeLeft:
break;
default:
break;
}
rotation_angle = rotation_angle % 360;
if (rotation_angle == 90 || rotation_angle == 270) {
flip_bounds = true;
}
if (flip_bounds) {
NSLog(@"flip bounds");
bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
}
layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
layer.bounds = bounds;
}
}
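Both orientation handlers above add a per-orientation offset to the current device/interface angle and flip the layer bounds when the result is 90 or 270 degrees. A distilled C++ sketch of that arithmetic (standalone names, not the file's own; in the original switch the Portrait case intentionally falls through to LandscapeLeft after adding 90):

enum Orientation { LandscapeLeft, LandscapeRight, Portrait, PortraitUpsideDown };

static int orientationOffset(Orientation o)  // offset for the configured capture orientation
{
    switch (o) {
    case LandscapeRight:     return 180;
    case PortraitUpsideDown: return 270;
    case Portrait:           return 90;
    case LandscapeLeft:
    default:                 return 0;
    }
}

static bool needsBoundsFlip(int deviceAngle, Orientation o)
{
    int rotation = (deviceAngle + orientationOffset(o)) % 360;
    return rotation == 90 || rotation == 270;  // width and height swap
}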
@@ -278,115 +278,115 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
- (void)createVideoDataOutput;
{
// Make a video data output
self.videoDataOutput = [AVCaptureVideoDataOutput new];
// In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component)
// In color mode, the BGRA format is used
OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// discard if the data output queue is blocked (as we process the still image)
[self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
[self.captureSession addOutput:self.videoDataOutput];
}
[[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
// set default FPS
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMinFrameDuration) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMinFrameDuration = CMTimeMake(1, self.defaultFPS);
}
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMaxFrameDuration) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMaxFrameDuration = CMTimeMake(1, self.defaultFPS);
}
// set video mirroring for front camera (more intuitive)
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
} else {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
}
}
// set default video orientation
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
}
// create a custom preview layer
self.customPreviewLayer = [CALayer layer];
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
[self layoutPreviewLayer];
// create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
// a serial dispatch queue must be used to guarantee that video frames will be delivered in order
// see the header doc for setSampleBufferDelegate:queue: for more information
videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
[self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
}
- (void)createVideoFileOutput;
{
/* Video File Output in H.264, via AVAssetWriter */
NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);
NSDictionary *outputSettings
= [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
[NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
AVVideoCodecH264, AVVideoCodecKey,
nil
];
self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
self.recordPixelBufferAdaptor =
[[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:self.recordAssetWriterInput
sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];
NSError* error = nil;
NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
fileType:AVFileTypeMPEG4
error:&error];
if (error != nil) {
NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
}
[self.recordAssetWriter addInput:self.recordAssetWriterInput];
self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;
NSLog(@"[Camera] created AVAssetWriter");
}
- (void)createCaptureOutput;
{
[self createVideoDataOutput];
if (self.recordVideo == YES) {
[self createVideoFileOutput];
}
}
- (void)createCustomVideoPreview;
{
[self.parentView.layer addSublayer:self.customPreviewLayer];
}
@@ -395,157 +395,157 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (self.delegate) {
// convert from Core Media to Core Video
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void* bufferAddress;
size_t width;
size_t height;
size_t bytesPerRow;
CGColorSpaceRef colorSpace;
CGContextRef context;
int format_opencv;
OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
format_opencv = CV_8UC1;
bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
} else { // expect kCVPixelFormatType_32BGRA
format_opencv = CV_8UC4;
bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
width = CVPixelBufferGetWidth(imageBuffer);
height = CVPixelBufferGetHeight(imageBuffer);
bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
}
// delegate image processing to the delegate
cv::Mat image(height, width, format_opencv, bufferAddress, bytesPerRow);
cv::Mat* result = NULL;
CGImage* dstImage;
if ([self.delegate respondsToSelector:@selector(processImage:)]) {
[self.delegate processImage:image];
}
// check if matrix data pointer or dimensions were changed by the delegate
bool iOSimage = false;
if (height == image.rows && width == image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
iOSimage = true;
}
// (create color space, create graphics context, render buffer)
CGBitmapInfo bitmapInfo;
// basically we decide if it's a grayscale, rgb or rgba image
if (image.channels() == 1) {
colorSpace = CGColorSpaceCreateDeviceGray();
bitmapInfo = kCGImageAlphaNone;
} else if (image.channels() == 3) {
colorSpace = CGColorSpaceCreateDeviceRGB();
bitmapInfo = kCGImageAlphaNone;
if (iOSimage) {
bitmapInfo |= kCGBitmapByteOrder32Little;
} else {
bitmapInfo |= kCGBitmapByteOrder32Big;
}
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
bitmapInfo = kCGImageAlphaPremultipliedFirst;
if (iOSimage) {
bitmapInfo |= kCGBitmapByteOrder32Little;
} else {
bitmapInfo |= kCGBitmapByteOrder32Big;
}
}
if (iOSimage) {
context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
dstImage = CGBitmapContextCreateImage(context);
CGContextRelease(context);
} else {
NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
// Creating CGImage from cv::Mat
dstImage = CGImageCreate(image.cols, // width
image.rows, // height
8, // bits per component
8 * image.elemSize(), // bits per pixel
image.step, // bytesPerRow
colorSpace, // colorspace
bitmapInfo, // bitmap info
provider, // CGDataProviderRef
NULL, // decode
false, // should interpolate
kCGRenderingIntentDefault // intent
);
CGDataProviderRelease(provider);
}
// render buffer
dispatch_sync(dispatch_get_main_queue(), ^{
self.customPreviewLayer.contents = (__bridge id)dstImage;
});
if (self.recordVideo == YES) {
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// CMTimeShow(lastSampleTime);
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordAssetWriter startWriting];
[self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
return;
} else {
NSLog(@"[Camera] Video recording started");
}
}
if (self.recordAssetWriterInput.readyForMoreMediaData) {
if (! [self.recordPixelBufferAdaptor appendPixelBuffer:imageBuffer
withPresentationTime:lastSampleTime] ) {
NSLog(@"Video Writing Error");
}
}
}
// cleanup
CGImageRelease(dstImage);
CGColorSpaceRelease(colorSpace);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
}
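The delegate above wraps the locked CoreVideo plane in a cv::Mat header without copying any pixels. A self-contained sketch of that zero-copy construction (the synthetic buffer stands in for the CVPixelBuffer plane):

#include <opencv2/core/core.hpp>
#include <vector>

int main()
{
    const int width = 640, height = 480;
    const size_t bytesPerRow = 640;  // luma plane of YpCbCr 4:2:0: one byte per pixel
    std::vector<unsigned char> luma(bytesPerRow * height, 128);

    // As in the grayscale branch above: the Mat only references the buffer,
    // so the buffer must stay alive (and locked) while the Mat is in use.
    cv::Mat image(height, width, CV_8UC1, &luma[0], bytesPerRow);
    return 0;
}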
- (void)updateOrientation;
{
NSLog(@"rotate..");
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
[self layoutPreviewLayer];
}
@@ -554,8 +554,8 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
if (self.recordVideo == NO) {
return;
}
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:[self videoFileURL]]) {
[library writeVideoAtPathToSavedPhotosAlbum:[self videoFileURL]
completionBlock:^(NSURL *assetURL, NSError *error){}];

View File

@@ -73,7 +73,7 @@ public:
virtual bool setProperty(int, double) { return false; }
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual int getCaptureDomain() { return CV_CAP_MIL; } // Return the type of the capture object: CV_CAP_VFW, etc...
protected:
void init();
@@ -193,10 +193,10 @@ double CvCaptureCAM_MIL::getProperty( int property_id )
{
switch( property_id )
{
case CV_CAP_PROP_FRAME_WIDTH:
return rgb_frame ? rgb_frame->width : 0;
case CV_CAP_PROP_FRAME_HEIGHT:
return rgb_frame ? rgb_frame->height : 0;
}
return 0;
}
@@ -209,7 +209,7 @@ bool CvCaptureCAM_MIL::setProperty( int, double )
CvCapture* cvCreateCameraCapture_MIL( int index )
{
CvCaptureCAM_MIL* capture = new CvCaptureCAM_MIL;
if( capture->open( index ))
return capture;

View File

@@ -575,12 +575,12 @@ CvCapture_OpenNI::CvCapture_OpenNI( int index )
// Choose device according to index
xn::NodeInfoList::Iterator it = devicesList.Begin();
for( int i = 0; i < index && it!=devicesList.End(); ++i ) it++;
if ( it == devicesList.End() )
{
std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed device with index " << index << std::endl;
return;
}
xn::NodeInfo deviceNode = *it;
status = context.CreateProductionTree( deviceNode, productionNode );

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -43,7 +43,7 @@
#include <DeepSeaIF.h>
#if _MSC_VER >= 1200
#pragma comment(lib,"DeepSeaIF.lib")
#endif
@@ -64,12 +64,12 @@ public:
virtual bool setProperty(int, double) { return false; }
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual int getCaptureDomain() { return CV_CAP_TYZX; } // Return the type of the capture object: CV_CAP_VFW, etc...
protected:
virtual bool allocateImage();
int index;
IplImage* image;
}
CvCaptureCAM_TYZX;
@@ -79,149 +79,149 @@ int g_tyzx_refcount = 0;
bool CvCaptureCAM_TYZX::open( int _index )
{
close();
if(!g_tyzx_camera){
g_tyzx_camera = new DeepSeaIF;
if(!g_tyzx_camera) return false;
if(!g_tyzx_camera->initializeSettings(NULL)){
delete g_tyzx_camera;
return false;
}
// set initial sensor mode
// TODO is g_tyzx_camera redundant?
g_tyzx_camera->setSensorMode(g_tyzx_camera->getSensorMode());
// mm's
g_tyzx_camera->setZUnits((int) 1000);
g_tyzx_camera->enableLeftColor(true);
g_tyzx_camera->setColorMode(DeepSeaIF::BGRcolor);
g_tyzx_camera->setDoIntensityCrop(true);
g_tyzx_camera->enable8bitImages(true);
if(!g_tyzx_camera->startCapture()){
return false;
}
g_tyzx_refcount++;
}
index = _index;
return true;
}
void CvCaptureCAM_TYZX::close()
{
if( isOpened() )
{
cvReleaseImage( &image );
g_tyzx_refcount--;
if(g_tyzx_refcount==0){
delete g_tyzx_camera;
g_tyzx_camera = 0; // allow a later open() to recreate the camera
}
}
}
bool CvCaptureCAM_TYZX::grabFrame()
{
return isOpened() && g_tyzx_camera && g_tyzx_camera->grab();
}
bool CvCaptureCAM_TYZX::allocateImage()
{
int depth, nch;
CvSize size;
// assume we want to resize
cvReleaseImage(&image);
// figure out size depending on index provided
switch(index){
case CV_TYZX_RIGHT:
size = cvSize(g_tyzx_camera->intensityWidth(), g_tyzx_camera->intensityHeight());
depth = 8;
nch = 1;
break;
case CV_TYZX_Z:
size = cvSize(g_tyzx_camera->zWidth(), g_tyzx_camera->zHeight());
depth = IPL_DEPTH_16S;
nch = 1;
break;
case CV_TYZX_LEFT:
default:
size = cvSize(g_tyzx_camera->intensityWidth(), g_tyzx_camera->intensityHeight());
depth = 8;
nch = 1;
break;
}
image = cvCreateImage(size, depth, nch);
return image != 0;
}
/// Copy 'grabbed' image into capture buffer and return it.
IplImage * CvCaptureCAM_TYZX::retrieveFrame(int)
{
if(!isOpened() || !g_tyzx_camera) return 0;
if(!image && !allocateImage())
return 0;
// copy camera image into buffer.
// tempting to reference TYZX memory directly to avoid copying.
switch (index)
{
case CV_TYZX_RIGHT:
memcpy(image->imageData, g_tyzx_camera->getRImage(), image->imageSize);
break;
case CV_TYZX_Z:
memcpy(image->imageData, g_tyzx_camera->getZImage(), image->imageSize);
break;
case CV_TYZX_LEFT:
default:
memcpy(image->imageData, g_tyzx_camera->getLImage(), image->imageSize);
break;
}
return image;
}
double CvCaptureCAM_TYZX::getProperty(int property_id)
{
CvSize size;
switch(index)
{
case CV_TYZX_LEFT:
size = cvSize(g_tyzx_camera->intensityWidth(), g_tyzx_camera->intensityHeight());
break;
case CV_TYZX_RIGHT:
size = cvSize(g_tyzx_camera->intensityWidth(), g_tyzx_camera->intensityHeight());
break;
case CV_TYZX_Z:
size = cvSize(g_tyzx_camera->zWidth(), g_tyzx_camera->zHeight());
break;
default:
size = cvSize(0,0);
}
switch( property_id )
{
case CV_CAP_PROP_FRAME_WIDTH:
return size.width;
case CV_CAP_PROP_FRAME_HEIGHT:
return size.height;
}
return 0;
}
bool CvCaptureCAM_TYZX::setProperty( int, double )
{
return false;
}
CvCapture * cvCreateCameraCapture_TYZX (int index)
{
CvCaptureCAM_TYZX * capture = new CvCaptureCAM_TYZX;
if( capture->open(index) )
return capture;
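
All of these camera backends sit behind the same legacy C capture interface, so for orientation this is roughly how a caller exercises the methods patched above (a minimal sketch against the stock OpenCV C API; the device index and output are illustrative):

#include <opencv2/highgui/highgui_c.h>
#include <stdio.h>

int main(void)
{
    CvCapture* cap = cvCreateCameraCapture(0);  /* dispatches to MIL/TYZX/V4L/... */
    if( !cap )
        return 1;

    /* getProperty() above backs these queries */
    printf( "%.0fx%.0f\n",
            cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH),
            cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT) );

    if( cvGrabFrame(cap) )                        /* grabFrame() */
    {
        IplImage* frame = cvRetrieveFrame(cap, 0);/* retrieveFrame(); owned by cap */
        (void)frame;                              /* do not release it yourself */
    }
    cvReleaseCapture( &cap );
    return 0;
}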

View File

@@ -166,8 +166,8 @@ bool CvCapture_Unicap::initDevice() {
memset(&raw_buffer, 0x0, sizeof(unicap_data_buffer_t));
raw_frame = cvCreateImage(cvSize(format.size.width,
format.size.height),
8, format.bpp / 8);
memcpy(&raw_buffer.format, &format, sizeof(raw_buffer.format));
raw_buffer.data = (unsigned char*)raw_frame->imageData;
raw_buffer.buffer_size = format.size.width *
@@ -183,8 +183,8 @@ bool CvCapture_Unicap::initDevice() {
// buffer.format.bpp = 8;
frame = cvCreateImage(cvSize(buffer.format.size.width,
buffer.format.size.height),
8, buffer.format.bpp / 8);
buffer.data = (unsigned char*)frame->imageData;
buffer.buffer_size = buffer.format.size.width *
buffer.format.size.height * buffer.format.bpp / 8;

View File

@@ -548,7 +548,7 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture)
#ifdef HAVE_JPEG
#ifdef __USE_GNU
/* support for MJPEG is only available with libjpeg and gcc,
because it uses libjpeg and fmemopen()
*/
if (try_palette_v4l2(capture, V4L2_PIX_FMT_MJPEG) == 0 ||
try_palette_v4l2(capture, V4L2_PIX_FMT_JPEG) == 0)
@@ -582,7 +582,7 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture)
}
else
{
fprintf(stderr, "HIGHGUI ERROR: V4L2: Pixel format of incoming image is unsupported by OpenCV\n");
icvCloseCAM_V4L(capture);
return -1;
}
@@ -617,7 +617,7 @@ static int autosetup_capture_mode_v4l(CvCaptureCAM_V4L* capture)
//printf("negotiated palette YUV420P\n");
}
else {
fprintf(stderr, "HIGHGUI ERROR: V4L: Pixel format of incoming image is unsupported by OpenCV\n");
icvCloseCAM_V4L(capture);
return -1;
}
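
Both failure branches above fire when pixel-format negotiation comes up empty. The negotiation itself is a VIDIOC_S_FMT round trip per candidate format; a standalone sketch of that handshake (the helper name is illustrative, and real code also has to cope with the driver substituting width/height):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Returns 1 if the driver accepted the requested pixel format as-is. */
static int try_format(int fd, unsigned int pixfmt, int w, int h)
{
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = w;
    fmt.fmt.pix.height      = h;
    fmt.fmt.pix.pixelformat = pixfmt;   /* e.g. V4L2_PIX_FMT_YUYV */
    fmt.fmt.pix.field       = V4L2_FIELD_ANY;
    if( ioctl(fd, VIDIOC_S_FMT, &fmt) == -1 )
        return 0;
    /* the driver may silently substitute another format; verify it */
    return fmt.fmt.pix.pixelformat == pixfmt;
}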
@@ -928,9 +928,9 @@ static int _capture_V4L2 (CvCaptureCAM_V4L *capture, char *deviceName)
return -1;
} else {
buffer_number--;
fprintf (stderr, "Insufficient buffer memory on %s -- decreasing buffers\n", deviceName);
goto try_again;
}
}
@@ -969,8 +969,8 @@ static int _capture_V4L2 (CvCaptureCAM_V4L *capture, char *deviceName)
}
if (n_buffers == 0) {
capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length );
capture->buffers[MAX_V4L_BUFFERS].length = buf.length;
}
}
@@ -1183,14 +1183,14 @@ static int read_frame_v4l2(CvCaptureCAM_V4L* capture) {
return 0;
case EIO:
if (!(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE)))
{
if (xioctl(capture->deviceHandle, VIDIOC_QBUF, &buf) == -1)
{
return 0;
}
}
return 0;
default:
/* display the error and stop processing */
@@ -1202,8 +1202,8 @@ static int read_frame_v4l2(CvCaptureCAM_V4L* capture) {
assert(buf.index < capture->req.count);
memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
capture->buffers[buf.index].start,
capture->buffers[MAX_V4L_BUFFERS].length );
capture->bufferIndex = MAX_V4L_BUFFERS;
//printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n",
// buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused);
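
For context, the EIO branch above requeues the buffer and reports no frame because the surrounding capture loop pairs every dequeue with a requeue; stripped of error handling, that loop body looks roughly like this (fd and the mmap'ed buffers are assumed set up as in _capture_V4L2 above):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Fetch one frame: dequeue a filled buffer, use it, hand it back. */
static int fetch_frame(int fd)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if( ioctl(fd, VIDIOC_DQBUF, &buf) == -1 )
        return -1;              /* EAGAIN/EIO handled by the caller */
    /* ... consume the mmap'ed memory for buf.index here ... */
    if( ioctl(fd, VIDIOC_QBUF, &buf) == -1 )
        return -1;
    return buf.index;
}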
@@ -1347,9 +1347,9 @@ static int icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) {
capture->mmaps[capture->bufferIndex].format = capture->imageProperties.palette;
if (ioctl (capture->deviceHandle, VIDIOCMCAPTURE,
&capture->mmaps[capture->bufferIndex]) == -1) {
/* capture is on the way, so just exit */
return 1;
}
++capture->bufferIndex;
@@ -1647,9 +1647,9 @@ yuyv_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
SAT(g);
SAT(b);
*d++ = b;
*d++ = g;
*d++ = r;
r = y2 + cr;
b = y2 + cb;
@@ -1658,9 +1658,9 @@ yuyv_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
SAT(g);
SAT(b);
*d++ = b;
*d++ = g;
*d++ = r;
}
}
}
@@ -1693,9 +1693,9 @@ uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
SAT(g);
SAT(b);
*d++ = b;
*d++ = g;
*d++ = r;
r = y2 + cr;
b = y2 + cb;
@@ -1704,9 +1704,9 @@ uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
SAT(g);
SAT(b);
*d++ = b;
*d++ = g;
*d++ = r;
}
}
}
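
Both converters in these hunks apply the same per-pixel step to packed 4:2:2 data, differing only in where the Y and U/V bytes sit. In isolation that step is the usual BT.601 fixed-point approximation, roughly as follows (clamp() stands in for the file's SAT() macro; these are the common ITU-R constants and may differ slightly from this file's):

static unsigned char clamp(int v)
{
    return (unsigned char)(v < 0 ? 0 : v > 255 ? 255 : v);
}

/* One YUV triple -> one BGR pixel, BT.601 integer approximation. */
static void yuv_to_bgr(int y, int u, int v, unsigned char bgr[3])
{
    int c = y - 16, d = u - 128, e = v - 128;
    bgr[2] = clamp((298*c + 409*e + 128) >> 8);          /* R */
    bgr[1] = clamp((298*c - 100*d - 208*e + 128) >> 8);  /* G */
    bgr[0] = clamp((298*c + 516*d + 128) >> 8);          /* B */
}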
@@ -1716,8 +1716,8 @@ uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
/* convert from mjpeg to rgb24 */
static bool
mjpeg_to_rgb24 (int width, int height,
unsigned char *src, int length,
unsigned char *dst)
{
cv::Mat temp=cv::imdecode(cv::Mat(std::vector<uchar>(src, src + length)), 1);
if( !temp.data || temp.cols != width || temp.rows != height )
@@ -2126,85 +2126,85 @@ static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) {
switch (capture->palette)
{
case PALETTE_BGR24:
memcpy((char *)capture->frame.imageData,
(char *)capture->buffers[capture->bufferIndex].start,
capture->frame.imageSize);
break;
case PALETTE_YVU420:
yuv420p_to_rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
case PALETTE_YUV411P:
yuv411p_to_rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
#ifdef HAVE_JPEG
#ifdef __USE_GNU
/* support for MJPEG is only available with libjpeg and gcc,
because it uses libjpeg and fmemopen()
*/
case PALETTE_MJPEG:
if (!mjpeg_to_rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)(capture->buffers[capture->bufferIndex]
.start),
capture->buffers[capture->bufferIndex].length,
(unsigned char*)capture->frame.imageData))
return 0;
break;
#endif
#endif
case PALETTE_YUYV:
yuyv_to_rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
case PALETTE_UYVY:
uyvy_to_rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
case PALETTE_SBGGR8:
bayer2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[capture->bufferIndex].start,
(unsigned char*)capture->frame.imageData);
break;
case PALETTE_SN9C10X:
sonix_decompress_init();
sonix_decompress(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[capture->bufferIndex].start,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start);
bayer2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
(unsigned char*)capture->frame.imageData);
break;
case PALETTE_SGBRG:
sgbrg2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
(unsigned char*)capture->frame.imageData);
break;
}
} else
#endif /* HAVE_CAMV4L2 */
{
switch(capture->imageProperties.palette) {
case VIDEO_PALETTE_RGB24:
memcpy((char *)capture->frame.imageData,
@@ -2778,8 +2778,8 @@ static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){
if (capture->buffers[MAX_V4L_BUFFERS].start)
{
free(capture->buffers[MAX_V4L_BUFFERS].start);
capture->buffers[MAX_V4L_BUFFERS].start = 0;
}
}
#endif /* HAVE_CAMV4L2 */

View File

@@ -103,7 +103,7 @@ public:
virtual bool setProperty(int, double);
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual int getCaptureDomain() { return CV_CAP_VFW; } // Return the type of the capture object: CV_CAP_VFW, etc...
protected:
void init();
@@ -182,12 +182,12 @@ bool CvCaptureAVI_VFW::open( const char* filename )
getframe = AVIStreamGetFrameOpen( avistream, &bmihdr );
if( getframe != 0 )
return true;
// Attempt to open as 8-bit AVI.
bmihdr = icvBitmapHeader( size.width, size.height, 8);
getframe = AVIStreamGetFrameOpen( avistream, &bmihdr );
if( getframe != 0 )
return true;
}
}
}
@@ -207,23 +207,23 @@ IplImage* CvCaptureAVI_VFW::retrieveFrame(int)
{
if( avistream && bmih )
{
bool isColor = bmih->biBitCount == 24;
int nChannels = (isColor) ? 3 : 1;
IplImage src;
cvInitImageHeader( &src, cvSize( bmih->biWidth, bmih->biHeight ),
IPL_DEPTH_8U, nChannels, IPL_ORIGIN_BL, 4 );
char* dataPtr = (char*)(bmih + 1);
// Only account for the color map size if we are an 8-bit image and the color map is used
if (!isColor)
{
static int RGBQUAD_SIZE_PER_BYTE = sizeof(RGBQUAD)/sizeof(BYTE);
int offsetFromColormapToData = (int)bmih->biClrUsed*RGBQUAD_SIZE_PER_BYTE;
dataPtr += offsetFromColormapToData;
}
cvSetData( &src, dataPtr, src.widthStep );
if( !frame || frame->width != src.width || frame->height != src.height )
{
@@ -321,7 +321,7 @@ public:
virtual bool setProperty(int, double) { return false; }
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual int getCaptureDomain() { return CV_CAP_VFW; } // Return the type of the capture object: CV_CAP_VFW, etc...
protected:
void init();
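
One detail worth keeping in mind about this file's CvCaptureAVI_VFW::retrieveFrame above: the decoded AVI frame is not copied into a fresh buffer first; an IplImage header is wrapped around the DIB memory in place. The same wrap-don't-copy pattern in isolation (dimensions and the pixel buffer are illustrative):

#include <opencv2/core/core_c.h>

int main(void)
{
    static unsigned char pixels[480*640*3];      /* externally owned BGR data */
    IplImage hdr;
    cvInitImageHeader( &hdr, cvSize(640, 480), IPL_DEPTH_8U, 3,
                       IPL_ORIGIN_BL, 4 );       /* bottom-left origin, like a DIB */
    cvSetData( &hdr, pixels, hdr.widthStep );    /* no copy: header points at buffer */
    return 0;
}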

modules/highgui/src/cap_ximea.cpp Executable file → Normal file
View File

File diff suppressed because it is too large

View File

@@ -1,19 +1,19 @@
From:
http://iconeden.com/icon/milky-a-free-vector-iconset.html
License Agreement
This is a legal agreement between you (the downloader) and IconEden.com. On download of any royalty-free icons from our website you agree to the following:
All of the icons remain the property of IconEden.com. The icons can be used royalty-free by the license for any personal or commercial project including web application, web design, software application, mobile application, documentation, presentation, computer game, advertising, film, video.
You may modify the icons in shape, color, and/or file format and use the modified icons royalty-free according to the license terms for any personal or commercial product.
The license does not permit the following uses:
1. The icons may not be resold, sublicensed, rented, transferred or otherwise made available for use or detached from a product, software application or web page;
2. The icons may not be placed on any electronic bulletin board or downloadable format;
You may not use, or allow anyone else to use the icons to create pornographic, libelous, obscene, or defamatory material.
All icon files are provided "as is". You agree not to hold IconEden.com liable for any damages that may occur due to use, or inability to use, icons or image data from IconEden.com.

View File

@@ -102,7 +102,7 @@ public:
protected:
string m_description;
string m_filename;
vector<uchar>* m_buf;
bool m_buf_supported;

View File

@@ -170,7 +170,7 @@ bool BmpDecoder::readHeader()
m_type = iscolor ? CV_8UC3 : CV_8UC1;
m_origin = m_height > 0 ? IPL_ORIGIN_BL : IPL_ORIGIN_TL;
m_height = std::abs(m_height);
if( !result )
{
m_offset = -1;

View File

@@ -61,10 +61,10 @@ enum BmpCompression
class BmpDecoder : public BaseImageDecoder
{
public:
BmpDecoder();
~BmpDecoder();
bool readData( Mat& img );
bool readHeader();
void close();
@@ -72,7 +72,7 @@ public:
ImageDecoder newDecoder() const;
protected:
RLByteStream m_strm;
PaletteEntry m_palette[256];
int m_origin;
@@ -88,7 +88,7 @@ class BmpEncoder : public BaseImageEncoder
public:
BmpEncoder();
~BmpEncoder();
bool write( const Mat& img, const vector<int>& params );
ImageEncoder newEncoder() const;

View File

@@ -107,7 +107,7 @@ bool ExrDecoder::readHeader()
bool result = false;
m_file = new InputFile( m_filename.c_str() );
if( !m_file ) // probably paranoid
return false;
@@ -167,7 +167,7 @@ bool ExrDecoder::readHeader()
uintcnt += ( m_blue->type == UINT );
}
m_type = (chcnt == uintcnt) ? UINT : FLOAT;
m_isfloat = (m_type == FLOAT);
}
@@ -182,7 +182,7 @@ bool ExrDecoder::readData( Mat& img )
{
m_native_depth = CV_MAT_DEPTH(type()) == img.depth();
bool color = img.channels() > 1;
uchar* data = img.data;
int step = img.step;
bool justcopy = m_native_depth;
@@ -549,12 +549,12 @@ void ExrDecoder::RGBToGray( float *in, float *out )
}
}
ImageDecoder ExrDecoder::newDecoder() const
{
return new ExrDecoder;
}
/////////////////////// ExrEncoder ///////////////////
@@ -728,8 +728,8 @@ bool ExrEncoder::write( const Mat& img, const vector<int>& )
ImageEncoder ExrEncoder::newEncoder() const
{
return new ExrEncoder;
}
}
#endif

View File

@@ -45,7 +45,7 @@ bool ImageIODecoder::checkSignature( const string& signature ) const
// TODO: implement real signature check
return true;
}
ImageDecoder ImageIODecoder::newDecoder() const
{
return new ImageIODecoder;
@@ -162,31 +162,31 @@ bool ImageIODecoder::readData( Mat& img )
int bitmapIndex = 0;
if( color == CV_LOAD_IMAGE_COLOR )
{
uchar * base = data;
for (int y = 0; y < m_height; y++)
{
uchar * line = base + y * step;
for (int x = 0; x < m_width; x++)
{
// Blue channel
line[0] = bitdata[bitmapIndex + 2];
// Green channel
line[1] = bitdata[bitmapIndex + 1];
// Red channel
line[2] = bitdata[bitmapIndex + 0];
line += 3;
bitmapIndex += bpp;
}
}
}
else if( color == CV_LOAD_IMAGE_GRAYSCALE )
{
for (int y = 0; y < m_height; y++)
memcpy (data + y * step, bitmap + y * m_width, m_width);
}
free( bitmap );
@@ -212,7 +212,7 @@ ImageEncoder ImageIOEncoder::newEncoder() const
{
return new ImageIOEncoder;
}
static
CFStringRef FilenameToUTI( const char* filename )
{
@@ -264,7 +264,7 @@ bool ImageIOEncoder::write( const Mat& img, const vector<int>& params )
int _channels = img.channels();
const uchar* data = img.data;
int step = img.step;
// Determine the appropriate UTI based on the filename extension
CFStringRef imageUTI = FilenameToUTI( m_filename.c_str() );
@@ -319,30 +319,30 @@ bool ImageIOEncoder::write( const Mat& img, const vector<int>& params )
if (bpp == 4)
{
int bitmapIndex = 0;
const uchar * base = data;
for (int y = 0; y < height; y++)
{
const uchar * line = base + y * step;
for (int x = 0; x < width; x++)
{
// Blue channel
bitmapData[bitmapIndex + 2] = line[0];
// Green channel
bitmapData[bitmapIndex + 1] = line[1];
// Red channel
bitmapData[bitmapIndex + 0] = line[2];
line += 3;
bitmapIndex += bpp;
}
}
}
else if (bpp == 1)
{
for (int y = 0; y < height; y++)
memcpy (bitmapData + y * width, data + y * step, width);
}
// Turn the bitmap context into an imageRef

View File

@@ -1,6 +1,6 @@
/*
* grfmt_imageio.h
*
* Created by Morgan Conbere on 5/17/07.
*
@@ -16,7 +16,7 @@
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
#include <MobileCoreServices/MobileCoreServices.h>
#include <ImageIO/ImageIO.h>
#else
@@ -31,21 +31,21 @@ namespace cv
class ImageIODecoder : public BaseImageDecoder
{
public:
ImageIODecoder();
~ImageIODecoder();
bool readData( Mat& img );
bool readHeader();
void close();
size_t signatureLength() const;
bool checkSignature( const string& signature ) const;
ImageDecoder newDecoder() const;
protected:
CGImageRef imageRef;
};

View File

@@ -359,7 +359,7 @@ bool PngEncoder::write( const Mat& img, const vector<int>& params )
int compression_level = -1; // Invalid value to allow setting 0-9 as valid
int compression_strategy = Z_RLE; // Default strategy
bool isBilevel = false;
for( size_t i = 0; i < params.size(); i += 2 )
{
@@ -375,7 +375,7 @@ bool PngEncoder::write( const Mat& img, const vector<int>& params )
}
if( params[i] == CV_IMWRITE_PNG_BILEVEL )
{
isBilevel = params[i+1] != 0;
}
}
@@ -402,8 +402,8 @@ bool PngEncoder::write( const Mat& img, const vector<int>& params )
png_write_info( png_ptr, info_ptr );
if (isBilevel)
png_set_packing(png_ptr);
png_set_bgr( png_ptr );
if( !isBigEndian() )
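
The compression and bilevel settings handled above are the ones a caller toggles through imwrite's params vector; a minimal sketch of driving this encoder path from user code (file names are illustrative):

#include <opencv2/highgui/highgui.hpp>
#include <vector>

int main()
{
    cv::Mat mask = cv::imread("mask.png", 0);        // 8-bit single channel
    std::vector<int> params;
    params.push_back(CV_IMWRITE_PNG_BILEVEL);        // pack to 1 bit per pixel
    params.push_back(1);
    params.push_back(CV_IMWRITE_PNG_COMPRESSION);    // zlib level 0-9
    params.push_back(9);
    return cv::imwrite("mask_1bit.png", mask, params) ? 0 : 1;
}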

View File

@@ -54,7 +54,7 @@ namespace cv
class PngDecoder : public BaseImageDecoder
{
public:
PngDecoder();
virtual ~PngDecoder();
@@ -86,7 +86,7 @@ public:
bool isFormatSupported( int depth ) const;
bool write( const Mat& img, const vector<int>& params );
ImageEncoder newEncoder() const;
protected:

View File

@@ -69,7 +69,7 @@ static int ReadNumber( RLByteStream& strm, int maxdigits )
}
while( code != '\n' && code != '\r' );
}
code = strm.getByte();
while( isspace(code))
@@ -128,7 +128,7 @@ void PxMDecoder::close()
bool PxMDecoder::readHeader()
{
bool result = false;
if( !m_buf.empty() )
{
if( !m_strm.open(m_buf) )
@@ -151,13 +151,13 @@ bool PxMDecoder::readHeader()
case '3': case '6': m_bpp = 24; break;
default: throw RBS_BAD_HEADER;
}
m_binary = code >= '4';
m_type = m_bpp > 8 ? CV_8UC3 : CV_8UC1;
m_width = ReadNumber( m_strm, INT_MAX );
m_height = ReadNumber( m_strm, INT_MAX );
m_maxval = m_bpp == 1 ? 1 : ReadNumber( m_strm, INT_MAX );
if( m_maxval > 65535 )
throw RBS_BAD_HEADER;
@@ -166,7 +166,7 @@ bool PxMDecoder::readHeader()
if( m_maxval > 255 )
m_type = CV_MAKETYPE(CV_16U, CV_MAT_CN(m_type));
if( m_width > 0 && m_height > 0 && m_maxval > 0 && m_maxval < (1 << 16))
{
m_offset = m_strm.getPos();
result = true;
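
For reference, the header being parsed here is the plain PNM family: a magic number 'P1'..'P6' (binary variants are '4'..'6', hence m_binary = code >= '4'), then whitespace-separated width and height, then maxval for everything but 1-bpp bitmaps. A binary 8-bit graymap of 3x2 pixels therefore begins:

P5
3 2
255

with the raw pixel bytes following immediately after the maxval line.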
@@ -201,7 +201,7 @@ bool PxMDecoder::readData( Mat& img )
if( m_offset < 0 || !m_strm.isOpened())
return false;
AutoBuffer<uchar,1024> _src(src_pitch + 32);
uchar* src = _src;
AutoBuffer<uchar,1024> _gray_palette;
@@ -222,7 +222,7 @@ bool PxMDecoder::readData( Mat& img )
try
{
m_strm.setPos( m_offset );
switch( m_bpp )
{
////////////////////////// 1 BPP /////////////////////////
@@ -245,7 +245,7 @@ bool PxMDecoder::readData( Mat& img )
for( y = 0; y < m_height; y++, data += step )
{
m_strm.getBytes( src, src_pitch );
if( color )
FillColorRow1( data, src, m_width, palette );
else

View File

@@ -52,10 +52,10 @@ namespace cv
class PxMDecoder : public BaseImageDecoder
{
public:
PxMDecoder();
virtual ~PxMDecoder();
bool readData( Mat& img );
bool readHeader();
void close();
@@ -65,7 +65,7 @@ public:
ImageDecoder newDecoder() const;
protected:
RLByteStream m_strm;
PaletteEntry m_palette[256];
int m_bpp;

View File

@@ -78,7 +78,7 @@ public:
ImageDecoder newDecoder() const;
protected:
RMByteStream m_strm;
PaletteEntry m_palette[256];
int m_bpp;

View File

@@ -401,7 +401,7 @@ Mat imdecode( InputArray _buf, int flags, Mat* dst )
imdecode_( buf, flags, LOAD_MAT, dst );
return *dst;
}
bool imencode( const string& ext, InputArray _image,
vector<uchar>& buf, const vector<int>& params )
{
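
imencode is the inverse of the imdecode path above: it compresses into a caller-supplied byte vector rather than reading one. A minimal in-memory round trip through both (the input file name is illustrative):

#include <opencv2/highgui/highgui.hpp>
#include <vector>

int main()
{
    cv::Mat img = cv::imread("input.png", 1);      // force 3-channel
    std::vector<uchar> jpeg;
    if( !cv::imencode(".jpg", img, jpeg) )         // codec picked by extension
        return 1;
    cv::Mat back = cv::imdecode(cv::Mat(jpeg), 1); // decode straight from memory
    return back.data ? 0 : 1;
}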

View File

@@ -42,7 +42,7 @@
#ifndef _UTILS_H_
#define _UTILS_H_
struct PaletteEntry
{
unsigned char b, g, r, a;
};

View File

@@ -396,9 +396,9 @@ void cv::pointCloudShow(const string& winname, const GlCamera& camera, const GlA
{
#ifndef HAVE_OPENGL
CV_Error(CV_OpenGlNotSupported, "The library is compiled without OpenGL support");
(void)winname;
(void)camera;
(void)arr;
#else
namedWindow(winname, WINDOW_OPENGL);
@@ -442,10 +442,10 @@ void cv::pointCloudShow(const string& winname, const GlCamera& camera, const GlA
void cv::pointCloudShow(const std::string& winname, const cv::GlCamera& camera, InputArray points, InputArray colors)
{
#ifndef HAVE_OPENGL
(void)winname;
(void)camera;
(void)points;
(void)colors;
CV_Error(CV_OpenGlNotSupported, "The library is compiled without OpenGL support");
#else
namedWindow(winname, WINDOW_OPENGL);

modules/highgui/src/window_QT.cpp Executable file → Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large