Deleted all trailing whitespace.
@@ -19,7 +19,7 @@ Follow this step by step guide to link OpenCV to iOS.
1. Create a new Xcode project.

2. Now we need to link *opencv2.framework* with Xcode. Select the Project Navigator in the left-hand panel and click on the project name.

3. Under TARGETS, click on Build Phases and expand the Link Binary With Libraries option.
@@ -29,10 +29,10 @@ Follow this step by step guide to link OpenCV to iOS.
.. image:: images/linking_opencv_ios.png
   :alt: OpenCV iOS in Xcode
   :align: center

*Hello OpenCV iOS Application*
===============================

Now we will learn how to write a simple Hello World Application in Xcode using OpenCV.
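
A quick way to verify that everything is wired up before walking through the screenshots below is to log the library version. This is only a minimal sketch, not necessarily the exact code shown in the tutorial images; it assumes the prefix header already imports *opencv2/opencv.hpp* (as described in the linking section):

.. code-block:: objc

   // Minimal sanity-check sketch (an assumption, not the tutorial's verbatim
   // code): log the OpenCV version to confirm the framework is linked.
   - (void)viewDidLoad
   {
       [super viewDidLoad];
       NSLog(@"OpenCV version: %s", CV_VERSION); // CV_VERSION is defined by opencv2/opencv.hpp
   }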
@@ -49,7 +49,7 @@ Now we will learn how to write a simple Hello World Application in Xcode using O
.. image:: images/header_directive.png
   :alt: header
   :align: center

.. container:: enumeratevisibleitemswithsquare
@@ -61,7 +61,7 @@ Now we will learn how to write a simple Hello World Application in Xcode using O
.. image:: images/view_did_load.png
   :alt: view did load
   :align: center

.. container:: enumeratevisibleitemswithsquare
@@ -73,4 +73,4 @@ Now we will learn how to write a simple Hello World Application in Xcode using O
.. image:: images/output.png
   :alt: output
   :align: center
@@ -21,9 +21,9 @@ In *OpenCV* all the image processing operations are done on *Mat*. iOS uses UIIm
       CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
       CGFloat cols = image.size.width;
       CGFloat rows = image.size.height;

       cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels

       CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to data
                                                       cols,       // Width of bitmap
                                                       rows,       // Height of bitmap
@@ -32,11 +32,11 @@ In *OpenCV* all the image processing operations are done on *Mat*. iOS uses UIIm
                                                       colorSpace, // Colorspace
                                                       kCGImageAlphaNoneSkipLast |
                                                       kCGBitmapByteOrderDefault); // Bitmap info flags

       CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
       CGContextRelease(contextRef);
       CGColorSpaceRelease(colorSpace);

       return cvMat;
   }
@@ -47,9 +47,9 @@ In *OpenCV* all the image processing operations are done on *Mat*. iOS uses UIIm
       CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
       CGFloat cols = image.size.width;
       CGFloat rows = image.size.height;

       cv::Mat cvMat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel

       CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to data
                                                       cols,       // Width of bitmap
                                                       rows,       // Height of bitmap
@@ -58,11 +58,11 @@ In *OpenCV* all the image processing operations are done on *Mat*. iOS uses UIIm
                                                       colorSpace, // Colorspace
                                                       kCGImageAlphaNoneSkipLast |
                                                       kCGBitmapByteOrderDefault); // Bitmap info flags

       CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
       CGContextRelease(contextRef);
       CGColorSpaceRelease(colorSpace);

       return cvMat;
   }
@@ -81,15 +81,15 @@ After the processing we need to convert it back to UIImage.
   {
       NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
       CGColorSpaceRef colorSpace;

       if (cvMat.elemSize() == 1) {
           colorSpace = CGColorSpaceCreateDeviceGray();
       } else {
           colorSpace = CGColorSpaceCreateDeviceRGB();
       }

       CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

       // Creating CGImage from cv::Mat
       CGImageRef imageRef = CGImageCreate(cvMat.cols, //width
                                           cvMat.rows, //height
@@ -103,15 +103,15 @@ After the processing we need to convert it back to UIImage.
                                           false, //should interpolate
                                           kCGRenderingIntentDefault //intent
                                           );

       // Getting UIImage from CGImage
       UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
       CGImageRelease(imageRef);
       CGDataProviderRelease(provider);
       CGColorSpaceRelease(colorSpace);

       return finalImage;
   }
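
Putting both directions together, a typical round trip looks roughly like the sketch below. The selector names cvMatFromUIImage: and UIImageFromCVMat: are assumptions for the two conversion snippets above, not necessarily the names used in your project:

.. code-block:: objc

   // Usage sketch (assumed helper names): convert a UIImage to cv::Mat,
   // run any OpenCV operation on it, then convert back for display.
   UIImage *input = [UIImage imageNamed:@"test.jpg"];   // placeholder image name
   cv::Mat mat = [self cvMatFromUIImage:input];
   cv::GaussianBlur(mat, mat, cv::Size(9, 9), 0);       // example processing step
   imageView.image = [self UIImageFromCVMat:mat];       // assumes a UIImageView outlet
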
*Output*
@@ -119,9 +119,9 @@ After the processing we need to convert it back to UIImage.
.. image:: images/output.jpg
   :alt: header
   :align: center

Check out the running code with more image effects on `YouTube <http://www.youtube.com/watch?v=Ko3K_xdhJ1I>`_.

.. raw:: html
@@ -69,7 +69,7 @@
.. toctree::
   :hidden:

   ../hello/hello
   ../image_manipulation/image_manipulation
   ../video_processing/video_processing
@@ -18,34 +18,34 @@ Including OpenCV library in your iOS project
The OpenCV library comes as a so-called framework, which you can directly drag and drop into your Xcode project. Download the latest binary from <http://sourceforge.net/projects/opencvlibrary/files/opencv-ios/>. Alternatively, follow this guide :ref:`iOS-Installation` to compile the framework manually. Once you have the framework, just drag and drop it into Xcode:
.. image:: images/xcode_hello_ios_framework_drag_and_drop.png

You also have to locate the prefix header that is used for all header files in the project. The file is typically located at "ProjectName/Supporting Files/ProjectName-Prefix.pch". There, you have to add an include statement to import the OpenCV library. Make sure you include OpenCV before UIKit and Foundation; otherwise you will get weird compile errors because macros like min and max are defined multiple times. For example, the prefix header could look like the following:

.. code-block:: objc
   :linenos:

   //
   // Prefix header for all source files of the 'VideoFilters' target in the 'VideoFilters' project
   //

   #import <Availability.h>

   #ifndef __IPHONE_4_0
   #warning "This project uses features only available in iOS SDK 4.0 and later."
   #endif

   #ifdef __cplusplus
   #import <opencv2/opencv.hpp>
   #endif

   #ifdef __OBJC__
   #import <UIKit/UIKit.h>
   #import <Foundation/Foundation.h>
   #endif

Example video frame processing project
--------------------------------------

User Interface
@@ -60,18 +60,18 @@ Make sure to add and connect the IBOutlets and IBActions to the corresponding Vi
.. code-block:: objc
   :linenos:

   @interface ViewController : UIViewController
   {
       IBOutlet UIImageView* imageView;
       IBOutlet UIButton* button;
   }

   - (IBAction)actionStart:(id)sender;

   @end

Adding the Camera
~~~~~~~~~~~~~~~~~
@@ -79,21 +79,21 @@ We add a camera controller to the view controller and initialize it when the vie
.. code-block:: objc
   :linenos:

   #import <opencv2/highgui/cap_ios.h>
   using namespace cv;

   @interface ViewController : UIViewController
   {
       ...
       CvVideoCamera* videoCamera;
   }
   ...
   @property (nonatomic, retain) CvVideoCamera* videoCamera;

   @end

.. code-block:: objc
   :linenos:
@@ -101,7 +101,7 @@ We add a camera controller to the view controller and initialize it when the vie
   {
       [super viewDidLoad];
       // Do any additional setup after loading the view, typically from a nib.

       self.videoCamera = [[CvVideoCamera alloc] initWithParentView:imageView];
       self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
       self.videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
@@ -109,7 +109,7 @@ We add a camera controller to the view controller and initialize it when the vie
       self.videoCamera.defaultFPS = 30;
       self.videoCamera.grayscale = NO;
   }

In this case, we initialize the camera and provide the imageView as a target for rendering each frame. CvVideoCamera is basically a wrapper around AVFoundation, so we expose some of the AVFoundation camera options as properties. For example, we want to use the front camera, set the video size to 352x288 and a video orientation (the video camera normally outputs in landscape mode, which results in transposed data when you design a portrait application).
The property defaultFPS sets the FPS of the camera. If the processing is slower than the desired FPS, frames are automatically dropped.
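
If you need different capture settings, the same properties accept other AVFoundation values. A small sketch, assuming the back camera and portrait output are wanted; the defaultAVCaptureVideoOrientation property is taken from the CvVideoCamera interface and may vary between OpenCV versions:

.. code-block:: objc

   // Sketch of alternative camera settings (assumptions, not tutorial code).
   self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
   self.videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
   self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
   self.videoCamera.defaultFPS = 15; // lower FPS leaves more time per frame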
@@ -153,14 +153,14 @@ We follow the delegation pattern, which is very common in iOS, to provide access
.. code-block:: objc
   :linenos:

   @interface ViewController : UIViewController<CvVideoCameraDelegate>

.. code-block:: objc
   :linenos:

   - (void)viewDidLoad
   {
       ...
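
The elided part of viewDidLoad is where the camera is told about its delegate. A minimal sketch of that wiring, assuming the view controller itself adopts CvVideoCameraDelegate as declared above (an assumption, not the tutorial's verbatim code):

.. code-block:: objc

   // Assumed wiring: register the controller as the camera's delegate so
   // that processImage: is invoked for every captured frame.
   self.videoCamera.delegate = self;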
@@ -194,13 +194,13 @@ From here you can start processing video frames. For example the following snipp
.. code-block:: objc
   :linenos:

   - (void)processImage:(Mat&)image;
   {
       // Do some OpenCV stuff with the image
       Mat image_copy;
       cvtColor(image, image_copy, CV_BGRA2BGR);

       // invert image
       bitwise_not(image_copy, image_copy);
       cvtColor(image_copy, image, CV_BGR2BGRA);
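
Any other OpenCV routine can be dropped into the same spot. As one more illustration, a sketch of an edge-detecting variant of processImage: (an assumption, not part of the tutorial; it relies on the same BGRA frame layout as above):

.. code-block:: objc

   // Sketch of an alternative frame processor: grayscale + Canny edges,
   // converted back to BGRA so the result can be rendered.
   - (void)processImage:(Mat&)image
   {
       Mat gray, edges;
       cvtColor(image, gray, CV_BGRA2GRAY);  // BGRA frame -> grayscale
       Canny(gray, edges, 80, 120);          // detect edges
       cvtColor(edges, image, CV_GRAY2BGRA); // back to BGRA for display
   }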
@@ -214,9 +214,9 @@ Finally, we have to tell the camera to actually start/stop working. The followin
.. code-block:: objc
   :linenos:

   #pragma mark - UI Actions

   - (IBAction)actionStart:(id)sender;
   {
       [self.videoCamera start];