Revert support for audio
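This reverts the experimental audio support in the iOS camera backend. The diff spans the camera header (the CvAbstractCamera and CvVideoCamera declarations) and the two camera implementation files: it removes the audioCaptureConnection ivar and property, the AVCaptureFileOutputRecordingDelegate conformance and its movie-file output path, the timestampMs property, and the disabled (#if 0) audio pipeline code. recordCountDown becomes recordingCountDown, and the asset writer session now starts from the presentation timestamp of the current sample buffer, kept in a new lastSampleTime ivar.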
@@ -40,7 +40,6 @@
 {
     AVCaptureSession* captureSession;
     AVCaptureConnection* videoCaptureConnection;
-    AVCaptureConnection* audioCaptureConnection;
     AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

     UIDeviceOrientation currentDeviceOrientation;
@@ -64,7 +63,6 @@

 @property (nonatomic, retain) AVCaptureSession* captureSession;
 @property (nonatomic, retain) AVCaptureConnection* videoCaptureConnection;
-@property (nonatomic, retain) AVCaptureConnection* audioCaptureConnection;

 @property (nonatomic, readonly) BOOL running;
 @property (nonatomic, readonly) BOOL captureSessionLoaded;
@@ -109,12 +107,11 @@
 #ifdef __cplusplus
 // delegate method for processing image frames
 - (void)processImage:(cv::Mat&)image;

 #endif

 @end

-@interface CvVideoCamera : CvAbstractCamera<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate>
+@interface CvVideoCamera : CvAbstractCamera<AVCaptureVideoDataOutputSampleBufferDelegate>
 {
     AVCaptureVideoDataOutput *videoDataOutput;
@@ -129,6 +126,8 @@
     AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
     AVAssetWriter* recordAssetWriter;

+    CMTime lastSampleTime;
+
 }

 @property (nonatomic, assign) id<CvVideoCameraDelegate> delegate;
@@ -139,7 +138,6 @@
 @property (nonatomic, retain) AVAssetWriterInput* recordAssetWriterInput;
 @property (nonatomic, retain) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
 @property (nonatomic, retain) AVAssetWriter* recordAssetWriter;
-@property (nonatomic, readonly) int64_t timestampMs;

 - (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
 - (void)layoutPreviewLayer;
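With the AVCaptureFileOutputRecordingDelegate conformance gone, the interface above is video-only. For orientation, a minimal usage sketch against this header (the client class is hypothetical, ARC is assumed, and the include path varies between OpenCV releases):

#import <opencv2/videoio/cap_ios.h>   // older releases ship this under opencv2/highgui/
#import <opencv2/imgproc.hpp>

@interface MyViewController : UIViewController<CvVideoCameraDelegate>
@property (nonatomic, retain) CvVideoCamera* videoCamera;
@end

@implementation MyViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    self.videoCamera = [[CvVideoCamera alloc] initWithParentView:self.view];
    self.videoCamera.delegate = self;
    self.videoCamera.recordVideo = YES;   // video-only recording after this revert
    [self.videoCamera start];
}

// delegate callback declared in the header above; the BGRA frame may be edited in place
- (void)processImage:(cv::Mat&)image
{
    cv::Mat gray;
    cv::cvtColor(image, gray, cv::COLOR_BGRA2GRAY);
    cv::cvtColor(gray, image, cv::COLOR_GRAY2BGRA);
}

@end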
@@ -35,9 +35,7 @@

 #pragma mark - Private Interface

-@interface CvAbstractCamera () {
-    AVCaptureAudioDataOutput *audioOut;
-}
+@interface CvAbstractCamera ()

 @property (nonatomic, retain) AVCaptureVideoPreviewLayer* captureVideoPreviewLayer;

@@ -74,7 +72,6 @@
 @synthesize captureSession;
 @synthesize captureVideoPreviewLayer;
 @synthesize videoCaptureConnection;
-@synthesize audioCaptureConnection;
 @synthesize running;
 @synthesize captureSessionLoaded;
 @synthesize useAVCaptureVideoPreviewLayer;
@@ -209,7 +206,6 @@
     self.captureSession = nil;
     self.captureVideoPreviewLayer = nil;
     self.videoCaptureConnection = nil;
-    self.audioCaptureConnection = nil;
     captureSessionLoaded = NO;
 }

@@ -276,123 +272,8 @@
     } else {
         NSLog(@"[Camera] Error: could not set session preset");
     }

 }

-#if 0
-- (void)sampleCaptureSessionSetup {
-
-    if ( _captureSession ) {
-        return;
-    }
-
-    _captureSession = [[AVCaptureSession alloc] init];
-
-    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
-    _applicationWillEnterForegroundNotificationObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[UIApplication sharedApplication] queue:nil usingBlock:^(NSNotification *note) {
-        // Retain self while the capture session is alive by referencing it in this observer block which is tied to the session lifetime
-        // Client must stop us running before we can be deallocated
-        [self applicationWillEnterForeground];
-    }];
-
-#if RECORD_AUDIO
-    /* Audio */
-    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
-    AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
-    if ( [_captureSession canAddInput:audioIn] ) {
-        [_captureSession addInput:audioIn];
-    }
-    [audioIn release];
-
-    AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
-    // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio
-    dispatch_queue_t audioCaptureQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.audio", DISPATCH_QUEUE_SERIAL );
-    [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
-    [audioCaptureQueue release];
-
-    if ( [_captureSession canAddOutput:audioOut] ) {
-        [_captureSession addOutput:audioOut];
-    }
-    self.audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
-    [audioOut release];
-#endif // RECORD_AUDIO
-
-    /* Video */
-    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
-    _videoDevice = videoDevice;
-    AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:nil];
-    if ( [_captureSession canAddInput:videoIn] ) {
-        [_captureSession addInput:videoIn];
-    }
-    [videoIn release];
-
-    AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
-    videoOut.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(_renderer.inputPixelFormat) };
-    [videoOut setSampleBufferDelegate:self queue:_videoDataOutputQueue];
-
-    // RosyWriter records videos and we prefer not to have any dropped frames in the video recording.
-    // By setting alwaysDiscardsLateVideoFrames to NO we ensure that minor fluctuations in system load or in our processing time for a given frame won't cause framedrops.
-    // We do however need to ensure that on average we can process frames in realtime.
-    // If we were doing preview only we would probably want to set alwaysDiscardsLateVideoFrames to YES.
-    videoOut.alwaysDiscardsLateVideoFrames = NO;
-
-    if ( [_captureSession canAddOutput:videoOut] ) {
-        [_captureSession addOutput:videoOut];
-    }
-
-    _videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
-
-    int frameRate;
-    NSString *sessionPreset = AVCaptureSessionPresetHigh;
-    CMTime frameDuration = kCMTimeInvalid;
-    // For single core systems like iPhone 4 and iPod Touch 4th Generation we use a lower resolution and framerate to maintain real-time performance.
-    if ( [NSProcessInfo processInfo].processorCount == 1 )
-    {
-        if ( [_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480] ) {
-            sessionPreset = AVCaptureSessionPreset640x480;
-        }
-        frameRate = 15;
-    }
-    else
-    {
-#if ! USE_OPENGL_RENDERER
-        // When using the CPU renderers or the CoreImage renderer we lower the resolution to 720p so that all devices can maintain real-time performance (this is primarily for A5 based devices like iPhone 4s and iPod Touch 5th Generation).
-        if ( [_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720] ) {
-            sessionPreset = AVCaptureSessionPreset1280x720;
-        }
-#endif // ! USE_OPENGL_RENDERER
-
-        frameRate = 30;
-    }
-
-    _captureSession.sessionPreset = sessionPreset;
-
-    frameDuration = CMTimeMake( 1, frameRate );
-
-    NSError *error = nil;
-    if ( [videoDevice lockForConfiguration:&error] ) {
-        videoDevice.activeVideoMaxFrameDuration = frameDuration;
-        videoDevice.activeVideoMinFrameDuration = frameDuration;
-        [videoDevice unlockForConfiguration];
-    }
-    else {
-        NSLog( @"videoDevice lockForConfiguration returned error %@", error );
-    }
-
-    // Get the recommended compression settings after configuring the session/device.
-#if RECORD_AUDIO
-    _audioCompressionSettings = [[audioOut recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
-#endif
-    _videoCompressionSettings = [[videoOut recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
-
-    self.videoOrientation = _videoConnection.videoOrientation;
-
-    [videoOut release];
-
-    return;
-}
-#endif
-
 - (void)createCaptureDevice;
 {
     // setup the device
@@ -400,36 +281,6 @@
     [self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
     NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
     NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");

-#if 0
-    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
-    NSError *error = nil;
-    //AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
-    AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:nil];
-    if (audioInput) {
-        NSLog(@"Adding audio capture devices ");
-        [self.captureSession addInput:audioInput];
-        [audioInput release];
-    }
-
-    // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio
-    audioOut = [[AVCaptureAudioDataOutput alloc] init];
-    dispatch_queue_t audioCaptureQueue = dispatch_queue_create("opencv.ios.audio", DISPATCH_QUEUE_SERIAL );
-    [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
-
-    if ( [self.captureSession canAddOutput:audioOut] ) {
-        [self.captureSession addOutput:audioOut];
-        NSLog(@"audioOut added ");
-    }
-
-    [audioCaptureQueue release];
-
-    self.audioCaptureConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
-
-    NSLog(@"Audio has been setup with callback ");
-#endif
-
 }

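Both #if 0 blocks above were dead code, so these hunks remove the last traces of the audio path. For reference, re-adding audio capture later would look roughly like the deleted block; a condensed sketch (self must also adopt AVCaptureAudioDataOutputSampleBufferDelegate, and the queue label is illustrative):

AVCaptureDevice* audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
if (audioInput && [self.captureSession canAddInput:audioInput]) {
    [self.captureSession addInput:audioInput];
}

// keep audio on its own serial queue so heavy video processing cannot starve it
AVCaptureAudioDataOutput* audioOut = [[AVCaptureAudioDataOutput alloc] init];
dispatch_queue_t audioQueue = dispatch_queue_create("opencv.ios.audio", DISPATCH_QUEUE_SERIAL);
[audioOut setSampleBufferDelegate:self queue:audioQueue];
if ([self.captureSession canAddOutput:audioOut]) {
    [self.captureSession addOutput:audioOut];
}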
@@ -42,24 +42,15 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};


 @interface CvVideoCamera () {
-    NSString* mediaPath;
-    int recordCountDown;
-    CMTime _lastSampleTime;
-    int64_t _timestampMs;
-    dispatch_queue_t movieWriterQueue;
-
+    int recordingCountDown;
 }


 - (void)createVideoDataOutput;
 - (void)createVideoFileOutput;
-- (void)createMovieFileOutput;
-- (NSString*) mediaFileString;

 @property (nonatomic, retain) CALayer *customPreviewLayer;
 @property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
-@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-@property (nonatomic, retain) dispatch_queue_t movieWriterQueue;

 @end

@@ -79,7 +70,6 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};

 @synthesize customPreviewLayer;
 @synthesize videoDataOutput;
-@synthesize movieFileOutput;

 @synthesize recordVideo;
 @synthesize rotateVideo;
@@ -88,24 +78,18 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 @synthesize recordPixelBufferAdaptor;
 @synthesize recordAssetWriter;

-@synthesize timestampMs = _timestampMs;
-
-
-


 #pragma mark - Constructors

 - (id)initWithParentView:(UIView*)parent;
 {
-    recordCountDown = 1000000000;
     self = [super initWithParentView:parent];
     if (self) {
         self.useAVCaptureVideoPreviewLayer = NO;
         self.recordVideo = NO;
         self.rotateVideo = NO;
     }
-    movieWriterQueue = nil;
     return self;
 }

@@ -116,8 +100,7 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};

 - (void)start;
 {
-    recordCountDown = 5;
-    movieWriterQueue = nil;
+    recordingCountDown = 10;
     [super start];

     if (self.recordVideo == YES) {
@@ -125,12 +108,10 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
         if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
             [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
         }

         if (error == nil) {
             NSLog(@"[Camera] Delete file %@", [self videoFileString]);
         }
     }

 }
-
-
@@ -156,9 +137,6 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
         self.recordAssetWriter = nil;
         self.recordAssetWriterInput = nil;
         self.recordPixelBufferAdaptor = nil;
-        if (movieWriterQueue)
-            dispatch_release(movieWriterQueue);
-        self.movieWriterQueue = nil;
     }

     [self.customPreviewLayer removeFromSuperlayer];
@@ -318,9 +296,6 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     }
     [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

-    //self.videoCaptureConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
-    //[self.videoCaptureConnection setEnabled:YES];
-

     // set default FPS
     if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMinFrameDuration) {
@@ -357,35 +332,10 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];


-    if (self.recordVideo == YES && movieWriterQueue == nil) {
-        movieWriterQueue = dispatch_queue_create("opencv_movieWriter", DISPATCH_QUEUE_SERIAL);
-    }
     NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
 }


-- (void)createMovieFileOutput;
-{
-    NSLog(@"createVideoFileOutput...");
-    self.movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
-    CMTime maxDuration = CMTimeMake(30*60, 1);
-    movieFileOutput.maxRecordedDuration = maxDuration;
-    movieFileOutput.minFreeDiskSpaceLimit = (1024L)*(1024L*1024L);
-    movieFileOutput.maxRecordedFileSize = (400L)*(1024L*1024L);
-
-
-    if ([self.captureSession canAddOutput:movieFileOutput]) {
-        [captureSession addOutput:movieFileOutput];
-        NSLog(@"Successfully added movie output ");
-    }
-    else {
-        NSLog(@"Couldn't add movie output ");
-    }
-
-    if (self.recordVideo == YES)
-        [self.movieFileOutput startRecordingToOutputFileURL:[self mediaFileURL] recordingDelegate:self];
-}
-

 - (void)createVideoFileOutput;
 {
@@ -431,7 +381,6 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     [self createVideoDataOutput];
     if (self.recordVideo == YES) {
         [self createVideoFileOutput];
-        //[self createMovieFileOutput];
     }
 }

@@ -473,47 +422,13 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     return pxbuffer;
 }

-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
-didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
-      fromConnections:(NSArray *)connections
-                error:(NSError *)error {
-
-#if 0
-    BOOL recordedSuccessfully = YES;
-    if ([error code] != noErr) {
-        // A problem occurred: Find out if the recording was successful.
-        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
-        if (value) {
-            recordedSuccessfully = [value boolValue];
-        }
-    }
-#endif
-    NSLog(@"Capture File output done ");
-}
-
 #pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate

-- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
-{
-    if (connection == self.audioCaptureConnection) {
-        NSLog(@"Audio sample did drop ");
-        return;
-    }
-    NSLog(@"Video Frame did drop ");
-}
-
-
 - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
 {
     (void)captureOutput;
     (void)connection;

-    if (connection == self.audioCaptureConnection) {
-        //NSLog(@"Audio Sample came in ");
-        return;
-    }
-
-    //NSLog(@"Video sample came in ");
     if (self.delegate) {

         // convert from Core Media to Core Video
@@ -551,22 +466,15 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};

         }

-        CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-        int64_t msec = lastSampleTime.value / (lastSampleTime.timescale / 1000);
-        _timestampMs = msec;
-        //NSLog(@"Timestamp %u / %u, msec = %lu ", lastSampleTime.value, lastSampleTime.timescale, msec);
-
-
-
         // delegate image processing to the delegate
         cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);

+        CGImage* dstImage;
+
         if ([self.delegate respondsToSelector:@selector(processImage:)]) {
             [self.delegate processImage:image];
         }

-        CGImage* dstImage;
-
         // check if matrix data pointer or dimensions were changed by the delegate
         bool iOSimage = false;
         if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
@@ -627,20 +535,18 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};



         // render buffer
-        //dispatch_sync(dispatch_get_main_queue(), ^{
         dispatch_sync(dispatch_get_main_queue(), ^{
             self.customPreviewLayer.contents = (__bridge id)dstImage;
         });

-        if (recordCountDown > 0)
-            recordCountDown--;
-
-        if (self.recordVideo == YES && recordCountDown <= 0) {
-            //CMTimeShow(lastSampleTime);
+        recordingCountDown--;
+        if (self.recordVideo == YES && recordingCountDown < 0) {
+            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+            // CMTimeShow(lastSampleTime);
             if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                 [self.recordAssetWriter startWriting];
-                [self.recordAssetWriter startSessionAtSourceTime:_lastSampleTime];
+                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                 if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                     NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                     return;
@@ -658,8 +564,10 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
                 if (pixelBuffer != nullptr)
                     CVPixelBufferRelease(pixelBuffer);
             }

         }
+
+
         // cleanup
         CGImageRelease(dstImage);

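One note on the recording hunks above: an AVAssetWriter session must be started at the presentation timestamp of the first sample that will be appended, otherwise the movie is front-padded with empty time. That is why the revert samples CMSampleBufferGetPresentationTimeStamp into the new lastSampleTime ivar before starting. The pattern in isolation, assuming recordAssetWriter is already configured:

CMTime firstSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
    [self.recordAssetWriter startWriting];
    // anchor the writer's timeline at the first frame's timestamp
    [self.recordAssetWriter startSessionAtSourceTime:firstSampleTime];
}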
@@ -697,8 +605,7 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};

 - (NSURL *)videoFileURL;
 {
-    //NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
-    NSString *outputPath = self.videoFileString;
+    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
     NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
     NSFileManager *fileManager = [NSFileManager defaultManager];
     if ([fileManager fileExistsAtPath:outputPath]) {
@@ -708,17 +615,6 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 }


-- (NSURL *)mediaFileURL;
-{
-    NSString *outputPath = self.mediaFileString;
-    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
-    NSFileManager *fileManager = [NSFileManager defaultManager];
-    if ([fileManager fileExistsAtPath:outputPath]) {
-        NSLog(@"file exists");
-    }
-    NSLog(@"media URL %@", outputURL);
-    return outputURL;
-}

 - (NSString *)videoFileString;
 {
@@ -726,9 +622,4 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     return outputPath;
 }

-
-- (NSString*) mediaFileString {
-    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"media.mov"];
-    return outputPath;
-}
 @end