iOS camera switching video capturer.

Introduces a new capture class, derived from cricket::VideoCapturer, that
can switch between the front and back cameras, and updates AppRTCDemo to
use it. Some follow-up work to clean up the AppRTCDemo UI is still pending.

BUG=4070
R=magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/48279005

Cr-Commit-Position: refs/heads/master@{#9137}
Author: Zeke Chin
Date:   2015-05-05 07:52:31 -07:00
Parent: 5cb9ce4c74
Commit: 57cc74e32c
17 changed files with 899 additions and 84 deletions
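For context, a minimal sketch of how an app is expected to use the new API
(types and selectors taken from the headers added below; `factory` is an
assumed pre-existing RTCPeerConnectionFactory and constraints creation is
elided):

RTCAVFoundationVideoSource* source =
    [[RTCAVFoundationVideoSource alloc] initWithFactory:factory
                                            constraints:constraints];
RTCVideoTrack* localTrack =
    [[RTCVideoTrack alloc] initWithFactory:factory
                                    source:source
                                   trackId:@"video0"];
// Flip between the front and back cameras at any time while capturing.
source.useBackCamera = !source.useBackCamera;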

File: talk/app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h (new)

@@ -0,0 +1,36 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCAVFoundationVideoSource.h"
#include "avfoundationvideocapturer.h"
@interface RTCAVFoundationVideoSource ()
@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer* capturer;
@end

File: talk/app/webrtc/objc/RTCAVFoundationVideoSource.mm (new)

@@ -0,0 +1,69 @@
/* (libjingle license header omitted; identical to the one in the first file
 * above.) */
#import "RTCAVFoundationVideoSource+Internal.h"
#import "RTCMediaConstraints+Internal.h"
#import "RTCMediaSource+Internal.h"
#import "RTCPeerConnectionFactory+Internal.h"
#import "RTCVideoSource+Internal.h"
@implementation RTCAVFoundationVideoSource
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
constraints:(RTCMediaConstraints*)constraints {
NSParameterAssert(factory);
rtc::scoped_ptr<webrtc::AVFoundationVideoCapturer> capturer;
capturer.reset(new webrtc::AVFoundationVideoCapturer());
rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
factory.nativeFactory->CreateVideoSource(capturer.release(),
constraints.constraints);
return [super initWithMediaSource:source];
}
- (BOOL)useBackCamera {
return self.capturer->GetUseBackCamera();
}
- (void)setUseBackCamera:(BOOL)useBackCamera {
self.capturer->SetUseBackCamera(useBackCamera);
}
- (AVCaptureSession*)captureSession {
return self.capturer->GetCaptureSession();
}
- (webrtc::AVFoundationVideoCapturer*)capturer {
cricket::VideoCapturer* capturer = self.videoSource->GetVideoCapturer();
// This should be safe because no one should have changed the underlying video
// source.
webrtc::AVFoundationVideoCapturer* foundationCapturer =
static_cast<webrtc::AVFoundationVideoCapturer*>(capturer);
return foundationCapturer;
}
@end

File: talk/app/webrtc/objc/RTCPeerConnectionFactory+Internal.h (new)

@@ -0,0 +1,38 @@
/* (libjingle license header omitted; identical to the one in the first file
 * above.) */
#import "RTCPeerConnectionFactory.h"
#include "talk/app/webrtc/peerconnectionfactory.h"
#include "webrtc/base/scoped_ptr.h"
@interface RTCPeerConnectionFactory ()
@property(nonatomic, assign) rtc::scoped_refptr<
webrtc::PeerConnectionFactoryInterface> nativeFactory;
@end

File: talk/app/webrtc/objc/RTCPeerConnectionFactory.mm

@@ -29,7 +29,7 @@
 #error "This file requires ARC support."
 #endif
-#import "RTCPeerConnectionFactory.h"
+#import "RTCPeerConnectionFactory+Internal.h"
 #include <vector>
@@ -47,20 +47,12 @@
 #include "talk/app/webrtc/audiotrack.h"
 #include "talk/app/webrtc/mediastreaminterface.h"
-#include "talk/app/webrtc/peerconnectionfactory.h"
 #include "talk/app/webrtc/peerconnectioninterface.h"
 #include "talk/app/webrtc/videosourceinterface.h"
 #include "talk/app/webrtc/videotrack.h"
 #include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
 #include "webrtc/base/ssladapter.h"
-@interface RTCPeerConnectionFactory ()
-@property(nonatomic, assign) rtc::scoped_refptr<
-    webrtc::PeerConnectionFactoryInterface> nativeFactory;
-@end
 @implementation RTCPeerConnectionFactory {
   rtc::scoped_ptr<rtc::Thread> _signalingThread;

File: talk/app/webrtc/objc/RTCVideoTrack.mm

@@ -32,21 +32,50 @@
 #import "RTCVideoTrack+Internal.h"
 #import "RTCMediaStreamTrack+Internal.h"
+#import "RTCPeerConnectionFactory+Internal.h"
 #import "RTCVideoRendererAdapter.h"
+#import "RTCMediaSource+Internal.h"
+#import "RTCVideoSource+Internal.h"
 @implementation RTCVideoTrack {
   NSMutableArray* _adapters;
 }
-- (id)initWithMediaTrack:
-        (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)
-    mediaTrack {
-  if (self = [super initWithMediaTrack:mediaTrack]) {
-    _adapters = [NSMutableArray array];
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+                         source:(RTCVideoSource*)source
+                        trackId:(NSString*)trackId {
+  NSParameterAssert(factory);
+  NSParameterAssert(source);
+  NSParameterAssert(trackId.length);
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+      factory.nativeFactory->CreateVideoTrack([trackId UTF8String],
+                                              source.videoSource);
+  if (self = [super initWithMediaTrack:track]) {
+    [self configure];
+    _source = source;
   }
   return self;
 }
+
+- (instancetype)initWithMediaTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
+  if (self = [super initWithMediaTrack:mediaTrack]) {
+    [self configure];
+    rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+        self.nativeVideoTrack->GetSource();
+    if (source) {
+      _source = [[RTCVideoSource alloc] initWithMediaSource:source.get()];
+    }
+  }
+  return self;
+}
+
+- (void)configure {
+  _adapters = [NSMutableArray array];
+}
+
 - (void)dealloc {
   for (RTCVideoRendererAdapter *adapter in _adapters) {
     self.nativeVideoTrack->RemoveRenderer(adapter.nativeVideoRenderer);

File: talk/app/webrtc/objc/avfoundationvideocapturer.h (new)

@@ -0,0 +1,79 @@
/* (libjingle license header omitted; identical to the one in the first file
 * above.) */
#ifndef TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
#define TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
#include "talk/media/base/videocapturer.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/video_frame.h"
#import <AVFoundation/AVFoundation.h>
@class RTCAVFoundationVideoCapturerInternal;
namespace webrtc {
class AVFoundationVideoCapturer : public cricket::VideoCapturer {
public:
AVFoundationVideoCapturer();
~AVFoundationVideoCapturer();
cricket::CaptureState Start(const cricket::VideoFormat& format) override;
void Stop() override;
bool IsRunning() override;
bool IsScreencast() const override {
return false;
}
bool GetPreferredFourccs(std::vector<uint32>* fourccs) override {
fourccs->push_back(cricket::FOURCC_NV12);
return true;
}
// Returns the active capture session.
AVCaptureSession* GetCaptureSession();
// Switches the camera being used (either front or back).
void SetUseBackCamera(bool useBackCamera);
bool GetUseBackCamera() const;
// Converts the sample buffer into a cricket::CapturedFrame and signals the
// frame for capture.
void CaptureSampleBuffer(CMSampleBufferRef sampleBuffer);
private:
// Used to signal frame capture on the thread that capturer was started on.
void SignalFrameCapturedOnStartThread(const cricket::CapturedFrame* frame);
RTCAVFoundationVideoCapturerInternal* _capturer;
rtc::Thread* _startThread; // Set in Start(), unset in Stop().
uint64_t _startTime;
}; // AVFoundationVideoCapturer
} // namespace webrtc
#endif  // TALK_APP_WEBRTC_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_

File: talk/app/webrtc/objc/avfoundationvideocapturer.mm (new)

@@ -0,0 +1,446 @@
/* (libjingle license header omitted; identical to the one in the first file
 * above.) */
#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"
#include "webrtc/base/bind.h"
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
cricket::VideoFormat(640,
480,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_NV12);
// This queue is used to start and stop the capturer without blocking the
// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
// running.
static dispatch_queue_t kBackgroundQueue = nil;
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason is
// that other webrtc objects own the cricket::VideoCapturer, which is not
// reference counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) AVCaptureSession* captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer;
- (void)startCaptureAsync;
- (void)stopCaptureAsync;
@end
@implementation RTCAVFoundationVideoCapturerInternal {
// Keep pointers to inputs for convenience.
AVCaptureDeviceInput* _frontDeviceInput;
AVCaptureDeviceInput* _backDeviceInput;
AVCaptureVideoDataOutput* _videoOutput;
// The cricket::VideoCapturer that owns this class. Should never be NULL.
webrtc::AVFoundationVideoCapturer* _capturer;
BOOL _orientationHasChanged;
}
@synthesize captureSession = _captureSession;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
+ (void)initialize {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
kBackgroundQueue = dispatch_queue_create(
"com.google.webrtc.RTCAVFoundationCapturerBackground",
DISPATCH_QUEUE_SERIAL);
});
}
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
NSParameterAssert(capturer);
if (self = [super init]) {
_capturer = capturer;
if (![self setupCaptureSession]) {
return nil;
}
NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[center addObserverForName:AVCaptureSessionRuntimeErrorNotification
object:nil
queue:nil
usingBlock:^(NSNotification* notification) {
NSLog(@"Capture session error: %@", notification.userInfo);
}];
}
return self;
}
- (void)dealloc {
[self stopCaptureAsync];
[[NSNotificationCenter defaultCenter] removeObserver:self];
_capturer = nullptr;
}
- (void)setUseBackCamera:(BOOL)useBackCamera {
if (_useBackCamera == useBackCamera) {
return;
}
_useBackCamera = useBackCamera;
[self updateSessionInput];
}
- (void)startCaptureAsync {
if (_isRunning) {
return;
}
_orientationHasChanged = NO;
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
AVCaptureSession* session = _captureSession;
dispatch_async(kBackgroundQueue, ^{
[session startRunning];
});
_isRunning = YES;
}
- (void)stopCaptureAsync {
if (!_isRunning) {
return;
}
AVCaptureSession* session = _captureSession;
dispatch_async(kBackgroundQueue, ^{
[session stopRunning];
});
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
_isRunning = NO;
}
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput*)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection {
NSParameterAssert(captureOutput == _videoOutput);
if (!_isRunning) {
return;
}
_capturer->CaptureSampleBuffer(sampleBuffer);
}
- (void)captureOutput:(AVCaptureOutput*)captureOutput
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection {
NSLog(@"Dropped sample buffer.");
}
#pragma mark - Private
- (BOOL)setupCaptureSession {
_captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
NSString* version = [[UIDevice currentDevice] systemVersion];
if ([version integerValue] >= 7) {
_captureSession.usesApplicationAudioSession = NO;
}
#endif
if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
NSLog(@"Default video capture preset unsupported.");
return NO;
}
_captureSession.sessionPreset = kDefaultPreset;
// Make the capturer output NV12. Ideally we want I420 but that's not
// currently supported on iPhone / iPad.
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.videoSettings = @{
(NSString*)kCVPixelBufferPixelFormatTypeKey :
@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
[_videoOutput setSampleBufferDelegate:self
queue:dispatch_get_main_queue()];
if (![_captureSession canAddOutput:_videoOutput]) {
NSLog(@"Default video capture output unsupported.");
return NO;
}
[_captureSession addOutput:_videoOutput];
// Find the capture devices.
AVCaptureDevice* frontCaptureDevice = nil;
AVCaptureDevice* backCaptureDevice = nil;
for (AVCaptureDevice* captureDevice in
[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if (captureDevice.position == AVCaptureDevicePositionBack) {
backCaptureDevice = captureDevice;
}
if (captureDevice.position == AVCaptureDevicePositionFront) {
frontCaptureDevice = captureDevice;
}
}
if (!frontCaptureDevice || !backCaptureDevice) {
NSLog(@"Failed to get capture devices.");
return NO;
}
// Set up the session inputs.
NSError* error = nil;
_frontDeviceInput =
[AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
error:&error];
if (!_frontDeviceInput) {
NSLog(@"Failed to get capture device input: %@",
error.localizedDescription);
return NO;
}
_backDeviceInput =
[AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
error:&error];
if (!_backDeviceInput) {
NSLog(@"Failed to get capture device input: %@",
error.localizedDescription);
return NO;
}
// Add the inputs.
if (![_captureSession canAddInput:_frontDeviceInput] ||
![_captureSession canAddInput:_backDeviceInput]) {
NSLog(@"Session does not support capture inputs.");
return NO;
}
[self updateSessionInput];
return YES;
}
- (void)deviceOrientationDidChange:(NSNotification*)notification {
_orientationHasChanged = YES;
[self updateOrientation];
}
- (void)updateOrientation {
AVCaptureConnection* connection =
[_videoOutput connectionWithMediaType:AVMediaTypeVideo];
if (!connection.supportsVideoOrientation) {
// TODO(tkchin): set rotation bit on frames.
return;
}
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationPortrait:
orientation = AVCaptureVideoOrientationPortrait;
break;
case UIDeviceOrientationPortraitUpsideDown:
orientation = AVCaptureVideoOrientationPortraitUpsideDown;
break;
case UIDeviceOrientationLandscapeLeft:
orientation = AVCaptureVideoOrientationLandscapeRight;
break;
case UIDeviceOrientationLandscapeRight:
orientation = AVCaptureVideoOrientationLandscapeLeft;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
case UIDeviceOrientationUnknown:
if (!_orientationHasChanged) {
connection.videoOrientation = orientation;
}
return;
}
connection.videoOrientation = orientation;
}
- (void)updateSessionInput {
// Update the current session input to match what's stored in _useBackCamera.
[_captureSession beginConfiguration];
AVCaptureDeviceInput* oldInput = _backDeviceInput;
AVCaptureDeviceInput* newInput = _frontDeviceInput;
if (_useBackCamera) {
oldInput = _frontDeviceInput;
newInput = _backDeviceInput;
}
// Ok to remove this even if it's not attached. Will be no-op.
[_captureSession removeInput:oldInput];
[_captureSession addInput:newInput];
[self updateOrientation];
[_captureSession commitConfiguration];
}
@end
namespace webrtc {
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
: _capturer(nil), _startThread(nullptr), _startTime(0) {
// Set our supported formats. This matches kDefaultPreset.
std::vector<cricket::VideoFormat> supportedFormats;
supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
SetSupportedFormats(supportedFormats);
_capturer =
[[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
_capturer = nil;
}
cricket::CaptureState AVFoundationVideoCapturer::Start(
const cricket::VideoFormat& format) {
if (!_capturer) {
LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
return cricket::CaptureState::CS_FAILED;
}
if (_capturer.isRunning) {
LOG(LS_ERROR) << "The capturer is already running.";
return cricket::CaptureState::CS_FAILED;
}
if (format != kDefaultFormat) {
LOG(LS_ERROR) << "Unsupported format provided.";
return cricket::CaptureState::CS_FAILED;
}
// Keep track of which thread capture started on. This is the thread that
// frames need to be sent to.
DCHECK(!_startThread);
_startThread = rtc::Thread::Current();
SetCaptureFormat(&format);
// This isn't super accurate because it takes a while for the
// AVCaptureSession to spin up, and this call returns asynchronously.
// TODO(tkchin): make this better.
[_capturer startCaptureAsync];
_startTime = rtc::TimeNanos();
SetCaptureState(cricket::CaptureState::CS_RUNNING);
return cricket::CaptureState::CS_STARTING;
}
void AVFoundationVideoCapturer::Stop() {
[_capturer stopCaptureAsync];
SetCaptureFormat(NULL);
_startThread = nullptr;
}
bool AVFoundationVideoCapturer::IsRunning() {
return _capturer.isRunning;
}
AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
return _capturer.captureSession;
}
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
_capturer.useBackCamera = useBackCamera;
}
bool AVFoundationVideoCapturer::GetUseBackCamera() const {
return _capturer.useBackCamera;
}
void AVFoundationVideoCapturer::CaptureSampleBuffer(
CMSampleBufferRef sampleBuffer) {
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
!CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer == NULL) {
return;
}
// Base address must be locked to access frame data.
CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
if (ret != kCVReturnSuccess) {
return;
}
static size_t const kYPlaneIndex = 0;
static size_t const kUVPlaneIndex = 1;
uint8_t* yPlaneAddress =
(uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
size_t yPlaneHeight =
CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
size_t yPlaneWidth =
CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
size_t yPlaneBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
size_t uvPlaneHeight =
CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
size_t uvPlaneBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
size_t frameSize =
yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
// Sanity check assumption that planar bytes are contiguous.
uint8_t* uvPlaneAddress =
(uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
DCHECK(uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
// Stuff data into a cricket::CapturedFrame.
int64 currentTime = rtc::TimeNanos();
cricket::CapturedFrame frame;
frame.width = yPlaneWidth;
frame.height = yPlaneHeight;
frame.pixel_width = 1;
frame.pixel_height = 1;
frame.fourcc = static_cast<uint32>(cricket::FOURCC_NV12);
frame.time_stamp = currentTime;
frame.elapsed_time = currentTime - _startTime;
frame.data = yPlaneAddress;
frame.data_size = frameSize;
if (_startThread->IsCurrent()) {
SignalFrameCaptured(this, &frame);
} else {
_startThread->Invoke<void>(
rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
this, &frame));
}
CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
}
void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
const cricket::CapturedFrame* frame) {
DCHECK(_startThread->IsCurrent());
// This will call a superclass method that will perform the frame conversion
// to I420.
SignalFrameCaptured(this, frame);
}
} // namespace webrtc
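A worked example of the frame-size arithmetic in CaptureSampleBuffer above,
assuming the default 640x480 NV12 format and no row padding (CoreVideo may pad
rows, which is why the code queries bytes-per-row rather than using the width):

// Y plane:  640 bytes/row * 480 rows = 307200 bytes
// UV plane: 640 bytes/row * 240 rows = 153600 bytes (chroma is 2x2 subsampled
//           with interleaved Cb/Cr, so it has half the rows of the Y plane)
// frameSize = 307200 + 153600 = 460800 bytes
// The DCHECK above asserts that the UV plane begins exactly where the Y plane
// ends, so both planes can be handed off as one contiguous buffer.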

File: talk/app/webrtc/objc/public/RTCAVFoundationVideoSource.h (new)

@@ -0,0 +1,49 @@
/* (libjingle license header omitted; identical to the one in the first file
 * above.) */
#import "RTCVideoSource.h"
@class AVCaptureSession;
@class RTCMediaConstraints;
@class RTCPeerConnectionFactory;
// RTCAVFoundationVideoSource is a video source that uses
// webrtc::AVFoundationVideoCapturer. We do not currently provide a wrapper for
// that capturer because cricket::VideoCapturer is not ref counted and we cannot
// guarantee its lifetime. Instead, we expose its properties through the ref
// counted video source interface.
@interface RTCAVFoundationVideoSource : RTCVideoSource
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
constraints:(RTCMediaConstraints*)constraints;
// Switches the camera being used (either front or back).
@property(nonatomic, assign) BOOL useBackCamera;
// Returns the active capture session.
@property(nonatomic, readonly) AVCaptureSession* captureSession;
@end
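One reason captureSession is exposed: the app can render the local preview
straight from AVFoundation instead of through an RTCEAGLVideoView (see the
TODO in ARDVideoCallView.m below). A sketch using the standard
AVCaptureVideoPreviewLayer, where `source` and `view` are app-supplied:

AVCaptureVideoPreviewLayer* previewLayer =
    [AVCaptureVideoPreviewLayer layerWithSession:source.captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.frame = view.bounds;
[view.layer addSublayer:previewLayer];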

File: talk/app/webrtc/objc/public/RTCVideoTrack.h

@@ -28,10 +28,18 @@
 #import "RTCMediaStreamTrack.h"
 @protocol RTCVideoRenderer;
+@class RTCPeerConnectionFactory;
+@class RTCVideoSource;
 // RTCVideoTrack is an ObjectiveC wrapper for VideoTrackInterface.
 @interface RTCVideoTrack : RTCMediaStreamTrack
+@property(nonatomic, readonly) RTCVideoSource* source;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+                         source:(RTCVideoSource*)source
+                        trackId:(NSString*)trackId;
+
 // Register a renderer that will render all frames received on this track.
 - (void)addRenderer:(id<RTCVideoRenderer>)renderer;

File: talk/examples/objc/AppRTCDemo/ARDAppClient.m

@@ -27,7 +27,15 @@
 #import "ARDAppClient+Internal.h"
-#import <AVFoundation/AVFoundation.h>
+#if defined(WEBRTC_IOS)
+#import "RTCAVFoundationVideoSource.h"
+#endif
+#import "RTCICEServer.h"
+#import "RTCMediaConstraints.h"
+#import "RTCMediaStream.h"
+#import "RTCPair.h"
+#import "RTCVideoCapturer.h"
 #import "ARDAppEngineClient.h"
 #import "ARDCEODTURNClient.h"
@@ -37,13 +45,8 @@
 #import "ARDUtilities.h"
 #import "ARDWebSocketChannel.h"
 #import "RTCICECandidate+JSON.h"
-#import "RTCICEServer.h"
-#import "RTCMediaConstraints.h"
-#import "RTCMediaStream.h"
-#import "RTCPair.h"
 #import "RTCSessionDescription+JSON.h"
-#import "RTCVideoCapturer.h"
-#import "RTCVideoTrack.h"
 static NSString * const kARDDefaultSTUNServerUrl =
     @"stun:stun.l.google.com:19302";
@@ -484,39 +487,33 @@ static NSInteger const kARDAppClientErrorInvalidRoom = -6;
 - (RTCMediaStream *)createLocalMediaStream {
   RTCMediaStream* localStream = [_factory mediaStreamWithLabel:@"ARDAMS"];
-  RTCVideoTrack* localVideoTrack = nil;
+  RTCVideoTrack* localVideoTrack = [self createLocalVideoTrack];
+  if (localVideoTrack) {
+    [localStream addVideoTrack:localVideoTrack];
+    [_delegate appClient:self didReceiveLocalVideoTrack:localVideoTrack];
+  }
+  [localStream addAudioTrack:[_factory audioTrackWithID:@"ARDAMSa0"]];
+  return localStream;
+}
+
+- (RTCVideoTrack *)createLocalVideoTrack {
+  RTCVideoTrack* localVideoTrack = nil;
   // The iOS simulator doesn't provide any sort of camera capture
   // support or emulation (http://goo.gl/rHAnC1) so don't bother
   // trying to open a local stream.
   // TODO(tkchin): local video capture for OSX. See
   // https://code.google.com/p/webrtc/issues/detail?id=3417.
 #if !TARGET_IPHONE_SIMULATOR && TARGET_OS_IPHONE
-  NSString *cameraID = nil;
-  for (AVCaptureDevice *captureDevice in
-       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
-    if (captureDevice.position == AVCaptureDevicePositionFront) {
-      cameraID = [captureDevice localizedName];
-      break;
-    }
-  }
-  NSAssert(cameraID, @"Unable to get the front camera id");
-  RTCVideoCapturer *capturer =
-      [RTCVideoCapturer capturerWithDeviceName:cameraID];
   RTCMediaConstraints *mediaConstraints = [self defaultMediaStreamConstraints];
-  RTCVideoSource *videoSource =
-      [_factory videoSourceWithCapturer:capturer
-                            constraints:mediaConstraints];
-  localVideoTrack =
-      [_factory videoTrackWithID:@"ARDAMSv0" source:videoSource];
-  if (localVideoTrack) {
-    [localStream addVideoTrack:localVideoTrack];
-  }
-  [_delegate appClient:self didReceiveLocalVideoTrack:localVideoTrack];
+  RTCAVFoundationVideoSource *source =
+      [[RTCAVFoundationVideoSource alloc] initWithFactory:_factory
+                                               constraints:mediaConstraints];
+  localVideoTrack =
+      [[RTCVideoTrack alloc] initWithFactory:_factory
+                                      source:source
+                                     trackId:@"ARDAMSv0"];
 #endif
-  [localStream addAudioTrack:[_factory audioTrackWithID:@"ARDAMSa0"]];
-  return localStream;
+  return localVideoTrack;
 }
 #pragma mark - Collider methods

File: talk/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h

@@ -32,6 +32,9 @@
 @class ARDVideoCallView;
 @protocol ARDVideoCallViewDelegate <NSObject>
+// Called when the camera switch button is pressed.
+- (void)videoCallViewDidSwitchCamera:(ARDVideoCallView *)view;
 // Called when the hangup button is pressed.
 - (void)videoCallViewDidHangup:(ARDVideoCallView *)view;

File: talk/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m

@@ -30,19 +30,20 @@
 #import <AVFoundation/AVFoundation.h>
 #import "UIImage+ARDUtilities.h"
-static CGFloat const kHangupButtonPadding = 16;
-static CGFloat const kHangupButtonSize = 48;
-static CGFloat const kLocalVideoViewWidth = 90;
-static CGFloat const kLocalVideoViewHeight = 120;
+static CGFloat const kButtonPadding = 16;
+static CGFloat const kButtonSize = 48;
+static CGFloat const kLocalVideoViewSize = 120;
 static CGFloat const kLocalVideoViewPadding = 8;
 @interface ARDVideoCallView () <RTCEAGLVideoViewDelegate>
 @end
 @implementation ARDVideoCallView {
+  UIButton *_cameraSwitchButton;
   UIButton *_hangupButton;
   CGSize _localVideoSize;
   CGSize _remoteVideoSize;
+  BOOL _useRearCamera;
 }
 @synthesize statusLabel = _statusLabel;
@@ -56,17 +57,30 @@ static CGFloat const kLocalVideoViewPadding = 8;
   _remoteVideoView.delegate = self;
   [self addSubview:_remoteVideoView];
+  // TODO(tkchin): replace this with a view that renders layer from
+  // AVCaptureSession.
   _localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
+  _localVideoView.transform = CGAffineTransformMakeScale(-1, 1);
   _localVideoView.delegate = self;
   [self addSubview:_localVideoView];
+  // TODO(tkchin): don't display this if we can't actually do camera switch.
+  _cameraSwitchButton = [UIButton buttonWithType:UIButtonTypeCustom];
+  _cameraSwitchButton.backgroundColor = [UIColor whiteColor];
+  _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
+  _cameraSwitchButton.layer.masksToBounds = YES;
+  UIImage *image = [UIImage imageNamed:@"ic_switch_video_black_24dp.png"];
+  [_cameraSwitchButton setImage:image forState:UIControlStateNormal];
+  [_cameraSwitchButton addTarget:self
+                          action:@selector(onCameraSwitch:)
+                forControlEvents:UIControlEventTouchUpInside];
+  [self addSubview:_cameraSwitchButton];
   _hangupButton = [UIButton buttonWithType:UIButtonTypeCustom];
   _hangupButton.backgroundColor = [UIColor redColor];
-  _hangupButton.layer.cornerRadius = kHangupButtonSize / 2;
+  _hangupButton.layer.cornerRadius = kButtonSize / 2;
   _hangupButton.layer.masksToBounds = YES;
-  UIImage *image = [UIImage imageForName:@"ic_call_end_black_24dp.png"
-                                   color:[UIColor whiteColor]];
+  image = [UIImage imageForName:@"ic_call_end_black_24dp.png"
+                          color:[UIColor whiteColor]];
   [_hangupButton setImage:image forState:UIControlStateNormal];
   [_hangupButton addTarget:self
                     action:@selector(onHangup:)
@@ -104,21 +118,36 @@ static CGFloat const kLocalVideoViewPadding = 8;
     _remoteVideoView.frame = bounds;
   }
-  CGRect localVideoFrame = CGRectZero;
-  localVideoFrame.origin.x =
-      CGRectGetMaxX(bounds) - kLocalVideoViewWidth - kLocalVideoViewPadding;
-  localVideoFrame.origin.y =
-      CGRectGetMaxY(bounds) - kLocalVideoViewHeight - kLocalVideoViewPadding;
-  localVideoFrame.size.width = kLocalVideoViewWidth;
-  localVideoFrame.size.height = kLocalVideoViewHeight;
-  _localVideoView.frame = localVideoFrame;
+  if (_localVideoSize.width > 0 && _localVideoSize.height > 0) {
+    // Aspect fit local video view into a square box.
+    CGRect localVideoFrame =
+        CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+    localVideoFrame =
+        AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
+    // Place the view in the bottom right.
+    localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+        - localVideoFrame.size.width - kLocalVideoViewPadding;
+    localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+        - localVideoFrame.size.height - kLocalVideoViewPadding;
+    _localVideoView.frame = localVideoFrame;
+  } else {
+    _localVideoView.frame = bounds;
+  }
+  // Place hangup button in the bottom left.
   _hangupButton.frame =
-      CGRectMake(CGRectGetMinX(bounds) + kHangupButtonPadding,
-                 CGRectGetMaxY(bounds) - kHangupButtonPadding -
-                     kHangupButtonSize,
-                 kHangupButtonSize,
-                 kHangupButtonSize);
+      CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
+                 CGRectGetMaxY(bounds) - kButtonPadding -
+                     kButtonSize,
+                 kButtonSize,
+                 kButtonSize);
+  // Place button to the right of hangup button.
+  CGRect cameraSwitchFrame = _hangupButton.frame;
+  cameraSwitchFrame.origin.x =
+      CGRectGetMaxX(cameraSwitchFrame) + kButtonPadding;
+  _cameraSwitchButton.frame = cameraSwitchFrame;
   [_statusLabel sizeToFit];
   _statusLabel.center =
@@ -130,6 +159,7 @@ static CGFloat const kLocalVideoViewPadding = 8;
 - (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
   if (videoView == _localVideoView) {
     _localVideoSize = size;
+    _localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
   } else if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   }
@@ -138,6 +168,10 @@ static CGFloat const kLocalVideoViewPadding = 8;
 #pragma mark - Private
+- (void)onCameraSwitch:(id)sender {
+  [_delegate videoCallViewDidSwitchCamera:self];
+}
 - (void)onHangup:(id)sender {
   [_delegate videoCallViewDidHangup:self];
 }

File: talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m

@@ -27,11 +27,15 @@
 #import "ARDVideoCallViewController.h"
+#import "RTCAVFoundationVideoSource.h"
 #import "ARDAppClient.h"
 #import "ARDVideoCallView.h"
 @interface ARDVideoCallViewController () <ARDAppClientDelegate,
                                           ARDVideoCallViewDelegate>
+@property(nonatomic, strong) RTCVideoTrack *localVideoTrack;
+@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
 @property(nonatomic, readonly) ARDVideoCallView *videoCallView;
 @end
@@ -90,19 +94,13 @@
 - (void)appClient:(ARDAppClient *)client
     didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
-  if (!_localVideoTrack) {
-    _localVideoTrack = localVideoTrack;
-    [_localVideoTrack addRenderer:_videoCallView.localVideoView];
-  }
+  self.localVideoTrack = localVideoTrack;
 }
 - (void)appClient:(ARDAppClient *)client
     didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
-  if (!_remoteVideoTrack) {
-    _remoteVideoTrack = remoteVideoTrack;
-    [_remoteVideoTrack addRenderer:_videoCallView.remoteVideoView];
-    _videoCallView.statusLabel.hidden = YES;
-  }
+  self.remoteVideoTrack = remoteVideoTrack;
+  _videoCallView.statusLabel.hidden = YES;
 }
 - (void)appClient:(ARDAppClient *)client
@@ -119,24 +117,54 @@
   [self hangup];
 }
+- (void)videoCallViewDidSwitchCamera:(ARDVideoCallView *)view {
+  // TODO(tkchin): Rate limit this so you can't tap continuously on it.
+  // Probably through an animation.
+  [self switchCamera];
+}
 #pragma mark - Private
+- (void)setLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
+  if (_localVideoTrack == localVideoTrack) {
+    return;
+  }
+  [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
+  _localVideoTrack = nil;
+  [_videoCallView.localVideoView renderFrame:nil];
+  _localVideoTrack = localVideoTrack;
+  [_localVideoTrack addRenderer:_videoCallView.localVideoView];
+}
+- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
+  if (_remoteVideoTrack == remoteVideoTrack) {
+    return;
+  }
+  [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
+  _remoteVideoTrack = nil;
+  [_videoCallView.remoteVideoView renderFrame:nil];
+  _remoteVideoTrack = remoteVideoTrack;
+  [_remoteVideoTrack addRenderer:_videoCallView.remoteVideoView];
+}
 - (void)hangup {
-  if (_remoteVideoTrack) {
-    [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
-    _remoteVideoTrack = nil;
-    [_videoCallView.remoteVideoView renderFrame:nil];
-  }
-  if (_localVideoTrack) {
-    [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
-    _localVideoTrack = nil;
-    [_videoCallView.localVideoView renderFrame:nil];
-  }
+  self.remoteVideoTrack = nil;
+  self.localVideoTrack = nil;
   [_client disconnect];
   [self.presentingViewController dismissViewControllerAnimated:YES
                                                      completion:nil];
 }
+- (void)switchCamera {
+  RTCVideoSource* source = self.localVideoTrack.source;
+  if ([source isKindOfClass:[RTCAVFoundationVideoSource class]]) {
+    RTCAVFoundationVideoSource* avSource = (RTCAVFoundationVideoSource*)source;
+    avSource.useBackCamera = !avSource.useBackCamera;
+    _videoCallView.localVideoView.transform = avSource.useBackCamera ?
+        CGAffineTransformIdentity : CGAffineTransformMakeScale(-1, 1);
+  }
+}
 - (NSString *)statusTextForState:(RTCICEConnectionState)state {
   switch (state) {
     case RTCICEConnectionNew:

(Two binary PNG assets added, contents not shown:
 examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png, 242 B;
 examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png, 311 B.)

File: talk/libjingle.gyp

@@ -325,8 +325,13 @@
         'conditions': [
           ['OS=="ios"', {
             'sources': [
+              'app/webrtc/objc/avfoundationvideocapturer.h',
+              'app/webrtc/objc/avfoundationvideocapturer.mm',
+              'app/webrtc/objc/RTCAVFoundationVideoSource+Internal.h',
+              'app/webrtc/objc/RTCAVFoundationVideoSource.mm',
               'app/webrtc/objc/RTCEAGLVideoView.m',
               'app/webrtc/objc/public/RTCEAGLVideoView.h',
+              'app/webrtc/objc/public/RTCAVFoundationVideoSource.h',
             ],
             'link_settings': {
               'xcode_settings': {

File: talk/libjingle_examples.gyp

@@ -226,6 +226,8 @@
             'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
             'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp.png',
             'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp@2x.png',
+            'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png',
+            'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png',
             'examples/objc/Icon.png',
           ],
           'sources': [