From d3d6bce9edfb708aee93518e9d5a4a222a35a935 Mon Sep 17 00:00:00 2001
From: "henrike@webrtc.org"
Date: Mon, 10 Mar 2014 20:41:22 +0000
Subject: [PATCH] (Auto)update libjingle 62865357-> 62871616

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5674 4adac7df-926f-26a2-2b94-8c16560cd09d
---
 talk/app/webrtc/objc/RTCMediaStream.mm | 16 +-
 .../webrtc/objc/RTCPeerConnectionFactory.mm | 2 +-
 .../webrtc/objc/RTCVideoCapturer+Internal.h | 2 +-
 talk/app/webrtc/objc/RTCVideoCapturer.mm | 6 +-
 .../webrtc/objc/RTCVideoRenderer+Internal.h | 2 -
 talk/app/webrtc/objc/RTCVideoRenderer.mm | 149 +++++++-
 .../app/webrtc/objc/public/RTCVideoRenderer.h | 10 +
 talk/app/webrtc/statscollector.cc | 7 +-
 .../examples/ios/AppRTCDemo/APPRTCAppClient.h | 3 +
 .../examples/ios/AppRTCDemo/APPRTCAppClient.m | 18 +
 .../ios/AppRTCDemo/APPRTCAppDelegate.h | 3 +
 .../ios/AppRTCDemo/APPRTCAppDelegate.m | 105 ++++--
 .../ios/AppRTCDemo/APPRTCViewController.h | 6 +
 .../ios/AppRTCDemo/APPRTCViewController.m | 90 +++++
 talk/examples/ios/AppRTCDemo/VideoView.h | 50 +++
 talk/examples/ios/AppRTCDemo/VideoView.m | 168 +++++++++
 .../en.lproj/APPRTCViewController.xib | 333 ++++++++++++++----
 talk/libjingle_examples.gyp | 3 +
 talk/media/devices/devicemanager.cc | 9 -
 talk/media/webrtc/webrtcvideocapturer.cc | 6 +-
 20 files changed, 856 insertions(+), 132 deletions(-)
 create mode 100644 talk/examples/ios/AppRTCDemo/VideoView.h
 create mode 100644 talk/examples/ios/AppRTCDemo/VideoView.m

diff --git a/talk/app/webrtc/objc/RTCMediaStream.mm b/talk/app/webrtc/objc/RTCMediaStream.mm
index dd4aab690..3c59860d3 100644
--- a/talk/app/webrtc/objc/RTCMediaStream.mm
+++ b/talk/app/webrtc/objc/RTCMediaStream.mm
@@ -126,14 +126,14 @@
           [[RTCAudioTrack alloc] initWithMediaTrack:track];
       [_audioTracks addObject:audioTrack];
     }
-    // TODO(hughv): Add video.
-//  for (size_t i = 0; i < video_tracks.size(); ++i) {
-//    talk_base::scoped_refptr<webrtc::VideoTrackInterface> track =
-//        video_tracks[i];
-//    RTCVideoTrack *videoTrack =
-//        [[RTCVideoTrack alloc] initWithMediaTrack:track];
-//    [_videoTracks addObject:videoTrack];
-//  }
+
+    for (size_t i = 0; i < video_tracks.size(); ++i) {
+      talk_base::scoped_refptr<webrtc::VideoTrackInterface> track =
+          video_tracks[i];
+      RTCVideoTrack* videoTrack =
+          [[RTCVideoTrack alloc] initWithMediaTrack:track];
+      [_videoTracks addObject:videoTrack];
+    }
   }
   return self;
 }
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
index 325110fb3..3b0e1c451 100644
--- a/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
+++ b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
@@ -120,7 +120,7 @@
     return nil;
   }
   talk_base::scoped_refptr<webrtc::VideoSourceInterface> source =
-      self.nativeFactory->CreateVideoSource(capturer.capturer.get(),
+      self.nativeFactory->CreateVideoSource([capturer release_native_capturer],
                                             constraints.constraints);
   return [[RTCVideoSource alloc] initWithMediaSource:source];
 }
diff --git a/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h
index d0d685b2c..444fdfa90 100644
--- a/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h
+++ b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h
@@ -31,7 +31,7 @@
 @interface RTCVideoCapturer (Internal)
-@property(nonatomic, assign, readonly) const talk_base::scoped_ptr<cricket::VideoCapturer> &capturer;
+- (cricket::VideoCapturer*)release_native_capturer;
 - (id)initWithCapturer:(cricket::VideoCapturer*)capturer;
diff --git a/talk/app/webrtc/objc/RTCVideoCapturer.mm b/talk/app/webrtc/objc/RTCVideoCapturer.mm
index f7282c55d..8529487d6 100644
--- a/talk/app/webrtc/objc/RTCVideoCapturer.mm
+++ b/talk/app/webrtc/objc/RTCVideoCapturer.mm
@@ -67,10 +67,8 @@
   return self;
 }
-// TODO(hughv): When capturer is implemented, this needs to return
-// _capturer.release() instead. For now, this isn't used.
-- (const talk_base::scoped_ptr<cricket::VideoCapturer> &)capturer {
-  return _capturer;
+- (cricket::VideoCapturer*)release_native_capturer {
+  return _capturer.release();
 }
 @end
diff --git a/talk/app/webrtc/objc/RTCVideoRenderer+Internal.h b/talk/app/webrtc/objc/RTCVideoRenderer+Internal.h
index 8854ed71f..6672cfabf 100644
--- a/talk/app/webrtc/objc/RTCVideoRenderer+Internal.h
+++ b/talk/app/webrtc/objc/RTCVideoRenderer+Internal.h
@@ -35,6 +35,4 @@
 @property(nonatomic, assign, readonly) webrtc::VideoRendererInterface *videoRenderer;
-- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer;
-
 @end
diff --git a/talk/app/webrtc/objc/RTCVideoRenderer.mm b/talk/app/webrtc/objc/RTCVideoRenderer.mm
index 231361521..6eb17d26d 100644
--- a/talk/app/webrtc/objc/RTCVideoRenderer.mm
+++ b/talk/app/webrtc/objc/RTCVideoRenderer.mm
@@ -33,18 +33,70 @@
 #if TARGET_OS_IPHONE
 #import <UIKit/UIKit.h>
-#endif
 #import "RTCI420Frame.h"
 #import "RTCVideoRendererDelegate.h"
-@implementation RTCVideoRenderer
+#import "webrtc/modules/video_render/ios/video_render_ios_impl.h"
+#import "webrtc/modules/video_render/ios/video_render_ios_view.h"
+
+#include "common_video/interface/i420_video_frame.h"
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/media/base/videoframe.h"
+#include "webrtc/modules/video_render/include/video_render_defines.h"
+
+// An adapter presenting VideoRendererInterface's API and delegating to
+// a VideoRenderCallback. Suitable for feeding to
+// VideoTrackInterface::AddRenderer().
+class CallbackConverter : public webrtc::VideoRendererInterface {
+
+ public:
+  CallbackConverter(webrtc::VideoRenderCallback* callback,
+                    const uint32_t streamId)
+      : callback_(callback), streamId_(streamId) {}
+
+  virtual void SetSize(int width, int height) {};
+  virtual void RenderFrame(const cricket::VideoFrame* frame) {
+    // Make this into an I420VideoFrame.
+    size_t width = frame->GetWidth();
+    size_t height = frame->GetHeight();
+
+    size_t y_plane_size = width * height;
+    size_t uv_plane_size = frame->GetChromaSize();
+
+    webrtc::I420VideoFrame i420Frame;
+    i420Frame.CreateFrame(y_plane_size,
+                          frame->GetYPlane(),
+                          uv_plane_size,
+                          frame->GetUPlane(),
+                          uv_plane_size,
+                          frame->GetVPlane(),
+                          width,
+                          height,
+                          frame->GetYPitch(),
+                          frame->GetUPitch(),
+                          frame->GetVPitch());
+
+    i420Frame.set_render_time_ms(frame->GetTimeStamp() / 1000000);
+
+    callback_->RenderFrame(streamId_, i420Frame);
+  }
+
+ private:
+  webrtc::VideoRenderCallback* callback_;
+  const uint32_t streamId_;
+};
+
+@implementation RTCVideoRenderer {
+  CallbackConverter* _converter;
+  talk_base::scoped_ptr<webrtc::VideoRenderIosImpl> _iosRenderer;
+}
 @synthesize delegate = _delegate;
 + (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame {
-  // TODO (hughv): Implement.
-  return nil;
+  return [[RTCVideoRenderer alloc]
+      initWithRenderView:[RTCVideoRenderer newRenderViewWithFrame:frame]];
 }
 - (id)initWithDelegate:(id)delegate {
@@ -55,20 +107,93 @@
   return self;
 }
-@end
++ (UIView*)newRenderViewWithFrame:(CGRect)frame {
+  VideoRenderIosView* newView =
+      [[VideoRenderIosView alloc] initWithFrame:frame];
+  return newView;
+}
-@implementation RTCVideoRenderer (Internal)
-
-- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer {
+- (id)initWithRenderView:(UIView*)view {
+  NSAssert([view isKindOfClass:[VideoRenderIosView class]],
+           @"The view must be of kind 'VideoRenderIosView'");
   if ((self = [super init])) {
-    // TODO (hughv): Implement.
+    VideoRenderIosView* renderView = (VideoRenderIosView*)view;
+    _iosRenderer.reset(
+        new webrtc::VideoRenderIosImpl(0, (__bridge void*)renderView, NO));
+    if (_iosRenderer->Init() != -1) {
+      webrtc::VideoRenderCallback* callback =
+          _iosRenderer->AddIncomingRenderStream(0, 1, 0, 0, 1, 1);
+      _converter = new CallbackConverter(callback, 0);
+      _iosRenderer->StartRender();
+    } else {
+      self = nil;
+    }
   }
   return self;
 }
-- (webrtc::VideoRendererInterface *)videoRenderer {
-  // TODO (hughv): Implement.
-  return NULL;
+- (void)start {
+  _iosRenderer->StartRender();
+}
+
+- (void)stop {
+  _iosRenderer->StopRender();
 }
 @end
+
+@implementation RTCVideoRenderer (Internal)
+
+- (webrtc::VideoRendererInterface*)videoRenderer {
+  return _converter;
+}
+
+@end
+
+#else // TARGET_OS_IPHONE
+
+// TODO(fischman): implement an OS/X RTCVideoRenderer (and add to
+// RTCPeerConnectionTest!).
+
+#import "RTCI420Frame.h"
+#import "RTCVideoRendererDelegate.h"
+@implementation RTCVideoRenderer
+@synthesize delegate = _delegate;
++ (RTCVideoRenderer*)videoRenderGUIWithFrame:(CGRect)frame {
+  // TODO(hughv): Implement.
+  return nil;
+}
+- (id)initWithDelegate:(id)delegate {
+  if ((self = [super init])) {
+    _delegate = delegate;
+    // TODO(hughv): Create video renderer.
+ } + return self; +} + ++ (UIView*)newRenderViewWithFrame:(CGRect)frame { + return nil; +} +- (id)initWithRenderView:(UIView*)renderView { + return nil; +} +- (void)start { +} +- (void)stop { +} + +@end +@implementation RTCVideoRenderer (Internal) +- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer { + if ((self = [super init])) { + // TODO(hughv): Implement. + } + return self; +} +- (webrtc::VideoRendererInterface *)videoRenderer { + // TODO(hughv): Implement. + return NULL; +} +@end + +#endif // TARGET_OS_IPHONE diff --git a/talk/app/webrtc/objc/public/RTCVideoRenderer.h b/talk/app/webrtc/objc/public/RTCVideoRenderer.h index cc7ba7184..f0a83716c 100644 --- a/talk/app/webrtc/objc/public/RTCVideoRenderer.h +++ b/talk/app/webrtc/objc/public/RTCVideoRenderer.h @@ -29,6 +29,7 @@ @protocol RTCVideoRendererDelegate; struct CGRect; +@class UIView; // Interface for rendering VideoFrames from a VideoTrack @interface RTCVideoRenderer : NSObject @@ -38,11 +39,20 @@ struct CGRect; // A convenience method to create a renderer and window and render frames into // that window. + (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame; ++ (UIView*)newRenderViewWithFrame:(CGRect)frame; +// The view to the following constructor +// must be one of the views from newRenderViewWithFrame. +- (id)initWithRenderView:(UIView*)renderView; // Initialize the renderer. Requires a delegate which does the actual drawing // of frames. - (id)initWithDelegate:(id)delegate; +// Starts rendering. +- (void)start; +// Stops rendering. It can be restarted again using the 'start' method above. +- (void)stop; + #ifndef DOXYGEN_SHOULD_SKIP_THIS // Disallow init and don't add to documentation - (id)init __attribute__( diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc index 9bcb429ba..1f6114e42 100644 --- a/talk/app/webrtc/statscollector.cc +++ b/talk/app/webrtc/statscollector.cc @@ -922,7 +922,12 @@ void StatsCollector::UpdateStatsFromExistingLocalAudioTracks() { std::string ssrc_id = talk_base::ToString(ssrc); StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc, ssrc_id); - ASSERT(report != NULL); + if (report == NULL) { + // This can happen if a local audio track is added to a stream on the + // fly and the report has not been set up yet. Do nothing in this case. + LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc; + continue; + } // The same ssrc can be used by both local and remote audio tracks. std::string track_id; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h index 608ed0505..410ead6a9 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h @@ -36,6 +36,8 @@ @end +@class RTCMediaConstraints; + // Negotiates signaling for chatting with apprtc.appspot.com "rooms". // Uses the client<->server specifics of the apprtc AppEngine webapp. 
// @@ -48,6 +50,7 @@ @property(nonatomic, assign) id ICEServerDelegate; @property(nonatomic, assign) id messageHandler; @property(nonatomic, assign) BOOL initiator; +@property(nonatomic, strong) RTCMediaConstraints* videoConstraints; - (void)connectToRoom:(NSURL *)room; - (void)sendData:(NSData *)data; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m index 710811601..5b035e3c3 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m @@ -31,6 +31,8 @@ #import "GAEChannelClient.h" #import "RTCICEServer.h" +#import "APPRTCAppDelegate.h" +#import "RTCMediaConstraints.h" @interface APPRTCAppClient () @@ -62,6 +64,7 @@ @synthesize token = _token; @synthesize verboseLogging = _verboseLogging; @synthesize initiator = _initiator; +@synthesize videoConstraints = _videoConstraints; - (id)init { if (self = [super init]) { @@ -263,6 +266,9 @@ options:0 range:NSMakeRange(0, [self.roomHtml length])]) { [self showMessage:@"Room full"]; + APPRTCAppDelegate *ad = + (APPRTCAppDelegate *)[[UIApplication sharedApplication] delegate]; + [ad closeVideoUI]; return; } @@ -331,6 +337,18 @@ } [self updateICEServers:ICEServers withTurnServer:turnServerUrl]; + NSString* mc = [self findVar:@"mediaConstraints" strippingQuotes:NO]; + if (mc) { + error = nil; + NSData *mcData = [mc dataUsingEncoding:NSUTF8StringEncoding]; + json = + [NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error]; + NSAssert(!error, @"Unable to parse. %@", error.localizedDescription); + if ([[json objectForKey:@"video"] boolValue]) { + self.videoConstraints = [[RTCMediaConstraints alloc] init]; + } + } + [self maybeLogMessage: [NSString stringWithFormat:@"About to open GAE with token: %@", self.token]]; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h index 22754e3ad..22a0225b5 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h @@ -40,6 +40,7 @@ @end @class APPRTCViewController; +@class RTCVideoTrack; // The main application class of the AppRTCDemo iOS app demonstrating // interoperability between the Objcective C implementation of PeerConnection @@ -53,4 +54,6 @@ @property (strong, nonatomic) UIWindow *window; @property (strong, nonatomic) APPRTCViewController *viewController; +- (void)closeVideoUI; + @end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m index 9b8c96d5f..681876eae 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m @@ -25,6 +25,8 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ +#import + #import "APPRTCAppDelegate.h" #import "APPRTCViewController.h" @@ -37,17 +39,25 @@ #import "RTCPeerConnectionDelegate.h" #import "RTCPeerConnectionFactory.h" #import "RTCSessionDescription.h" +#import "RTCVideoRenderer.h" +#import "RTCVideoCapturer.h" +#import "RTCVideoTrack.h" +#import "VideoView.h" @interface PCObserver : NSObject - (id)initWithDelegate:(id)delegate; +@property(nonatomic, strong) VideoView *videoView; + @end @implementation PCObserver { id _delegate; } +@synthesize videoView = _videoView; + - (id)initWithDelegate:(id)delegate { if (self = [super init]) { _delegate = delegate; @@ -71,16 +81,18 @@ dispatch_async(dispatch_get_main_queue(), ^(void) { NSAssert([stream.audioTracks count] >= 1, @"Expected at least 1 audio stream"); - //NSAssert([stream.videoTracks count] >= 1, - // @"Expected at least 1 video stream"); - // TODO(hughv): Add video support + NSAssert([stream.videoTracks count] <= 1, + @"Expected at most 1 video stream"); + if ([stream.videoTracks count] != 0) { + [[self videoView] + renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]]; + } }); } - (void)peerConnection:(RTCPeerConnection *)peerConnection removedStream:(RTCMediaStream *)stream { NSLog(@"PCO onRemoveStream."); - // TODO(hughv): Remove video track. } - (void) @@ -166,8 +178,7 @@ - (void)applicationWillResignActive:(UIApplication *)application { [self displayLogMessage:@"Application lost focus, connection broken."]; - [self disconnect]; - [self.viewController resetUI]; + [self closeVideoUI]; } - (void)applicationDidEnterBackground:(UIApplication *)application { @@ -212,7 +223,21 @@ - (void)onICEServers:(NSArray *)servers { self.queuedRemoteCandidates = [NSMutableArray array]; self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init]; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] init]; + RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] + initWithMandatoryConstraints: + @[[[RTCPair alloc] + initWithKey:@"OfferToReceiveAudio" + value:@"true"], + [[RTCPair alloc] + initWithKey:@"OfferToReceiveVideo" + value:@"true"]] + optionalConstraints: + @[[[RTCPair alloc] + initWithKey:@"internalSctpDataChannels" + value:@"true"], + [[RTCPair alloc] + initWithKey:@"DtlsSrtpKeyAgreement" + value:@"true"]]]; self.pcObserver = [[PCObserver alloc] initWithDelegate:self]; self.peerConnection = [self.peerConnectionFactory peerConnectionWithICEServers:servers @@ -220,7 +245,34 @@ delegate:self.pcObserver]; RTCMediaStream *lms = [self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"]; - // TODO(hughv): Add video. 
+ + NSString *cameraID = nil; + for (AVCaptureDevice *captureDevice in + [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] ) { + if (captureDevice.position == AVCaptureDevicePositionFront) { + cameraID = [captureDevice localizedName]; + break; + } + } + NSAssert(cameraID, @"Unable to get the front camera id"); + + RTCVideoCapturer *capturer = + [RTCVideoCapturer capturerWithDeviceName:cameraID]; + RTCVideoSource *videoSource = + [self.peerConnectionFactory + videoSourceWithCapturer:capturer constraints:self.client.videoConstraints]; + RTCVideoTrack *localVideoTrack = + [self.peerConnectionFactory + videoTrackWithID:@"ARDAMSv0" source:videoSource]; + if (localVideoTrack) { + [lms addVideoTrack:localVideoTrack]; + } + + [self.viewController.localVideoView + renderVideoTrackInterface:localVideoTrack]; + + self.pcObserver.videoView = self.viewController.remoteVideoView; + [lms addAudioTrack:[self.peerConnectionFactory audioTrackWithID:@"ARDAMSa0"]]; [self.peerConnection addStream:lms constraints:constraints]; [self displayLogMessage:@"onICEServers - add local stream."]; @@ -236,10 +288,9 @@ [self displayLogMessage:@"GAE onOpen - create offer."]; RTCPair *audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"]; - // TODO(hughv): Add video. - // RTCPair *video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" - // value:@"true"]; - NSArray *mandatory = @[ audio /*, video*/ ]; + RTCPair *video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" + value:@"true"]; + NSArray *mandatory = @[ audio , video ]; RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory optionalConstraints:nil]; @@ -283,7 +334,14 @@ sessionDescription:sdp]; [self displayLogMessage:@"PC - setRemoteDescription."]; } else if ([value compare:@"bye"] == NSOrderedSame) { - [self disconnect]; + [self closeVideoUI]; + UIAlertView *alertView = + [[UIAlertView alloc] initWithTitle:@"Remote end hung up" + message:@"dropping PeerConnection" + delegate:nil + cancelButtonTitle:@"OK" + otherButtonTitles:nil]; + [alertView show]; } else { NSAssert(NO, @"Invalid message: %@", data); } @@ -291,13 +349,13 @@ - (void)onClose { [self displayLogMessage:@"GAE onClose."]; - [self disconnect]; + [self closeVideoUI]; } - (void)onError:(int)code withDescription:(NSString *)description { [self displayLogMessage: [NSString stringWithFormat:@"GAE onError: %@", description]]; - [self disconnect]; + [self closeVideoUI]; } #pragma mark - RTCSessionDescriptonDelegate methods @@ -411,11 +469,10 @@ RTCPair *audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"]; - // TODO(hughv): Add video. 
- // RTCPair *video = - // [[RTCPair alloc] - // initWithKey:@"OfferToReceiveVideo" value:@"true"]; - NSArray *mandatory = @[ audio /*, video*/ ]; + RTCPair *video = + [[RTCPair alloc] + initWithKey:@"OfferToReceiveVideo" value:@"true"]; + NSArray *mandatory = @[ audio , video ]; RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory @@ -441,6 +498,7 @@ - (void)disconnect { [self.client sendData:[@"{\"type\": \"bye\"}" dataUsingEncoding:NSUTF8StringEncoding]]; + [self.peerConnection close]; self.peerConnection = nil; self.peerConnectionFactory = nil; self.pcObserver = nil; @@ -479,4 +537,11 @@ return removeBackslash; } +#pragma mark - public methods + +- (void)closeVideoUI { + [self disconnect]; + [self.viewController resetUI]; +} + @end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCViewController.h b/talk/examples/ios/AppRTCDemo/APPRTCViewController.h index 6b107a564..2db01a60c 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCViewController.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCViewController.h @@ -27,12 +27,18 @@ #import +@class VideoView; + // The view controller that is displayed when AppRTCDemo is loaded. @interface APPRTCViewController : UIViewController @property (weak, nonatomic) IBOutlet UITextField *textField; @property (weak, nonatomic) IBOutlet UITextView *textInstructions; @property (weak, nonatomic) IBOutlet UITextView *textOutput; +@property(weak, nonatomic) IBOutlet UIView* blackView; + +@property(nonatomic, strong) VideoView* remoteVideoView; +@property(nonatomic, strong) VideoView* localVideoView; - (void)displayText:(NSString *)text; - (void)resetUI; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCViewController.m b/talk/examples/ios/AppRTCDemo/APPRTCViewController.m index bd346efcd..cacac14a4 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCViewController.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCViewController.m @@ -27,8 +27,12 @@ #import "APPRTCViewController.h" +#import "VideoView.h" + @interface APPRTCViewController () +@property (nonatomic, assign) UIInterfaceOrientation statusBarOrientation; + @end @implementation APPRTCViewController @@ -36,13 +40,31 @@ @synthesize textField = _textField; @synthesize textInstructions = _textInstructions; @synthesize textOutput = _textOutput; +@synthesize blackView = _blackView; + +@synthesize remoteVideoView = _remoteVideoView; +@synthesize localVideoView = _localVideoView; + +@synthesize statusBarOrientation = _statusBarOrientation; - (void)viewDidLoad { [super viewDidLoad]; + self.statusBarOrientation = + [UIApplication sharedApplication].statusBarOrientation; self.textField.delegate = self; [self.textField becomeFirstResponder]; } +- (void)viewDidLayoutSubviews { + if (self.statusBarOrientation != + [UIApplication sharedApplication].statusBarOrientation) { + self.statusBarOrientation = + [UIApplication sharedApplication].statusBarOrientation; + [[NSNotificationCenter defaultCenter] + postNotificationName:@"StatusBarOrientationDidChange" object:nil]; + } +} + - (void)displayText:(NSString *)text { dispatch_async(dispatch_get_main_queue(), ^(void) { NSString *output = @@ -52,11 +74,75 @@ } - (void)resetUI { + [self.textField resignFirstResponder]; self.textField.text = nil; self.textField.hidden = NO; self.textInstructions.hidden = NO; self.textOutput.hidden = YES; self.textOutput.text = nil; + self.blackView.hidden = YES; + + [_remoteVideoView stop]; + [_remoteVideoView removeFromSuperview]; + self.remoteVideoView = nil; + + [_localVideoView stop]; + [_localVideoView 
removeFromSuperview]; + self.localVideoView = nil; +} + +// TODO(fischman): Use video dimensions from the incoming video stream +// and resize the Video View accordingly w.r.t. aspect ratio. +enum { + // Remote video view dimensions. + kRemoteVideoWidth = 640, + kRemoteVideoHeight = 480, + // Padding space for local video view with its parent. + kLocalViewPadding = 20 +}; + +- (void)setupCaptureSession { + self.blackView.hidden = NO; + + CGRect frame = CGRectMake((self.blackView.bounds.size.width + -kRemoteVideoWidth)/2, + (self.blackView.bounds.size.height + -kRemoteVideoHeight)/2, + kRemoteVideoWidth, + kRemoteVideoHeight); + VideoView *videoView = [[VideoView alloc] initWithFrame:frame]; + videoView.isRemote = TRUE; + + [self.blackView addSubview:videoView]; + videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | + UIViewAutoresizingFlexibleRightMargin | + UIViewAutoresizingFlexibleBottomMargin | + UIViewAutoresizingFlexibleTopMargin; + videoView.translatesAutoresizingMaskIntoConstraints = YES; + _remoteVideoView = videoView; + + CGSize screenSize = [[UIScreen mainScreen] bounds].size; + CGFloat localVideoViewWidth = + UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ? + screenSize.width/4 : screenSize.height/4; + CGFloat localVideoViewHeight = + UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ? + screenSize.height/4 : screenSize.width/4; + frame = CGRectMake(self.blackView.bounds.size.width + -localVideoViewWidth-kLocalViewPadding, + kLocalViewPadding, + localVideoViewWidth, + localVideoViewHeight); + videoView = [[VideoView alloc] initWithFrame:frame]; + videoView.isRemote = FALSE; + + [self.blackView addSubview:videoView]; + videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | + UIViewAutoresizingFlexibleBottomMargin | + UIViewAutoresizingFlexibleHeight | + UIViewAutoresizingFlexibleWidth; + videoView.translatesAutoresizingMaskIntoConstraints = YES; + _localVideoView = videoView; } #pragma mark - UITextFieldDelegate @@ -76,6 +162,10 @@ NSString *url = [NSString stringWithFormat:@"apprtc://apprtc.appspot.com/?r=%@", room]; [[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]]; + + dispatch_async(dispatch_get_main_queue(), ^{ + [self setupCaptureSession]; + }); } - (BOOL)textFieldShouldReturn:(UITextField *)textField { diff --git a/talk/examples/ios/AppRTCDemo/VideoView.h b/talk/examples/ios/AppRTCDemo/VideoView.h new file mode 100644 index 000000000..ff3167c52 --- /dev/null +++ b/talk/examples/ios/AppRTCDemo/VideoView.h @@ -0,0 +1,50 @@ +/* + * libjingle + * Copyright 2013, Google Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#import + +@class RTCVideoTrack; + +// This class encapsulates VideoRenderIosView. +@interface VideoView : UIView + +// Property to get/set required video orientation. +@property(nonatomic, assign) UIInterfaceOrientation videoOrientation; +// Specifies whether the object represents a local or remote video stream. +@property(nonatomic, assign) BOOL isRemote; + +// Sets up the underlying renderer and track objects. +- (void)renderVideoTrackInterface:(RTCVideoTrack*)track; + +// Stops rendering. +- (void)pause; +// Starts rendering. +- (void)resume; +// Stops rendering and resets underlying renderer and track objects. +- (void)stop; + +@end diff --git a/talk/examples/ios/AppRTCDemo/VideoView.m b/talk/examples/ios/AppRTCDemo/VideoView.m new file mode 100644 index 000000000..d563fb32f --- /dev/null +++ b/talk/examples/ios/AppRTCDemo/VideoView.m @@ -0,0 +1,168 @@ +/* + * libjingle + * Copyright 2013, Google Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * This VideoView must be initialzed and added to a View to get + * either the local or remote video stream rendered. + * It is a view itself and it encapsulates + * an object of VideoRenderIosView and UIActivityIndicatorView. + * Both of the views will get resized as per the frame of their parent. 
+ */ + +#import "VideoView.h" + +#import "RTCVideoRenderer.h" +#import "RTCVideoTrack.h" + +@interface VideoView () { + RTCVideoTrack *_track; + RTCVideoRenderer *_renderer; +} + +@property (nonatomic, weak) UIView *renderView; +@property (nonatomic, weak) UIActivityIndicatorView *activityView; + +@end + +@implementation VideoView + +@synthesize videoOrientation = _videoOrientation; +@synthesize isRemote = _isRemote; +@synthesize renderView = _renderView; +@synthesize activityView = _activityView; + +static void init(VideoView *self) { + UIView *renderView = [RTCVideoRenderer newRenderViewWithFrame: + CGRectMake(0, + 0, + self.bounds.size.width, + self.bounds.size.height)]; + [self addSubview:renderView]; + renderView.autoresizingMask = UIViewAutoresizingFlexibleHeight | + UIViewAutoresizingFlexibleWidth; + renderView.translatesAutoresizingMaskIntoConstraints = YES; + self.renderView = renderView; + + UIActivityIndicatorView *indicatorView = + [[UIActivityIndicatorView alloc] + initWithActivityIndicatorStyle: + UIActivityIndicatorViewStyleWhiteLarge]; + indicatorView.frame = self.bounds; + indicatorView.hidesWhenStopped = YES; + [self addSubview:indicatorView]; + indicatorView.autoresizingMask = UIViewAutoresizingFlexibleWidth | + UIViewAutoresizingFlexibleHeight; + indicatorView.translatesAutoresizingMaskIntoConstraints = YES; + [indicatorView startAnimating]; + self.activityView = indicatorView; +} + +- (id)initWithFrame:(CGRect)frame { + self = [super initWithFrame:frame]; + if (self) { + init(self); + } + return self; +} + +-(id)initWithCoder:(NSCoder *)aDecoder { + self = [super initWithCoder:aDecoder]; + if (self) { + init(self); + } + return self; +} + +- (UIInterfaceOrientation)videoOrientation { + return _videoOrientation; +} + +- (void)setVideoOrientation:(UIInterfaceOrientation)videoOrientation { + if (_videoOrientation != videoOrientation) { + _videoOrientation = videoOrientation; + + CGFloat angle; + switch (videoOrientation) { + case UIInterfaceOrientationPortrait: + angle = M_PI_2; + break; + case UIInterfaceOrientationPortraitUpsideDown: + angle = -M_PI_2; + break; + case UIInterfaceOrientationLandscapeLeft: + angle = M_PI; + break; + case UIInterfaceOrientationLandscapeRight: + angle = 0; + break; + } + // The video comes in mirrored. That is fine for the local video, + // but the remote video should be put back to original. + CGAffineTransform xform = + CGAffineTransformMakeScale([self isRemote] ? 
-1 : 1, 1); + xform = CGAffineTransformRotate(xform, angle); + [[self renderView] setTransform:xform]; + } +} + +- (void)renderVideoTrackInterface:(RTCVideoTrack *)videoTrack { + [self stop]; + + _track = videoTrack; + + if (_track) { + if (!_renderer) { + _renderer = [[RTCVideoRenderer alloc] + initWithRenderView:[self renderView]]; + } + [_track addRenderer:_renderer]; + [self resume]; + } + + [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft]; + [self setVideoOrientation:UIInterfaceOrientationPortrait]; + [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft]; +} + +-(void)pause { + [_renderer stop]; +} + +-(void)resume { + [self.activityView stopAnimating]; + [self.activityView removeFromSuperview]; + self.activityView = nil; + + [_renderer start]; +} + +- (void)stop { + [_track removeRenderer:_renderer]; + [_renderer stop]; +} + +@end diff --git a/talk/examples/ios/AppRTCDemo/en.lproj/APPRTCViewController.xib b/talk/examples/ios/AppRTCDemo/en.lproj/APPRTCViewController.xib index cd73ea64e..92d2adebf 100644 --- a/talk/examples/ios/AppRTCDemo/en.lproj/APPRTCViewController.xib +++ b/talk/examples/ios/AppRTCDemo/en.lproj/APPRTCViewController.xib @@ -1,14 +1,14 @@ - 1552 - 12D78 - 3084 - 1187.37 - 626.00 + 1536 + 13B42 + 4514 + 1265 + 696.00 com.apple.InterfaceBuilder.IBCocoaTouchPlugin - 2083 + 3747 IBNSLayoutConstraint @@ -34,7 +34,7 @@ IBCocoaTouchFramework - + 274 @@ -42,7 +42,8 @@ 292 {{20, 20}, {280, 141}} - + + _NS:9 1 @@ -60,8 +61,8 @@ 1 14 - - Helvetica + + HelveticaNeue 14 16 @@ -71,7 +72,8 @@ 292 {{20, 180}, {280, 30}} - + + _NS:9 NO YES @@ -95,13 +97,15 @@ IBCocoaTouchFramework - + -2147483356 - {{20, 20}, {280, 508}} + {{20, 20}, {280, 190}} + + _NS:9 YES @@ -114,10 +118,26 @@ IBCocoaTouchFramework - + + + + + -2147483356 + {{20, 228}, {280, 300}} + + + + _NS:9 + + 3 + MAA + + IBCocoaTouchFramework {{0, 20}, {320, 548}} + + 3 @@ -140,7 +160,7 @@ IBCocoaTouchFramework - Retina 4 Full Screen + Retina 4-inch Full Screen 2 IBCocoaTouchFramework @@ -180,6 +200,14 @@ 138 + + + blackView + + + + 151 + @@ -204,6 +232,74 @@ 6 + + + 4 + 0 + + 4 + 1 + + 20 + + 1000 + + 8 + 23 + 3 + NO + + + + 3 + 0 + + 3 + 1 + + 228 + + 1000 + + 3 + 9 + 3 + NO + + + + 5 + 0 + + 5 + 1 + + 0.0 + + 1000 + + 6 + 24 + 2 + NO + + + + 6 + 0 + + 6 + 1 + + 0.0 + + 1000 + + 6 + 24 + 2 + NO + 6 @@ -216,25 +312,10 @@ 1000 - 8 + 0 29 3 - - - - 3 - 0 - - 3 - 1 - - 180 - - 1000 - - 3 - 9 - 3 + NO @@ -248,9 +329,27 @@ 1000 - 8 + 0 29 3 + NO + + + + 4 + 0 + + 4 + 1 + + 0.0 + + 1000 + + 6 + 24 + 2 + NO @@ -264,9 +363,10 @@ 1000 - 8 + 0 29 3 + NO @@ -280,25 +380,10 @@ 1000 - 8 - 29 - 3 - - - - 4 - 0 - - 4 - 1 - - 20 - - 1000 - - 8 + 0 29 3 + NO @@ -312,9 +397,10 @@ 1000 - 8 + 0 29 3 + NO @@ -328,9 +414,10 @@ 1000 - 8 + 0 29 3 + NO @@ -344,9 +431,10 @@ 1000 - 8 + 0 29 3 + NO @@ -360,13 +448,15 @@ 1000 - 8 + 0 29 3 + NO + @@ -389,6 +479,7 @@ 3 9 1 + NO @@ -424,11 +515,6 @@ - - 124 - - - 126 @@ -437,7 +523,25 @@ 128 - + + + + 8 + 0 + + 0 + 1 + + 190 + + 1000 + + 3 + 9 + 1 + NO + + @@ -445,11 +549,6 @@ - - 136 - - - 137 @@ -460,6 +559,41 @@ + + 141 + + + + + 142 + + + + + 148 + + + + + 149 + + + + + 153 + + + + + 154 + + + + + 155 + + + @@ -471,31 +605,43 @@ com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin - com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin + + + com.apple.InterfaceBuilder.IBCocoaTouchPlugin - com.apple.InterfaceBuilder.IBCocoaTouchPlugin 
com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin + com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin - + - + - + + + + com.apple.InterfaceBuilder.IBCocoaTouchPlugin com.apple.InterfaceBuilder.IBCocoaTouchPlugin @@ -505,10 +651,42 @@ - 139 + 155 + + APPRTCViewController + UIViewController + + UIView + UITextField + UITextView + UITextView + + + + blackView + UIView + + + textField + UITextField + + + textInstructions + UITextView + + + textOutput + UITextView + + + + IBProjectSource + ./Classes/APPRTCViewController.h + + NSLayoutConstraint NSObject @@ -521,9 +699,18 @@ 0 IBCocoaTouchFramework + YES + + com.apple.InterfaceBuilder.CocoaTouchPlugin.iPhoneOS + + + + com.apple.InterfaceBuilder.CocoaTouchPlugin.InterfaceBuilder3 + + YES 3 YES - 2083 + 3747 diff --git a/talk/libjingle_examples.gyp b/talk/libjingle_examples.gyp index 76642bcb6..3acf28d6b 100755 --- a/talk/libjingle_examples.gyp +++ b/talk/libjingle_examples.gyp @@ -251,12 +251,15 @@ 'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch', 'examples/ios/AppRTCDemo/GAEChannelClient.h', 'examples/ios/AppRTCDemo/GAEChannelClient.m', + 'examples/ios/AppRTCDemo/VideoView.h', + 'examples/ios/AppRTCDemo/VideoView.m', 'examples/ios/AppRTCDemo/main.m', ], 'xcode_settings': { 'CLANG_ENABLE_OBJC_ARC': 'YES', 'INFOPLIST_FILE': 'examples/ios/AppRTCDemo/Info.plist', 'OTHER_LDFLAGS': [ + '-framework CoreGraphics', '-framework Foundation', '-framework UIKit', ], diff --git a/talk/media/devices/devicemanager.cc b/talk/media/devices/devicemanager.cc index e90ec5efd..75b935ce5 100644 --- a/talk/media/devices/devicemanager.cc +++ b/talk/media/devices/devicemanager.cc @@ -39,8 +39,6 @@ #include "talk/media/devices/filevideocapturer.h" #include "talk/media/devices/yuvframescapturer.h" -#if !defined(IOS) - #if defined(HAVE_WEBRTC_VIDEO) #include "talk/media/webrtc/webrtcvideocapturer.h" #endif @@ -51,8 +49,6 @@ #endif -#endif - namespace { bool StringMatchWithWildcard( @@ -216,10 +212,6 @@ void DeviceManager::ClearVideoCaptureDeviceMaxFormat( } VideoCapturer* DeviceManager::CreateVideoCapturer(const Device& device) const { -#if defined(IOS) - LOG_F(LS_ERROR) << " should never be called!"; - return NULL; -#else VideoCapturer* capturer = ConstructFakeVideoCapturer(device); if (capturer) { return capturer; @@ -237,7 +229,6 @@ VideoCapturer* DeviceManager::CreateVideoCapturer(const Device& device) const { capturer->ConstrainSupportedFormats(video_format); } return capturer; -#endif } VideoCapturer* DeviceManager::ConstructFakeVideoCapturer( diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc index 1afa45060..ce1cae08b 100644 --- a/talk/media/webrtc/webrtcvideocapturer.cc +++ b/talk/media/webrtc/webrtcvideocapturer.cc @@ -190,11 +190,15 @@ bool WebRtcVideoCapturer::Init(const Device& device) { } } factory_->DestroyDeviceInfo(info); +// TODO(fischman): Remove the following check +// when capabilities for iOS are implemented +// https://code.google.com/p/webrtc/issues/detail?id=2968 +#if !defined(IOS) if (supported.empty()) { LOG(LS_ERROR) << "Failed to find usable formats for id: " << device.id; return false; } 
-
+#endif
   module_ = factory_->Create(0, vcm_id);
   if (!module_) {
     LOG(LS_ERROR) << "Failed to create capturer for id: " << device.id;