From 7fa1fcb72cc7b0d68a5e11d52724504c1cd4ac36 Mon Sep 17 00:00:00 2001 From: "fischman@webrtc.org" Date: Tue, 25 Mar 2014 00:11:56 +0000 Subject: [PATCH] AppRTCDemo(ios): style/cleanup fixes following cr/62871616-p10 BUG=2168 R=noahric@google.com Review URL: https://webrtc-codereview.appspot.com/9709004 git-svn-id: http://webrtc.googlecode.com/svn/trunk@5768 4adac7df-926f-26a2-2b94-8c16560cd09d --- talk/app/webrtc/objc/RTCAudioTrack.mm | 2 +- talk/app/webrtc/objc/RTCICECandidate.mm | 14 +- talk/app/webrtc/objc/RTCICEServer.mm | 12 +- talk/app/webrtc/objc/RTCMediaConstraints.mm | 12 +- talk/app/webrtc/objc/RTCMediaStream.mm | 30 +- talk/app/webrtc/objc/RTCMediaStreamTrack.mm | 14 +- talk/app/webrtc/objc/RTCPair.m | 2 +- talk/app/webrtc/objc/RTCPeerConnection.mm | 104 +++--- .../webrtc/objc/RTCPeerConnectionFactory.mm | 33 +- .../webrtc/objc/RTCPeerConnectionObserver.mm | 16 +- talk/app/webrtc/objc/RTCSessionDescription.mm | 8 +- .../webrtc/objc/RTCVideoCapturer+Internal.h | 2 +- talk/app/webrtc/objc/RTCVideoCapturer.mm | 12 +- talk/app/webrtc/objc/RTCVideoRenderer.mm | 65 ++-- talk/app/webrtc/objc/RTCVideoSource.mm | 2 +- talk/app/webrtc/objc/RTCVideoTrack.mm | 15 +- talk/app/webrtc/objc/public/RTCMediaSource.h | 6 +- .../app/webrtc/objc/public/RTCVideoRenderer.h | 16 +- .../objctests/RTCPeerConnectionSyncObserver.m | 65 ++-- .../webrtc/objctests/RTCPeerConnectionTest.mm | 101 +++-- .../RTCSessionDescriptionSyncObserver.m | 14 +- talk/app/webrtc/objctests/mac/main.mm | 2 +- .../examples/ios/AppRTCDemo/APPRTCAppClient.h | 18 +- .../examples/ios/AppRTCDemo/APPRTCAppClient.m | 282 +++++++------- .../ios/AppRTCDemo/APPRTCAppDelegate.h | 8 +- .../ios/AppRTCDemo/APPRTCAppDelegate.m | 351 +++++++++--------- .../{VideoView.h => APPRTCVideoView.h} | 9 +- .../examples/ios/AppRTCDemo/APPRTCVideoView.m | 82 ++++ .../ios/AppRTCDemo/APPRTCViewController.h | 14 +- .../ios/AppRTCDemo/APPRTCViewController.m | 140 ++++--- .../ios/AppRTCDemo/GAEChannelClient.m | 58 ++- 
talk/examples/ios/AppRTCDemo/VideoView.m | 168 --------- talk/examples/ios/AppRTCDemo/main.m | 2 +- talk/libjingle_examples.gyp | 6 +- talk/media/devices/macdevicemanagermm.mm | 53 ++- 35 files changed, 809 insertions(+), 929 deletions(-) rename talk/examples/ios/AppRTCDemo/{VideoView.h => APPRTCVideoView.h} (91%) create mode 100644 talk/examples/ios/AppRTCDemo/APPRTCVideoView.m delete mode 100644 talk/examples/ios/AppRTCDemo/VideoView.m diff --git a/talk/app/webrtc/objc/RTCAudioTrack.mm b/talk/app/webrtc/objc/RTCAudioTrack.mm index 3ed802e30..2364c2942 100644 --- a/talk/app/webrtc/objc/RTCAudioTrack.mm +++ b/talk/app/webrtc/objc/RTCAudioTrack.mm @@ -39,7 +39,7 @@ @implementation RTCAudioTrack (Internal) - (talk_base::scoped_refptr)audioTrack { - return static_cast(self.mediaTrack.get()); + return static_cast(self.mediaTrack.get()); } @end diff --git a/talk/app/webrtc/objc/RTCICECandidate.mm b/talk/app/webrtc/objc/RTCICECandidate.mm index dd2cc84fa..4b5b0ed44 100644 --- a/talk/app/webrtc/objc/RTCICECandidate.mm +++ b/talk/app/webrtc/objc/RTCICECandidate.mm @@ -37,9 +37,9 @@ @synthesize sdpMLineIndex = _sdpMLineIndex; @synthesize sdp = _sdp; -- (id)initWithMid:(NSString *)sdpMid +- (id)initWithMid:(NSString*)sdpMid index:(NSInteger)sdpMLineIndex - sdp:(NSString *)sdp { + sdp:(NSString*)sdp { if (!sdpMid || !sdp) { NSAssert(NO, @"nil arguments not allowed"); return nil; @@ -52,18 +52,18 @@ return self; } -- (NSString *)description { +- (NSString*)description { return [NSString stringWithFormat:@"%@:%ld:%@", - self.sdpMid, - (long)self.sdpMLineIndex, - self.sdp]; + self.sdpMid, + (long)self.sdpMLineIndex, + self.sdp]; } @end @implementation RTCICECandidate (Internal) -- (id)initWithCandidate:(const webrtc::IceCandidateInterface *)candidate { +- (id)initWithCandidate:(const webrtc::IceCandidateInterface*)candidate { if ((self = [super init])) { std::string sdp; if (candidate->ToString(&sdp)) { diff --git a/talk/app/webrtc/objc/RTCICEServer.mm 
b/talk/app/webrtc/objc/RTCICEServer.mm index 05cf5df8b..9f6ecb618 100644 --- a/talk/app/webrtc/objc/RTCICEServer.mm +++ b/talk/app/webrtc/objc/RTCICEServer.mm @@ -37,9 +37,9 @@ @synthesize username = _username; @synthesize password = _password; -- (id)initWithURI:(NSURL *)URI - username:(NSString *)username - password:(NSString *)password { +- (id)initWithURI:(NSURL*)URI + username:(NSString*)username + password:(NSString*)password { if (!URI || !username || !password) { NSAssert(NO, @"nil arguments not allowed"); self = nil; @@ -53,9 +53,11 @@ return self; } -- (NSString *)description { +- (NSString*)description { return [NSString stringWithFormat:@"RTCICEServer: [%@:%@:%@]", - [self.URI absoluteString], self.username, self.password]; + [self.URI absoluteString], + self.username, + self.password]; } @end diff --git a/talk/app/webrtc/objc/RTCMediaConstraints.mm b/talk/app/webrtc/objc/RTCMediaConstraints.mm index d4fa66259..a1cc5a564 100644 --- a/talk/app/webrtc/objc/RTCMediaConstraints.mm +++ b/talk/app/webrtc/objc/RTCMediaConstraints.mm @@ -44,8 +44,8 @@ webrtc::MediaConstraintsInterface::Constraints _optional; } -- (id)initWithMandatoryConstraints:(NSArray *)mandatory - optionalConstraints:(NSArray *)optional { +- (id)initWithMandatoryConstraints:(NSArray*)mandatory + optionalConstraints:(NSArray*)optional { if ((self = [super init])) { _mandatory = [[self class] constraintsFromArray:mandatory]; _optional = [[self class] constraintsFromArray:optional]; @@ -55,10 +55,10 @@ return self; } -+ (webrtc::MediaConstraintsInterface::Constraints) - constraintsFromArray:(NSArray *)array { ++ (webrtc::MediaConstraintsInterface::Constraints)constraintsFromArray: + (NSArray*)array { webrtc::MediaConstraintsInterface::Constraints constraints; - for (RTCPair *pair in array) { + for (RTCPair* pair in array) { constraints.push_back(webrtc::MediaConstraintsInterface::Constraint( [pair.key UTF8String], [pair.value UTF8String])); } @@ -69,7 +69,7 @@ @implementation 
RTCMediaConstraints (internal) -- (const webrtc::RTCMediaConstraintsNative *)constraints { +- (const webrtc::RTCMediaConstraintsNative*)constraints { return _constraints.get(); } diff --git a/talk/app/webrtc/objc/RTCMediaStream.mm b/talk/app/webrtc/objc/RTCMediaStream.mm index 707911b39..94e14fc57 100644 --- a/talk/app/webrtc/objc/RTCMediaStream.mm +++ b/talk/app/webrtc/objc/RTCMediaStream.mm @@ -38,31 +38,31 @@ #include "talk/app/webrtc/mediastreaminterface.h" @implementation RTCMediaStream { - NSMutableArray *_audioTracks; - NSMutableArray *_videoTracks; + NSMutableArray* _audioTracks; + NSMutableArray* _videoTracks; talk_base::scoped_refptr _mediaStream; } -- (NSString *)description { +- (NSString*)description { return [NSString stringWithFormat:@"[%@:A=%lu:V=%lu]", - [self label], - (unsigned long)[self.audioTracks count], - (unsigned long)[self.videoTracks count]]; + [self label], + (unsigned long)[self.audioTracks count], + (unsigned long)[self.videoTracks count]]; } -- (NSArray *)audioTracks { +- (NSArray*)audioTracks { return [_audioTracks copy]; } -- (NSArray *)videoTracks { +- (NSArray*)videoTracks { return [_videoTracks copy]; } -- (NSString *)label { +- (NSString*)label { return @(self.mediaStream->label().c_str()); } -- (BOOL)addAudioTrack:(RTCAudioTrack *)track { +- (BOOL)addAudioTrack:(RTCAudioTrack*)track { if (self.mediaStream->AddTrack(track.audioTrack)) { [_audioTracks addObject:track]; return YES; @@ -70,7 +70,7 @@ return NO; } -- (BOOL)addVideoTrack:(RTCVideoTrack *)track { +- (BOOL)addVideoTrack:(RTCVideoTrack*)track { if (self.mediaStream->AddTrack(track.videoTrack)) { [_videoTracks addObject:track]; return YES; @@ -78,7 +78,7 @@ return NO; } -- (BOOL)removeAudioTrack:(RTCAudioTrack *)track { +- (BOOL)removeAudioTrack:(RTCAudioTrack*)track { NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:track]; NSAssert(index != NSNotFound, @"|removeAudioTrack| called on unexpected RTCAudioTrack"); @@ -89,7 +89,7 @@ return NO; } -- 
(BOOL)removeVideoTrack:(RTCVideoTrack *)track { +- (BOOL)removeVideoTrack:(RTCVideoTrack*)track { NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track]; NSAssert(index != NSNotFound, @"|removeAudioTrack| called on unexpected RTCVideoTrack"); @@ -105,7 +105,7 @@ @implementation RTCMediaStream (Internal) - (id)initWithMediaStream: - (talk_base::scoped_refptr)mediaStream { + (talk_base::scoped_refptr)mediaStream { if (!mediaStream) { NSAssert(NO, @"nil arguments not allowed"); self = nil; @@ -122,7 +122,7 @@ for (size_t i = 0; i < audio_tracks.size(); ++i) { talk_base::scoped_refptr track = audio_tracks[i]; - RTCAudioTrack *audioTrack = + RTCAudioTrack* audioTrack = [[RTCAudioTrack alloc] initWithMediaTrack:track]; [_audioTracks addObject:audioTrack]; } diff --git a/talk/app/webrtc/objc/RTCMediaStreamTrack.mm b/talk/app/webrtc/objc/RTCMediaStreamTrack.mm index ca44c0384..0c7fc5c0d 100644 --- a/talk/app/webrtc/objc/RTCMediaStreamTrack.mm +++ b/talk/app/webrtc/objc/RTCMediaStreamTrack.mm @@ -40,12 +40,13 @@ - (BOOL)isEqual:(id)other { // Equality is purely based on the label just like the C++ implementation. 
- if (self == other) return YES; + if (self == other) + return YES; if (![other isKindOfClass:[self class]] || ![self isKindOfClass:[other class]]) { return NO; } - RTCMediaStreamTrack *otherMediaStream = (RTCMediaStreamTrack *)other; + RTCMediaStreamTrack* otherMediaStream = (RTCMediaStreamTrack*)other; return [self.label isEqual:otherMediaStream.label]; } @@ -53,11 +54,11 @@ return [self.label hash]; } -- (NSString *)kind { +- (NSString*)kind { return @(self.mediaTrack->kind().c_str()); } -- (NSString *)label { +- (NSString*)label { return @(self.mediaTrack->id().c_str()); } @@ -82,8 +83,9 @@ @implementation RTCMediaStreamTrack (Internal) -- (id)initWithMediaTrack:( - talk_base::scoped_refptr)mediaTrack { +- (id)initWithMediaTrack: + (talk_base::scoped_refptr) + mediaTrack { if (!mediaTrack) { NSAssert(NO, @"nil arguments not allowed"); self = nil; diff --git a/talk/app/webrtc/objc/RTCPair.m b/talk/app/webrtc/objc/RTCPair.m index 31ac53ac1..226484522 100644 --- a/talk/app/webrtc/objc/RTCPair.m +++ b/talk/app/webrtc/objc/RTCPair.m @@ -32,7 +32,7 @@ @synthesize key = _key; @synthesize value = _value; -- (id)initWithKey:(NSString *)key value:(NSString *)value { +- (id)initWithKey:(NSString*)key value:(NSString*)value { if ((self = [super init])) { _key = [key copy]; _value = [value copy]; diff --git a/talk/app/webrtc/objc/RTCPeerConnection.mm b/talk/app/webrtc/objc/RTCPeerConnection.mm index d2599c386..a7e31a1d7 100644 --- a/talk/app/webrtc/objc/RTCPeerConnection.mm +++ b/talk/app/webrtc/objc/RTCPeerConnection.mm @@ -51,39 +51,39 @@ class RTCCreateSessionDescriptionObserver : public CreateSessionDescriptionObserver { public: RTCCreateSessionDescriptionObserver(id delegate, - RTCPeerConnection *peerConnection) { + RTCPeerConnection* peerConnection) { _delegate = delegate; _peerConnection = peerConnection; } - virtual void OnSuccess(SessionDescriptionInterface *desc) OVERRIDE { - RTCSessionDescription *session = + virtual void OnSuccess(SessionDescriptionInterface* 
desc) OVERRIDE { + RTCSessionDescription* session = [[RTCSessionDescription alloc] initWithSessionDescription:desc]; [_delegate peerConnection:_peerConnection didCreateSessionDescription:session - error:nil]; + error:nil]; } - virtual void OnFailure(const std::string &error) OVERRIDE { - NSString *str = @(error.c_str()); - NSError *err = + virtual void OnFailure(const std::string& error) OVERRIDE { + NSString* str = @(error.c_str()); + NSError* err = [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain code:kRTCSessionDescriptionDelegateErrorCode - userInfo:@{ @"error" : str }]; + userInfo:@{@"error" : str}]; [_delegate peerConnection:_peerConnection didCreateSessionDescription:nil - error:err]; + error:err]; } private: id _delegate; - RTCPeerConnection *_peerConnection; + RTCPeerConnection* _peerConnection; }; class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver { public: RTCSetSessionDescriptionObserver(id delegate, - RTCPeerConnection *peerConnection) { + RTCPeerConnection* peerConnection) { _delegate = delegate; _peerConnection = peerConnection; } @@ -93,37 +93,36 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver { didSetSessionDescriptionWithError:nil]; } - virtual void OnFailure(const std::string &error) OVERRIDE { - NSString *str = @(error.c_str()); - NSError *err = + virtual void OnFailure(const std::string& error) OVERRIDE { + NSString* str = @(error.c_str()); + NSError* err = [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain code:kRTCSessionDescriptionDelegateErrorCode - userInfo:@{ @"error" : str }]; + userInfo:@{@"error" : str}]; [_delegate peerConnection:_peerConnection didSetSessionDescriptionWithError:err]; } private: id _delegate; - RTCPeerConnection *_peerConnection; + RTCPeerConnection* _peerConnection; }; - } @implementation RTCPeerConnection { - NSMutableArray *_localStreams; - talk_base::scoped_ptr_observer; + NSMutableArray* _localStreams; + 
talk_base::scoped_ptr _observer; talk_base::scoped_refptr _peerConnection; } -- (BOOL)addICECandidate:(RTCICECandidate *)candidate { +- (BOOL)addICECandidate:(RTCICECandidate*)candidate { talk_base::scoped_ptr iceCandidate( candidate.candidate); return self.peerConnection->AddIceCandidate(iceCandidate.get()); } -- (BOOL)addStream:(RTCMediaStream *)stream - constraints:(RTCMediaConstraints *)constraints { +- (BOOL)addStream:(RTCMediaStream*)stream + constraints:(RTCMediaConstraints*)constraints { BOOL ret = self.peerConnection->AddStream(stream.mediaStream, constraints.constraints); if (!ret) { @@ -134,7 +133,7 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver { } - (void)createAnswerWithDelegate:(id)delegate - constraints:(RTCMediaConstraints *)constraints { + constraints:(RTCMediaConstraints*)constraints { talk_base::scoped_refptr observer(new talk_base::RefCountedObject< webrtc::RTCCreateSessionDescriptionObserver>(delegate, self)); @@ -142,73 +141,73 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver { } - (void)createOfferWithDelegate:(id)delegate - constraints:(RTCMediaConstraints *)constraints { + constraints:(RTCMediaConstraints*)constraints { talk_base::scoped_refptr observer(new talk_base::RefCountedObject< webrtc::RTCCreateSessionDescriptionObserver>(delegate, self)); self.peerConnection->CreateOffer(observer, constraints.constraints); } -- (void)removeStream:(RTCMediaStream *)stream { +- (void)removeStream:(RTCMediaStream*)stream { self.peerConnection->RemoveStream(stream.mediaStream); [_localStreams removeObject:stream]; } -- (void) - setLocalDescriptionWithDelegate:(id)delegate - sessionDescription:(RTCSessionDescription *)sdp { +- (void)setLocalDescriptionWithDelegate: + (id)delegate + sessionDescription:(RTCSessionDescription*)sdp { talk_base::scoped_refptr observer( new talk_base::RefCountedObject( delegate, self)); self.peerConnection->SetLocalDescription(observer, 
sdp.sessionDescription); } -- (void) - setRemoteDescriptionWithDelegate:(id)delegate - sessionDescription:(RTCSessionDescription *)sdp { +- (void)setRemoteDescriptionWithDelegate: + (id)delegate + sessionDescription:(RTCSessionDescription*)sdp { talk_base::scoped_refptr observer( new talk_base::RefCountedObject( delegate, self)); self.peerConnection->SetRemoteDescription(observer, sdp.sessionDescription); } -- (BOOL)updateICEServers:(NSArray *)servers - constraints:(RTCMediaConstraints *)constraints { +- (BOOL)updateICEServers:(NSArray*)servers + constraints:(RTCMediaConstraints*)constraints { webrtc::PeerConnectionInterface::IceServers iceServers; - for (RTCICEServer *server in servers) { + for (RTCICEServer* server in servers) { iceServers.push_back(server.iceServer); } return self.peerConnection->UpdateIce(iceServers, constraints.constraints); } -- (RTCSessionDescription *)localDescription { - const webrtc::SessionDescriptionInterface *sdi = +- (RTCSessionDescription*)localDescription { + const webrtc::SessionDescriptionInterface* sdi = self.peerConnection->local_description(); - return sdi ? - [[RTCSessionDescription alloc] initWithSessionDescription:sdi] : - nil; + return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi] + : nil; } -- (NSArray *)localStreams { +- (NSArray*)localStreams { return [_localStreams copy]; } -- (RTCSessionDescription *)remoteDescription { - const webrtc::SessionDescriptionInterface *sdi = +- (RTCSessionDescription*)remoteDescription { + const webrtc::SessionDescriptionInterface* sdi = self.peerConnection->remote_description(); - return sdi ? - [[RTCSessionDescription alloc] initWithSessionDescription:sdi] : - nil; + return sdi ? 
[[RTCSessionDescription alloc] initWithSessionDescription:sdi] + : nil; } - (RTCICEConnectionState)iceConnectionState { - return [RTCEnumConverter convertIceConnectionStateToObjC: - self.peerConnection->ice_connection_state()]; + return [RTCEnumConverter + convertIceConnectionStateToObjC:self.peerConnection + ->ice_connection_state()]; } - (RTCICEGatheringState)iceGatheringState { - return [RTCEnumConverter convertIceGatheringStateToObjC: - self.peerConnection->ice_gathering_state()]; + return [RTCEnumConverter + convertIceGatheringStateToObjC:self.peerConnection + ->ice_gathering_state()]; } - (RTCSignalingState)signalingState { @@ -224,9 +223,10 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver { @implementation RTCPeerConnection (Internal) -- (id)initWithPeerConnection:( - talk_base::scoped_refptr)peerConnection - observer:(webrtc::RTCPeerConnectionObserver *)observer { +- (id)initWithPeerConnection: + (talk_base::scoped_refptr) + peerConnection + observer:(webrtc::RTCPeerConnectionObserver*)observer { if (!peerConnection || !observer) { NSAssert(NO, @"nil arguments not allowed"); self = nil; diff --git a/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm index ca2eb72bb..81af8e11f 100644 --- a/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm +++ b/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm @@ -86,54 +86,55 @@ return self; } -- (RTCPeerConnection *) - peerConnectionWithICEServers:(NSArray *)servers - constraints:(RTCMediaConstraints *)constraints +- (RTCPeerConnection*) + peerConnectionWithICEServers:(NSArray*)servers + constraints:(RTCMediaConstraints*)constraints delegate:(id)delegate { webrtc::PeerConnectionInterface::IceServers iceServers; - for (RTCICEServer *server in servers) { + for (RTCICEServer* server in servers) { iceServers.push_back(server.iceServer); } - webrtc::RTCPeerConnectionObserver *observer = + webrtc::RTCPeerConnectionObserver* observer = new 
webrtc::RTCPeerConnectionObserver(delegate); webrtc::DTLSIdentityServiceInterface* dummy_dtls_identity_service = NULL; talk_base::scoped_refptr peerConnection = - self.nativeFactory->CreatePeerConnection( - iceServers, constraints.constraints, dummy_dtls_identity_service, - observer); - RTCPeerConnection *pc = + self.nativeFactory->CreatePeerConnection(iceServers, + constraints.constraints, + dummy_dtls_identity_service, + observer); + RTCPeerConnection* pc = [[RTCPeerConnection alloc] initWithPeerConnection:peerConnection observer:observer]; observer->SetPeerConnection(pc); return pc; } -- (RTCMediaStream *)mediaStreamWithLabel:(NSString *)label { +- (RTCMediaStream*)mediaStreamWithLabel:(NSString*)label { talk_base::scoped_refptr nativeMediaStream = self.nativeFactory->CreateLocalMediaStream([label UTF8String]); return [[RTCMediaStream alloc] initWithMediaStream:nativeMediaStream]; } -- (RTCVideoSource *)videoSourceWithCapturer:(RTCVideoCapturer *)capturer - constraints:(RTCMediaConstraints *)constraints { +- (RTCVideoSource*)videoSourceWithCapturer:(RTCVideoCapturer*)capturer + constraints:(RTCMediaConstraints*)constraints { if (!capturer) { return nil; } talk_base::scoped_refptr source = - self.nativeFactory->CreateVideoSource([capturer release_native_capturer], + self.nativeFactory->CreateVideoSource([capturer takeNativeCapturer], constraints.constraints); return [[RTCVideoSource alloc] initWithMediaSource:source]; } -- (RTCVideoTrack *)videoTrackWithID:(NSString *)videoId - source:(RTCVideoSource *)source { +- (RTCVideoTrack*)videoTrackWithID:(NSString*)videoId + source:(RTCVideoSource*)source { talk_base::scoped_refptr track = self.nativeFactory->CreateVideoTrack([videoId UTF8String], source.videoSource); return [[RTCVideoTrack alloc] initWithMediaTrack:track]; } -- (RTCAudioTrack *)audioTrackWithID:(NSString *)audioId { +- (RTCAudioTrack*)audioTrackWithID:(NSString*)audioId { talk_base::scoped_refptr track = self.nativeFactory->CreateAudioTrack([audioId 
UTF8String], NULL); return [[RTCAudioTrack alloc] initWithMediaTrack:track]; diff --git a/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm b/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm index 6e155e975..d9b9b42ae 100644 --- a/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm +++ b/talk/app/webrtc/objc/RTCPeerConnectionObserver.mm @@ -43,7 +43,7 @@ RTCPeerConnectionObserver::RTCPeerConnectionObserver( } void RTCPeerConnectionObserver::SetPeerConnection( - RTCPeerConnection *peerConnection) { + RTCPeerConnection* peerConnection) { _peerConnection = peerConnection; } @@ -54,8 +54,8 @@ void RTCPeerConnectionObserver::OnError() { void RTCPeerConnectionObserver::OnSignalingChange( PeerConnectionInterface::SignalingState new_state) { [_delegate peerConnection:_peerConnection - signalingStateChanged: - [RTCEnumConverter convertSignalingStateToObjC:new_state]]; + signalingStateChanged:[RTCEnumConverter + convertSignalingStateToObjC:new_state]]; } void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) { @@ -82,15 +82,15 @@ void RTCPeerConnectionObserver::OnRenegotiationNeeded() { void RTCPeerConnectionObserver::OnIceConnectionChange( PeerConnectionInterface::IceConnectionState new_state) { [_delegate peerConnection:_peerConnection - iceConnectionChanged: - [RTCEnumConverter convertIceConnectionStateToObjC:new_state]]; + iceConnectionChanged:[RTCEnumConverter + convertIceConnectionStateToObjC:new_state]]; } void RTCPeerConnectionObserver::OnIceGatheringChange( PeerConnectionInterface::IceGatheringState new_state) { [_delegate peerConnection:_peerConnection - iceGatheringChanged: - [RTCEnumConverter convertIceGatheringStateToObjC:new_state]]; + iceGatheringChanged:[RTCEnumConverter + convertIceGatheringStateToObjC:new_state]]; } void RTCPeerConnectionObserver::OnIceCandidate( @@ -100,4 +100,4 @@ void RTCPeerConnectionObserver::OnIceCandidate( [_delegate peerConnection:_peerConnection gotICECandidate:iceCandidate]; } -} // namespace webrtc +} // 
namespace webrtc diff --git a/talk/app/webrtc/objc/RTCSessionDescription.mm b/talk/app/webrtc/objc/RTCSessionDescription.mm index 165c2bc1b..49dfa2d9c 100644 --- a/talk/app/webrtc/objc/RTCSessionDescription.mm +++ b/talk/app/webrtc/objc/RTCSessionDescription.mm @@ -36,7 +36,7 @@ @synthesize description = _description; @synthesize type = _type; -- (id)initWithType:(NSString *)type sdp:(NSString *)sdp { +- (id)initWithType:(NSString*)type sdp:(NSString*)sdp { if (!type || !sdp) { NSAssert(NO, @"nil arguments not allowed"); return nil; @@ -53,14 +53,14 @@ @implementation RTCSessionDescription (Internal) - (id)initWithSessionDescription: - (const webrtc::SessionDescriptionInterface *)sessionDescription { + (const webrtc::SessionDescriptionInterface*)sessionDescription { if (!sessionDescription) { NSAssert(NO, @"nil arguments not allowed"); self = nil; return nil; } if ((self = [super init])) { - const std::string &type = sessionDescription->type(); + const std::string& type = sessionDescription->type(); std::string sdp; if (!sessionDescription->ToString(&sdp)) { NSAssert(NO, @"Invalid SessionDescriptionInterface."); @@ -73,7 +73,7 @@ return self; } -- (webrtc::SessionDescriptionInterface *)sessionDescription { +- (webrtc::SessionDescriptionInterface*)sessionDescription { return webrtc::CreateSessionDescription( [self.type UTF8String], [self.description UTF8String], NULL); } diff --git a/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h index 444fdfa90..4a4810b47 100644 --- a/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h +++ b/talk/app/webrtc/objc/RTCVideoCapturer+Internal.h @@ -31,7 +31,7 @@ @interface RTCVideoCapturer (Internal) -- (cricket::VideoCapturer*)release_native_capturer; +- (cricket::VideoCapturer*)takeNativeCapturer; - (id)initWithCapturer:(cricket::VideoCapturer*)capturer; diff --git a/talk/app/webrtc/objc/RTCVideoCapturer.mm b/talk/app/webrtc/objc/RTCVideoCapturer.mm index eb8c2156d..d947f02ad 100644 
--- a/talk/app/webrtc/objc/RTCVideoCapturer.mm +++ b/talk/app/webrtc/objc/RTCVideoCapturer.mm @@ -35,11 +35,11 @@ #include "talk/media/devices/devicemanager.h" @implementation RTCVideoCapturer { - talk_base::scoped_ptr_capturer; + talk_base::scoped_ptr _capturer; } -+ (RTCVideoCapturer *)capturerWithDeviceName:(NSString *)deviceName { - const std::string &device_name = std::string([deviceName UTF8String]); ++ (RTCVideoCapturer*)capturerWithDeviceName:(NSString*)deviceName { + const std::string& device_name = std::string([deviceName UTF8String]); talk_base::scoped_ptr device_manager( cricket::DeviceManagerFactory::Create()); bool initialized = device_manager->Init(); @@ -51,7 +51,7 @@ } talk_base::scoped_ptr capturer( device_manager->CreateVideoCapturer(device)); - RTCVideoCapturer *rtcCapturer = + RTCVideoCapturer* rtcCapturer = [[RTCVideoCapturer alloc] initWithCapturer:capturer.release()]; return rtcCapturer; } @@ -60,14 +60,14 @@ @implementation RTCVideoCapturer (Internal) -- (id)initWithCapturer:(cricket::VideoCapturer *)capturer { +- (id)initWithCapturer:(cricket::VideoCapturer*)capturer { if ((self = [super init])) { _capturer.reset(capturer); } return self; } -- (cricket::VideoCapturer*)release_native_capturer { +- (cricket::VideoCapturer*)takeNativeCapturer { return _capturer.release(); } diff --git a/talk/app/webrtc/objc/RTCVideoRenderer.mm b/talk/app/webrtc/objc/RTCVideoRenderer.mm index 64d147f59..f6eef1c0b 100644 --- a/talk/app/webrtc/objc/RTCVideoRenderer.mm +++ b/talk/app/webrtc/objc/RTCVideoRenderer.mm @@ -49,7 +49,6 @@ // a VideoRenderCallback. Suitable for feeding to // VideoTrackInterface::AddRenderer(). 
class CallbackConverter : public webrtc::VideoRendererInterface { - public: CallbackConverter(webrtc::VideoRenderCallback* callback, const uint32_t streamId) @@ -88,51 +87,57 @@ class CallbackConverter : public webrtc::VideoRendererInterface { }; @implementation RTCVideoRenderer { + VideoRenderIosView* _renderView; + UIActivityIndicatorView* _activityIndicator; CallbackConverter* _converter; talk_base::scoped_ptr _iosRenderer; } @synthesize delegate = _delegate; -+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame { - return [[RTCVideoRenderer alloc] - initWithRenderView:[RTCVideoRenderer newRenderViewWithFrame:frame]]; -} - - (id)initWithDelegate:(id)delegate { - if ((self = [super init])) { - _delegate = delegate; - // TODO (hughv): Create video renderer. - } + // TODO(hughv): Create video renderer. + [self doesNotRecognizeSelector:_cmd]; return self; } -+ (UIView*)newRenderViewWithFrame:(CGRect)frame { - VideoRenderIosView* newView = - [[VideoRenderIosView alloc] initWithFrame:frame]; - return newView; -} - -- (id)initWithRenderView:(UIView*)view { - NSAssert([view isKindOfClass:[VideoRenderIosView class]], - @"The view must be of kind 'VideoRenderIosView'"); +- (id)initWithView:(UIView*)view { if ((self = [super init])) { - VideoRenderIosView* renderView = (VideoRenderIosView*)view; + CGRect frame = + CGRectMake(0, 0, view.bounds.size.width, view.bounds.size.height); + _renderView = [[VideoRenderIosView alloc] initWithFrame:frame]; _iosRenderer.reset( - new webrtc::VideoRenderIosImpl(0, (__bridge void*)renderView, NO)); - if (_iosRenderer->Init() != -1) { + new webrtc::VideoRenderIosImpl(0, (__bridge void*)_renderView, NO)); + if (_iosRenderer->Init() == -1) { + self = nil; + } else { webrtc::VideoRenderCallback* callback = _iosRenderer->AddIncomingRenderStream(0, 1, 0, 0, 1, 1); _converter = new CallbackConverter(callback, 0); _iosRenderer->StartRender(); - } else { - self = nil; + [view addSubview:_renderView]; + _renderView.autoresizingMask = + 
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth; + _renderView.translatesAutoresizingMaskIntoConstraints = YES; + + _activityIndicator = [[UIActivityIndicatorView alloc] + initWithActivityIndicatorStyle: + UIActivityIndicatorViewStyleWhiteLarge]; + _activityIndicator.frame = view.bounds; + _activityIndicator.hidesWhenStopped = YES; + [view addSubview:_activityIndicator]; + _activityIndicator.autoresizingMask = + UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight; + _activityIndicator.translatesAutoresizingMaskIntoConstraints = YES; + [_activityIndicator startAnimating]; } } return self; } - (void)start { + [_activityIndicator stopAnimating]; + [_activityIndicator removeFromSuperview]; _iosRenderer->StartRender(); } @@ -159,7 +164,7 @@ class CallbackConverter : public webrtc::VideoRendererInterface { #import "RTCVideoRendererDelegate.h" @implementation RTCVideoRenderer @synthesize delegate = _delegate; -+ (RTCVideoRenderer*)videoRenderGUIWithFrame:(CGRect)frame { ++ (RTCVideoRenderer*)videoRendererWithFrame:(CGRect)frame { // TODO(hughv): Implement. return nil; } @@ -170,12 +175,10 @@ class CallbackConverter : public webrtc::VideoRendererInterface { } return self; } - -+ (UIView*)newRenderViewWithFrame:(CGRect)frame { +- (id)initWithView:(UIView*)view { return nil; } -- (id)initWithRenderView:(UIView*)renderView { - return nil; +- (void)setTransform:(CGAffineTransform)transform { } - (void)start { } @@ -184,13 +187,13 @@ class CallbackConverter : public webrtc::VideoRendererInterface { @end @implementation RTCVideoRenderer (Internal) -- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer { +- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface*)videoRenderer { if ((self = [super init])) { // TODO(hughv): Implement. } return self; } -- (webrtc::VideoRendererInterface *)videoRenderer { +- (webrtc::VideoRendererInterface*)videoRenderer { // TODO(hughv): Implement. 
return NULL; } diff --git a/talk/app/webrtc/objc/RTCVideoSource.mm b/talk/app/webrtc/objc/RTCVideoSource.mm index fb6bf2a90..b4554e08d 100644 --- a/talk/app/webrtc/objc/RTCVideoSource.mm +++ b/talk/app/webrtc/objc/RTCVideoSource.mm @@ -38,7 +38,7 @@ @implementation RTCVideoSource (Internal) - (talk_base::scoped_refptr)videoSource { - return static_cast(self.mediaSource.get()); + return static_cast(self.mediaSource.get()); } @end diff --git a/talk/app/webrtc/objc/RTCVideoTrack.mm b/talk/app/webrtc/objc/RTCVideoTrack.mm index 92de82ba2..d6c8ed8a4 100644 --- a/talk/app/webrtc/objc/RTCVideoTrack.mm +++ b/talk/app/webrtc/objc/RTCVideoTrack.mm @@ -35,18 +35,19 @@ #import "RTCVideoRenderer+Internal.h" @implementation RTCVideoTrack { - NSMutableArray *_rendererArray; + NSMutableArray* _rendererArray; } -- (id)initWithMediaTrack:( - talk_base::scoped_refptr)mediaTrack { +- (id)initWithMediaTrack: + (talk_base::scoped_refptr) + mediaTrack { if (self = [super initWithMediaTrack:mediaTrack]) { _rendererArray = [NSMutableArray array]; } return self; } -- (void)addRenderer:(RTCVideoRenderer *)renderer { +- (void)addRenderer:(RTCVideoRenderer*)renderer { NSAssert1(![self.renderers containsObject:renderer], @"renderers already contains object [%@]", [renderer description]); @@ -54,7 +55,7 @@ self.videoTrack->AddRenderer(renderer.videoRenderer); } -- (void)removeRenderer:(RTCVideoRenderer *)renderer { +- (void)removeRenderer:(RTCVideoRenderer*)renderer { NSUInteger index = [self.renderers indexOfObjectIdenticalTo:renderer]; if (index != NSNotFound) { [_rendererArray removeObjectAtIndex:index]; @@ -62,7 +63,7 @@ } } -- (NSArray *)renderers { +- (NSArray*)renderers { return [_rendererArray copy]; } @@ -71,7 +72,7 @@ @implementation RTCVideoTrack (Internal) - (talk_base::scoped_refptr)videoTrack { - return static_cast(self.mediaTrack.get()); + return static_cast(self.mediaTrack.get()); } @end diff --git a/talk/app/webrtc/objc/public/RTCMediaSource.h 
b/talk/app/webrtc/objc/public/RTCMediaSource.h index be3ad3291..e31817f3f 100644 --- a/talk/app/webrtc/objc/public/RTCMediaSource.h +++ b/talk/app/webrtc/objc/public/RTCMediaSource.h @@ -33,12 +33,12 @@ @interface RTCMediaSource : NSObject // The current state of the RTCMediaSource. -@property (nonatomic, assign, readonly)RTCSourceState state; +@property(nonatomic, assign, readonly) RTCSourceState state; #ifndef DOXYGEN_SHOULD_SKIP_THIS // Disallow init and don't add to documentation -- (id)init __attribute__( - (unavailable("init is not a supported initializer for this class."))); +- (id)init __attribute__(( + unavailable("init is not a supported initializer for this class."))); #endif /* DOXYGEN_SHOULD_SKIP_THIS */ @end diff --git a/talk/app/webrtc/objc/public/RTCVideoRenderer.h b/talk/app/webrtc/objc/public/RTCVideoRenderer.h index f0a83716c..8a0b74cfb 100644 --- a/talk/app/webrtc/objc/public/RTCVideoRenderer.h +++ b/talk/app/webrtc/objc/public/RTCVideoRenderer.h @@ -26,6 +26,7 @@ */ #import +#import @protocol RTCVideoRendererDelegate; struct CGRect; @@ -36,18 +37,15 @@ struct CGRect; @property(nonatomic, strong) id delegate; -// A convenience method to create a renderer and window and render frames into -// that window. -+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame; -+ (UIView*)newRenderViewWithFrame:(CGRect)frame; -// The view to the following constructor -// must be one of the views from newRenderViewWithFrame. -- (id)initWithRenderView:(UIView*)renderView; +- (id)initWithView:(UIView*)view; // Initialize the renderer. Requires a delegate which does the actual drawing // of frames. - (id)initWithDelegate:(id)delegate; +// Set an affine transform on relevant UIViews. +- (void)setTransform:(CGAffineTransform)transform; + // Starts rendering. - (void)start; // Stops rendering. It can be restarted again using the 'start' method above. 
@@ -55,8 +53,8 @@ struct CGRect; #ifndef DOXYGEN_SHOULD_SKIP_THIS // Disallow init and don't add to documentation -- (id)init __attribute__( - (unavailable("init is not a supported initializer for this class."))); +- (id)init __attribute__(( + unavailable("init is not a supported initializer for this class."))); #endif /* DOXYGEN_SHOULD_SKIP_THIS */ @end diff --git a/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m index 0604d67c5..645fb778e 100644 --- a/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m +++ b/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m @@ -35,13 +35,13 @@ @implementation RTCPeerConnectionSyncObserver { int _expectedErrors; - NSMutableArray *_expectedSignalingChanges; - NSMutableArray *_expectedAddStreamLabels; - NSMutableArray *_expectedRemoveStreamLabels; + NSMutableArray* _expectedSignalingChanges; + NSMutableArray* _expectedAddStreamLabels; + NSMutableArray* _expectedRemoveStreamLabels; int _expectedICECandidates; - NSMutableArray *_receivedICECandidates; - NSMutableArray *_expectedICEConnectionChanges; - NSMutableArray *_expectedICEGatheringChanges; + NSMutableArray* _receivedICECandidates; + NSMutableArray* _expectedICEConnectionChanges; + NSMutableArray* _expectedICEGatheringChanges; } - (id)init { @@ -58,24 +58,23 @@ return self; } -- (int)popFirstElementAsInt:(NSMutableArray *)array { +- (int)popFirstElementAsInt:(NSMutableArray*)array { NSAssert([array count] > 0, @"Empty array"); - NSNumber *boxedState = [array objectAtIndex:0]; + NSNumber* boxedState = [array objectAtIndex:0]; [array removeObjectAtIndex:0]; return [boxedState intValue]; } -- (NSString *)popFirstElementAsNSString:(NSMutableArray *)array { +- (NSString*)popFirstElementAsNSString:(NSMutableArray*)array { NSAssert([array count] > 0, @"Empty expectation array"); - NSString *string = [array objectAtIndex:0]; + NSString* string = [array objectAtIndex:0]; [array removeObjectAtIndex:0]; 
return string; } - (BOOL)areAllExpectationsSatisfied { return _expectedICECandidates <= 0 && // See comment in gotICECandidate. - _expectedErrors == 0 && - [_expectedSignalingChanges count] == 0 && + _expectedErrors == 0 && [_expectedSignalingChanges count] == 0 && [_expectedICEConnectionChanges count] == 0 && [_expectedICEGatheringChanges count] == 0 && [_expectedAddStreamLabels count] == 0 && @@ -83,7 +82,7 @@ // TODO(hughv): Test video state here too. } -- (NSArray *)releaseReceivedICECandidates { +- (NSArray*)releaseReceivedICECandidates { NSArray* ret = _receivedICECandidates; _receivedICECandidates = [NSMutableArray array]; return ret; @@ -97,11 +96,11 @@ [_expectedSignalingChanges addObject:@((int)state)]; } -- (void)expectAddStream:(NSString *)label { +- (void)expectAddStream:(NSString*)label { [_expectedAddStreamLabels addObject:label]; } -- (void)expectRemoveStream:(NSString *)label { +- (void)expectRemoveStream:(NSString*)label { [_expectedRemoveStreamLabels addObject:label]; } @@ -129,39 +128,41 @@ #pragma mark - RTCPeerConnectionDelegate methods -- (void)peerConnectionOnError:(RTCPeerConnection *)peerConnection { +- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection { NSLog(@"RTCPeerConnectionDelegate::onError"); NSAssert(--_expectedErrors >= 0, @"Unexpected error"); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection signalingStateChanged:(RTCSignalingState)stateChanged { int expectedState = [self popFirstElementAsInt:_expectedSignalingChanges]; - NSString *message = [NSString stringWithFormat: @"RTCPeerConnectionDelegate::" - @"onSignalingStateChange [%d] expected[%d]", stateChanged, expectedState]; - NSAssert(expectedState == (int) stateChanged, message); + NSString* message = + [NSString stringWithFormat:@"RTCPeerConnectionDelegate::" + @"onSignalingStateChange [%d] expected[%d]", + stateChanged, + expectedState]; + NSAssert(expectedState == (int)stateChanged, 
message); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - addedStream:(RTCMediaStream *)stream { - NSString *expectedLabel = +- (void)peerConnection:(RTCPeerConnection*)peerConnection + addedStream:(RTCMediaStream*)stream { + NSString* expectedLabel = [self popFirstElementAsNSString:_expectedAddStreamLabels]; NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected"); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - removedStream:(RTCMediaStream *)stream { - NSString *expectedLabel = +- (void)peerConnection:(RTCPeerConnection*)peerConnection + removedStream:(RTCMediaStream*)stream { + NSString* expectedLabel = [self popFirstElementAsNSString:_expectedRemoveStreamLabels]; NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected"); } -- (void)peerConnectionOnRenegotiationNeeded: - (RTCPeerConnection *)peerConnection { +- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection { } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - gotICECandidate:(RTCICECandidate *)candidate { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + gotICECandidate:(RTCICECandidate*)candidate { --_expectedICECandidates; // We don't assert expectedICECandidates >= 0 because it's hard to know // how many to expect, in general. 
We only use expectICECandidates to @@ -169,7 +170,7 @@ [_receivedICECandidates addObject:candidate]; } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection iceGatheringChanged:(RTCICEGatheringState)newState { // It's fine to get a variable number of GATHERING messages before // COMPLETE fires (depending on how long the test runs) so we don't assert @@ -181,7 +182,7 @@ NSAssert(expectedState == (int)newState, @"Empty expectation array"); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection iceConnectionChanged:(RTCICEConnectionState)newState { // See TODO(fischman) in RTCPeerConnectionTest.mm about Completed. if (newState == RTCICEConnectionCompleted) diff --git a/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm b/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm index 2acd7f969..9a0291411 100644 --- a/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm +++ b/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm @@ -48,17 +48,16 @@ @interface RTCPeerConnectionTest : NSObject // Returns whether the two sessions are of the same type. 
-+ (BOOL)isSession:(RTCSessionDescription *)session1 - ofSameTypeAsSession:(RTCSessionDescription *)session2; ++ (BOOL)isSession:(RTCSessionDescription*)session1 + ofSameTypeAsSession:(RTCSessionDescription*)session2; // Create and add tracks to pc, with the given source, label, and IDs -- (RTCMediaStream *) - addTracksToPeerConnection:(RTCPeerConnection *)pc - withFactory:(RTCPeerConnectionFactory *)factory - videoSource:(RTCVideoSource *)videoSource - streamLabel:(NSString *)streamLabel - videoTrackID:(NSString *)videoTrackID - audioTrackID:(NSString *)audioTrackID; +- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc + withFactory:(RTCPeerConnectionFactory*)factory + videoSource:(RTCVideoSource*)videoSource + streamLabel:(NSString*)streamLabel + videoTrackID:(NSString*)videoTrackID + audioTrackID:(NSString*)audioTrackID; - (void)testCompleteSession; @@ -66,46 +65,45 @@ @implementation RTCPeerConnectionTest -+ (BOOL)isSession:(RTCSessionDescription *)session1 - ofSameTypeAsSession:(RTCSessionDescription *)session2 { ++ (BOOL)isSession:(RTCSessionDescription*)session1 + ofSameTypeAsSession:(RTCSessionDescription*)session2 { return [session1.type isEqual:session2.type]; } -- (RTCMediaStream *) - addTracksToPeerConnection:(RTCPeerConnection *)pc - withFactory:(RTCPeerConnectionFactory *)factory - videoSource:(RTCVideoSource *)videoSource - streamLabel:(NSString *)streamLabel - videoTrackID:(NSString *)videoTrackID - audioTrackID:(NSString *)audioTrackID { - RTCMediaStream *localMediaStream = [factory mediaStreamWithLabel:streamLabel]; - RTCVideoTrack *videoTrack = +- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc + withFactory:(RTCPeerConnectionFactory*)factory + videoSource:(RTCVideoSource*)videoSource + streamLabel:(NSString*)streamLabel + videoTrackID:(NSString*)videoTrackID + audioTrackID:(NSString*)audioTrackID { + RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel]; + RTCVideoTrack* videoTrack = 
[factory videoTrackWithID:videoTrackID source:videoSource]; - RTCVideoRenderer *videoRenderer = + RTCVideoRenderer* videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:nil]; [videoTrack addRenderer:videoRenderer]; [localMediaStream addVideoTrack:videoTrack]; // Test that removal/re-add works. [localMediaStream removeVideoTrack:videoTrack]; [localMediaStream addVideoTrack:videoTrack]; - RTCAudioTrack *audioTrack = [factory audioTrackWithID:audioTrackID]; + RTCAudioTrack* audioTrack = [factory audioTrackWithID:audioTrackID]; [localMediaStream addAudioTrack:audioTrack]; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] init]; + RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init]; [pc addStream:localMediaStream constraints:constraints]; return localMediaStream; } - (void)testCompleteSession { - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] init]; - RTCPeerConnectionSyncObserver *offeringExpectations = + RTCPeerConnectionFactory* factory = [[RTCPeerConnectionFactory alloc] init]; + RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init]; + RTCPeerConnectionSyncObserver* offeringExpectations = [[RTCPeerConnectionSyncObserver alloc] init]; RTCPeerConnection* pcOffer = [factory peerConnectionWithICEServers:nil constraints:constraints delegate:offeringExpectations]; - RTCPeerConnectionSyncObserver *answeringExpectations = + RTCPeerConnectionSyncObserver* answeringExpectations = [[RTCPeerConnectionSyncObserver alloc] init]; RTCPeerConnection* pcAnswer = @@ -113,51 +111,48 @@ constraints:constraints delegate:answeringExpectations]; // TODO(hughv): Create video capturer - RTCVideoCapturer *capturer = nil; - RTCVideoSource *videoSource = + RTCVideoCapturer* capturer = nil; + RTCVideoSource* videoSource = [factory videoSourceWithCapturer:capturer constraints:constraints]; // Here and below, "oLMS" refers to offerer's local media 
stream, and "aLMS" // refers to the answerer's local media stream, with suffixes of "a0" and "v0" // for audio and video tracks, resp. These mirror chrome historical naming. - RTCMediaStream *oLMSUnused = - [self addTracksToPeerConnection:pcOffer - withFactory:factory - videoSource:videoSource - streamLabel:@"oLMS" - videoTrackID:@"oLMSv0" - audioTrackID:@"oLMSa0"]; - RTCSessionDescriptionSyncObserver *sdpObserver = + RTCMediaStream* oLMSUnused = [self addTracksToPeerConnection:pcOffer + withFactory:factory + videoSource:videoSource + streamLabel:@"oLMS" + videoTrackID:@"oLMSv0" + audioTrackID:@"oLMSa0"]; + RTCSessionDescriptionSyncObserver* sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init]; [pcOffer createOfferWithDelegate:sdpObserver constraints:constraints]; [sdpObserver wait]; EXPECT_TRUE(sdpObserver.success); - RTCSessionDescription *offerSDP = sdpObserver.sessionDescription; + RTCSessionDescription* offerSDP = sdpObserver.sessionDescription; EXPECT_EQ([@"offer" compare:offerSDP.type options:NSCaseInsensitiveSearch], NSOrderedSame); EXPECT_GT([offerSDP.description length], 0); sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init]; - [answeringExpectations - expectSignalingChange:RTCSignalingHaveRemoteOffer]; + [answeringExpectations expectSignalingChange:RTCSignalingHaveRemoteOffer]; [answeringExpectations expectAddStream:@"oLMS"]; [pcAnswer setRemoteDescriptionWithDelegate:sdpObserver sessionDescription:offerSDP]; [sdpObserver wait]; - RTCMediaStream *aLMSUnused = - [self addTracksToPeerConnection:pcAnswer - withFactory:factory - videoSource:videoSource - streamLabel:@"aLMS" - videoTrackID:@"aLMSv0" - audioTrackID:@"aLMSa0"]; + RTCMediaStream* aLMSUnused = [self addTracksToPeerConnection:pcAnswer + withFactory:factory + videoSource:videoSource + streamLabel:@"aLMS" + videoTrackID:@"aLMSv0" + audioTrackID:@"aLMSa0"]; sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init]; [pcAnswer createAnswerWithDelegate:sdpObserver 
constraints:constraints]; [sdpObserver wait]; EXPECT_TRUE(sdpObserver.success); - RTCSessionDescription *answerSDP = sdpObserver.sessionDescription; + RTCSessionDescription* answerSDP = sdpObserver.sessionDescription; EXPECT_EQ([@"answer" compare:answerSDP.type options:NSCaseInsensitiveSearch], NSOrderedSame); EXPECT_GT([answerSDP.description length], 0); @@ -203,12 +198,12 @@ EXPECT_TRUE([offerSDP.type isEqual:pcAnswer.remoteDescription.type]); EXPECT_TRUE([answerSDP.type isEqual:pcAnswer.localDescription.type]); - for (RTCICECandidate *candidate in - offeringExpectations.releaseReceivedICECandidates) { + for (RTCICECandidate* candidate in offeringExpectations + .releaseReceivedICECandidates) { [pcAnswer addICECandidate:candidate]; } - for (RTCICECandidate *candidate in - answeringExpectations.releaseReceivedICECandidates) { + for (RTCICECandidate* candidate in answeringExpectations + .releaseReceivedICECandidates) { [pcOffer addICECandidate:candidate]; } @@ -231,7 +226,7 @@ // a TestBase since it's not. TEST(RTCPeerConnectionTest, SessionTest) { talk_base::InitializeSSL(); - RTCPeerConnectionTest *pcTest = [[RTCPeerConnectionTest alloc] init]; + RTCPeerConnectionTest* pcTest = [[RTCPeerConnectionTest alloc] init]; [pcTest testCompleteSession]; talk_base::CleanupSSL(); } diff --git a/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m index 85a4482b8..75a4671b1 100644 --- a/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m +++ b/talk/app/webrtc/objctests/RTCSessionDescriptionSyncObserver.m @@ -33,10 +33,10 @@ #import "RTCSessionDescription.h" -@interface RTCSessionDescriptionSyncObserver() +@interface RTCSessionDescriptionSyncObserver () // CondVar used to wait for, and signal arrival of, an SDP-related callback. 
-@property(nonatomic, strong) NSCondition *condition; +@property(nonatomic, strong) NSCondition* condition; // Whether an SDP-related callback has fired; cleared before wait returns. @property(atomic, assign) BOOL signaled; @@ -72,9 +72,9 @@ } #pragma mark - RTCSessionDescriptonDelegate methods -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didCreateSessionDescription:(RTCSessionDescription *)sdp - error:(NSError *)error { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didCreateSessionDescription:(RTCSessionDescription*)sdp + error:(NSError*)error { [self.condition lock]; if (error) { self.success = NO; @@ -87,8 +87,8 @@ [self.condition unlock]; } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didSetSessionDescriptionWithError:(NSError *)error { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didSetSessionDescriptionWithError:(NSError*)error { [self.condition lock]; if (error) { self.success = NO; diff --git a/talk/app/webrtc/objctests/mac/main.mm b/talk/app/webrtc/objctests/mac/main.mm index 3fb24f37b..c44b97706 100644 --- a/talk/app/webrtc/objctests/mac/main.mm +++ b/talk/app/webrtc/objctests/mac/main.mm @@ -27,7 +27,7 @@ #include "talk/base/gunit.h" -int main(int argc, char *argv[]) { +int main(int argc, char* argv[]) { testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); } diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h index 410ead6a9..41a795eba 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.h @@ -47,12 +47,20 @@ // for the registered handler to be called with received messages. 
@interface APPRTCAppClient : NSObject -@property(nonatomic, assign) id ICEServerDelegate; -@property(nonatomic, assign) id messageHandler; +@property(nonatomic, weak, readonly) id ICEServerDelegate; +@property(nonatomic, weak, readonly) id messageHandler; @property(nonatomic, assign) BOOL initiator; -@property(nonatomic, strong) RTCMediaConstraints* videoConstraints; +@property(nonatomic, copy, readonly) RTCMediaConstraints* videoConstraints; -- (void)connectToRoom:(NSURL *)room; -- (void)sendData:(NSData *)data; +- (id)initWithICEServerDelegate:(id)delegate + messageHandler:(id)handler; +- (void)connectToRoom:(NSURL*)room; +- (void)sendData:(NSData*)data; + +#ifndef DOXYGEN_SHOULD_SKIP_THIS +// Disallow init and don't add to documentation +- (id)init __attribute__(( + unavailable("init is not a supported initializer for this class."))); +#endif /* DOXYGEN_SHOULD_SKIP_THIS */ @end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m index 5b035e3c3..9ac83ffc1 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m @@ -37,13 +37,13 @@ @interface APPRTCAppClient () @property(nonatomic, strong) dispatch_queue_t backgroundQueue; -@property(nonatomic, copy) NSString *baseURL; -@property(nonatomic, strong) GAEChannelClient *gaeChannel; -@property(nonatomic, copy) NSString *postMessageUrl; -@property(nonatomic, copy) NSString *pcConfig; -@property(nonatomic, strong) NSMutableString *roomHtml; -@property(atomic, strong) NSMutableArray *sendQueue; -@property(nonatomic, copy) NSString *token; +@property(nonatomic, copy) NSString* baseURL; +@property(nonatomic, strong) GAEChannelClient* gaeChannel; +@property(nonatomic, copy) NSString* postMessageUrl; +@property(nonatomic, copy) NSString* pcConfig; +@property(nonatomic, strong) NSMutableString* roomHtml; +@property(atomic, strong) NSMutableArray* sendQueue; +@property(nonatomic, copy) NSString* token; 
@property(nonatomic, assign) BOOL verboseLogging; @@ -51,23 +51,11 @@ @implementation APPRTCAppClient -@synthesize ICEServerDelegate = _ICEServerDelegate; -@synthesize messageHandler = _messageHandler; - -@synthesize backgroundQueue = _backgroundQueue; -@synthesize baseURL = _baseURL; -@synthesize gaeChannel = _gaeChannel; -@synthesize postMessageUrl = _postMessageUrl; -@synthesize pcConfig = _pcConfig; -@synthesize roomHtml = _roomHtml; -@synthesize sendQueue = _sendQueue; -@synthesize token = _token; -@synthesize verboseLogging = _verboseLogging; -@synthesize initiator = _initiator; -@synthesize videoConstraints = _videoConstraints; - -- (id)init { +- (id)initWithICEServerDelegate:(id)delegate + messageHandler:(id)handler { if (self = [super init]) { + _ICEServerDelegate = delegate; + _messageHandler = handler; _backgroundQueue = dispatch_queue_create("RTCBackgroundQueue", NULL); _sendQueue = [NSMutableArray array]; // Uncomment to see Request/Response logging. @@ -78,12 +66,12 @@ #pragma mark - Public methods -- (void)connectToRoom:(NSURL *)url { - NSURLRequest *request = [self getRequestFromUrl:url]; +- (void)connectToRoom:(NSURL*)url { + NSURLRequest* request = [self getRequestFromUrl:url]; [NSURLConnection connectionWithRequest:request delegate:self]; } -- (void)sendData:(NSData *)data { +- (void)sendData:(NSData*)data { @synchronized(self) { [self maybeLogMessage:@"Send message"]; [self.sendQueue addObject:[data copy]]; @@ -93,49 +81,53 @@ #pragma mark - Internal methods -- (NSString*)findVar:(NSString*)name - strippingQuotes:(BOOL)strippingQuotes { +- (NSString*)findVar:(NSString*)name strippingQuotes:(BOOL)strippingQuotes { NSError* error; NSString* pattern = [NSString stringWithFormat:@".*\n *var %@ = ([^\n]*);\n.*", name]; - NSRegularExpression *regexp = + NSRegularExpression* regexp = [NSRegularExpression regularExpressionWithPattern:pattern options:0 error:&error]; - NSAssert(!error, @"Unexpected error compiling regex: ", + NSAssert(!error, + 
@"Unexpected error compiling regex: ", error.localizedDescription); NSRange fullRange = NSMakeRange(0, [self.roomHtml length]); - NSArray *matches = + NSArray* matches = [regexp matchesInString:self.roomHtml options:0 range:fullRange]; if ([matches count] != 1) { [self showMessage:[NSString stringWithFormat:@"%d matches for %@ in %@", - [matches count], name, self.roomHtml]]; + [matches count], + name, + self.roomHtml]]; return nil; } NSRange matchRange = [matches[0] rangeAtIndex:1]; NSString* value = [self.roomHtml substringWithRange:matchRange]; if (strippingQuotes) { NSAssert([value length] > 2, - @"Can't strip quotes from short string: [%@]", value); + @"Can't strip quotes from short string: [%@]", + value); NSAssert(([value characterAtIndex:0] == '\'' && [value characterAtIndex:[value length] - 1] == '\''), - @"Can't strip quotes from unquoted string: [%@]", value); + @"Can't strip quotes from unquoted string: [%@]", + value); value = [value substringWithRange:NSMakeRange(1, [value length] - 2)]; } return value; } -- (NSURLRequest *)getRequestFromUrl:(NSURL *)url { +- (NSURLRequest*)getRequestFromUrl:(NSURL*)url { self.roomHtml = [NSMutableString stringWithCapacity:20000]; - NSString *path = + NSString* path = [NSString stringWithFormat:@"https:%@", [url resourceSpecifier]]; - NSURLRequest *request = + NSURLRequest* request = [NSURLRequest requestWithURL:[NSURL URLWithString:path]]; return request; } -- (void)maybeLogMessage:(NSString *)message { +- (void)maybeLogMessage:(NSString*)message { if (self.verboseLogging) { NSLog(@"%@", message); } @@ -143,33 +135,33 @@ - (void)requestQueueDrainInBackground { dispatch_async(self.backgroundQueue, ^(void) { - // TODO(hughv): This can block the UI thread. Fix. - @synchronized(self) { - if ([self.postMessageUrl length] < 1) { - return; + // TODO(hughv): This can block the UI thread. Fix. 
+ @synchronized(self) { + if ([self.postMessageUrl length] < 1) { + return; + } + for (NSData* data in self.sendQueue) { + NSString* url = + [NSString stringWithFormat:@"%@/%@", + self.baseURL, self.postMessageUrl]; + [self sendData:data withUrl:url]; + } + [self.sendQueue removeAllObjects]; } - for (NSData *data in self.sendQueue) { - NSString *url = [NSString stringWithFormat:@"%@/%@", - self.baseURL, - self.postMessageUrl]; - [self sendData:data withUrl:url]; - } - [self.sendQueue removeAllObjects]; - } - }); + }); } -- (void)sendData:(NSData *)data withUrl:(NSString *)url { - NSMutableURLRequest *request = +- (void)sendData:(NSData*)data withUrl:(NSString*)url { + NSMutableURLRequest* request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:url]]; request.HTTPMethod = @"POST"; [request setHTTPBody:data]; - NSURLResponse *response; - NSError *error; - NSData *responseData = [NSURLConnection sendSynchronousRequest:request + NSURLResponse* response; + NSError* error; + NSData* responseData = [NSURLConnection sendSynchronousRequest:request returningResponse:&response error:&error]; - NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response; + NSHTTPURLResponse* httpResponse = (NSHTTPURLResponse*)response; int status = [httpResponse statusCode]; NSAssert(status == 200, @"Bad response [%d] to message: %@\n\n%@", @@ -178,9 +170,9 @@ [NSString stringWithUTF8String:[responseData bytes]]); } -- (void)showMessage:(NSString *)message { +- (void)showMessage:(NSString*)message { NSLog(@"%@", message); - UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Unable to join" + UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:@"Unable to join" message:message delegate:nil cancelButtonTitle:@"OK" @@ -188,96 +180,97 @@ [alertView show]; } -- (void)updateICEServers:(NSMutableArray *)ICEServers - withTurnServer:(NSString *)turnServerUrl { +- (void)updateICEServers:(NSMutableArray*)ICEServers + withTurnServer:(NSString*)turnServerUrl { if 
([turnServerUrl length] < 1) { [self.ICEServerDelegate onICEServers:ICEServers]; return; } dispatch_async(self.backgroundQueue, ^(void) { - NSMutableURLRequest *request = [NSMutableURLRequest - requestWithURL:[NSURL URLWithString:turnServerUrl]]; - [request addValue:@"Mozilla/5.0" forHTTPHeaderField:@"user-agent"]; - [request addValue:@"https://apprtc.appspot.com" - forHTTPHeaderField:@"origin"]; - NSURLResponse *response; - NSError *error; - NSData *responseData = [NSURLConnection sendSynchronousRequest:request - returningResponse:&response - error:&error]; - if (!error) { - NSDictionary *json = [NSJSONSerialization JSONObjectWithData:responseData - options:0 - error:&error]; - NSAssert(!error, @"Unable to parse. %@", error.localizedDescription); - NSString *username = json[@"username"]; - NSString *password = json[@"password"]; - NSArray* uris = json[@"uris"]; - for (int i = 0; i < [uris count]; ++i) { - NSString *turnServer = [uris objectAtIndex:i]; - RTCICEServer *ICEServer = - [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:turnServer] - username:username - password:password]; - NSLog(@"Added ICE Server: %@", ICEServer); - [ICEServers addObject:ICEServer]; + NSMutableURLRequest* request = [NSMutableURLRequest + requestWithURL:[NSURL URLWithString:turnServerUrl]]; + [request addValue:@"Mozilla/5.0" forHTTPHeaderField:@"user-agent"]; + [request addValue:@"https://apprtc.appspot.com" + forHTTPHeaderField:@"origin"]; + NSURLResponse* response; + NSError* error; + NSData* responseData = [NSURLConnection sendSynchronousRequest:request + returningResponse:&response + error:&error]; + if (!error) { + NSDictionary* json = + [NSJSONSerialization JSONObjectWithData:responseData + options:0 + error:&error]; + NSAssert(!error, @"Unable to parse. 
%@", error.localizedDescription); + NSString* username = json[@"username"]; + NSString* password = json[@"password"]; + NSArray* uris = json[@"uris"]; + for (int i = 0; i < [uris count]; ++i) { + NSString* turnServer = [uris objectAtIndex:i]; + RTCICEServer* ICEServer = + [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:turnServer] + username:username + password:password]; + NSLog(@"Added ICE Server: %@", ICEServer); + [ICEServers addObject:ICEServer]; + } + } else { + NSLog(@"Unable to get TURN server. Error: %@", error.description); } - } else { - NSLog(@"Unable to get TURN server. Error: %@", error.description); - } - dispatch_async(dispatch_get_main_queue(), ^(void) { - [self.ICEServerDelegate onICEServers:ICEServers]; - }); + dispatch_async(dispatch_get_main_queue(), ^(void) { + [self.ICEServerDelegate onICEServers:ICEServers]; + }); }); } #pragma mark - NSURLConnectionDataDelegate methods -- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data { - NSString *roomHtml = [NSString stringWithUTF8String:[data bytes]]; - [self maybeLogMessage: - [NSString stringWithFormat:@"Received %d chars", [roomHtml length]]]; +- (void)connection:(NSURLConnection*)connection didReceiveData:(NSData*)data { + NSString* roomHtml = [NSString stringWithUTF8String:[data bytes]]; + [self maybeLogMessage:[NSString stringWithFormat:@"Received %d chars", + [roomHtml length]]]; [self.roomHtml appendString:roomHtml]; } -- (void)connection:(NSURLConnection *)connection - didReceiveResponse:(NSURLResponse *)response { - NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response; +- (void)connection:(NSURLConnection*)connection + didReceiveResponse:(NSURLResponse*)response { + NSHTTPURLResponse* httpResponse = (NSHTTPURLResponse*)response; int statusCode = [httpResponse statusCode]; - [self maybeLogMessage: + [self + maybeLogMessage: [NSString stringWithFormat: - @"Response received\nURL\n%@\nStatus [%d]\nHeaders\n%@", - [httpResponse URL], - statusCode, - 
[httpResponse allHeaderFields]]]; + @"Response received\nURL\n%@\nStatus [%d]\nHeaders\n%@", + [httpResponse URL], + statusCode, + [httpResponse allHeaderFields]]]; NSAssert(statusCode == 200, @"Invalid response of %d received.", statusCode); } -- (void)connectionDidFinishLoading:(NSURLConnection *)connection { +- (void)connectionDidFinishLoading:(NSURLConnection*)connection { [self maybeLogMessage:[NSString stringWithFormat:@"finished loading %d chars", - [self.roomHtml length]]]; + [self.roomHtml length]]]; NSRegularExpression* fullRegex = - [NSRegularExpression regularExpressionWithPattern:@"room is full" - options:0 - error:nil]; + [NSRegularExpression regularExpressionWithPattern:@"room is full" + options:0 + error:nil]; if ([fullRegex numberOfMatchesInString:self.roomHtml options:0 range:NSMakeRange(0, [self.roomHtml length])]) { [self showMessage:@"Room full"]; - APPRTCAppDelegate *ad = - (APPRTCAppDelegate *)[[UIApplication sharedApplication] delegate]; + APPRTCAppDelegate* ad = + (APPRTCAppDelegate*)[[UIApplication sharedApplication] delegate]; [ad closeVideoUI]; return; } - - NSString *fullUrl = [[[connection originalRequest] URL] absoluteString]; + NSString* fullUrl = [[[connection originalRequest] URL] absoluteString]; NSRange queryRange = [fullUrl rangeOfString:@"?"]; self.baseURL = [fullUrl substringToIndex:queryRange.location]; - [self maybeLogMessage: - [NSString stringWithFormat:@"Base URL: %@", self.baseURL]]; + [self maybeLogMessage:[NSString + stringWithFormat:@"Base URL: %@", self.baseURL]]; self.initiator = [[self findVar:@"initiator" strippingQuotes:NO] boolValue]; self.token = [self findVar:@"channelToken" strippingQuotes:YES]; @@ -290,45 +283,45 @@ if (!roomKey || !me) return; self.postMessageUrl = - [NSString stringWithFormat:@"/message?r=%@&u=%@", roomKey, me]; + [NSString stringWithFormat:@"/message?r=%@&u=%@", roomKey, me]; [self maybeLogMessage:[NSString stringWithFormat:@"POST message URL: %@", - self.postMessageUrl]]; + 
self.postMessageUrl]]; NSString* pcConfig = [self findVar:@"pcConfig" strippingQuotes:NO]; if (!pcConfig) return; - [self maybeLogMessage: - [NSString stringWithFormat:@"PC Config JSON: %@", pcConfig]]; + [self maybeLogMessage:[NSString + stringWithFormat:@"PC Config JSON: %@", pcConfig]]; - NSString *turnServerUrl = [self findVar:@"turnUrl" strippingQuotes:YES]; + NSString* turnServerUrl = [self findVar:@"turnUrl" strippingQuotes:YES]; if (turnServerUrl) { - [self maybeLogMessage: - [NSString stringWithFormat:@"TURN server request URL: %@", - turnServerUrl]]; + [self maybeLogMessage:[NSString + stringWithFormat:@"TURN server request URL: %@", + turnServerUrl]]; } - NSError *error; - NSData *pcData = [pcConfig dataUsingEncoding:NSUTF8StringEncoding]; - NSDictionary *json = + NSError* error; + NSData* pcData = [pcConfig dataUsingEncoding:NSUTF8StringEncoding]; + NSDictionary* json = [NSJSONSerialization JSONObjectWithData:pcData options:0 error:&error]; NSAssert(!error, @"Unable to parse. 
%@", error.localizedDescription); - NSArray *servers = [json objectForKey:@"iceServers"]; - NSMutableArray *ICEServers = [NSMutableArray array]; - for (NSDictionary *server in servers) { - NSString *url = [server objectForKey:@"urls"]; - NSString *username = json[@"username"]; - NSString *credential = [server objectForKey:@"credential"]; + NSArray* servers = [json objectForKey:@"iceServers"]; + NSMutableArray* ICEServers = [NSMutableArray array]; + for (NSDictionary* server in servers) { + NSString* url = [server objectForKey:@"urls"]; + NSString* username = json[@"username"]; + NSString* credential = [server objectForKey:@"credential"]; if (!username) { username = @""; } if (!credential) { credential = @""; } - [self maybeLogMessage: - [NSString stringWithFormat:@"url [%@] - credential [%@]", - url, - credential]]; - RTCICEServer *ICEServer = + [self maybeLogMessage:[NSString + stringWithFormat:@"url [%@] - credential [%@]", + url, + credential]]; + RTCICEServer* ICEServer = [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:url] username:username password:credential]; @@ -340,18 +333,19 @@ NSString* mc = [self findVar:@"mediaConstraints" strippingQuotes:NO]; if (mc) { error = nil; - NSData *mcData = [mc dataUsingEncoding:NSUTF8StringEncoding]; + NSData* mcData = [mc dataUsingEncoding:NSUTF8StringEncoding]; json = - [NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error]; + [NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error]; NSAssert(!error, @"Unable to parse. 
%@", error.localizedDescription); if ([[json objectForKey:@"video"] boolValue]) { - self.videoConstraints = [[RTCMediaConstraints alloc] init]; + _videoConstraints = [[RTCMediaConstraints alloc] init]; } } - [self maybeLogMessage: - [NSString stringWithFormat:@"About to open GAE with token: %@", - self.token]]; + [self + maybeLogMessage:[NSString + stringWithFormat:@"About to open GAE with token: %@", + self.token]]; self.gaeChannel = [[GAEChannelClient alloc] initWithToken:self.token delegate:self.messageHandler]; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h index 22a0225b5..517cade8d 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h @@ -34,9 +34,9 @@ // Used to send a message to an apprtc.appspot.com "room". @protocol APPRTCSendMessage -- (void)sendData:(NSData *)data; +- (void)sendData:(NSData*)data; // Logging helper. -- (void)displayLogMessage:(NSString *)message; +- (void)displayLogMessage:(NSString*)message; @end @class APPRTCViewController; @@ -51,8 +51,8 @@ RTCSessionDescriptonDelegate, UIApplicationDelegate> -@property (strong, nonatomic) UIWindow *window; -@property (strong, nonatomic) APPRTCViewController *viewController; +@property(strong, nonatomic) UIWindow* window; +@property(strong, nonatomic) APPRTCViewController* viewController; - (void)closeVideoUI; diff --git a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m index 681876eae..ee3660451 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m @@ -42,13 +42,13 @@ #import "RTCVideoRenderer.h" #import "RTCVideoCapturer.h" #import "RTCVideoTrack.h" -#import "VideoView.h" +#import "APPRTCVideoView.h" @interface PCObserver : NSObject - (id)initWithDelegate:(id)delegate; -@property(nonatomic, strong) VideoView *videoView; +@property(nonatomic, strong) 
APPRTCVideoView* videoView; @end @@ -56,8 +56,6 @@ id _delegate; } -@synthesize videoView = _videoView; - - (id)initWithDelegate:(id)delegate { if (self = [super init]) { _delegate = delegate; @@ -65,70 +63,71 @@ return self; } -- (void)peerConnectionOnError:(RTCPeerConnection *)peerConnection { +- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection { NSLog(@"PCO onError."); NSAssert(NO, @"PeerConnection failed."); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection signalingStateChanged:(RTCSignalingState)stateChanged { NSLog(@"PCO onSignalingStateChange: %d", stateChanged); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - addedStream:(RTCMediaStream *)stream { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + addedStream:(RTCMediaStream*)stream { NSLog(@"PCO onAddStream."); dispatch_async(dispatch_get_main_queue(), ^(void) { - NSAssert([stream.audioTracks count] >= 1, - @"Expected at least 1 audio stream"); - NSAssert([stream.videoTracks count] <= 1, - @"Expected at most 1 video stream"); - if ([stream.videoTracks count] != 0) { - [[self videoView] - renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]]; - } + NSAssert([stream.audioTracks count] >= 1, + @"Expected at least 1 audio stream"); + NSAssert([stream.videoTracks count] <= 1, + @"Expected at most 1 video stream"); + if ([stream.videoTracks count] != 0) { + [self.videoView + renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]]; + } }); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - removedStream:(RTCMediaStream *)stream { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + removedStream:(RTCMediaStream*)stream { NSLog(@"PCO onRemoveStream."); } -- (void) - peerConnectionOnRenegotiationNeeded:(RTCPeerConnection *)peerConnection { +- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection { NSLog(@"PCO onRenegotiationNeeded."); // 
TODO(hughv): Handle this. } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - gotICECandidate:(RTCICECandidate *)candidate { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + gotICECandidate:(RTCICECandidate*)candidate { NSLog(@"PCO onICECandidate.\n Mid[%@] Index[%d] Sdp[%@]", candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp); - NSDictionary *json = - @{ @"type" : @"candidate", - @"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex], - @"id" : candidate.sdpMid, - @"candidate" : candidate.sdp }; - NSError *error; - NSData *data = + NSDictionary* json = @{ + @"type" : @"candidate", + @"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex], + @"id" : candidate.sdpMid, + @"candidate" : candidate.sdp + }; + NSError* error; + NSData* data = [NSJSONSerialization dataWithJSONObject:json options:0 error:&error]; if (!error) { [_delegate sendData:data]; } else { - NSAssert(NO, @"Unable to serialize JSON object with error: %@", + NSAssert(NO, + @"Unable to serialize JSON object with error: %@", error.localizedDescription); } } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection iceGatheringChanged:(RTCICEGatheringState)newState { NSLog(@"PCO onIceGatheringChange. %d", newState); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTCPeerConnection*)peerConnection iceConnectionChanged:(RTCICEConnectionState)newState { NSLog(@"PCO onIceConnectionChange. 
%d", newState); if (newState == RTCICEConnectionConnected) @@ -136,7 +135,7 @@ NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!"); } -- (void)displayLogMessage:(NSString *)message { +- (void)displayLogMessage:(NSString*)message { [_delegate displayLogMessage:message]; } @@ -144,28 +143,20 @@ @interface APPRTCAppDelegate () -@property(nonatomic, strong) APPRTCAppClient *client; -@property(nonatomic, strong) PCObserver *pcObserver; -@property(nonatomic, strong) RTCPeerConnection *peerConnection; -@property(nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property(nonatomic, strong) NSMutableArray *queuedRemoteCandidates; +@property(nonatomic, strong) APPRTCAppClient* client; +@property(nonatomic, strong) PCObserver* pcObserver; +@property(nonatomic, strong) RTCPeerConnection* peerConnection; +@property(nonatomic, strong) RTCPeerConnectionFactory* peerConnectionFactory; +@property(nonatomic, strong) NSMutableArray* queuedRemoteCandidates; @end @implementation APPRTCAppDelegate -@synthesize window = _window; -@synthesize viewController = _viewController; -@synthesize client = _client; -@synthesize pcObserver = _pcObserver; -@synthesize peerConnection = _peerConnection; -@synthesize peerConnectionFactory = _peerConnectionFactory; -@synthesize queuedRemoteCandidates = _queuedRemoteCandidates; - #pragma mark - UIApplicationDelegate methods -- (BOOL)application:(UIApplication *)application - didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { +- (BOOL)application:(UIApplication*)application + didFinishLaunchingWithOptions:(NSDictionary*)launchOptions { [RTCPeerConnectionFactory initializeSSL]; self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; self.viewController = @@ -176,100 +167,97 @@ return YES; } -- (void)applicationWillResignActive:(UIApplication *)application { +- (void)applicationWillResignActive:(UIApplication*)application { [self displayLogMessage:@"Application lost focus, connection 
broken."]; [self closeVideoUI]; } -- (void)applicationDidEnterBackground:(UIApplication *)application { +- (void)applicationDidEnterBackground:(UIApplication*)application { } -- (void)applicationWillEnterForeground:(UIApplication *)application { +- (void)applicationWillEnterForeground:(UIApplication*)application { } -- (void)applicationDidBecomeActive:(UIApplication *)application { +- (void)applicationDidBecomeActive:(UIApplication*)application { } -- (void)applicationWillTerminate:(UIApplication *)application { +- (void)applicationWillTerminate:(UIApplication*)application { } -- (BOOL)application:(UIApplication *)application - openURL:(NSURL *)url - sourceApplication:(NSString *)sourceApplication +- (BOOL)application:(UIApplication*)application + openURL:(NSURL*)url + sourceApplication:(NSString*)sourceApplication annotation:(id)annotation { if (self.client) { return NO; } - self.client = [[APPRTCAppClient alloc] init]; - self.client.ICEServerDelegate = self; - self.client.messageHandler = self; + self.client = [[APPRTCAppClient alloc] initWithICEServerDelegate:self + messageHandler:self]; [self.client connectToRoom:url]; return YES; } -- (void)displayLogMessage:(NSString *)message { +- (void)displayLogMessage:(NSString*)message { NSLog(@"%@", message); [self.viewController displayText:message]; } #pragma mark - RTCSendMessage method -- (void)sendData:(NSData *)data { +- (void)sendData:(NSData*)data { [self.client sendData:data]; } #pragma mark - ICEServerDelegate method -- (void)onICEServers:(NSArray *)servers { +- (void)onICEServers:(NSArray*)servers { self.queuedRemoteCandidates = [NSMutableArray array]; self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init]; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] - initWithMandatoryConstraints: - @[[[RTCPair alloc] - initWithKey:@"OfferToReceiveAudio" - value:@"true"], - [[RTCPair alloc] - initWithKey:@"OfferToReceiveVideo" - value:@"true"]] - optionalConstraints: - @[[[RTCPair alloc] - 
initWithKey:@"internalSctpDataChannels" - value:@"true"], - [[RTCPair alloc] - initWithKey:@"DtlsSrtpKeyAgreement" - value:@"true"]]]; + RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] + initWithMandatoryConstraints: + @[ + [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"], + [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"] + ] + optionalConstraints: + @[ + [[RTCPair alloc] initWithKey:@"internalSctpDataChannels" + value:@"true"], + [[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement" + value:@"true"] + ]]; self.pcObserver = [[PCObserver alloc] initWithDelegate:self]; self.peerConnection = [self.peerConnectionFactory peerConnectionWithICEServers:servers constraints:constraints delegate:self.pcObserver]; - RTCMediaStream *lms = + RTCMediaStream* lms = [self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"]; - NSString *cameraID = nil; - for (AVCaptureDevice *captureDevice in - [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] ) { - if (captureDevice.position == AVCaptureDevicePositionFront) { - cameraID = [captureDevice localizedName]; - break; - } + NSString* cameraID = nil; + for (AVCaptureDevice* captureDevice in + [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { + if (captureDevice.position == AVCaptureDevicePositionFront) { + cameraID = [captureDevice localizedName]; + break; + } } NSAssert(cameraID, @"Unable to get the front camera id"); - RTCVideoCapturer *capturer = - [RTCVideoCapturer capturerWithDeviceName:cameraID]; - RTCVideoSource *videoSource = - [self.peerConnectionFactory - videoSourceWithCapturer:capturer constraints:self.client.videoConstraints]; - RTCVideoTrack *localVideoTrack = - [self.peerConnectionFactory - videoTrackWithID:@"ARDAMSv0" source:videoSource]; + RTCVideoCapturer* capturer = + [RTCVideoCapturer capturerWithDeviceName:cameraID]; + RTCVideoSource* videoSource = [self.peerConnectionFactory + videoSourceWithCapturer:capturer + 
constraints:self.client.videoConstraints]; + RTCVideoTrack* localVideoTrack = + [self.peerConnectionFactory videoTrackWithID:@"ARDAMSv0" + source:videoSource]; if (localVideoTrack) { - [lms addVideoTrack:localVideoTrack]; + [lms addVideoTrack:localVideoTrack]; } [self.viewController.localVideoView - renderVideoTrackInterface:localVideoTrack]; + renderVideoTrackInterface:localVideoTrack]; self.pcObserver.videoView = self.viewController.remoteVideoView; @@ -282,26 +270,26 @@ - (void)onOpen { if (!self.client.initiator) { - [self displayLogMessage:@"Callee; waiting for remote offer"]; - return; + [self displayLogMessage:@"Callee; waiting for remote offer"]; + return; } [self displayLogMessage:@"GAE onOpen - create offer."]; - RTCPair *audio = + RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"]; - RTCPair *video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" - value:@"true"]; - NSArray *mandatory = @[ audio , video ]; - RTCMediaConstraints *constraints = + RTCPair* video = + [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"]; + NSArray* mandatory = @[ audio, video ]; + RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory optionalConstraints:nil]; [self.peerConnection createOfferWithDelegate:self constraints:constraints]; [self displayLogMessage:@"PC - createOffer."]; } -- (void)onMessage:(NSString *)data { - NSString *message = [self unHTMLifyString:data]; - NSError *error; - NSDictionary *objects = [NSJSONSerialization +- (void)onMessage:(NSString*)data { + NSString* message = [self unHTMLifyString:data]; + NSError* error; + NSDictionary* objects = [NSJSONSerialization JSONObjectWithData:[message dataUsingEncoding:NSUTF8StringEncoding] options:0 error:&error]; @@ -309,14 +297,14 @@ @"%@", [NSString stringWithFormat:@"Error: %@", error.description]); NSAssert([objects count] > 0, @"Invalid JSON object"); - NSString *value = [objects objectForKey:@"type"]; - [self 
displayLogMessage: - [NSString stringWithFormat:@"GAE onMessage type - %@", value]]; + NSString* value = [objects objectForKey:@"type"]; + [self displayLogMessage:[NSString stringWithFormat:@"GAE onMessage type - %@", + value]]; if ([value compare:@"candidate"] == NSOrderedSame) { - NSString *mid = [objects objectForKey:@"id"]; - NSNumber *sdpLineIndex = [objects objectForKey:@"label"]; - NSString *sdp = [objects objectForKey:@"candidate"]; - RTCICECandidate *candidate = + NSString* mid = [objects objectForKey:@"id"]; + NSNumber* sdpLineIndex = [objects objectForKey:@"label"]; + NSString* sdp = [objects objectForKey:@"candidate"]; + RTCICECandidate* candidate = [[RTCICECandidate alloc] initWithMid:mid index:sdpLineIndex.intValue sdp:sdp]; @@ -327,20 +315,21 @@ } } else if (([value compare:@"offer"] == NSOrderedSame) || ([value compare:@"answer"] == NSOrderedSame)) { - NSString *sdpString = [objects objectForKey:@"sdp"]; - RTCSessionDescription *sdp = [[RTCSessionDescription alloc] - initWithType:value sdp:[APPRTCAppDelegate preferISAC:sdpString]]; + NSString* sdpString = [objects objectForKey:@"sdp"]; + RTCSessionDescription* sdp = [[RTCSessionDescription alloc] + initWithType:value + sdp:[APPRTCAppDelegate preferISAC:sdpString]]; [self.peerConnection setRemoteDescriptionWithDelegate:self sessionDescription:sdp]; [self displayLogMessage:@"PC - setRemoteDescription."]; } else if ([value compare:@"bye"] == NSOrderedSame) { [self closeVideoUI]; - UIAlertView *alertView = - [[UIAlertView alloc] initWithTitle:@"Remote end hung up" - message:@"dropping PeerConnection" - delegate:nil - cancelButtonTitle:@"OK" - otherButtonTitles:nil]; + UIAlertView* alertView = + [[UIAlertView alloc] initWithTitle:@"Remote end hung up" + message:@"dropping PeerConnection" + delegate:nil + cancelButtonTitle:@"OK" + otherButtonTitles:nil]; [alertView show]; } else { NSAssert(NO, @"Invalid message: %@", data); @@ -352,9 +341,9 @@ [self closeVideoUI]; } -- (void)onError:(int)code 
withDescription:(NSString *)description { - [self displayLogMessage: - [NSString stringWithFormat:@"GAE onError: %@", description]]; +- (void)onError:(int)code withDescription:(NSString*)description { + [self displayLogMessage:[NSString stringWithFormat:@"GAE onError: %@", + description]]; [self closeVideoUI]; } @@ -362,19 +351,19 @@ // Match |pattern| to |string| and return the first group of the first // match, or nil if no match was found. -+ (NSString *)firstMatch:(NSRegularExpression *)pattern - withString:(NSString *)string { ++ (NSString*)firstMatch:(NSRegularExpression*)pattern + withString:(NSString*)string { NSTextCheckingResult* result = - [pattern firstMatchInString:string - options:0 - range:NSMakeRange(0, [string length])]; + [pattern firstMatchInString:string + options:0 + range:NSMakeRange(0, [string length])]; if (!result) return nil; return [string substringWithRange:[result rangeAtIndex:1]]; } // Mangle |origSDP| to prefer the ISAC/16k audio codec. -+ (NSString *)preferISAC:(NSString *)origSDP { ++ (NSString*)preferISAC:(NSString*)origSDP { int mLineIndex = -1; NSString* isac16kRtpMap = nil; NSArray* lines = [origSDP componentsSeparatedByString:@"\n"]; @@ -411,8 +400,8 @@ [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]]; [newMLine addObject:isac16kRtpMap]; for (; origPartIndex < [origMLineParts count]; ++origPartIndex) { - if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]] - != NSOrderedSame) { + if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]] != + NSOrderedSame) { [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex]]; } } @@ -423,9 +412,9 @@ return [newLines componentsJoinedByString:@"\n"]; } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didCreateSessionDescription:(RTCSessionDescription *)origSdp - error:(NSError *)error { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didCreateSessionDescription:(RTCSessionDescription*)origSdp + 
error:(NSError*)error { if (error) { [self displayLogMessage:@"SDP onFailure."]; NSAssert(NO, error.description); @@ -433,27 +422,26 @@ } [self displayLogMessage:@"SDP onSuccess(SDP) - set local description."]; - RTCSessionDescription* sdp = - [[RTCSessionDescription alloc] - initWithType:origSdp.type - sdp:[APPRTCAppDelegate preferISAC:origSdp.description]]; + RTCSessionDescription* sdp = [[RTCSessionDescription alloc] + initWithType:origSdp.type + sdp:[APPRTCAppDelegate preferISAC:origSdp.description]]; [self.peerConnection setLocalDescriptionWithDelegate:self sessionDescription:sdp]; [self displayLogMessage:@"PC setLocalDescription."]; dispatch_async(dispatch_get_main_queue(), ^(void) { - NSDictionary *json = @{ @"type" : sdp.type, @"sdp" : sdp.description }; - NSError *error; - NSData *data = - [NSJSONSerialization dataWithJSONObject:json options:0 error:&error]; - NSAssert(!error, - @"%@", - [NSString stringWithFormat:@"Error: %@", error.description]); - [self sendData:data]; + NSDictionary* json = @{@"type" : sdp.type, @"sdp" : sdp.description}; + NSError* error; + NSData* data = + [NSJSONSerialization dataWithJSONObject:json options:0 error:&error]; + NSAssert(!error, + @"%@", + [NSString stringWithFormat:@"Error: %@", error.description]); + [self sendData:data]; }); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didSetSessionDescriptionWithError:(NSError *)error { +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didSetSessionDescriptionWithError:(NSError*)error { if (error) { [self displayLogMessage:@"SDP onFailure."]; NSAssert(NO, error.description); @@ -462,34 +450,31 @@ [self displayLogMessage:@"SDP onSuccess() - possibly drain candidates"]; dispatch_async(dispatch_get_main_queue(), ^(void) { - if (!self.client.initiator) { - if (self.peerConnection.remoteDescription - && !self.peerConnection.localDescription) { - [self displayLogMessage:@"Callee, setRemoteDescription succeeded"]; - RTCPair *audio = - [[RTCPair alloc] - 
initWithKey:@"OfferToReceiveAudio" value:@"true"]; - RTCPair *video = - [[RTCPair alloc] - initWithKey:@"OfferToReceiveVideo" value:@"true"]; - NSArray *mandatory = @[ audio , video ]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatory - optionalConstraints:nil]; - [self.peerConnection - createAnswerWithDelegate:self constraints:constraints]; - [self displayLogMessage:@"PC - createAnswer."]; + if (!self.client.initiator) { + if (self.peerConnection.remoteDescription && + !self.peerConnection.localDescription) { + [self displayLogMessage:@"Callee, setRemoteDescription succeeded"]; + RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" + value:@"true"]; + RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" + value:@"true"]; + NSArray* mandatory = @[ audio, video ]; + RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] + initWithMandatoryConstraints:mandatory + optionalConstraints:nil]; + [self.peerConnection createAnswerWithDelegate:self + constraints:constraints]; + [self displayLogMessage:@"PC - createAnswer."]; } else { - [self displayLogMessage:@"SDP onSuccess - drain candidates"]; - [self drainRemoteCandidates]; + [self displayLogMessage:@"SDP onSuccess - drain candidates"]; + [self drainRemoteCandidates]; } - } else { + } else { if (self.peerConnection.remoteDescription) { - [self displayLogMessage:@"SDP onSuccess - drain candidates"]; - [self drainRemoteCandidates]; + [self displayLogMessage:@"SDP onSuccess - drain candidates"]; + [self drainRemoteCandidates]; } - } + } }); } @@ -502,36 +487,34 @@ self.peerConnection = nil; self.peerConnectionFactory = nil; self.pcObserver = nil; - self.client.ICEServerDelegate = nil; - self.client.messageHandler = nil; self.client = nil; [RTCPeerConnectionFactory deinitializeSSL]; } - (void)drainRemoteCandidates { - for (RTCICECandidate *candidate in self.queuedRemoteCandidates) { + for (RTCICECandidate* candidate in 
self.queuedRemoteCandidates) { [self.peerConnection addICECandidate:candidate]; } self.queuedRemoteCandidates = nil; } -- (NSString *)unHTMLifyString:(NSString *)base { +- (NSString*)unHTMLifyString:(NSString*)base { // TODO(hughv): Investigate why percent escapes are being added. Removing // them isn't necessary on Android. // convert HTML escaped characters to UTF8. - NSString *removePercent = + NSString* removePercent = [base stringByReplacingPercentEscapesUsingEncoding:NSUTF8StringEncoding]; // remove leading and trailing ". NSRange range; range.length = [removePercent length] - 2; range.location = 1; - NSString *removeQuotes = [removePercent substringWithRange:range]; + NSString* removeQuotes = [removePercent substringWithRange:range]; // convert \" to ". - NSString *removeEscapedQuotes = + NSString* removeEscapedQuotes = [removeQuotes stringByReplacingOccurrencesOfString:@"\\\"" withString:@"\""]; // convert \\ to \. - NSString *removeBackslash = + NSString* removeBackslash = [removeEscapedQuotes stringByReplacingOccurrencesOfString:@"\\\\" withString:@"\\"]; return removeBackslash; @@ -540,8 +523,8 @@ #pragma mark - public methods - (void)closeVideoUI { - [self disconnect]; - [self.viewController resetUI]; + [self disconnect]; + [self.viewController resetUI]; } @end diff --git a/talk/examples/ios/AppRTCDemo/VideoView.h b/talk/examples/ios/AppRTCDemo/APPRTCVideoView.h similarity index 91% rename from talk/examples/ios/AppRTCDemo/VideoView.h rename to talk/examples/ios/AppRTCDemo/APPRTCVideoView.h index ff3167c52..238798e39 100644 --- a/talk/examples/ios/AppRTCDemo/VideoView.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCVideoView.h @@ -30,7 +30,7 @@ @class RTCVideoTrack; // This class encapsulates VideoRenderIosView. -@interface VideoView : UIView +@interface APPRTCVideoView : UIView // Property to get/set required video orientation. 
@property(nonatomic, assign) UIInterfaceOrientation videoOrientation; @@ -40,11 +40,4 @@ // Sets up the underlying renderer and track objects. - (void)renderVideoTrackInterface:(RTCVideoTrack*)track; -// Stops rendering. -- (void)pause; -// Starts rendering. -- (void)resume; -// Stops rendering and resets underlying renderer and track objects. -- (void)stop; - @end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCVideoView.m b/talk/examples/ios/AppRTCDemo/APPRTCVideoView.m new file mode 100644 index 000000000..23466b6c9 --- /dev/null +++ b/talk/examples/ios/AppRTCDemo/APPRTCVideoView.m @@ -0,0 +1,82 @@ +/* + * libjingle + * Copyright 2013, Google Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * This APPRTCVideoView must be initialized and added to a View to get + * either the local or remote video stream rendered. + * It is a view itself and it encapsulates + * an object of VideoRenderIosView and UIActivityIndicatorView. + * Both of the views will get resized as per the frame of their parent. + */ + +#import "APPRTCVideoView.h" + +#import "RTCVideoRenderer.h" +#import "RTCVideoTrack.h" + +@interface APPRTCVideoView () { + RTCVideoTrack* _track; + RTCVideoRenderer* _renderer; +} + +@property(nonatomic, weak) UIView* renderView; +@property(nonatomic, weak) UIActivityIndicatorView* activityView; + +@end + +@implementation APPRTCVideoView + +@synthesize videoOrientation = _videoOrientation; + +- (void)layoutSubviews { + [super layoutSubviews]; + if (!_renderer) { + // Left-right (mirror) flip the remote view. + CGAffineTransform xform = + CGAffineTransformMakeScale(self.isRemote ? -1 : 1, 1); + // TODO(fischman): why is this rotate (vertical+horizontal flip) needed?!? + xform = CGAffineTransformRotate(xform, M_PI); + // TODO(fischman): ensure back-camera flip is correct in all orientations, + // when back-camera support is added. 
+ [self setTransform:xform]; + _renderer = [[RTCVideoRenderer alloc] initWithView:self]; + } +} + +- (void)renderVideoTrackInterface:(RTCVideoTrack*)videoTrack { + [_track removeRenderer:_renderer]; + [_renderer stop]; + + _track = videoTrack; + + if (_track) { + [_track addRenderer:_renderer]; + [_renderer start]; + } +} + +@end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCViewController.h b/talk/examples/ios/AppRTCDemo/APPRTCViewController.h index 2db01a60c..c42a37293 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCViewController.h +++ b/talk/examples/ios/AppRTCDemo/APPRTCViewController.h @@ -27,20 +27,20 @@ #import -@class VideoView; +@class APPRTCVideoView; // The view controller that is displayed when AppRTCDemo is loaded. @interface APPRTCViewController : UIViewController -@property (weak, nonatomic) IBOutlet UITextField *textField; -@property (weak, nonatomic) IBOutlet UITextView *textInstructions; -@property (weak, nonatomic) IBOutlet UITextView *textOutput; +@property(weak, nonatomic) IBOutlet UITextField* textField; +@property(weak, nonatomic) IBOutlet UITextView* textInstructions; +@property(weak, nonatomic) IBOutlet UITextView* textOutput; @property(weak, nonatomic) IBOutlet UIView* blackView; -@property(nonatomic, strong) VideoView* remoteVideoView; -@property(nonatomic, strong) VideoView* localVideoView; +@property(nonatomic, strong) APPRTCVideoView* remoteVideoView; +@property(nonatomic, strong) APPRTCVideoView* localVideoView; -- (void)displayText:(NSString *)text; +- (void)displayText:(NSString*)text; - (void)resetUI; @end diff --git a/talk/examples/ios/AppRTCDemo/APPRTCViewController.m b/talk/examples/ios/AppRTCDemo/APPRTCViewController.m index cacac14a4..8c5ccde85 100644 --- a/talk/examples/ios/AppRTCDemo/APPRTCViewController.m +++ b/talk/examples/ios/AppRTCDemo/APPRTCViewController.m @@ -27,49 +27,40 @@ #import "APPRTCViewController.h" -#import "VideoView.h" +#import "APPRTCVideoView.h" @interface APPRTCViewController () -@property 
(nonatomic, assign) UIInterfaceOrientation statusBarOrientation; +@property(nonatomic, assign) UIInterfaceOrientation statusBarOrientation; @end @implementation APPRTCViewController -@synthesize textField = _textField; -@synthesize textInstructions = _textInstructions; -@synthesize textOutput = _textOutput; -@synthesize blackView = _blackView; - -@synthesize remoteVideoView = _remoteVideoView; -@synthesize localVideoView = _localVideoView; - -@synthesize statusBarOrientation = _statusBarOrientation; - - (void)viewDidLoad { [super viewDidLoad]; self.statusBarOrientation = - [UIApplication sharedApplication].statusBarOrientation; + [UIApplication sharedApplication].statusBarOrientation; self.textField.delegate = self; [self.textField becomeFirstResponder]; } - (void)viewDidLayoutSubviews { - if (self.statusBarOrientation != - [UIApplication sharedApplication].statusBarOrientation) { - self.statusBarOrientation = - [UIApplication sharedApplication].statusBarOrientation; - [[NSNotificationCenter defaultCenter] - postNotificationName:@"StatusBarOrientationDidChange" object:nil]; - } + if (self.statusBarOrientation != + [UIApplication sharedApplication].statusBarOrientation) { + self.statusBarOrientation = + [UIApplication sharedApplication].statusBarOrientation; + [[NSNotificationCenter defaultCenter] + postNotificationName:@"StatusBarOrientationDidChange" + object:nil]; + } } -- (void)displayText:(NSString *)text { +- (void)displayText:(NSString*)text { dispatch_async(dispatch_get_main_queue(), ^(void) { - NSString *output = - [NSString stringWithFormat:@"%@\n%@", self.textOutput.text, text]; - self.textOutput.text = output; + NSString* output = + [NSString stringWithFormat:@"%@\n%@", self.textOutput.text, text]; + self.textOutput.text = output; }); } @@ -82,11 +73,11 @@ self.textOutput.text = nil; self.blackView.hidden = YES; - [_remoteVideoView stop]; + [_remoteVideoView renderVideoTrackInterface:nil]; [_remoteVideoView removeFromSuperview]; self.remoteVideoView = 
nil; - [_localVideoView stop]; + [_localVideoView renderVideoTrackInterface:nil]; [_localVideoView removeFromSuperview]; self.localVideoView = nil; } @@ -94,61 +85,62 @@ // TODO(fischman): Use video dimensions from the incoming video stream // and resize the Video View accordingly w.r.t. aspect ratio. enum { - // Remote video view dimensions. - kRemoteVideoWidth = 640, - kRemoteVideoHeight = 480, - // Padding space for local video view with its parent. - kLocalViewPadding = 20 + // Remote video view dimensions. + kRemoteVideoWidth = 640, + kRemoteVideoHeight = 480, + // Padding space for local video view with its parent. + kLocalViewPadding = 20 }; - (void)setupCaptureSession { - self.blackView.hidden = NO; + self.blackView.hidden = NO; - CGRect frame = CGRectMake((self.blackView.bounds.size.width - -kRemoteVideoWidth)/2, - (self.blackView.bounds.size.height - -kRemoteVideoHeight)/2, - kRemoteVideoWidth, - kRemoteVideoHeight); - VideoView *videoView = [[VideoView alloc] initWithFrame:frame]; - videoView.isRemote = TRUE; + CGRect frame = + CGRectMake((self.blackView.bounds.size.width - kRemoteVideoWidth) / 2, + (self.blackView.bounds.size.height - kRemoteVideoHeight) / 2, + kRemoteVideoWidth, + kRemoteVideoHeight); + APPRTCVideoView* videoView = [[APPRTCVideoView alloc] initWithFrame:frame]; + videoView.isRemote = TRUE; - [self.blackView addSubview:videoView]; - videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | - UIViewAutoresizingFlexibleRightMargin | - UIViewAutoresizingFlexibleBottomMargin | - UIViewAutoresizingFlexibleTopMargin; - videoView.translatesAutoresizingMaskIntoConstraints = YES; - _remoteVideoView = videoView; + [self.blackView addSubview:videoView]; + videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | + UIViewAutoresizingFlexibleRightMargin | + UIViewAutoresizingFlexibleBottomMargin | + UIViewAutoresizingFlexibleTopMargin; + videoView.translatesAutoresizingMaskIntoConstraints = YES; + _remoteVideoView = videoView; - 
CGSize screenSize = [[UIScreen mainScreen] bounds].size; - CGFloat localVideoViewWidth = - UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ? - screenSize.width/4 : screenSize.height/4; - CGFloat localVideoViewHeight = - UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ? - screenSize.height/4 : screenSize.width/4; - frame = CGRectMake(self.blackView.bounds.size.width - -localVideoViewWidth-kLocalViewPadding, - kLocalViewPadding, - localVideoViewWidth, - localVideoViewHeight); - videoView = [[VideoView alloc] initWithFrame:frame]; - videoView.isRemote = FALSE; + CGSize screenSize = [[UIScreen mainScreen] bounds].size; + CGFloat localVideoViewWidth = + UIInterfaceOrientationIsPortrait(self.statusBarOrientation) + ? screenSize.width / 4 + : screenSize.height / 4; + CGFloat localVideoViewHeight = + UIInterfaceOrientationIsPortrait(self.statusBarOrientation) + ? screenSize.height / 4 + : screenSize.width / 4; + frame = CGRectMake(self.blackView.bounds.size.width - localVideoViewWidth - + kLocalViewPadding, + kLocalViewPadding, + localVideoViewWidth, + localVideoViewHeight); + videoView = [[APPRTCVideoView alloc] initWithFrame:frame]; + videoView.isRemote = FALSE; - [self.blackView addSubview:videoView]; - videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | - UIViewAutoresizingFlexibleBottomMargin | - UIViewAutoresizingFlexibleHeight | - UIViewAutoresizingFlexibleWidth; - videoView.translatesAutoresizingMaskIntoConstraints = YES; - _localVideoView = videoView; + [self.blackView addSubview:videoView]; + videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | + UIViewAutoresizingFlexibleBottomMargin | + UIViewAutoresizingFlexibleHeight | + UIViewAutoresizingFlexibleWidth; + videoView.translatesAutoresizingMaskIntoConstraints = YES; + _localVideoView = videoView; } #pragma mark - UITextFieldDelegate -- (void)textFieldDidEndEditing:(UITextField *)textField { - NSString *room = textField.text; +- 
(void)textFieldDidEndEditing:(UITextField*)textField { + NSString* room = textField.text; if ([room length] == 0) { return; } @@ -159,16 +151,14 @@ enum { // prepopulating the textField with a valid URL missing the room. This allows // the user to have the simplicity of just entering the room or the ability to // override to a custom appspot instance. Remove apprtc:// when this is done. - NSString *url = + NSString* url = [NSString stringWithFormat:@"apprtc://apprtc.appspot.com/?r=%@", room]; [[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]]; - dispatch_async(dispatch_get_main_queue(), ^{ - [self setupCaptureSession]; - }); + dispatch_async(dispatch_get_main_queue(), ^{ [self setupCaptureSession]; }); } -- (BOOL)textFieldShouldReturn:(UITextField *)textField { +- (BOOL)textFieldShouldReturn:(UITextField*)textField { // There is no other control that can take focus, so manually resign focus // when return (Join) is pressed to trigger |textFieldDidEndEditing|. [textField resignFirstResponder]; diff --git a/talk/examples/ios/AppRTCDemo/GAEChannelClient.m b/talk/examples/ios/AppRTCDemo/GAEChannelClient.m index e0d9a8076..1b5e55924 100644 --- a/talk/examples/ios/AppRTCDemo/GAEChannelClient.m +++ b/talk/examples/ios/AppRTCDemo/GAEChannelClient.m @@ -32,27 +32,23 @@ @interface GAEChannelClient () @property(nonatomic, assign) id delegate; -@property(nonatomic, strong) UIWebView *webView; +@property(nonatomic, strong) UIWebView* webView; @end @implementation GAEChannelClient -@synthesize delegate = _delegate; -@synthesize webView = _webView; - -- (id)initWithToken:(NSString *)token delegate:(id)delegate { +- (id)initWithToken:(NSString*)token delegate:(id)delegate { self = [super init]; if (self) { _webView = [[UIWebView alloc] init]; _webView.delegate = self; _delegate = delegate; - NSString *htmlPath = + NSString* htmlPath = [[NSBundle mainBundle] pathForResource:@"ios_channel" ofType:@"html"]; - NSURL *htmlUrl = [NSURL fileURLWithPath:htmlPath]; - 
NSString *path = [NSString stringWithFormat:@"%@?token=%@", - [htmlUrl absoluteString], - token]; + NSURL* htmlUrl = [NSURL fileURLWithPath:htmlPath]; + NSString* path = [NSString + stringWithFormat:@"%@?token=%@", [htmlUrl absoluteString], token]; [_webView loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:path]]]; @@ -67,17 +63,17 @@ #pragma mark - UIWebViewDelegate method -- (BOOL)webView:(UIWebView *)webView - shouldStartLoadWithRequest:(NSURLRequest *)request +- (BOOL)webView:(UIWebView*)webView + shouldStartLoadWithRequest:(NSURLRequest*)request navigationType:(UIWebViewNavigationType)navigationType { - NSString *scheme = [request.URL scheme]; + NSString* scheme = [request.URL scheme]; if ([scheme compare:@"js-frame"] != NSOrderedSame) { return YES; } - NSString *resourceSpecifier = [request.URL resourceSpecifier]; + NSString* resourceSpecifier = [request.URL resourceSpecifier]; NSRange range = [resourceSpecifier rangeOfString:@":"]; - NSString *method; - NSString *message; + NSString* method; + NSString* message; if (range.length == 0 && range.location == NSNotFound) { method = resourceSpecifier; } else { @@ -85,21 +81,21 @@ message = [resourceSpecifier substringFromIndex:range.location + 1]; } dispatch_async(dispatch_get_main_queue(), ^(void) { - if ([method compare:@"onopen"] == NSOrderedSame) { - [self.delegate onOpen]; - } else if ([method compare:@"onmessage"] == NSOrderedSame) { - [self.delegate onMessage:message]; - } else if ([method compare:@"onclose"] == NSOrderedSame) { - [self.delegate onClose]; - } else if ([method compare:@"onerror"] == NSOrderedSame) { - // TODO(hughv): Get error. 
- int code = -1; - NSString *description = message; - [self.delegate onError:code withDescription:description]; - } else { - NSAssert(NO, @"Invalid message sent from UIWebView: %@", - resourceSpecifier); - } + if ([method compare:@"onopen"] == NSOrderedSame) { + [self.delegate onOpen]; + } else if ([method compare:@"onmessage"] == NSOrderedSame) { + [self.delegate onMessage:message]; + } else if ([method compare:@"onclose"] == NSOrderedSame) { + [self.delegate onClose]; + } else if ([method compare:@"onerror"] == NSOrderedSame) { + // TODO(hughv): Get error. + int code = -1; + NSString* description = message; + [self.delegate onError:code withDescription:description]; + } else { + NSAssert( + NO, @"Invalid message sent from UIWebView: %@", resourceSpecifier); + } }); return YES; } diff --git a/talk/examples/ios/AppRTCDemo/VideoView.m b/talk/examples/ios/AppRTCDemo/VideoView.m deleted file mode 100644 index d563fb32f..000000000 --- a/talk/examples/ios/AppRTCDemo/VideoView.m +++ /dev/null @@ -1,168 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -/* - * This VideoView must be initialzed and added to a View to get - * either the local or remote video stream rendered. - * It is a view itself and it encapsulates - * an object of VideoRenderIosView and UIActivityIndicatorView. - * Both of the views will get resized as per the frame of their parent. - */ - -#import "VideoView.h" - -#import "RTCVideoRenderer.h" -#import "RTCVideoTrack.h" - -@interface VideoView () { - RTCVideoTrack *_track; - RTCVideoRenderer *_renderer; -} - -@property (nonatomic, weak) UIView *renderView; -@property (nonatomic, weak) UIActivityIndicatorView *activityView; - -@end - -@implementation VideoView - -@synthesize videoOrientation = _videoOrientation; -@synthesize isRemote = _isRemote; -@synthesize renderView = _renderView; -@synthesize activityView = _activityView; - -static void init(VideoView *self) { - UIView *renderView = [RTCVideoRenderer newRenderViewWithFrame: - CGRectMake(0, - 0, - self.bounds.size.width, - self.bounds.size.height)]; - [self addSubview:renderView]; - renderView.autoresizingMask = UIViewAutoresizingFlexibleHeight | - UIViewAutoresizingFlexibleWidth; - renderView.translatesAutoresizingMaskIntoConstraints = YES; - self.renderView = renderView; - - UIActivityIndicatorView *indicatorView = - [[UIActivityIndicatorView alloc] - initWithActivityIndicatorStyle: - UIActivityIndicatorViewStyleWhiteLarge]; - indicatorView.frame = self.bounds; - indicatorView.hidesWhenStopped = YES; - [self 
addSubview:indicatorView]; - indicatorView.autoresizingMask = UIViewAutoresizingFlexibleWidth | - UIViewAutoresizingFlexibleHeight; - indicatorView.translatesAutoresizingMaskIntoConstraints = YES; - [indicatorView startAnimating]; - self.activityView = indicatorView; -} - -- (id)initWithFrame:(CGRect)frame { - self = [super initWithFrame:frame]; - if (self) { - init(self); - } - return self; -} - --(id)initWithCoder:(NSCoder *)aDecoder { - self = [super initWithCoder:aDecoder]; - if (self) { - init(self); - } - return self; -} - -- (UIInterfaceOrientation)videoOrientation { - return _videoOrientation; -} - -- (void)setVideoOrientation:(UIInterfaceOrientation)videoOrientation { - if (_videoOrientation != videoOrientation) { - _videoOrientation = videoOrientation; - - CGFloat angle; - switch (videoOrientation) { - case UIInterfaceOrientationPortrait: - angle = M_PI_2; - break; - case UIInterfaceOrientationPortraitUpsideDown: - angle = -M_PI_2; - break; - case UIInterfaceOrientationLandscapeLeft: - angle = M_PI; - break; - case UIInterfaceOrientationLandscapeRight: - angle = 0; - break; - } - // The video comes in mirrored. That is fine for the local video, - // but the remote video should be put back to original. - CGAffineTransform xform = - CGAffineTransformMakeScale([self isRemote] ? 
-1 : 1, 1); - xform = CGAffineTransformRotate(xform, angle); - [[self renderView] setTransform:xform]; - } -} - -- (void)renderVideoTrackInterface:(RTCVideoTrack *)videoTrack { - [self stop]; - - _track = videoTrack; - - if (_track) { - if (!_renderer) { - _renderer = [[RTCVideoRenderer alloc] - initWithRenderView:[self renderView]]; - } - [_track addRenderer:_renderer]; - [self resume]; - } - - [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft]; - [self setVideoOrientation:UIInterfaceOrientationPortrait]; - [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft]; -} - --(void)pause { - [_renderer stop]; -} - --(void)resume { - [self.activityView stopAnimating]; - [self.activityView removeFromSuperview]; - self.activityView = nil; - - [_renderer start]; -} - -- (void)stop { - [_track removeRenderer:_renderer]; - [_renderer stop]; -} - -@end diff --git a/talk/examples/ios/AppRTCDemo/main.m b/talk/examples/ios/AppRTCDemo/main.m index bf35f4cbf..e9a1f63ef 100644 --- a/talk/examples/ios/AppRTCDemo/main.m +++ b/talk/examples/ios/AppRTCDemo/main.m @@ -29,7 +29,7 @@ #import "APPRTCAppDelegate.h" -int main(int argc, char *argv[]) { +int main(int argc, char* argv[]) { @autoreleasepool { return UIApplicationMain( argc, argv, nil, NSStringFromClass([APPRTCAppDelegate class])); diff --git a/talk/libjingle_examples.gyp b/talk/libjingle_examples.gyp index 3acf28d6b..cfae7418d 100755 --- a/talk/libjingle_examples.gyp +++ b/talk/libjingle_examples.gyp @@ -248,18 +248,18 @@ 'examples/ios/AppRTCDemo/APPRTCAppDelegate.m', 'examples/ios/AppRTCDemo/APPRTCViewController.h', 'examples/ios/AppRTCDemo/APPRTCViewController.m', + 'examples/ios/AppRTCDemo/APPRTCVideoView.h', + 'examples/ios/AppRTCDemo/APPRTCVideoView.m', 'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch', 'examples/ios/AppRTCDemo/GAEChannelClient.h', 'examples/ios/AppRTCDemo/GAEChannelClient.m', - 'examples/ios/AppRTCDemo/VideoView.h', - 'examples/ios/AppRTCDemo/VideoView.m', 'examples/ios/AppRTCDemo/main.m', 
], 'xcode_settings': { 'CLANG_ENABLE_OBJC_ARC': 'YES', 'INFOPLIST_FILE': 'examples/ios/AppRTCDemo/Info.plist', 'OTHER_LDFLAGS': [ - '-framework CoreGraphics', + '-framework CoreGraphics', '-framework Foundation', '-framework UIKit', ], diff --git a/talk/media/devices/macdevicemanagermm.mm b/talk/media/devices/macdevicemanagermm.mm index 8cc77518c..fdde91fa5 100644 --- a/talk/media/devices/macdevicemanagermm.mm +++ b/talk/media/devices/macdevicemanagermm.mm @@ -1,6 +1,6 @@ /* * libjingle - * Copyright 2004--2010, Google Inc. + * Copyright 2010, Google Inc. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: @@ -27,7 +27,7 @@ // support GCC compiler #ifndef __has_feature -# define __has_feature(x) 0 +#define __has_feature(x) 0 #endif #include "talk/media/devices/devicemanager.h" @@ -42,7 +42,7 @@ cricket::DeviceManagerInterface* manager_; } - (id)init:(cricket::DeviceManagerInterface*)manager; -- (void)onDevicesChanged:(NSNotification *)notification; +- (void)onDevicesChanged:(NSNotification*)notification; @end @implementation DeviceWatcherImpl @@ -50,14 +50,16 @@ if ((self = [super init])) { assert(manager != NULL); manager_ = manager; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(onDevicesChanged:) - name:QTCaptureDeviceWasConnectedNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(onDevicesChanged:) - name:QTCaptureDeviceWasDisconnectedNotification - object:nil]; + [[NSNotificationCenter defaultCenter] + addObserver:self + selector:@selector(onDevicesChanged:) + name:QTCaptureDeviceWasConnectedNotification + object:nil]; + [[NSNotificationCenter defaultCenter] + addObserver:self + selector:@selector(onDevicesChanged:) + name:QTCaptureDeviceWasDisconnectedNotification + object:nil]; } return self; } @@ -68,7 +70,7 @@ [super dealloc]; #endif } -- (void)onDevicesChanged:(NSNotification 
*)notification { +- (void)onDevicesChanged:(NSNotification*)notification { manager_->SignalDevicesChange(); } @end @@ -83,9 +85,7 @@ DeviceWatcherImpl* CreateDeviceWatcherCallback( #else @autoreleasepool #endif - { - impl = [[DeviceWatcherImpl alloc] init:manager]; - } + { impl = [[DeviceWatcherImpl alloc] init:manager]; } #if !__has_feature(objc_arc) [pool drain]; #endif @@ -115,20 +115,19 @@ bool GetQTKitVideoDevices(std::vector* devices) { static NSString* const kFormat = @"localizedDisplayName: \"%@\", " @"modelUniqueID: \"%@\", uniqueID \"%@\", isConnected: %d, " @"isOpen: %d, isInUseByAnotherApplication: %d"; - NSString* info = [NSString stringWithFormat:kFormat, - [qt_capture_device localizedDisplayName], - [qt_capture_device modelUniqueID], - [qt_capture_device uniqueID], - [qt_capture_device isConnected], - [qt_capture_device isOpen], - [qt_capture_device isInUseByAnotherApplication]]; + NSString* info = [NSString + stringWithFormat:kFormat, + [qt_capture_device localizedDisplayName], + [qt_capture_device modelUniqueID], + [qt_capture_device uniqueID], + [qt_capture_device isConnected], + [qt_capture_device isOpen], + [qt_capture_device isInUseByAnotherApplication]]; LOG(LS_INFO) << [info UTF8String]; - std::string name([[qt_capture_device localizedDisplayName] - UTF8String]); - devices->push_back(Device(name, - [[qt_capture_device uniqueID] - UTF8String])); + std::string name([[qt_capture_device localizedDisplayName] UTF8String]); + devices->push_back( + Device(name, [[qt_capture_device uniqueID] UTF8String])); } } #if !__has_feature(objc_arc)