AppRTCDemo(ios): style/cleanup fixes following cr/62871616-p10

BUG=2168
R=noahric@google.com

Review URL: https://webrtc-codereview.appspot.com/9709004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5768 4adac7df-926f-26a2-2b94-8c16560cd09d
fischman@webrtc.org 2014-03-25 00:11:56 +00:00
parent ce12f1fd32
commit 7fa1fcb72c
35 changed files with 809 additions and 929 deletions

View File

@@ -39,7 +39,7 @@
@implementation RTCAudioTrack (Internal)
- (talk_base::scoped_refptr<webrtc::AudioTrackInterface>)audioTrack {
return static_cast<webrtc::AudioTrackInterface *>(self.mediaTrack.get());
return static_cast<webrtc::AudioTrackInterface*>(self.mediaTrack.get());
}
@end

View File

@@ -37,9 +37,9 @@
@synthesize sdpMLineIndex = _sdpMLineIndex;
@synthesize sdp = _sdp;
- (id)initWithMid:(NSString *)sdpMid
- (id)initWithMid:(NSString*)sdpMid
index:(NSInteger)sdpMLineIndex
sdp:(NSString *)sdp {
sdp:(NSString*)sdp {
if (!sdpMid || !sdp) {
NSAssert(NO, @"nil arguments not allowed");
return nil;
@@ -52,18 +52,18 @@
return self;
}
- (NSString *)description {
- (NSString*)description {
return [NSString stringWithFormat:@"%@:%ld:%@",
self.sdpMid,
(long)self.sdpMLineIndex,
self.sdp];
self.sdpMid,
(long)self.sdpMLineIndex,
self.sdp];
}
@end
@implementation RTCICECandidate (Internal)
- (id)initWithCandidate:(const webrtc::IceCandidateInterface *)candidate {
- (id)initWithCandidate:(const webrtc::IceCandidateInterface*)candidate {
if ((self = [super init])) {
std::string sdp;
if (candidate->ToString(&sdp)) {

View File

@@ -37,9 +37,9 @@
@synthesize username = _username;
@synthesize password = _password;
- (id)initWithURI:(NSURL *)URI
username:(NSString *)username
password:(NSString *)password {
- (id)initWithURI:(NSURL*)URI
username:(NSString*)username
password:(NSString*)password {
if (!URI || !username || !password) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;
@@ -53,9 +53,11 @@
return self;
}
- (NSString *)description {
- (NSString*)description {
return [NSString stringWithFormat:@"RTCICEServer: [%@:%@:%@]",
[self.URI absoluteString], self.username, self.password];
[self.URI absoluteString],
self.username,
self.password];
}
@end
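A minimal usage sketch of the initializer above (the server URI is illustrative): all three arguments must be non-nil, or the NSAssert fires and nil is returned.
NSURL* uri = [NSURL URLWithString:@"stun:stun.example.org"];  // illustrative
RTCICEServer* server = [[RTCICEServer alloc] initWithURI:uri
                                                username:@""
                                                password:@""];
NSLog(@"%@", server);  // "RTCICEServer: [stun:stun.example.org::]"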

View File

@@ -44,8 +44,8 @@
webrtc::MediaConstraintsInterface::Constraints _optional;
}
- (id)initWithMandatoryConstraints:(NSArray *)mandatory
optionalConstraints:(NSArray *)optional {
- (id)initWithMandatoryConstraints:(NSArray*)mandatory
optionalConstraints:(NSArray*)optional {
if ((self = [super init])) {
_mandatory = [[self class] constraintsFromArray:mandatory];
_optional = [[self class] constraintsFromArray:optional];
@@ -55,10 +55,10 @@
return self;
}
+ (webrtc::MediaConstraintsInterface::Constraints)
constraintsFromArray:(NSArray *)array {
+ (webrtc::MediaConstraintsInterface::Constraints)constraintsFromArray:
(NSArray*)array {
webrtc::MediaConstraintsInterface::Constraints constraints;
for (RTCPair *pair in array) {
for (RTCPair* pair in array) {
constraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
[pair.key UTF8String], [pair.value UTF8String]));
}
@@ -69,7 +69,7 @@
@implementation RTCMediaConstraints (internal)
- (const webrtc::RTCMediaConstraintsNative *)constraints {
- (const webrtc::RTCMediaConstraintsNative*)constraints {
return _constraints.get();
}
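For reference, a sketch of how these pieces fit together: RTCPair key/value objects (shown later in this CL) feed constraintsFromArray:, which builds the native constraint list. "OfferToReceiveAudio" is a standard constraint name, used here only as an example.
RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
                                        value:@"true"];
RTCMediaConstraints* constraints =
    [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@[ audio ]
                                          optionalConstraints:nil];
// The (internal) -constraints method then exposes the native
// webrtc::RTCMediaConstraintsNative* built from these pairs.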

View File

@@ -38,31 +38,31 @@
#include "talk/app/webrtc/mediastreaminterface.h"
@implementation RTCMediaStream {
NSMutableArray *_audioTracks;
NSMutableArray *_videoTracks;
NSMutableArray* _audioTracks;
NSMutableArray* _videoTracks;
talk_base::scoped_refptr<webrtc::MediaStreamInterface> _mediaStream;
}
- (NSString *)description {
- (NSString*)description {
return [NSString stringWithFormat:@"[%@:A=%lu:V=%lu]",
[self label],
(unsigned long)[self.audioTracks count],
(unsigned long)[self.videoTracks count]];
[self label],
(unsigned long)[self.audioTracks count],
(unsigned long)[self.videoTracks count]];
}
- (NSArray *)audioTracks {
- (NSArray*)audioTracks {
return [_audioTracks copy];
}
- (NSArray *)videoTracks {
- (NSArray*)videoTracks {
return [_videoTracks copy];
}
- (NSString *)label {
- (NSString*)label {
return @(self.mediaStream->label().c_str());
}
- (BOOL)addAudioTrack:(RTCAudioTrack *)track {
- (BOOL)addAudioTrack:(RTCAudioTrack*)track {
if (self.mediaStream->AddTrack(track.audioTrack)) {
[_audioTracks addObject:track];
return YES;
@@ -70,7 +70,7 @@
return NO;
}
- (BOOL)addVideoTrack:(RTCVideoTrack *)track {
- (BOOL)addVideoTrack:(RTCVideoTrack*)track {
if (self.mediaStream->AddTrack(track.videoTrack)) {
[_videoTracks addObject:track];
return YES;
@@ -78,7 +78,7 @@
return NO;
}
- (BOOL)removeAudioTrack:(RTCAudioTrack *)track {
- (BOOL)removeAudioTrack:(RTCAudioTrack*)track {
NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:track];
NSAssert(index != NSNotFound,
@"|removeAudioTrack| called on unexpected RTCAudioTrack");
@@ -89,7 +89,7 @@
return NO;
}
- (BOOL)removeVideoTrack:(RTCVideoTrack *)track {
- (BOOL)removeVideoTrack:(RTCVideoTrack*)track {
NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track];
NSAssert(index != NSNotFound,
@"|removeAudioTrack| called on unexpected RTCVideoTrack");
@@ -105,7 +105,7 @@
@implementation RTCMediaStream (Internal)
- (id)initWithMediaStream:
(talk_base::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
(talk_base::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
if (!mediaStream) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;
@@ -122,7 +122,7 @@
for (size_t i = 0; i < audio_tracks.size(); ++i) {
talk_base::scoped_refptr<webrtc::AudioTrackInterface> track =
audio_tracks[i];
RTCAudioTrack *audioTrack =
RTCAudioTrack* audioTrack =
[[RTCAudioTrack alloc] initWithMediaTrack:track];
[_audioTracks addObject:audioTrack];
}

View File

@@ -40,12 +40,13 @@
- (BOOL)isEqual:(id)other {
// Equality is purely based on the label just like the C++ implementation.
if (self == other) return YES;
if (self == other)
return YES;
if (![other isKindOfClass:[self class]] ||
![self isKindOfClass:[other class]]) {
return NO;
}
RTCMediaStreamTrack *otherMediaStream = (RTCMediaStreamTrack *)other;
RTCMediaStreamTrack* otherMediaStream = (RTCMediaStreamTrack*)other;
return [self.label isEqual:otherMediaStream.label];
}
@@ -53,11 +54,11 @@
return [self.label hash];
}
- (NSString *)kind {
- (NSString*)kind {
return @(self.mediaTrack->kind().c_str());
}
- (NSString *)label {
- (NSString*)label {
return @(self.mediaTrack->id().c_str());
}
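A sketch of the label-based equality described above (nativeTrack is an assumed scoped_refptr to a single underlying track):
RTCMediaStreamTrack* t1 =
    [[RTCMediaStreamTrack alloc] initWithMediaTrack:nativeTrack];
RTCMediaStreamTrack* t2 =
    [[RTCMediaStreamTrack alloc] initWithMediaTrack:nativeTrack];
// Distinct wrappers around the same native track share a label, so they
// compare equal and hash alike (safe for use in NSSet/NSDictionary).
BOOL equal = [t1 isEqual:t2];              // YES
BOOL sameHash = ([t1 hash] == [t2 hash]);  // YES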
@@ -82,8 +83,9 @@
@implementation RTCMediaStreamTrack (Internal)
- (id)initWithMediaTrack:(
talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
- (id)initWithMediaTrack:
(talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)
mediaTrack {
if (!mediaTrack) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;

View File

@@ -32,7 +32,7 @@
@synthesize key = _key;
@synthesize value = _value;
- (id)initWithKey:(NSString *)key value:(NSString *)value {
- (id)initWithKey:(NSString*)key value:(NSString*)value {
if ((self = [super init])) {
_key = [key copy];
_value = [value copy];

View File

@@ -51,39 +51,39 @@ class RTCCreateSessionDescriptionObserver
: public CreateSessionDescriptionObserver {
public:
RTCCreateSessionDescriptionObserver(id<RTCSessionDescriptonDelegate> delegate,
RTCPeerConnection *peerConnection) {
RTCPeerConnection* peerConnection) {
_delegate = delegate;
_peerConnection = peerConnection;
}
virtual void OnSuccess(SessionDescriptionInterface *desc) OVERRIDE {
RTCSessionDescription *session =
virtual void OnSuccess(SessionDescriptionInterface* desc) OVERRIDE {
RTCSessionDescription* session =
[[RTCSessionDescription alloc] initWithSessionDescription:desc];
[_delegate peerConnection:_peerConnection
didCreateSessionDescription:session
error:nil];
error:nil];
}
virtual void OnFailure(const std::string &error) OVERRIDE {
NSString *str = @(error.c_str());
NSError *err =
virtual void OnFailure(const std::string& error) OVERRIDE {
NSString* str = @(error.c_str());
NSError* err =
[NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
code:kRTCSessionDescriptionDelegateErrorCode
userInfo:@{ @"error" : str }];
userInfo:@{@"error" : str}];
[_delegate peerConnection:_peerConnection
didCreateSessionDescription:nil
error:err];
error:err];
}
private:
id<RTCSessionDescriptonDelegate> _delegate;
RTCPeerConnection *_peerConnection;
RTCPeerConnection* _peerConnection;
};
class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
public:
RTCSetSessionDescriptionObserver(id<RTCSessionDescriptonDelegate> delegate,
RTCPeerConnection *peerConnection) {
RTCPeerConnection* peerConnection) {
_delegate = delegate;
_peerConnection = peerConnection;
}
@@ -93,37 +93,36 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
didSetSessionDescriptionWithError:nil];
}
virtual void OnFailure(const std::string &error) OVERRIDE {
NSString *str = @(error.c_str());
NSError *err =
virtual void OnFailure(const std::string& error) OVERRIDE {
NSString* str = @(error.c_str());
NSError* err =
[NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
code:kRTCSessionDescriptionDelegateErrorCode
userInfo:@{ @"error" : str }];
userInfo:@{@"error" : str}];
[_delegate peerConnection:_peerConnection
didSetSessionDescriptionWithError:err];
}
private:
id<RTCSessionDescriptonDelegate> _delegate;
RTCPeerConnection *_peerConnection;
RTCPeerConnection* _peerConnection;
};
}
@implementation RTCPeerConnection {
NSMutableArray *_localStreams;
talk_base::scoped_ptr<webrtc::RTCPeerConnectionObserver>_observer;
NSMutableArray* _localStreams;
talk_base::scoped_ptr<webrtc::RTCPeerConnectionObserver> _observer;
talk_base::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
}
- (BOOL)addICECandidate:(RTCICECandidate *)candidate {
- (BOOL)addICECandidate:(RTCICECandidate*)candidate {
talk_base::scoped_ptr<const webrtc::IceCandidateInterface> iceCandidate(
candidate.candidate);
return self.peerConnection->AddIceCandidate(iceCandidate.get());
}
- (BOOL)addStream:(RTCMediaStream *)stream
constraints:(RTCMediaConstraints *)constraints {
- (BOOL)addStream:(RTCMediaStream*)stream
constraints:(RTCMediaConstraints*)constraints {
BOOL ret = self.peerConnection->AddStream(stream.mediaStream,
constraints.constraints);
if (!ret) {
@@ -134,7 +133,7 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
}
- (void)createAnswerWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
constraints:(RTCMediaConstraints *)constraints {
constraints:(RTCMediaConstraints*)constraints {
talk_base::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
observer(new talk_base::RefCountedObject<
webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
@@ -142,73 +141,73 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
}
- (void)createOfferWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
constraints:(RTCMediaConstraints *)constraints {
constraints:(RTCMediaConstraints*)constraints {
talk_base::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
observer(new talk_base::RefCountedObject<
webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
self.peerConnection->CreateOffer(observer, constraints.constraints);
}
- (void)removeStream:(RTCMediaStream *)stream {
- (void)removeStream:(RTCMediaStream*)stream {
self.peerConnection->RemoveStream(stream.mediaStream);
[_localStreams removeObject:stream];
}
- (void)
setLocalDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription *)sdp {
- (void)setLocalDescriptionWithDelegate:
(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription*)sdp {
talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
delegate, self));
self.peerConnection->SetLocalDescription(observer, sdp.sessionDescription);
}
- (void)
setRemoteDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription *)sdp {
- (void)setRemoteDescriptionWithDelegate:
(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription*)sdp {
talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
delegate, self));
self.peerConnection->SetRemoteDescription(observer, sdp.sessionDescription);
}
- (BOOL)updateICEServers:(NSArray *)servers
constraints:(RTCMediaConstraints *)constraints {
- (BOOL)updateICEServers:(NSArray*)servers
constraints:(RTCMediaConstraints*)constraints {
webrtc::PeerConnectionInterface::IceServers iceServers;
for (RTCICEServer *server in servers) {
for (RTCICEServer* server in servers) {
iceServers.push_back(server.iceServer);
}
return self.peerConnection->UpdateIce(iceServers, constraints.constraints);
}
- (RTCSessionDescription *)localDescription {
const webrtc::SessionDescriptionInterface *sdi =
- (RTCSessionDescription*)localDescription {
const webrtc::SessionDescriptionInterface* sdi =
self.peerConnection->local_description();
return sdi ?
[[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
nil;
return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
: nil;
}
- (NSArray *)localStreams {
- (NSArray*)localStreams {
return [_localStreams copy];
}
- (RTCSessionDescription *)remoteDescription {
const webrtc::SessionDescriptionInterface *sdi =
- (RTCSessionDescription*)remoteDescription {
const webrtc::SessionDescriptionInterface* sdi =
self.peerConnection->remote_description();
return sdi ?
[[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
nil;
return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
: nil;
}
- (RTCICEConnectionState)iceConnectionState {
return [RTCEnumConverter convertIceConnectionStateToObjC:
self.peerConnection->ice_connection_state()];
return [RTCEnumConverter
convertIceConnectionStateToObjC:self.peerConnection
->ice_connection_state()];
}
- (RTCICEGatheringState)iceGatheringState {
return [RTCEnumConverter convertIceGatheringStateToObjC:
self.peerConnection->ice_gathering_state()];
return [RTCEnumConverter
convertIceGatheringStateToObjC:self.peerConnection
->ice_gathering_state()];
}
- (RTCSignalingState)signalingState {
@@ -224,9 +223,10 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
@implementation RTCPeerConnection (Internal)
- (id)initWithPeerConnection:(
talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection
observer:(webrtc::RTCPeerConnectionObserver *)observer {
- (id)initWithPeerConnection:
(talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)
peerConnection
observer:(webrtc::RTCPeerConnectionObserver*)observer {
if (!peerConnection || !observer) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;

View File

@@ -86,54 +86,55 @@
return self;
}
- (RTCPeerConnection *)
peerConnectionWithICEServers:(NSArray *)servers
constraints:(RTCMediaConstraints *)constraints
- (RTCPeerConnection*)
peerConnectionWithICEServers:(NSArray*)servers
constraints:(RTCMediaConstraints*)constraints
delegate:(id<RTCPeerConnectionDelegate>)delegate {
webrtc::PeerConnectionInterface::IceServers iceServers;
for (RTCICEServer *server in servers) {
for (RTCICEServer* server in servers) {
iceServers.push_back(server.iceServer);
}
webrtc::RTCPeerConnectionObserver *observer =
webrtc::RTCPeerConnectionObserver* observer =
new webrtc::RTCPeerConnectionObserver(delegate);
webrtc::DTLSIdentityServiceInterface* dummy_dtls_identity_service = NULL;
talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection =
self.nativeFactory->CreatePeerConnection(
iceServers, constraints.constraints, dummy_dtls_identity_service,
observer);
RTCPeerConnection *pc =
self.nativeFactory->CreatePeerConnection(iceServers,
constraints.constraints,
dummy_dtls_identity_service,
observer);
RTCPeerConnection* pc =
[[RTCPeerConnection alloc] initWithPeerConnection:peerConnection
observer:observer];
observer->SetPeerConnection(pc);
return pc;
}
- (RTCMediaStream *)mediaStreamWithLabel:(NSString *)label {
- (RTCMediaStream*)mediaStreamWithLabel:(NSString*)label {
talk_base::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream =
self.nativeFactory->CreateLocalMediaStream([label UTF8String]);
return [[RTCMediaStream alloc] initWithMediaStream:nativeMediaStream];
}
- (RTCVideoSource *)videoSourceWithCapturer:(RTCVideoCapturer *)capturer
constraints:(RTCMediaConstraints *)constraints {
- (RTCVideoSource*)videoSourceWithCapturer:(RTCVideoCapturer*)capturer
constraints:(RTCMediaConstraints*)constraints {
if (!capturer) {
return nil;
}
talk_base::scoped_refptr<webrtc::VideoSourceInterface> source =
self.nativeFactory->CreateVideoSource([capturer release_native_capturer],
self.nativeFactory->CreateVideoSource([capturer takeNativeCapturer],
constraints.constraints);
return [[RTCVideoSource alloc] initWithMediaSource:source];
}
- (RTCVideoTrack *)videoTrackWithID:(NSString *)videoId
source:(RTCVideoSource *)source {
- (RTCVideoTrack*)videoTrackWithID:(NSString*)videoId
source:(RTCVideoSource*)source {
talk_base::scoped_refptr<webrtc::VideoTrackInterface> track =
self.nativeFactory->CreateVideoTrack([videoId UTF8String],
source.videoSource);
return [[RTCVideoTrack alloc] initWithMediaTrack:track];
}
- (RTCAudioTrack *)audioTrackWithID:(NSString *)audioId {
- (RTCAudioTrack*)audioTrackWithID:(NSString*)audioId {
talk_base::scoped_refptr<webrtc::AudioTrackInterface> track =
self.nativeFactory->CreateAudioTrack([audioId UTF8String], NULL);
return [[RTCAudioTrack alloc] initWithMediaTrack:track];
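Pulling the factory methods above together, a condensed sketch of the flow the test at the end of this CL exercises (stream/track labels are illustrative; the capturer is nil, matching the test's TODO):
RTCPeerConnectionFactory* factory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init];
RTCMediaStream* stream = [factory mediaStreamWithLabel:@"ARDAMS"];
// nil capturer => nil source, as in the test; labels here are assumptions.
RTCVideoSource* source = [factory videoSourceWithCapturer:nil
                                              constraints:constraints];
RTCVideoTrack* video = [factory videoTrackWithID:@"ARDAMSv0" source:source];
RTCAudioTrack* audio = [factory audioTrackWithID:@"ARDAMSa0"];
[stream addVideoTrack:video];
[stream addAudioTrack:audio];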

View File

@@ -43,7 +43,7 @@ RTCPeerConnectionObserver::RTCPeerConnectionObserver(
}
void RTCPeerConnectionObserver::SetPeerConnection(
RTCPeerConnection *peerConnection) {
RTCPeerConnection* peerConnection) {
_peerConnection = peerConnection;
}
@@ -54,8 +54,8 @@ void RTCPeerConnectionObserver::OnError() {
void RTCPeerConnectionObserver::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
[_delegate peerConnection:_peerConnection
signalingStateChanged:
[RTCEnumConverter convertSignalingStateToObjC:new_state]];
signalingStateChanged:[RTCEnumConverter
convertSignalingStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) {
@@ -82,15 +82,15 @@ void RTCPeerConnectionObserver::OnRenegotiationNeeded() {
void RTCPeerConnectionObserver::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
[_delegate peerConnection:_peerConnection
iceConnectionChanged:
[RTCEnumConverter convertIceConnectionStateToObjC:new_state]];
iceConnectionChanged:[RTCEnumConverter
convertIceConnectionStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
[_delegate peerConnection:_peerConnection
iceGatheringChanged:
[RTCEnumConverter convertIceGatheringStateToObjC:new_state]];
iceGatheringChanged:[RTCEnumConverter
convertIceGatheringStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnIceCandidate(
@@ -100,4 +100,4 @@ void RTCPeerConnectionObserver::OnIceCandidate(
[_delegate peerConnection:_peerConnection gotICECandidate:iceCandidate];
}
} // namespace webrtc
} // namespace webrtc

View File

@@ -36,7 +36,7 @@
@synthesize description = _description;
@synthesize type = _type;
- (id)initWithType:(NSString *)type sdp:(NSString *)sdp {
- (id)initWithType:(NSString*)type sdp:(NSString*)sdp {
if (!type || !sdp) {
NSAssert(NO, @"nil arguments not allowed");
return nil;
@@ -53,14 +53,14 @@
@implementation RTCSessionDescription (Internal)
- (id)initWithSessionDescription:
(const webrtc::SessionDescriptionInterface *)sessionDescription {
(const webrtc::SessionDescriptionInterface*)sessionDescription {
if (!sessionDescription) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;
return nil;
}
if ((self = [super init])) {
const std::string &type = sessionDescription->type();
const std::string& type = sessionDescription->type();
std::string sdp;
if (!sessionDescription->ToString(&sdp)) {
NSAssert(NO, @"Invalid SessionDescriptionInterface.");
@@ -73,7 +73,7 @@
return self;
}
- (webrtc::SessionDescriptionInterface *)sessionDescription {
- (webrtc::SessionDescriptionInterface*)sessionDescription {
return webrtc::CreateSessionDescription(
[self.type UTF8String], [self.description UTF8String], NULL);
}

View File

@@ -31,7 +31,7 @@
@interface RTCVideoCapturer (Internal)
- (cricket::VideoCapturer*)release_native_capturer;
- (cricket::VideoCapturer*)takeNativeCapturer;
- (id)initWithCapturer:(cricket::VideoCapturer*)capturer;

View File

@@ -35,11 +35,11 @@
#include "talk/media/devices/devicemanager.h"
@implementation RTCVideoCapturer {
talk_base::scoped_ptr<cricket::VideoCapturer>_capturer;
talk_base::scoped_ptr<cricket::VideoCapturer> _capturer;
}
+ (RTCVideoCapturer *)capturerWithDeviceName:(NSString *)deviceName {
const std::string &device_name = std::string([deviceName UTF8String]);
+ (RTCVideoCapturer*)capturerWithDeviceName:(NSString*)deviceName {
const std::string& device_name = std::string([deviceName UTF8String]);
talk_base::scoped_ptr<cricket::DeviceManagerInterface> device_manager(
cricket::DeviceManagerFactory::Create());
bool initialized = device_manager->Init();
@@ -51,7 +51,7 @@
}
talk_base::scoped_ptr<cricket::VideoCapturer> capturer(
device_manager->CreateVideoCapturer(device));
RTCVideoCapturer *rtcCapturer =
RTCVideoCapturer* rtcCapturer =
[[RTCVideoCapturer alloc] initWithCapturer:capturer.release()];
return rtcCapturer;
}
@@ -60,14 +60,14 @@
@implementation RTCVideoCapturer (Internal)
- (id)initWithCapturer:(cricket::VideoCapturer *)capturer {
- (id)initWithCapturer:(cricket::VideoCapturer*)capturer {
if ((self = [super init])) {
_capturer.reset(capturer);
}
return self;
}
- (cricket::VideoCapturer*)release_native_capturer {
- (cricket::VideoCapturer*)takeNativeCapturer {
return _capturer.release();
}
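The rename from release_native_capturer to takeNativeCapturer matches ObjC naming style and makes the ownership transfer explicit: the method release()s the scoped_ptr. A sketch of the contract (deviceName is assumed to exist):
RTCVideoCapturer* capturer =
    [RTCVideoCapturer capturerWithDeviceName:deviceName];  // deviceName assumed
// First call hands the native capturer, and its ownership, to the caller...
cricket::VideoCapturer* native = [capturer takeNativeCapturer];
// ...so a second call returns NULL; the wrapper must not be reused as a source.
cricket::VideoCapturer* again = [capturer takeNativeCapturer];  // NULL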

View File

@@ -49,7 +49,6 @@
// a VideoRenderCallback. Suitable for feeding to
// VideoTrackInterface::AddRenderer().
class CallbackConverter : public webrtc::VideoRendererInterface {
public:
CallbackConverter(webrtc::VideoRenderCallback* callback,
const uint32_t streamId)
@@ -88,51 +87,57 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
};
@implementation RTCVideoRenderer {
VideoRenderIosView* _renderView;
UIActivityIndicatorView* _activityIndicator;
CallbackConverter* _converter;
talk_base::scoped_ptr<webrtc::VideoRenderIosImpl> _iosRenderer;
}
@synthesize delegate = _delegate;
+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame {
return [[RTCVideoRenderer alloc]
initWithRenderView:[RTCVideoRenderer newRenderViewWithFrame:frame]];
}
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
if ((self = [super init])) {
_delegate = delegate;
// TODO (hughv): Create video renderer.
}
// TODO(hughv): Create video renderer.
[self doesNotRecognizeSelector:_cmd];
return self;
}
+ (UIView*)newRenderViewWithFrame:(CGRect)frame {
VideoRenderIosView* newView =
[[VideoRenderIosView alloc] initWithFrame:frame];
return newView;
}
- (id)initWithRenderView:(UIView*)view {
NSAssert([view isKindOfClass:[VideoRenderIosView class]],
@"The view must be of kind 'VideoRenderIosView'");
- (id)initWithView:(UIView*)view {
if ((self = [super init])) {
VideoRenderIosView* renderView = (VideoRenderIosView*)view;
CGRect frame =
CGRectMake(0, 0, view.bounds.size.width, view.bounds.size.height);
_renderView = [[VideoRenderIosView alloc] initWithFrame:frame];
_iosRenderer.reset(
new webrtc::VideoRenderIosImpl(0, (__bridge void*)renderView, NO));
if (_iosRenderer->Init() != -1) {
new webrtc::VideoRenderIosImpl(0, (__bridge void*)_renderView, NO));
if (_iosRenderer->Init() == -1) {
self = nil;
} else {
webrtc::VideoRenderCallback* callback =
_iosRenderer->AddIncomingRenderStream(0, 1, 0, 0, 1, 1);
_converter = new CallbackConverter(callback, 0);
_iosRenderer->StartRender();
} else {
self = nil;
[view addSubview:_renderView];
_renderView.autoresizingMask =
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
_renderView.translatesAutoresizingMaskIntoConstraints = YES;
_activityIndicator = [[UIActivityIndicatorView alloc]
initWithActivityIndicatorStyle:
UIActivityIndicatorViewStyleWhiteLarge];
_activityIndicator.frame = view.bounds;
_activityIndicator.hidesWhenStopped = YES;
[view addSubview:_activityIndicator];
_activityIndicator.autoresizingMask =
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_activityIndicator.translatesAutoresizingMaskIntoConstraints = YES;
[_activityIndicator startAnimating];
}
}
return self;
}
- (void)start {
[_activityIndicator stopAnimating];
[_activityIndicator removeFromSuperview];
_iosRenderer->StartRender();
}
@@ -159,7 +164,7 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
#import "RTCVideoRendererDelegate.h"
@implementation RTCVideoRenderer
@synthesize delegate = _delegate;
+ (RTCVideoRenderer*)videoRenderGUIWithFrame:(CGRect)frame {
+ (RTCVideoRenderer*)videoRendererWithFrame:(CGRect)frame {
// TODO(hughv): Implement.
return nil;
}
@@ -170,12 +175,10 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
}
return self;
}
+ (UIView*)newRenderViewWithFrame:(CGRect)frame {
- (id)initWithView:(UIView*)view {
return nil;
}
- (id)initWithRenderView:(UIView*)renderView {
return nil;
- (void)setTransform:(CGAffineTransform)transform {
}
- (void)start {
}
@@ -184,13 +187,13 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
@end
@implementation RTCVideoRenderer (Internal)
- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer {
- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface*)videoRenderer {
if ((self = [super init])) {
// TODO(hughv): Implement.
}
return self;
}
- (webrtc::VideoRendererInterface *)videoRenderer {
- (webrtc::VideoRendererInterface*)videoRenderer {
// TODO(hughv): Implement.
return NULL;
}

View File

@@ -38,7 +38,7 @@
@implementation RTCVideoSource (Internal)
- (talk_base::scoped_refptr<webrtc::VideoSourceInterface>)videoSource {
return static_cast<webrtc::VideoSourceInterface *>(self.mediaSource.get());
return static_cast<webrtc::VideoSourceInterface*>(self.mediaSource.get());
}
@end

View File

@@ -35,18 +35,19 @@
#import "RTCVideoRenderer+Internal.h"
@implementation RTCVideoTrack {
NSMutableArray *_rendererArray;
NSMutableArray* _rendererArray;
}
- (id)initWithMediaTrack:(
talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
- (id)initWithMediaTrack:
(talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)
mediaTrack {
if (self = [super initWithMediaTrack:mediaTrack]) {
_rendererArray = [NSMutableArray array];
}
return self;
}
- (void)addRenderer:(RTCVideoRenderer *)renderer {
- (void)addRenderer:(RTCVideoRenderer*)renderer {
NSAssert1(![self.renderers containsObject:renderer],
@"renderers already contains object [%@]",
[renderer description]);
@@ -54,7 +55,7 @@
self.videoTrack->AddRenderer(renderer.videoRenderer);
}
- (void)removeRenderer:(RTCVideoRenderer *)renderer {
- (void)removeRenderer:(RTCVideoRenderer*)renderer {
NSUInteger index = [self.renderers indexOfObjectIdenticalTo:renderer];
if (index != NSNotFound) {
[_rendererArray removeObjectAtIndex:index];
@@ -62,7 +63,7 @@
}
}
- (NSArray *)renderers {
- (NSArray*)renderers {
return [_rendererArray copy];
}
@@ -71,7 +72,7 @@
@implementation RTCVideoTrack (Internal)
- (talk_base::scoped_refptr<webrtc::VideoTrackInterface>)videoTrack {
return static_cast<webrtc::VideoTrackInterface *>(self.mediaTrack.get());
return static_cast<webrtc::VideoTrackInterface*>(self.mediaTrack.get());
}
@end

View File

@@ -33,12 +33,12 @@
@interface RTCMediaSource : NSObject
// The current state of the RTCMediaSource.
@property (nonatomic, assign, readonly)RTCSourceState state;
@property(nonatomic, assign, readonly) RTCSourceState state;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__(
(unavailable("init is not a supported initializer for this class.")));
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end

View File

@@ -26,6 +26,7 @@
*/
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
@protocol RTCVideoRendererDelegate;
struct CGRect;
@@ -36,18 +37,15 @@ struct CGRect;
@property(nonatomic, strong) id<RTCVideoRendererDelegate> delegate;
// A convenience method to create a renderer and window and render frames into
// that window.
+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame;
+ (UIView*)newRenderViewWithFrame:(CGRect)frame;
// The view to the following constructor
// must be one of the views from newRenderViewWithFrame.
- (id)initWithRenderView:(UIView*)renderView;
- (id)initWithView:(UIView*)view;
// Initialize the renderer. Requires a delegate which does the actual drawing
// of frames.
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
// Set an affine transform on relevant UIViews.
- (void)setTransform:(CGAffineTransform)transform;
// Starts rendering.
- (void)start;
// Stops rendering. It can be restarted again using the 'start' method above.
@@ -55,8 +53,8 @@ struct CGRect;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__(
(unavailable("init is not a supported initializer for this class.")));
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end
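From a caller's perspective, a sketch of the revised API (container and videoTrack are assumed to exist elsewhere): initWithView: now accepts an arbitrary UIView and installs its own VideoRenderIosView subview plus an activity indicator, replacing the old newRenderViewWithFrame:/initWithRenderView: pair.
RTCVideoRenderer* renderer = [[RTCVideoRenderer alloc] initWithView:container];
[videoTrack addRenderer:renderer];
[renderer start];  // stops the spinner and begins drawing frames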

View File

@@ -35,13 +35,13 @@
@implementation RTCPeerConnectionSyncObserver {
int _expectedErrors;
NSMutableArray *_expectedSignalingChanges;
NSMutableArray *_expectedAddStreamLabels;
NSMutableArray *_expectedRemoveStreamLabels;
NSMutableArray* _expectedSignalingChanges;
NSMutableArray* _expectedAddStreamLabels;
NSMutableArray* _expectedRemoveStreamLabels;
int _expectedICECandidates;
NSMutableArray *_receivedICECandidates;
NSMutableArray *_expectedICEConnectionChanges;
NSMutableArray *_expectedICEGatheringChanges;
NSMutableArray* _receivedICECandidates;
NSMutableArray* _expectedICEConnectionChanges;
NSMutableArray* _expectedICEGatheringChanges;
}
- (id)init {
@@ -58,24 +58,23 @@
return self;
}
- (int)popFirstElementAsInt:(NSMutableArray *)array {
- (int)popFirstElementAsInt:(NSMutableArray*)array {
NSAssert([array count] > 0, @"Empty array");
NSNumber *boxedState = [array objectAtIndex:0];
NSNumber* boxedState = [array objectAtIndex:0];
[array removeObjectAtIndex:0];
return [boxedState intValue];
}
- (NSString *)popFirstElementAsNSString:(NSMutableArray *)array {
- (NSString*)popFirstElementAsNSString:(NSMutableArray*)array {
NSAssert([array count] > 0, @"Empty expectation array");
NSString *string = [array objectAtIndex:0];
NSString* string = [array objectAtIndex:0];
[array removeObjectAtIndex:0];
return string;
}
- (BOOL)areAllExpectationsSatisfied {
return _expectedICECandidates <= 0 && // See comment in gotICECandidate.
_expectedErrors == 0 &&
[_expectedSignalingChanges count] == 0 &&
_expectedErrors == 0 && [_expectedSignalingChanges count] == 0 &&
[_expectedICEConnectionChanges count] == 0 &&
[_expectedICEGatheringChanges count] == 0 &&
[_expectedAddStreamLabels count] == 0 &&
@@ -83,7 +82,7 @@
// TODO(hughv): Test video state here too.
}
- (NSArray *)releaseReceivedICECandidates {
- (NSArray*)releaseReceivedICECandidates {
NSArray* ret = _receivedICECandidates;
_receivedICECandidates = [NSMutableArray array];
return ret;
@@ -97,11 +96,11 @@
[_expectedSignalingChanges addObject:@((int)state)];
}
- (void)expectAddStream:(NSString *)label {
- (void)expectAddStream:(NSString*)label {
[_expectedAddStreamLabels addObject:label];
}
- (void)expectRemoveStream:(NSString *)label {
- (void)expectRemoveStream:(NSString*)label {
[_expectedRemoveStreamLabels addObject:label];
}
@@ -129,39 +128,41 @@
#pragma mark - RTCPeerConnectionDelegate methods
- (void)peerConnectionOnError:(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection {
NSLog(@"RTCPeerConnectionDelegate::onError");
NSAssert(--_expectedErrors >= 0, @"Unexpected error");
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
signalingStateChanged:(RTCSignalingState)stateChanged {
int expectedState = [self popFirstElementAsInt:_expectedSignalingChanges];
NSString *message = [NSString stringWithFormat: @"RTCPeerConnectionDelegate::"
@"onSignalingStateChange [%d] expected[%d]", stateChanged, expectedState];
NSAssert(expectedState == (int) stateChanged, message);
NSString* message =
[NSString stringWithFormat:@"RTCPeerConnectionDelegate::"
@"onSignalingStateChange [%d] expected[%d]",
stateChanged,
expectedState];
NSAssert(expectedState == (int)stateChanged, message);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
addedStream:(RTCMediaStream *)stream {
NSString *expectedLabel =
- (void)peerConnection:(RTCPeerConnection*)peerConnection
addedStream:(RTCMediaStream*)stream {
NSString* expectedLabel =
[self popFirstElementAsNSString:_expectedAddStreamLabels];
NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected");
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
removedStream:(RTCMediaStream *)stream {
NSString *expectedLabel =
- (void)peerConnection:(RTCPeerConnection*)peerConnection
removedStream:(RTCMediaStream*)stream {
NSString* expectedLabel =
[self popFirstElementAsNSString:_expectedRemoveStreamLabels];
NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected");
}
- (void)peerConnectionOnRenegotiationNeeded:
(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
gotICECandidate:(RTCICECandidate *)candidate {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
gotICECandidate:(RTCICECandidate*)candidate {
--_expectedICECandidates;
// We don't assert expectedICECandidates >= 0 because it's hard to know
// how many to expect, in general. We only use expectICECandidates to
@@ -169,7 +170,7 @@
[_receivedICECandidates addObject:candidate];
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceGatheringChanged:(RTCICEGatheringState)newState {
// It's fine to get a variable number of GATHERING messages before
// COMPLETE fires (depending on how long the test runs) so we don't assert
@@ -181,7 +182,7 @@
NSAssert(expectedState == (int)newState, @"Unexpected ICE gathering state");
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceConnectionChanged:(RTCICEConnectionState)newState {
// See TODO(fischman) in RTCPeerConnectionTest.mm about Completed.
if (newState == RTCICEConnectionCompleted)

View File

@@ -48,17 +48,16 @@
@interface RTCPeerConnectionTest : NSObject
// Returns whether the two sessions are of the same type.
+ (BOOL)isSession:(RTCSessionDescription *)session1
ofSameTypeAsSession:(RTCSessionDescription *)session2;
+ (BOOL)isSession:(RTCSessionDescription*)session1
ofSameTypeAsSession:(RTCSessionDescription*)session2;
// Create and add tracks to pc, with the given source, label, and IDs
- (RTCMediaStream *)
addTracksToPeerConnection:(RTCPeerConnection *)pc
withFactory:(RTCPeerConnectionFactory *)factory
videoSource:(RTCVideoSource *)videoSource
streamLabel:(NSString *)streamLabel
videoTrackID:(NSString *)videoTrackID
audioTrackID:(NSString *)audioTrackID;
- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
withFactory:(RTCPeerConnectionFactory*)factory
videoSource:(RTCVideoSource*)videoSource
streamLabel:(NSString*)streamLabel
videoTrackID:(NSString*)videoTrackID
audioTrackID:(NSString*)audioTrackID;
- (void)testCompleteSession;
@@ -66,46 +65,45 @@
@implementation RTCPeerConnectionTest
+ (BOOL)isSession:(RTCSessionDescription *)session1
ofSameTypeAsSession:(RTCSessionDescription *)session2 {
+ (BOOL)isSession:(RTCSessionDescription*)session1
ofSameTypeAsSession:(RTCSessionDescription*)session2 {
return [session1.type isEqual:session2.type];
}
- (RTCMediaStream *)
addTracksToPeerConnection:(RTCPeerConnection *)pc
withFactory:(RTCPeerConnectionFactory *)factory
videoSource:(RTCVideoSource *)videoSource
streamLabel:(NSString *)streamLabel
videoTrackID:(NSString *)videoTrackID
audioTrackID:(NSString *)audioTrackID {
RTCMediaStream *localMediaStream = [factory mediaStreamWithLabel:streamLabel];
RTCVideoTrack *videoTrack =
- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
withFactory:(RTCPeerConnectionFactory*)factory
videoSource:(RTCVideoSource*)videoSource
streamLabel:(NSString*)streamLabel
videoTrackID:(NSString*)videoTrackID
audioTrackID:(NSString*)audioTrackID {
RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel];
RTCVideoTrack* videoTrack =
[factory videoTrackWithID:videoTrackID source:videoSource];
RTCVideoRenderer *videoRenderer =
RTCVideoRenderer* videoRenderer =
[[RTCVideoRenderer alloc] initWithDelegate:nil];
[videoTrack addRenderer:videoRenderer];
[localMediaStream addVideoTrack:videoTrack];
// Test that removal/re-add works.
[localMediaStream removeVideoTrack:videoTrack];
[localMediaStream addVideoTrack:videoTrack];
RTCAudioTrack *audioTrack = [factory audioTrackWithID:audioTrackID];
RTCAudioTrack* audioTrack = [factory audioTrackWithID:audioTrackID];
[localMediaStream addAudioTrack:audioTrack];
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] init];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init];
[pc addStream:localMediaStream constraints:constraints];
return localMediaStream;
}
- (void)testCompleteSession {
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] init];
RTCPeerConnectionSyncObserver *offeringExpectations =
RTCPeerConnectionFactory* factory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] init];
RTCPeerConnectionSyncObserver* offeringExpectations =
[[RTCPeerConnectionSyncObserver alloc] init];
RTCPeerConnection* pcOffer =
[factory peerConnectionWithICEServers:nil
constraints:constraints
delegate:offeringExpectations];
RTCPeerConnectionSyncObserver *answeringExpectations =
RTCPeerConnectionSyncObserver* answeringExpectations =
[[RTCPeerConnectionSyncObserver alloc] init];
RTCPeerConnection* pcAnswer =
@@ -113,51 +111,48 @@
constraints:constraints
delegate:answeringExpectations];
// TODO(hughv): Create video capturer
RTCVideoCapturer *capturer = nil;
RTCVideoSource *videoSource =
RTCVideoCapturer* capturer = nil;
RTCVideoSource* videoSource =
[factory videoSourceWithCapturer:capturer constraints:constraints];
// Here and below, "oLMS" refers to offerer's local media stream, and "aLMS"
// refers to the answerer's local media stream, with suffixes of "a0" and "v0"
// for audio and video tracks, resp. These mirror chrome historical naming.
RTCMediaStream *oLMSUnused =
[self addTracksToPeerConnection:pcOffer
withFactory:factory
videoSource:videoSource
streamLabel:@"oLMS"
videoTrackID:@"oLMSv0"
audioTrackID:@"oLMSa0"];
RTCSessionDescriptionSyncObserver *sdpObserver =
RTCMediaStream* oLMSUnused = [self addTracksToPeerConnection:pcOffer
withFactory:factory
videoSource:videoSource
streamLabel:@"oLMS"
videoTrackID:@"oLMSv0"
audioTrackID:@"oLMSa0"];
RTCSessionDescriptionSyncObserver* sdpObserver =
[[RTCSessionDescriptionSyncObserver alloc] init];
[pcOffer createOfferWithDelegate:sdpObserver constraints:constraints];
[sdpObserver wait];
EXPECT_TRUE(sdpObserver.success);
RTCSessionDescription *offerSDP = sdpObserver.sessionDescription;
RTCSessionDescription* offerSDP = sdpObserver.sessionDescription;
EXPECT_EQ([@"offer" compare:offerSDP.type options:NSCaseInsensitiveSearch],
NSOrderedSame);
EXPECT_GT([offerSDP.description length], 0);
sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
[answeringExpectations
expectSignalingChange:RTCSignalingHaveRemoteOffer];
[answeringExpectations expectSignalingChange:RTCSignalingHaveRemoteOffer];
[answeringExpectations expectAddStream:@"oLMS"];
[pcAnswer setRemoteDescriptionWithDelegate:sdpObserver
sessionDescription:offerSDP];
[sdpObserver wait];
RTCMediaStream *aLMSUnused =
[self addTracksToPeerConnection:pcAnswer
withFactory:factory
videoSource:videoSource
streamLabel:@"aLMS"
videoTrackID:@"aLMSv0"
audioTrackID:@"aLMSa0"];
RTCMediaStream* aLMSUnused = [self addTracksToPeerConnection:pcAnswer
withFactory:factory
videoSource:videoSource
streamLabel:@"aLMS"
videoTrackID:@"aLMSv0"
audioTrackID:@"aLMSa0"];
sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
[pcAnswer createAnswerWithDelegate:sdpObserver constraints:constraints];
[sdpObserver wait];
EXPECT_TRUE(sdpObserver.success);
RTCSessionDescription *answerSDP = sdpObserver.sessionDescription;
RTCSessionDescription* answerSDP = sdpObserver.sessionDescription;
EXPECT_EQ([@"answer" compare:answerSDP.type options:NSCaseInsensitiveSearch],
NSOrderedSame);
EXPECT_GT([answerSDP.description length], 0);
@@ -203,12 +198,12 @@
EXPECT_TRUE([offerSDP.type isEqual:pcAnswer.remoteDescription.type]);
EXPECT_TRUE([answerSDP.type isEqual:pcAnswer.localDescription.type]);
for (RTCICECandidate *candidate in
offeringExpectations.releaseReceivedICECandidates) {
for (RTCICECandidate* candidate in offeringExpectations
.releaseReceivedICECandidates) {
[pcAnswer addICECandidate:candidate];
}
for (RTCICECandidate *candidate in
answeringExpectations.releaseReceivedICECandidates) {
for (RTCICECandidate* candidate in answeringExpectations
.releaseReceivedICECandidates) {
[pcOffer addICECandidate:candidate];
}
@@ -231,7 +226,7 @@
// a TestBase since it's not.
TEST(RTCPeerConnectionTest, SessionTest) {
talk_base::InitializeSSL();
RTCPeerConnectionTest *pcTest = [[RTCPeerConnectionTest alloc] init];
RTCPeerConnectionTest* pcTest = [[RTCPeerConnectionTest alloc] init];
[pcTest testCompleteSession];
talk_base::CleanupSSL();
}

View File

@@ -33,10 +33,10 @@
#import "RTCSessionDescription.h"
@interface RTCSessionDescriptionSyncObserver()
@interface RTCSessionDescriptionSyncObserver ()
// CondVar used to wait for, and signal arrival of, an SDP-related callback.
@property(nonatomic, strong) NSCondition *condition;
@property(nonatomic, strong) NSCondition* condition;
// Whether an SDP-related callback has fired; cleared before wait returns.
@property(atomic, assign) BOOL signaled;
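The -wait method the tests call is not part of this hunk; given the comments above, a sketch of the standard NSCondition pattern it presumably follows:
- (void)wait {
  [self.condition lock];
  while (!self.signaled) {
    [self.condition wait];  // lock released while blocked, reacquired after
  }
  self.signaled = NO;  // "cleared before wait returns", per the comment above
  [self.condition unlock];
}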
@@ -72,9 +72,9 @@
}
#pragma mark - RTCSessionDescriptonDelegate methods
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didCreateSessionDescription:(RTCSessionDescription *)sdp
error:(NSError *)error {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didCreateSessionDescription:(RTCSessionDescription*)sdp
error:(NSError*)error {
[self.condition lock];
if (error) {
self.success = NO;
@@ -87,8 +87,8 @@
[self.condition unlock];
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didSetSessionDescriptionWithError:(NSError *)error {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didSetSessionDescriptionWithError:(NSError*)error {
[self.condition lock];
if (error) {
self.success = NO;

View File

@@ -27,7 +27,7 @@
#include "talk/base/gunit.h"
int main(int argc, char *argv[]) {
int main(int argc, char* argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}

View File

@@ -47,12 +47,20 @@
// for the registered handler to be called with received messages.
@interface APPRTCAppClient : NSObject<NSURLConnectionDataDelegate>
@property(nonatomic, assign) id<ICEServerDelegate> ICEServerDelegate;
@property(nonatomic, assign) id<GAEMessageHandler> messageHandler;
@property(nonatomic, weak, readonly) id<ICEServerDelegate> ICEServerDelegate;
@property(nonatomic, weak, readonly) id<GAEMessageHandler> messageHandler;
@property(nonatomic, assign) BOOL initiator;
@property(nonatomic, strong) RTCMediaConstraints* videoConstraints;
@property(nonatomic, copy, readonly) RTCMediaConstraints* videoConstraints;
- (void)connectToRoom:(NSURL *)room;
- (void)sendData:(NSData *)data;
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler;
- (void)connectToRoom:(NSURL*)room;
- (void)sendData:(NSData*)data;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end
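With plain -init now disallowed, callers go through the new designated initializer; a sketch (the caller is assumed to adopt both ICEServerDelegate and GAEMessageHandler, and the room URL is illustrative):
APPRTCAppClient* client =
    [[APPRTCAppClient alloc] initWithICEServerDelegate:self
                                        messageHandler:self];
[client connectToRoom:
            [NSURL URLWithString:@"https://apprtc.appspot.com/?r=12345678"]];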

View File

@@ -37,13 +37,13 @@
@interface APPRTCAppClient ()
@property(nonatomic, strong) dispatch_queue_t backgroundQueue;
@property(nonatomic, copy) NSString *baseURL;
@property(nonatomic, strong) GAEChannelClient *gaeChannel;
@property(nonatomic, copy) NSString *postMessageUrl;
@property(nonatomic, copy) NSString *pcConfig;
@property(nonatomic, strong) NSMutableString *roomHtml;
@property(atomic, strong) NSMutableArray *sendQueue;
@property(nonatomic, copy) NSString *token;
@property(nonatomic, copy) NSString* baseURL;
@property(nonatomic, strong) GAEChannelClient* gaeChannel;
@property(nonatomic, copy) NSString* postMessageUrl;
@property(nonatomic, copy) NSString* pcConfig;
@property(nonatomic, strong) NSMutableString* roomHtml;
@property(atomic, strong) NSMutableArray* sendQueue;
@property(nonatomic, copy) NSString* token;
@property(nonatomic, assign) BOOL verboseLogging;
@@ -51,23 +51,11 @@
@implementation APPRTCAppClient
@synthesize ICEServerDelegate = _ICEServerDelegate;
@synthesize messageHandler = _messageHandler;
@synthesize backgroundQueue = _backgroundQueue;
@synthesize baseURL = _baseURL;
@synthesize gaeChannel = _gaeChannel;
@synthesize postMessageUrl = _postMessageUrl;
@synthesize pcConfig = _pcConfig;
@synthesize roomHtml = _roomHtml;
@synthesize sendQueue = _sendQueue;
@synthesize token = _token;
@synthesize verboseLogging = _verboseLogging;
@synthesize initiator = _initiator;
@synthesize videoConstraints = _videoConstraints;
- (id)init {
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler {
if (self = [super init]) {
_ICEServerDelegate = delegate;
_messageHandler = handler;
_backgroundQueue = dispatch_queue_create("RTCBackgroundQueue", NULL);
_sendQueue = [NSMutableArray array];
// Uncomment to see Request/Response logging.
@@ -78,12 +66,12 @@
#pragma mark - Public methods
- (void)connectToRoom:(NSURL *)url {
NSURLRequest *request = [self getRequestFromUrl:url];
- (void)connectToRoom:(NSURL*)url {
NSURLRequest* request = [self getRequestFromUrl:url];
[NSURLConnection connectionWithRequest:request delegate:self];
}
- (void)sendData:(NSData *)data {
- (void)sendData:(NSData*)data {
@synchronized(self) {
[self maybeLogMessage:@"Send message"];
[self.sendQueue addObject:[data copy]];
@@ -93,49 +81,53 @@
#pragma mark - Internal methods
- (NSString*)findVar:(NSString*)name
strippingQuotes:(BOOL)strippingQuotes {
- (NSString*)findVar:(NSString*)name strippingQuotes:(BOOL)strippingQuotes {
NSError* error;
NSString* pattern =
[NSString stringWithFormat:@".*\n *var %@ = ([^\n]*);\n.*", name];
NSRegularExpression *regexp =
NSRegularExpression* regexp =
[NSRegularExpression regularExpressionWithPattern:pattern
options:0
error:&error];
NSAssert(!error, @"Unexpected error compiling regex: ",
NSAssert(!error,
@"Unexpected error compiling regex: ",
error.localizedDescription);
NSRange fullRange = NSMakeRange(0, [self.roomHtml length]);
NSArray *matches =
NSArray* matches =
[regexp matchesInString:self.roomHtml options:0 range:fullRange];
if ([matches count] != 1) {
[self showMessage:[NSString stringWithFormat:@"%d matches for %@ in %@",
[matches count], name, self.roomHtml]];
[matches count],
name,
self.roomHtml]];
return nil;
}
NSRange matchRange = [matches[0] rangeAtIndex:1];
NSString* value = [self.roomHtml substringWithRange:matchRange];
if (strippingQuotes) {
NSAssert([value length] > 2,
@"Can't strip quotes from short string: [%@]", value);
@"Can't strip quotes from short string: [%@]",
value);
NSAssert(([value characterAtIndex:0] == '\'' &&
[value characterAtIndex:[value length] - 1] == '\''),
@"Can't strip quotes from unquoted string: [%@]", value);
@"Can't strip quotes from unquoted string: [%@]",
value);
value = [value substringWithRange:NSMakeRange(1, [value length] - 2)];
}
return value;
}
- (NSURLRequest *)getRequestFromUrl:(NSURL *)url {
- (NSURLRequest*)getRequestFromUrl:(NSURL*)url {
self.roomHtml = [NSMutableString stringWithCapacity:20000];
NSString *path =
NSString* path =
[NSString stringWithFormat:@"https:%@", [url resourceSpecifier]];
NSURLRequest *request =
NSURLRequest* request =
[NSURLRequest requestWithURL:[NSURL URLWithString:path]];
return request;
}
- (void)maybeLogMessage:(NSString *)message {
- (void)maybeLogMessage:(NSString*)message {
if (self.verboseLogging) {
NSLog(@"%@", message);
}
@@ -143,33 +135,33 @@
- (void)requestQueueDrainInBackground {
dispatch_async(self.backgroundQueue, ^(void) {
// TODO(hughv): This can block the UI thread. Fix.
@synchronized(self) {
if ([self.postMessageUrl length] < 1) {
return;
// TODO(hughv): This can block the UI thread. Fix.
@synchronized(self) {
if ([self.postMessageUrl length] < 1) {
return;
}
for (NSData* data in self.sendQueue) {
NSString* url =
[NSString stringWithFormat:@"%@/%@",
self.baseURL, self.postMessageUrl];
[self sendData:data withUrl:url];
}
[self.sendQueue removeAllObjects];
}
for (NSData *data in self.sendQueue) {
NSString *url = [NSString stringWithFormat:@"%@/%@",
self.baseURL,
self.postMessageUrl];
[self sendData:data withUrl:url];
}
[self.sendQueue removeAllObjects];
}
});
});
}
- (void)sendData:(NSData *)data withUrl:(NSString *)url {
NSMutableURLRequest *request =
- (void)sendData:(NSData*)data withUrl:(NSString*)url {
NSMutableURLRequest* request =
[NSMutableURLRequest requestWithURL:[NSURL URLWithString:url]];
request.HTTPMethod = @"POST";
[request setHTTPBody:data];
NSURLResponse *response;
NSError *error;
NSData *responseData = [NSURLConnection sendSynchronousRequest:request
NSURLResponse* response;
NSError* error;
NSData* responseData = [NSURLConnection sendSynchronousRequest:request
returningResponse:&response
error:&error];
NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
NSHTTPURLResponse* httpResponse = (NSHTTPURLResponse*)response;
int status = [httpResponse statusCode];
NSAssert(status == 200,
@"Bad response [%d] to message: %@\n\n%@",
@@ -178,9 +170,9 @@
[NSString stringWithUTF8String:[responseData bytes]]);
}
- (void)showMessage:(NSString *)message {
- (void)showMessage:(NSString*)message {
NSLog(@"%@", message);
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Unable to join"
UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:@"Unable to join"
message:message
delegate:nil
cancelButtonTitle:@"OK"
@@ -188,96 +180,97 @@
[alertView show];
}
- (void)updateICEServers:(NSMutableArray *)ICEServers
withTurnServer:(NSString *)turnServerUrl {
- (void)updateICEServers:(NSMutableArray*)ICEServers
withTurnServer:(NSString*)turnServerUrl {
if ([turnServerUrl length] < 1) {
[self.ICEServerDelegate onICEServers:ICEServers];
return;
}
dispatch_async(self.backgroundQueue, ^(void) {
NSMutableURLRequest *request = [NSMutableURLRequest
requestWithURL:[NSURL URLWithString:turnServerUrl]];
[request addValue:@"Mozilla/5.0" forHTTPHeaderField:@"user-agent"];
[request addValue:@"https://apprtc.appspot.com"
forHTTPHeaderField:@"origin"];
NSURLResponse *response;
NSError *error;
NSData *responseData = [NSURLConnection sendSynchronousRequest:request
returningResponse:&response
error:&error];
if (!error) {
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:responseData
options:0
error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
NSString *username = json[@"username"];
NSString *password = json[@"password"];
NSArray* uris = json[@"uris"];
for (int i = 0; i < [uris count]; ++i) {
NSString *turnServer = [uris objectAtIndex:i];
RTCICEServer *ICEServer =
[[RTCICEServer alloc] initWithURI:[NSURL URLWithString:turnServer]
username:username
password:password];
NSLog(@"Added ICE Server: %@", ICEServer);
[ICEServers addObject:ICEServer];
NSMutableURLRequest* request = [NSMutableURLRequest
requestWithURL:[NSURL URLWithString:turnServerUrl]];
[request addValue:@"Mozilla/5.0" forHTTPHeaderField:@"user-agent"];
[request addValue:@"https://apprtc.appspot.com"
forHTTPHeaderField:@"origin"];
NSURLResponse* response;
NSError* error;
NSData* responseData = [NSURLConnection sendSynchronousRequest:request
returningResponse:&response
error:&error];
if (!error) {
NSDictionary* json =
[NSJSONSerialization JSONObjectWithData:responseData
options:0
error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
NSString* username = json[@"username"];
NSString* password = json[@"password"];
NSArray* uris = json[@"uris"];
for (int i = 0; i < [uris count]; ++i) {
NSString* turnServer = [uris objectAtIndex:i];
RTCICEServer* ICEServer =
[[RTCICEServer alloc] initWithURI:[NSURL URLWithString:turnServer]
username:username
password:password];
NSLog(@"Added ICE Server: %@", ICEServer);
[ICEServers addObject:ICEServer];
}
} else {
NSLog(@"Unable to get TURN server. Error: %@", error.description);
}
} else {
NSLog(@"Unable to get TURN server. Error: %@", error.description);
}
dispatch_async(dispatch_get_main_queue(), ^(void) {
[self.ICEServerDelegate onICEServers:ICEServers];
});
dispatch_async(dispatch_get_main_queue(), ^(void) {
[self.ICEServerDelegate onICEServers:ICEServers];
});
});
}
#pragma mark - NSURLConnectionDataDelegate methods
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data {
NSString *roomHtml = [NSString stringWithUTF8String:[data bytes]];
[self maybeLogMessage:
[NSString stringWithFormat:@"Received %d chars", [roomHtml length]]];
- (void)connection:(NSURLConnection*)connection didReceiveData:(NSData*)data {
NSString* roomHtml = [NSString stringWithUTF8String:[data bytes]];
[self maybeLogMessage:[NSString stringWithFormat:@"Received %d chars",
[roomHtml length]]];
[self.roomHtml appendString:roomHtml];
}
- (void)connection:(NSURLConnection *)connection
didReceiveResponse:(NSURLResponse *)response {
NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
- (void)connection:(NSURLConnection*)connection
didReceiveResponse:(NSURLResponse*)response {
NSHTTPURLResponse* httpResponse = (NSHTTPURLResponse*)response;
int statusCode = [httpResponse statusCode];
[self maybeLogMessage:
[self
maybeLogMessage:
[NSString stringWithFormat:
@"Response received\nURL\n%@\nStatus [%d]\nHeaders\n%@",
[httpResponse URL],
statusCode,
[httpResponse allHeaderFields]]];
@"Response received\nURL\n%@\nStatus [%d]\nHeaders\n%@",
[httpResponse URL],
statusCode,
[httpResponse allHeaderFields]]];
NSAssert(statusCode == 200, @"Invalid response of %d received.", statusCode);
}
- (void)connectionDidFinishLoading:(NSURLConnection *)connection {
- (void)connectionDidFinishLoading:(NSURLConnection*)connection {
[self maybeLogMessage:[NSString stringWithFormat:@"finished loading %d chars",
[self.roomHtml length]]];
[self.roomHtml length]]];
NSRegularExpression* fullRegex =
[NSRegularExpression regularExpressionWithPattern:@"room is full"
options:0
error:nil];
[NSRegularExpression regularExpressionWithPattern:@"room is full"
options:0
error:nil];
if ([fullRegex
numberOfMatchesInString:self.roomHtml
options:0
range:NSMakeRange(0, [self.roomHtml length])]) {
[self showMessage:@"Room full"];
APPRTCAppDelegate *ad =
(APPRTCAppDelegate *)[[UIApplication sharedApplication] delegate];
APPRTCAppDelegate* ad =
(APPRTCAppDelegate*)[[UIApplication sharedApplication] delegate];
[ad closeVideoUI];
return;
}
NSString *fullUrl = [[[connection originalRequest] URL] absoluteString];
NSString* fullUrl = [[[connection originalRequest] URL] absoluteString];
NSRange queryRange = [fullUrl rangeOfString:@"?"];
self.baseURL = [fullUrl substringToIndex:queryRange.location];
[self maybeLogMessage:
[NSString stringWithFormat:@"Base URL: %@", self.baseURL]];
[self maybeLogMessage:[NSString
stringWithFormat:@"Base URL: %@", self.baseURL]];
self.initiator = [[self findVar:@"initiator" strippingQuotes:NO] boolValue];
self.token = [self findVar:@"channelToken" strippingQuotes:YES];
@@ -290,45 +283,45 @@
if (!roomKey || !me)
return;
self.postMessageUrl =
[NSString stringWithFormat:@"/message?r=%@&u=%@", roomKey, me];
[NSString stringWithFormat:@"/message?r=%@&u=%@", roomKey, me];
[self maybeLogMessage:[NSString stringWithFormat:@"POST message URL: %@",
self.postMessageUrl]];
self.postMessageUrl]];
NSString* pcConfig = [self findVar:@"pcConfig" strippingQuotes:NO];
if (!pcConfig)
return;
[self maybeLogMessage:
[NSString stringWithFormat:@"PC Config JSON: %@", pcConfig]];
[self maybeLogMessage:[NSString
stringWithFormat:@"PC Config JSON: %@", pcConfig]];
NSString *turnServerUrl = [self findVar:@"turnUrl" strippingQuotes:YES];
NSString* turnServerUrl = [self findVar:@"turnUrl" strippingQuotes:YES];
if (turnServerUrl) {
[self maybeLogMessage:
[NSString stringWithFormat:@"TURN server request URL: %@",
turnServerUrl]];
[self maybeLogMessage:[NSString
stringWithFormat:@"TURN server request URL: %@",
turnServerUrl]];
}
NSError *error;
NSData *pcData = [pcConfig dataUsingEncoding:NSUTF8StringEncoding];
NSDictionary *json =
NSError* error;
NSData* pcData = [pcConfig dataUsingEncoding:NSUTF8StringEncoding];
NSDictionary* json =
[NSJSONSerialization JSONObjectWithData:pcData options:0 error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
NSArray *servers = [json objectForKey:@"iceServers"];
NSMutableArray *ICEServers = [NSMutableArray array];
for (NSDictionary *server in servers) {
NSString *url = [server objectForKey:@"urls"];
NSString *username = json[@"username"];
NSString *credential = [server objectForKey:@"credential"];
NSArray* servers = [json objectForKey:@"iceServers"];
NSMutableArray* ICEServers = [NSMutableArray array];
for (NSDictionary* server in servers) {
NSString* url = [server objectForKey:@"urls"];
NSString* username = json[@"username"];
NSString* credential = [server objectForKey:@"credential"];
if (!username) {
username = @"";
}
if (!credential) {
credential = @"";
}
[self maybeLogMessage:
[NSString stringWithFormat:@"url [%@] - credential [%@]",
url,
credential]];
RTCICEServer *ICEServer =
[self maybeLogMessage:[NSString
stringWithFormat:@"url [%@] - credential [%@]",
url,
credential]];
RTCICEServer* ICEServer =
[[RTCICEServer alloc] initWithURI:[NSURL URLWithString:url]
username:username
password:credential];
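The shape of the |pcConfig| JSON this loop walks can be read off the keys used above ("iceServers", "urls", "credential"); a structurally plausible, entirely invented example:
{ "iceServers": [
    { "urls": "stun:stun.example.org" },
    { "urls": "turn:turn.example.org:3478", "credential": "secret" }
  ] }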
@@ -340,18 +333,19 @@
NSString* mc = [self findVar:@"mediaConstraints" strippingQuotes:NO];
if (mc) {
error = nil;
NSData *mcData = [mc dataUsingEncoding:NSUTF8StringEncoding];
NSData* mcData = [mc dataUsingEncoding:NSUTF8StringEncoding];
json =
[NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
if ([[json objectForKey:@"video"] boolValue]) {
self.videoConstraints = [[RTCMediaConstraints alloc] init];
_videoConstraints = [[RTCMediaConstraints alloc] init];
}
}
[self maybeLogMessage:
[NSString stringWithFormat:@"About to open GAE with token: %@",
self.token]];
[self
maybeLogMessage:[NSString
stringWithFormat:@"About to open GAE with token: %@",
self.token]];
self.gaeChannel =
[[GAEChannelClient alloc] initWithToken:self.token
delegate:self.messageHandler];

@@ -34,9 +34,9 @@
// Used to send a message to an apprtc.appspot.com "room".
@protocol APPRTCSendMessage<NSObject>
- (void)sendData:(NSData *)data;
- (void)sendData:(NSData*)data;
// Logging helper.
- (void)displayLogMessage:(NSString *)message;
- (void)displayLogMessage:(NSString*)message;
@end
@class APPRTCViewController;
@@ -51,8 +51,8 @@
RTCSessionDescriptonDelegate,
UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@property (strong, nonatomic) APPRTCViewController *viewController;
@property(strong, nonatomic) UIWindow* window;
@property(strong, nonatomic) APPRTCViewController* viewController;
- (void)closeVideoUI;

@@ -42,13 +42,13 @@
#import "RTCVideoRenderer.h"
#import "RTCVideoCapturer.h"
#import "RTCVideoTrack.h"
#import "VideoView.h"
#import "APPRTCVideoView.h"
@interface PCObserver : NSObject<RTCPeerConnectionDelegate>
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate;
@property(nonatomic, strong) VideoView *videoView;
@property(nonatomic, strong) APPRTCVideoView* videoView;
@end
@@ -56,8 +56,6 @@
id<APPRTCSendMessage> _delegate;
}
@synthesize videoView = _videoView;
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate {
if (self = [super init]) {
_delegate = delegate;
@@ -65,70 +63,71 @@
return self;
}
- (void)peerConnectionOnError:(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection {
NSLog(@"PCO onError.");
NSAssert(NO, @"PeerConnection failed.");
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
signalingStateChanged:(RTCSignalingState)stateChanged {
NSLog(@"PCO onSignalingStateChange: %d", stateChanged);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
addedStream:(RTCMediaStream *)stream {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
addedStream:(RTCMediaStream*)stream {
NSLog(@"PCO onAddStream.");
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSAssert([stream.audioTracks count] >= 1,
@"Expected at least 1 audio stream");
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
[[self videoView]
renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
}
NSAssert([stream.audioTracks count] >= 1,
@"Expected at least 1 audio stream");
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
[self.videoView
renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
}
});
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
removedStream:(RTCMediaStream *)stream {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
removedStream:(RTCMediaStream*)stream {
NSLog(@"PCO onRemoveStream.");
}
- (void)
peerConnectionOnRenegotiationNeeded:(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
NSLog(@"PCO onRenegotiationNeeded.");
// TODO(hughv): Handle this.
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
gotICECandidate:(RTCICECandidate *)candidate {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
gotICECandidate:(RTCICECandidate*)candidate {
NSLog(@"PCO onICECandidate.\n Mid[%@] Index[%d] Sdp[%@]",
candidate.sdpMid,
candidate.sdpMLineIndex,
candidate.sdp);
NSDictionary *json =
@{ @"type" : @"candidate",
@"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex],
@"id" : candidate.sdpMid,
@"candidate" : candidate.sdp };
NSError *error;
NSData *data =
NSDictionary* json = @{
@"type" : @"candidate",
@"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex],
@"id" : candidate.sdpMid,
@"candidate" : candidate.sdp
};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
if (!error) {
[_delegate sendData:data];
} else {
NSAssert(NO, @"Unable to serialize JSON object with error: %@",
NSAssert(NO,
@"Unable to serialize JSON object with error: %@",
error.localizedDescription);
}
}
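Grounded in the dictionary literal above, a serialized candidate message on the wire would look like this (field values invented):
{ "type": "candidate",
  "label": 0,
  "id": "audio",
  "candidate": "candidate:1 1 udp 2122260223 10.0.1.2 58321 typ host" }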
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceGatheringChanged:(RTCICEGatheringState)newState {
NSLog(@"PCO onIceGatheringChange. %d", newState);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceConnectionChanged:(RTCICEConnectionState)newState {
NSLog(@"PCO onIceConnectionChange. %d", newState);
if (newState == RTCICEConnectionConnected)
@@ -136,7 +135,7 @@
NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!");
}
- (void)displayLogMessage:(NSString *)message {
- (void)displayLogMessage:(NSString*)message {
[_delegate displayLogMessage:message];
}
@@ -144,28 +143,20 @@
@interface APPRTCAppDelegate ()
@property(nonatomic, strong) APPRTCAppClient *client;
@property(nonatomic, strong) PCObserver *pcObserver;
@property(nonatomic, strong) RTCPeerConnection *peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory;
@property(nonatomic, strong) NSMutableArray *queuedRemoteCandidates;
@property(nonatomic, strong) APPRTCAppClient* client;
@property(nonatomic, strong) PCObserver* pcObserver;
@property(nonatomic, strong) RTCPeerConnection* peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory* peerConnectionFactory;
@property(nonatomic, strong) NSMutableArray* queuedRemoteCandidates;
@end
@implementation APPRTCAppDelegate
@synthesize window = _window;
@synthesize viewController = _viewController;
@synthesize client = _client;
@synthesize pcObserver = _pcObserver;
@synthesize peerConnection = _peerConnection;
@synthesize peerConnectionFactory = _peerConnectionFactory;
@synthesize queuedRemoteCandidates = _queuedRemoteCandidates;
#pragma mark - UIApplicationDelegate methods
- (BOOL)application:(UIApplication *)application
didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
- (BOOL)application:(UIApplication*)application
didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
[RTCPeerConnectionFactory initializeSSL];
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
self.viewController =
@@ -176,100 +167,97 @@
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
- (void)applicationWillResignActive:(UIApplication*)application {
[self displayLogMessage:@"Application lost focus, connection broken."];
[self closeVideoUI];
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
- (void)applicationDidEnterBackground:(UIApplication*)application {
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
- (void)applicationWillEnterForeground:(UIApplication*)application {
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
- (void)applicationDidBecomeActive:(UIApplication*)application {
}
- (void)applicationWillTerminate:(UIApplication *)application {
- (void)applicationWillTerminate:(UIApplication*)application {
}
- (BOOL)application:(UIApplication *)application
openURL:(NSURL *)url
sourceApplication:(NSString *)sourceApplication
- (BOOL)application:(UIApplication*)application
openURL:(NSURL*)url
sourceApplication:(NSString*)sourceApplication
annotation:(id)annotation {
if (self.client) {
return NO;
}
self.client = [[APPRTCAppClient alloc] init];
self.client.ICEServerDelegate = self;
self.client.messageHandler = self;
self.client = [[APPRTCAppClient alloc] initWithICEServerDelegate:self
messageHandler:self];
[self.client connectToRoom:url];
return YES;
}
- (void)displayLogMessage:(NSString *)message {
- (void)displayLogMessage:(NSString*)message {
NSLog(@"%@", message);
[self.viewController displayText:message];
}
#pragma mark - RTCSendMessage method
- (void)sendData:(NSData *)data {
- (void)sendData:(NSData*)data {
[self.client sendData:data];
}
#pragma mark - ICEServerDelegate method
- (void)onICEServers:(NSArray *)servers {
- (void)onICEServers:(NSArray*)servers {
self.queuedRemoteCandidates = [NSMutableArray array];
self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:
@[[[RTCPair alloc]
initWithKey:@"OfferToReceiveAudio"
value:@"true"],
[[RTCPair alloc]
initWithKey:@"OfferToReceiveVideo"
value:@"true"]]
optionalConstraints:
@[[[RTCPair alloc]
initWithKey:@"internalSctpDataChannels"
value:@"true"],
[[RTCPair alloc]
initWithKey:@"DtlsSrtpKeyAgreement"
value:@"true"]]];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:
@[
[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"],
[[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"]
]
optionalConstraints:
@[
[[RTCPair alloc] initWithKey:@"internalSctpDataChannels"
value:@"true"],
[[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement"
value:@"true"]
]];
self.pcObserver = [[PCObserver alloc] initWithDelegate:self];
self.peerConnection =
[self.peerConnectionFactory peerConnectionWithICEServers:servers
constraints:constraints
delegate:self.pcObserver];
RTCMediaStream *lms =
RTCMediaStream* lms =
[self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"];
NSString *cameraID = nil;
for (AVCaptureDevice *captureDevice in
[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] ) {
if (captureDevice.position == AVCaptureDevicePositionFront) {
cameraID = [captureDevice localizedName];
break;
}
NSString* cameraID = nil;
for (AVCaptureDevice* captureDevice in
[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if (captureDevice.position == AVCaptureDevicePositionFront) {
cameraID = [captureDevice localizedName];
break;
}
}
NSAssert(cameraID, @"Unable to get the front camera id");
RTCVideoCapturer *capturer =
[RTCVideoCapturer capturerWithDeviceName:cameraID];
RTCVideoSource *videoSource =
[self.peerConnectionFactory
videoSourceWithCapturer:capturer constraints:self.client.videoConstraints];
RTCVideoTrack *localVideoTrack =
[self.peerConnectionFactory
videoTrackWithID:@"ARDAMSv0" source:videoSource];
RTCVideoCapturer* capturer =
[RTCVideoCapturer capturerWithDeviceName:cameraID];
RTCVideoSource* videoSource = [self.peerConnectionFactory
videoSourceWithCapturer:capturer
constraints:self.client.videoConstraints];
RTCVideoTrack* localVideoTrack =
[self.peerConnectionFactory videoTrackWithID:@"ARDAMSv0"
source:videoSource];
if (localVideoTrack) {
[lms addVideoTrack:localVideoTrack];
}
[self.viewController.localVideoView
renderVideoTrackInterface:localVideoTrack];
self.pcObserver.videoView = self.viewController.remoteVideoView;
@@ -282,26 +270,26 @@
- (void)onOpen {
if (!self.client.initiator) {
[self displayLogMessage:@"Callee; waiting for remote offer"];
return;
[self displayLogMessage:@"Callee; waiting for remote offer"];
return;
}
[self displayLogMessage:@"GAE onOpen - create offer."];
RTCPair *audio =
RTCPair* audio =
[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"];
RTCPair *video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
NSArray *mandatory = @[ audio , video ];
RTCMediaConstraints *constraints =
RTCPair* video =
[[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints* constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection createOfferWithDelegate:self constraints:constraints];
[self displayLogMessage:@"PC - createOffer."];
}
- (void)onMessage:(NSString *)data {
NSString *message = [self unHTMLifyString:data];
NSError *error;
NSDictionary *objects = [NSJSONSerialization
- (void)onMessage:(NSString*)data {
NSString* message = [self unHTMLifyString:data];
NSError* error;
NSDictionary* objects = [NSJSONSerialization
JSONObjectWithData:[message dataUsingEncoding:NSUTF8StringEncoding]
options:0
error:&error];
@@ -309,14 +297,14 @@
@"%@",
[NSString stringWithFormat:@"Error: %@", error.description]);
NSAssert([objects count] > 0, @"Invalid JSON object");
NSString *value = [objects objectForKey:@"type"];
[self displayLogMessage:
[NSString stringWithFormat:@"GAE onMessage type - %@", value]];
NSString* value = [objects objectForKey:@"type"];
[self displayLogMessage:[NSString stringWithFormat:@"GAE onMessage type - %@",
value]];
if ([value compare:@"candidate"] == NSOrderedSame) {
NSString *mid = [objects objectForKey:@"id"];
NSNumber *sdpLineIndex = [objects objectForKey:@"label"];
NSString *sdp = [objects objectForKey:@"candidate"];
RTCICECandidate *candidate =
NSString* mid = [objects objectForKey:@"id"];
NSNumber* sdpLineIndex = [objects objectForKey:@"label"];
NSString* sdp = [objects objectForKey:@"candidate"];
RTCICECandidate* candidate =
[[RTCICECandidate alloc] initWithMid:mid
index:sdpLineIndex.intValue
sdp:sdp];
@@ -327,20 +315,21 @@
}
} else if (([value compare:@"offer"] == NSOrderedSame) ||
([value compare:@"answer"] == NSOrderedSame)) {
NSString *sdpString = [objects objectForKey:@"sdp"];
RTCSessionDescription *sdp = [[RTCSessionDescription alloc]
initWithType:value sdp:[APPRTCAppDelegate preferISAC:sdpString]];
NSString* sdpString = [objects objectForKey:@"sdp"];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:value
sdp:[APPRTCAppDelegate preferISAC:sdpString]];
[self.peerConnection setRemoteDescriptionWithDelegate:self
sessionDescription:sdp];
[self displayLogMessage:@"PC - setRemoteDescription."];
} else if ([value compare:@"bye"] == NSOrderedSame) {
[self closeVideoUI];
UIAlertView *alertView =
[[UIAlertView alloc] initWithTitle:@"Remote end hung up"
message:@"dropping PeerConnection"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
UIAlertView* alertView =
[[UIAlertView alloc] initWithTitle:@"Remote end hung up"
message:@"dropping PeerConnection"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
} else {
NSAssert(NO, @"Invalid message: %@", data);
@@ -352,9 +341,9 @@
[self closeVideoUI];
}
- (void)onError:(int)code withDescription:(NSString *)description {
[self displayLogMessage:
[NSString stringWithFormat:@"GAE onError: %@", description]];
- (void)onError:(int)code withDescription:(NSString*)description {
[self displayLogMessage:[NSString stringWithFormat:@"GAE onError: %@",
description]];
[self closeVideoUI];
}
@@ -362,19 +351,19 @@
// Match |pattern| to |string| and return the first group of the first
// match, or nil if no match was found.
+ (NSString *)firstMatch:(NSRegularExpression *)pattern
withString:(NSString *)string {
+ (NSString*)firstMatch:(NSRegularExpression*)pattern
withString:(NSString*)string {
NSTextCheckingResult* result =
[pattern firstMatchInString:string
options:0
range:NSMakeRange(0, [string length])];
if (!result)
return nil;
return [string substringWithRange:[result rangeAtIndex:1]];
}
// Mangle |origSDP| to prefer the ISAC/16k audio codec.
+ (NSString *)preferISAC:(NSString *)origSDP {
+ (NSString*)preferISAC:(NSString*)origSDP {
int mLineIndex = -1;
NSString* isac16kRtpMap = nil;
NSArray* lines = [origSDP componentsSeparatedByString:@"\n"];
@@ -411,8 +400,8 @@
[newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]];
[newMLine addObject:isac16kRtpMap];
for (; origPartIndex < [origMLineParts count]; ++origPartIndex) {
if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]]
!= NSOrderedSame) {
if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]] !=
NSOrderedSame) {
[newMLine addObject:[origMLineParts objectAtIndex:origPartIndex]];
}
}
@@ -423,9 +412,9 @@
return [newLines componentsJoinedByString:@"\n"];
}
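To make the reordering concrete: assuming |origSDP| maps ISAC/16000 to payload type 103 (numbers invented), the audio m-line is rebuilt with 103 promoted to the front of the payload list, which is what marks it as the preferred codec:
// Before: m=audio 9 RTP/SAVPF 111 103 104
// After:  m=audio 9 RTP/SAVPF 103 111 104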
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didCreateSessionDescription:(RTCSessionDescription *)origSdp
error:(NSError *)error {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didCreateSessionDescription:(RTCSessionDescription*)origSdp
error:(NSError*)error {
if (error) {
[self displayLogMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
@@ -433,27 +422,26 @@
}
[self displayLogMessage:@"SDP onSuccess(SDP) - set local description."];
RTCSessionDescription* sdp =
[[RTCSessionDescription alloc]
initWithType:origSdp.type
sdp:[APPRTCAppDelegate preferISAC:origSdp.description]];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:origSdp.type
sdp:[APPRTCAppDelegate preferISAC:origSdp.description]];
[self.peerConnection setLocalDescriptionWithDelegate:self
sessionDescription:sdp];
[self displayLogMessage:@"PC setLocalDescription."];
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSDictionary *json = @{ @"type" : sdp.type, @"sdp" : sdp.description };
NSError *error;
NSData *data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
NSAssert(!error,
@"%@",
[NSString stringWithFormat:@"Error: %@", error.description]);
[self sendData:data];
NSDictionary* json = @{@"type" : sdp.type, @"sdp" : sdp.description};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
NSAssert(!error,
@"%@",
[NSString stringWithFormat:@"Error: %@", error.description]);
[self sendData:data];
});
}
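The message posted here is the same {type, sdp} shape that |onMessage:| parses on the receiving side; e.g. (SDP truncated, values invented):
{ "type": "offer", "sdp": "v=0\r\no=- 46117314 2 IN IP4 127.0.0.1\r\n..." }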
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didSetSessionDescriptionWithError:(NSError *)error {
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didSetSessionDescriptionWithError:(NSError*)error {
if (error) {
[self displayLogMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
@@ -462,34 +450,31 @@
[self displayLogMessage:@"SDP onSuccess() - possibly drain candidates"];
dispatch_async(dispatch_get_main_queue(), ^(void) {
if (!self.client.initiator) {
if (self.peerConnection.remoteDescription
&& !self.peerConnection.localDescription) {
[self displayLogMessage:@"Callee, setRemoteDescription succeeded"];
RTCPair *audio =
[[RTCPair alloc]
initWithKey:@"OfferToReceiveAudio" value:@"true"];
RTCPair *video =
[[RTCPair alloc]
initWithKey:@"OfferToReceiveVideo" value:@"true"];
NSArray *mandatory = @[ audio , video ];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection
createAnswerWithDelegate:self constraints:constraints];
[self displayLogMessage:@"PC - createAnswer."];
if (!self.client.initiator) {
if (self.peerConnection.remoteDescription &&
!self.peerConnection.localDescription) {
[self displayLogMessage:@"Callee, setRemoteDescription succeeded"];
RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
value:@"true"];
RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection createAnswerWithDelegate:self
constraints:constraints];
[self displayLogMessage:@"PC - createAnswer."];
} else {
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
} else {
if (self.peerConnection.remoteDescription) {
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
}
}
});
}
@@ -502,36 +487,34 @@
self.peerConnection = nil;
self.peerConnectionFactory = nil;
self.pcObserver = nil;
self.client.ICEServerDelegate = nil;
self.client.messageHandler = nil;
self.client = nil;
[RTCPeerConnectionFactory deinitializeSSL];
}
- (void)drainRemoteCandidates {
for (RTCICECandidate *candidate in self.queuedRemoteCandidates) {
for (RTCICECandidate* candidate in self.queuedRemoteCandidates) {
[self.peerConnection addICECandidate:candidate];
}
self.queuedRemoteCandidates = nil;
}
- (NSString *)unHTMLifyString:(NSString *)base {
- (NSString*)unHTMLifyString:(NSString*)base {
// TODO(hughv): Investigate why percent escapes are being added. Removing
// them isn't necessary on Android.
// convert HTML escaped characters to UTF8.
NSString *removePercent =
NSString* removePercent =
[base stringByReplacingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
// remove leading and trailing ".
NSRange range;
range.length = [removePercent length] - 2;
range.location = 1;
NSString *removeQuotes = [removePercent substringWithRange:range];
NSString* removeQuotes = [removePercent substringWithRange:range];
// convert \" to ".
NSString *removeEscapedQuotes =
NSString* removeEscapedQuotes =
[removeQuotes stringByReplacingOccurrencesOfString:@"\\\""
withString:@"\""];
// convert \\ to \.
NSString *removeBackslash =
NSString* removeBackslash =
[removeEscapedQuotes stringByReplacingOccurrencesOfString:@"\\\\"
withString:@"\\"];
return removeBackslash;
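A worked example of the unescaping chain, with an invented input (percent escapes assumed already removed by the first step):
// After percent-unescape: "{\"type\":\"bye\"}"
// Strip outer quotes:     {\"type\":\"bye\"}
// \" -> " and \\ -> \:    {"type":"bye"}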
@@ -540,8 +523,8 @@
#pragma mark - public methods
- (void)closeVideoUI {
[self disconnect];
[self.viewController resetUI];
}
@end

@@ -30,7 +30,7 @@
@class RTCVideoTrack;
// This class encapsulates VideoRenderIosView.
@interface VideoView : UIView
@interface APPRTCVideoView : UIView
// Property to get/set required video orientation.
@property(nonatomic, assign) UIInterfaceOrientation videoOrientation;
@@ -40,11 +40,4 @@
// Sets up the underlying renderer and track objects.
- (void)renderVideoTrackInterface:(RTCVideoTrack*)track;
// Stops rendering.
- (void)pause;
// Starts rendering.
- (void)resume;
// Stops rendering and resets underlying renderer and track objects.
- (void)stop;
@end

@@ -0,0 +1,82 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This APPRTCVideoView must be initialized and added to a view to get
* either the local or remote video stream rendered.
* It is a view itself, encapsulating
* a VideoRenderIosView and a UIActivityIndicatorView,
* both of which are resized to the frame of their parent.
*/
#import "APPRTCVideoView.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface APPRTCVideoView () {
RTCVideoTrack* _track;
RTCVideoRenderer* _renderer;
}
@property(nonatomic, weak) UIView* renderView;
@property(nonatomic, weak) UIActivityIndicatorView* activityView;
@end
@implementation APPRTCVideoView
@synthesize videoOrientation = _videoOrientation;
- (void)layoutSubviews {
[super layoutSubviews];
if (!_renderer) {
// Left-right (mirror) flip the remote view.
CGAffineTransform xform =
CGAffineTransformMakeScale(self.isRemote ? -1 : 1, 1);
// TODO(fischman): why is this rotate (vertical+horizontal flip) needed?!?
xform = CGAffineTransformRotate(xform, M_PI);
// TODO(fischman): ensure back-camera flip is correct in all orientations,
// when back-camera support is added.
[self setTransform:xform];
_renderer = [[RTCVideoRenderer alloc] initWithView:self];
}
}
- (void)renderVideoTrackInterface:(RTCVideoTrack*)videoTrack {
[_track removeRenderer:_renderer];
[_renderer stop];
_track = videoTrack;
if (_track) {
[_track addRenderer:_renderer];
[_renderer start];
}
}
@end
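A minimal usage sketch for the class (|parent|, |frame| and |track| are hypothetical; this mirrors how APPRTCViewController wires the view up later in this change):
APPRTCVideoView* videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = YES;  // remote streams get the mirror flip in layoutSubviews
[parent addSubview:videoView];
[videoView renderVideoTrackInterface:track];  // attaches the renderer; pass nil to stop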

@@ -27,20 +27,20 @@
#import <UIKit/UIKit.h>
@class VideoView;
@class APPRTCVideoView;
// The view controller that is displayed when AppRTCDemo is loaded.
@interface APPRTCViewController : UIViewController<UITextFieldDelegate>
@property (weak, nonatomic) IBOutlet UITextField *textField;
@property (weak, nonatomic) IBOutlet UITextView *textInstructions;
@property (weak, nonatomic) IBOutlet UITextView *textOutput;
@property(weak, nonatomic) IBOutlet UITextField* textField;
@property(weak, nonatomic) IBOutlet UITextView* textInstructions;
@property(weak, nonatomic) IBOutlet UITextView* textOutput;
@property(weak, nonatomic) IBOutlet UIView* blackView;
@property(nonatomic, strong) VideoView* remoteVideoView;
@property(nonatomic, strong) VideoView* localVideoView;
@property(nonatomic, strong) APPRTCVideoView* remoteVideoView;
@property(nonatomic, strong) APPRTCVideoView* localVideoView;
- (void)displayText:(NSString *)text;
- (void)displayText:(NSString*)text;
- (void)resetUI;
@end

@@ -27,49 +27,40 @@
#import "APPRTCViewController.h"
#import "VideoView.h"
#import "APPRTCVideoView.h"
@interface APPRTCViewController ()
@property (nonatomic, assign) UIInterfaceOrientation statusBarOrientation;
@property(nonatomic, assign) UIInterfaceOrientation statusBarOrientation;
@end
@implementation APPRTCViewController
@synthesize textField = _textField;
@synthesize textInstructions = _textInstructions;
@synthesize textOutput = _textOutput;
@synthesize blackView = _blackView;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize localVideoView = _localVideoView;
@synthesize statusBarOrientation = _statusBarOrientation;
- (void)viewDidLoad {
[super viewDidLoad];
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
self.textField.delegate = self;
[self.textField becomeFirstResponder];
}
- (void)viewDidLayoutSubviews {
if (self.statusBarOrientation !=
[UIApplication sharedApplication].statusBarOrientation) {
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
[[NSNotificationCenter defaultCenter]
postNotificationName:@"StatusBarOrientationDidChange" object:nil];
}
if (self.statusBarOrientation !=
[UIApplication sharedApplication].statusBarOrientation) {
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
[[NSNotificationCenter defaultCenter]
postNotificationName:@"StatusBarOrientationDidChange"
object:nil];
}
}
- (void)displayText:(NSString *)text {
- (void)displayText:(NSString*)text {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSString *output =
[NSString stringWithFormat:@"%@\n%@", self.textOutput.text, text];
self.textOutput.text = output;
NSString* output =
[NSString stringWithFormat:@"%@\n%@", self.textOutput.text, text];
self.textOutput.text = output;
});
}
@@ -82,11 +73,11 @@
self.textOutput.text = nil;
self.blackView.hidden = YES;
[_remoteVideoView stop];
[_remoteVideoView renderVideoTrackInterface:nil];
[_remoteVideoView removeFromSuperview];
self.remoteVideoView = nil;
[_localVideoView stop];
[_remoteVideoView renderVideoTrackInterface:nil];
[_localVideoView removeFromSuperview];
self.localVideoView = nil;
}
@@ -94,61 +85,62 @@
// TODO(fischman): Use video dimensions from the incoming video stream
// and resize the Video View accordingly w.r.t. aspect ratio.
enum {
// Remote video view dimensions.
kRemoteVideoWidth = 640,
kRemoteVideoHeight = 480,
// Padding space for local video view with its parent.
kLocalViewPadding = 20
};
- (void)setupCaptureSession {
self.blackView.hidden = NO;
CGRect frame = CGRectMake((self.blackView.bounds.size.width
-kRemoteVideoWidth)/2,
(self.blackView.bounds.size.height
-kRemoteVideoHeight)/2,
kRemoteVideoWidth,
kRemoteVideoHeight);
VideoView *videoView = [[VideoView alloc] initWithFrame:frame];
videoView.isRemote = TRUE;
CGRect frame =
CGRectMake((self.blackView.bounds.size.width - kRemoteVideoWidth) / 2,
(self.blackView.bounds.size.height - kRemoteVideoHeight) / 2,
kRemoteVideoWidth,
kRemoteVideoHeight);
APPRTCVideoView* videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = TRUE;
[self.blackView addSubview:videoView];
videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin |
UIViewAutoresizingFlexibleRightMargin |
UIViewAutoresizingFlexibleBottomMargin |
UIViewAutoresizingFlexibleTopMargin;
videoView.translatesAutoresizingMaskIntoConstraints = YES;
_remoteVideoView = videoView;
CGSize screenSize = [[UIScreen mainScreen] bounds].size;
CGFloat localVideoViewWidth =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ?
screenSize.width/4 : screenSize.height/4;
CGFloat localVideoViewHeight =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ?
screenSize.height/4 : screenSize.width/4;
frame = CGRectMake(self.blackView.bounds.size.width
-localVideoViewWidth-kLocalViewPadding,
kLocalViewPadding,
localVideoViewWidth,
localVideoViewHeight);
videoView = [[VideoView alloc] initWithFrame:frame];
videoView.isRemote = FALSE;
CGSize screenSize = [[UIScreen mainScreen] bounds].size;
CGFloat localVideoViewWidth =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.width / 4
: screenSize.height / 4;
CGFloat localVideoViewHeight =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.height / 4
: screenSize.width / 4;
frame = CGRectMake(self.blackView.bounds.size.width - localVideoViewWidth -
kLocalViewPadding,
kLocalViewPadding,
localVideoViewWidth,
localVideoViewHeight);
videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = FALSE;
[self.blackView addSubview:videoView];
videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin |
UIViewAutoresizingFlexibleBottomMargin |
UIViewAutoresizingFlexibleHeight |
UIViewAutoresizingFlexibleWidth;
videoView.translatesAutoresizingMaskIntoConstraints = YES;
_localVideoView = videoView;
}
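Plugging invented numbers into the layout math above: with a 1024x768 |blackView| the remote view is centered at ((1024 - 640) / 2, (768 - 480) / 2) = (192, 144) at its fixed 640x480 size; on a 320x480 portrait screen the local view is a quarter of each dimension, 80x120, inset kLocalViewPadding (20) points from the top-right corner.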
#pragma mark - UITextFieldDelegate
- (void)textFieldDidEndEditing:(UITextField *)textField {
NSString *room = textField.text;
- (void)textFieldDidEndEditing:(UITextField*)textField {
NSString* room = textField.text;
if ([room length] == 0) {
return;
}
@@ -159,16 +151,14 @@ enum {
// prepopulating the textField with a valid URL missing the room. This allows
// the user to have the simplicity of just entering the room or the ability to
// override to a custom appspot instance. Remove apprtc:// when this is done.
NSString *url =
NSString* url =
[NSString stringWithFormat:@"apprtc://apprtc.appspot.com/?r=%@", room];
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]];
dispatch_async(dispatch_get_main_queue(), ^{
[self setupCaptureSession];
});
dispatch_async(dispatch_get_main_queue(), ^{ [self setupCaptureSession]; });
}
- (BOOL)textFieldShouldReturn:(UITextField *)textField {
- (BOOL)textFieldShouldReturn:(UITextField*)textField {
// There is no other control that can take focus, so manually resign focus
// when return (Join) is pressed to trigger |textFieldDidEndEditing|.
[textField resignFirstResponder];

@@ -32,27 +32,23 @@
@interface GAEChannelClient ()
@property(nonatomic, assign) id<GAEMessageHandler> delegate;
@property(nonatomic, strong) UIWebView *webView;
@property(nonatomic, strong) UIWebView* webView;
@end
@implementation GAEChannelClient
@synthesize delegate = _delegate;
@synthesize webView = _webView;
- (id)initWithToken:(NSString *)token delegate:(id<GAEMessageHandler>)delegate {
- (id)initWithToken:(NSString*)token delegate:(id<GAEMessageHandler>)delegate {
self = [super init];
if (self) {
_webView = [[UIWebView alloc] init];
_webView.delegate = self;
_delegate = delegate;
NSString *htmlPath =
NSString* htmlPath =
[[NSBundle mainBundle] pathForResource:@"ios_channel" ofType:@"html"];
NSURL *htmlUrl = [NSURL fileURLWithPath:htmlPath];
NSString *path = [NSString stringWithFormat:@"%@?token=%@",
[htmlUrl absoluteString],
token];
NSURL* htmlUrl = [NSURL fileURLWithPath:htmlPath];
NSString* path = [NSString
stringWithFormat:@"%@?token=%@", [htmlUrl absoluteString], token];
[_webView
loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:path]]];
@@ -67,17 +63,17 @@
#pragma mark - UIWebViewDelegate method
- (BOOL)webView:(UIWebView *)webView
shouldStartLoadWithRequest:(NSURLRequest *)request
- (BOOL)webView:(UIWebView*)webView
shouldStartLoadWithRequest:(NSURLRequest*)request
navigationType:(UIWebViewNavigationType)navigationType {
NSString *scheme = [request.URL scheme];
NSString* scheme = [request.URL scheme];
if ([scheme compare:@"js-frame"] != NSOrderedSame) {
return YES;
}
NSString *resourceSpecifier = [request.URL resourceSpecifier];
NSString* resourceSpecifier = [request.URL resourceSpecifier];
NSRange range = [resourceSpecifier rangeOfString:@":"];
NSString *method;
NSString *message;
NSString* method;
NSString* message;
if (range.length == 0 && range.location == NSNotFound) {
method = resourceSpecifier;
} else {
@@ -85,21 +81,21 @@
message = [resourceSpecifier substringFromIndex:range.location + 1];
}
dispatch_async(dispatch_get_main_queue(), ^(void) {
if ([method compare:@"onopen"] == NSOrderedSame) {
[self.delegate onOpen];
} else if ([method compare:@"onmessage"] == NSOrderedSame) {
[self.delegate onMessage:message];
} else if ([method compare:@"onclose"] == NSOrderedSame) {
[self.delegate onClose];
} else if ([method compare:@"onerror"] == NSOrderedSame) {
// TODO(hughv): Get error.
int code = -1;
NSString *description = message;
[self.delegate onError:code withDescription:description];
} else {
NSAssert(NO, @"Invalid message sent from UIWebView: %@",
resourceSpecifier);
}
if ([method compare:@"onopen"] == NSOrderedSame) {
[self.delegate onOpen];
} else if ([method compare:@"onmessage"] == NSOrderedSame) {
[self.delegate onMessage:message];
} else if ([method compare:@"onclose"] == NSOrderedSame) {
[self.delegate onClose];
} else if ([method compare:@"onerror"] == NSOrderedSame) {
// TODO(hughv): Get error.
int code = -1;
NSString* description = message;
[self.delegate onError:code withDescription:description];
} else {
NSAssert(
NO, @"Invalid message sent from UIWebView: %@", resourceSpecifier);
}
});
return YES;
}
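For context, ios_channel.html (not shown in this change) hands channel events to native code by navigating to js-frame: URLs; an invented request like the one below is split on the first ':' of its resource specifier:
// URL:     js-frame:onmessage:{"type":"bye"}
// scheme  -> @"js-frame"
// method  -> @"onmessage"
// message -> @"{\"type\":\"bye\"}"  (forwarded to [self.delegate onMessage:])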

@@ -1,168 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This VideoView must be initialized and added to a view to get
* either the local or remote video stream rendered.
* It is a view itself, encapsulating
* a VideoRenderIosView and a UIActivityIndicatorView,
* both of which are resized to the frame of their parent.
*/
#import "VideoView.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface VideoView () {
RTCVideoTrack *_track;
RTCVideoRenderer *_renderer;
}
@property (nonatomic, weak) UIView *renderView;
@property (nonatomic, weak) UIActivityIndicatorView *activityView;
@end
@implementation VideoView
@synthesize videoOrientation = _videoOrientation;
@synthesize isRemote = _isRemote;
@synthesize renderView = _renderView;
@synthesize activityView = _activityView;
static void init(VideoView *self) {
UIView *renderView = [RTCVideoRenderer newRenderViewWithFrame:
CGRectMake(0,
0,
self.bounds.size.width,
self.bounds.size.height)];
[self addSubview:renderView];
renderView.autoresizingMask = UIViewAutoresizingFlexibleHeight |
UIViewAutoresizingFlexibleWidth;
renderView.translatesAutoresizingMaskIntoConstraints = YES;
self.renderView = renderView;
UIActivityIndicatorView *indicatorView =
[[UIActivityIndicatorView alloc]
initWithActivityIndicatorStyle:
UIActivityIndicatorViewStyleWhiteLarge];
indicatorView.frame = self.bounds;
indicatorView.hidesWhenStopped = YES;
[self addSubview:indicatorView];
indicatorView.autoresizingMask = UIViewAutoresizingFlexibleWidth |
UIViewAutoresizingFlexibleHeight;
indicatorView.translatesAutoresizingMaskIntoConstraints = YES;
[indicatorView startAnimating];
self.activityView = indicatorView;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
init(self);
}
return self;
}
-(id)initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
init(self);
}
return self;
}
- (UIInterfaceOrientation)videoOrientation {
return _videoOrientation;
}
- (void)setVideoOrientation:(UIInterfaceOrientation)videoOrientation {
if (_videoOrientation != videoOrientation) {
_videoOrientation = videoOrientation;
CGFloat angle;
switch (videoOrientation) {
case UIInterfaceOrientationPortrait:
angle = M_PI_2;
break;
case UIInterfaceOrientationPortraitUpsideDown:
angle = -M_PI_2;
break;
case UIInterfaceOrientationLandscapeLeft:
angle = M_PI;
break;
case UIInterfaceOrientationLandscapeRight:
angle = 0;
break;
}
// The video comes in mirrored. That is fine for the local video,
// but the remote video should be flipped back to its original orientation.
CGAffineTransform xform =
CGAffineTransformMakeScale([self isRemote] ? -1 : 1, 1);
xform = CGAffineTransformRotate(xform, angle);
[[self renderView] setTransform:xform];
}
}
- (void)renderVideoTrackInterface:(RTCVideoTrack *)videoTrack {
[self stop];
_track = videoTrack;
if (_track) {
if (!_renderer) {
_renderer = [[RTCVideoRenderer alloc]
initWithRenderView:[self renderView]];
}
[_track addRenderer:_renderer];
[self resume];
}
[self setVideoOrientation:UIInterfaceOrientationLandscapeLeft];
[self setVideoOrientation:UIInterfaceOrientationPortrait];
[self setVideoOrientation:UIInterfaceOrientationLandscapeLeft];
}
-(void)pause {
[_renderer stop];
}
-(void)resume {
[self.activityView stopAnimating];
[self.activityView removeFromSuperview];
self.activityView = nil;
[_renderer start];
}
- (void)stop {
[_track removeRenderer:_renderer];
[_renderer stop];
}
@end

@@ -29,7 +29,7 @@
#import "APPRTCAppDelegate.h"
int main(int argc, char *argv[]) {
int main(int argc, char* argv[]) {
@autoreleasepool {
return UIApplicationMain(
argc, argv, nil, NSStringFromClass([APPRTCAppDelegate class]));

@@ -248,18 +248,18 @@
'examples/ios/AppRTCDemo/APPRTCAppDelegate.m',
'examples/ios/AppRTCDemo/APPRTCViewController.h',
'examples/ios/AppRTCDemo/APPRTCViewController.m',
'examples/ios/AppRTCDemo/APPRTCVideoView.h',
'examples/ios/AppRTCDemo/APPRTCVideoView.m',
'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch',
'examples/ios/AppRTCDemo/GAEChannelClient.h',
'examples/ios/AppRTCDemo/GAEChannelClient.m',
'examples/ios/AppRTCDemo/VideoView.h',
'examples/ios/AppRTCDemo/VideoView.m',
'examples/ios/AppRTCDemo/main.m',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
'INFOPLIST_FILE': 'examples/ios/AppRTCDemo/Info.plist',
'OTHER_LDFLAGS': [
'-framework CoreGraphics',
'-framework Foundation',
'-framework UIKit',
],

@@ -1,6 +1,6 @@
/*
* libjingle
* Copyright 2004--2010, Google Inc.
* Copyright 2010, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@@ -27,7 +27,7 @@
// support GCC compiler
#ifndef __has_feature
# define __has_feature(x) 0
#define __has_feature(x) 0
#endif
#include "talk/media/devices/devicemanager.h"
@@ -42,7 +42,7 @@
cricket::DeviceManagerInterface* manager_;
}
- (id)init:(cricket::DeviceManagerInterface*)manager;
- (void)onDevicesChanged:(NSNotification *)notification;
- (void)onDevicesChanged:(NSNotification*)notification;
@end
@implementation DeviceWatcherImpl
@@ -50,14 +50,16 @@
if ((self = [super init])) {
assert(manager != NULL);
manager_ = manager;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasConnectedNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasDisconnectedNotification
object:nil];
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasConnectedNotification
object:nil];
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasDisconnectedNotification
object:nil];
}
return self;
}
@@ -68,7 +70,7 @@
[super dealloc];
#endif
}
- (void)onDevicesChanged:(NSNotification *)notification {
- (void)onDevicesChanged:(NSNotification*)notification {
manager_->SignalDevicesChange();
}
@end
@@ -83,9 +85,7 @@ DeviceWatcherImpl* CreateDeviceWatcherCallback(
#else
@autoreleasepool
#endif
{
impl = [[DeviceWatcherImpl alloc] init:manager];
}
{ impl = [[DeviceWatcherImpl alloc] init:manager]; }
#if !__has_feature(objc_arc)
[pool drain];
#endif
@@ -115,20 +115,19 @@ bool GetQTKitVideoDevices(std::vector<Device>* devices) {
static NSString* const kFormat = @"localizedDisplayName: \"%@\", "
@"modelUniqueID: \"%@\", uniqueID \"%@\", isConnected: %d, "
@"isOpen: %d, isInUseByAnotherApplication: %d";
NSString* info = [NSString stringWithFormat:kFormat,
[qt_capture_device localizedDisplayName],
[qt_capture_device modelUniqueID],
[qt_capture_device uniqueID],
[qt_capture_device isConnected],
[qt_capture_device isOpen],
[qt_capture_device isInUseByAnotherApplication]];
NSString* info = [NSString
stringWithFormat:kFormat,
[qt_capture_device localizedDisplayName],
[qt_capture_device modelUniqueID],
[qt_capture_device uniqueID],
[qt_capture_device isConnected],
[qt_capture_device isOpen],
[qt_capture_device isInUseByAnotherApplication]];
LOG(LS_INFO) << [info UTF8String];
std::string name([[qt_capture_device localizedDisplayName]
UTF8String]);
devices->push_back(Device(name,
[[qt_capture_device uniqueID]
UTF8String]));
std::string name([[qt_capture_device localizedDisplayName] UTF8String]);
devices->push_back(
Device(name, [[qt_capture_device uniqueID] UTF8String]));
}
}
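// For reference, the LOG(LS_INFO) call above emits entries shaped like this
// (device values invented):
//   localizedDisplayName: "FaceTime HD Camera", modelUniqueID: "(null)",
//   uniqueID "0x14100000ac8509", isConnected: 1, isOpen: 0,
//   isInUseByAnotherApplication: 0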
#if !__has_feature(objc_arc)