AppRTCDemo(ios): style/cleanup fixes following cr/62871616-p10

BUG=2168
R=noahric@google.com

Review URL: https://webrtc-codereview.appspot.com/9709004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5768 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
fischman@webrtc.org
2014-03-25 00:11:56 +00:00
parent ce12f1fd32
commit 7fa1fcb72c
35 changed files with 809 additions and 929 deletions

View File

@@ -55,7 +55,9 @@
- (NSString*)description {
return [NSString stringWithFormat:@"RTCICEServer: [%@:%@:%@]",
[self.URI absoluteString], self.username, self.password];
[self.URI absoluteString],
self.username,
self.password];
}
@end

View File

@@ -55,8 +55,8 @@
return self;
}
+ (webrtc::MediaConstraintsInterface::Constraints)
constraintsFromArray:(NSArray *)array {
+ (webrtc::MediaConstraintsInterface::Constraints)constraintsFromArray:
(NSArray*)array {
webrtc::MediaConstraintsInterface::Constraints constraints;
for (RTCPair* pair in array) {
constraints.push_back(webrtc::MediaConstraintsInterface::Constraint(

View File

@@ -40,7 +40,8 @@
- (BOOL)isEqual:(id)other {
// Equality is purely based on the label just like the C++ implementation.
if (self == other) return YES;
if (self == other)
return YES;
if (![other isKindOfClass:[self class]] ||
![self isKindOfClass:[other class]]) {
return NO;
@@ -82,8 +83,9 @@
@implementation RTCMediaStreamTrack (Internal)
- (id)initWithMediaTrack:(
talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
- (id)initWithMediaTrack:
(talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)
mediaTrack {
if (!mediaTrack) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;

View File

@@ -107,7 +107,6 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
id<RTCSessionDescriptonDelegate> _delegate;
RTCPeerConnection* _peerConnection;
};
}
@implementation RTCPeerConnection {
@@ -154,8 +153,8 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
[_localStreams removeObject:stream];
}
- (void)
setLocalDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
- (void)setLocalDescriptionWithDelegate:
(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription*)sdp {
talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
@@ -163,8 +162,8 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
self.peerConnection->SetLocalDescription(observer, sdp.sessionDescription);
}
- (void)
setRemoteDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
- (void)setRemoteDescriptionWithDelegate:
(id<RTCSessionDescriptonDelegate>)delegate
sessionDescription:(RTCSessionDescription*)sdp {
talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
@@ -184,9 +183,8 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
- (RTCSessionDescription*)localDescription {
const webrtc::SessionDescriptionInterface* sdi =
self.peerConnection->local_description();
return sdi ?
[[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
nil;
return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
: nil;
}
- (NSArray*)localStreams {
@@ -196,19 +194,20 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
- (RTCSessionDescription*)remoteDescription {
const webrtc::SessionDescriptionInterface* sdi =
self.peerConnection->remote_description();
return sdi ?
[[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
nil;
return sdi ? [[RTCSessionDescription alloc] initWithSessionDescription:sdi]
: nil;
}
- (RTCICEConnectionState)iceConnectionState {
return [RTCEnumConverter convertIceConnectionStateToObjC:
self.peerConnection->ice_connection_state()];
return [RTCEnumConverter
convertIceConnectionStateToObjC:self.peerConnection
->ice_connection_state()];
}
- (RTCICEGatheringState)iceGatheringState {
return [RTCEnumConverter convertIceGatheringStateToObjC:
self.peerConnection->ice_gathering_state()];
return [RTCEnumConverter
convertIceGatheringStateToObjC:self.peerConnection
->ice_gathering_state()];
}
- (RTCSignalingState)signalingState {
@@ -224,8 +223,9 @@ class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
@implementation RTCPeerConnection (Internal)
- (id)initWithPeerConnection:(
talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection
- (id)initWithPeerConnection:
(talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)
peerConnection
observer:(webrtc::RTCPeerConnectionObserver*)observer {
if (!peerConnection || !observer) {
NSAssert(NO, @"nil arguments not allowed");

View File

@@ -98,8 +98,9 @@
new webrtc::RTCPeerConnectionObserver(delegate);
webrtc::DTLSIdentityServiceInterface* dummy_dtls_identity_service = NULL;
talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection =
self.nativeFactory->CreatePeerConnection(
iceServers, constraints.constraints, dummy_dtls_identity_service,
self.nativeFactory->CreatePeerConnection(iceServers,
constraints.constraints,
dummy_dtls_identity_service,
observer);
RTCPeerConnection* pc =
[[RTCPeerConnection alloc] initWithPeerConnection:peerConnection
@@ -120,7 +121,7 @@
return nil;
}
talk_base::scoped_refptr<webrtc::VideoSourceInterface> source =
self.nativeFactory->CreateVideoSource([capturer release_native_capturer],
self.nativeFactory->CreateVideoSource([capturer takeNativeCapturer],
constraints.constraints);
return [[RTCVideoSource alloc] initWithMediaSource:source];
}

View File

@@ -54,8 +54,8 @@ void RTCPeerConnectionObserver::OnError() {
void RTCPeerConnectionObserver::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
[_delegate peerConnection:_peerConnection
signalingStateChanged:
[RTCEnumConverter convertSignalingStateToObjC:new_state]];
signalingStateChanged:[RTCEnumConverter
convertSignalingStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) {
@@ -82,15 +82,15 @@ void RTCPeerConnectionObserver::OnRenegotiationNeeded() {
void RTCPeerConnectionObserver::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
[_delegate peerConnection:_peerConnection
iceConnectionChanged:
[RTCEnumConverter convertIceConnectionStateToObjC:new_state]];
iceConnectionChanged:[RTCEnumConverter
convertIceConnectionStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
[_delegate peerConnection:_peerConnection
iceGatheringChanged:
[RTCEnumConverter convertIceGatheringStateToObjC:new_state]];
iceGatheringChanged:[RTCEnumConverter
convertIceGatheringStateToObjC:new_state]];
}
void RTCPeerConnectionObserver::OnIceCandidate(

View File

@@ -31,7 +31,7 @@
@interface RTCVideoCapturer (Internal)
- (cricket::VideoCapturer*)release_native_capturer;
- (cricket::VideoCapturer*)takeNativeCapturer;
- (id)initWithCapturer:(cricket::VideoCapturer*)capturer;

View File

@@ -67,7 +67,7 @@
return self;
}
- (cricket::VideoCapturer*)release_native_capturer {
- (cricket::VideoCapturer*)takeNativeCapturer {
return _capturer.release();
}

View File

@@ -49,7 +49,6 @@
// a VideoRenderCallback. Suitable for feeding to
// VideoTrackInterface::AddRenderer().
class CallbackConverter : public webrtc::VideoRendererInterface {
public:
CallbackConverter(webrtc::VideoRenderCallback* callback,
const uint32_t streamId)
@@ -88,51 +87,57 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
};
@implementation RTCVideoRenderer {
VideoRenderIosView* _renderView;
UIActivityIndicatorView* _activityIndicator;
CallbackConverter* _converter;
talk_base::scoped_ptr<webrtc::VideoRenderIosImpl> _iosRenderer;
}
@synthesize delegate = _delegate;
+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame {
return [[RTCVideoRenderer alloc]
initWithRenderView:[RTCVideoRenderer newRenderViewWithFrame:frame]];
}
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
if ((self = [super init])) {
_delegate = delegate;
// TODO(hughv): Create video renderer.
}
[self doesNotRecognizeSelector:_cmd];
return self;
}
+ (UIView*)newRenderViewWithFrame:(CGRect)frame {
VideoRenderIosView* newView =
[[VideoRenderIosView alloc] initWithFrame:frame];
return newView;
}
- (id)initWithRenderView:(UIView*)view {
NSAssert([view isKindOfClass:[VideoRenderIosView class]],
@"The view must be of kind 'VideoRenderIosView'");
- (id)initWithView:(UIView*)view {
if ((self = [super init])) {
VideoRenderIosView* renderView = (VideoRenderIosView*)view;
CGRect frame =
CGRectMake(0, 0, view.bounds.size.width, view.bounds.size.height);
_renderView = [[VideoRenderIosView alloc] initWithFrame:frame];
_iosRenderer.reset(
new webrtc::VideoRenderIosImpl(0, (__bridge void*)renderView, NO));
if (_iosRenderer->Init() != -1) {
new webrtc::VideoRenderIosImpl(0, (__bridge void*)_renderView, NO));
if (_iosRenderer->Init() == -1) {
self = nil;
} else {
webrtc::VideoRenderCallback* callback =
_iosRenderer->AddIncomingRenderStream(0, 1, 0, 0, 1, 1);
_converter = new CallbackConverter(callback, 0);
_iosRenderer->StartRender();
} else {
self = nil;
[view addSubview:_renderView];
_renderView.autoresizingMask =
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
_renderView.translatesAutoresizingMaskIntoConstraints = YES;
_activityIndicator = [[UIActivityIndicatorView alloc]
initWithActivityIndicatorStyle:
UIActivityIndicatorViewStyleWhiteLarge];
_activityIndicator.frame = view.bounds;
_activityIndicator.hidesWhenStopped = YES;
[view addSubview:_activityIndicator];
_activityIndicator.autoresizingMask =
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_activityIndicator.translatesAutoresizingMaskIntoConstraints = YES;
[_activityIndicator startAnimating];
}
}
return self;
}
- (void)start {
[_activityIndicator stopAnimating];
[_activityIndicator removeFromSuperview];
_iosRenderer->StartRender();
}
@@ -159,7 +164,7 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
#import "RTCVideoRendererDelegate.h"
@implementation RTCVideoRenderer
@synthesize delegate = _delegate;
+ (RTCVideoRenderer*)videoRenderGUIWithFrame:(CGRect)frame {
+ (RTCVideoRenderer*)videoRendererWithFrame:(CGRect)frame {
// TODO(hughv): Implement.
return nil;
}
@@ -170,12 +175,10 @@ class CallbackConverter : public webrtc::VideoRendererInterface {
}
return self;
}
+ (UIView*)newRenderViewWithFrame:(CGRect)frame {
- (id)initWithView:(UIView*)view {
return nil;
}
- (id)initWithRenderView:(UIView*)renderView {
return nil;
- (void)setTransform:(CGAffineTransform)transform {
}
- (void)start {
}

View File

@@ -38,8 +38,9 @@
NSMutableArray* _rendererArray;
}
- (id)initWithMediaTrack:(
talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
- (id)initWithMediaTrack:
(talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)
mediaTrack {
if (self = [super initWithMediaTrack:mediaTrack]) {
_rendererArray = [NSMutableArray array];
}

View File

@@ -37,8 +37,8 @@
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__(
(unavailable("init is not a supported initializer for this class.")));
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end

View File

@@ -26,6 +26,7 @@
*/
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
@protocol RTCVideoRendererDelegate;
struct CGRect;
@@ -36,18 +37,15 @@ struct CGRect;
@property(nonatomic, strong) id<RTCVideoRendererDelegate> delegate;
// A convenience method to create a renderer and window and render frames into
// that window.
+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame;
+ (UIView*)newRenderViewWithFrame:(CGRect)frame;
// The view to the following constructor
// must be one of the views from newRenderViewWithFrame.
- (id)initWithRenderView:(UIView*)renderView;
- (id)initWithView:(UIView*)view;
// Initialize the renderer. Requires a delegate which does the actual drawing
// of frames.
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
// Set an affine transform on relevant UIViews.
- (void)setTransform:(CGAffineTransform)transform;
// Starts rendering.
- (void)start;
// Stops rendering. It can be restarted again using the 'start' method above.
@@ -55,8 +53,8 @@ struct CGRect;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__(
(unavailable("init is not a supported initializer for this class.")));
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end

View File

@@ -74,8 +74,7 @@
- (BOOL)areAllExpectationsSatisfied {
return _expectedICECandidates <= 0 && // See comment in gotICECandidate.
_expectedErrors == 0 &&
[_expectedSignalingChanges count] == 0 &&
_expectedErrors == 0 && [_expectedSignalingChanges count] == 0 &&
[_expectedICEConnectionChanges count] == 0 &&
[_expectedICEGatheringChanges count] == 0 &&
[_expectedAddStreamLabels count] == 0 &&
@@ -137,8 +136,11 @@
- (void)peerConnection:(RTCPeerConnection*)peerConnection
signalingStateChanged:(RTCSignalingState)stateChanged {
int expectedState = [self popFirstElementAsInt:_expectedSignalingChanges];
NSString *message = [NSString stringWithFormat: @"RTCPeerConnectionDelegate::"
@"onSignalingStateChange [%d] expected[%d]", stateChanged, expectedState];
NSString* message =
[NSString stringWithFormat:@"RTCPeerConnectionDelegate::"
@"onSignalingStateChange [%d] expected[%d]",
stateChanged,
expectedState];
NSAssert(expectedState == (int)stateChanged, message);
}
@@ -156,8 +158,7 @@
NSAssert([expectedLabel isEqual:stream.label], @"Stream not expected");
}
- (void)peerConnectionOnRenegotiationNeeded:
(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection

View File

@@ -52,8 +52,7 @@
ofSameTypeAsSession:(RTCSessionDescription*)session2;
// Create and add tracks to pc, with the given source, label, and IDs
- (RTCMediaStream *)
addTracksToPeerConnection:(RTCPeerConnection *)pc
- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
withFactory:(RTCPeerConnectionFactory*)factory
videoSource:(RTCVideoSource*)videoSource
streamLabel:(NSString*)streamLabel
@@ -71,8 +70,7 @@
return [session1.type isEqual:session2.type];
}
- (RTCMediaStream *)
addTracksToPeerConnection:(RTCPeerConnection *)pc
- (RTCMediaStream*)addTracksToPeerConnection:(RTCPeerConnection*)pc
withFactory:(RTCPeerConnectionFactory*)factory
videoSource:(RTCVideoSource*)videoSource
streamLabel:(NSString*)streamLabel
@@ -120,8 +118,7 @@
// Here and below, "oLMS" refers to offerer's local media stream, and "aLMS"
// refers to the answerer's local media stream, with suffixes of "a0" and "v0"
// for audio and video tracks, resp. These mirror chrome historical naming.
RTCMediaStream *oLMSUnused =
[self addTracksToPeerConnection:pcOffer
RTCMediaStream* oLMSUnused = [self addTracksToPeerConnection:pcOffer
withFactory:factory
videoSource:videoSource
streamLabel:@"oLMS"
@@ -138,15 +135,13 @@
EXPECT_GT([offerSDP.description length], 0);
sdpObserver = [[RTCSessionDescriptionSyncObserver alloc] init];
[answeringExpectations
expectSignalingChange:RTCSignalingHaveRemoteOffer];
[answeringExpectations expectSignalingChange:RTCSignalingHaveRemoteOffer];
[answeringExpectations expectAddStream:@"oLMS"];
[pcAnswer setRemoteDescriptionWithDelegate:sdpObserver
sessionDescription:offerSDP];
[sdpObserver wait];
RTCMediaStream *aLMSUnused =
[self addTracksToPeerConnection:pcAnswer
RTCMediaStream* aLMSUnused = [self addTracksToPeerConnection:pcAnswer
withFactory:factory
videoSource:videoSource
streamLabel:@"aLMS"
@@ -203,12 +198,12 @@
EXPECT_TRUE([offerSDP.type isEqual:pcAnswer.remoteDescription.type]);
EXPECT_TRUE([answerSDP.type isEqual:pcAnswer.localDescription.type]);
for (RTCICECandidate *candidate in
offeringExpectations.releaseReceivedICECandidates) {
for (RTCICECandidate* candidate in offeringExpectations
.releaseReceivedICECandidates) {
[pcAnswer addICECandidate:candidate];
}
for (RTCICECandidate *candidate in
answeringExpectations.releaseReceivedICECandidates) {
for (RTCICECandidate* candidate in answeringExpectations
.releaseReceivedICECandidates) {
[pcOffer addICECandidate:candidate];
}

View File

@@ -47,12 +47,20 @@
// for the registered handler to be called with received messages.
@interface APPRTCAppClient : NSObject<NSURLConnectionDataDelegate>
@property(nonatomic, assign) id<ICEServerDelegate> ICEServerDelegate;
@property(nonatomic, assign) id<GAEMessageHandler> messageHandler;
@property(nonatomic, weak, readonly) id<ICEServerDelegate> ICEServerDelegate;
@property(nonatomic, weak, readonly) id<GAEMessageHandler> messageHandler;
@property(nonatomic, assign) BOOL initiator;
@property(nonatomic, strong) RTCMediaConstraints* videoConstraints;
@property(nonatomic, copy, readonly) RTCMediaConstraints* videoConstraints;
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler;
- (void)connectToRoom:(NSURL*)room;
- (void)sendData:(NSData*)data;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end

View File

@@ -51,23 +51,11 @@
@implementation APPRTCAppClient
@synthesize ICEServerDelegate = _ICEServerDelegate;
@synthesize messageHandler = _messageHandler;
@synthesize backgroundQueue = _backgroundQueue;
@synthesize baseURL = _baseURL;
@synthesize gaeChannel = _gaeChannel;
@synthesize postMessageUrl = _postMessageUrl;
@synthesize pcConfig = _pcConfig;
@synthesize roomHtml = _roomHtml;
@synthesize sendQueue = _sendQueue;
@synthesize token = _token;
@synthesize verboseLogging = _verboseLogging;
@synthesize initiator = _initiator;
@synthesize videoConstraints = _videoConstraints;
- (id)init {
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler {
if (self = [super init]) {
_ICEServerDelegate = delegate;
_messageHandler = handler;
_backgroundQueue = dispatch_queue_create("RTCBackgroundQueue", NULL);
_sendQueue = [NSMutableArray array];
// Uncomment to see Request/Response logging.
@@ -93,8 +81,7 @@
#pragma mark - Internal methods
- (NSString*)findVar:(NSString*)name
strippingQuotes:(BOOL)strippingQuotes {
- (NSString*)findVar:(NSString*)name strippingQuotes:(BOOL)strippingQuotes {
NSError* error;
NSString* pattern =
[NSString stringWithFormat:@".*\n *var %@ = ([^\n]*);\n.*", name];
@@ -102,7 +89,8 @@
[NSRegularExpression regularExpressionWithPattern:pattern
options:0
error:&error];
NSAssert(!error, @"Unexpected error compiling regex: ",
NSAssert(!error,
@"Unexpected error compiling regex: ",
error.localizedDescription);
NSRange fullRange = NSMakeRange(0, [self.roomHtml length]);
@@ -110,17 +98,21 @@
[regexp matchesInString:self.roomHtml options:0 range:fullRange];
if ([matches count] != 1) {
[self showMessage:[NSString stringWithFormat:@"%d matches for %@ in %@",
[matches count], name, self.roomHtml]];
[matches count],
name,
self.roomHtml]];
return nil;
}
NSRange matchRange = [matches[0] rangeAtIndex:1];
NSString* value = [self.roomHtml substringWithRange:matchRange];
if (strippingQuotes) {
NSAssert([value length] > 2,
@"Can't strip quotes from short string: [%@]", value);
@"Can't strip quotes from short string: [%@]",
value);
NSAssert(([value characterAtIndex:0] == '\'' &&
[value characterAtIndex:[value length] - 1] == '\''),
@"Can't strip quotes from unquoted string: [%@]", value);
@"Can't strip quotes from unquoted string: [%@]",
value);
value = [value substringWithRange:NSMakeRange(1, [value length] - 2)];
}
return value;
@@ -149,9 +141,9 @@
return;
}
for (NSData* data in self.sendQueue) {
NSString *url = [NSString stringWithFormat:@"%@/%@",
self.baseURL,
self.postMessageUrl];
NSString* url =
[NSString stringWithFormat:@"%@/%@",
self.baseURL, self.postMessageUrl];
[self sendData:data withUrl:url];
}
[self.sendQueue removeAllObjects];
@@ -206,7 +198,8 @@
returningResponse:&response
error:&error];
if (!error) {
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:responseData
NSDictionary* json =
[NSJSONSerialization JSONObjectWithData:responseData
options:0
error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
@@ -236,8 +229,8 @@
- (void)connection:(NSURLConnection*)connection didReceiveData:(NSData*)data {
NSString* roomHtml = [NSString stringWithUTF8String:[data bytes]];
[self maybeLogMessage:
[NSString stringWithFormat:@"Received %d chars", [roomHtml length]]];
[self maybeLogMessage:[NSString stringWithFormat:@"Received %d chars",
[roomHtml length]]];
[self.roomHtml appendString:roomHtml];
}
@@ -245,7 +238,8 @@
didReceiveResponse:(NSURLResponse*)response {
NSHTTPURLResponse* httpResponse = (NSHTTPURLResponse*)response;
int statusCode = [httpResponse statusCode];
[self maybeLogMessage:
[self
maybeLogMessage:
[NSString stringWithFormat:
@"Response received\nURL\n%@\nStatus [%d]\nHeaders\n%@",
[httpResponse URL],
@@ -272,12 +266,11 @@
return;
}
NSString* fullUrl = [[[connection originalRequest] URL] absoluteString];
NSRange queryRange = [fullUrl rangeOfString:@"?"];
self.baseURL = [fullUrl substringToIndex:queryRange.location];
[self maybeLogMessage:
[NSString stringWithFormat:@"Base URL: %@", self.baseURL]];
[self maybeLogMessage:[NSString
stringWithFormat:@"Base URL: %@", self.baseURL]];
self.initiator = [[self findVar:@"initiator" strippingQuotes:NO] boolValue];
self.token = [self findVar:@"channelToken" strippingQuotes:YES];
@@ -297,13 +290,13 @@
NSString* pcConfig = [self findVar:@"pcConfig" strippingQuotes:NO];
if (!pcConfig)
return;
[self maybeLogMessage:
[NSString stringWithFormat:@"PC Config JSON: %@", pcConfig]];
[self maybeLogMessage:[NSString
stringWithFormat:@"PC Config JSON: %@", pcConfig]];
NSString* turnServerUrl = [self findVar:@"turnUrl" strippingQuotes:YES];
if (turnServerUrl) {
[self maybeLogMessage:
[NSString stringWithFormat:@"TURN server request URL: %@",
[self maybeLogMessage:[NSString
stringWithFormat:@"TURN server request URL: %@",
turnServerUrl]];
}
@@ -324,8 +317,8 @@
if (!credential) {
credential = @"";
}
[self maybeLogMessage:
[NSString stringWithFormat:@"url [%@] - credential [%@]",
[self maybeLogMessage:[NSString
stringWithFormat:@"url [%@] - credential [%@]",
url,
credential]];
RTCICEServer* ICEServer =
@@ -345,12 +338,13 @@
[NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
if ([[json objectForKey:@"video"] boolValue]) {
self.videoConstraints = [[RTCMediaConstraints alloc] init];
_videoConstraints = [[RTCMediaConstraints alloc] init];
}
}
[self maybeLogMessage:
[NSString stringWithFormat:@"About to open GAE with token: %@",
[self
maybeLogMessage:[NSString
stringWithFormat:@"About to open GAE with token: %@",
self.token]];
self.gaeChannel =
[[GAEChannelClient alloc] initWithToken:self.token

View File

@@ -42,13 +42,13 @@
#import "RTCVideoRenderer.h"
#import "RTCVideoCapturer.h"
#import "RTCVideoTrack.h"
#import "VideoView.h"
#import "APPRTCVideoView.h"
@interface PCObserver : NSObject<RTCPeerConnectionDelegate>
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate;
@property(nonatomic, strong) VideoView *videoView;
@property(nonatomic, strong) APPRTCVideoView* videoView;
@end
@@ -56,8 +56,6 @@
id<APPRTCSendMessage> _delegate;
}
@synthesize videoView = _videoView;
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate {
if (self = [super init]) {
_delegate = delegate;
@@ -84,7 +82,7 @@
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
[[self videoView]
[self.videoView
renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
}
});
@@ -95,8 +93,7 @@
NSLog(@"PCO onRemoveStream.");
}
- (void)
peerConnectionOnRenegotiationNeeded:(RTCPeerConnection *)peerConnection {
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
NSLog(@"PCO onRenegotiationNeeded.");
// TODO(hughv): Handle this.
}
@@ -107,18 +104,20 @@
candidate.sdpMid,
candidate.sdpMLineIndex,
candidate.sdp);
NSDictionary *json =
@{ @"type" : @"candidate",
NSDictionary* json = @{
@"type" : @"candidate",
@"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex],
@"id" : candidate.sdpMid,
@"candidate" : candidate.sdp };
@"candidate" : candidate.sdp
};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
if (!error) {
[_delegate sendData:data];
} else {
NSAssert(NO, @"Unable to serialize JSON object with error: %@",
NSAssert(NO,
@"Unable to serialize JSON object with error: %@",
error.localizedDescription);
}
}
@@ -154,14 +153,6 @@
@implementation APPRTCAppDelegate
@synthesize window = _window;
@synthesize viewController = _viewController;
@synthesize client = _client;
@synthesize pcObserver = _pcObserver;
@synthesize peerConnection = _peerConnection;
@synthesize peerConnectionFactory = _peerConnectionFactory;
@synthesize queuedRemoteCandidates = _queuedRemoteCandidates;
#pragma mark - UIApplicationDelegate methods
- (BOOL)application:(UIApplication*)application
@@ -200,9 +191,8 @@
if (self.client) {
return NO;
}
self.client = [[APPRTCAppClient alloc] init];
self.client.ICEServerDelegate = self;
self.client.messageHandler = self;
self.client = [[APPRTCAppClient alloc] initWithICEServerDelegate:self
messageHandler:self];
[self.client connectToRoom:url];
return YES;
}
@@ -225,19 +215,17 @@
self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:
@[[[RTCPair alloc]
initWithKey:@"OfferToReceiveAudio"
value:@"true"],
[[RTCPair alloc]
initWithKey:@"OfferToReceiveVideo"
value:@"true"]]
@[
[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"],
[[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"]
]
optionalConstraints:
@[[[RTCPair alloc]
initWithKey:@"internalSctpDataChannels"
@[
[[RTCPair alloc] initWithKey:@"internalSctpDataChannels"
value:@"true"],
[[RTCPair alloc]
initWithKey:@"DtlsSrtpKeyAgreement"
value:@"true"]]];
[[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement"
value:@"true"]
]];
self.pcObserver = [[PCObserver alloc] initWithDelegate:self];
self.peerConnection =
[self.peerConnectionFactory peerConnectionWithICEServers:servers
@@ -258,12 +246,12 @@
RTCVideoCapturer* capturer =
[RTCVideoCapturer capturerWithDeviceName:cameraID];
RTCVideoSource *videoSource =
[self.peerConnectionFactory
videoSourceWithCapturer:capturer constraints:self.client.videoConstraints];
RTCVideoSource* videoSource = [self.peerConnectionFactory
videoSourceWithCapturer:capturer
constraints:self.client.videoConstraints];
RTCVideoTrack* localVideoTrack =
[self.peerConnectionFactory
videoTrackWithID:@"ARDAMSv0" source:videoSource];
[self.peerConnectionFactory videoTrackWithID:@"ARDAMSv0"
source:videoSource];
if (localVideoTrack) {
[lms addVideoTrack:localVideoTrack];
}
@@ -288,8 +276,8 @@
[self displayLogMessage:@"GAE onOpen - create offer."];
RTCPair* audio =
[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"];
RTCPair *video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
RTCPair* video =
[[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints* constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
@@ -310,8 +298,8 @@
[NSString stringWithFormat:@"Error: %@", error.description]);
NSAssert([objects count] > 0, @"Invalid JSON object");
NSString* value = [objects objectForKey:@"type"];
[self displayLogMessage:
[NSString stringWithFormat:@"GAE onMessage type - %@", value]];
[self displayLogMessage:[NSString stringWithFormat:@"GAE onMessage type - %@",
value]];
if ([value compare:@"candidate"] == NSOrderedSame) {
NSString* mid = [objects objectForKey:@"id"];
NSNumber* sdpLineIndex = [objects objectForKey:@"label"];
@@ -329,7 +317,8 @@
([value compare:@"answer"] == NSOrderedSame)) {
NSString* sdpString = [objects objectForKey:@"sdp"];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:value sdp:[APPRTCAppDelegate preferISAC:sdpString]];
initWithType:value
sdp:[APPRTCAppDelegate preferISAC:sdpString]];
[self.peerConnection setRemoteDescriptionWithDelegate:self
sessionDescription:sdp];
[self displayLogMessage:@"PC - setRemoteDescription."];
@@ -353,8 +342,8 @@
}
- (void)onError:(int)code withDescription:(NSString*)description {
[self displayLogMessage:
[NSString stringWithFormat:@"GAE onError: %@", description]];
[self displayLogMessage:[NSString stringWithFormat:@"GAE onError: %@",
description]];
[self closeVideoUI];
}
@@ -411,8 +400,8 @@
[newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]];
[newMLine addObject:isac16kRtpMap];
for (; origPartIndex < [origMLineParts count]; ++origPartIndex) {
if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]]
!= NSOrderedSame) {
if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]] !=
NSOrderedSame) {
[newMLine addObject:[origMLineParts objectAtIndex:origPartIndex]];
}
}
@@ -433,8 +422,7 @@
}
[self displayLogMessage:@"SDP onSuccess(SDP) - set local description."];
RTCSessionDescription* sdp =
[[RTCSessionDescription alloc]
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:origSdp.type
sdp:[APPRTCAppDelegate preferISAC:origSdp.description]];
[self.peerConnection setLocalDescriptionWithDelegate:self
@@ -463,22 +451,19 @@
[self displayLogMessage:@"SDP onSuccess() - possibly drain candidates"];
dispatch_async(dispatch_get_main_queue(), ^(void) {
if (!self.client.initiator) {
if (self.peerConnection.remoteDescription
&& !self.peerConnection.localDescription) {
if (self.peerConnection.remoteDescription &&
!self.peerConnection.localDescription) {
[self displayLogMessage:@"Callee, setRemoteDescription succeeded"];
RTCPair *audio =
[[RTCPair alloc]
initWithKey:@"OfferToReceiveAudio" value:@"true"];
RTCPair *video =
[[RTCPair alloc]
initWithKey:@"OfferToReceiveVideo" value:@"true"];
RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
value:@"true"];
RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc]
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection
createAnswerWithDelegate:self constraints:constraints];
[self.peerConnection createAnswerWithDelegate:self
constraints:constraints];
[self displayLogMessage:@"PC - createAnswer."];
} else {
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
@@ -502,8 +487,6 @@
self.peerConnection = nil;
self.peerConnectionFactory = nil;
self.pcObserver = nil;
self.client.ICEServerDelegate = nil;
self.client.messageHandler = nil;
self.client = nil;
[RTCPeerConnectionFactory deinitializeSSL];
}

View File

@@ -30,7 +30,7 @@
@class RTCVideoTrack;
// This class encapsulates VideoRenderIosView.
@interface VideoView : UIView
@interface APPRTCVideoView : UIView
// Property to get/set required video orientation.
@property(nonatomic, assign) UIInterfaceOrientation videoOrientation;
@@ -40,11 +40,4 @@
// Sets up the underlying renderer and track objects.
- (void)renderVideoTrackInterface:(RTCVideoTrack*)track;
// Stops rendering.
- (void)pause;
// Starts rendering.
- (void)resume;
// Stops rendering and resets underlying renderer and track objects.
- (void)stop;
@end

View File

@@ -0,0 +1,82 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
 * This APPRTCVideoView must be initialized and added to a View to get
 * either the local or remote video stream rendered.
 * It is a view itself and it encapsulates
 * an object of VideoRenderIosView and UIActivityIndicatorView.
 * Both of the views will get resized as per the frame of their parent.
 */
#import "APPRTCVideoView.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
// Private class extension: backing state for rendering a single video track.
@interface APPRTCVideoView () {
  RTCVideoTrack* _track;      // Currently rendered track; nil when idle.
  RTCVideoRenderer* _renderer;  // Lazily created in -layoutSubviews.
}
// NOTE(review): neither property below is ever assigned anywhere in this
// file -- presumably leftovers from the old VideoView implementation;
// confirm and remove if truly unused.
@property(nonatomic, weak) UIView* renderView;
@property(nonatomic, weak) UIActivityIndicatorView* activityView;
@end
@implementation APPRTCVideoView

// Explicit synthesis backing the |videoOrientation| property with the
// |_videoOrientation| ivar (this class declares no custom accessors).
@synthesize videoOrientation = _videoOrientation;

// Lazily creates the renderer the first time the view is laid out, after
// applying a mirror transform for remote streams.  The |if (!_renderer)|
// guard makes repeated -layoutSubviews calls idempotent.
- (void)layoutSubviews {
  [super layoutSubviews];
  if (!_renderer) {
    // Left-right (mirror) flip the remote view.
    CGAffineTransform xform =
        CGAffineTransformMakeScale(self.isRemote ? -1 : 1, 1);
    // TODO(fischman): why is this rotate (vertical+horizontal flip) needed?!?
    xform = CGAffineTransformRotate(xform, M_PI);
    // TODO(fischman): ensure back-camera flip is correct in all orientations,
    // when back-camera support is added.
    [self setTransform:xform];
    _renderer = [[RTCVideoRenderer alloc] initWithView:self];
  }
}

// Switches the rendered track: detaches the renderer from the previous
// track and stops it, then attaches it to |videoTrack| and restarts.
// Passing nil simply stops rendering.
// NOTE(review): if this is called before the first -layoutSubviews,
// |_renderer| is still nil so the add/start calls are no-ops (messages to
// nil) -- confirm callers only invoke this after the view is laid out.
- (void)renderVideoTrackInterface:(RTCVideoTrack*)videoTrack {
  [_track removeRenderer:_renderer];
  [_renderer stop];

  _track = videoTrack;

  if (_track) {
    [_track addRenderer:_renderer];
    [_renderer start];
  }
}

@end

View File

@@ -27,7 +27,7 @@
#import <UIKit/UIKit.h>
@class VideoView;
@class APPRTCVideoView;
// The view controller that is displayed when AppRTCDemo is loaded.
@interface APPRTCViewController : UIViewController<UITextFieldDelegate>
@@ -37,8 +37,8 @@
@property(weak, nonatomic) IBOutlet UITextView* textOutput;
@property(weak, nonatomic) IBOutlet UIView* blackView;
@property(nonatomic, strong) VideoView* remoteVideoView;
@property(nonatomic, strong) VideoView* localVideoView;
@property(nonatomic, strong) APPRTCVideoView* remoteVideoView;
@property(nonatomic, strong) APPRTCVideoView* localVideoView;
- (void)displayText:(NSString*)text;
- (void)resetUI;

View File

@@ -27,7 +27,7 @@
#import "APPRTCViewController.h"
#import "VideoView.h"
#import "APPRTCVideoView.h"
@interface APPRTCViewController ()
@@ -37,16 +37,6 @@
@implementation APPRTCViewController
@synthesize textField = _textField;
@synthesize textInstructions = _textInstructions;
@synthesize textOutput = _textOutput;
@synthesize blackView = _blackView;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize localVideoView = _localVideoView;
@synthesize statusBarOrientation = _statusBarOrientation;
- (void)viewDidLoad {
[super viewDidLoad];
self.statusBarOrientation =
@@ -61,7 +51,8 @@
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
[[NSNotificationCenter defaultCenter]
postNotificationName:@"StatusBarOrientationDidChange" object:nil];
postNotificationName:@"StatusBarOrientationDidChange"
object:nil];
}
}
@@ -82,11 +73,11 @@
self.textOutput.text = nil;
self.blackView.hidden = YES;
[_remoteVideoView stop];
[_remoteVideoView renderVideoTrackInterface:nil];
[_remoteVideoView removeFromSuperview];
self.remoteVideoView = nil;
[_localVideoView stop];
[_remoteVideoView renderVideoTrackInterface:nil];
[_localVideoView removeFromSuperview];
self.localVideoView = nil;
}
@@ -104,13 +95,12 @@ enum {
- (void)setupCaptureSession {
self.blackView.hidden = NO;
CGRect frame = CGRectMake((self.blackView.bounds.size.width
-kRemoteVideoWidth)/2,
(self.blackView.bounds.size.height
-kRemoteVideoHeight)/2,
CGRect frame =
CGRectMake((self.blackView.bounds.size.width - kRemoteVideoWidth) / 2,
(self.blackView.bounds.size.height - kRemoteVideoHeight) / 2,
kRemoteVideoWidth,
kRemoteVideoHeight);
VideoView *videoView = [[VideoView alloc] initWithFrame:frame];
APPRTCVideoView* videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = TRUE;
[self.blackView addSubview:videoView];
@@ -123,17 +113,19 @@ enum {
CGSize screenSize = [[UIScreen mainScreen] bounds].size;
CGFloat localVideoViewWidth =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ?
screenSize.width/4 : screenSize.height/4;
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.width / 4
: screenSize.height / 4;
CGFloat localVideoViewHeight =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation) ?
screenSize.height/4 : screenSize.width/4;
frame = CGRectMake(self.blackView.bounds.size.width
-localVideoViewWidth-kLocalViewPadding,
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.height / 4
: screenSize.width / 4;
frame = CGRectMake(self.blackView.bounds.size.width - localVideoViewWidth -
kLocalViewPadding,
kLocalViewPadding,
localVideoViewWidth,
localVideoViewHeight);
videoView = [[VideoView alloc] initWithFrame:frame];
videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = FALSE;
[self.blackView addSubview:videoView];
@@ -163,9 +155,7 @@ enum {
[NSString stringWithFormat:@"apprtc://apprtc.appspot.com/?r=%@", room];
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]];
dispatch_async(dispatch_get_main_queue(), ^{
[self setupCaptureSession];
});
dispatch_async(dispatch_get_main_queue(), ^{ [self setupCaptureSession]; });
}
- (BOOL)textFieldShouldReturn:(UITextField*)textField {

View File

@@ -38,9 +38,6 @@
@implementation GAEChannelClient
@synthesize delegate = _delegate;
@synthesize webView = _webView;
- (id)initWithToken:(NSString*)token delegate:(id<GAEMessageHandler>)delegate {
self = [super init];
if (self) {
@@ -50,9 +47,8 @@
NSString* htmlPath =
[[NSBundle mainBundle] pathForResource:@"ios_channel" ofType:@"html"];
NSURL* htmlUrl = [NSURL fileURLWithPath:htmlPath];
NSString *path = [NSString stringWithFormat:@"%@?token=%@",
[htmlUrl absoluteString],
token];
NSString* path = [NSString
stringWithFormat:@"%@?token=%@", [htmlUrl absoluteString], token];
[_webView
loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:path]]];
@@ -97,8 +93,8 @@
NSString* description = message;
[self.delegate onError:code withDescription:description];
} else {
NSAssert(NO, @"Invalid message sent from UIWebView: %@",
resourceSpecifier);
NSAssert(
NO, @"Invalid message sent from UIWebView: %@", resourceSpecifier);
}
});
return YES;

View File

@@ -1,168 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
 * This VideoView must be initialized and added to a View to get
 * either the local or remote video stream rendered.
 * It is a view itself and it encapsulates
 * an object of VideoRenderIosView and UIActivityIndicatorView.
 * Both of the views will get resized as per the frame of their parent.
 */
#import "VideoView.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface VideoView () {
RTCVideoTrack *_track;
RTCVideoRenderer *_renderer;
}
@property (nonatomic, weak) UIView *renderView;
@property (nonatomic, weak) UIActivityIndicatorView *activityView;
@end
@implementation VideoView

@synthesize videoOrientation = _videoOrientation;
@synthesize isRemote = _isRemote;
@synthesize renderView = _renderView;
@synthesize activityView = _activityView;

// Shared initialization called from both designated initializers: builds
// the render subview plus a large activity spinner, both autoresized to
// track the parent's bounds.
// NOTE(review): naming the parameter |self| shadows the usual ObjC meaning
// inside a C function -- legal, but easy to misread.
static void init(VideoView *self) {
  UIView *renderView = [RTCVideoRenderer newRenderViewWithFrame:
      CGRectMake(0,
                 0,
                 self.bounds.size.width,
                 self.bounds.size.height)];
  [self addSubview:renderView];
  renderView.autoresizingMask = UIViewAutoresizingFlexibleHeight |
      UIViewAutoresizingFlexibleWidth;
  renderView.translatesAutoresizingMaskIntoConstraints = YES;
  self.renderView = renderView;

  // Spinner shown until -resume removes it once frames start flowing.
  UIActivityIndicatorView *indicatorView =
      [[UIActivityIndicatorView alloc]
          initWithActivityIndicatorStyle:
              UIActivityIndicatorViewStyleWhiteLarge];
  indicatorView.frame = self.bounds;
  indicatorView.hidesWhenStopped = YES;
  [self addSubview:indicatorView];
  indicatorView.autoresizingMask = UIViewAutoresizingFlexibleWidth |
      UIViewAutoresizingFlexibleHeight;
  indicatorView.translatesAutoresizingMaskIntoConstraints = YES;
  [indicatorView startAnimating];
  self.activityView = indicatorView;
}

// Designated initializer for programmatic creation.
- (id)initWithFrame:(CGRect)frame {
  self = [super initWithFrame:frame];
  if (self) {
    init(self);
  }
  return self;
}

// Initializer for nib/storyboard loading; mirrors -initWithFrame:.
-(id)initWithCoder:(NSCoder *)aDecoder {
  self = [super initWithCoder:aDecoder];
  if (self) {
    init(self);
  }
  return self;
}

- (UIInterfaceOrientation)videoOrientation {
  return _videoOrientation;
}

// Rotates (and, for local video, mirrors) the render subview to match the
// requested interface orientation.  No-op when the orientation is unchanged.
// NOTE(review): the switch has no default and no
// UIInterfaceOrientationUnknown case, so |angle| may be read uninitialized
// for that value -- confirm callers never pass it.
- (void)setVideoOrientation:(UIInterfaceOrientation)videoOrientation {
  if (_videoOrientation != videoOrientation) {
    _videoOrientation = videoOrientation;

    CGFloat angle;
    switch (videoOrientation) {
      case UIInterfaceOrientationPortrait:
        angle = M_PI_2;
        break;
      case UIInterfaceOrientationPortraitUpsideDown:
        angle = -M_PI_2;
        break;
      case UIInterfaceOrientationLandscapeLeft:
        angle = M_PI;
        break;
      case UIInterfaceOrientationLandscapeRight:
        angle = 0;
        break;
    }
    // The video comes in mirrored. That is fine for the local video,
    // but the remote video should be put back to original.
    CGAffineTransform xform =
        CGAffineTransformMakeScale([self isRemote] ? -1 : 1, 1);
    xform = CGAffineTransformRotate(xform, angle);
    [[self renderView] setTransform:xform];
  }
}

// Switches the rendered track: stops any current rendering, lazily creates
// the renderer on first use, then starts rendering |videoTrack| (nil just
// stops).  The triple orientation toggle at the end forces the transform
// in -setVideoOrientation: to be (re)applied -- presumably a workaround;
// see cr history before simplifying.
- (void)renderVideoTrackInterface:(RTCVideoTrack *)videoTrack {
  [self stop];

  _track = videoTrack;

  if (_track) {
    if (!_renderer) {
      _renderer = [[RTCVideoRenderer alloc]
          initWithRenderView:[self renderView]];
    }
    [_track addRenderer:_renderer];
    [self resume];
  }

  [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft];
  [self setVideoOrientation:UIInterfaceOrientationPortrait];
  [self setVideoOrientation:UIInterfaceOrientationLandscapeLeft];
}

// Halts frame delivery without detaching the renderer from the track.
-(void)pause {
  [_renderer stop];
}

// Removes the busy spinner (first frames are expected) and starts the
// renderer.
-(void)resume {
  [self.activityView stopAnimating];
  [self.activityView removeFromSuperview];
  self.activityView = nil;

  [_renderer start];
}

// Detaches the renderer from the current track and stops it.
- (void)stop {
  [_track removeRenderer:_renderer];
  [_renderer stop];
}

@end

View File

@@ -248,11 +248,11 @@
'examples/ios/AppRTCDemo/APPRTCAppDelegate.m',
'examples/ios/AppRTCDemo/APPRTCViewController.h',
'examples/ios/AppRTCDemo/APPRTCViewController.m',
'examples/ios/AppRTCDemo/APPRTCVideoView.h',
'examples/ios/AppRTCDemo/APPRTCVideoView.m',
'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch',
'examples/ios/AppRTCDemo/GAEChannelClient.h',
'examples/ios/AppRTCDemo/GAEChannelClient.m',
'examples/ios/AppRTCDemo/VideoView.h',
'examples/ios/AppRTCDemo/VideoView.m',
'examples/ios/AppRTCDemo/main.m',
],
'xcode_settings': {

View File

@@ -1,6 +1,6 @@
/*
* libjingle
* Copyright 2004--2010, Google Inc.
* Copyright 2010, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@@ -50,11 +50,13 @@
if ((self = [super init])) {
assert(manager != NULL);
manager_ = manager;
[[NSNotificationCenter defaultCenter] addObserver:self
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasConnectedNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(onDevicesChanged:)
name:QTCaptureDeviceWasDisconnectedNotification
object:nil];
@@ -83,9 +85,7 @@ DeviceWatcherImpl* CreateDeviceWatcherCallback(
#else
@autoreleasepool
#endif
{
impl = [[DeviceWatcherImpl alloc] init:manager];
}
{ impl = [[DeviceWatcherImpl alloc] init:manager]; }
#if !__has_feature(objc_arc)
[pool drain];
#endif
@@ -115,7 +115,8 @@ bool GetQTKitVideoDevices(std::vector<Device>* devices) {
static NSString* const kFormat = @"localizedDisplayName: \"%@\", "
@"modelUniqueID: \"%@\", uniqueID \"%@\", isConnected: %d, "
@"isOpen: %d, isInUseByAnotherApplication: %d";
NSString* info = [NSString stringWithFormat:kFormat,
NSString* info = [NSString
stringWithFormat:kFormat,
[qt_capture_device localizedDisplayName],
[qt_capture_device modelUniqueID],
[qt_capture_device uniqueID],
@@ -124,11 +125,9 @@ bool GetQTKitVideoDevices(std::vector<Device>* devices) {
[qt_capture_device isInUseByAnotherApplication]];
LOG(LS_INFO) << [info UTF8String];
std::string name([[qt_capture_device localizedDisplayName]
UTF8String]);
devices->push_back(Device(name,
[[qt_capture_device uniqueID]
UTF8String]));
std::string name([[qt_capture_device localizedDisplayName] UTF8String]);
devices->push_back(
Device(name, [[qt_capture_device uniqueID] UTF8String]));
}
}
#if !__has_feature(objc_arc)