Implement mac version of AppRTCDemo.

- Refactored and moved AppRTCDemo to support sharing AppRTC connection code between iOS and mac counterparts.
- Refactored OpenGL rendering code to be shared between iOS and mac counterparts.
- iOS AppRTCDemo now respects video aspect ratio.

BUG=2168
R=fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/17589004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6291 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
tkchin@webrtc.org 2014-05-30 22:26:06 +00:00
parent 9f8164c060
commit acca675bcf
42 changed files with 1754 additions and 744 deletions

View File

@ -45,6 +45,10 @@ Example of building & using the unittest & app:
ninja -C out_mac/Debug libjingle_peerconnection_objc_test && \
./out_mac/Debug/libjingle_peerconnection_objc_test.app/Contents/MacOS/libjingle_peerconnection_objc_test
- To build & launch the sample app on OSX:
wrmac && gclient runhooks && ninja -C out_mac/Debug AppRTCDemo && \
./out_mac/Debug/AppRTCDemo.app/Contents/MacOS/AppRTCDemo
- To build & launch the sample app on the iOS simulator:
wrsim && gclient runhooks && ninja -C out_sim/Debug iossim AppRTCDemo && \
./out_sim/Debug/iossim out_sim/Debug/AppRTCDemo.app
@ -66,14 +70,11 @@ Example of building & using the unittest & app:
the Info.plist file to ensure that the Bundle Identifier matches
your phone provisioning profile, or use a development wildcard
provisioning profile.)
- Alternatively, use ios-deploy:
ios-deploy -d -b out_ios/Debug-iphoneos/AppRTCDemo.app
- Once installed:
- Tap AppRTCDemo on the iOS device's home screen (might have to scroll to find it).
- In desktop chrome, navigate to http://apprtc.appspot.com and note
the r=<NNN> room number in the resulting URL; enter that number
into the text field on the phone.
- Alternatively, background the app and launch Safari. In Safari,
open the url apprtc://apprtc.appspot.com/?r=<NNN> where <NNN> is
the room name. Other options are to put the link in an email/chat
and send it to yourself. Clicking on it will launch AppRTCDemo
and navigate to the room.

View File

@ -32,20 +32,21 @@
#import "RTCEAGLVideoView+Internal.h"
#import <GLKit/GLKit.h>
#import <QuartzCore/QuartzCore.h>
#import "RTCEAGLVideoRenderer.h"
#import "RTCOpenGLVideoRenderer.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface RTCEAGLVideoView () <GLKViewDelegate>
// |i420Frame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCI420Frame* i420Frame;
@end
@implementation RTCEAGLVideoView {
CADisplayLink* _displayLink;
GLKView* _glkView;
RTCEAGLVideoRenderer* _glRenderer;
RTCOpenGLVideoRenderer* _glRenderer;
RTCVideoRenderer* _videoRenderer;
}
@ -53,7 +54,7 @@
if (self = [super initWithFrame:frame]) {
EAGLContext* glContext =
[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
_glRenderer = [[RTCEAGLVideoRenderer alloc] initWithContext:glContext];
_glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:glContext];
// GLKView manages a framebuffer for us.
_glkView = [[GLKView alloc] initWithFrame:CGRectZero
@ -175,7 +176,9 @@
// provide. This occurs on non-main thread.
- (void)renderer:(RTCVideoRenderer*)renderer
didSetSize:(CGSize)size {
// Size is checked in renderer as frames arrive, no need to do anything here.
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate videoView:self didChangeVideoSize:size];
});
}
- (void)renderer:(RTCVideoRenderer*)renderer

View File

@ -0,0 +1,187 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCNSGLVideoView.h"
#import <CoreVideo/CVDisplayLink.h>
#import <OpenGL/gl3.h>
#import "RTCOpenGLVideoRenderer.h"
#import "RTCVideoRenderer.h"
@interface RTCNSGLVideoView () <RTCVideoRendererDelegate>
// |i420Frame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCI420Frame* i420Frame;
// |glRenderer| is created in prepareOpenGL and torn down in clearGLContext;
// atomic because drawFrame may read it from the display link thread.
@property(atomic, strong) RTCOpenGLVideoRenderer* glRenderer;
// Renders the most recent |i420Frame|, if any; called from both drawRect:
// (main thread) and the CVDisplayLink callback (background thread).
- (void)drawFrame;
@end
// CVDisplayLink output callback. CoreVideo invokes this on a high-priority
// background thread once per display refresh; |displayLinkContext| is the
// RTCNSGLVideoView that registered itself when creating the link.
static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
                                   const CVTimeStamp* now,
                                   const CVTimeStamp* outputTime,
                                   CVOptionFlags flagsIn,
                                   CVOptionFlags* flagsOut,
                                   void* displayLinkContext) {
  RTCNSGLVideoView* videoView = (__bridge RTCNSGLVideoView*)displayLinkContext;
  [videoView drawFrame];
  return kCVReturnSuccess;
}
// RTCNSGLVideoView renders a remote or local RTCVideoTrack into an
// NSOpenGLView. Frames arrive on a worker thread via RTCVideoRendererDelegate
// and are drawn from a CVDisplayLink callback, so GL access is guarded with
// CGLLockContext.
@implementation RTCNSGLVideoView {
  CVDisplayLinkRef _displayLink;  // Created lazily in prepareOpenGL.
  RTCVideoRenderer* _videoRenderer;
}

- (instancetype)initWithFrame:(NSRect)frame
                  pixelFormat:(NSOpenGLPixelFormat*)format {
  if (self = [super initWithFrame:frame pixelFormat:format]) {
    _videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
  }
  return self;
}

- (void)dealloc {
  [self teardownDisplayLink];
}

- (void)drawRect:(NSRect)rect {
  [self drawFrame];
}

- (void)reshape {
  [super reshape];
  NSRect frame = [self frame];
  // The display link thread may be mid-draw; lock before touching GL state.
  CGLLockContext([[self openGLContext] CGLContextObj]);
  glViewport(0, 0, frame.size.width, frame.size.height);
  CGLUnlockContext([[self openGLContext] CGLContextObj]);
}

- (void)lockFocus {
  NSOpenGLContext* context = [self openGLContext];
  [super lockFocus];
  if ([context view] != self) {
    [context setView:self];
  }
  [context makeCurrentContext];
}

- (void)prepareOpenGL {
  [super prepareOpenGL];
  if (!self.glRenderer) {
    self.glRenderer =
        [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
  }
  [self.glRenderer setupGL];
  [self setupDisplayLink];
}

- (void)clearGLContext {
  [self.glRenderer teardownGL];
  self.glRenderer = nil;
  [super clearGLContext];
}

- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
  if (_videoTrack == videoTrack) {
    return;
  }
  // BUG FIX: |_displayLink| is only created once prepareOpenGL has run, so a
  // track may be attached or detached while the link is still NULL. Guard the
  // start/stop calls; setupDisplayLink starts the link itself when a track is
  // already present.
  if (_videoTrack) {
    [_videoTrack removeRenderer:_videoRenderer];
    if (_displayLink) {
      CVDisplayLinkStop(_displayLink);
    }
  }
  _videoTrack = videoTrack;
  if (_videoTrack) {
    [_videoTrack addRenderer:_videoRenderer];
    if (_displayLink) {
      CVDisplayLinkStart(_displayLink);
    }
  }
}

#pragma mark - RTCVideoRendererDelegate

// These methods are called when the video track has frame information to
// provide. This occurs on non-main thread.
- (void)renderer:(RTCVideoRenderer*)renderer
      didSetSize:(CGSize)size {
  // Delegate callbacks are expected on the main thread.
  dispatch_async(dispatch_get_main_queue(), ^{
    [self.delegate videoView:self didChangeVideoSize:size];
  });
}

- (void)renderer:(RTCVideoRenderer*)renderer
    didReceiveFrame:(RTCI420Frame*)frame {
  // Atomic property; the display link callback picks this up on its next fire.
  self.i420Frame = frame;
}

#pragma mark - Private

// Draws the most recently received frame, skipping the draw if that frame was
// already rendered (saves work when the display refreshes faster than frames
// arrive).
- (void)drawFrame {
  RTCI420Frame* i420Frame = self.i420Frame;
  if (i420Frame && self.glRenderer.lastDrawnFrame != i420Frame) {
    // This method may be called from CVDisplayLink callback which isn't on the
    // main thread so we have to lock the GL context before drawing.
    CGLLockContext([[self openGLContext] CGLContextObj]);
    [self.glRenderer drawFrame:i420Frame];
    CGLUnlockContext([[self openGLContext] CGLContextObj]);
  }
}

// Creates and configures the CVDisplayLink that paces drawing to the display
// refresh rate. Idempotent; no-op if the link already exists.
- (void)setupDisplayLink {
  if (_displayLink) {
    return;
  }
  // Synchronize buffer swaps with vertical refresh rate.
  GLint swapInt = 1;
  [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
  // Create display link.
  CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
  CVDisplayLinkSetOutputCallback(_displayLink,
                                 &OnDisplayLinkFired,
                                 (__bridge void*)self);
  // Set the display link for the current renderer.
  CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
  CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
  CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
      _displayLink, cglContext, cglPixelFormat);
  if (_videoTrack) {
    CVDisplayLinkStart(_displayLink);
  }
}

- (void)teardownDisplayLink {
  if (!_displayLink) {
    return;
  }
  // BUG FIX: stop the link before releasing it. Releasing a running link can
  // leave its render thread invoking OnDisplayLinkFired against a view that is
  // being deallocated; CVDisplayLinkStop blocks until the callback is no
  // longer executing.
  CVDisplayLinkStop(_displayLink);
  CVDisplayLinkRelease(_displayLink);
  _displayLink = NULL;
}

@end

View File

@ -29,50 +29,72 @@
#error "This file requires ARC support."
#endif
#import "RTCEAGLVideoRenderer.h"
#import "RTCOpenGLVideoRenderer.h"
#if TARGET_OS_IPHONE
#import <OpenGLES/ES2/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
#import "RTCI420Frame.h"
// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
// anticipation of that happening in the future.
// Convenience macro for writing shader code that converts a code snippet into
// a C string during the C preprocessor step.
#define RTC_STRINGIZE(...) #__VA_ARGS__
#if TARGET_OS_IPHONE
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#define SHADER_VERSION
#define VERTEX_SHADER_IN "attribute"
#define VERTEX_SHADER_OUT "varying"
#define FRAGMENT_SHADER_IN "varying"
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"
#else
#define RTC_PIXEL_FORMAT GL_RED
#define SHADER_VERSION "#version 150\n"
#define VERTEX_SHADER_IN "in"
#define VERTEX_SHADER_OUT "out"
#define FRAGMENT_SHADER_IN "in"
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
#define FRAGMENT_SHADER_COLOR "fragColor"
#define FRAGMENT_SHADER_TEXTURE "texture"
#endif
// Vertex shader doesn't do anything except pass coordinates through.
static const char kVertexShaderSource[] = RTC_STRINGIZE(
attribute vec2 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = vec4(position.x, position.y, 0.0, 1.0);
v_texcoord = texcoord;
}
);
static const char kVertexShaderSource[] =
SHADER_VERSION
VERTEX_SHADER_IN " vec2 position;\n"
VERTEX_SHADER_IN " vec2 texcoord;\n"
VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
"void main() {\n"
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
" v_texcoord = texcoord;\n"
"}\n";
// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kFragmentShaderSource[] = RTC_STRINGIZE(
precision highp float;
varying vec2 v_texcoord;
uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
void main() {
float y, u, v, r, g, b;
y = texture2D(s_textureY, v_texcoord).r;
u = texture2D(s_textureU, v_texcoord).r;
v = texture2D(s_textureV, v_texcoord).r;
u = u - 0.5;
v = v - 0.5;
r = y + 1.403 * v;
g = y - 0.344 * u - 0.714 * v;
b = y + 1.770 * u;
gl_FragColor = vec4(r, g, b, 1.0);
}
);
static const char kFragmentShaderSource[] =
SHADER_VERSION
"precision highp float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureU;\n"
"uniform lowp sampler2D s_textureV;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" float y, u, v, r, g, b;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
" u = u - 0.5;\n"
" v = v - 0.5;\n"
" r = y + 1.403 * v;\n"
" g = y - 0.344 * u - 0.714 * v;\n"
" b = y + 1.770 * u;\n"
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
" }\n";
// Compiles a shader of the given |type| with GLSL source |source| and returns
// the shader handle or 0 on error.
@ -122,11 +144,11 @@ GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
// here because the incoming frame has origin in upper left hand corner but
// OpenGL expects origin in bottom left corner.
const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, 0, 1, // Bottom left.
1, -1, 1, 1, // Bottom right.
1, 1, 1, 0, // Top right.
-1, 1, 0, 0, // Top left.
// X, Y, U, V.
-1, -1, 0, 1, // Bottom left.
1, -1, 1, 1, // Bottom right.
1, 1, 1, 0, // Top right.
-1, 1, 0, 0, // Top left.
};
// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
@ -136,13 +158,20 @@ const GLfloat gVertices[] = {
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTextures = 3 * kNumTextureSets;
@implementation RTCEAGLVideoRenderer {
@implementation RTCOpenGLVideoRenderer {
#if TARGET_OS_IPHONE
EAGLContext* _context;
#else
NSOpenGLContext* _context;
#endif
BOOL _isInitialized;
NSUInteger _currentTextureSet;
// Handles for OpenGL constructs.
GLuint _textures[kNumTextures];
GLuint _program;
#if !TARGET_OS_IPHONE
GLuint _vertexArray;
#endif
GLuint _vertexBuffer;
GLint _position;
GLint _texcoord;
@ -156,7 +185,11 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glDisable(GL_DITHER);
}
#if TARGET_OS_IPHONE
- (instancetype)initWithContext:(EAGLContext*)context {
#else
- (instancetype)initWithContext:(NSOpenGLContext*)context {
#endif
NSAssert(context != nil, @"context cannot be nil");
if (self = [super init]) {
_context = context;
@ -177,8 +210,14 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
return NO;
}
glClear(GL_COLOR_BUFFER_BIT);
#if !TARGET_OS_IPHONE
glBindVertexArray(_vertexArray);
#endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
#if !TARGET_OS_IPHONE
[_context flushBuffer];
#endif
_lastDrawnFrame = frame;
return YES;
}
@ -213,23 +252,34 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glDeleteTextures(kNumTextures, _textures);
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = 0;
#if !TARGET_OS_IPHONE
glDeleteVertexArrays(1, &_vertexArray);
#endif
_isInitialized = NO;
}
#pragma mark - Private
- (void)ensureGLContext {
NSAssert(_context, @"context shouldn't be nil");
#if TARGET_OS_IPHONE
if ([EAGLContext currentContext] != _context) {
NSAssert(_context, @"context shouldn't be nil");
[EAGLContext setCurrentContext:_context];
}
#else
if ([NSOpenGLContext currentContext] != _context) {
[_context makeCurrentContext];
}
#endif
}
- (BOOL)setupProgram {
NSAssert(!_program, @"program already set up");
GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
NSAssert(vertexShader, @"failed to create vertex shader");
GLuint fragmentShader =
CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
NSAssert(fragmentShader, @"failed to create fragment shader");
_program = CreateProgram(vertexShader, fragmentShader);
// Shaders are created only to generate program.
if (vertexShader) {
@ -282,33 +332,31 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glActiveTexture(GL_TEXTURE0 + i * 3);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
lumaWidth,
lumaHeight,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
0);
glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
chromaWidth,
chromaHeight,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
0);
glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
chromaWidth,
chromaHeight,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
0);
}
@ -328,11 +376,11 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glUniform1i(_ySampler, textureOffset);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
frame.width,
frame.height,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
frame.yPlane);
@ -340,11 +388,11 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glUniform1i(_uSampler, textureOffset + 1);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
frame.chromaWidth,
frame.chromaHeight,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
frame.uPlane);
@ -352,11 +400,11 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
glUniform1i(_vSampler, textureOffset + 2);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
frame.chromaWidth,
frame.chromaHeight,
0,
GL_LUMINANCE,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
frame.vPlane);
@ -365,9 +413,21 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
}
- (BOOL)setupVertices {
#if !TARGET_OS_IPHONE
NSAssert(!_vertexArray, @"vertex array already set up");
glGenVertexArrays(1, &_vertexArray);
if (!_vertexArray) {
return NO;
}
glBindVertexArray(_vertexArray);
#endif
NSAssert(!_vertexBuffer, @"vertex buffer already set up");
glGenBuffers(1, &_vertexBuffer);
if (!_vertexBuffer) {
#if !TARGET_OS_IPHONE
glDeleteVertexArrays(1, &_vertexArray);
_vertexArray = 0;
#endif
return NO;
}
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);

View File

@ -28,7 +28,6 @@
#import "RTCPeerConnection.h"
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionObserver.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
@ -37,8 +36,8 @@
@property(nonatomic, assign, readonly)
talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection;
- (id)initWithPeerConnection:(
talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection
observer:(webrtc::RTCPeerConnectionObserver *)observer;
- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface*)factory
iceServers:(const webrtc::PeerConnectionInterface::IceServers&)iceServers
constraints:(const webrtc::MediaConstraintsInterface*)constraints;
@end

View File

@ -38,6 +38,7 @@
#import "RTCMediaConstraints+Internal.h"
#import "RTCMediaStream+Internal.h"
#import "RTCMediaStreamTrack+Internal.h"
#import "RTCPeerConnectionObserver.h"
#import "RTCSessionDescription+Internal.h"
#import "RTCSessionDescriptionDelegate.h"
#import "RTCSessionDescription.h"
@ -273,19 +274,15 @@ class RTCStatsObserver : public StatsObserver {
@implementation RTCPeerConnection (Internal)
- (id)initWithPeerConnection:
(talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)
peerConnection
observer:(webrtc::RTCPeerConnectionObserver*)observer {
if (!peerConnection || !observer) {
NSAssert(NO, @"nil arguments not allowed");
self = nil;
return nil;
}
if ((self = [super init])) {
_peerConnection = peerConnection;
- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface*)factory
iceServers:(const webrtc::PeerConnectionInterface::IceServers&)iceServers
constraints:(const webrtc::MediaConstraintsInterface*)constraints {
NSParameterAssert(factory != NULL);
if (self = [super init]) {
_observer.reset(new webrtc::RTCPeerConnectionObserver(self));
_peerConnection = factory->CreatePeerConnection(
iceServers, constraints, NULL, NULL, _observer.get());
_localStreams = [[NSMutableArray alloc] init];
_observer.reset(observer);
}
return self;
}

View File

@ -41,7 +41,6 @@
#import "RTCMediaStreamTrack+Internal.h"
#import "RTCPeerConnection+Internal.h"
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionObserver.h"
#import "RTCVideoCapturer+Internal.h"
#import "RTCVideoSource+Internal.h"
#import "RTCVideoTrack+Internal.h"
@ -94,19 +93,11 @@
for (RTCICEServer* server in servers) {
iceServers.push_back(server.iceServer);
}
webrtc::RTCPeerConnectionObserver* observer =
new webrtc::RTCPeerConnectionObserver(delegate);
webrtc::DTLSIdentityServiceInterface* dummy_dtls_identity_service = NULL;
talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection =
self.nativeFactory->CreatePeerConnection(iceServers,
constraints.constraints,
NULL,
dummy_dtls_identity_service,
observer);
RTCPeerConnection* pc =
[[RTCPeerConnection alloc] initWithPeerConnection:peerConnection
observer:observer];
observer->SetPeerConnection(pc);
[[RTCPeerConnection alloc] initWithFactory:self.nativeFactory.get()
iceServers:iceServers
constraints:constraints.constraints];
pc.delegate = delegate;
return pc;
}

View File

@ -38,12 +38,9 @@ namespace webrtc {
class RTCPeerConnectionObserver : public PeerConnectionObserver {
public:
explicit RTCPeerConnectionObserver(id<RTCPeerConnectionDelegate> delegate);
RTCPeerConnectionObserver(RTCPeerConnection* peerConnection);
virtual ~RTCPeerConnectionObserver();
// |peerConnection| owns |this|, so outlives it.
void SetPeerConnection(RTCPeerConnection *peerConnection);
virtual void OnError() OVERRIDE;
// Triggered when the SignalingState changed.
@ -74,10 +71,7 @@ class RTCPeerConnectionObserver : public PeerConnectionObserver {
virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE;
private:
id<RTCPeerConnectionDelegate> _delegate;
// __unsafe_unretained is in fact safe because |_peerConnection| owns |this|;
// see comment on SetPeerConnection() above.
__unsafe_unretained RTCPeerConnection *_peerConnection;
__weak RTCPeerConnection* _peerConnection;
};
} // namespace webrtc

View File

@ -39,70 +39,74 @@
namespace webrtc {
RTCPeerConnectionObserver::RTCPeerConnectionObserver(
id<RTCPeerConnectionDelegate> delegate) {
_delegate = delegate;
}
RTCPeerConnectionObserver::~RTCPeerConnectionObserver() {}
void RTCPeerConnectionObserver::SetPeerConnection(
RTCPeerConnection* peerConnection) {
_peerConnection = peerConnection;
}
RTCPeerConnectionObserver::~RTCPeerConnectionObserver() {
}
void RTCPeerConnectionObserver::OnError() {
[_delegate peerConnectionOnError:_peerConnection];
[_peerConnection.delegate peerConnectionOnError:_peerConnection];
}
void RTCPeerConnectionObserver::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
[_delegate peerConnection:_peerConnection
signalingStateChanged:[RTCEnumConverter
convertSignalingStateToObjC:new_state]];
RTCSignalingState state =
[RTCEnumConverter convertSignalingStateToObjC:new_state];
[_peerConnection.delegate peerConnection:_peerConnection
signalingStateChanged:state];
}
void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) {
RTCMediaStream* mediaStream =
[[RTCMediaStream alloc] initWithMediaStream:stream];
[_delegate peerConnection:_peerConnection addedStream:mediaStream];
[_peerConnection.delegate peerConnection:_peerConnection
addedStream:mediaStream];
}
void RTCPeerConnectionObserver::OnRemoveStream(MediaStreamInterface* stream) {
RTCMediaStream* mediaStream =
[[RTCMediaStream alloc] initWithMediaStream:stream];
[_delegate peerConnection:_peerConnection removedStream:mediaStream];
[_peerConnection.delegate peerConnection:_peerConnection
removedStream:mediaStream];
}
void RTCPeerConnectionObserver::OnDataChannel(
DataChannelInterface* data_channel) {
RTCDataChannel* dataChannel =
[[RTCDataChannel alloc] initWithDataChannel:data_channel];
[_delegate peerConnection:_peerConnection didOpenDataChannel:dataChannel];
[_peerConnection.delegate peerConnection:_peerConnection
didOpenDataChannel:dataChannel];
}
void RTCPeerConnectionObserver::OnRenegotiationNeeded() {
[_delegate peerConnectionOnRenegotiationNeeded:_peerConnection];
id<RTCPeerConnectionDelegate> delegate = _peerConnection.delegate;
[delegate peerConnectionOnRenegotiationNeeded:_peerConnection];
}
void RTCPeerConnectionObserver::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
[_delegate peerConnection:_peerConnection
iceConnectionChanged:[RTCEnumConverter
convertIceConnectionStateToObjC:new_state]];
RTCICEConnectionState state =
[RTCEnumConverter convertIceConnectionStateToObjC:new_state];
[_peerConnection.delegate peerConnection:_peerConnection
iceConnectionChanged:state];
}
void RTCPeerConnectionObserver::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
[_delegate peerConnection:_peerConnection
iceGatheringChanged:[RTCEnumConverter
convertIceGatheringStateToObjC:new_state]];
RTCICEGatheringState state =
[RTCEnumConverter convertIceGatheringStateToObjC:new_state];
[_peerConnection.delegate peerConnection:_peerConnection
iceGatheringChanged:state];
}
void RTCPeerConnectionObserver::OnIceCandidate(
const IceCandidateInterface* candidate) {
RTCICECandidate* iceCandidate =
[[RTCICECandidate alloc] initWithCandidate:candidate];
[_delegate peerConnection:_peerConnection gotICECandidate:iceCandidate];
[_peerConnection.delegate peerConnection:_peerConnection
gotICECandidate:iceCandidate];
}
} // namespace webrtc

View File

@ -30,10 +30,18 @@
#import "RTCVideoRenderer.h"
@class RTCEAGLVideoView;
@protocol RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size;
@end
@class RTCVideoTrack;
// RTCEAGLVideoView renders |videoTrack| onto itself using OpenGLES.
@interface RTCEAGLVideoView : UIView
@property(nonatomic, strong) RTCVideoTrack* videoTrack;
@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
@end

View File

@ -0,0 +1,48 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if TARGET_OS_IPHONE
#error "This file targets OSX."
#endif
#import <AppKit/NSOpenGLView.h>
#import "RTCVideoTrack.h"
@class RTCNSGLVideoView;
// Informs the delegate (on the main queue) when the incoming video stream
// changes size, so the embedding UI can adjust layout/aspect ratio.
@protocol RTCNSGLVideoViewDelegate
- (void)videoView:(RTCNSGLVideoView*)videoView didChangeVideoSize:(CGSize)size;
@end
// RTCNSGLVideoView renders |videoTrack| onto itself using OpenGL (OSX
// counterpart of RTCEAGLVideoView). Setting |videoTrack| to nil stops
// rendering.
@interface RTCNSGLVideoView : NSOpenGLView
@property(nonatomic, strong) RTCVideoTrack* videoTrack;
// Weak to avoid a retain cycle with the owning controller.
@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
@end

View File

@ -26,21 +26,29 @@
*/
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <GLKit/GLKit.h>
#else
#import <AppKit/NSOpenGL.h>
#endif
@class RTCI420Frame;
// RTCEAGLVideoRenderer issues appropriate EAGL commands to draw a frame to the
// currently bound framebuffer. OpenGL framebuffer creation and management
// should be handled elsewhere using the same context used to initialize this
// class.
@interface RTCEAGLVideoRenderer : NSObject
// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
// framebuffer creation and management should be handled elsewhere using the
// same context used to initialize this class.
@interface RTCOpenGLVideoRenderer : NSObject
// The last successfully drawn frame. Used to avoid drawing frames unnecessarily
// hence saving battery life by reducing load.
@property(nonatomic, readonly) RTCI420Frame* lastDrawnFrame;
#if TARGET_OS_IPHONE
- (instancetype)initWithContext:(EAGLContext*)context;
#else
- (instancetype)initWithContext:(NSOpenGLContext*)context;
#endif
// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
// called before this function will succeed.

View File

@ -45,6 +45,8 @@
// http://www.w3.org/TR/mediacapture-streams/
@interface RTCPeerConnection : NSObject
@property(nonatomic, weak) id<RTCPeerConnectionDelegate> delegate;
// Accessor methods to active local streams.
@property(nonatomic, strong, readonly) NSArray *localStreams;

View File

@ -1,150 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "APPRTCViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "RTCEAGLVideoView.h"
@interface APPRTCViewController ()
@property(nonatomic, assign) UIInterfaceOrientation statusBarOrientation;
@end
// Name of the notification posted when the interface orientation changes.
// NOTE(review): the string value is observed elsewhere in the app (presumably
// by the video renderer) and must not change — only the literal has been
// hoisted into a typed constant here.
static NSString* const kStatusBarOrientationDidChangeNotification =
    @"StatusBarOrientationDidChange";

@implementation APPRTCViewController

#pragma mark - UIViewController

- (void)viewDidLoad {
  [super viewDidLoad];
  // Cache the starting orientation so later layout passes can detect a
  // rotation and broadcast it.
  self.statusBarOrientation =
      [UIApplication sharedApplication].statusBarOrientation;
  self.roomInput.delegate = self;
  [self.roomInput becomeFirstResponder];
}

- (void)viewDidLayoutSubviews {
  [super viewDidLayoutSubviews];
  // This method is called on rotation. Compare against the cached value so
  // the notification fires only on an actual orientation change, not on
  // every layout pass.
  UIInterfaceOrientation currentOrientation =
      [UIApplication sharedApplication].statusBarOrientation;
  if (self.statusBarOrientation != currentOrientation) {
    self.statusBarOrientation = currentOrientation;
    [[NSNotificationCenter defaultCenter]
        postNotificationName:kStatusBarOrientationDidChangeNotification
                      object:nil];
  }
}

#pragma mark - Public

// Appends |text| on a new line at the end of the log view and scrolls it
// into view. Safe to call from any thread; the UI work is dispatched to the
// main queue.
- (void)displayText:(NSString*)text {
  dispatch_async(dispatch_get_main_queue(), ^(void) {
    NSString* output =
        [NSString stringWithFormat:@"%@\n%@", self.logView.text, text];
    self.logView.text = output;
    [self.logView
        scrollRangeToVisible:NSMakeRange([self.logView.text length], 0)];
  });
}

// Restores the initial "enter a room" UI state: clears and reshows the room
// input and instructions, hides and clears the log, and tears down both
// video views.
- (void)resetUI {
  [self.roomInput resignFirstResponder];
  self.roomInput.text = nil;
  self.roomInput.hidden = NO;
  self.instructionsView.hidden = NO;
  self.logView.hidden = YES;
  self.logView.text = nil;
  self.blackView.hidden = YES;
  [self.remoteVideoView removeFromSuperview];
  self.remoteVideoView = nil;
  [self.localVideoView removeFromSuperview];
  self.localVideoView = nil;
}

// TODO(fischman): Use video dimensions from the incoming video stream
// and resize the Video View accordingly w.r.t. aspect ratio.
enum {
  // Remote video view dimensions.
  kRemoteVideoWidth = 640,
  kRemoteVideoHeight = 480,
  // Padding space for local video view with its parent.
  kLocalViewPadding = 20
};

// Creates and lays out the remote and local video views inside |blackView|.
// The remote view fills |blackView| while preserving the (currently
// hard-coded) 640x480 aspect ratio; the local preview is a quarter-size
// inset anchored to the bottom-right corner.
- (void)setupCaptureSession {
  self.blackView.hidden = NO;
  CGSize videoSize =
      CGSizeMake(kRemoteVideoWidth, kRemoteVideoHeight);
  CGRect remoteVideoFrame =
      AVMakeRectWithAspectRatioInsideRect(videoSize,
                                          self.blackView.bounds);
  CGRect localVideoFrame = remoteVideoFrame;
  // TODO(tkchin): use video dimensions from incoming video stream
  // and handle rotation.
  // Width/height are deliberately swapped relative to the remote frame here
  // (see TODO above re: rotation handling).
  localVideoFrame.size.width = remoteVideoFrame.size.height / 4;
  localVideoFrame.size.height = remoteVideoFrame.size.width / 4;
  localVideoFrame.origin.x = CGRectGetMaxX(remoteVideoFrame)
      - localVideoFrame.size.width - kLocalViewPadding;
  localVideoFrame.origin.y = CGRectGetMaxY(remoteVideoFrame)
      - localVideoFrame.size.height - kLocalViewPadding;
  self.remoteVideoView =
      [[RTCEAGLVideoView alloc] initWithFrame:remoteVideoFrame];
  [self.blackView addSubview:self.remoteVideoView];
  // Mirror the remote view horizontally.
  self.remoteVideoView.transform = CGAffineTransformMakeScale(-1, 1);
  self.localVideoView =
      [[RTCEAGLVideoView alloc] initWithFrame:localVideoFrame];
  [self.blackView addSubview:self.localVideoView];
}

#pragma mark - UITextFieldDelegate

// Fired when the room field resigns first responder (via the Join/return
// key). Hides the entry UI, hands the room off to the app via the apprtc://
// URL scheme, and brings up the video views.
- (void)textFieldDidEndEditing:(UITextField*)textField {
  NSString* room = textField.text;
  if ([room length] == 0) {
    return;
  }
  textField.hidden = YES;
  self.instructionsView.hidden = YES;
  self.logView.hidden = NO;
  // TODO(hughv): Instead of launching a URL with apprtc scheme, change to
  // prepopulating the textField with a valid URL missing the room. This allows
  // the user to have the simplicity of just entering the room or the ability to
  // override to a custom appspot instance. Remove apprtc:// when this is done.
  NSString* url =
      [NSString stringWithFormat:@"apprtc://apprtc.appspot.com/?r=%@", room];
  [[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]];
  dispatch_async(dispatch_get_main_queue(), ^{ [self setupCaptureSession]; });
}

- (BOOL)textFieldShouldReturn:(UITextField*)textField {
  // There is no other control that can take focus, so manually resign focus
  // when return (Join) is pressed to trigger |textFieldDidEndEditing|.
  [textField resignFirstResponder];
  return YES;
}

@end

View File

@ -1,3 +0,0 @@
This directory contains an example iOS client for http://apprtc.appspot.com
See ../../app/webrtc/objc/README for information on how to use it.

View File

@ -29,10 +29,13 @@
#import "GAEChannelClient.h"
// Called when there are RTCICEServers.
@protocol ICEServerDelegate<NSObject>
@class APPRTCAppClient;
@protocol APPRTCAppClientDelegate
- (void)onICEServers:(NSArray*)servers;
- (void)appClient:(APPRTCAppClient*)appClient
didErrorWithMessage:(NSString*)message;
- (void)appClient:(APPRTCAppClient*)appClient
didReceiveICEServers:(NSArray*)servers;
@end
@ -47,13 +50,12 @@
// for the registered handler to be called with received messages.
@interface APPRTCAppClient : NSObject<NSURLConnectionDataDelegate>
@property(nonatomic, weak, readonly) id<ICEServerDelegate> ICEServerDelegate;
@property(nonatomic, weak, readonly) id<GAEMessageHandler> messageHandler;
@property(nonatomic, assign) BOOL initiator;
@property(nonatomic) BOOL initiator;
@property(nonatomic, copy, readonly) RTCMediaConstraints* videoConstraints;
@property(nonatomic, weak) id<APPRTCAppClientDelegate> delegate;
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler;
- (id)initWithDelegate:(id<APPRTCAppClientDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler;
- (void)connectToRoom:(NSURL*)room;
- (void)sendData:(NSData*)data;

View File

@ -25,18 +25,22 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "APPRTCAppClient.h"
#import <dispatch/dispatch.h>
#import "GAEChannelClient.h"
#import "RTCICEServer.h"
#import "APPRTCAppDelegate.h"
#import "RTCMediaConstraints.h"
#import "RTCPair.h"
@interface APPRTCAppClient ()
@property(nonatomic, strong) dispatch_queue_t backgroundQueue;
@property(nonatomic, weak, readonly) id<GAEMessageHandler> messageHandler;
@property(nonatomic, copy) NSString* baseURL;
@property(nonatomic, strong) GAEChannelClient* gaeChannel;
@property(nonatomic, copy) NSString* postMessageUrl;
@ -49,12 +53,14 @@
@end
@implementation APPRTCAppClient
@implementation APPRTCAppClient {
dispatch_queue_t _backgroundQueue;
}
- (id)initWithICEServerDelegate:(id<ICEServerDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler {
- (id)initWithDelegate:(id<APPRTCAppClientDelegate>)delegate
messageHandler:(id<GAEMessageHandler>)handler {
if (self = [super init]) {
_ICEServerDelegate = delegate;
_delegate = delegate;
_messageHandler = handler;
_backgroundQueue = dispatch_queue_create("RTCBackgroundQueue",
DISPATCH_QUEUE_SERIAL);
@ -68,14 +74,15 @@
#pragma mark - Public methods
- (void)connectToRoom:(NSURL*)url {
NSURLRequest* request = [self getRequestFromUrl:url];
self.roomHtml = [NSMutableString stringWithCapacity:20000];
NSURLRequest* request = [NSURLRequest requestWithURL:url];
[NSURLConnection connectionWithRequest:request delegate:self];
}
- (void)sendData:(NSData*)data {
[self maybeLogMessage:@"Send message"];
dispatch_async(self.backgroundQueue, ^{
dispatch_async(_backgroundQueue, ^{
[self.sendQueue addObject:[data copy]];
if ([self.postMessageUrl length] < 1) {
@ -109,10 +116,10 @@
NSArray* matches =
[regexp matchesInString:self.roomHtml options:0 range:fullRange];
if ([matches count] != 1) {
[self showMessage:[NSString stringWithFormat:@"%d matches for %@ in %@",
[matches count],
name,
self.roomHtml]];
NSString* format = @"%lu matches for %@ in %@";
NSString* message = [NSString stringWithFormat:format,
(unsigned long)[matches count], name, self.roomHtml];
[self.delegate appClient:self didErrorWithMessage:message];
return nil;
}
NSRange matchRange = [matches[0] rangeAtIndex:1];
@ -130,15 +137,6 @@
return value;
}
- (NSURLRequest*)getRequestFromUrl:(NSURL*)url {
self.roomHtml = [NSMutableString stringWithCapacity:20000];
NSString* path =
[NSString stringWithFormat:@"https:%@", [url resourceSpecifier]];
NSURLRequest* request =
[NSURLRequest requestWithURL:[NSURL URLWithString:path]];
return request;
}
- (void)maybeLogMessage:(NSString*)message {
if (self.verboseLogging) {
NSLog(@"%@", message);
@ -164,23 +162,13 @@
[NSString stringWithUTF8String:[responseData bytes]]);
}
- (void)showMessage:(NSString*)message {
NSLog(@"%@", message);
UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:@"Unable to join"
message:message
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
}
- (void)updateICEServers:(NSMutableArray*)ICEServers
withTurnServer:(NSString*)turnServerUrl {
if ([turnServerUrl length] < 1) {
[self.ICEServerDelegate onICEServers:ICEServers];
[self.delegate appClient:self didReceiveICEServers:ICEServers];
return;
}
dispatch_async(self.backgroundQueue, ^(void) {
dispatch_async(_backgroundQueue, ^(void) {
NSMutableURLRequest* request = [NSMutableURLRequest
requestWithURL:[NSURL URLWithString:turnServerUrl]];
[request addValue:@"Mozilla/5.0" forHTTPHeaderField:@"user-agent"];
@ -214,7 +202,7 @@
}
dispatch_async(dispatch_get_main_queue(), ^(void) {
[self.ICEServerDelegate onICEServers:ICEServers];
[self.delegate appClient:self didReceiveICEServers:ICEServers];
});
});
}
@ -223,8 +211,10 @@
- (void)connection:(NSURLConnection*)connection didReceiveData:(NSData*)data {
NSString* roomHtml = [NSString stringWithUTF8String:[data bytes]];
[self maybeLogMessage:[NSString stringWithFormat:@"Received %d chars",
[roomHtml length]]];
NSString* message =
[NSString stringWithFormat:@"Received %lu chars",
(unsigned long)[roomHtml length]];
[self maybeLogMessage:message];
[self.roomHtml appendString:roomHtml];
}
@ -243,8 +233,10 @@
}
- (void)connectionDidFinishLoading:(NSURLConnection*)connection {
[self maybeLogMessage:[NSString stringWithFormat:@"finished loading %d chars",
[self.roomHtml length]]];
NSString* message =
[NSString stringWithFormat:@"finished loading %lu chars",
(unsigned long)[self.roomHtml length]];
[self maybeLogMessage:message];
NSRegularExpression* fullRegex =
[NSRegularExpression regularExpressionWithPattern:@"room is full"
options:0
@ -253,10 +245,8 @@
numberOfMatchesInString:self.roomHtml
options:0
range:NSMakeRange(0, [self.roomHtml length])]) {
[self showMessage:@"Room full"];
APPRTCAppDelegate* ad =
(APPRTCAppDelegate*)[[UIApplication sharedApplication] delegate];
[ad closeVideoUI];
NSString* message = @"Room full, dropping peerconnection.";
[self.delegate appClient:self didErrorWithMessage:message];
return;
}
@ -331,7 +321,22 @@
json =
[NSJSONSerialization JSONObjectWithData:mcData options:0 error:&error];
NSAssert(!error, @"Unable to parse. %@", error.localizedDescription);
if ([[json objectForKey:@"video"] boolValue]) {
id video = json[@"video"];
if ([video isKindOfClass:[NSDictionary class]]) {
NSDictionary* mandatory = video[@"mandatory"];
NSMutableArray* mandatoryContraints =
[NSMutableArray arrayWithCapacity:[mandatory count]];
[mandatory enumerateKeysAndObjectsUsingBlock:^(
id key, id obj, BOOL* stop) {
[mandatoryContraints addObject:[[RTCPair alloc] initWithKey:key
value:obj]];
}];
// TODO(tkchin): figure out json formats for optional constraints.
_videoConstraints =
[[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatoryContraints
optionalConstraints:nil];
} else if ([video isKindOfClass:[NSNumber class]] && [video boolValue]) {
_videoConstraints = [[RTCMediaConstraints alloc] init];
}
}

View File

@ -0,0 +1,66 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Foundation/Foundation.h>
// Used to log messages to a destination such as the UI.
@protocol APPRTCLogger<NSObject>
- (void)logMessage:(NSString*)message;
@end
@class RTCVideoTrack;
@class APPRTCConnectionManager;
// Used to provide AppRTC connection information.
@protocol APPRTCConnectionManagerDelegate<NSObject>
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack;
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack;
- (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager;
- (void)connectionManager:(APPRTCConnectionManager*)manager
didErrorWithMessage:(NSString*)errorMessage;
@end
// Abstracts the network connection aspect of AppRTC. The delegate will receive
// information about connection status as changes occur.
@interface APPRTCConnectionManager : NSObject
@property(nonatomic, weak) id<APPRTCConnectionManagerDelegate> delegate;
@property(nonatomic, weak) id<APPRTCLogger> logger;
- (instancetype)initWithDelegate:(id<APPRTCConnectionManagerDelegate>)delegate
logger:(id<APPRTCLogger>)logger;
- (BOOL)connectToRoomWithURL:(NSURL*)url;
- (void)disconnect;
@end

View File

@ -1,6 +1,6 @@
/*
* libjingle
* Copyright 2013, Google Inc.
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@ -25,14 +25,12 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "APPRTCConnectionManager.h"
#import <AVFoundation/AVFoundation.h>
#import "APPRTCAppDelegate.h"
#import "APPRTCViewController.h"
#import "RTCEAGLVideoView.h"
#import "APPRTCAppClient.h"
#import "GAEChannelClient.h"
#import "RTCICECandidate.h"
#import "RTCICEServer.h"
#import "RTCMediaConstraints.h"
#import "RTCMediaStream.h"
#import "RTCPair.h"
@ -40,209 +38,83 @@
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionFactory.h"
#import "RTCSessionDescription.h"
#import "RTCSessionDescriptionDelegate.h"
#import "RTCStatsDelegate.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoCapturer.h"
#import "RTCVideoTrack.h"
#import "RTCVideoSource.h"
@interface PCObserver : NSObject<RTCPeerConnectionDelegate>
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate;
@property(nonatomic, strong) RTCEAGLVideoView* videoView;
@end
@implementation PCObserver {
id<APPRTCSendMessage> _delegate;
}
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate {
if (self = [super init]) {
_delegate = delegate;
}
return self;
}
#pragma mark - RTCPeerConnectionDelegate
- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onError.");
NSAssert(NO, @"PeerConnection failed.");
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
signalingStateChanged:(RTCSignalingState)stateChanged {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onSignalingStateChange: %d", stateChanged);
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
addedStream:(RTCMediaStream*)stream {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onAddStream.");
NSAssert([stream.audioTracks count] <= 1,
@"Expected at most 1 audio stream");
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
self.videoView.videoTrack = stream.videoTracks[0];
}
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
removedStream:(RTCMediaStream*)stream {
dispatch_async(dispatch_get_main_queue(),
^(void) { NSLog(@"PCO onRemoveStream."); });
}
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onRenegotiationNeeded - ignoring because AppRTC has a "
"predefined negotiation strategy");
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
gotICECandidate:(RTCICECandidate*)candidate {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onICECandidate.\n Mid[%@] Index[%d] Sdp[%@]",
candidate.sdpMid,
candidate.sdpMLineIndex,
candidate.sdp);
NSDictionary* json = @{
@"type" : @"candidate",
@"label" : [NSNumber numberWithInt:candidate.sdpMLineIndex],
@"id" : candidate.sdpMid,
@"candidate" : candidate.sdp
};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
if (!error) {
[_delegate sendData:data];
} else {
NSAssert(NO,
@"Unable to serialize JSON object with error: %@",
error.localizedDescription);
}
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceGatheringChanged:(RTCICEGatheringState)newState {
dispatch_async(dispatch_get_main_queue(),
^(void) { NSLog(@"PCO onIceGatheringChange. %d", newState); });
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceConnectionChanged:(RTCICEConnectionState)newState {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"PCO onIceConnectionChange. %d", newState);
if (newState == RTCICEConnectionConnected)
[self displayLogMessage:@"ICE Connection Connected."];
NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!");
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didOpenDataChannel:(RTCDataChannel*)dataChannel {
NSAssert(NO, @"AppRTC doesn't use DataChannels");
}
#pragma mark - Private
- (void)displayLogMessage:(NSString*)message {
[_delegate displayLogMessage:message];
}
@end
@interface APPRTCAppDelegate () <RTCStatsDelegate>
@interface APPRTCConnectionManager ()
<APPRTCAppClientDelegate, GAEMessageHandler, RTCPeerConnectionDelegate,
RTCSessionDescriptionDelegate, RTCStatsDelegate>
@property(nonatomic, strong) APPRTCAppClient* client;
@property(nonatomic, strong) PCObserver* pcObserver;
@property(nonatomic, strong) RTCPeerConnection* peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory* peerConnectionFactory;
@property(nonatomic, strong) RTCVideoSource* videoSource;
@property(nonatomic, strong) NSMutableArray* queuedRemoteCandidates;
@end
@implementation APPRTCAppDelegate {
@implementation APPRTCConnectionManager {
NSTimer* _statsTimer;
}
#pragma mark - UIApplicationDelegate methods
- (BOOL)application:(UIApplication*)application
didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
[RTCPeerConnectionFactory initializeSSL];
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
self.viewController =
[[APPRTCViewController alloc] initWithNibName:@"APPRTCViewController"
bundle:nil];
self.window.rootViewController = self.viewController;
_statsTimer =
[NSTimer scheduledTimerWithTimeInterval:10
target:self
selector:@selector(didFireStatsTimer:)
userInfo:nil
repeats:YES];
[self.window makeKeyAndVisible];
return YES;
- (instancetype)initWithDelegate:(id<APPRTCConnectionManagerDelegate>)delegate
logger:(id<APPRTCLogger>)logger {
if (self = [super init]) {
self.delegate = delegate;
self.logger = logger;
self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
// TODO(tkchin): turn this into a button.
// Uncomment for stat logs.
// _statsTimer =
// [NSTimer scheduledTimerWithTimeInterval:10
// target:self
// selector:@selector(didFireStatsTimer:)
// userInfo:nil
// repeats:YES];
}
return self;
}
- (void)applicationWillResignActive:(UIApplication*)application {
[self displayLogMessage:@"Application lost focus, connection broken."];
[self closeVideoUI];
- (void)dealloc {
[self disconnect];
}
- (void)applicationDidEnterBackground:(UIApplication*)application {
}
- (void)applicationWillEnterForeground:(UIApplication*)application {
}
- (void)applicationDidBecomeActive:(UIApplication*)application {
}
- (void)applicationWillTerminate:(UIApplication*)application {
}
- (BOOL)application:(UIApplication*)application
openURL:(NSURL*)url
sourceApplication:(NSString*)sourceApplication
annotation:(id)annotation {
- (BOOL)connectToRoomWithURL:(NSURL*)url {
if (self.client) {
// Already have a connection.
return NO;
}
self.client = [[APPRTCAppClient alloc] initWithICEServerDelegate:self
messageHandler:self];
self.client = [[APPRTCAppClient alloc] initWithDelegate:self
messageHandler:self];
[self.client connectToRoom:url];
return YES;
}
- (void)displayLogMessage:(NSString*)message {
NSAssert([NSThread isMainThread], @"Called off main thread!");
NSLog(@"%@", message);
[self.viewController displayText:message];
- (void)disconnect {
if (!self.client) {
return;
}
[self.client
sendData:[@"{\"type\": \"bye\"}" dataUsingEncoding:NSUTF8StringEncoding]];
[self.peerConnection close];
self.peerConnection = nil;
self.client = nil;
self.queuedRemoteCandidates = nil;
}
#pragma mark - RTCSendMessage method
#pragma mark - APPRTCAppClientDelegate
- (void)sendData:(NSData*)data {
[self.client sendData:data];
- (void)appClient:(APPRTCAppClient*)appClient
didErrorWithMessage:(NSString*)message {
[self.delegate connectionManager:self
didErrorWithMessage:message];
}
#pragma mark - ICEServerDelegate method
- (void)onICEServers:(NSArray*)servers {
- (void)appClient:(APPRTCAppClient*)appClient
didReceiveICEServers:(NSArray*)servers {
self.queuedRemoteCandidates = [NSMutableArray array];
self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:
@[
@ -256,11 +128,10 @@
[[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement"
value:@"true"]
]];
self.pcObserver = [[PCObserver alloc] initWithDelegate:self];
self.peerConnection =
[self.peerConnectionFactory peerConnectionWithICEServers:servers
constraints:constraints
delegate:self.pcObserver];
delegate:self];
RTCMediaStream* lms =
[self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"];
@ -268,7 +139,10 @@
// support or emulation (http://goo.gl/rHAnC1) so don't bother
// trying to open a local stream.
RTCVideoTrack* localVideoTrack;
#if !TARGET_IPHONE_SIMULATOR
// TODO(tkchin): local video capture for OSX. See
// https://code.google.com/p/webrtc/issues/detail?id=3417.
#if !TARGET_IPHONE_SIMULATOR && TARGET_OS_IPHONE
NSString* cameraID = nil;
for (AVCaptureDevice* captureDevice in
[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
@ -290,25 +164,23 @@
if (localVideoTrack) {
[lms addVideoTrack:localVideoTrack];
}
self.viewController.localVideoView.videoTrack = localVideoTrack;
#else
self.viewController.localVideoView.hidden = YES;
[self.delegate connectionManager:self
didReceiveLocalVideoTrack:localVideoTrack];
#endif
self.pcObserver.videoView = self.viewController.remoteVideoView;
[lms addAudioTrack:[self.peerConnectionFactory audioTrackWithID:@"ARDAMSa0"]];
[self.peerConnection addStream:lms constraints:constraints];
[self displayLogMessage:@"onICEServers - added local stream."];
[self.logger logMessage:@"onICEServers - added local stream."];
}
#pragma mark - GAEMessageHandler methods
- (void)onOpen {
if (!self.client.initiator) {
[self displayLogMessage:@"Callee; waiting for remote offer"];
[self.logger logMessage:@"Callee; waiting for remote offer"];
return;
}
[self displayLogMessage:@"GAE onOpen - create offer."];
[self.logger logMessage:@"GAE onOpen - create offer."];
RTCPair* audio =
[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"];
RTCPair* video =
@ -318,14 +190,14 @@
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection createOfferWithDelegate:self constraints:constraints];
[self displayLogMessage:@"PC - createOffer."];
[self.logger logMessage:@"PC - createOffer."];
}
- (void)onMessage:(NSDictionary*)messageData {
NSString* type = messageData[@"type"];
NSAssert(type, @"Missing type: %@", messageData);
[self displayLogMessage:[NSString stringWithFormat:@"GAE onMessage type - %@",
type]];
[self.logger logMessage:[NSString stringWithFormat:@"GAE onMessage type - %@",
type]];
if ([type isEqualToString:@"candidate"]) {
NSString* mid = messageData[@"id"];
NSNumber* sdpLineIndex = messageData[@"label"];
@ -344,36 +216,202 @@
NSString* sdpString = messageData[@"sdp"];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:type
sdp:[APPRTCAppDelegate preferISAC:sdpString]];
sdp:[[self class] preferISAC:sdpString]];
[self.peerConnection setRemoteDescriptionWithDelegate:self
sessionDescription:sdp];
[self displayLogMessage:@"PC - setRemoteDescription."];
[self.logger logMessage:@"PC - setRemoteDescription."];
} else if ([type isEqualToString:@"bye"]) {
[self closeVideoUI];
UIAlertView* alertView =
[[UIAlertView alloc] initWithTitle:@"Remote end hung up"
message:@"dropping PeerConnection"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
[self.delegate connectionManagerDidReceiveHangup:self];
} else {
NSAssert(NO, @"Invalid message: %@", messageData);
}
}
- (void)onClose {
[self displayLogMessage:@"GAE onClose."];
[self closeVideoUI];
[self.logger logMessage:@"GAE onClose."];
[self.delegate connectionManagerDidReceiveHangup:self];
}
- (void)onError:(int)code withDescription:(NSString*)description {
[self displayLogMessage:[NSString stringWithFormat:@"GAE onError: %d, %@",
code, description]];
[self closeVideoUI];
NSString* message = [NSString stringWithFormat:@"GAE onError: %d, %@",
code, description];
[self.logger logMessage:message];
[self.delegate connectionManager:self
didErrorWithMessage:message];
}
#pragma mark - RTCSessionDescriptionDelegate methods
#pragma mark - RTCPeerConnectionDelegate
- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection {
dispatch_async(dispatch_get_main_queue(), ^{
NSString* message = @"PeerConnection error";
NSLog(@"%@", message);
NSAssert(NO, @"PeerConnection failed.");
[self.delegate connectionManager:self
didErrorWithMessage:message];
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
signalingStateChanged:(RTCSignalingState)stateChanged {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"PCO onSignalingStateChange: %d", stateChanged);
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
addedStream:(RTCMediaStream*)stream {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"PCO onAddStream.");
NSAssert([stream.audioTracks count] == 1 || [stream.videoTracks count] == 1,
@"Expected audio or video track");
NSAssert([stream.audioTracks count] <= 1,
@"Expected at most 1 audio stream");
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
[self.delegate connectionManager:self
didReceiveRemoteVideoTrack:stream.videoTracks[0]];
}
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
removedStream:(RTCMediaStream*)stream {
dispatch_async(dispatch_get_main_queue(),
^{ NSLog(@"PCO onRemoveStream."); });
}
- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"PCO onRenegotiationNeeded - ignoring because AppRTC has a "
"predefined negotiation strategy");
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
gotICECandidate:(RTCICECandidate*)candidate {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"PCO onICECandidate.\n Mid[%@] Index[%li] Sdp[%@]",
candidate.sdpMid,
(long)candidate.sdpMLineIndex,
candidate.sdp);
NSDictionary* json = @{
@"type" : @"candidate",
@"label" : @(candidate.sdpMLineIndex),
@"id" : candidate.sdpMid,
@"candidate" : candidate.sdp
};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
if (!error) {
[self.client sendData:data];
} else {
NSAssert(NO,
@"Unable to serialize JSON object with error: %@",
error.localizedDescription);
}
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceGatheringChanged:(RTCICEGatheringState)newState {
dispatch_async(dispatch_get_main_queue(),
^{ NSLog(@"PCO onIceGatheringChange. %d", newState); });
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
iceConnectionChanged:(RTCICEConnectionState)newState {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"PCO onIceConnectionChange. %d", newState);
if (newState == RTCICEConnectionConnected)
[self.logger logMessage:@"ICE Connection Connected."];
NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!");
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didOpenDataChannel:(RTCDataChannel*)dataChannel {
NSAssert(NO, @"AppRTC doesn't use DataChannels");
}
#pragma mark - RTCSessionDescriptionDelegate
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didCreateSessionDescription:(RTCSessionDescription*)origSdp
error:(NSError*)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
[self.logger logMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
return;
}
[self.logger logMessage:@"SDP onSuccess(SDP) - set local description."];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:origSdp.type
sdp:[[self class] preferISAC:origSdp.description]];
[self.peerConnection setLocalDescriptionWithDelegate:self
sessionDescription:sdp];
[self.logger logMessage:@"PC setLocalDescription."];
NSDictionary* json = @{@"type" : sdp.type, @"sdp" : sdp.description};
NSError* jsonError;
NSData* data = [NSJSONSerialization dataWithJSONObject:json
options:0
error:&jsonError];
NSAssert(!jsonError, @"Error: %@", jsonError.description);
[self.client sendData:data];
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didSetSessionDescriptionWithError:(NSError*)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
[self.logger logMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
return;
}
[self.logger logMessage:@"SDP onSuccess() - possibly drain candidates"];
if (!self.client.initiator) {
if (self.peerConnection.remoteDescription &&
!self.peerConnection.localDescription) {
[self.logger logMessage:@"Callee, setRemoteDescription succeeded"];
RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
value:@"true"];
RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection createAnswerWithDelegate:self
constraints:constraints];
[self.logger logMessage:@"PC - createAnswer."];
} else {
[self.logger logMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
} else {
if (self.peerConnection.remoteDescription) {
[self.logger logMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
}
});
}
#pragma mark - RTCStatsDelegate methods
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didGetStats:(NSArray*)stats {
dispatch_async(dispatch_get_main_queue(), ^{
NSString* message = [NSString stringWithFormat:@"Stats:\n %@", stats];
[self.logger logMessage:message];
});
}
#pragma mark - Private
// Match |pattern| to |string| and return the first group of the first
// match, or nil if no match was found.
@ -438,96 +476,6 @@
return [newLines componentsJoinedByString:@"\n"];
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didCreateSessionDescription:(RTCSessionDescription*)origSdp
error:(NSError*)error {
dispatch_async(dispatch_get_main_queue(), ^(void) {
if (error) {
[self displayLogMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
return;
}
[self displayLogMessage:@"SDP onSuccess(SDP) - set local description."];
RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
initWithType:origSdp.type
sdp:[APPRTCAppDelegate preferISAC:origSdp.description]];
[self.peerConnection setLocalDescriptionWithDelegate:self
sessionDescription:sdp];
[self displayLogMessage:@"PC setLocalDescription."];
NSDictionary* json = @{@"type" : sdp.type, @"sdp" : sdp.description};
NSError* error;
NSData* data =
[NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
NSAssert(!error,
@"%@",
[NSString stringWithFormat:@"Error: %@", error.description]);
[self sendData:data];
});
}
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didSetSessionDescriptionWithError:(NSError*)error {
dispatch_async(dispatch_get_main_queue(), ^(void) {
if (error) {
[self displayLogMessage:@"SDP onFailure."];
NSAssert(NO, error.description);
return;
}
[self displayLogMessage:@"SDP onSuccess() - possibly drain candidates"];
if (!self.client.initiator) {
if (self.peerConnection.remoteDescription &&
!self.peerConnection.localDescription) {
[self displayLogMessage:@"Callee, setRemoteDescription succeeded"];
RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
value:@"true"];
RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
value:@"true"];
NSArray* mandatory = @[ audio, video ];
RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:nil];
[self.peerConnection createAnswerWithDelegate:self
constraints:constraints];
[self displayLogMessage:@"PC - createAnswer."];
} else {
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
} else {
if (self.peerConnection.remoteDescription) {
[self displayLogMessage:@"SDP onSuccess - drain candidates"];
[self drainRemoteCandidates];
}
}
});
}
#pragma mark - RTCStatsDelegate methods
- (void)peerConnection:(RTCPeerConnection*)peerConnection
didGetStats:(NSArray*)stats {
dispatch_async(dispatch_get_main_queue(), ^{
NSString* message = [NSString stringWithFormat:@"Stats:\n %@", stats];
[self displayLogMessage:message];
});
}
#pragma mark - internal methods
- (void)disconnect {
[self.client
sendData:[@"{\"type\": \"bye\"}" dataUsingEncoding:NSUTF8StringEncoding]];
[self.peerConnection close];
self.peerConnection = nil;
self.pcObserver = nil;
self.client = nil;
self.videoSource = nil;
self.peerConnectionFactory = nil;
[RTCPeerConnectionFactory deinitializeSSL];
}
- (void)drainRemoteCandidates {
for (RTCICECandidate* candidate in self.queuedRemoteCandidates) {
[self.peerConnection addICECandidate:candidate];
@ -535,29 +483,7 @@
self.queuedRemoteCandidates = nil;
}
- (NSString*)unHTMLifyString:(NSString*)base {
// TODO(hughv): Investigate why percent escapes are being added. Removing
// them isn't necessary on Android.
// convert HTML escaped characters to UTF8.
NSString* removePercent =
[base stringByReplacingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
// remove leading and trailing ".
NSRange range;
range.length = [removePercent length] - 2;
range.location = 1;
NSString* removeQuotes = [removePercent substringWithRange:range];
// convert \" to ".
NSString* removeEscapedQuotes =
[removeQuotes stringByReplacingOccurrencesOfString:@"\\\""
withString:@"\""];
// convert \\ to \.
NSString* removeBackslash =
[removeEscapedQuotes stringByReplacingOccurrencesOfString:@"\\\\"
withString:@"\\"];
return removeBackslash;
}
- (void)didFireStatsTimer:(NSTimer *)timer {
- (void)didFireStatsTimer:(NSTimer*)timer {
if (self.peerConnection) {
[self.peerConnection getStatsWithDelegate:self
mediaStreamTrack:nil
@ -565,11 +491,4 @@
}
}
#pragma mark - public methods
- (void)closeVideoUI {
[self.viewController resetUI];
[self disconnect];
}
@end

View File

@ -25,7 +25,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
// These methods will be called by the AppEngine chanel. The documentation
// for these methods is found here. (Yes, it is a JS API.)
@ -35,15 +35,15 @@
- (void)onOpen;
- (void)onMessage:(NSDictionary*)data;
- (void)onClose;
- (void)onError:(int)code withDescription:(NSString *)description;
- (void)onError:(int)code withDescription:(NSString*)description;
@end
// Initialize with a token for an AppRTC data channel. This will load
// ios_channel.html and use the token to establish a data channel between the
// application and AppEngine.
@interface GAEChannelClient : NSObject<UIWebViewDelegate>
@interface GAEChannelClient : NSObject
- (id)initWithToken:(NSString *)token delegate:(id<GAEMessageHandler>)delegate;
- (id)initWithToken:(NSString*)token delegate:(id<GAEMessageHandler>)delegate;
@end

View File

@ -29,10 +29,25 @@
#import "RTCPeerConnectionFactory.h"
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
@interface GAEChannelClient () <UIWebViewDelegate>
@property(nonatomic, strong) UIWebView* webView;
#else
#import <WebKit/WebKit.h>
@interface GAEChannelClient ()
@property(nonatomic, strong) WebView* webView;
#endif
@property(nonatomic, assign) id<GAEMessageHandler> delegate;
@property(nonatomic, strong) UIWebView* webView;
@end
@ -41,47 +56,67 @@
- (id)initWithToken:(NSString*)token delegate:(id<GAEMessageHandler>)delegate {
self = [super init];
if (self) {
#if TARGET_OS_IPHONE
_webView = [[UIWebView alloc] init];
_webView.delegate = self;
#else
_webView = [[WebView alloc] init];
_webView.policyDelegate = self;
#endif
_delegate = delegate;
NSString* htmlPath =
[[NSBundle mainBundle] pathForResource:@"ios_channel" ofType:@"html"];
[[NSBundle mainBundle] pathForResource:@"channel" ofType:@"html"];
NSURL* htmlUrl = [NSURL fileURLWithPath:htmlPath];
NSString* path = [NSString
stringWithFormat:@"%@?token=%@", [htmlUrl absoluteString], token];
#if TARGET_OS_IPHONE
[_webView
#else
[[_webView mainFrame]
#endif
loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:path]]];
}
return self;
}
- (void)dealloc {
#if TARGET_OS_IPHONE
_webView.delegate = nil;
[_webView stopLoading];
#else
_webView.policyDelegate = nil;
[[_webView mainFrame] stopLoading];
#endif
}
#pragma mark - UIWebViewDelegate method
+ (NSDictionary*)jsonStringToDictionary:(NSString*)str {
NSData* data = [str dataUsingEncoding:NSUTF8StringEncoding];
NSError* error;
NSDictionary* dict =
[NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSAssert(!error, @"Invalid JSON? %@", str);
return dict;
}
#if TARGET_OS_IPHONE
#pragma mark - UIWebViewDelegate
- (BOOL)webView:(UIWebView*)webView
shouldStartLoadWithRequest:(NSURLRequest*)request
navigationType:(UIWebViewNavigationType)navigationType {
#else
// WebPolicyDelegate is an informal delegate.
#pragma mark - WebPolicyDelegate
- (void)webView:(WebView*)webView
decidePolicyForNavigationAction:(NSDictionary*)actionInformation
request:(NSURLRequest*)request
frame:(WebFrame*)frame
decisionListener:(id<WebPolicyDecisionListener>)listener {
#endif
NSString* scheme = [request.URL scheme];
NSAssert(scheme, @"scheme is nil: %@", request);
if (![scheme isEqualToString:@"js-frame"]) {
#if TARGET_OS_IPHONE
return YES;
#else
[listener use];
return;
#endif
}
dispatch_async(dispatch_get_main_queue(), ^(void) {
dispatch_async(dispatch_get_main_queue(), ^{
NSString* queuedMessage = [webView
stringByEvaluatingJavaScriptFromString:@"popQueuedMessage();"];
NSAssert([queuedMessage length], @"Empty queued message from JS");
@ -110,7 +145,23 @@
NSAssert(NO, @"Invalid message sent from UIWebView: %@", queuedMessage);
}
});
#if TARGET_OS_IPHONE
return NO;
#else
[listener ignore];
return;
#endif
}
#pragma mark - Private
+ (NSDictionary*)jsonStringToDictionary:(NSString*)str {
NSData* data = [str dataUsingEncoding:NSUTF8StringEncoding];
NSError* error;
NSDictionary* dict =
[NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSAssert(!error, @"Invalid JSON? %@", str);
return dict;
}
@end

View File

@ -27,34 +27,8 @@
#import <UIKit/UIKit.h>
#import "GAEChannelClient.h"
#import "APPRTCAppClient.h"
#import "RTCSessionDescriptionDelegate.h"
#import "RTCVideoSource.h"
// Used to send a message to an apprtc.appspot.com "room".
@protocol APPRTCSendMessage<NSObject>
- (void)sendData:(NSData*)data;
// Logging helper.
- (void)displayLogMessage:(NSString*)message;
@end
@class APPRTCViewController;
@class RTCVideoTrack;
// The main application class of the AppRTCDemo iOS app demonstrating
// interoperability between the Objective C implementation of PeerConnection
// and the apprtc.appspot.com demo webapp.
@interface APPRTCAppDelegate : UIResponder<ICEServerDelegate,
GAEMessageHandler,
APPRTCSendMessage,
RTCSessionDescriptionDelegate,
UIApplicationDelegate>
@property(strong, nonatomic) UIWindow* window;
@property(strong, nonatomic) APPRTCViewController* viewController;
@property (strong, nonatomic) RTCVideoSource* videoSource;
- (void)closeVideoUI;
@interface APPRTCAppDelegate : NSObject<UIApplicationDelegate>
@end

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "APPRTCAppDelegate.h"
#import "APPRTCViewController.h"
#import "RTCPeerConnectionFactory.h"
@implementation APPRTCAppDelegate {
UIWindow* _window;
}
#pragma mark - UIApplicationDelegate methods
- (BOOL)application:(UIApplication*)application
didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
[RTCPeerConnectionFactory initializeSSL];
_window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
APPRTCViewController* viewController =
[[APPRTCViewController alloc] initWithNibName:@"APPRTCViewController"
bundle:nil];
_window.rootViewController = viewController;
[_window makeKeyAndVisible];
return YES;
}
- (void)applicationWillResignActive:(UIApplication*)application {
[[self appRTCViewController] applicationWillResignActive:application];
}
- (void)applicationWillTerminate:(UIApplication*)application {
[RTCPeerConnectionFactory deinitializeSSL];
}
#pragma mark - Private
- (APPRTCViewController*)appRTCViewController {
return (APPRTCViewController*)_window.rootViewController;
}
@end

View File

@ -27,8 +27,6 @@
#import <UIKit/UIKit.h>
@class RTCEAGLVideoView;
// The view controller that is displayed when AppRTCDemo is loaded.
@interface APPRTCViewController : UIViewController<UITextFieldDelegate>
@ -37,10 +35,6 @@
@property(weak, nonatomic) IBOutlet UITextView* logView;
@property(weak, nonatomic) IBOutlet UIView* blackView;
@property(nonatomic, strong) RTCEAGLVideoView* localVideoView;
@property(nonatomic, strong) RTCEAGLVideoView* remoteVideoView;
- (void)displayText:(NSString*)text;
- (void)resetUI;
- (void)applicationWillResignActive:(UIApplication*)application;
@end

View File

@ -0,0 +1,231 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "APPRTCViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "APPRTCConnectionManager.h"
#import "RTCEAGLVideoView.h"
// Padding space for local video view with its parent.
static CGFloat const kLocalViewPadding = 20;
@interface APPRTCViewController ()
<APPRTCConnectionManagerDelegate, APPRTCLogger, RTCEAGLVideoViewDelegate>
@property(nonatomic, assign) UIInterfaceOrientation statusBarOrientation;
@property(nonatomic, strong) RTCEAGLVideoView* localVideoView;
@property(nonatomic, strong) RTCEAGLVideoView* remoteVideoView;
@end
@implementation APPRTCViewController {
APPRTCConnectionManager* _connectionManager;
CGSize _localVideoSize;
CGSize _remoteVideoSize;
}
- (instancetype)initWithNibName:(NSString*)nibName
bundle:(NSBundle*)bundle {
if (self = [super initWithNibName:nibName bundle:bundle]) {
_connectionManager =
[[APPRTCConnectionManager alloc] initWithDelegate:self
logger:self];
}
return self;
}
- (void)viewDidLoad {
[super viewDidLoad];
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
self.roomInput.delegate = self;
[self.roomInput becomeFirstResponder];
}
- (void)viewDidLayoutSubviews {
if (self.statusBarOrientation !=
[UIApplication sharedApplication].statusBarOrientation) {
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
[[NSNotificationCenter defaultCenter]
postNotificationName:@"StatusBarOrientationDidChange"
object:nil];
}
}
- (void)applicationWillResignActive:(UIApplication*)application {
[self logMessage:@"Application lost focus, connection broken."];
[self disconnect];
}
#pragma mark - APPRTCConnectionManagerDelegate
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
self.localVideoView.hidden = NO;
self.localVideoView.videoTrack = localVideoTrack;
}
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
self.remoteVideoView.videoTrack = remoteVideoTrack;
}
- (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
[self showAlertWithMessage:@"Remote hung up."];
[self disconnect];
}
- (void)connectionManager:(APPRTCConnectionManager*)manager
didErrorWithMessage:(NSString*)message {
[self showAlertWithMessage:message];
[self disconnect];
}
#pragma mark - APPRTCLogger
- (void)logMessage:(NSString*)message {
dispatch_async(dispatch_get_main_queue(), ^{
NSString* output =
[NSString stringWithFormat:@"%@\n%@", self.logView.text, message];
self.logView.text = output;
[self.logView
scrollRangeToVisible:NSMakeRange([self.logView.text length], 0)];
});
}
#pragma mark - RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView*)videoView
didChangeVideoSize:(CGSize)size {
if (videoView == self.localVideoView) {
_localVideoSize = size;
} else if (videoView == self.remoteVideoView) {
_remoteVideoSize = size;
} else {
NSParameterAssert(NO);
}
[self updateVideoViewLayout];
}
#pragma mark - UITextFieldDelegate
- (void)textFieldDidEndEditing:(UITextField*)textField {
NSString* room = textField.text;
if ([room length] == 0) {
return;
}
textField.hidden = YES;
self.instructionsView.hidden = YES;
self.logView.hidden = NO;
NSString* url =
[NSString stringWithFormat:@"https://apprtc.appspot.com/?r=%@", room];
[_connectionManager connectToRoomWithURL:[NSURL URLWithString:url]];
[self setupCaptureSession];
}
- (BOOL)textFieldShouldReturn:(UITextField*)textField {
// There is no other control that can take focus, so manually resign focus
// when return (Join) is pressed to trigger |textFieldDidEndEditing|.
[textField resignFirstResponder];
return YES;
}
#pragma mark - Private
- (void)disconnect {
[self resetUI];
[_connectionManager disconnect];
}
- (void)showAlertWithMessage:(NSString*)message {
UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:nil
message:message
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
}
- (void)resetUI {
[self.roomInput resignFirstResponder];
self.roomInput.text = nil;
self.roomInput.hidden = NO;
self.instructionsView.hidden = NO;
self.logView.hidden = YES;
self.logView.text = nil;
self.blackView.hidden = YES;
[self.remoteVideoView removeFromSuperview];
self.remoteVideoView = nil;
[self.localVideoView removeFromSuperview];
self.localVideoView = nil;
}
- (void)setupCaptureSession {
self.blackView.hidden = NO;
self.remoteVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.remoteVideoView.delegate = self;
self.remoteVideoView.transform = CGAffineTransformMakeScale(-1, 1);
[self.blackView addSubview:self.remoteVideoView];
self.localVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.localVideoView.delegate = self;
[self.blackView addSubview:self.localVideoView];
[self updateVideoViewLayout];
}
- (void)updateVideoViewLayout {
// TODO(tkchin): handle rotation.
CGSize defaultAspectRatio = CGSizeMake(4, 3);
CGSize localAspectRatio = CGSizeEqualToSize(_localVideoSize, CGSizeZero) ?
defaultAspectRatio : _localVideoSize;
CGSize remoteAspectRatio = CGSizeEqualToSize(_remoteVideoSize, CGSizeZero) ?
defaultAspectRatio : _remoteVideoSize;
CGRect remoteVideoFrame =
AVMakeRectWithAspectRatioInsideRect(remoteAspectRatio,
self.blackView.bounds);
self.remoteVideoView.frame = remoteVideoFrame;
CGRect localVideoFrame =
AVMakeRectWithAspectRatioInsideRect(localAspectRatio,
self.blackView.bounds);
localVideoFrame.size.width = localVideoFrame.size.width / 3;
localVideoFrame.size.height = localVideoFrame.size.height / 3;
localVideoFrame.origin.x = CGRectGetMaxX(self.blackView.bounds)
- localVideoFrame.size.width - kLocalViewPadding;
localVideoFrame.origin.y = CGRectGetMaxY(self.blackView.bounds)
- localVideoFrame.size.height - kLocalViewPadding;
self.localVideoView.frame = localVideoFrame;
}
@end

View File

Before

Width:  |  Height:  |  Size: 6.4 KiB

After

Width:  |  Height:  |  Size: 6.4 KiB

View File

@ -38,19 +38,6 @@
<array>
<string>iPhoneOS</string>
</array>
<key>CFBundleURLTypes</key>
<array>
<dict>
<key>CFBundleTypeRole</key>
<string>Editor</string>
<key>CFBundleURLName</key>
<string>com.google.apprtcdemo</string>
<key>CFBundleURLSchemes</key>
<array>
<string>apprtc</string>
</array>
</dict>
</array>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>UIRequiredDeviceCapabilities</key>

View File

@ -52,7 +52,7 @@
<bool key="IBUIClipsSubviews">YES</bool>
<bool key="IBUIUserInteractionEnabled">NO</bool>
<string key="targetRuntimeIdentifier">IBCocoaTouchFramework</string>
<string key="IBUIText">Use Safari and open a URL with a scheme of apprtc to load the test app and connect. i.e. apprtc://apprtc.appspot.com/?r=12345678 Or just enter the room below to connect to apprtc.</string>
<string key="IBUIText">Enter the room below to connect to apprtc.</string>
<object class="IBUITextInputTraits" key="IBUITextInputTraits">
<int key="IBUIAutocapitalizationType">2</int>
<string key="targetRuntimeIdentifier">IBCocoaTouchFramework</string>

View File

@ -0,0 +1,31 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Cocoa/Cocoa.h>
@interface APPRTCAppDelegate : NSObject<NSApplicationDelegate>
@end

View File

@ -0,0 +1,77 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "APPRTCAppDelegate.h"
#import "APPRTCViewController.h"
#import "RTCPeerConnectionFactory.h"
@interface APPRTCAppDelegate () <NSWindowDelegate>
@end
@implementation APPRTCAppDelegate {
APPRTCViewController* _viewController;
NSWindow* _window;
}
#pragma mark - NSApplicationDelegate
- (void)applicationDidFinishLaunching:(NSNotification*)notification {
[RTCPeerConnectionFactory initializeSSL];
NSScreen* screen = [NSScreen mainScreen];
NSRect visibleRect = [screen visibleFrame];
NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
NSMidY(visibleRect),
1320,
1140);
NSUInteger styleMask = NSTitledWindowMask | NSClosableWindowMask;
_window = [[NSWindow alloc] initWithContentRect:windowRect
styleMask:styleMask
backing:NSBackingStoreBuffered
defer:NO];
_window.delegate = self;
[_window makeKeyAndOrderFront:self];
[_window makeMainWindow];
_viewController = [[APPRTCViewController alloc] initWithNibName:nil
bundle:nil];
[_window setContentView:[_viewController view]];
}
#pragma mark - NSWindow
- (void)windowWillClose:(NSNotification*)notification {
[_viewController windowWillClose:notification];
[RTCPeerConnectionFactory deinitializeSSL];
[NSApp terminate:self];
}
@end

View File

@ -0,0 +1,34 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Cocoa/Cocoa.h>
@interface APPRTCViewController : NSViewController
- (void)windowWillClose:(NSNotification*)notification;
@end

View File

@ -0,0 +1,312 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "APPRTCViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "APPRTCConnectionManager.h"
#import "RTCNSGLVideoView.h"
static NSUInteger const kContentWidth = 1280;
static NSUInteger const kContentHeight = 720;
static NSUInteger const kRoomFieldWidth = 80;
static NSUInteger const kLogViewHeight = 280;
@class APPRTCMainView;
@protocol APPRTCMainViewDelegate
- (void)appRTCMainView:(APPRTCMainView*)mainView
didEnterRoomId:(NSString*)roomId;
@end
@interface APPRTCMainView : NSView
@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) RTCNSGLVideoView* localVideoView;
@property(nonatomic, readonly) RTCNSGLVideoView* remoteVideoView;
- (void)displayLogMessage:(NSString*)message;
@end
@interface APPRTCMainView () <NSTextFieldDelegate, RTCNSGLVideoViewDelegate>
@end
@implementation APPRTCMainView {
NSScrollView* _scrollView;
NSTextField* _roomLabel;
NSTextField* _roomField;
NSTextView* _logView;
RTCNSGLVideoView* _localVideoView;
RTCNSGLVideoView* _remoteVideoView;
CGSize _localVideoSize;
CGSize _remoteVideoSize;
}
+ (BOOL)requiresConstraintBasedLayout {
return YES;
}
- (instancetype)initWithFrame:(NSRect)frame {
if (self = [super initWithFrame:frame]) {
[self setupViews];
}
return self;
}
- (void)updateConstraints {
NSParameterAssert(
_roomField != nil && _scrollView != nil && _remoteVideoView != nil);
[self removeConstraints:[self constraints]];
NSDictionary* viewsDictionary =
NSDictionaryOfVariableBindings(_roomLabel,
_roomField,
_scrollView,
_remoteVideoView);
NSSize remoteViewSize = [self remoteVideoViewSize];
NSDictionary* metrics = @{
@"kLogViewHeight" : @(kLogViewHeight),
@"kRoomFieldWidth" : @(kRoomFieldWidth),
@"remoteViewWidth" : @(remoteViewSize.width),
@"remoteViewHeight" : @(remoteViewSize.height),
};
// Declare this separately to avoid compiler warning about splitting string
// within an NSArray expression.
NSString* verticalConstraint =
@"V:|-[_roomLabel]-[_roomField]-[_scrollView(kLogViewHeight)]"
"-[_remoteVideoView(remoteViewHeight)]-|";
NSArray* constraintFormats = @[
verticalConstraint,
@"|-[_roomLabel]",
@"|-[_roomField(kRoomFieldWidth)]",
@"|-[_scrollView(remoteViewWidth)]-|",
@"|-[_remoteVideoView(remoteViewWidth)]-|",
];
for (NSString* constraintFormat in constraintFormats) {
NSArray* constraints =
[NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
options:0
metrics:metrics
views:viewsDictionary];
for (NSLayoutConstraint* constraint in constraints) {
[self addConstraint:constraint];
}
}
[super updateConstraints];
}
- (void)displayLogMessage:(NSString*)message {
_logView.string =
[NSString stringWithFormat:@"%@%@\n", _logView.string, message];
NSRange range = NSMakeRange([_logView.string length], 0);
[_logView scrollRangeToVisible:range];
}
#pragma mark - NSControl delegate
- (void)controlTextDidEndEditing:(NSNotification*)notification {
NSDictionary* userInfo = [notification userInfo];
NSInteger textMovement = [userInfo[@"NSTextMovement"] intValue];
if (textMovement == NSReturnTextMovement) {
[self.delegate appRTCMainView:self didEnterRoomId:_roomField.stringValue];
}
}
#pragma mark - RTCNSGLVideoViewDelegate
- (void)videoView:(RTCNSGLVideoView*)videoView
didChangeVideoSize:(NSSize)size {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
} else if (videoView == _localVideoView) {
_localVideoSize = size;
} else {
return;
}
[self setNeedsUpdateConstraints:YES];
}
#pragma mark - Private
- (void)setupViews {
NSParameterAssert([[self subviews] count] == 0);
_roomLabel = [[NSTextField alloc] initWithFrame:NSZeroRect];
[_roomLabel setTranslatesAutoresizingMaskIntoConstraints:NO];
[_roomLabel setBezeled:NO];
[_roomLabel setDrawsBackground:NO];
[_roomLabel setEditable:NO];
[_roomLabel setStringValue:@"Enter AppRTC room id:"];
[self addSubview:_roomLabel];
_roomField = [[NSTextField alloc] initWithFrame:NSZeroRect];
[_roomField setTranslatesAutoresizingMaskIntoConstraints:NO];
[self addSubview:_roomField];
[_roomField setEditable:YES];
[_roomField setDelegate:self];
_logView = [[NSTextView alloc] initWithFrame:NSZeroRect];
[_logView setMinSize:NSMakeSize(0, kLogViewHeight)];
[_logView setMaxSize:NSMakeSize(FLT_MAX, FLT_MAX)];
[_logView setVerticallyResizable:YES];
[_logView setAutoresizingMask:NSViewWidthSizable];
NSTextContainer* textContainer = [_logView textContainer];
NSSize containerSize = NSMakeSize(kContentWidth, FLT_MAX);
[textContainer setContainerSize:containerSize];
[textContainer setWidthTracksTextView:YES];
[_logView setEditable:NO];
_scrollView = [[NSScrollView alloc] initWithFrame:NSZeroRect];
[_scrollView setTranslatesAutoresizingMaskIntoConstraints:NO];
[_scrollView setHasVerticalScroller:YES];
[_scrollView setDocumentView:_logView];
[self addSubview:_scrollView];
NSOpenGLPixelFormatAttribute attributes[] = {
NSOpenGLPFADoubleBuffer,
NSOpenGLPFADepthSize, 24,
NSOpenGLPFAOpenGLProfile,
NSOpenGLProfileVersion3_2Core,
0
};
NSOpenGLPixelFormat* pixelFormat =
[[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
_remoteVideoView = [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect
pixelFormat:pixelFormat];
[_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
_remoteVideoView.delegate = self;
[self addSubview:_remoteVideoView];
// TODO(tkchin): create local video view.
// https://code.google.com/p/webrtc/issues/detail?id=3417.
}
- (NSSize)remoteVideoViewSize {
if (_remoteVideoSize.width > 0 && _remoteVideoSize.height > 0) {
return _remoteVideoSize;
} else {
return NSMakeSize(kContentWidth, kContentHeight);
}
}
- (NSSize)localVideoViewSize {
return NSZeroSize;
}
@end
@interface APPRTCViewController ()
<APPRTCConnectionManagerDelegate, APPRTCMainViewDelegate, APPRTCLogger>
@property(nonatomic, readonly) APPRTCMainView* mainView;
@end
// View controller for the mac AppRTC demo. Owns the connection manager and
// routes its callbacks, log output, and the user's room entry between the
// manager and the main view.
@implementation APPRTCViewController {
  APPRTCConnectionManager* _connectionManager;
}

- (instancetype)initWithNibName:(NSString*)nibName
                         bundle:(NSBundle*)bundle {
  self = [super initWithNibName:nibName bundle:bundle];
  if (self) {
    _connectionManager =
        [[APPRTCConnectionManager alloc] initWithDelegate:self
                                                   logger:self];
  }
  return self;
}

- (void)dealloc {
  // Tear down any active call before the controller goes away.
  [self disconnect];
}

- (void)loadView {
  APPRTCMainView* mainView =
      [[APPRTCMainView alloc] initWithFrame:NSZeroRect];
  [mainView setTranslatesAutoresizingMaskIntoConstraints:NO];
  mainView.delegate = self;
  self.view = mainView;
}

- (void)windowWillClose:(NSNotification*)notification {
  [self disconnect];
}

#pragma mark - APPRTCConnectionManagerDelegate

- (void)connectionManager:(APPRTCConnectionManager*)manager
    didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
  self.mainView.localVideoView.videoTrack = localVideoTrack;
}

- (void)connectionManager:(APPRTCConnectionManager*)manager
    didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
  self.mainView.remoteVideoView.videoTrack = remoteVideoTrack;
}

- (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
  [self showAlertWithMessage:@"Remote closed connection"];
  [self disconnect];
}

- (void)connectionManager:(APPRTCConnectionManager*)manager
      didErrorWithMessage:(NSString*)message {
  [self showAlertWithMessage:message];
  [self disconnect];
}

#pragma mark - APPRTCLogger

- (void)logMessage:(NSString*)message {
  [self.mainView displayLogMessage:message];
}

#pragma mark - APPRTCMainViewDelegate

- (void)appRTCMainView:(APPRTCMainView*)mainView
        didEnterRoomId:(NSString*)roomId {
  NSString* urlString =
      [NSString stringWithFormat:@"https://apprtc.appspot.com/?r=%@", roomId];
  NSURL* roomURL = [NSURL URLWithString:urlString];
  [_connectionManager connectToRoomWithURL:roomURL];
}

#pragma mark - Private

- (APPRTCMainView*)mainView {
  return (APPRTCMainView*)self.view;
}

- (void)showAlertWithMessage:(NSString*)message {
  NSAlert* alert = [[NSAlert alloc] init];
  [alert setMessageText:message];
  [alert runModal];
}

- (void)disconnect {
  // Detach the renderer from the remote track before closing the connection.
  self.mainView.remoteVideoView.videoTrack = nil;
  [_connectionManager disconnect];
}

@end

View File

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>com.Google.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>LSMinimumSystemVersion</key>
<string>${MACOSX_DEPLOYMENT_TARGET}</string>
<key>NSPrincipalClass</key>
<string>NSApplication</string>
</dict>
</plist>

View File

@ -0,0 +1,39 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Cocoa/Cocoa.h>
#import "APPRTCAppDelegate.h"
// Entry point for the mac AppRTCDemo app. Creates the shared NSApplication
// manually (rather than via NSApplicationMain), installs the app delegate,
// and runs the event loop until the app terminates.
int main(int argc, char* argv[]) {
  @autoreleasepool {
    NSApplication* application = [NSApplication sharedApplication];
    APPRTCAppDelegate* delegate = [[APPRTCAppDelegate alloc] init];
    [application setDelegate:delegate];
    [application run];
  }
  return 0;
}

View File

Before

Width:  |  Height:  |  Size: 61 KiB

After

Width:  |  Height:  |  Size: 61 KiB

View File

@ -0,0 +1,3 @@
This directory contains sample iOS and Mac clients for http://apprtc.appspot.com
See ../../app/webrtc/objc/README for information on how to use them.

View File

@ -189,6 +189,7 @@
'app/webrtc/objc/RTCMediaStream.mm',
'app/webrtc/objc/RTCMediaStreamTrack+Internal.h',
'app/webrtc/objc/RTCMediaStreamTrack.mm',
'app/webrtc/objc/RTCOpenGLVideoRenderer.mm',
'app/webrtc/objc/RTCPair.m',
'app/webrtc/objc/RTCPeerConnection+Internal.h',
'app/webrtc/objc/RTCPeerConnection.mm',
@ -217,6 +218,7 @@
'app/webrtc/objc/public/RTCMediaSource.h',
'app/webrtc/objc/public/RTCMediaStream.h',
'app/webrtc/objc/public/RTCMediaStreamTrack.h',
'app/webrtc/objc/public/RTCOpenGLVideoRenderer.h',
'app/webrtc/objc/public/RTCPair.h',
'app/webrtc/objc/public/RTCPeerConnection.h',
'app/webrtc/objc/public/RTCPeerConnectionDelegate.h',
@ -255,10 +257,8 @@
'conditions': [
['OS=="ios"', {
'sources': [
'app/webrtc/objc/RTCEAGLVideoRenderer.mm',
'app/webrtc/objc/RTCEAGLVideoView+Internal.h',
'app/webrtc/objc/RTCEAGLVideoView.m',
'app/webrtc/objc/public/RTCEAGLVideoRenderer.h',
'app/webrtc/objc/public/RTCEAGLVideoView.h',
],
'link_settings': {
@ -271,11 +271,22 @@
},
}],
['OS=="mac"', {
'sources': [
'app/webrtc/objc/RTCNSGLVideoView.m',
'app/webrtc/objc/public/RTCNSGLVideoView.h',
],
'xcode_settings': {
# Need to build against 10.7 framework for full ARC support
# on OSX.
'MACOSX_DEPLOYMENT_TARGET' : '10.7',
},
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework Cocoa',
],
},
},
}],
],
}, # target libjingle_peerconnection_objc

View File

@ -218,7 +218,7 @@
], # targets
}], # OS=="linux" or OS=="win"
['OS=="ios"', {
['OS=="ios" or (OS=="mac" and mac_sdk>="10.8")', {
'targets': [
{
'target_name': 'AppRTCDemo',
@ -226,40 +226,71 @@
'product_name': 'AppRTCDemo',
'mac_bundle': 1,
'mac_bundle_resources': [
'examples/ios/AppRTCDemo/ResourceRules.plist',
'examples/ios/AppRTCDemo/en.lproj/APPRTCViewController.xib',
'examples/ios/AppRTCDemo/ios_channel.html',
'examples/ios/Icon.png',
'examples/objc/AppRTCDemo/channel.html',
],
'dependencies': [
'libjingle.gyp:libjingle_peerconnection_objc',
],
'conditions': [
['OS=="ios"', {
'mac_bundle_resources': [
'examples/objc/AppRTCDemo/ios/ResourceRules.plist',
'examples/objc/AppRTCDemo/ios/en.lproj/APPRTCViewController.xib',
'examples/objc/Icon.png',
],
'sources': [
'examples/objc/AppRTCDemo/ios/APPRTCAppDelegate.h',
'examples/objc/AppRTCDemo/ios/APPRTCAppDelegate.m',
'examples/objc/AppRTCDemo/ios/APPRTCViewController.h',
'examples/objc/AppRTCDemo/ios/APPRTCViewController.m',
'examples/objc/AppRTCDemo/ios/AppRTCDemo-Prefix.pch',
'examples/objc/AppRTCDemo/ios/main.m',
],
'xcode_settings': {
'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/ios/Info.plist',
},
}],
['OS=="mac"', {
'sources': [
'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.h',
'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m',
'examples/objc/AppRTCDemo/mac/APPRTCViewController.h',
'examples/objc/AppRTCDemo/mac/APPRTCViewController.m',
'examples/objc/AppRTCDemo/mac/main.m',
],
'xcode_settings': {
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/mac/Info.plist',
'MACOSX_DEPLOYMENT_TARGET' : '10.8',
'OTHER_LDFLAGS': [
'-framework AVFoundation',
'-framework WebKit',
],
},
}],
['target_arch=="ia32"', {
'dependencies' : [
'<(DEPTH)/testing/iossim/iossim.gyp:iossim#host',
],
}],
],
'include_dirs': [
'examples/objc/APPRTCDemo',
],
'sources': [
'examples/ios/AppRTCDemo/APPRTCAppClient.h',
'examples/ios/AppRTCDemo/APPRTCAppClient.m',
'examples/ios/AppRTCDemo/APPRTCAppDelegate.h',
'examples/ios/AppRTCDemo/APPRTCAppDelegate.m',
'examples/ios/AppRTCDemo/APPRTCViewController.h',
'examples/ios/AppRTCDemo/APPRTCViewController.m',
'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch',
'examples/ios/AppRTCDemo/GAEChannelClient.h',
'examples/ios/AppRTCDemo/GAEChannelClient.m',
'examples/ios/AppRTCDemo/main.m',
'examples/objc/AppRTCDemo/APPRTCAppClient.h',
'examples/objc/AppRTCDemo/APPRTCAppClient.m',
'examples/objc/AppRTCDemo/APPRTCConnectionManager.h',
'examples/objc/AppRTCDemo/APPRTCConnectionManager.m',
'examples/objc/AppRTCDemo/GAEChannelClient.h',
'examples/objc/AppRTCDemo/GAEChannelClient.m',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
'INFOPLIST_FILE': 'examples/ios/AppRTCDemo/Info.plist',
},
}, # target AppRTCDemo
], # targets
}], # OS=="ios"
}], # OS=="ios" or (OS=="mac" and mac_sdk>="10.8")
['OS=="android"', {
'targets': [