Add a UIView for rendering a video track.

RTCEAGLVideoView provides functionality to render a supplied RTCVideoTrack using OpenGLES2.
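A minimal usage sketch (hypothetical caller; parentView and remoteVideoTrack
are placeholders, not part of this change):

  RTCEAGLVideoView* videoView =
      [[RTCEAGLVideoView alloc] initWithFrame:parentView.bounds];
  [parentView addSubview:videoView];
  // Setting the track attaches the internal renderer and starts drawing.
  videoView.videoTrack = remoteVideoTrack;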

R=fischman@webrtc.org
BUG=3188

Review URL: https://webrtc-codereview.appspot.com/12489004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6192 4adac7df-926f-26a2-2b94-8c16560cd09d
tkchin@webrtc.org 2014-05-19 23:26:01 +00:00
parent 7ca1edb31d
commit 1732a591e7
20 changed files with 934 additions and 344 deletions

View File

@ -0,0 +1,397 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCEAGLVideoRenderer.h"
#import <OpenGLES/ES2/gl.h>
#import "RTCI420Frame.h"
// TODO(tkchin): check and log OpenGL errors. Methods here return BOOLs in
// anticipation of that happening in the future.
// Convenience macro for writing shader code that converts a code snippet into
// a C string during the C preprocessor step.
#define RTC_STRINGIZE(...) #__VA_ARGS__
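// For example, RTC_STRINGIZE(attribute vec2 position;) expands to the C
// string "attribute vec2 position;" during preprocessing.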
// Vertex shader doesn't do anything except pass coordinates through.
static const char kVertexShaderSource[] = RTC_STRINGIZE(
attribute vec2 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = vec4(position.x, position.y, 0.0, 1.0);
v_texcoord = texcoord;
}
);
// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kFragmentShaderSource[] = RTC_STRINGIZE(
precision highp float;
varying vec2 v_texcoord;
uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
void main() {
float y, u, v, r, g, b;
y = texture2D(s_textureY, v_texcoord).r;
u = texture2D(s_textureU, v_texcoord).r;
v = texture2D(s_textureV, v_texcoord).r;
u = u - 0.5;
v = v - 0.5;
r = y + 1.403 * v;
g = y - 0.344 * u - 0.714 * v;
b = y + 1.770 * u;
gl_FragColor = vec4(r, g, b, 1.0);
}
);
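// Sanity check for the formula above: a pixel with (y, u, v) =
// (0.5, 0.5, 0.5) has zero chroma offsets after subtracting 0.5, so it maps
// to the neutral gray (r, g, b) = (0.5, 0.5, 0.5).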
// Compiles a shader of the given |type| with GLSL source |source| and returns
// the shader handle or 0 on error.
GLuint CreateShader(GLenum type, const GLchar* source) {
GLuint shader = glCreateShader(type);
if (!shader) {
return 0;
}
glShaderSource(shader, 1, &source, NULL);
glCompileShader(shader);
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
glDeleteShader(shader);
shader = 0;
}
return shader;
}
// Links a shader program with the given vertex and fragment shaders and
// returns the program handle or 0 on error.
GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
if (vertexShader == 0 || fragmentShader == 0) {
return 0;
}
GLuint program = glCreateProgram();
if (!program) {
return 0;
}
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus == GL_FALSE) {
glDeleteProgram(program);
program = 0;
}
return program;
}
// When the modelview and projection matrices are identity (the default), the
// world is contained in the square of side 2 centered on the origin. Drawing
// to these coordinates is equivalent to drawing to the entire screen. The
// texture is stretched over that square using texture coordinates (u, v)
// ranging from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped
// vertically here because the incoming frame has its origin in the upper
// left corner, whereas OpenGL expects the origin in the bottom left corner.
const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, 0, 1, // Bottom left.
1, -1, 1, 1, // Bottom right.
1, 1, 1, 0, // Top right.
-1, 1, 0, 0, // Top left.
};
// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets avoids stalling the CPU when uploading to a texture that the GPU
// is still rendering from.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTextures = 3 * kNumTextureSets;
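// With |kNumTextureSets| == 2, successive frames upload into texture units
// {0, 1, 2} and {3, 4, 5} alternately (see |updateTextureDataForFrame:|).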
@implementation RTCEAGLVideoRenderer {
EAGLContext* _context;
BOOL _isInitialized;
NSUInteger _currentTextureSet;
// Handles for OpenGL constructs.
GLuint _textures[kNumTextures];
GLuint _program;
GLuint _vertexBuffer;
GLint _position;
GLint _texcoord;
GLint _ySampler;
GLint _uSampler;
GLint _vSampler;
}
+ (void)initialize {
// Disable dithering for performance.
glDisable(GL_DITHER);
}
- (instancetype)initWithContext:(EAGLContext*)context {
NSAssert(context != nil, @"context cannot be nil");
if (self = [super init]) {
_context = context;
}
return self;
}
- (BOOL)drawFrame:(RTCI420Frame*)frame {
if (!_isInitialized) {
return NO;
}
if (_lastDrawnFrame == frame) {
return NO;
}
[self ensureGLContext];
if (![self updateTextureSizesForFrame:frame] ||
![self updateTextureDataForFrame:frame]) {
return NO;
}
glClear(GL_COLOR_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
_lastDrawnFrame = frame;
return YES;
}
- (void)setupGL {
if (_isInitialized) {
return;
}
[self ensureGLContext];
if (![self setupProgram]) {
return;
}
if (![self setupTextures]) {
return;
}
if (![self setupVertices]) {
return;
}
glUseProgram(_program);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glClearColor(0, 0, 0, 1);
_isInitialized = YES;
}
- (void)teardownGL {
if (!_isInitialized) {
return;
}
[self ensureGLContext];
glDeleteProgram(_program);
_program = 0;
glDeleteTextures(kNumTextures, _textures);
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = 0;
_isInitialized = NO;
}
#pragma mark - Private
- (void)ensureGLContext {
if ([EAGLContext currentContext] != _context) {
NSAssert(_context, @"context shouldn't be nil");
[EAGLContext setCurrentContext:_context];
}
}
- (BOOL)setupProgram {
NSAssert(!_program, @"program already set up");
GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
GLuint fragmentShader =
CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
_program = CreateProgram(vertexShader, fragmentShader);
// Shaders are created only to generate the program.
if (vertexShader) {
glDeleteShader(vertexShader);
}
if (fragmentShader) {
glDeleteShader(fragmentShader);
}
if (!_program) {
return NO;
}
_position = glGetAttribLocation(_program, "position");
_texcoord = glGetAttribLocation(_program, "texcoord");
_ySampler = glGetUniformLocation(_program, "s_textureY");
_uSampler = glGetUniformLocation(_program, "s_textureU");
_vSampler = glGetUniformLocation(_program, "s_textureV");
if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
_vSampler < 0) {
return NO;
}
return YES;
}
- (BOOL)setupTextures {
glGenTextures(kNumTextures, _textures);
// Set parameters for each of the textures we created.
for (GLsizei i = 0; i < kNumTextures; i++) {
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
return YES;
}
- (BOOL)updateTextureSizesForFrame:(RTCI420Frame*)frame {
if (frame.height == _lastDrawnFrame.height &&
frame.width == _lastDrawnFrame.width &&
frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
return YES;
}
GLsizei lumaWidth = frame.width;
GLsizei lumaHeight = frame.height;
GLsizei chromaWidth = frame.chromaWidth;
GLsizei chromaHeight = frame.chromaHeight;
for (GLint i = 0; i < kNumTextureSets; i++) {
glActiveTexture(GL_TEXTURE0 + i * 3);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
lumaWidth,
lumaHeight,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
0);
glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
chromaWidth,
chromaHeight,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
0);
glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
chromaWidth,
chromaHeight,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
0);
}
return YES;
}
- (BOOL)updateTextureDataForFrame:(RTCI420Frame*)frame {
NSUInteger textureOffset = _currentTextureSet * 3;
NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
NSParameterAssert(frame.yPitch == frame.width);
NSParameterAssert(frame.uPitch == frame.chromaWidth);
NSParameterAssert(frame.vPitch == frame.chromaWidth);
glActiveTexture(GL_TEXTURE0 + textureOffset);
// When setting texture sampler uniforms, the texture index is used, not
// the texture handle.
glUniform1i(_ySampler, textureOffset);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
frame.width,
frame.height,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
frame.yPlane);
glActiveTexture(GL_TEXTURE0 + textureOffset + 1);
glUniform1i(_uSampler, textureOffset + 1);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
frame.chromaWidth,
frame.chromaHeight,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
frame.uPlane);
glActiveTexture(GL_TEXTURE0 + textureOffset + 2);
glUniform1i(_vSampler, textureOffset + 2);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
frame.chromaWidth,
frame.chromaHeight,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
frame.vPlane);
_currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
return YES;
}
- (BOOL)setupVertices {
NSAssert(!_vertexBuffer, @"vertex buffer already set up");
glGenBuffers(1, &_vertexBuffer);
if (!_vertexBuffer) {
return NO;
}
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
// Read the position attribute from |gVertices| with a size of 2 and a stride
// of 4 floats, beginning at the start of the array. The last argument
// indicates the offset of the data within |gVertices| as supplied to the
// vertex buffer.
glVertexAttribPointer(
_position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)0);
glEnableVertexAttribArray(_position);
// Read the texcoord attribute from |gVertices| with a size of 2 and a stride
// of 4 floats, beginning at the first texcoord in the array. The last
// argument indicates the offset of the data within |gVertices| as supplied
// to the vertex buffer.
glVertexAttribPointer(_texcoord,
2,
GL_FLOAT,
GL_FALSE,
4 * sizeof(GLfloat),
(void*)(2 * sizeof(GLfloat)));
glEnableVertexAttribArray(_texcoord);
return YES;
}
@end

View File

@ -1,6 +1,6 @@
/*
* libjingle
* Copyright 2013, Google Inc.
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@ -27,18 +27,10 @@
#import <Foundation/Foundation.h>
@class RTCI420Frame;
@class RTCVideoRenderer;
// RTCVideoRendererDelegate is a protocol for an object that must be
// implemented to get messages when rendering.
@protocol RTCVideoRendererDelegate<NSObject>
// The size of the frame.
- (void)videoRenderer:(RTCVideoRenderer *)videoRenderer setSize:(CGSize)size;
// The frame to be displayed.
- (void)videoRenderer:(RTCVideoRenderer *)videoRenderer
renderFrame:(RTCI420Frame *)frame;
#import "RTCEAGLVideoView.h"
#import "RTCVideoRenderer.h"
// TODO(tkchin): Move declaration to implementation file. Exposed here in order
// to support deprecated methods in RTCVideoRenderer.
@interface RTCEAGLVideoView (Internal) <RTCVideoRendererDelegate>
@end

View File

@ -0,0 +1,186 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCEAGLVideoView+Internal.h"
#import <GLKit/GLKit.h>
#import <QuartzCore/QuartzCore.h>
#import "RTCEAGLVideoRenderer.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface RTCEAGLVideoView () <GLKViewDelegate>
@property(atomic, strong) RTCI420Frame* i420Frame;
@end
@implementation RTCEAGLVideoView {
CADisplayLink* _displayLink;
GLKView* _glkView;
RTCEAGLVideoRenderer* _glRenderer;
RTCVideoRenderer* _videoRenderer;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
EAGLContext* glContext =
[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
_glRenderer = [[RTCEAGLVideoRenderer alloc] initWithContext:glContext];
// GLKView manages a framebuffer for us.
_glkView = [[GLKView alloc] initWithFrame:CGRectZero
context:glContext];
_glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
_glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
_glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
_glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
_glkView.delegate = self;
_glkView.layer.masksToBounds = YES;
[self addSubview:_glkView];
// Listen to application state in order to clean up OpenGL before the app
// goes into the background.
NSNotificationCenter* notificationCenter =
[NSNotificationCenter defaultCenter];
[notificationCenter addObserver:self
selector:@selector(willResignActive)
name:UIApplicationWillResignActiveNotification
object:nil];
[notificationCenter addObserver:self
selector:@selector(didBecomeActive)
name:UIApplicationDidBecomeActiveNotification
object:nil];
_displayLink =
[CADisplayLink displayLinkWithTarget:self
selector:@selector(displayLinkDidFire:)];
_displayLink.paused = YES;
// Set the frame interval to 2, i.e. half the screen refresh rate, which
// should be 30fps.
[_displayLink setFrameInterval:2];
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
forMode:NSRunLoopCommonModes];
_videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
[self setupGL];
}
return self;
}
- (void)dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
UIApplicationState appState =
[UIApplication sharedApplication].applicationState;
if (appState == UIApplicationStateActive) {
[self teardownGL];
}
}
- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
if (_videoTrack == videoTrack) {
return;
}
[_videoTrack removeRenderer:_videoRenderer];
_videoTrack = videoTrack;
[_videoTrack addRenderer:_videoRenderer];
// TODO(tkchin): potentially handle changes in track state - e.g. render
// black if track fails.
}
#pragma mark - UIView
- (void)layoutSubviews {
[super layoutSubviews];
_glkView.frame = self.bounds;
}
#pragma mark - GLKViewDelegate
// This method is called when the GLKView's content is dirty and needs to be
// redrawn. This occurs on the main thread.
- (void)glkView:(GLKView*)view drawInRect:(CGRect)rect {
if (self.i420Frame) {
// The renderer will draw the frame to the framebuffer corresponding to the
// one used by |view|.
[_glRenderer drawFrame:self.i420Frame];
}
}
#pragma mark - Private
// Frames are received on a separate thread, so we poll for the current frame
// using a refresh rate proportional to the screen refresh frequency. This
// occurs on the main thread.
- (void)displayLinkDidFire:(CADisplayLink*)displayLink {
// Don't render if the frame hasn't changed.
if (_glRenderer.lastDrawnFrame == self.i420Frame) {
return;
}
// This tells the GLKView that it's dirty, which will then call the
// GLKViewDelegate method implemented above.
[_glkView setNeedsDisplay];
}
- (void)setupGL {
[_glRenderer setupGL];
_displayLink.paused = NO;
}
- (void)teardownGL {
_displayLink.paused = YES;
[_glkView deleteDrawable];
[_glRenderer teardownGL];
}
- (void)didBecomeActive {
[self setupGL];
}
- (void)willResignActive {
[self teardownGL];
}
@end
@implementation RTCEAGLVideoView (Internal)
#pragma mark - RTCVideoRendererDelegate
// These methods are called when the video track has frame information to
// provide. This occurs on a non-main thread.
- (void)renderer:(RTCVideoRenderer*)renderer
didSetSize:(CGSize)size {
// Size is checked in renderer as frames arrive, no need to do anything here.
}
- (void)renderer:(RTCVideoRenderer*)renderer
didReceiveFrame:(RTCI420Frame*)frame {
self.i420Frame = frame;
}
@end

View File

@ -0,0 +1,36 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCI420Frame.h"
#include "talk/media/base/videoframe.h"
@interface RTCI420Frame (Internal)
- (instancetype)initWithVideoFrame:(const cricket::VideoFrame*)videoFrame;
@end

View File

@ -27,8 +27,68 @@
#import "RTCI420Frame.h"
@implementation RTCI420Frame
#include "talk/base/scoped_ptr.h"
#include "talk/media/base/videoframe.h"
// TODO(hughv): Should this just be a cricket::VideoFrame wrapper object?
@implementation RTCI420Frame {
talk_base::scoped_ptr<cricket::VideoFrame> _videoFrame;
}
- (NSUInteger)width {
return _videoFrame->GetWidth();
}
- (NSUInteger)height {
return _videoFrame->GetHeight();
}
- (NSUInteger)chromaWidth {
return _videoFrame->GetChromaWidth();
}
- (NSUInteger)chromaHeight {
return _videoFrame->GetChromaHeight();
}
- (NSUInteger)chromaSize {
return _videoFrame->GetChromaSize();
}
- (const uint8_t*)yPlane {
return _videoFrame->GetYPlane();
}
- (const uint8_t*)uPlane {
return _videoFrame->GetUPlane();
}
- (const uint8_t*)vPlane {
return _videoFrame->GetVPlane();
}
- (NSInteger)yPitch {
return _videoFrame->GetYPitch();
}
- (NSInteger)uPitch {
return _videoFrame->GetUPitch();
}
- (NSInteger)vPitch {
return _videoFrame->GetVPitch();
}
@end
@implementation RTCI420Frame (Internal)
- (instancetype)initWithVideoFrame:(const cricket::VideoFrame*)videoFrame {
if (self = [super init]) {
// Keep a shallow copy of the video frame. The underlying frame buffer is
// not copied.
_videoFrame.reset(videoFrame->Copy());
}
return self;
}
@end

View File

@ -32,8 +32,24 @@
#import "RTCMediaStreamTrack+Internal.h"
#import "RTCEnumConverter.h"
namespace webrtc {
class RTCMediaStreamTrackObserver : public ObserverInterface {
public:
RTCMediaStreamTrackObserver(RTCMediaStreamTrack* track) { _track = track; }
virtual void OnChanged() OVERRIDE {
[_track.delegate mediaStreamTrackDidChange:_track];
}
private:
__weak RTCMediaStreamTrack* _track;
};
}
@implementation RTCMediaStreamTrack {
talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface> _mediaTrack;
talk_base::scoped_ptr<webrtc::RTCMediaStreamTrackObserver> _observer;
}
@synthesize label;
@ -91,13 +107,19 @@
self = nil;
return nil;
}
if ((self = [super init])) {
if (self = [super init]) {
_mediaTrack = mediaTrack;
label = @(mediaTrack->id().c_str());
_observer.reset(new webrtc::RTCMediaStreamTrackObserver(self));
_mediaTrack->RegisterObserver(_observer.get());
}
return self;
}
- (void)dealloc {
_mediaTrack->UnregisterObserver(_observer.get());
}
- (talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
return _mediaTrack;
}

View File

@ -31,8 +31,6 @@
@interface RTCVideoRenderer (Internal)
// TODO(hughv): Use smart pointer.
@property(nonatomic, assign, readonly)
webrtc::VideoRendererInterface *videoRenderer;
@property(nonatomic, readonly) webrtc::VideoRendererInterface* videoRenderer;
@end

View File

@ -32,173 +32,71 @@
#import "RTCVideoRenderer+Internal.h"
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#import "RTCEAGLVideoView+Internal.h"
#endif
#import "RTCI420Frame+Internal.h"
#import "RTCI420Frame.h"
#import "RTCVideoRendererDelegate.h"
namespace webrtc {
#import "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#import "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "common_video/interface/i420_video_frame.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/media/base/videoframe.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
// An adapter presenting VideoRendererInterface's API and delegating to
// a VideoRenderCallback. Suitable for feeding to
// VideoTrackInterface::AddRenderer().
class CallbackConverter : public webrtc::VideoRendererInterface {
class RTCVideoRendererAdapter : public VideoRendererInterface {
public:
CallbackConverter(webrtc::VideoRenderCallback* callback,
const uint32_t streamId)
: callback_(callback), streamId_(streamId) {}
RTCVideoRendererAdapter(RTCVideoRenderer* renderer) { _renderer = renderer; }
virtual void SetSize(int width, int height) {};
virtual void RenderFrame(const cricket::VideoFrame* frame) {
// Make this into an I420VideoFrame.
size_t width = frame->GetWidth();
size_t height = frame->GetHeight();
virtual void SetSize(int width, int height) OVERRIDE {
[_renderer.delegate renderer:_renderer
didSetSize:CGSizeMake(width, height)];
}
size_t y_plane_size = width * height;
size_t uv_plane_size = frame->GetChromaSize();
webrtc::I420VideoFrame i420Frame;
i420Frame.CreateFrame(y_plane_size,
frame->GetYPlane(),
uv_plane_size,
frame->GetUPlane(),
uv_plane_size,
frame->GetVPlane(),
width,
height,
frame->GetYPitch(),
frame->GetUPitch(),
frame->GetVPitch());
i420Frame.set_render_time_ms(frame->GetTimeStamp() / 1000000);
callback_->RenderFrame(streamId_, i420Frame);
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
if (!_renderer.delegate) {
return;
}
RTCI420Frame* i420Frame = [[RTCI420Frame alloc] initWithVideoFrame:frame];
[_renderer.delegate renderer:_renderer didReceiveFrame:i420Frame];
}
private:
webrtc::VideoRenderCallback* callback_;
const uint32_t streamId_;
__weak RTCVideoRenderer* _renderer;
};
}
@implementation RTCVideoRenderer {
VideoRenderIosView* _renderView;
UIActivityIndicatorView* _activityIndicator;
CallbackConverter* _converter;
talk_base::scoped_ptr<webrtc::VideoRenderIosImpl> _iosRenderer;
talk_base::scoped_ptr<webrtc::RTCVideoRendererAdapter> _adapter;
#if TARGET_OS_IPHONE
RTCEAGLVideoView* _videoView;
#endif
}
@synthesize delegate = _delegate;
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
// TODO(hughv): Create video renderer.
[self doesNotRecognizeSelector:_cmd];
return self;
}
- (id)initWithView:(UIView*)view {
if ((self = [super init])) {
CGRect frame =
CGRectMake(0, 0, view.bounds.size.width, view.bounds.size.height);
_renderView = [[VideoRenderIosView alloc] initWithFrame:frame];
_iosRenderer.reset(
new webrtc::VideoRenderIosImpl(0, (__bridge void*)_renderView, NO));
if (_iosRenderer->Init() == -1) {
self = nil;
} else {
webrtc::VideoRenderCallback* callback =
_iosRenderer->AddIncomingRenderStream(0, 1, 0, 0, 1, 1);
_converter = new CallbackConverter(callback, 0);
_iosRenderer->StartRender();
[view addSubview:_renderView];
_renderView.autoresizingMask =
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
_renderView.translatesAutoresizingMaskIntoConstraints = YES;
_activityIndicator = [[UIActivityIndicatorView alloc]
initWithActivityIndicatorStyle:
UIActivityIndicatorViewStyleWhiteLarge];
_activityIndicator.frame = view.bounds;
_activityIndicator.hidesWhenStopped = YES;
[view addSubview:_activityIndicator];
_activityIndicator.autoresizingMask =
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_activityIndicator.translatesAutoresizingMaskIntoConstraints = YES;
[_activityIndicator startAnimating];
}
}
return self;
}
- (void)start {
[_activityIndicator stopAnimating];
[_activityIndicator removeFromSuperview];
_iosRenderer->StartRender();
}
- (void)stop {
[_activityIndicator stopAnimating];
[_activityIndicator removeFromSuperview];
_iosRenderer->StopRender();
}
@end
@implementation RTCVideoRenderer (Internal)
- (webrtc::VideoRendererInterface*)videoRenderer {
return _converter;
}
@end
#else // TARGET_OS_IPHONE
// TODO(fischman): implement an OS/X RTCVideoRenderer (and add to
// RTCPeerConnectionTest!).
#import "RTCI420Frame.h"
#import "RTCVideoRendererDelegate.h"
@implementation RTCVideoRenderer
@synthesize delegate = _delegate;
+ (RTCVideoRenderer*)videoRendererWithFrame:(CGRect)frame {
// TODO(hughv): Implement.
return nil;
}
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
if ((self = [super init])) {
- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
if (self = [super init]) {
_delegate = delegate;
// TODO(hughv): Create video renderer.
_adapter.reset(new webrtc::RTCVideoRendererAdapter(self));
}
return self;
}
- (id)initWithView:(UIView*)view {
return nil;
}
- (void)setTransform:(CGAffineTransform)transform {
}
- (void)start {
}
- (void)stop {
#if TARGET_OS_IPHONE
// TODO(tkchin): remove shim for deprecated method.
- (instancetype)initWithView:(UIView*)view {
if (self = [super init]) {
_videoView = [[RTCEAGLVideoView alloc] initWithFrame:view.bounds];
_videoView.autoresizingMask =
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
_videoView.translatesAutoresizingMaskIntoConstraints = YES;
[view addSubview:_videoView];
self.delegate = _videoView;
_adapter.reset(new webrtc::RTCVideoRendererAdapter(self));
}
return self;
}
#endif
@end
@implementation RTCVideoRenderer (Internal)
- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface*)videoRenderer {
if ((self = [super init])) {
// TODO(hughv): Implement.
}
return self;
}
- (webrtc::VideoRendererInterface*)videoRenderer {
// TODO(hughv): Implement.
return NULL;
}
@end
#endif // TARGET_OS_IPHONE
- (webrtc::VideoRendererInterface*)videoRenderer {
return _adapter.get();
}
@end

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Foundation/Foundation.h>
#import <GLKit/GLKit.h>
@class RTCI420Frame;
// RTCEAGLVideoRenderer issues appropriate EAGL commands to draw a frame to the
// currently bound framebuffer. OpenGL framebuffer creation and management
// should be handled elsewhere using the same context used to initialize this
// class.
@interface RTCEAGLVideoRenderer : NSObject
// The last successfully drawn frame. Used to avoid drawing frames
// unnecessarily, which reduces load and saves battery life.
@property(nonatomic, readonly) RTCI420Frame* lastDrawnFrame;
- (instancetype)initWithContext:(EAGLContext*)context;
// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
// called before this function will succeed.
- (BOOL)drawFrame:(RTCI420Frame*)frame;
// The following methods are used to manage OpenGL resources. On iOS,
// applications should release OpenGL resources when they are placed in the
// background so that they are available to the foreground application. In
// fact, attempting to issue OpenGL ES commands while in the background will
// result in application termination.
// Sets up the OpenGL state needed for rendering.
- (void)setupGL;
// Tears down the OpenGL state created by |setupGL|.
- (void)teardownGL;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end
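// A sketch of the expected call sequence (hypothetical caller; assumes the
// target framebuffer, e.g. one managed by a GLKView, is bound when drawing):
//
//   EAGLContext* context =
//       [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
//   RTCEAGLVideoRenderer* renderer =
//       [[RTCEAGLVideoRenderer alloc] initWithContext:context];
//   [renderer setupGL];          // While the application is in the foreground.
//   [renderer drawFrame:frame];  // Per frame, with the framebuffer bound.
//   [renderer teardownGL];       // Before entering the background.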

View File

@ -1,6 +1,6 @@
/*
* libjingle
* Copyright 2013, Google Inc.
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@ -25,19 +25,15 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "RTCVideoRenderer.h"
@class RTCVideoTrack;
// RTCEAGLVideoView renders |videoTrack| onto itself using OpenGLES.
@interface RTCEAGLVideoView : UIView
// This class encapsulates VideoRenderIosView.
@interface APPRTCVideoView : UIView
// Property to get/set required video orientation.
@property(nonatomic, assign) UIInterfaceOrientation videoOrientation;
// Specifies whether the object represents a local or remote video stream.
@property(nonatomic, assign) BOOL isRemote;
// Sets up the underlying renderer and track objects.
- (void)renderVideoTrackInterface:(RTCVideoTrack*)track;
@property(nonatomic, strong) RTCVideoTrack* videoTrack;
@end

View File

@ -30,7 +30,24 @@
// RTCI420Frame is an Objective-C version of cricket::VideoFrame.
@interface RTCI420Frame : NSObject
// TODO(hughv): Implement this when iOS VP8 is ready.
@property(nonatomic, readonly) NSUInteger width;
@property(nonatomic, readonly) NSUInteger height;
@property(nonatomic, readonly) NSUInteger chromaWidth;
@property(nonatomic, readonly) NSUInteger chromaHeight;
@property(nonatomic, readonly) NSUInteger chromaSize;
// These can return NULL if the object is not backed by a buffer.
@property(nonatomic, readonly) const uint8_t* yPlane;
@property(nonatomic, readonly) const uint8_t* uPlane;
@property(nonatomic, readonly) const uint8_t* vPlane;
@property(nonatomic, readonly) NSInteger yPitch;
@property(nonatomic, readonly) NSInteger uPitch;
@property(nonatomic, readonly) NSInteger vPitch;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((
unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */
@end
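// A sketch of reading the luma plane row by row (hypothetical consumer;
// assumes the frame is backed by a buffer so |yPlane| is non-NULL):
//
//   const uint8_t* y = frame.yPlane;
//   if (y) {
//     for (NSUInteger row = 0; row < frame.height; ++row) {
//       const uint8_t* rowStart = y + row * frame.yPitch;
//       // Consume frame.width luma bytes starting at rowStart.
//     }
//   }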

View File

@ -29,13 +29,21 @@
#import "RTCTypes.h"
@class RTCMediaStreamTrack;
@protocol RTCMediaStreamTrackDelegate<NSObject>
- (void)mediaStreamTrackDidChange:(RTCMediaStreamTrack*)mediaStreamTrack;
@end
// RTCMediaStreamTrack implements the interface common to RTCAudioTrack and
// RTCVideoTrack. Do not create an instance of this class; rather, create one
// of the derived classes.
@interface RTCMediaStreamTrack : NSObject
@property(nonatomic, assign, readonly) NSString *kind;
@property(nonatomic, assign, readonly) NSString *label;
@property(nonatomic, readonly) NSString* kind;
@property(nonatomic, readonly) NSString* label;
@property(nonatomic, weak) id<RTCMediaStreamTrackDelegate> delegate;
- (BOOL)isEnabled;
- (BOOL)setEnabled:(BOOL)enabled;

View File

@ -26,27 +26,40 @@
*/
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif
@protocol RTCVideoRendererDelegate;
struct CGRect;
@class UIView;
@class RTCI420Frame;
@class RTCVideoRenderer;
// RTCVideoRendererDelegate is a protocol that an object must implement to
// receive rendering callbacks.
@protocol RTCVideoRendererDelegate<NSObject>
// The size of the frame.
- (void)renderer:(RTCVideoRenderer*)renderer didSetSize:(CGSize)size;
// The frame to be displayed.
- (void)renderer:(RTCVideoRenderer*)renderer
didReceiveFrame:(RTCI420Frame*)frame;
@end
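// A minimal delegate sketch (hypothetical class, for illustration only):
//
//   @implementation MyVideoRendererDelegate  // Conforms to the protocol.
//   - (void)renderer:(RTCVideoRenderer*)renderer didSetSize:(CGSize)size {
//     // React to a change in frame dimensions, e.g. relayout views.
//   }
//   - (void)renderer:(RTCVideoRenderer*)renderer
//       didReceiveFrame:(RTCI420Frame*)frame {
//     // Called on a non-main thread; hand the frame off for async drawing.
//   }
//   @end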
// Interface for rendering VideoFrames from a VideoTrack
@interface RTCVideoRenderer : NSObject
@property(nonatomic, strong) id<RTCVideoRendererDelegate> delegate;
- (id)initWithView:(UIView*)view;
@property(nonatomic, weak) id<RTCVideoRendererDelegate> delegate;
// Initialize the renderer. Requires a delegate which does the actual drawing
// of frames.
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
// Starts rendering.
- (void)start;
// Stops rendering. It can be restarted again using the 'start' method above.
- (void)stop;
#if TARGET_OS_IPHONE
// DEPRECATED. See https://code.google.com/p/webrtc/issues/detail?id=3341 for
// details.
- (instancetype)initWithView:(UIView*)view;
#endif
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation

View File

@ -30,6 +30,7 @@
#import "APPRTCAppDelegate.h"
#import "APPRTCViewController.h"
#import "RTCEAGLVideoView.h"
#import "RTCICECandidate.h"
#import "RTCICEServer.h"
#import "RTCMediaConstraints.h"
@ -43,13 +44,12 @@
#import "RTCVideoRenderer.h"
#import "RTCVideoCapturer.h"
#import "RTCVideoTrack.h"
#import "APPRTCVideoView.h"
@interface PCObserver : NSObject<RTCPeerConnectionDelegate>
- (id)initWithDelegate:(id<APPRTCSendMessage>)delegate;
@property(nonatomic, strong) APPRTCVideoView* videoView;
@property(nonatomic, strong) RTCEAGLVideoView* videoView;
@end
@ -89,8 +89,7 @@
NSAssert([stream.videoTracks count] <= 1,
@"Expected at most 1 video stream");
if ([stream.videoTracks count] != 0) {
[self.videoView
renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
self.videoView.videoTrack = stream.videoTracks[0];
}
});
}
@ -291,13 +290,12 @@
if (localVideoTrack) {
[lms addVideoTrack:localVideoTrack];
}
self.viewController.localVideoView.videoTrack = localVideoTrack;
#else
self.viewController.localVideoView.hidden = YES;
#endif
[self.viewController.localVideoView
renderVideoTrackInterface:localVideoTrack];
self.pcObserver.videoView = self.viewController.remoteVideoView;
[lms addAudioTrack:[self.peerConnectionFactory audioTrackWithID:@"ARDAMSa0"]];
[self.peerConnection addStream:lms constraints:constraints];
[self displayLogMessage:@"onICEServers - added local stream."];

View File

@ -1,82 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This APPRTCVideoView must be initialized and added to a View to get
* either the local or remote video stream rendered.
* It is a view itself and it encapsulates
* an object of VideoRenderIosView and UIActivityIndicatorView.
* Both of the views will get resized as per the frame of their parent.
*/
#import "APPRTCVideoView.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoTrack.h"
@interface APPRTCVideoView () {
RTCVideoTrack* _track;
RTCVideoRenderer* _renderer;
}
@property(nonatomic, weak) UIView* renderView;
@property(nonatomic, weak) UIActivityIndicatorView* activityView;
@end
@implementation APPRTCVideoView
@synthesize videoOrientation = _videoOrientation;
- (void)layoutSubviews {
[super layoutSubviews];
if (!_renderer) {
// Left-right (mirror) flip the remote view.
CGAffineTransform xform =
CGAffineTransformMakeScale(self.isRemote ? -1 : 1, 1);
// TODO(fischman): why is this rotate (vertical+horizontal flip) needed?!?
xform = CGAffineTransformRotate(xform, M_PI);
// TODO(fischman): ensure back-camera flip is correct in all orientations,
// when back-camera support is added.
[self setTransform:xform];
_renderer = [[RTCVideoRenderer alloc] initWithView:self];
}
}
- (void)renderVideoTrackInterface:(RTCVideoTrack*)videoTrack {
[_track removeRenderer:_renderer];
[_renderer stop];
_track = videoTrack;
if (_track) {
[_track addRenderer:_renderer];
[_renderer start];
}
}
@end

View File

@ -27,7 +27,7 @@
#import <UIKit/UIKit.h>
@class APPRTCVideoView;
@class RTCEAGLVideoView;
// The view controller that is displayed when AppRTCDemo is loaded.
@interface APPRTCViewController : UIViewController<UITextFieldDelegate>
@ -37,8 +37,8 @@
@property(weak, nonatomic) IBOutlet UITextView* logView;
@property(weak, nonatomic) IBOutlet UIView* blackView;
@property(nonatomic, strong) APPRTCVideoView* remoteVideoView;
@property(nonatomic, strong) APPRTCVideoView* localVideoView;
@property(nonatomic, strong) RTCEAGLVideoView* localVideoView;
@property(nonatomic, strong) RTCEAGLVideoView* remoteVideoView;
- (void)displayText:(NSString*)text;
- (void)resetUI;

View File

@ -27,12 +27,11 @@
#import "APPRTCViewController.h"
#import "APPRTCVideoView.h"
#import <AVFoundation/AVFoundation.h>
#import "RTCEAGLVideoView.h"
@interface APPRTCViewController ()
@property(nonatomic, assign) UIInterfaceOrientation statusBarOrientation;
@end
@implementation APPRTCViewController
@ -75,12 +74,10 @@
self.logView.text = nil;
self.blackView.hidden = YES;
[_remoteVideoView renderVideoTrackInterface:nil];
[_remoteVideoView removeFromSuperview];
[self.remoteVideoView removeFromSuperview];
self.remoteVideoView = nil;
[_localVideoView renderVideoTrackInterface:nil];
[_localVideoView removeFromSuperview];
[self.localVideoView removeFromSuperview];
self.localVideoView = nil;
}
@ -97,46 +94,29 @@ enum {
- (void)setupCaptureSession {
self.blackView.hidden = NO;
CGRect frame =
CGRectMake((self.blackView.bounds.size.width - kRemoteVideoWidth) / 2,
(self.blackView.bounds.size.height - kRemoteVideoHeight) / 2,
kRemoteVideoWidth,
kRemoteVideoHeight);
APPRTCVideoView* videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = TRUE;
CGSize videoSize =
CGSizeMake(kRemoteVideoWidth, kRemoteVideoHeight);
CGRect remoteVideoFrame =
AVMakeRectWithAspectRatioInsideRect(videoSize,
self.blackView.bounds);
CGRect localVideoFrame = remoteVideoFrame;
// TODO(tkchin): use video dimensions from incoming video stream
// and handle rotation.
localVideoFrame.size.width = remoteVideoFrame.size.height / 4;
localVideoFrame.size.height = remoteVideoFrame.size.width / 4;
localVideoFrame.origin.x = CGRectGetMaxX(remoteVideoFrame)
- localVideoFrame.size.width - kLocalViewPadding;
localVideoFrame.origin.y = CGRectGetMaxY(remoteVideoFrame)
- localVideoFrame.size.height - kLocalViewPadding;
[self.blackView addSubview:videoView];
videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin |
UIViewAutoresizingFlexibleRightMargin |
UIViewAutoresizingFlexibleBottomMargin |
UIViewAutoresizingFlexibleTopMargin;
videoView.translatesAutoresizingMaskIntoConstraints = YES;
_remoteVideoView = videoView;
self.remoteVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:remoteVideoFrame];
[self.blackView addSubview:self.remoteVideoView];
self.remoteVideoView.transform = CGAffineTransformMakeScale(-1, 1);
CGSize screenSize = [[UIScreen mainScreen] bounds].size;
CGFloat localVideoViewWidth =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.width / 4
: screenSize.height / 4;
CGFloat localVideoViewHeight =
UIInterfaceOrientationIsPortrait(self.statusBarOrientation)
? screenSize.height / 4
: screenSize.width / 4;
frame = CGRectMake(self.blackView.bounds.size.width - localVideoViewWidth -
kLocalViewPadding,
kLocalViewPadding,
localVideoViewWidth,
localVideoViewHeight);
videoView = [[APPRTCVideoView alloc] initWithFrame:frame];
videoView.isRemote = FALSE;
[self.blackView addSubview:videoView];
videoView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin |
UIViewAutoresizingFlexibleBottomMargin |
UIViewAutoresizingFlexibleHeight |
UIViewAutoresizingFlexibleWidth;
videoView.translatesAutoresizingMaskIntoConstraints = YES;
_localVideoView = videoView;
self.localVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:localVideoFrame];
[self.blackView addSubview:self.localVideoView];
}
#pragma mark - UITextFieldDelegate

View File

@ -70,8 +70,6 @@
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

View File

@ -173,6 +173,7 @@
'app/webrtc/objc/RTCDataChannel.mm',
'app/webrtc/objc/RTCEnumConverter.h',
'app/webrtc/objc/RTCEnumConverter.mm',
'app/webrtc/objc/RTCI420Frame+Internal.h',
'app/webrtc/objc/RTCI420Frame.mm',
'app/webrtc/objc/RTCICECandidate+Internal.h',
'app/webrtc/objc/RTCICECandidate.mm',
@ -227,7 +228,6 @@
'app/webrtc/objc/public/RTCTypes.h',
'app/webrtc/objc/public/RTCVideoCapturer.h',
'app/webrtc/objc/public/RTCVideoRenderer.h',
'app/webrtc/objc/public/RTCVideoRendererDelegate.h',
'app/webrtc/objc/public/RTCVideoSource.h',
'app/webrtc/objc/public/RTCVideoTrack.h',
],
@ -243,7 +243,6 @@
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'-lstdc++',
],
},
@ -254,6 +253,23 @@
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
},
'conditions': [
['OS=="ios"', {
'sources': [
'app/webrtc/objc/RTCEAGLVideoRenderer.mm',
'app/webrtc/objc/RTCEAGLVideoView+Internal.h',
'app/webrtc/objc/RTCEAGLVideoView.m',
'app/webrtc/objc/public/RTCEAGLVideoRenderer.h',
'app/webrtc/objc/public/RTCEAGLVideoView.h',
],
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework CoreGraphics',
'-framework GLKit',
],
},
},
}],
['OS=="mac"', {
'xcode_settings': {
# Need to build against 10.7 framework for full ARC support
@ -680,7 +696,6 @@
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework Foundation',
'-framework IOKit',
'-framework Security',
'-framework SystemConfiguration',
'-framework UIKit',

View File

@ -248,8 +248,6 @@
'examples/ios/AppRTCDemo/APPRTCAppDelegate.m',
'examples/ios/AppRTCDemo/APPRTCViewController.h',
'examples/ios/AppRTCDemo/APPRTCViewController.m',
'examples/ios/AppRTCDemo/APPRTCVideoView.h',
'examples/ios/AppRTCDemo/APPRTCVideoView.m',
'examples/ios/AppRTCDemo/AppRTCDemo-Prefix.pch',
'examples/ios/AppRTCDemo/GAEChannelClient.h',
'examples/ios/AppRTCDemo/GAEChannelClient.m',
@ -258,11 +256,6 @@
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
'INFOPLIST_FILE': 'examples/ios/AppRTCDemo/Info.plist',
'OTHER_LDFLAGS': [
'-framework CoreGraphics',
'-framework Foundation',
'-framework UIKit',
],
},
}, # target AppRTCDemo
], # targets