Remove deprecated RTCVideoRenderer constructor.

Removes -[RTCVideoRenderer initWithView:]. Also fixes a potential issue where we could hold on to a video frame longer than the lifetime of its associated track.
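
The second fix follows the pattern sketched below: when the rendered track is swapped out, the view drops its cached frame so the old track's last frame is not retained (or drawn) past the track's lifetime. Property and ivar names such as i420Frame and _videoRenderer mirror the views touched in this change; the setter shown is an illustrative sketch, not the exact implementation.

  - (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
    if (_videoTrack == videoTrack) {
      return;
    }
    // Detach from the old track and release its last frame before switching.
    [_videoTrack removeRenderer:_videoRenderer];
    self.i420Frame = nil;
    _videoTrack = videoTrack;
    [_videoTrack addRenderer:_videoRenderer];
  }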

BUG=3341
R=glaznev@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/16099004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7032 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: tkchin@webrtc.org
Date: 2014-09-02 20:50:00 +00:00
parent 34a6764981
commit 90750482fa
6 changed files with 31 additions and 57 deletions

View File

@@ -173,6 +173,7 @@
return;
}
[_videoTrack removeRenderer:_videoRenderer];
self.i420Frame = nil;
_videoTrack = videoTrack;
[_videoTrack addRenderer:_videoRenderer];
// TODO(tkchin): potentially handle changes in track state - e.g. render
@@ -191,11 +192,9 @@
// This method is called when the GLKView's content is dirty and needs to be
// redrawn. This occurs on main thread.
- (void)glkView:(GLKView*)view drawInRect:(CGRect)rect {
if (self.i420Frame) {
// The renderer will draw the frame to the framebuffer corresponding to the
// one used by |view|.
[_glRenderer drawFrame:self.i420Frame];
}
// The renderer will draw the frame to the framebuffer corresponding to the
// one used by |view|.
[_glRenderer drawFrame:self.i420Frame];
}
#pragma mark - Private

View File

@@ -116,6 +116,9 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
if (_videoTrack) {
[_videoTrack removeRenderer:_videoRenderer];
CVDisplayLinkStop(_displayLink);
// Clear contents.
self.i420Frame = nil;
[self drawFrame];
}
_videoTrack = videoTrack;
if (_videoTrack) {
@@ -144,7 +147,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
- (void)drawFrame {
RTCI420Frame* i420Frame = self.i420Frame;
if (i420Frame && self.glRenderer.lastDrawnFrame != i420Frame) {
if (self.glRenderer.lastDrawnFrame != i420Frame) {
// This method may be called from CVDisplayLink callback which isn't on the
// main thread so we have to lock the GL context before drawing.
CGLLockContext([[self openGLContext] CGLContextObj]);

View File

@@ -205,16 +205,18 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
return NO;
}
[self ensureGLContext];
if (![self updateTextureSizesForFrame:frame] ||
![self updateTextureDataForFrame:frame]) {
return NO;
}
glClear(GL_COLOR_BUFFER_BIT);
if (frame) {
if (![self updateTextureSizesForFrame:frame] ||
![self updateTextureDataForFrame:frame]) {
return NO;
}
#if !TARGET_OS_IPHONE
glBindVertexArray(_vertexArray);
glBindVertexArray(_vertexArray);
#endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
#if !TARGET_OS_IPHONE
[_context flushBuffer];
#endif
@@ -238,7 +240,6 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
}
glUseProgram(_program);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glClearColor(0, 0, 0, 1);
_isInitialized = YES;
}
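
With the renderer change above, drawFrame: clears the framebuffer and only uploads and draws textures when a frame is actually present, so passing a nil frame blanks the view. The views earlier in this change rely on that by dropping their cached frame and forcing one more draw; roughly (property and method names mirror those views, the surrounding class is illustrative):

  self.i420Frame = nil;  // forget the last frame from the detached track
  [self drawFrame];      // renderer sees a nil frame and clears the framebuffer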

View File

@@ -30,10 +30,6 @@
#endif
#import "RTCVideoRenderer+Internal.h"
#if TARGET_OS_IPHONE
#import "RTCEAGLVideoView+Internal.h"
#endif
#import "RTCI420Frame+Internal.h"
namespace webrtc {
@@ -62,9 +58,6 @@ class RTCVideoRendererAdapter : public VideoRendererInterface {
@implementation RTCVideoRenderer {
rtc::scoped_ptr<webrtc::RTCVideoRendererAdapter> _adapter;
#if TARGET_OS_IPHONE
RTCEAGLVideoView* _videoView;
#endif
}
- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
@@ -75,22 +68,6 @@ class RTCVideoRendererAdapter : public VideoRendererInterface {
return self;
}
#if TARGET_OS_IPHONE
// TODO(tkchin): remove shim for deprecated method.
- (instancetype)initWithView:(UIView*)view {
if (self = [super init]) {
_videoView = [[RTCEAGLVideoView alloc] initWithFrame:view.bounds];
_videoView.autoresizingMask =
UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
_videoView.translatesAutoresizingMaskIntoConstraints = YES;
[view addSubview:_videoView];
self.delegate = _videoView;
_adapter.reset(new webrtc::RTCVideoRendererAdapter(self));
}
return self;
}
#endif
@end
@implementation RTCVideoRenderer (Internal)

View File

@@ -55,12 +55,6 @@
// of frames.
- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
#if TARGET_OS_IPHONE
// DEPRECATED. See https://code.google.com/p/webrtc/issues/detail?id=3341 for
// details.
- (instancetype)initWithView:(UIView*)view;
#endif
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((

View File

@@ -63,6 +63,18 @@ static CGFloat const kLocalViewPadding = 20;
- (void)viewDidLoad {
[super viewDidLoad];
self.remoteVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.remoteVideoView.delegate = self;
self.remoteVideoView.transform = CGAffineTransformMakeScale(-1, 1);
[self.blackView addSubview:self.remoteVideoView];
self.localVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.localVideoView.delegate = self;
[self.blackView addSubview:self.localVideoView];
self.statusBarOrientation =
[UIApplication sharedApplication].statusBarOrientation;
self.roomInput.delegate = self;
@@ -181,25 +193,13 @@ static CGFloat const kLocalViewPadding = 20;
self.instructionsView.hidden = NO;
self.logView.hidden = YES;
self.logView.text = nil;
self.localVideoView.videoTrack = nil;
self.remoteVideoView.videoTrack = nil;
self.blackView.hidden = YES;
[self.remoteVideoView removeFromSuperview];
self.remoteVideoView = nil;
[self.localVideoView removeFromSuperview];
self.localVideoView = nil;
}
- (void)setupCaptureSession {
self.blackView.hidden = NO;
self.remoteVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.remoteVideoView.delegate = self;
self.remoteVideoView.transform = CGAffineTransformMakeScale(-1, 1);
[self.blackView addSubview:self.remoteVideoView];
self.localVideoView =
[[RTCEAGLVideoView alloc] initWithFrame:self.blackView.bounds];
self.localVideoView.delegate = self;
[self.blackView addSubview:self.localVideoView];
[self updateVideoViewLayout];
}
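
For apps that used the removed -[RTCVideoRenderer initWithView:] shim, the replacement is what the demo view controller above now does: create an RTCEAGLVideoView directly, add it to the view hierarchy, and hand it the track. A minimal sketch (containerView and remoteVideoTrack are placeholder names, not identifiers from this change):

  RTCEAGLVideoView* videoView =
      [[RTCEAGLVideoView alloc] initWithFrame:containerView.bounds];
  videoView.autoresizingMask =
      UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
  [containerView addSubview:videoView];
  videoView.videoTrack = remoteVideoTrack;  // detach with videoView.videoTrack = nil when done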