The video render module for iOS.

BUG=2105, 2028
R=fischman@webrtc.org, mallinath@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2064004

Patch from SeungJae Lee <sjlee@webrtc.org>.

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4734 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
fischman@webrtc.org 2013-09-12 17:39:53 +00:00
parent e509f943ed
commit 36cf4d2309
13 changed files with 1422 additions and 23 deletions

View File

@ -30,7 +30,7 @@ enum VideoRenderType
kRenderWindows = 1, // Windows
kRenderCocoa = 2, // Mac
kRenderCarbon = 3,
kRenderiPhone = 4, // iPhone
kRenderiOS = 4, // iPhone
kRenderAndroid = 5, // Android
kRenderX11 = 6, // Linux
kRenderDefault

View File

@ -0,0 +1,64 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
#include <OpenGLES/ES2/glext.h>
#include "webrtc/modules/video_render/include/video_render_defines.h"
/*
* This OpenGles20 is the class of renderer for I420VideoFrame into a GLES 2.0
* windows used in the VideoRenderIosView class.
*/
namespace webrtc {
class OpenGles20 {
 public:
  OpenGles20();
  ~OpenGles20();

  // Compiles/links the shader program, binds the vertex attributes and the
  // Y/U/V texture samplers, and sets the viewport. Must be called with a
  // current GL context before Render(). Returns false on failure.
  bool Setup(int32_t width, int32_t height);

  // Uploads |frame| into the three luminance textures and draws the quad.
  bool Render(const I420VideoFrame& frame);

  // SetCoordinates
  // Sets the coordinates where the stream shall be rendered.
  // Values must be between 0 and 1.
  bool SetCoordinates(const float z_order,
                      const float left,
                      const float top,
                      const float right,
                      const float bottom);

 private:
  // Compile and load the vertex and fragment shaders defined at the top of
  // open_gles20.mm
  GLuint LoadShader(GLenum shader_type, const char* shader_source);

  GLuint CreateProgram(const char* vertex_source, const char* fragment_source);

  // Initialize the textures by the frame width and height
  void SetupTextures(const I420VideoFrame& frame);

  // Update the textures by the YUV data from the frame
  void UpdateTextures(const I420VideoFrame& frame);

  GLuint texture_ids_[3];  // Texture id of Y,U and V texture.
  GLuint program_;
  // Dimensions the textures were last allocated for; -1 until first frame.
  GLsizei texture_width_;
  GLsizei texture_height_;

  // Four vertices, five floats each: x, y, z position plus u, v texture
  // coordinates.
  GLfloat vertices_[20];
  static const char indices_[];
  static const char vertext_shader_[];  // Name is a historic typo ("vertext").
  static const char fragment_shader_[];
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_

View File

@ -0,0 +1,325 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This files is mostly copied from
// webrtc/modules/video_render/android/video_render_opengles20.h
// TODO(sjlee): unify this copy with the android one.
#include "webrtc/modules/video_render/ios/open_gles20.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
// Element indices describing the two triangles of the render quad
// (drawn with GL_UNSIGNED_BYTE in Render()).
const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};

// Vertex shader: passes the position straight through and forwards the
// texture coordinate to the fragment shader.
const char OpenGles20::vertext_shader_[] = {
    "attribute vec4 aPosition;\n"
    "attribute vec2 aTextureCoord;\n"
    "varying vec2 vTextureCoord;\n"
    "void main() {\n"
    "  gl_Position = aPosition;\n"
    "  vTextureCoord = aTextureCoord;\n"
    "}\n"};

// The fragment shader.
// Samples the Y, U and V planes (bound as separate luminance textures on
// units 0-2) and converts YUV to RGB, emitting an RGBA pixel with full alpha.
const char OpenGles20::fragment_shader_[] = {
    "precision mediump float;\n"
    "uniform sampler2D Ytex;\n"
    "uniform sampler2D Utex,Vtex;\n"
    "varying vec2 vTextureCoord;\n"
    "void main(void) {\n"
    "  float nx,ny,r,g,b,y,u,v;\n"
    "  mediump vec4 txl,ux,vx;"
    "  nx=vTextureCoord[0];\n"
    "  ny=vTextureCoord[1];\n"
    "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
    "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
    "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"
    "  y=1.1643*(y-0.0625);\n"
    "  u=u-0.5;\n"
    "  v=v-0.5;\n"
    "  r=y+1.5958*v;\n"
    "  g=y-0.39173*u-0.81290*v;\n"
    "  b=y+2.017*u;\n"
    "  gl_FragColor=vec4(r,g,b,1.0);\n"
    "}\n"};
OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) {
  // No GL objects exist until Setup()/SetupTextures() run.
  program_ = 0;
  for (int i = 0; i < 3; ++i) {
    texture_ids_[i] = 0;
  }
  // Default quad covering the whole viewport. Texture V runs top-down while
  // clip-space Y runs bottom-up, hence the mirrored V column.
  static const GLfloat kDefaultVertices[20] = {
      // X, Y, Z, U, V
      -1, -1, 0, 0, 1,   // bottom left
      1,  -1, 0, 1, 1,   // bottom right
      1,  1,  0, 1, 0,   // top right
      -1, 1,  0, 0, 0};  // top left
  memcpy(vertices_, kDefaultVertices, sizeof(vertices_));
}
OpenGles20::~OpenGles20() {
  // A zero program means Setup() never succeeded, so no GL objects were
  // created and there is nothing to release.
  if (!program_) {
    return;
  }
  glDeleteTextures(3, texture_ids_);
  glDeleteProgram(program_);
}
// Builds the shader program, wires up the static vertex attribute arrays and
// the three texture-unit uniforms, and sets the GL viewport to
// width x height. Returns false when the program could not be created.
bool OpenGles20::Setup(int32_t width, int32_t height) {
  program_ = CreateProgram(vertext_shader_, fragment_shader_);
  if (!program_) {
    return false;
  }
  // NOTE(review): the attribute locations are not checked against -1; a
  // mismatch with the shader source would go unnoticed here.
  int position_handle = glGetAttribLocation(program_, "aPosition");
  int texture_handle = glGetAttribLocation(program_, "aTextureCoord");
  // set the vertices array in the shader
  // vertices_ contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(
      position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_);
  glEnableVertexAttribArray(position_handle);
  // set the texture coordinate array in the shader
  // vertices_ contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(
      texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]);
  glEnableVertexAttribArray(texture_handle);
  glUseProgram(program_);
  int i = glGetUniformLocation(program_, "Ytex");
  glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
  i = glGetUniformLocation(program_, "Utex");
  glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
  i = glGetUniformLocation(program_, "Vtex");
  glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
  glViewport(0, 0, width, height);
  return true;
}
// Maps the normalized [0,1] destination rectangle into GL clip space and
// rewrites the x/y/z components of the quad's four vertices. Returns false
// if any edge lies outside [0,1].
bool OpenGles20::SetCoordinates(const float z_order,
                                const float left,
                                const float top,
                                const float right,
                                const float bottom) {
  const bool out_of_range = left < 0 || left > 1 || right < 0 || right > 1 ||
                            top < 0 || top > 1 || bottom < 0 || bottom > 1;
  if (out_of_range) {
    return false;
  }
  // [0,1] -> [-1,1]; the vertical axis is mirrored in the mapping.
  const GLfloat gl_left = (left * 2) - 1;
  const GLfloat gl_right = (right * 2) - 1;
  const GLfloat gl_top = -1 * (2 * top) + 1;
  const GLfloat gl_bottom = -1 * (2 * bottom) + 1;
  // Each vertex occupies 5 floats (x, y, z, u, v); only x/y/z change, in the
  // order bottom-left, bottom-right, top-right, top-left.
  vertices_[0] = gl_left;
  vertices_[1] = gl_bottom;
  vertices_[2] = z_order;
  vertices_[5] = gl_right;
  vertices_[6] = gl_bottom;
  vertices_[7] = z_order;
  vertices_[10] = gl_right;
  vertices_[11] = gl_top;
  vertices_[12] = z_order;
  vertices_[15] = gl_left;
  vertices_[16] = gl_top;
  vertices_[17] = z_order;
  return true;
}
// Draws |frame| with the current program. Textures are (re)allocated
// whenever the incoming frame size differs from the cached one.
bool OpenGles20::Render(const I420VideoFrame& frame) {
  const GLsizei frame_width = frame.width();
  const GLsizei frame_height = frame.height();
  if (frame_width != texture_width_ || frame_height != texture_height_) {
    SetupTextures(frame);
  }
  UpdateTextures(frame);
  // Two triangles, six byte-sized indices from indices_.
  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_);
  return true;
}
// Compiles |shader_source| as a shader of |shader_type|. Returns the shader
// object on success and 0 on failure (logging the compiler output).
GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) {
  GLuint shader = glCreateShader(shader_type);
  if (!shader) {
    return 0;
  }
  glShaderSource(shader, 1, &shader_source, NULL);
  glCompileShader(shader);
  GLint compile_status = 0;
  glGetShaderiv(shader, GL_COMPILE_STATUS, &compile_status);
  if (compile_status) {
    return shader;
  }
  // Compilation failed: log the info log when one is available, then destroy
  // the shader object so 0 signals the failure to the caller.
  GLint log_length = 0;
  glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
  if (log_length) {
    char* log = (char*)malloc(log_length);
    glGetShaderInfoLog(shader, log_length, NULL, log);
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 0,
                 "%s: Could not compile shader %d: %s",
                 __FUNCTION__,
                 shader_type,
                 log);
    free(log);
  }
  glDeleteShader(shader);
  return 0;
}
// Compiles both shaders and links them into a program.
// Returns the program object, or 0 on failure.
//
// Fixes: the old code returned -1 from this GLuint-returning function, which
// wrapped to 0xFFFFFFFF; callers (Setup) test `if (!program_)`, so a shader
// failure was silently treated as success. It also leaked the vertex shader
// when the fragment shader failed to compile.
GLuint OpenGles20::CreateProgram(const char* vertex_source,
                                 const char* fragment_source) {
  GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source);
  if (!vertex_shader) {
    return 0;  // 0 is GL's "no object" value; callers test for it.
  }
  GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source);
  if (!fragment_shader) {
    glDeleteShader(vertex_shader);  // Don't leak the vertex shader.
    return 0;
  }
  GLuint program = glCreateProgram();
  if (program) {
    glAttachShader(program, vertex_shader);
    glAttachShader(program, fragment_shader);
    glLinkProgram(program);
    GLint link_status = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &link_status);
    if (link_status != GL_TRUE) {
      // Linking failed: log the linker output and destroy the program.
      GLint info_len = 0;
      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len);
      if (info_len) {
        char* buf = (char*)malloc(info_len);
        glGetProgramInfoLog(program, info_len, NULL, buf);
        WEBRTC_TRACE(kTraceError,
                     kTraceVideoRenderer,
                     0,
                     "%s: Could not link program: %s",
                     __FUNCTION__,
                     buf);
        free(buf);
      }
      glDeleteProgram(program);
      program = 0;
    }
  }
  // The shaders are owned by the program after linking; release our refs.
  if (vertex_shader) {
    glDeleteShader(vertex_shader);
  }
  if (fragment_shader) {
    glDeleteShader(fragment_shader);
  }
  return program;
}
// Binds texture |id| on texture unit |name| and allocates an uninitialized
// single-channel (luminance) width x height image with edge clamping.
static void InitializeTexture(int name, int id, int width, int height) {
  glActiveTexture(name);
  glBindTexture(GL_TEXTURE_2D, id);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  // Allocate storage only (data == NULL); pixels are uploaded later through
  // glTexSubImage2D.
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_LUMINANCE,
               width,
               height,
               0,
               GL_LUMINANCE,
               GL_UNSIGNED_BYTE,
               NULL);
}
// (Re)allocates the three plane textures for the dimensions of |frame| and
// caches those dimensions.
void OpenGles20::SetupTextures(const I420VideoFrame& frame) {
  const GLsizei w = frame.width();
  const GLsizei h = frame.height();
  // Lazily create the texture objects on the first frame.
  if (texture_ids_[0] == 0) {
    glGenTextures(3, texture_ids_);
  }
  // Y is full resolution; U and V are subsampled by two in each dimension.
  InitializeTexture(GL_TEXTURE0, texture_ids_[0], w, h);
  InitializeTexture(GL_TEXTURE1, texture_ids_[1], w / 2, h / 2);
  InitializeTexture(GL_TEXTURE2, texture_ids_[2], w / 2, h / 2);
  texture_width_ = w;
  texture_height_ = h;
}
// Uploads a plane of pixel data, accounting for stride != width*bpp.
// |width|/|height| are the texture dimensions, |stride| the distance in
// bytes between the starts of consecutive rows of |plane|.
static void GlTexSubImage2D(GLsizei width,
                            GLsizei height,
                            int stride,
                            const uint8_t* plane) {
  if (stride == width) {
    // Yay!  We can upload the entire plane in a single GL call.
    glTexSubImage2D(GL_TEXTURE_2D,
                    0,
                    0,
                    0,
                    width,
                    height,
                    GL_LUMINANCE,
                    GL_UNSIGNED_BYTE,
                    static_cast<const GLvoid*>(plane));
  } else {
    // Boo!  Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
    // have GL_EXT_unpack_subimage we have to upload a row at a time.  Ick.
    for (int row = 0; row < height; ++row) {
      glTexSubImage2D(GL_TEXTURE_2D,
                      0,
                      0,
                      row,
                      width,
                      1,
                      GL_LUMINANCE,
                      GL_UNSIGNED_BYTE,
                      static_cast<const GLvoid*>(plane + (row * stride)));
    }
  }
}
// Uploads the Y, U and V planes of |frame| into texture units 0, 1 and 2.
void OpenGles20::UpdateTextures(const I420VideoFrame& frame) {
  const GLsizei w = frame.width();
  const GLsizei h = frame.height();
  const GLsizei half_w = w / 2;
  const GLsizei half_h = h / 2;
  // Full-resolution Y plane on unit 0.
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
  GlTexSubImage2D(w, h, frame.stride(kYPlane), frame.buffer(kYPlane));
  // Half-resolution chroma planes on units 1 and 2.
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
  GlTexSubImage2D(half_w, half_h, frame.stride(kUPlane), frame.buffer(kUPlane));
  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
  GlTexSubImage2D(half_w, half_h, frame.stride(kVPlane), frame.buffer(kVPlane));
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
namespace webrtc {
class VideoRenderIosGles20;
// Receives decoded frames for one stream (via VideoRenderCallback) and draws
// the latest one into a VideoRenderIosView on demand.
class VideoRenderIosChannel : public VideoRenderCallback {
 public:
  // Does not take ownership of |view|.
  explicit VideoRenderIosChannel(VideoRenderIosView* view);
  virtual ~VideoRenderIosChannel();

  // Implementation of VideoRenderCallback. Copies |video_frame| and marks
  // the buffer as updated.
  virtual int32_t RenderFrame(const uint32_t stream_id,
                              I420VideoFrame& video_frame) OVERRIDE;

  // Forwards the normalized destination rectangle and depth to the view.
  // Returns 0 on success, -1 on failure.
  int SetStreamSettings(const float z_order,
                        const float left,
                        const float top,
                        const float right,
                        const float bottom);

  // True when a frame arrived since the last RenderOffScreenBuffer() call.
  bool IsUpdated();

  // Draws the most recently delivered frame into the view.
  bool RenderOffScreenBuffer();

 private:
  VideoRenderIosView* view_;      // Not owned.
  I420VideoFrame* current_frame_; // Owned; deleted in the destructor.
  bool buffer_is_updated_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_

View File

@ -0,0 +1,59 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
using namespace webrtc;
// Wraps a render target view. Frames delivered through RenderFrame() are
// copied into current_frame_ and later drawn by RenderOffScreenBuffer() on
// the render thread.
VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
    : view_(view),
      current_frame_(new I420VideoFrame()),
      buffer_is_updated_(false) {}

VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }

// VideoRenderCallback implementation: keeps a copy of |video_frame| for the
// render thread. Always returns 0.
int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
                                           I420VideoFrame& video_frame) {
  // The render timestamp is zeroed before the copy.
  video_frame.set_render_time_ms(0);
  current_frame_->CopyFrame(video_frame);
  buffer_is_updated_ = true;
  return 0;
}

// Draws the most recently delivered frame into the view. Returns false when
// the view rejects the frame; the updated flag is only cleared on success.
bool VideoRenderIosChannel::RenderOffScreenBuffer() {
  if (![view_ renderFrame:current_frame_]) {
    return false;
  }
  buffer_is_updated_ = false;
  return true;
}

// True when a new frame arrived since the last successful
// RenderOffScreenBuffer() call.
bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; }
// Forwards the normalized stream rectangle and depth to the view.
// Returns 0 on success, -1 if the view rejects the coordinates.
int VideoRenderIosChannel::SetStreamSettings(const float z_order,
                                             const float left,
                                             const float top,
                                             const float right,
                                             const float bottom) {
  // NOTE(review): |top| and |bottom| are swapped when forwarded to the view,
  // presumably to convert between UIKit's top-left origin and GL's
  // bottom-left origin -- confirm against OpenGles20::SetCoordinates()
  // before "fixing" this.
  if (![view_ setCoordinatesForZOrder:z_order
                                 Left:left
                                  Top:bottom
                                Right:right
                               Bottom:top]) {
    return -1;
  }
  return 0;
}

View File

@ -0,0 +1,88 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
#include <list>
#include <map>
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
// Owns the render channels for one render module instance and drives a
// dedicated screen-update thread that paints them into a VideoRenderIosView
// through OpenGL ES 2.0.
class VideoRenderIosGles20 {
 public:
  // Does not take ownership of |view|; a default view is created in Init()
  // when |view| is NULL.
  VideoRenderIosGles20(VideoRenderIosView* view,
                       bool full_screen,
                       int render_id);
  virtual ~VideoRenderIosGles20();

  // Creates the GL context on the view and starts the render thread/timer.
  int Init();

  // Creates a channel (owned by this object) rendered at |z_order| into the
  // given normalized rectangle. Returns NULL on failure or duplicate id.
  VideoRenderIosChannel* CreateEaglChannel(int channel,
                                           int z_order,
                                           float left,
                                           float top,
                                           float right,
                                           float bottom);
  int DeleteEaglChannel(int channel);
  bool HasChannel(int channel);

  // One iteration of the render loop; runs on the screen-update thread.
  bool ScreenUpdateProcess();

  int GetWindowRect(Rect& rect);  // NOLINT
  // NOTE(review): |uint| is a non-standard (POSIX) typedef; plain
  // unsigned int would be more portable.
  int GetScreenResolution(uint& screen_width, uint& screen_height);  // NOLINT
  // |stream_id| is interpreted as an index into the channel map, not a
  // channel id.
  int SetStreamCropping(const uint stream_id,
                        const float left,
                        const float top,
                        const float right,
                        const float bottom);

  int ChangeWindow(void* new_window);
  int ChangeUniqueID(int unique_id);
  int StartRender();
  int StopRender();

 protected:
  // Thread trampoline: forwards to ScreenUpdateProcess() on |obj|.
  static bool ScreenUpdateThreadProc(void* obj);

 private:
  bool RenderOffScreenBuffers();
  int SwapAndDisplayBuffers();

 private:
  scoped_ptr<CriticalSectionWrapper> gles_crit_sec_;  // Guards state below.
  EventWrapper* screen_update_event_;    // Paces the render loop.
  ThreadWrapper* screen_update_thread_;  // NULL once teardown starts.

  VideoRenderIosView* view_;  // Render target; not owned.
  Rect window_rect_;
  int window_width_;
  int window_height_;
  bool is_full_screen_;
  GLint backing_width_;
  GLint backing_height_;
  GLuint view_renderbuffer_;
  GLuint view_framebuffer_;
  GLuint depth_renderbuffer_;
  std::map<int, VideoRenderIosChannel*> agl_channels_;  // Owned channels.
  // z-order -> channel id; kept in sync with agl_channels_.
  std::multimap<int, int> z_order_to_channel_;
  EAGLContext* gles_context_;
  bool is_rendering_;
  int id_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_

View File

@ -0,0 +1,299 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
using namespace webrtc;
// Creates the render thread and pacing event (started later in Init()) and
// captures the initial window rectangle from |view|.
//
// Fixes: backing_width_, backing_height_, view_renderbuffer_,
// view_framebuffer_ and depth_renderbuffer_ were previously left
// uninitialized, so reading them before GL setup was undefined behavior.
VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
                                           bool full_screen,
                                           int render_id)
    : gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
      screen_update_event_(0),
      screen_update_thread_(0),
      view_(view),
      window_rect_(),
      window_width_(0),
      window_height_(0),
      is_full_screen_(full_screen),
      backing_width_(0),
      backing_height_(0),
      view_renderbuffer_(0),
      view_framebuffer_(0),
      depth_renderbuffer_(0),
      agl_channels_(),
      z_order_to_channel_(),
      gles_context_([view context]),
      is_rendering_(true),
      id_(render_id) {
  screen_update_thread_ = ThreadWrapper::CreateThread(
      ScreenUpdateThreadProc, this, kRealtimePriority);
  screen_update_event_ = EventWrapper::Create();
  GetWindowRect(window_rect_);
}
// Stops the render thread, then destroys all channels.
//
// Fixes: screen_update_event_ was only deleted inside the thread branch, so
// it leaked whenever thread creation had failed; also replaces the
// Objective-C FALSE macro with C++ false and simplifies the erase loops.
VideoRenderIosGles20::~VideoRenderIosGles20() {
  // Signal event to exit thread, then delete it.
  ThreadWrapper* thread_wrapper = screen_update_thread_;
  screen_update_thread_ = NULL;  // Tells ScreenUpdateProcess() to exit.
  if (thread_wrapper) {
    thread_wrapper->SetNotAlive();
    screen_update_event_->Set();  // Wake the thread so it can observe exit.
    screen_update_event_->StopTimer();
    if (thread_wrapper->Stop()) {
      delete thread_wrapper;
    }
    is_rendering_ = false;
  }
  // Delete the event even when the thread was never created.
  delete screen_update_event_;
  screen_update_event_ = NULL;
  // Delete all channels (owned by this object).
  for (std::map<int, VideoRenderIosChannel*>::iterator it =
           agl_channels_.begin();
       it != agl_channels_.end();
       ++it) {
    delete it->second;
  }
  agl_channels_.clear();
  // Clean the zOrder map.
  z_order_to_channel_.clear();
}
// Creates the GL context on the view (allocating a default view if none was
// supplied) and starts the render thread plus its pacing timer.
// Returns 0 on success, -1 on failure.
int VideoRenderIosGles20::Init() {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  if (!screen_update_thread_) {
    // Thread creation failed in the constructor (or teardown started).
    return -1;
  }
  if (!view_) {
    view_ = [[VideoRenderIosView alloc] init];
  }
  if (![view_ createContext]) {
    return -1;
  }
  // NOTE(review): the Start() return value is ignored; |thread_id| is purely
  // an output parameter.
  unsigned int thread_id;
  screen_update_thread_->Start(thread_id);
  // Start the event triggering the render process
  unsigned int monitor_freq = 60;
  screen_update_event_->StartTimer(true, 1000 / monitor_freq);
  // Cache the window size from the rect captured in the constructor.
  window_width_ = window_rect_.right - window_rect_.left;
  window_height_ = window_rect_.bottom - window_rect_.top;
  return 0;
}
// Creates and registers a channel rendered at |z_order| into the given
// normalized rectangle. Returns the channel (owned by this object), or NULL
// when |channel| already exists or the coordinates are invalid.
//
// Fixes: the newly allocated channel was leaked when SetStreamSettings()
// failed.
VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
                                                               int z_order,
                                                               float left,
                                                               float top,
                                                               float right,
                                                               float bottom) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  if (HasChannel(channel)) {
    return NULL;
  }
  VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
  if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
      -1) {
    delete new_eagl_channel;  // Don't leak on invalid coordinates.
    return NULL;
  }
  agl_channels_[channel] = new_eagl_channel;
  z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
  return new_eagl_channel;
}
// Destroys the channel registered under |channel| and removes its z-order
// entry. Returns 0 on success, -1 when the channel does not exist.
int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  std::map<int, VideoRenderIosChannel*>::iterator channel_it =
      agl_channels_.find(channel);
  if (channel_it == agl_channels_.end()) {
    return -1;
  }
  delete channel_it->second;
  agl_channels_.erase(channel_it);
  // Drop the (single) z-order entry that references this channel.
  for (std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
       z_it != z_order_to_channel_.end();
       ++z_it) {
    if (z_it->second == channel) {
      z_order_to_channel_.erase(z_it);
      break;
    }
  }
  return 0;
}
// True when a channel is registered under |channel|.
bool VideoRenderIosGles20::HasChannel(int channel) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  return agl_channels_.find(channel) != agl_channels_.end();
}

// Rendering process
// Static thread entry point: forwards to ScreenUpdateProcess() on |obj|.
bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
  return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
}
// One iteration of the render loop, run repeatedly on the screen-update
// thread. Returns true to keep the thread alive, false to let it exit.
bool VideoRenderIosGles20::ScreenUpdateProcess() {
  // Paced by the timer armed in Init(); the 100 ms cap keeps the loop
  // responsive if the timer is stopped during teardown.
  screen_update_event_->Wait(100);
  CriticalSectionScoped cs(gles_crit_sec_.get());
  if (!is_rendering_) {
    return false;
  }
  if (!screen_update_thread_) {
    // The destructor has started tearing us down.
    return false;
  }
  if (GetWindowRect(window_rect_) == -1) {
    return true;
  }
  // Refresh the cached window size when the view's frame changed.
  if (window_width_ != (window_rect_.right - window_rect_.left) ||
      window_height_ != (window_rect_.bottom - window_rect_.top)) {
    window_width_ = window_rect_.right - window_rect_.left;
    window_height_ = window_rect_.bottom - window_rect_.top;
  }
  // Check if there are any updated buffers
  bool updated = false;
  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
  while (it != agl_channels_.end()) {
    VideoRenderIosChannel* agl_channel = it->second;
    updated = agl_channel->IsUpdated();
    if (updated) {
      break;
    }
    it++;
  }
  if (updated) {
    // At least one buffer has been updated, we need to repaint the texture
    // Loop through all channels starting highest zOrder ending with lowest.
    for (std::multimap<int, int>::reverse_iterator r_it =
             z_order_to_channel_.rbegin();
         r_it != z_order_to_channel_.rend();
         r_it++) {
      int channel_id = r_it->second;
      // NOTE(review): this inner |it| shadows the outer iterator, and the
      // find() result is not checked against end(). The two maps are kept
      // in sync by Create/DeleteEaglChannel, but a guard would be safer.
      std::map<int, VideoRenderIosChannel*>::iterator it =
          agl_channels_.find(channel_id);
      VideoRenderIosChannel* agl_channel = it->second;
      agl_channel->RenderOffScreenBuffer();
    }
    [view_ presentFramebuffer];
  }
  return true;
}
// Copies the view's frame into |rect| using {left, top} = origin and
// {right, bottom} = origin + size. Returns 0 on success, -1 when no view is
// attached.
//
// Fixes: the previous mapping transposed the axes (top = origin.x,
// left = origin.y, bottom = size.width, right = size.height), which made
// window_width_ = right - left and window_height_ = bottom - top computed
// from this rect meaningless.
int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  if (!view_) {
    return -1;
  }
  CGRect frame = [view_ frame];
  rect.left = frame.origin.x;
  rect.top = frame.origin.y;
  rect.right = frame.origin.x + frame.size.width;
  rect.bottom = frame.origin.y + frame.size.height;
  return 0;
}
// Swaps in a new render target view. The caller retains ownership.
int VideoRenderIosGles20::ChangeWindow(void* new_window) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  view_ = static_cast<VideoRenderIosView*>(new_window);
  return 0;
}

// Updates the trace/module id used for logging.
int VideoRenderIosGles20::ChangeUniqueID(int unique_id) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  id_ = unique_id;
  return 0;
}
// Enables rendering.
//
// Fixes: is_rendering_ was written without holding gles_crit_sec_, while
// ScreenUpdateProcess() reads it under that lock; both setters now take the
// lock like every other accessor in this class.
int VideoRenderIosGles20::StartRender() {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  is_rendering_ = true;
  return 0;
}

// Disables rendering; the screen-update thread exits on its next iteration.
int VideoRenderIosGles20::StopRender() {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  is_rendering_ = false;
  return 0;
}
// Reports the view's current frame size. Always returns 0.
//
// Fixes: both this method and SetStreamCropping() accessed shared state
// (view_, agl_channels_) without the critical section that guards them
// everywhere else, racing with the render thread and Create/DeleteEaglChannel.
int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
                                              uint& screen_height) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  CGRect frame = [view_ frame];
  screen_width = frame.size.width;
  screen_height = frame.size.height;
  return 0;
}

// Updates the destination rectangle of the stream at index |stream_id|.
// Note: |stream_id| is interpreted as the position of the channel in map
// iteration order, not as a channel id. Always returns 0, even when the
// index does not match any channel.
int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
                                            const float left,
                                            const float top,
                                            const float right,
                                            const float bottom) {
  CriticalSectionScoped cs(gles_crit_sec_.get());
  uint counter = 0;
  for (std::map<int, VideoRenderIosChannel*>::iterator it =
           agl_channels_.begin();
       it != agl_channels_.end();
       ++it, ++counter) {
    if (counter == stream_id) {
      it->second->SetStreamSettings(0, left, top, right, bottom);
    }
  }
  return 0;
}

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
#include <list>
#include <map>
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
class VideoRenderIosGles20;
class CriticalSectionWrapper;
// iOS implementation of the IVideoRender interface. Most operations forward
// to a VideoRenderIosGles20 instance created in Init(); the rest trace an
// error and return -1 (unsupported on iOS).
// NOTE(review): the base class is inherited privately (class default) --
// confirm whether public inheritance was intended before callers use this
// polymorphically through IVideoRender*.
class VideoRenderIosImpl : IVideoRender {
 public:
  explicit VideoRenderIosImpl(const int32_t id,
                              void* window,
                              const bool full_screen);

  ~VideoRenderIosImpl();

  // Implementation of IVideoRender.
  int32_t Init() OVERRIDE;
  int32_t ChangeUniqueId(const int32_t id) OVERRIDE;
  int32_t ChangeWindow(void* window) OVERRIDE;

  // Creates a render channel for |stream_id|; returns NULL on failure.
  VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
                                               const uint32_t z_order,
                                               const float left,
                                               const float top,
                                               const float right,
                                               const float bottom) OVERRIDE;
  int32_t DeleteIncomingRenderStream(const uint32_t stream_id) OVERRIDE;

  // Unsupported on iOS (returns -1).
  int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
                                            uint32_t& z_order,
                                            float& left,
                                            float& top,
                                            float& right,
                                            float& bottom) const OVERRIDE;

  int32_t StartRender() OVERRIDE;
  int32_t StopRender() OVERRIDE;

  VideoRenderType RenderType() OVERRIDE;
  // "Perfered" is a historic typo inherited from IVideoRender.
  RawVideoType PerferedVideoType() OVERRIDE;
  bool FullScreen() OVERRIDE;
  int32_t GetGraphicsMemory(
      uint64_t& total_graphics_memory,
      uint64_t& available_graphics_memory) const OVERRIDE;  // NOLINT
  int32_t GetScreenResolution(
      uint32_t& screen_width,
      uint32_t& screen_height) const OVERRIDE;  // NOLINT
  uint32_t RenderFrameRate(const uint32_t stream_id);
  int32_t SetStreamCropping(const uint32_t stream_id,
                            const float left,
                            const float top,
                            const float right,
                            const float bottom) OVERRIDE;
  int32_t ConfigureRenderer(const uint32_t stream_id,
                            const unsigned int z_order,
                            const float left,
                            const float top,
                            const float right,
                            const float bottom) OVERRIDE;
  int32_t SetTransparentBackground(const bool enable) OVERRIDE;
  int32_t SetText(const uint8_t text_id,
                  const uint8_t* text,
                  const int32_t text_length,
                  const uint32_t text_color_ref,
                  const uint32_t background_color_ref,
                  const float left,
                  const float top,
                  const float right,
                  const float bottom) OVERRIDE;
  int32_t SetBitmap(const void* bit_map,
                    const uint8_t picture_id,
                    const void* color_key,
                    const float left,
                    const float top,
                    const float right,
                    const float bottom);
  int32_t FullScreenRender(void* window, const bool enable);

 private:
  int32_t id_;
  void* ptr_window_;  // The VideoRenderIosView supplied by the embedder.
  bool full_screen_;

  CriticalSectionWrapper* crit_sec_;          // Owned.
  VideoRenderIosGles20* ptr_ios_render_;      // Owned; created in Init().
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_

View File

@ -0,0 +1,178 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
// Logs an error naming the enclosing function, then returns -1 from it.
// NOTE: the macro expands to a bare `return -1;`, so it is only valid as the
// entire body of a member function returning an integer type (it also relies
// on the |id_| member being in scope).
#define IOS_UNSUPPORTED()                                  \
  WEBRTC_TRACE(kTraceError,                                \
               kTraceVideoRenderer,                        \
               id_,                                        \
               "%s is not supported on the iOS platform.", \
               __FUNCTION__);                              \
  return -1;
// Stores the construction parameters; the platform renderer itself is
// created later in Init().
//
// Fixes: ptr_ios_render_ was never initialized, so the destructor's
// `if (ptr_ios_render_)` read an indeterminate pointer (undefined behavior)
// whenever Init() had not been called.
VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
                                       void* window,
                                       const bool full_screen)
    : id_(id),
      ptr_window_(window),
      full_screen_(full_screen),
      crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
      ptr_ios_render_(NULL) {}
VideoRenderIosImpl::~VideoRenderIosImpl() {
  delete crit_sec_;
  // NOTE(review): ptr_ios_render_ is only assigned in Init(); this check
  // reads an uninitialized pointer unless the constructor null-initializes
  // it -- verify the constructor does so.
  if (ptr_ios_render_) {
    delete ptr_ios_render_;
    ptr_ios_render_ = NULL;
  }
}
// Creates the platform renderer for the window handed to the constructor and
// initializes it. Returns the renderer's Init() result (0 on success, -1 on
// failure). (Also removes a stray empty statement after the return.)
int32_t VideoRenderIosImpl::Init() {
  CriticalSectionScoped cs(crit_sec_);
  ptr_ios_render_ = new VideoRenderIosGles20(
      (VideoRenderIosView*)ptr_window_, full_screen_, id_);
  return ptr_ios_render_->Init();
}
// Stores the new id locally and forwards it to the platform renderer.
int32_t VideoRenderIosImpl::ChangeUniqueId(const int32_t id) {
  CriticalSectionScoped cs(crit_sec_);
  id_ = id;
  return ptr_ios_render_->ChangeUniqueID(id_);
}

// Swaps the render window. Returns -1 when |window| is NULL.
int32_t VideoRenderIosImpl::ChangeWindow(void* window) {
  CriticalSectionScoped cs(crit_sec_);
  if (!window) {
    return -1;
  }
  ptr_window_ = window;
  return ptr_ios_render_->ChangeWindow(ptr_window_);
}
// Creates a render channel for |stream_id| at the given z-order and
// normalized rectangle. Returns NULL when no window is attached or the
// channel could not be created.
VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream(
    const uint32_t stream_id,
    const uint32_t z_order,
    const float left,
    const float top,
    const float right,
    const float bottom) {
  CriticalSectionScoped cs(crit_sec_);
  if (!ptr_window_) {
    // Without a window there is nothing to render into.
    return NULL;
  }
  return ptr_ios_render_->CreateEaglChannel(
      stream_id, z_order, left, top, right, bottom);
}

// Destroys the channel registered for |stream_id|.
int32_t VideoRenderIosImpl::DeleteIncomingRenderStream(
    const uint32_t stream_id) {
  CriticalSectionScoped cs(crit_sec_);
  return ptr_ios_render_->DeleteEaglChannel(stream_id);
}
// Not implemented on iOS; traces an error and returns -1.
int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties(
    const uint32_t stream_id,
    uint32_t& z_order,
    float& left,
    float& top,
    float& right,
    float& bottom) const {
  IOS_UNSUPPORTED();
}

// Forwards to the platform renderer. NOTE(review): dereferences
// ptr_ios_render_ without a null check; Init() must have succeeded first.
int32_t VideoRenderIosImpl::StartRender() {
  return ptr_ios_render_->StartRender();
}

int32_t VideoRenderIosImpl::StopRender() {
  return ptr_ios_render_->StopRender();
}

// This module always renders through OpenGL ES on iOS.
VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; }

// I420 frames are consumed directly ("Perfered" is a historic typo in the
// inherited interface).
RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; }

// Not implemented on iOS.
bool VideoRenderIosImpl::FullScreen() { IOS_UNSUPPORTED(); }

// Not implemented on iOS.
int32_t VideoRenderIosImpl::GetGraphicsMemory(
    uint64_t& totalGraphicsMemory,
    uint64_t& availableGraphicsMemory) const {
  IOS_UNSUPPORTED();
}

// Forwards to the platform renderer (see the null-check note on
// StartRender()).
int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth,
                                                uint32_t& screenHeight) const {
  return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight);
}

// Not implemented on iOS.
uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) {
  IOS_UNSUPPORTED();
}
// Forwards cropping of |streamId| to the platform renderer.
int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId,
                                              const float left,
                                              const float top,
                                              const float right,
                                              const float bottom) {
  return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom);
}

// Not implemented on iOS; traces an error and returns -1 (same for all the
// stubs below).
int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId,
                                              const unsigned int zOrder,
                                              const float left,
                                              const float top,
                                              const float right,
                                              const float bottom) {
  IOS_UNSUPPORTED();
}

int32_t VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
  IOS_UNSUPPORTED();
}

int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
                                    const uint8_t* text,
                                    const int32_t textLength,
                                    const uint32_t textColorRef,
                                    const uint32_t backgroundColorRef,
                                    const float left,
                                    const float top,
                                    const float right,
                                    const float bottom) {
  IOS_UNSUPPORTED();
}

int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
                                      const uint8_t pictureId,
                                      const void* colorKey,
                                      const float left,
                                      const float top,
                                      const float right,
                                      const float bottom) {
  IOS_UNSUPPORTED();
}

int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
  IOS_UNSUPPORTED();
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
#include "webrtc/modules/video_render/ios/open_gles20.h"
// UIView subclass backed by a CAEAGLLayer that wraps a C++ OpenGles20
// renderer so webrtc I420 frames can be drawn directly into the view.
@interface VideoRenderIosView : UIView {
 @private  // NOLINT
  EAGLContext* context_;
  // Owned; created in the initializers, deleted in dealloc.
  webrtc::OpenGles20* gles_renderer20_;
  int _frameBufferWidth;
  int _frameBufferHeight;
  unsigned int _defaultFrameBuffer;
  unsigned int _colorRenderBuffer;
}

// Creates the EAGL context and the GL framebuffer objects; must succeed
// before the first renderFrame: call.
- (BOOL)createContext;
// Presents the rendered buffer on screen (implemented in the .mm file).
- (BOOL)presentFramebuffer;
// Draws |frameToRender| with the GLES 2.0 renderer.
- (BOOL)renderFrame:(webrtc::I420VideoFrame*)frameToRender;
// Sets the normalized ([0,1]) destination rectangle and depth for the
// stream; forwards to OpenGles20::SetCoordinates().
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
                           Left:(const float)left
                            Top:(const float)top
                          Right:(const float)right
                         Bottom:(const float)bottom;

@property(nonatomic, retain) EAGLContext* context;

@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_

View File

@ -0,0 +1,163 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
@implementation VideoRenderIosView

@synthesize context = context_;

+ (Class)layerClass {
  // Back this view with an OpenGL ES-capable layer instead of a plain
  // CALayer so the EAGLContext can render directly into it.
  return [CAEAGLLayer class];
}

- (id)initWithCoder:(NSCoder*)coder {
  // init super class
  self = [super initWithCoder:coder];
  if (self) {
    gles_renderer20_ = new OpenGles20();
  }
  return self;
}

- (id)init {
  // init super class
  self = [super init];
  if (self) {
    gles_renderer20_ = new OpenGles20();
  }
  return self;
}

- (id)initWithFrame:(CGRect)frame {
  // init super class
  self = [super initWithFrame:frame];
  if (self) {
    gles_renderer20_ = new OpenGles20();
  }
  return self;
}

- (void)dealloc {
  // GL object deletion only affects the context that is current, so make
  // sure our context is current before tearing the buffers down.
  if (context_ && [EAGLContext currentContext] != context_) {
    [EAGLContext setCurrentContext:context_];
  }
  if (_defaultFrameBuffer) {
    glDeleteFramebuffers(1, &_defaultFrameBuffer);
    _defaultFrameBuffer = 0;
  }
  if (_colorRenderBuffer) {
    glDeleteRenderbuffers(1, &_colorRenderBuffer);
    _colorRenderBuffer = 0;
  }
  [EAGLContext setCurrentContext:nil];
  // This file uses manual reference counting ([super dealloc] below), so
  // the context created in createContext must be released here; a bare
  // `context_ = nil` would leak it.
  [context_ release];
  context_ = nil;
  delete gles_renderer20_;
  gles_renderer20_ = NULL;
  [super dealloc];
}

- (NSString*)description {
  return [NSString stringWithFormat:
      @"A WebRTC implemented subclass of UIView."
      "+Class method is overwritten, along with custom methods"];
}

- (BOOL)createContext {
  // Configure the EAGL layer: opaque, non-retained RGBA8 backing store.
  CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
  eagl_layer.opaque = YES;
  eagl_layer.drawableProperties =
      [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
                                                 kEAGLDrawablePropertyRetainedBacking,
                                                 kEAGLColorFormatRGBA8,
                                                 kEAGLDrawablePropertyColorFormat,
                                                 nil];
  context_ = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
  if (!context_) {
    return NO;
  }
  // set current EAGLContext to self context_
  if (![EAGLContext setCurrentContext:context_]) {
    return NO;
  }
  // generates and binds the OpenGLES buffers
  glGenFramebuffers(1, &_defaultFrameBuffer);
  glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
  // Create color render buffer and allocate backing store.
  glGenRenderbuffers(1, &_colorRenderBuffer);
  glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
  [context_ renderbufferStorage:GL_RENDERBUFFER
                   fromDrawable:(CAEAGLLayer*)self.layer];
  glGetRenderbufferParameteriv(
      GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
  glGetRenderbufferParameteriv(
      GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight);
  glFramebufferRenderbuffer(GL_FRAMEBUFFER,
                            GL_COLOR_ATTACHMENT0,
                            GL_RENDERBUFFER,
                            _colorRenderBuffer);
  if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
    return NO;
  }
  // set the frame buffer
  glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
  glViewport(0, 0, self.frame.size.width, self.frame.size.height);
  return gles_renderer20_->Setup([self frame].size.width,
                                 [self frame].size.height);
}

- (BOOL)presentFramebuffer {
  // Present failures are logged but deliberately not treated as fatal.
  if (![context_ presentRenderbuffer:GL_RENDERBUFFER]) {
    WEBRTC_TRACE(kTraceWarning,
                 kTraceVideoRenderer,
                 0,
                 "%s:%d [context present_renderbuffer] "
                 "returned false",
                 __FUNCTION__,
                 __LINE__);
  }
  // update UI stuff on the main thread
  [self performSelectorOnMainThread:@selector(setNeedsDisplay)
                         withObject:nil
                      waitUntilDone:NO];
  return YES;
}

- (BOOL)renderFrame:(I420VideoFrame*)frameToRender {
  // Guard against a null frame before dereferencing it below.
  if (!frameToRender) {
    return NO;
  }
  if (![EAGLContext setCurrentContext:context_]) {
    return NO;
  }
  return gles_renderer20_->Render(*frameToRender);
}

- (BOOL)setCoordinatesForZOrder:(const float)zOrder
                           Left:(const float)left
                            Top:(const float)top
                          Right:(const float)right
                         Bottom:(const float)bottom {
  // Forward the normalized (0..1) destination rectangle to the renderer.
  return gles_renderer20_->SetCoordinates(zOrder, left, top, right, bottom);
}

@end

View File

@ -42,6 +42,16 @@
'include/video_render_defines.h',
'incoming_video_stream.cc',
'incoming_video_stream.h',
'ios/open_gles20.h',
'ios/open_gles20.mm',
'ios/video_render_ios_channel.h',
'ios/video_render_ios_channel.mm',
'ios/video_render_ios_gles20.h',
'ios/video_render_ios_gles20.mm',
'ios/video_render_ios_impl.h',
'ios/video_render_ios_impl.mm',
'ios/video_render_ios_view.h',
'ios/video_render_ios_view.mm',
'linux/video_render_linux_impl.cc',
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.cc',
@ -88,6 +98,21 @@
'android/video_render_opengles20.cc',
],
}],
['OS!="ios" or include_internal_video_render==0', {
'sources!': [
# iOS
'ios/open_gles20.h',
'ios/open_gles20.mm',
'ios/video_render_ios_channel.h',
'ios/video_render_ios_channel.mm',
'ios/video_render_ios_gles20.h',
'ios/video_render_ios_gles20.mm',
'ios/video_render_ios_impl.h',
'ios/video_render_ios_impl.mm',
'ios/video_render_ios_view.h',
'ios/video_render_ios_view.mm',
],
}],
['OS!="linux" or include_internal_video_render==0', {
'sources!': [
'linux/video_render_linux_impl.h',
@ -114,6 +139,17 @@
'mac/cocoa_full_screen_window.mm',
],
}],
['OS=="ios"', {
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework OpenGLES',
'-framework QuartzCore',
'-framework UIKit',
],
},
},
}],
['OS=="mac"', {
'direct_dependent_settings': {
'include_dirs': ['mac',],

View File

@ -26,13 +26,9 @@
// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
// gets defined if WEBRTC_IOS is defined
#elif defined(WEBRTC_IOS) && 0
// TODO(sjlee): land https://webrtc-codereview.appspot.com/1641004/
#if defined(IPHONE_GLES_RENDERING)
#define STANDARD_RENDERING kRenderiPhone
#include "iPhone/video_render_iphone_impl.h"
#endif
#elif defined(WEBRTC_IOS)
#define STANDARD_RENDERING kRenderiOS
#include "ios/video_render_ios_impl.h"
#elif defined(WEBRTC_MAC)
#if defined(COCOA_RENDERING)
#define STANDARD_RENDERING kRenderCocoa
@ -116,11 +112,10 @@ ModuleVideoRenderImpl::ModuleVideoRenderImpl(
}
break;
#elif defined(WEBRTC_IOS) && 0
// TODO(sjlee): land https://webrtc-codereview.appspot.com/1641004/
case kRenderiPhone:
#elif defined(WEBRTC_IOS)
case kRenderiOS:
{
VideoRenderIPhoneImpl* ptrRenderer = new VideoRenderIPhoneImpl(_id, videoRenderType, window, _fullScreen);
VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
if(ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
@ -231,6 +226,7 @@ ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
if (_ptrRenderer)
{
VideoRenderType videoRenderType = _ptrRenderer->RenderType();
switch (videoRenderType)
{
case kRenderExternal:
@ -252,6 +248,14 @@ ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
delete ptrRenderer;
}
break;
#elif defined(WEBRTC_IOS)
case kRenderiOS:
{
VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#elif defined(WEBRTC_MAC)
#if defined(COCOA_RENDERING)
@ -272,11 +276,6 @@ ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
break;
#endif
#elif defined(WEBRTC_IOS) && 0
// TODO(sjlee): land https://webrtc-codereview.appspot.com/1641004/
case kRenderiPhone:
break;
#elif defined(WEBRTC_ANDROID)
case kRenderAndroid:
{
@ -351,18 +350,14 @@ int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
_ptrRenderer = NULL;
delete _ptrRenderer;
#if 0 // TODO(sjlee): land https://webrtc-codereview.appspot.com/1641004/
VideoRenderIPhoneImpl* ptrRenderer;
ptrRenderer = new VideoRenderIPhoneImpl(_id, kRenderiPhone, window, _fullScreen);
VideoRenderIosImpl* ptrRenderer;
ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
if (!ptrRenderer)
{
return -1;
}
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
return _ptrRenderer->ChangeWindow(window);
#else
return -1;
#endif
#elif defined(WEBRTC_MAC)
_ptrRenderer = NULL;