Refactored IncomingVideoStream and VideoRenderFrame, to get code in better shape when hunting BUG=481.

BUG=481
TEST=Compiles on all platforms and autotest passes.

Review URL: https://webrtc-codereview.appspot.com/608005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2323 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2012-05-30 10:45:18 +00:00
parent 9259e7bd03
commit 327ada1cb0
4 changed files with 550 additions and 678 deletions

View File

@ -8,20 +8,10 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "incoming_video_stream.h" #include "modules/video_render/main/source/incoming_video_stream.h"
#include "critical_section_wrapper.h"
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "video_render_frames.h"
#include "tick_util.h"
#include "map_wrapper.h"
#include "common_video/libyuv/include/libyuv.h"
#include <cassert> #include <cassert>
// Platform specifics
#if defined(_WIN32) #if defined(_WIN32)
#include <windows.h> #include <windows.h>
#elif defined(WEBRTC_LINUX) #elif defined(WEBRTC_LINUX)
@ -31,383 +21,342 @@
#include <sys/time.h> #include <sys/time.h>
#endif #endif
#include "common_video/libyuv/include/libyuv.h"
#include "modules/video_render/main/source/video_render_frames.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/event_wrapper.h"
#include "system_wrappers/interface/map_wrapper.h"
#include "system_wrappers/interface/thread_wrapper.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
namespace webrtc { namespace webrtc {
IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId) : IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 module_id,
_moduleId(moduleId), const WebRtc_UWord32 stream_id)
_streamId(streamId), : module_id_(module_id),
_streamCritsect(*CriticalSectionWrapper::CreateCriticalSection()), stream_id_(stream_id),
_threadCritsect(*CriticalSectionWrapper::CreateCriticalSection()), stream_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
_bufferCritsect(*CriticalSectionWrapper::CreateCriticalSection()), thread_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrIncomingRenderThread(), buffer_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
_deliverBufferEvent(*EventWrapper::Create()), incoming_render_thread_(),
_running(false), deliver_buffer_event_(*EventWrapper::Create()),
_ptrExternalCallback(NULL), running_(false),
_ptrRenderCallback(NULL), external_callback_(NULL),
_renderBuffers(*(new VideoRenderFrames)), render_callback_(NULL),
_callbackVideoType(kVideoI420), render_buffers_(*(new VideoRenderFrames)),
_callbackWidth(0), callbackVideoType_(kVideoI420),
_callbackHeight(0), callbackWidth_(0),
_incomingRate(0), callbackHeight_(0),
_lastRateCalculationTimeMs(0), incoming_rate_(0),
_numFramesSinceLastCalculation(0), last_rate_calculation_time_ms_(0),
_lastRenderedFrame(), num_frames_since_last_calculation_(0),
_tempFrame(), last_rendered_frame_(),
_startImage(), temp_frame_(),
_timeoutImage(), start_image_(),
_timeoutTime(), timeout_image_(),
_mirrorFramesEnabled(false), timeout_time_(),
_mirroring(), mirror_frames_enabled_(false),
_transformedVideoFrame() mirroring_(),
{ transformed_video_frame_() {
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId, WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
"%s created for stream %d", __FUNCTION__, streamId); "%s created for stream %d", __FUNCTION__, stream_id);
} }
IncomingVideoStream::~IncomingVideoStream() IncomingVideoStream::~IncomingVideoStream() {
{ WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId, "%s deleted for stream %d", __FUNCTION__, stream_id_);
"%s deleted for stream %d", __FUNCTION__, _streamId);
Stop(); Stop();
// _ptrIncomingRenderThread - Delete in stop
delete &_renderBuffers;
delete &_streamCritsect;
delete &_bufferCritsect;
delete &_threadCritsect;
delete &_deliverBufferEvent;
// incoming_render_thread_ - Delete in stop
delete &render_buffers_;
delete &stream_critsect_;
delete &buffer_critsect_;
delete &thread_critsect_;
delete &deliver_buffer_event_;
} }
WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id) WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id) {
{ CriticalSectionScoped cs(&stream_critsect_);
CriticalSectionScoped cs(&_streamCritsect); module_id_ = id;
return 0;
_moduleId = id;
return 0;
} }
VideoRenderCallback* VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
IncomingVideoStream::ModuleCallback() CriticalSectionScoped cs(&stream_critsect_);
{ return this;
CriticalSectionScoped cs(&_streamCritsect);
return this;
} }
WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 streamId, WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id,
VideoFrame& videoFrame) VideoFrame& video_frame) {
{ CriticalSectionScoped csS(&stream_critsect_);
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
video_frame.RenderTimeMs());
CriticalSectionScoped csS(&_streamCritsect); if (!running_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId, WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s for stream %d, render time: %u", __FUNCTION__, _streamId, "%s: Not running", __FUNCTION__);
videoFrame.RenderTimeMs()); return -1;
}
if (!_running) if (true == mirror_frames_enabled_) {
{ transformed_video_frame_.VerifyAndAllocate(video_frame.Length());
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId, if (mirroring_.mirror_x_axis) {
"%s: Not running", __FUNCTION__); MirrorI420UpDown(video_frame.Buffer(),
return -1; transformed_video_frame_.Buffer(),
video_frame.Width(), video_frame.Height());
transformed_video_frame_.SetLength(video_frame.Length());
transformed_video_frame_.SetWidth(video_frame.Width());
transformed_video_frame_.SetHeight(video_frame.Height());
video_frame.SwapFrame(transformed_video_frame_);
} }
if (mirroring_.mirror_y_axis) {
if (true == _mirrorFramesEnabled) MirrorI420LeftRight(video_frame.Buffer(),
{ transformed_video_frame_.Buffer(),
_transformedVideoFrame.VerifyAndAllocate(videoFrame.Length()); video_frame.Width(), video_frame.Height());
if (_mirroring.mirrorXAxis) transformed_video_frame_.SetLength(video_frame.Length());
{ transformed_video_frame_.SetWidth(video_frame.Width());
MirrorI420UpDown(videoFrame.Buffer(), transformed_video_frame_.SetHeight(video_frame.Height());
_transformedVideoFrame.Buffer(), video_frame.SwapFrame(transformed_video_frame_);
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
}
if (_mirroring.mirrorYAxis)
{
MirrorI420LeftRight(videoFrame.Buffer(),
_transformedVideoFrame.Buffer(),
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
}
} }
}
// Rate statistics // Rate statistics.
_numFramesSinceLastCalculation++; num_frames_since_last_calculation_++;
WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp(); WebRtc_Word64 now_ms = TickTime::MillisecondTimestamp();
if (nowMs >= _lastRateCalculationTimeMs + KFrameRatePeriodMs) if (now_ms >= last_rate_calculation_time_ms_ + KFrameRatePeriodMs) {
{ incoming_rate_ =
_incomingRate = (WebRtc_UWord32) (1000 * _numFramesSinceLastCalculation static_cast<WebRtc_UWord32>(1000 * num_frames_since_last_calculation_ /
/ (nowMs - _lastRateCalculationTimeMs)); (now_ms - last_rate_calculation_time_ms_));
_numFramesSinceLastCalculation = 0; num_frames_since_last_calculation_ = 0;
_lastRateCalculationTimeMs = nowMs; last_rate_calculation_time_ms_ = now_ms;
} }
// Insert frame // Insert frame.
CriticalSectionScoped csB(&_bufferCritsect); CriticalSectionScoped csB(&buffer_critsect_);
if (_renderBuffers.AddFrame(&videoFrame) == 1) if (render_buffers_.AddFrame(&video_frame) == 1)
_deliverBufferEvent.Set(); deliver_buffer_event_.Set();
return 0; return 0;
} }
WebRtc_Word32 IncomingVideoStream::SetStartImage(const VideoFrame& videoFrame) WebRtc_Word32 IncomingVideoStream::SetStartImage(
{ const VideoFrame& video_frame) {
CriticalSectionScoped csS(&_threadCritsect); CriticalSectionScoped csS(&thread_critsect_);
return _startImage.CopyFrame(videoFrame); return start_image_.CopyFrame(video_frame);
} }
WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(const VideoFrame& videoFrame, WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(
const WebRtc_UWord32 timeout) const VideoFrame& video_frame, const WebRtc_UWord32 timeout) {
{ CriticalSectionScoped csS(&thread_critsect_);
CriticalSectionScoped csS(&_threadCritsect); timeout_time_ = timeout;
_timeoutTime = timeout; return timeout_image_.CopyFrame(video_frame);
return _timeoutImage.CopyFrame(videoFrame);
} }
WebRtc_Word32 IncomingVideoStream::SetRenderCallback(VideoRenderCallback* renderCallback) WebRtc_Word32 IncomingVideoStream::SetRenderCallback(
{ VideoRenderCallback* render_callback) {
CriticalSectionScoped cs(&_streamCritsect); CriticalSectionScoped cs(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId, WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s(%x) for stream %d", __FUNCTION__, renderCallback, "%s(%x) for stream %d", __FUNCTION__, render_callback,
_streamId); stream_id_);
_ptrRenderCallback = renderCallback; render_callback_ = render_callback;
return 0; return 0;
} }
WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable, WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable,
const bool mirrorXAxis, const bool mirror_x_axis,
const bool mirrorYAxis) const bool mirror_y_axis) {
{ CriticalSectionScoped cs(&stream_critsect_);
CriticalSectionScoped cs(&_streamCritsect); mirror_frames_enabled_ = enable;
_mirrorFramesEnabled = enable; mirroring_.mirror_x_axis = mirror_x_axis;
_mirroring.mirrorXAxis = mirrorXAxis; mirroring_.mirror_y_axis = mirror_y_axis;
_mirroring.mirrorYAxis = mirrorYAxis;
return 0; return 0;
} }
WebRtc_Word32 IncomingVideoStream::SetExternalCallback(VideoRenderCallback* externalCallback) WebRtc_Word32 IncomingVideoStream::SetExternalCallback(
{ VideoRenderCallback* external_callback) {
CriticalSectionScoped cs(&_streamCritsect); CriticalSectionScoped cs(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId, "%s(%x) for stream %d", __FUNCTION__, external_callback,
"%s(%x) for stream %d", __FUNCTION__, externalCallback, stream_id_);
_streamId); external_callback_ = external_callback;
_ptrExternalCallback = externalCallback; callbackVideoType_ = kVideoI420;
_callbackVideoType = kVideoI420; callbackWidth_ = 0;
_callbackWidth = 0; callbackHeight_ = 0;
_callbackHeight = 0; return 0;
return 0;
} }
WebRtc_Word32 IncomingVideoStream::Start() WebRtc_Word32 IncomingVideoStream::Start() {
{ CriticalSectionScoped csS(&stream_critsect_);
CriticalSectionScoped csS(&_streamCritsect); WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId, "%s for stream %d", __FUNCTION__, stream_id_);
"%s for stream %d", __FUNCTION__, _streamId); if (running_) {
if (_running) WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
{ "%s: Already running", __FUNCTION__);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
"%s: Already running", __FUNCTION__);
return 0;
}
CriticalSectionScoped csT(&_threadCritsect);
assert(_ptrIncomingRenderThread == NULL);
_ptrIncomingRenderThread
= ThreadWrapper::CreateThread(IncomingVideoStreamThreadFun, this,
kRealtimePriority,
"IncomingVideoStreamThread");
if (!_ptrIncomingRenderThread)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_ptrIncomingRenderThread->Start(tId))
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s: thread started: %u", __FUNCTION__, tId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
_deliverBufferEvent.StartTimer(false, KEventStartupTimeMS);
_running = true;
return 0; return 0;
}
CriticalSectionScoped csT(&thread_critsect_);
assert(incoming_render_thread_ == NULL);
incoming_render_thread_ = ThreadWrapper::CreateThread(
IncomingVideoStreamThreadFun, this, kRealtimePriority,
"IncomingVideoStreamThread");
if (!incoming_render_thread_) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int t_id = 0;
if (incoming_render_thread_->Start(t_id)) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s: thread started: %u", __FUNCTION__, t_id);
} else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
deliver_buffer_event_.StartTimer(false, KEventStartupTimeMS);
running_ = true;
return 0;
} }
WebRtc_Word32 IncomingVideoStream::Stop() WebRtc_Word32 IncomingVideoStream::Stop() {
{ CriticalSectionScoped cs_stream(&stream_critsect_);
CriticalSectionScoped csStream(&_streamCritsect); WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId, "%s for stream %d", __FUNCTION__, stream_id_);
"%s for stream %d", __FUNCTION__, _streamId);
if (!_running) if (!running_) {
{ WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId, "%s: Not running", __FUNCTION__);
"%s: Not running", __FUNCTION__); return 0;
return 0; }
}
_threadCritsect.Enter(); thread_critsect_.Enter();
if (_ptrIncomingRenderThread) if (incoming_render_thread_) {
{ ThreadWrapper* thread = incoming_render_thread_;
ThreadWrapper* ptrThread = _ptrIncomingRenderThread; incoming_render_thread_ = NULL;
_ptrIncomingRenderThread = NULL; thread->SetNotAlive();
ptrThread->SetNotAlive(); #ifndef WIN32_
#ifndef _WIN32 deliver_buffer_event_.StopTimer();
_deliverBufferEvent.StopTimer();
#endif #endif
_threadCritsect.Leave(); thread_critsect_.Leave();
if (ptrThread->Stop()) if (thread->Stop()) {
{ delete thread;
delete ptrThread; } else {
} assert(false);
else WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
{ "%s: Not able to stop thread, leaking", __FUNCTION__);
assert(false);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
"%s: Not able to stop thread, leaking", __FUNCTION__);
}
} }
else } else {
{ thread_critsect_.Leave();
_threadCritsect.Leave(); }
running_ = false;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Reset() {
CriticalSectionScoped cs_stream(&stream_critsect_);
CriticalSectionScoped cs_buffer(&buffer_critsect_);
render_buffers_.ReleaseAllFrames();
return 0;
}
WebRtc_UWord32 IncomingVideoStream::StreamId() const {
CriticalSectionScoped cs_stream(&stream_critsect_);
return stream_id_;
}
WebRtc_UWord32 IncomingVideoStream::IncomingRate() const {
CriticalSectionScoped cs(&stream_critsect_);
return incoming_rate_;
}
bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj) {
return static_cast<IncomingVideoStream*>(obj)->IncomingVideoStreamProcess();
}
bool IncomingVideoStream::IncomingVideoStreamProcess() {
if (kEventError != deliver_buffer_event_.Wait(KEventMaxWaitTimeMs)) {
if (incoming_render_thread_ == NULL) {
// Terminating
return false;
} }
_running = false;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Reset() thread_critsect_.Enter();
{ VideoFrame* frame_to_render = NULL;
CriticalSectionScoped csStream(&_streamCritsect);
CriticalSectionScoped csBuffer(&_bufferCritsect);
_renderBuffers.ReleaseAllFrames(); // Get a new frame to render and the time for the frame after this one.
return 0; buffer_critsect_.Enter();
} frame_to_render = render_buffers_.FrameToRender();
WebRtc_UWord32 wait_time = render_buffers_.TimeToNextFrameRelease();
buffer_critsect_.Leave();
WebRtc_UWord32 IncomingVideoStream::StreamId() const // Set timer for next frame to render.
{ if (wait_time > KEventMaxWaitTimeMs) {
CriticalSectionScoped csStream(&_streamCritsect); wait_time = KEventMaxWaitTimeMs;
return _streamId;
}
WebRtc_UWord32 IncomingVideoStream::IncomingRate() const
{
CriticalSectionScoped cs(&_streamCritsect);
return _incomingRate;
}
bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj)
{
return static_cast<IncomingVideoStream*> (obj)->IncomingVideoStreamProcess();
}
bool IncomingVideoStream::IncomingVideoStreamProcess()
{
if (kEventError != _deliverBufferEvent.Wait(KEventMaxWaitTimeMs))
{
if (_ptrIncomingRenderThread == NULL)
{
// Terminating
return false;
}
_threadCritsect.Enter();
VideoFrame* ptrFrameToRender = NULL;
// Get a new frame to render and the time for the frame after this one.
_bufferCritsect.Enter();
ptrFrameToRender = _renderBuffers.FrameToRender();
WebRtc_UWord32 waitTime = _renderBuffers.TimeToNextFrameRelease();
_bufferCritsect.Leave();
// Set timer for next frame to render
if (waitTime > KEventMaxWaitTimeMs)
{
waitTime = KEventMaxWaitTimeMs;
}
_deliverBufferEvent.StartTimer(false, waitTime);
if (!ptrFrameToRender)
{
if (_ptrRenderCallback)
{
if (_lastRenderedFrame.RenderTimeMs() == 0
&& _startImage.Size()) // And we have not rendered anything and have a start image
{
_tempFrame.CopyFrame(_startImage);// Copy the startimage if the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
}
else if (_timeoutImage.Size()
&& _lastRenderedFrame.RenderTimeMs() + _timeoutTime
< TickTime::MillisecondTimestamp()) // We have rendered something a long time ago and have a timeout image
{
_tempFrame.CopyFrame(_timeoutImage); // Copy the timeoutImage if the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
}
}
// No frame
_threadCritsect.Leave();
return true;
}
// Send frame for rendering
if (_ptrExternalCallback)
{
WEBRTC_TRACE(kTraceStream,
kTraceVideoRenderer,
_moduleId,
"%s: executing external renderer callback to deliver frame",
__FUNCTION__, ptrFrameToRender->RenderTimeMs());
_ptrExternalCallback->RenderFrame(_streamId, *ptrFrameToRender);
}
else
{
if (_ptrRenderCallback)
{
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
"%s: Render frame, time: ", __FUNCTION__,
ptrFrameToRender->RenderTimeMs());
_ptrRenderCallback->RenderFrame(_streamId, *ptrFrameToRender);
}
}
// Release critsect before calling the module user
_threadCritsect.Leave();
// We're done with this frame, delete it.
if (ptrFrameToRender)
{
CriticalSectionScoped cs(&_bufferCritsect);
_lastRenderedFrame.SwapFrame(*ptrFrameToRender);
_renderBuffers.ReturnFrame(ptrFrameToRender);
}
} }
return true; deliver_buffer_event_.StartTimer(false, wait_time);
}
WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(VideoFrame& videoFrame) const if (!frame_to_render) {
{ if (render_callback_) {
CriticalSectionScoped cs(&_bufferCritsect); if (last_rendered_frame_.RenderTimeMs() == 0 &&
return videoFrame.CopyFrame(_lastRenderedFrame); start_image_.Size()) {
// We have not rendered anything and have a start image.
temp_frame_.CopyFrame(start_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
} else if (timeout_image_.Size() &&
last_rendered_frame_.RenderTimeMs() + timeout_time_ <
TickTime::MillisecondTimestamp()) {
// Render a timeout image.
temp_frame_.CopyFrame(timeout_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
}
}
// No frame.
thread_critsect_.Leave();
return true;
}
// Send frame for rendering.
if (external_callback_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: executing external renderer callback to deliver frame",
__FUNCTION__, frame_to_render->RenderTimeMs());
external_callback_->RenderFrame(stream_id_, *frame_to_render);
} else {
if (render_callback_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: Render frame, time: ", __FUNCTION__,
frame_to_render->RenderTimeMs());
render_callback_->RenderFrame(stream_id_, *frame_to_render);
}
}
// Release critsect before calling the module user.
thread_critsect_.Leave();
// We're done with this frame, delete it.
if (frame_to_render) {
CriticalSectionScoped cs(&buffer_critsect_);
last_rendered_frame_.SwapFrame(*frame_to_render);
render_buffers_.ReturnFrame(frame_to_render);
}
}
return true;
} }
} //namespace webrtc WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(
VideoFrame& video_frame) const {
CriticalSectionScoped cs(&buffer_critsect_);
return video_frame.CopyFrame(last_rendered_frame_);
}
} // namespace webrtc

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
* *
* Use of this source code is governed by a BSD-style license * Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source * that can be found in the LICENSE file in the root of the source
@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_ #ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_ #define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#include "video_render.h" #include "modules/video_render/main/interface/video_render.h"
#include "map_wrapper.h" #include "system_wrappers/interface/map_wrapper.h"
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper; class CriticalSectionWrapper;
@ -21,120 +21,94 @@ class ThreadWrapper;
class VideoRenderCallback; class VideoRenderCallback;
class VideoRenderFrames; class VideoRenderFrames;
struct VideoMirroring struct VideoMirroring {
{ VideoMirroring() : mirror_x_axis(false), mirror_y_axis(false) {}
bool mirrorXAxis; bool mirror_x_axis;
bool mirrorYAxis; bool mirror_y_axis;
VideoMirroring() :
mirrorXAxis(false), mirrorYAxis(false)
{
}
}; };
// Class definitions class IncomingVideoStream : public VideoRenderCallback {
class IncomingVideoStream: public VideoRenderCallback public:
{ IncomingVideoStream(const WebRtc_Word32 module_id,
public: const WebRtc_UWord32 stream_id);
/* ~IncomingVideoStream();
* VideoRenderer constructor/destructor
*/
IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId);
~IncomingVideoStream();
WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id); WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id);
// Get callbck to deliver frames to the module // Get callback to deliver frames to the module.
VideoRenderCallback* ModuleCallback(); VideoRenderCallback* ModuleCallback();
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
VideoFrame& videoFrame); VideoFrame& video_frame);
// Set callback to the platform dependant code // Set callback to the platform dependent code.
WebRtc_Word32 SetRenderCallback(VideoRenderCallback* renderCallback); WebRtc_Word32 SetRenderCallback(VideoRenderCallback* render_callback);
// Callback for file recording, snapshot, ... // Callback for file recording, snapshot, ...
WebRtc_Word32 SetExternalCallback(VideoRenderCallback* renderObject); WebRtc_Word32 SetExternalCallback(VideoRenderCallback* render_object);
/* // Start/Stop.
* Start/Stop WebRtc_Word32 Start();
*/ WebRtc_Word32 Stop();
WebRtc_Word32 Start();
WebRtc_Word32 Stop();
// Clear all buffers // Clear all buffers.
WebRtc_Word32 Reset(); WebRtc_Word32 Reset();
/* // Properties.
* Properties WebRtc_UWord32 StreamId() const;
*/ WebRtc_UWord32 IncomingRate() const;
WebRtc_UWord32 StreamId() const;
WebRtc_UWord32 IncomingRate() const;
/* WebRtc_Word32 GetLastRenderedFrame(VideoFrame& video_frame) const;
*
*/
WebRtc_Word32 GetLastRenderedFrame(VideoFrame& videoFrame) const;
WebRtc_Word32 SetStartImage(const VideoFrame& videoFrame); WebRtc_Word32 SetStartImage(const VideoFrame& video_frame);
WebRtc_Word32 SetTimeoutImage(const VideoFrame& videoFrame, WebRtc_Word32 SetTimeoutImage(const VideoFrame& video_frame,
const WebRtc_UWord32 timeout); const WebRtc_UWord32 timeout);
WebRtc_Word32 EnableMirroring(const bool enable, WebRtc_Word32 EnableMirroring(const bool enable,
const bool mirrorXAxis, const bool mirror_xaxis,
const bool mirrorYAxis); const bool mirror_yaxis);
protected: protected:
static bool IncomingVideoStreamThreadFun(void* obj); static bool IncomingVideoStreamThreadFun(void* obj);
bool IncomingVideoStreamProcess(); bool IncomingVideoStreamProcess();
private: private:
enum { KEventStartupTimeMS = 10 };
enum { KEventMaxWaitTimeMs = 100 };
enum { KFrameRatePeriodMs = 1000 };
// Enums WebRtc_Word32 module_id_;
enum WebRtc_UWord32 stream_id_;
{ // Critsects in allowed to enter order.
KEventStartupTimeMS = 10 CriticalSectionWrapper& stream_critsect_;
}; CriticalSectionWrapper& thread_critsect_;
enum CriticalSectionWrapper& buffer_critsect_;
{ ThreadWrapper* incoming_render_thread_;
KEventMaxWaitTimeMs = 100 EventWrapper& deliver_buffer_event_;
}; bool running_;
enum
{
KFrameRatePeriodMs = 1000
};
WebRtc_Word32 _moduleId; VideoRenderCallback* external_callback_;
WebRtc_UWord32 _streamId; VideoRenderCallback* render_callback_;
CriticalSectionWrapper& _streamCritsect; // Critsects in allowed to enter order VideoRenderFrames& render_buffers_;
CriticalSectionWrapper& _threadCritsect;
CriticalSectionWrapper& _bufferCritsect;
ThreadWrapper* _ptrIncomingRenderThread;
EventWrapper& _deliverBufferEvent;
bool _running;
VideoRenderCallback* _ptrExternalCallback; RawVideoType callbackVideoType_;
VideoRenderCallback* _ptrRenderCallback; WebRtc_UWord32 callbackWidth_;
VideoRenderFrames& _renderBuffers; WebRtc_UWord32 callbackHeight_;
RawVideoType _callbackVideoType; WebRtc_UWord32 incoming_rate_;
WebRtc_UWord32 _callbackWidth; WebRtc_Word64 last_rate_calculation_time_ms_;
WebRtc_UWord32 _callbackHeight; WebRtc_UWord16 num_frames_since_last_calculation_;
VideoFrame last_rendered_frame_;
VideoFrame temp_frame_;
VideoFrame start_image_;
VideoFrame timeout_image_;
WebRtc_UWord32 timeout_time_;
WebRtc_UWord32 _incomingRate; bool mirror_frames_enabled_;
WebRtc_Word64 _lastRateCalculationTimeMs; VideoMirroring mirroring_;
WebRtc_UWord16 _numFramesSinceLastCalculation; VideoFrame transformed_video_frame_;
VideoFrame _lastRenderedFrame;
VideoFrame _tempFrame;
VideoFrame _startImage;
VideoFrame _timeoutImage;
WebRtc_UWord32 _timeoutTime;
bool _mirrorFramesEnabled;
VideoMirroring _mirroring;
VideoFrame _transformedVideoFrame;
}; };
} //namespace webrtc } // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_ #endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
* *
* Use of this source code is governed by a BSD-style license * Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source * that can be found in the LICENSE file in the root of the source
@ -8,202 +8,167 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "video_render_frames.h" #include "modules/video_render/main/source/video_render_frames.h"
#include "module_common_types.h"
#include "tick_util.h"
#include "trace.h"
#include <cassert> #include <cassert>
#include "modules/interface/module_common_types.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
namespace webrtc { namespace webrtc {
VideoRenderFrames::VideoRenderFrames() :
_incomingFrames(), _renderDelayMs(10)
{
}
VideoRenderFrames::~VideoRenderFrames()
{
ReleaseAllFrames();
}
WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* ptrNewFrame)
{
const WebRtc_Word64 timeNow = TickTime::MillisecondTimestamp();
if (ptrNewFrame->RenderTimeMs() + KOldRenderTimestampMS < timeNow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: too old frame.", __FUNCTION__);
return -1;
}
if (ptrNewFrame->RenderTimeMs() > timeNow + KFutureRenderTimestampMS)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future.", __FUNCTION__);
return -1;
}
// Get an empty frame
VideoFrame* ptrFrameToAdd = NULL;
if (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
ptrFrameToAdd = static_cast<VideoFrame*> (item->GetItem());
_emptyFrames.Erase(item);
}
}
if (!ptrFrameToAdd)
{
if (_emptyFrames.GetSize() + _incomingFrames.GetSize()
> KMaxNumberOfFrames)
{
// Already allocated toom many frames...
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
-1, "%s: too many frames, limit: %d", __FUNCTION__,
KMaxNumberOfFrames);
return -1;
}
// Allocate new memory
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
_emptyFrames.GetSize() + _incomingFrames.GetSize());
ptrFrameToAdd = new VideoFrame();
if (!ptrFrameToAdd)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not create new frame for", __FUNCTION__);
return -1;
}
}
ptrFrameToAdd->VerifyAndAllocate(ptrNewFrame->Length());
ptrFrameToAdd->SwapFrame(const_cast<VideoFrame&> (*ptrNewFrame)); //remove const ness. Copying will be costly.
_incomingFrames.PushBack(ptrFrameToAdd);
return _incomingFrames.GetSize();
}
VideoFrame*
VideoRenderFrames::FrameToRender()
{
VideoFrame* ptrRenderFrame = NULL;
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrOldestFrameInList =
static_cast<VideoFrame*> (item->GetItem());
if (ptrOldestFrameInList->RenderTimeMs()
<= TickTime::MillisecondTimestamp() + _renderDelayMs)
{
// This is the oldest one so far and it's ok to render
if (ptrRenderFrame)
{
// This one is older than the newly found frame, remove this one.
ptrRenderFrame->SetWidth(0);
ptrRenderFrame->SetHeight(0);
ptrRenderFrame->SetLength(0);
ptrRenderFrame->SetRenderTime(0);
ptrRenderFrame->SetTimeStamp(0);
_emptyFrames.PushFront(ptrRenderFrame);
}
ptrRenderFrame = ptrOldestFrameInList;
_incomingFrames.Erase(item);
}
else
{
// We can't release this one yet, we're done here.
break;
}
}
else
{
assert(false);
}
}
return ptrRenderFrame;
}
WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* ptrOldFrame)
{
ptrOldFrame->SetWidth(0);
ptrOldFrame->SetHeight(0);
ptrOldFrame->SetRenderTime(0);
ptrOldFrame->SetLength(0);
_emptyFrames.PushBack(ptrOldFrame);
return 0;
}
WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames()
{
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
}
_incomingFrames.Erase(item);
}
while (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
}
_emptyFrames.Erase(item);
}
return 0;
}
WebRtc_Word32 KEventMaxWaitTimeMs = 200; WebRtc_Word32 KEventMaxWaitTimeMs = 200;
WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease() VideoRenderFrames::VideoRenderFrames()
{ : incoming_frames_(),
WebRtc_Word64 timeToRelease = 0; render_delay_ms_(10) {
ListItem* item = _incomingFrames.First(); }
if (item)
{ VideoRenderFrames::~VideoRenderFrames() {
VideoFrame* oldestFrame = ReleaseAllFrames();
static_cast<VideoFrame*> (item->GetItem()); }
timeToRelease = oldestFrame->RenderTimeMs() - _renderDelayMs
- TickTime::MillisecondTimestamp(); WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) {
if (timeToRelease < 0) const WebRtc_Word64 time_now = TickTime::MillisecondTimestamp();
{
timeToRelease = 0; if (new_frame->RenderTimeMs() + KOldRenderTimestampMS < time_now) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: too old frame.", __FUNCTION__);
return -1;
}
if (new_frame->RenderTimeMs() > time_now + KFutureRenderTimestampMS) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future.", __FUNCTION__);
return -1;
}
// Get an empty frame
VideoFrame* frame_to_add = NULL;
if (!empty_frames_.Empty()) {
ListItem* item = empty_frames_.First();
if (item) {
frame_to_add = static_cast<VideoFrame*>(item->GetItem());
empty_frames_.Erase(item);
}
}
if (!frame_to_add) {
if (empty_frames_.GetSize() + incoming_frames_.GetSize() >
KMaxNumberOfFrames) {
// Already allocated too many frames.
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
-1, "%s: too many frames, limit: %d", __FUNCTION__,
KMaxNumberOfFrames);
return -1;
}
// Allocate new memory.
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
empty_frames_.GetSize() + incoming_frames_.GetSize());
frame_to_add = new VideoFrame();
if (!frame_to_add) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not create new frame for", __FUNCTION__);
return -1;
}
}
frame_to_add->VerifyAndAllocate(new_frame->Length());
// TODO(mflodman) Change this!
// Remove const ness. Copying will be costly.
frame_to_add->SwapFrame(const_cast<VideoFrame&>(*new_frame));
incoming_frames_.PushBack(frame_to_add);
return incoming_frames_.GetSize();
}
// Returns the newest queued frame whose render time has been reached,
// recycling any older due frames back into the empty pool. Returns NULL
// when no frame is due for rendering yet.
VideoFrame* VideoRenderFrames::FrameToRender() {
  VideoFrame* render_frame = NULL;
  while (!incoming_frames_.Empty()) {
    ListItem* item = incoming_frames_.First();
    if (item == NULL) {
      // A non-empty list must yield a first item; break instead of
      // spinning forever in release builds (assert compiled out).
      assert(false);
      break;
    }
    VideoFrame* oldest_frame_in_list =
        static_cast<VideoFrame*>(item->GetItem());
    if (oldest_frame_in_list->RenderTimeMs() >
        TickTime::MillisecondTimestamp() + render_delay_ms_) {
      // We can't release this one yet, we're done here.
      break;
    }
    // This is the oldest one so far and it's OK to render.
    if (render_frame) {
      // The previously selected frame is older than the newly found one;
      // clear it and recycle it to the empty pool.
      render_frame->SetWidth(0);
      render_frame->SetHeight(0);
      render_frame->SetLength(0);
      render_frame->SetRenderTime(0);
      render_frame->SetTimeStamp(0);
      empty_frames_.PushFront(render_frame);
    }
    render_frame = oldest_frame_in_list;
    incoming_frames_.Erase(item);
  }
  return render_frame;
}
// Takes back a frame the renderer has finished with: resets its metadata
// and appends it to the empty-frame pool for reuse. Always returns 0.
WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* old_frame) {
  old_frame->SetWidth(0);
  old_frame->SetHeight(0);
  old_frame->SetRenderTime(0);
  old_frame->SetLength(0);
  empty_frames_.PushBack(old_frame);
  return 0;
}
// Frees and removes every frame from both the incoming queue and the
// empty-frame pool. Always returns 0.
WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames() {
  while (!incoming_frames_.Empty()) {
    ListItem* item = incoming_frames_.First();
    if (item == NULL) {
      // A non-empty list must yield a first item. The previous code called
      // Erase(NULL) here and would loop forever in release builds.
      assert(false);
      break;
    }
    VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
    assert(frame != NULL);
    frame->Free();  // Release the pixel buffer.
    delete frame;
    incoming_frames_.Erase(item);
  }
  while (!empty_frames_.Empty()) {
    ListItem* item = empty_frames_.First();
    if (item == NULL) {
      // Same defensive break for the empty-frame pool.
      assert(false);
      break;
    }
    VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
    assert(frame != NULL);
    frame->Free();
    delete frame;
    empty_frames_.Erase(item);
  }
  return 0;
}
// Returns the number of milliseconds until the oldest queued frame should
// be rendered (0 if it is already due), or KEventMaxWaitTimeMs when the
// queue is empty.
WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease() {
  WebRtc_Word64 time_to_release = 0;
  ListItem* item = incoming_frames_.First();
  if (item) {
    VideoFrame* oldest_frame = static_cast<VideoFrame*>(item->GetItem());
    time_to_release = oldest_frame->RenderTimeMs() - render_delay_ms_
        - TickTime::MillisecondTimestamp();
    if (time_to_release < 0) {
      // The frame is overdue; release immediately.
      time_to_release = 0;
    }
  } else {
    // Nothing queued: wait the maximum event time before re-checking.
    time_to_release = KEventMaxWaitTimeMs;
  }
  return static_cast<WebRtc_UWord32>(time_to_release);
}
// Stores the estimated delay (in ms) from when a frame is released by this
// class until it is actually shown on screen; used to compensate render
// timestamps. Always returns 0.
WebRtc_Word32 VideoRenderFrames::SetRenderDelay(
    const WebRtc_UWord32 render_delay) {
  render_delay_ms_ = render_delay;
  return 0;
}
}  // namespace webrtc

View File

@ -1,5 +1,5 @@
/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT

#include "modules/video_render/main/interface/video_render.h"
#include "system_wrappers/interface/list_wrapper.h"

namespace webrtc {

// Ordered queue of video frames waiting to be rendered, plus a recycle
// pool of empty frames. Not thread-safe; callers must synchronize access.
class VideoRenderFrames {
 public:
  VideoRenderFrames();
  ~VideoRenderFrames();

  // Add a frame to the render queue. Returns the queue length on success
  // or -1 if the frame is rejected.
  WebRtc_Word32 AddFrame(VideoFrame* new_frame);

  // Get a frame for rendering, if it's time to render. Returns NULL when
  // no frame is due yet.
  VideoFrame* FrameToRender();

  // Return an old frame to the recycle pool.
  WebRtc_Word32 ReturnFrame(VideoFrame* old_frame);

  // Releases all frames in both queues.
  WebRtc_Word32 ReleaseAllFrames();

  // Returns the number of ms to next frame to render.
  WebRtc_UWord32 TimeToNextFrameRelease();

  // Sets estimated delay in renderer.
  WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 render_delay);

 private:
  // 10 seconds for 30 fps.
  enum { KMaxNumberOfFrames = 300 };
  // Don't render frames with timestamp older than 500ms from now.
  enum { KOldRenderTimestampMS = 500 };
  // Don't render frames with timestamp more than 10s into the future.
  enum { KFutureRenderTimestampMS = 10000 };

  // Sorted list with frames to be rendered, oldest first.
  ListWrapper incoming_frames_;
  // Empty frames available for reuse.
  ListWrapper empty_frames_;

  // Estimated delay from a frame is released until it's rendered.
  WebRtc_UWord32 render_delay_ms_;
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT