Refactored IncomingVideoStream and VideoRenderFrame, to get code in better shape when hunting BUG=481.

BUG=481
TEST=Compiles on all platforms and autotest passes.

Review URL: https://webrtc-codereview.appspot.com/608005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2323 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2012-05-30 10:45:18 +00:00
parent 9259e7bd03
commit 327ada1cb0
4 changed files with 550 additions and 678 deletions

View File

@ -8,20 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "incoming_video_stream.h"
#include "critical_section_wrapper.h"
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "video_render_frames.h"
#include "tick_util.h"
#include "map_wrapper.h"
#include "common_video/libyuv/include/libyuv.h"
#include "modules/video_render/main/source/incoming_video_stream.h"
#include <cassert>
// Platform specifics
#if defined(_WIN32)
#include <windows.h>
#elif defined(WEBRTC_LINUX)
@ -31,383 +21,342 @@
#include <sys/time.h>
#endif
#include "common_video/libyuv/include/libyuv.h"
#include "modules/video_render/main/source/video_render_frames.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/event_wrapper.h"
#include "system_wrappers/interface/map_wrapper.h"
#include "system_wrappers/interface/thread_wrapper.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
namespace webrtc {
IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId) :
_moduleId(moduleId),
_streamId(streamId),
_streamCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_threadCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_bufferCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrIncomingRenderThread(),
_deliverBufferEvent(*EventWrapper::Create()),
_running(false),
_ptrExternalCallback(NULL),
_ptrRenderCallback(NULL),
_renderBuffers(*(new VideoRenderFrames)),
_callbackVideoType(kVideoI420),
_callbackWidth(0),
_callbackHeight(0),
_incomingRate(0),
_lastRateCalculationTimeMs(0),
_numFramesSinceLastCalculation(0),
_lastRenderedFrame(),
_tempFrame(),
_startImage(),
_timeoutImage(),
_timeoutTime(),
_mirrorFramesEnabled(false),
_mirroring(),
_transformedVideoFrame()
{
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
"%s created for stream %d", __FUNCTION__, streamId);
IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 module_id,
const WebRtc_UWord32 stream_id)
: module_id_(module_id),
stream_id_(stream_id),
stream_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
thread_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
buffer_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
incoming_render_thread_(),
deliver_buffer_event_(*EventWrapper::Create()),
running_(false),
external_callback_(NULL),
render_callback_(NULL),
render_buffers_(*(new VideoRenderFrames)),
callbackVideoType_(kVideoI420),
callbackWidth_(0),
callbackHeight_(0),
incoming_rate_(0),
last_rate_calculation_time_ms_(0),
num_frames_since_last_calculation_(0),
last_rendered_frame_(),
temp_frame_(),
start_image_(),
timeout_image_(),
timeout_time_(),
mirror_frames_enabled_(false),
mirroring_(),
transformed_video_frame_() {
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
"%s created for stream %d", __FUNCTION__, stream_id);
}
IncomingVideoStream::~IncomingVideoStream()
{
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
"%s deleted for stream %d", __FUNCTION__, _streamId);
IncomingVideoStream::~IncomingVideoStream() {
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
"%s deleted for stream %d", __FUNCTION__, stream_id_);
Stop();
// _ptrIncomingRenderThread - Delete in stop
delete &_renderBuffers;
delete &_streamCritsect;
delete &_bufferCritsect;
delete &_threadCritsect;
delete &_deliverBufferEvent;
// incoming_render_thread_ - Delete in stop
delete &render_buffers_;
delete &stream_critsect_;
delete &buffer_critsect_;
delete &thread_critsect_;
delete &deliver_buffer_event_;
}
WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(&_streamCritsect);
_moduleId = id;
WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id) {
CriticalSectionScoped cs(&stream_critsect_);
module_id_ = id;
return 0;
}
VideoRenderCallback*
IncomingVideoStream::ModuleCallback()
{
CriticalSectionScoped cs(&_streamCritsect);
VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
CriticalSectionScoped cs(&stream_critsect_);
return this;
}
WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame)
{
WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id,
VideoFrame& video_frame) {
CriticalSectionScoped csS(&stream_critsect_);
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
video_frame.RenderTimeMs());
CriticalSectionScoped csS(&_streamCritsect);
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
"%s for stream %d, render time: %u", __FUNCTION__, _streamId,
videoFrame.RenderTimeMs());
if (!_running)
{
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
if (!running_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: Not running", __FUNCTION__);
return -1;
}
if (true == _mirrorFramesEnabled)
{
_transformedVideoFrame.VerifyAndAllocate(videoFrame.Length());
if (_mirroring.mirrorXAxis)
{
MirrorI420UpDown(videoFrame.Buffer(),
_transformedVideoFrame.Buffer(),
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
if (true == mirror_frames_enabled_) {
transformed_video_frame_.VerifyAndAllocate(video_frame.Length());
if (mirroring_.mirror_x_axis) {
MirrorI420UpDown(video_frame.Buffer(),
transformed_video_frame_.Buffer(),
video_frame.Width(), video_frame.Height());
transformed_video_frame_.SetLength(video_frame.Length());
transformed_video_frame_.SetWidth(video_frame.Width());
transformed_video_frame_.SetHeight(video_frame.Height());
video_frame.SwapFrame(transformed_video_frame_);
}
if (_mirroring.mirrorYAxis)
{
MirrorI420LeftRight(videoFrame.Buffer(),
_transformedVideoFrame.Buffer(),
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
if (mirroring_.mirror_y_axis) {
MirrorI420LeftRight(video_frame.Buffer(),
transformed_video_frame_.Buffer(),
video_frame.Width(), video_frame.Height());
transformed_video_frame_.SetLength(video_frame.Length());
transformed_video_frame_.SetWidth(video_frame.Width());
transformed_video_frame_.SetHeight(video_frame.Height());
video_frame.SwapFrame(transformed_video_frame_);
}
}
// Rate statistics
_numFramesSinceLastCalculation++;
WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp();
if (nowMs >= _lastRateCalculationTimeMs + KFrameRatePeriodMs)
{
_incomingRate = (WebRtc_UWord32) (1000 * _numFramesSinceLastCalculation
/ (nowMs - _lastRateCalculationTimeMs));
_numFramesSinceLastCalculation = 0;
_lastRateCalculationTimeMs = nowMs;
// Rate statistics.
num_frames_since_last_calculation_++;
WebRtc_Word64 now_ms = TickTime::MillisecondTimestamp();
if (now_ms >= last_rate_calculation_time_ms_ + KFrameRatePeriodMs) {
incoming_rate_ =
static_cast<WebRtc_UWord32>(1000 * num_frames_since_last_calculation_ /
(now_ms - last_rate_calculation_time_ms_));
num_frames_since_last_calculation_ = 0;
last_rate_calculation_time_ms_ = now_ms;
}
// Insert frame
CriticalSectionScoped csB(&_bufferCritsect);
if (_renderBuffers.AddFrame(&videoFrame) == 1)
_deliverBufferEvent.Set();
// Insert frame.
CriticalSectionScoped csB(&buffer_critsect_);
if (render_buffers_.AddFrame(&video_frame) == 1)
deliver_buffer_event_.Set();
return 0;
}
WebRtc_Word32 IncomingVideoStream::SetStartImage(const VideoFrame& videoFrame)
{
CriticalSectionScoped csS(&_threadCritsect);
return _startImage.CopyFrame(videoFrame);
WebRtc_Word32 IncomingVideoStream::SetStartImage(
const VideoFrame& video_frame) {
CriticalSectionScoped csS(&thread_critsect_);
return start_image_.CopyFrame(video_frame);
}
WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(const VideoFrame& videoFrame,
const WebRtc_UWord32 timeout)
{
CriticalSectionScoped csS(&_threadCritsect);
_timeoutTime = timeout;
return _timeoutImage.CopyFrame(videoFrame);
WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(
const VideoFrame& video_frame, const WebRtc_UWord32 timeout) {
CriticalSectionScoped csS(&thread_critsect_);
timeout_time_ = timeout;
return timeout_image_.CopyFrame(video_frame);
}
WebRtc_Word32 IncomingVideoStream::SetRenderCallback(VideoRenderCallback* renderCallback)
{
CriticalSectionScoped cs(&_streamCritsect);
WebRtc_Word32 IncomingVideoStream::SetRenderCallback(
VideoRenderCallback* render_callback) {
CriticalSectionScoped cs(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s(%x) for stream %d", __FUNCTION__, renderCallback,
_streamId);
_ptrRenderCallback = renderCallback;
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s(%x) for stream %d", __FUNCTION__, render_callback,
stream_id_);
render_callback_ = render_callback;
return 0;
}
WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis)
{
CriticalSectionScoped cs(&_streamCritsect);
_mirrorFramesEnabled = enable;
_mirroring.mirrorXAxis = mirrorXAxis;
_mirroring.mirrorYAxis = mirrorYAxis;
const bool mirror_x_axis,
const bool mirror_y_axis) {
CriticalSectionScoped cs(&stream_critsect_);
mirror_frames_enabled_ = enable;
mirroring_.mirror_x_axis = mirror_x_axis;
mirroring_.mirror_y_axis = mirror_y_axis;
return 0;
}
WebRtc_Word32 IncomingVideoStream::SetExternalCallback(VideoRenderCallback* externalCallback)
{
CriticalSectionScoped cs(&_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s(%x) for stream %d", __FUNCTION__, externalCallback,
_streamId);
_ptrExternalCallback = externalCallback;
_callbackVideoType = kVideoI420;
_callbackWidth = 0;
_callbackHeight = 0;
WebRtc_Word32 IncomingVideoStream::SetExternalCallback(
VideoRenderCallback* external_callback) {
CriticalSectionScoped cs(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s(%x) for stream %d", __FUNCTION__, external_callback,
stream_id_);
external_callback_ = external_callback;
callbackVideoType_ = kVideoI420;
callbackWidth_ = 0;
callbackHeight_ = 0;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Start()
{
CriticalSectionScoped csS(&_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s for stream %d", __FUNCTION__, _streamId);
if (_running)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
WebRtc_Word32 IncomingVideoStream::Start() {
CriticalSectionScoped csS(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s for stream %d", __FUNCTION__, stream_id_);
if (running_) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
"%s: Already running", __FUNCTION__);
return 0;
}
CriticalSectionScoped csT(&_threadCritsect);
assert(_ptrIncomingRenderThread == NULL);
CriticalSectionScoped csT(&thread_critsect_);
assert(incoming_render_thread_ == NULL);
_ptrIncomingRenderThread
= ThreadWrapper::CreateThread(IncomingVideoStreamThreadFun, this,
kRealtimePriority,
incoming_render_thread_ = ThreadWrapper::CreateThread(
IncomingVideoStreamThreadFun, this, kRealtimePriority,
"IncomingVideoStreamThread");
if (!_ptrIncomingRenderThread)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
if (!incoming_render_thread_) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_ptrIncomingRenderThread->Start(tId))
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s: thread started: %u", __FUNCTION__, tId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
unsigned int t_id = 0;
if (incoming_render_thread_->Start(t_id)) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s: thread started: %u", __FUNCTION__, t_id);
} else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
_deliverBufferEvent.StartTimer(false, KEventStartupTimeMS);
deliver_buffer_event_.StartTimer(false, KEventStartupTimeMS);
_running = true;
running_ = true;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Stop()
{
CriticalSectionScoped csStream(&_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s for stream %d", __FUNCTION__, _streamId);
WebRtc_Word32 IncomingVideoStream::Stop() {
CriticalSectionScoped cs_stream(&stream_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
"%s for stream %d", __FUNCTION__, stream_id_);
if (!_running)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
if (!running_) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
"%s: Not running", __FUNCTION__);
return 0;
}
_threadCritsect.Enter();
if (_ptrIncomingRenderThread)
{
ThreadWrapper* ptrThread = _ptrIncomingRenderThread;
_ptrIncomingRenderThread = NULL;
ptrThread->SetNotAlive();
#ifndef _WIN32
_deliverBufferEvent.StopTimer();
thread_critsect_.Enter();
if (incoming_render_thread_) {
ThreadWrapper* thread = incoming_render_thread_;
incoming_render_thread_ = NULL;
thread->SetNotAlive();
#ifndef _WIN32
deliver_buffer_event_.StopTimer();
#endif
_threadCritsect.Leave();
if (ptrThread->Stop())
{
delete ptrThread;
}
else
{
thread_critsect_.Leave();
if (thread->Stop()) {
delete thread;
} else {
assert(false);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
"%s: Not able to stop thread, leaking", __FUNCTION__);
}
} else {
thread_critsect_.Leave();
}
else
{
_threadCritsect.Leave();
}
_running = false;
running_ = false;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Reset()
{
CriticalSectionScoped csStream(&_streamCritsect);
CriticalSectionScoped csBuffer(&_bufferCritsect);
_renderBuffers.ReleaseAllFrames();
WebRtc_Word32 IncomingVideoStream::Reset() {
CriticalSectionScoped cs_stream(&stream_critsect_);
CriticalSectionScoped cs_buffer(&buffer_critsect_);
render_buffers_.ReleaseAllFrames();
return 0;
}
WebRtc_UWord32 IncomingVideoStream::StreamId() const
{
CriticalSectionScoped csStream(&_streamCritsect);
return _streamId;
WebRtc_UWord32 IncomingVideoStream::StreamId() const {
CriticalSectionScoped cs_stream(&stream_critsect_);
return stream_id_;
}
WebRtc_UWord32 IncomingVideoStream::IncomingRate() const
{
CriticalSectionScoped cs(&_streamCritsect);
return _incomingRate;
WebRtc_UWord32 IncomingVideoStream::IncomingRate() const {
CriticalSectionScoped cs(&stream_critsect_);
return incoming_rate_;
}
bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj)
{
bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj) {
return static_cast<IncomingVideoStream*>(obj)->IncomingVideoStreamProcess();
}
bool IncomingVideoStream::IncomingVideoStreamProcess()
{
if (kEventError != _deliverBufferEvent.Wait(KEventMaxWaitTimeMs))
{
if (_ptrIncomingRenderThread == NULL)
{
bool IncomingVideoStream::IncomingVideoStreamProcess() {
if (kEventError != deliver_buffer_event_.Wait(KEventMaxWaitTimeMs)) {
if (incoming_render_thread_ == NULL) {
// Terminating
return false;
}
_threadCritsect.Enter();
VideoFrame* ptrFrameToRender = NULL;
thread_critsect_.Enter();
VideoFrame* frame_to_render = NULL;
// Get a new frame to render and the time for the frame after this one.
_bufferCritsect.Enter();
ptrFrameToRender = _renderBuffers.FrameToRender();
WebRtc_UWord32 waitTime = _renderBuffers.TimeToNextFrameRelease();
_bufferCritsect.Leave();
buffer_critsect_.Enter();
frame_to_render = render_buffers_.FrameToRender();
WebRtc_UWord32 wait_time = render_buffers_.TimeToNextFrameRelease();
buffer_critsect_.Leave();
// Set timer for next frame to render
if (waitTime > KEventMaxWaitTimeMs)
{
waitTime = KEventMaxWaitTimeMs;
// Set timer for next frame to render.
if (wait_time > KEventMaxWaitTimeMs) {
wait_time = KEventMaxWaitTimeMs;
}
_deliverBufferEvent.StartTimer(false, waitTime);
deliver_buffer_event_.StartTimer(false, wait_time);
if (!ptrFrameToRender)
{
if (_ptrRenderCallback)
{
if (_lastRenderedFrame.RenderTimeMs() == 0
&& _startImage.Size()) // And we have not rendered anything and have a start image
{
_tempFrame.CopyFrame(_startImage);// Copy the startimage if the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
}
else if (_timeoutImage.Size()
&& _lastRenderedFrame.RenderTimeMs() + _timeoutTime
< TickTime::MillisecondTimestamp()) // We have rendered something a long time ago and have a timeout image
{
_tempFrame.CopyFrame(_timeoutImage); // Copy the timeoutImage if the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
if (!frame_to_render) {
if (render_callback_) {
if (last_rendered_frame_.RenderTimeMs() == 0 &&
start_image_.Size()) {
// We have not rendered anything and have a start image.
temp_frame_.CopyFrame(start_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
} else if (timeout_image_.Size() &&
last_rendered_frame_.RenderTimeMs() + timeout_time_ <
TickTime::MillisecondTimestamp()) {
// Render a timeout image.
temp_frame_.CopyFrame(timeout_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
}
}
// No frame
_threadCritsect.Leave();
// No frame.
thread_critsect_.Leave();
return true;
}
// Send frame for rendering
if (_ptrExternalCallback)
{
WEBRTC_TRACE(kTraceStream,
kTraceVideoRenderer,
_moduleId,
// Send frame for rendering.
if (external_callback_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: executing external renderer callback to deliver frame, render time: %u",
__FUNCTION__, ptrFrameToRender->RenderTimeMs());
_ptrExternalCallback->RenderFrame(_streamId, *ptrFrameToRender);
}
else
{
if (_ptrRenderCallback)
{
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
__FUNCTION__, frame_to_render->RenderTimeMs());
external_callback_->RenderFrame(stream_id_, *frame_to_render);
} else {
if (render_callback_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: Render frame, time: %u", __FUNCTION__,
ptrFrameToRender->RenderTimeMs());
_ptrRenderCallback->RenderFrame(_streamId, *ptrFrameToRender);
frame_to_render->RenderTimeMs());
render_callback_->RenderFrame(stream_id_, *frame_to_render);
}
}
// Release critsect before calling the module user
_threadCritsect.Leave();
// Release critsect before calling the module user.
thread_critsect_.Leave();
// We're done with this frame, delete it.
if (ptrFrameToRender)
{
CriticalSectionScoped cs(&_bufferCritsect);
_lastRenderedFrame.SwapFrame(*ptrFrameToRender);
_renderBuffers.ReturnFrame(ptrFrameToRender);
if (frame_to_render) {
CriticalSectionScoped cs(&buffer_critsect_);
last_rendered_frame_.SwapFrame(*frame_to_render);
render_buffers_.ReturnFrame(frame_to_render);
}
}
return true;
}
WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(VideoFrame& videoFrame) const
{
CriticalSectionScoped cs(&_bufferCritsect);
return videoFrame.CopyFrame(_lastRenderedFrame);
WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(
VideoFrame& video_frame) const {
CriticalSectionScoped cs(&buffer_critsect_);
return video_frame.CopyFrame(last_rendered_frame_);
}
} // namespace webrtc

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#include "video_render.h"
#include "map_wrapper.h"
#include "modules/video_render/main/interface/video_render.h"
#include "system_wrappers/interface/map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
@ -21,118 +21,92 @@ class ThreadWrapper;
class VideoRenderCallback;
class VideoRenderFrames;
struct VideoMirroring
{
bool mirrorXAxis;
bool mirrorYAxis;
VideoMirroring() :
mirrorXAxis(false), mirrorYAxis(false)
{
}
struct VideoMirroring {
VideoMirroring() : mirror_x_axis(false), mirror_y_axis(false) {}
bool mirror_x_axis;
bool mirror_y_axis;
};
// Class definitions
class IncomingVideoStream: public VideoRenderCallback
{
class IncomingVideoStream : public VideoRenderCallback {
public:
/*
* VideoRenderer constructor/destructor
*/
IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId);
IncomingVideoStream(const WebRtc_Word32 module_id,
const WebRtc_UWord32 stream_id);
~IncomingVideoStream();
WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id);
// Get callbck to deliver frames to the module
// Get callback to deliver frames to the module.
VideoRenderCallback* ModuleCallback();
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
VideoFrame& video_frame);
// Set callback to the platform dependant code
WebRtc_Word32 SetRenderCallback(VideoRenderCallback* renderCallback);
// Set callback to the platform dependent code.
WebRtc_Word32 SetRenderCallback(VideoRenderCallback* render_callback);
// Callback for file recording, snapshot, ...
WebRtc_Word32 SetExternalCallback(VideoRenderCallback* renderObject);
WebRtc_Word32 SetExternalCallback(VideoRenderCallback* render_object);
/*
* Start/Stop
*/
// Start/Stop.
WebRtc_Word32 Start();
WebRtc_Word32 Stop();
// Clear all buffers
// Clear all buffers.
WebRtc_Word32 Reset();
/*
* Properties
*/
// Properties.
WebRtc_UWord32 StreamId() const;
WebRtc_UWord32 IncomingRate() const;
/*
*
*/
WebRtc_Word32 GetLastRenderedFrame(VideoFrame& videoFrame) const;
WebRtc_Word32 GetLastRenderedFrame(VideoFrame& video_frame) const;
WebRtc_Word32 SetStartImage(const VideoFrame& videoFrame);
WebRtc_Word32 SetStartImage(const VideoFrame& video_frame);
WebRtc_Word32 SetTimeoutImage(const VideoFrame& videoFrame,
WebRtc_Word32 SetTimeoutImage(const VideoFrame& video_frame,
const WebRtc_UWord32 timeout);
WebRtc_Word32 EnableMirroring(const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis);
const bool mirror_xaxis,
const bool mirror_yaxis);
protected:
static bool IncomingVideoStreamThreadFun(void* obj);
bool IncomingVideoStreamProcess();
private:
enum { KEventStartupTimeMS = 10 };
enum { KEventMaxWaitTimeMs = 100 };
enum { KFrameRatePeriodMs = 1000 };
// Enums
enum
{
KEventStartupTimeMS = 10
};
enum
{
KEventMaxWaitTimeMs = 100
};
enum
{
KFrameRatePeriodMs = 1000
};
WebRtc_Word32 module_id_;
WebRtc_UWord32 stream_id_;
// Critsects in allowed to enter order.
CriticalSectionWrapper& stream_critsect_;
CriticalSectionWrapper& thread_critsect_;
CriticalSectionWrapper& buffer_critsect_;
ThreadWrapper* incoming_render_thread_;
EventWrapper& deliver_buffer_event_;
bool running_;
WebRtc_Word32 _moduleId;
WebRtc_UWord32 _streamId;
CriticalSectionWrapper& _streamCritsect; // Critsects in allowed to enter order
CriticalSectionWrapper& _threadCritsect;
CriticalSectionWrapper& _bufferCritsect;
ThreadWrapper* _ptrIncomingRenderThread;
EventWrapper& _deliverBufferEvent;
bool _running;
VideoRenderCallback* external_callback_;
VideoRenderCallback* render_callback_;
VideoRenderFrames& render_buffers_;
VideoRenderCallback* _ptrExternalCallback;
VideoRenderCallback* _ptrRenderCallback;
VideoRenderFrames& _renderBuffers;
RawVideoType callbackVideoType_;
WebRtc_UWord32 callbackWidth_;
WebRtc_UWord32 callbackHeight_;
RawVideoType _callbackVideoType;
WebRtc_UWord32 _callbackWidth;
WebRtc_UWord32 _callbackHeight;
WebRtc_UWord32 incoming_rate_;
WebRtc_Word64 last_rate_calculation_time_ms_;
WebRtc_UWord16 num_frames_since_last_calculation_;
VideoFrame last_rendered_frame_;
VideoFrame temp_frame_;
VideoFrame start_image_;
VideoFrame timeout_image_;
WebRtc_UWord32 timeout_time_;
WebRtc_UWord32 _incomingRate;
WebRtc_Word64 _lastRateCalculationTimeMs;
WebRtc_UWord16 _numFramesSinceLastCalculation;
VideoFrame _lastRenderedFrame;
VideoFrame _tempFrame;
VideoFrame _startImage;
VideoFrame _timeoutImage;
WebRtc_UWord32 _timeoutTime;
bool _mirrorFramesEnabled;
VideoMirroring _mirroring;
VideoFrame _transformedVideoFrame;
bool mirror_frames_enabled_;
VideoMirroring mirroring_;
VideoFrame transformed_video_frame_;
};
} // namespace webrtc

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -8,202 +8,167 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_frames.h"
#include "module_common_types.h"
#include "tick_util.h"
#include "trace.h"
#include "modules/video_render/main/source/video_render_frames.h"
#include <cassert>
#include "modules/interface/module_common_types.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
namespace webrtc {
VideoRenderFrames::VideoRenderFrames() :
_incomingFrames(), _renderDelayMs(10)
{
WebRtc_Word32 KEventMaxWaitTimeMs = 200;
VideoRenderFrames::VideoRenderFrames()
: incoming_frames_(),
render_delay_ms_(10) {
}
VideoRenderFrames::~VideoRenderFrames()
{
VideoRenderFrames::~VideoRenderFrames() {
ReleaseAllFrames();
}
WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* ptrNewFrame)
{
const WebRtc_Word64 timeNow = TickTime::MillisecondTimestamp();
WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) {
const WebRtc_Word64 time_now = TickTime::MillisecondTimestamp();
if (ptrNewFrame->RenderTimeMs() + KOldRenderTimestampMS < timeNow)
{
if (new_frame->RenderTimeMs() + KOldRenderTimestampMS < time_now) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: too old frame.", __FUNCTION__);
return -1;
}
if (ptrNewFrame->RenderTimeMs() > timeNow + KFutureRenderTimestampMS)
{
if (new_frame->RenderTimeMs() > time_now + KFutureRenderTimestampMS) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future.", __FUNCTION__);
return -1;
}
// Get an empty frame
VideoFrame* ptrFrameToAdd = NULL;
if (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
ptrFrameToAdd = static_cast<VideoFrame*> (item->GetItem());
_emptyFrames.Erase(item);
VideoFrame* frame_to_add = NULL;
if (!empty_frames_.Empty()) {
ListItem* item = empty_frames_.First();
if (item) {
frame_to_add = static_cast<VideoFrame*>(item->GetItem());
empty_frames_.Erase(item);
}
}
if (!ptrFrameToAdd)
{
if (_emptyFrames.GetSize() + _incomingFrames.GetSize()
> KMaxNumberOfFrames)
{
// Already allocated too many frames...
if (!frame_to_add) {
if (empty_frames_.GetSize() + incoming_frames_.GetSize() >
KMaxNumberOfFrames) {
// Already allocated too many frames.
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
-1, "%s: too many frames, limit: %d", __FUNCTION__,
KMaxNumberOfFrames);
return -1;
}
// Allocate new memory
// Allocate new memory.
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
_emptyFrames.GetSize() + _incomingFrames.GetSize());
empty_frames_.GetSize() + incoming_frames_.GetSize());
ptrFrameToAdd = new VideoFrame();
if (!ptrFrameToAdd)
{
frame_to_add = new VideoFrame();
if (!frame_to_add) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not create new frame for", __FUNCTION__);
return -1;
}
}
ptrFrameToAdd->VerifyAndAllocate(ptrNewFrame->Length());
ptrFrameToAdd->SwapFrame(const_cast<VideoFrame&> (*ptrNewFrame)); //remove const ness. Copying will be costly.
_incomingFrames.PushBack(ptrFrameToAdd);
frame_to_add->VerifyAndAllocate(new_frame->Length());
// TODO(mflodman) Change this!
// Remove const ness. Copying will be costly.
frame_to_add->SwapFrame(const_cast<VideoFrame&>(*new_frame));
incoming_frames_.PushBack(frame_to_add);
return _incomingFrames.GetSize();
return incoming_frames_.GetSize();
}
VideoFrame*
VideoRenderFrames::FrameToRender()
{
VideoFrame* ptrRenderFrame = NULL;
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrOldestFrameInList =
VideoFrame* VideoRenderFrames::FrameToRender() {
VideoFrame* render_frame = NULL;
while (!incoming_frames_.Empty()) {
ListItem* item = incoming_frames_.First();
if (item) {
VideoFrame* oldest_frame_in_list =
static_cast<VideoFrame*>(item->GetItem());
if (ptrOldestFrameInList->RenderTimeMs()
<= TickTime::MillisecondTimestamp() + _renderDelayMs)
{
// This is the oldest one so far and it's ok to render
if (ptrRenderFrame)
{
if (oldest_frame_in_list->RenderTimeMs() <=
TickTime::MillisecondTimestamp() + render_delay_ms_) {
// This is the oldest one so far and it's OK to render.
if (render_frame) {
// This one is older than the newly found frame, remove this one.
ptrRenderFrame->SetWidth(0);
ptrRenderFrame->SetHeight(0);
ptrRenderFrame->SetLength(0);
ptrRenderFrame->SetRenderTime(0);
ptrRenderFrame->SetTimeStamp(0);
_emptyFrames.PushFront(ptrRenderFrame);
render_frame->SetWidth(0);
render_frame->SetHeight(0);
render_frame->SetLength(0);
render_frame->SetRenderTime(0);
render_frame->SetTimeStamp(0);
empty_frames_.PushFront(render_frame);
}
ptrRenderFrame = ptrOldestFrameInList;
_incomingFrames.Erase(item);
}
else
{
render_frame = oldest_frame_in_list;
incoming_frames_.Erase(item);
} else {
// We can't release this one yet, we're done here.
break;
}
}
else
{
} else {
assert(false);
}
}
return ptrRenderFrame;
return render_frame;
}
WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* ptrOldFrame)
{
ptrOldFrame->SetWidth(0);
ptrOldFrame->SetHeight(0);
ptrOldFrame->SetRenderTime(0);
ptrOldFrame->SetLength(0);
_emptyFrames.PushBack(ptrOldFrame);
WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* old_frame) {
old_frame->SetWidth(0);
old_frame->SetHeight(0);
old_frame->SetRenderTime(0);
old_frame->SetLength(0);
empty_frames_.PushBack(old_frame);
return 0;
}
WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames()
{
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames() {
while (!incoming_frames_.Empty()) {
ListItem* item = incoming_frames_.First();
if (item) {
VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
assert(frame != NULL);
frame->Free();
delete frame;
}
_incomingFrames.Erase(item);
incoming_frames_.Erase(item);
}
while (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
while (!empty_frames_.Empty()) {
ListItem* item = empty_frames_.First();
if (item) {
VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
assert(frame != NULL);
frame->Free();
delete frame;
}
_emptyFrames.Erase(item);
empty_frames_.Erase(item);
}
return 0;
}
WebRtc_Word32 KEventMaxWaitTimeMs = 200;
WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease()
{
WebRtc_Word64 timeToRelease = 0;
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* oldestFrame =
static_cast<VideoFrame*> (item->GetItem());
timeToRelease = oldestFrame->RenderTimeMs() - _renderDelayMs
WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease() {
WebRtc_Word64 time_to_release = 0;
ListItem* item = incoming_frames_.First();
if (item) {
VideoFrame* oldest_frame = static_cast<VideoFrame*>(item->GetItem());
time_to_release = oldest_frame->RenderTimeMs() - render_delay_ms_
- TickTime::MillisecondTimestamp();
if (timeToRelease < 0)
{
timeToRelease = 0;
if (time_to_release < 0) {
time_to_release = 0;
}
} else {
time_to_release = KEventMaxWaitTimeMs;
}
else
{
timeToRelease = KEventMaxWaitTimeMs;
return static_cast<WebRtc_UWord32>(time_to_release);
}
return (WebRtc_UWord32) timeToRelease;
}
//
WebRtc_Word32 VideoRenderFrames::SetRenderDelay(
const WebRtc_UWord32 renderDelay)
{
_renderDelayMs = renderDelay;
const WebRtc_UWord32 render_delay) {
render_delay_ms_ = render_delay;
return 0;
}
} // namespace webrtc

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -8,71 +8,55 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
#include "list_wrapper.h"
#include "video_render.h"
#include "modules/video_render/main/interface/video_render.h"
#include "system_wrappers/interface/list_wrapper.h"
namespace webrtc {
// Class definitions
class VideoRenderFrames
{
class VideoRenderFrames {
public:
VideoRenderFrames();
~VideoRenderFrames();
/*
* Add a frame to the render queue
*/
WebRtc_Word32 AddFrame(VideoFrame* ptrNewFrame);
// Add a frame to the render queue
WebRtc_Word32 AddFrame(VideoFrame* new_frame);
/*
* Get a frame for rendering, if it's time to render.
*/
// Get a frame for rendering, if it's time to render.
VideoFrame* FrameToRender();
/*
* Return an old frame
*/
WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
// Return an old frame
WebRtc_Word32 ReturnFrame(VideoFrame* old_frame);
/*
* Releases all frames
*/
// Releases all frames
WebRtc_Word32 ReleaseAllFrames();
/*
* Returns the number of ms to next frame to render
*/
// Returns the number of ms to next frame to render
WebRtc_UWord32 TimeToNextFrameRelease();
/*
* Sets estimates delay in renderer
*/
WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 renderDelay);
// Sets estimates delay in renderer
WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 render_delay);
private:
enum
{
KMaxNumberOfFrames = 300
}; // 10 seconds for 30 fps.
enum
{
KOldRenderTimestampMS = 500
}; //Don't render frames with timestamp older than 500ms from now.
enum
{
KFutureRenderTimestampMS = 10000
}; //Don't render frames with timestamp more than 10s into the future.
// 10 seconds for 30 fps.
enum { KMaxNumberOfFrames = 300 };
// Don't render frames with timestamp older than 500ms from now.
enum { KOldRenderTimestampMS = 500 };
// Don't render frames with timestamp more than 10s into the future.
enum { KFutureRenderTimestampMS = 10000 };
ListWrapper _incomingFrames; // Sorted oldest video frame first
ListWrapper _emptyFrames; // Empty frames
// Sorted list with framed to be rendered, oldest first.
ListWrapper incoming_frames_;
// Empty frames.
ListWrapper empty_frames_;
WebRtc_UWord32 _renderDelayMs; // Set render delay
// Estimated delay from a frame is released until it's rendered.
WebRtc_UWord32 render_delay_ms_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT