Implement I420FrameCallbacks in Call.

BUG=2425
R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2393004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5005 4adac7df-926f-26a2-2b94-8c16560cd09d
pbos@webrtc.org 2013-10-21 10:34:43 +00:00
parent e05362916c
commit fe1ef935e7
13 changed files with 223 additions and 26 deletions
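For orientation, here is a minimal sketch (not part of the change) of how the new callbacks are meant to be consumed through the Call API, mirroring the UsesFrameCallbacks test added below. The filter class, the config variable names and the include paths are illustrative assumptions based on the files touched in this commit.

#include <string.h>

#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/video_engine/new_include/frame_callback.h"

// Hypothetical filter: zeroes the luma plane of every frame passed to it,
// which is the effect the UsesFrameCallbacks test checks end-to-end.
class LumaZeroFilter : public webrtc::I420FrameCallback {
 public:
  virtual void FrameCallback(webrtc::I420VideoFrame* frame) {
    memset(frame->buffer(webrtc::kYPlane), 0,
           frame->allocated_size(webrtc::kYPlane));
  }
};

// Wiring, assuming send_config/receive_config are the VideoSendStream::Config
// and VideoReceiveStream::Config instances used when creating the streams:
//   send_config.pre_encode_callback = &filter;     // runs just before encoding
//   receive_config.pre_render_callback = &filter;  // runs just before rendering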


@@ -22,6 +22,8 @@
namespace webrtc {
class I420FrameCallback;
class VideoEngine;
// This class declares an abstract interface for a user defined effect filter.
@@ -90,6 +92,17 @@ class WEBRTC_DLLEXPORT ViEImageProcess {
virtual int EnableColorEnhancement(const int video_channel,
const bool enable) = 0;
// New-style callbacks, used by VideoSendStream/VideoReceiveStream.
virtual void RegisterPreEncodeCallback(
int video_channel,
I420FrameCallback* pre_encode_callback) = 0;
virtual void DeRegisterPreEncodeCallback(int video_channel) = 0;
virtual void RegisterPreRenderCallback(
int video_channel,
I420FrameCallback* pre_render_callback) = 0;
virtual void DeRegisterPreRenderCallback(int video_channel) = 0;
protected:
ViEImageProcess() {}
virtual ~ViEImageProcess() {}
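Callers that talk to VideoEngine directly (rather than through the new Call API) would reach these entry points via the ViEImageProcess sub-API. A rough sketch, assuming an existing engine, channel id and I420FrameCallback implementation; the function names are placeholders and error handling is omitted:

#include "webrtc/video_engine/include/vie_image_process.h"

// Sketch: attach a frame callback to both the encode and render paths of an
// already-created channel.
void AttachFrameCallbacks(webrtc::VideoEngine* video_engine,
                          int video_channel,
                          webrtc::I420FrameCallback* callback) {
  webrtc::ViEImageProcess* image_process =
      webrtc::ViEImageProcess::GetInterface(video_engine);
  image_process->RegisterPreEncodeCallback(video_channel, callback);
  image_process->RegisterPreRenderCallback(video_channel, callback);
  image_process->Release();
}

// Sketch: detach the callbacks again before the channel is torn down.
void DetachFrameCallbacks(webrtc::VideoEngine* video_engine, int video_channel) {
  webrtc::ViEImageProcess* image_process =
      webrtc::ViEImageProcess::GetInterface(video_engine);
  image_process->DeRegisterPreEncodeCallback(video_channel);
  image_process->DeRegisterPreRenderCallback(video_channel);
  image_process->Release();
}

This mirrors what VideoSendStream and VideoReceiveStream do internally in the files below.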


@@ -19,6 +19,7 @@
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
@@ -86,21 +87,28 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
render_ = webrtc::ViERender::GetInterface(video_engine);
assert(render_ != NULL);
if (render_->AddRenderer(channel_, kVideoI420, this) != 0) {
abort();
}
render_->AddRenderer(channel_, kVideoI420, this);
image_process_ = ViEImageProcess::GetInterface(video_engine);
image_process_->RegisterPreRenderCallback(channel_,
config_.pre_render_callback);
clock_ = Clock::GetRealTimeClock();
}
VideoReceiveStream::~VideoReceiveStream() {
image_process_->DeRegisterPreRenderCallback(channel_);
render_->RemoveRenderer(channel_);
for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
external_codec_->DeRegisterExternalReceiveCodec(
channel_, config_.external_decoders[i].payload_type);
}
network_->DeregisterSendTransport(channel_);
image_process_->Release();
video_engine_base_->Release();
external_codec_->Release();
codec_->Release();
@@ -170,15 +178,8 @@ int VideoReceiveStream::DeliverFrame(uint8_t* frame,
video_frame.set_timestamp(timestamp);
video_frame.set_render_time_ms(render_time);
if (config_.post_decode_callback != NULL) {
config_.post_decode_callback->FrameCallback(&video_frame);
}
if (config_.renderer != NULL) {
// TODO(pbos): Add timing to RenderFrame call
config_.renderer->RenderFrame(video_frame,
render_time - clock_->TimeInMilliseconds());
}
config_.renderer->RenderFrame(video_frame,
render_time - clock_->TimeInMilliseconds());
return 0;
}


@@ -25,6 +25,7 @@ class VideoEngine;
class ViEBase;
class ViECodec;
class ViEExternalCodec;
class ViEImageProcess;
class ViENetwork;
class ViERender;
class ViERTP_RTCP;
@@ -66,6 +67,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
ViENetwork* network_;
ViERender* render_;
ViERTP_RTCP* rtp_rtcp_;
ViEImageProcess* image_process_;
int channel_;


@@ -19,6 +19,7 @@
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/new_include/video_send_stream.h"
@@ -190,11 +191,16 @@ VideoSendStream::VideoSendStream(newapi::Transport* transport,
video_engine_base_->RegisterCpuOveruseObserver(channel_,
overuse_observer_.get());
}
image_process_ = ViEImageProcess::GetInterface(video_engine);
image_process_->RegisterPreEncodeCallback(channel_,
config_.pre_encode_callback);
}
VideoSendStream::~VideoSendStream() {
image_process_->DeRegisterPreEncodeCallback(channel_);
network_->DeregisterSendTransport(channel_);
video_engine_base_->DeleteChannel(channel_);
capture_->DisconnectCaptureDevice(channel_);
capture_->ReleaseCaptureDevice(capture_id_);
@@ -204,6 +210,9 @@ VideoSendStream::~VideoSendStream() {
config_.codec.plType);
}
video_engine_base_->DeleteChannel(channel_);
image_process_->Release();
video_engine_base_->Release();
capture_->Release();
codec_->Release();
@@ -220,10 +229,6 @@ void VideoSendStream::PutFrame(const I420VideoFrame& frame,
I420VideoFrame frame_copy;
frame_copy.CopyFrame(frame);
if (config_.pre_encode_callback != NULL) {
config_.pre_encode_callback->FrameCallback(&frame_copy);
}
ViEVideoFrameI420 vf;
// TODO(pbos): This represents a memcpy step and is only required because


@@ -26,6 +26,7 @@ class ViECapture;
class ViECodec;
class ViEExternalCapture;
class ViEExternalCodec;
class ViEImageProcess;
class ViENetwork;
class ViERTP_RTCP;
@@ -72,6 +73,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
ViEExternalCodec* external_codec_;
ViENetwork* network_;
ViERTP_RTCP* rtp_rtcp_;
ViEImageProcess* image_process_;
int channel_;
int capture_id_;


@@ -98,7 +98,7 @@ class VideoReceiveStream {
render_delay_ms(0),
audio_channel_id(0),
pre_decode_callback(NULL),
post_decode_callback(NULL),
pre_render_callback(NULL),
target_delay_ms(0) {}
// Codecs the receive stream can receive.
std::vector<VideoCodec> codecs;
@@ -150,7 +150,7 @@ class VideoReceiveStream {
// Called for each decoded frame. E.g. used when adding effects to the decoded
// stream. 'NULL' disables the callback.
I420FrameCallback* post_decode_callback;
I420FrameCallback* pre_render_callback;
// External video decoders to be used if incoming payload type matches the
// registered type for an external decoder.


@@ -236,7 +236,7 @@ class NackObserver : public test::RtpRtcpObserver {
TEST_F(CallTest, UsesTraceCallback) {
const unsigned int kSenderTraceFilter = kTraceDebug;
const unsigned int kReceiverTraceFilter = kTraceDefault & (~kTraceDebug);
class TraceObserver: public TraceCallback {
class TraceObserver : public TraceCallback {
public:
TraceObserver(unsigned int filter)
: filter_(filter), messages_left_(50), done_(EventWrapper::Create()) {}
@@ -347,7 +347,6 @@ TEST_F(CallTest, ReceivesAndRetransmitsNack) {
CreateStreams();
CreateFrameGenerator();
StartSending();
// Wait() waits for an event triggered when NACKs have been received, NACKed
@@ -361,6 +360,103 @@ TEST_F(CallTest, ReceivesAndRetransmitsNack) {
DestroyStreams();
}
TEST_F(CallTest, UsesFrameCallbacks) {
static const int kWidth = 320;
static const int kHeight = 240;
class Renderer : public VideoRenderer {
public:
Renderer() : event_(EventWrapper::Create()) {}
virtual void RenderFrame(const I420VideoFrame& video_frame,
int /*time_to_render_ms*/) OVERRIDE {
EXPECT_EQ(0, *video_frame.buffer(kYPlane))
<< "Rendered frame should have zero luma which is applied by the "
"pre-render callback.";
event_->Set();
}
EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
scoped_ptr<EventWrapper> event_;
} renderer;
class TestFrameCallback : public I420FrameCallback {
public:
TestFrameCallback(int expected_luma_byte, int next_luma_byte)
: event_(EventWrapper::Create()),
expected_luma_byte_(expected_luma_byte),
next_luma_byte_(next_luma_byte) {}
EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
private:
virtual void FrameCallback(I420VideoFrame* frame) {
EXPECT_EQ(kWidth, frame->width())
<< "Width not as expected, callback done before resize?";
EXPECT_EQ(kHeight, frame->height())
<< "Height not as expected, callback done before resize?";
// Previous luma specified, observed luma should be fairly close.
if (expected_luma_byte_ != -1) {
EXPECT_NEAR(expected_luma_byte_, *frame->buffer(kYPlane), 10);
}
memset(frame->buffer(kYPlane),
next_luma_byte_,
frame->allocated_size(kYPlane));
event_->Set();
}
scoped_ptr<EventWrapper> event_;
int expected_luma_byte_;
int next_luma_byte_;
};
TestFrameCallback pre_encode_callback(-1, 255); // Changes luma to 255.
TestFrameCallback pre_render_callback(255, 0); // Changes luma from 255 to 0.
test::DirectTransport sender_transport, receiver_transport;
CreateCalls(Call::Config(&sender_transport),
Call::Config(&receiver_transport));
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
CreateTestConfigs();
send_config_.encoder = NULL;
send_config_.codec = sender_call_->GetVideoCodecs()[0];
send_config_.codec.width = kWidth;
send_config_.codec.height = kHeight;
send_config_.pre_encode_callback = &pre_encode_callback;
receive_config_.pre_render_callback = &pre_render_callback;
receive_config_.renderer = &renderer;
CreateStreams();
StartSending();
// Create frames that are smaller than the send width/height, this is done to
// check that the callbacks are done after processing video.
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::Create(kWidth / 2, kHeight / 2));
send_stream_->Input()->PutFrame(frame_generator->NextFrame(), 0);
EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
<< "Timed out while waiting for pre-encode callback.";
EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
EXPECT_EQ(kEventSignaled, renderer.Wait())
<< "Timed out while waiting for the frame to render.";
StopSending();
sender_transport.StopSending();
receiver_transport.StopSending();
DestroyStreams();
}
class PliObserver : public test::RtpRtcpObserver, public VideoRenderer {
static const int kInverseDropProbability = 16;
@@ -450,7 +546,6 @@ void CallTest::ReceivesPliAndRecovers(int rtp_history_ms) {
CreateStreams();
CreateFrameGenerator();
StartSending();
// Wait() waits for an event triggered when Pli has been received and frames
@@ -510,7 +605,6 @@ TEST_F(CallTest, SurvivesIncomingRtpPacketsToDestroyedReceiveStream) {
CreateStreams();
CreateFrameGenerator();
StartSending();
receiver_call_->DestroyReceiveStream(receive_stream_);


@@ -29,6 +29,7 @@
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/new_include/frame_callback.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -101,7 +102,8 @@ ViEChannel::ViEChannel(int32_t channel_id,
mtu_(0),
sender_(sender),
nack_history_size_sender_(kSendSidePacketHistorySize),
max_nack_reordering_threshold_(kMaxPacketAgeToNack) {
max_nack_reordering_threshold_(kMaxPacketAgeToNack),
pre_render_callback_(NULL) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, channel_id),
"ViEChannel::ViEChannel(channel_id: %d, engine_id: %d)",
channel_id, engine_id);
@@ -1599,6 +1601,8 @@ int32_t ViEChannel::FrameToRender(
}
// Post processing is not supported if the frame is backed by a texture.
if (video_frame.native_handle() == NULL) {
if (pre_render_callback_ != NULL)
pre_render_callback_->FrameCallback(&video_frame);
if (effect_filter_) {
unsigned int length = CalcBufferSize(kI420,
video_frame.width(),
@@ -1827,6 +1831,12 @@ int32_t ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
return 0;
}
void ViEChannel::RegisterPreRenderCallback(
I420FrameCallback* pre_render_callback) {
CriticalSectionScoped cs(callback_cs_.get());
pre_render_callback_ = pre_render_callback;
}
void ViEChannel::OnApplicationDataReceived(const int32_t id,
const uint8_t sub_type,
const uint32_t name,


@@ -34,6 +34,7 @@ class ChannelStatsObserver;
class Config;
class CriticalSectionWrapper;
class Encryption;
class I420FrameCallback;
class PacedSender;
class ProcessThread;
class RtcpRttObserver;
@@ -308,6 +309,9 @@ class ViEChannel
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
// New-style callback, used by VideoReceiveStream.
void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
protected:
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();
@@ -384,6 +388,7 @@ class ViEChannel
int nack_history_size_sender_;
int max_nack_reordering_threshold_;
I420FrameCallback* pre_render_callback_;
};
} // namespace webrtc


@@ -28,6 +28,7 @@
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/new_include/frame_callback.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -162,7 +163,8 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
has_received_rpsi_(false),
picture_id_rpsi_(0),
qm_callback_(NULL),
video_auto_muted_(false) {
video_auto_muted_(false),
pre_encode_callback_(NULL) {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(engine_id, channel_id),
"%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
@@ -650,6 +652,13 @@ void ViEEncoder::DeliverFrame(int id,
if (decimated_frame == NULL) {
decimated_frame = video_frame;
}
{
CriticalSectionScoped cs(callback_cs_.get());
if (pre_encode_callback_)
pre_encode_callback_->FrameCallback(decimated_frame);
}
#ifdef VIDEOCODEC_VP8
if (vcm_.SendCodec() == webrtc::kVideoCodecVP8) {
webrtc::CodecSpecificInfo codec_specific_info;
@@ -1152,6 +1161,17 @@ void ViEEncoder::EnableAutoMuting(int threshold_bps, int window_bps) {
vcm_.EnableAutoMuting(threshold_bps, window_bps);
}
void ViEEncoder::RegisterPreEncodeCallback(
I420FrameCallback* pre_encode_callback) {
CriticalSectionScoped cs(callback_cs_.get());
pre_encode_callback_ = pre_encode_callback;
}
void ViEEncoder::DeRegisterPreEncodeCallback() {
CriticalSectionScoped cs(callback_cs_.get());
pre_encode_callback_ = NULL;
}
QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
: vpm_(vpm) {
}


@@ -21,6 +21,7 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/new_include/frame_callback.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
@@ -167,6 +168,10 @@ class ViEEncoder
// |threshold_bps| + |window_bps|.
virtual void EnableAutoMuting(int threshold_bps, int window_bps);
// New-style callback, used by VideoSendStream.
void RegisterPreEncodeCallback(I420FrameCallback* pre_encode_callback);
void DeRegisterPreEncodeCallback();
int channel_id() const { return channel_id_; }
protected:
// Called by BitrateObserver.
@@ -178,7 +183,6 @@
bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms);
int TimeToSendPadding(int bytes);
private:
bool EncoderPaused() const;
@@ -223,6 +227,7 @@ class ViEEncoder
// Quality modes callback
QMVideoSettingsCallback* qm_callback_;
bool video_auto_muted_;
I420FrameCallback* pre_encode_callback_;
};
} // namespace webrtc


@@ -269,4 +269,35 @@ int ViEImageProcessImpl::EnableColorEnhancement(const int video_channel,
return 0;
}
void ViEImageProcessImpl::RegisterPreEncodeCallback(
int video_channel,
I420FrameCallback* pre_encode_callback) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
vie_encoder->RegisterPreEncodeCallback(pre_encode_callback);
}
void ViEImageProcessImpl::DeRegisterPreEncodeCallback(int video_channel) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
assert(vie_encoder != NULL);
vie_encoder->DeRegisterPreEncodeCallback();
}
void ViEImageProcessImpl::RegisterPreRenderCallback(
int video_channel,
I420FrameCallback* pre_render_callback) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
vie_channel->RegisterPreRenderCallback(pre_render_callback);
}
void ViEImageProcessImpl::DeRegisterPreRenderCallback(int video_channel) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
vie_channel->RegisterPreRenderCallback(NULL);
}
} // namespace webrtc


@@ -38,6 +38,15 @@ class ViEImageProcessImpl
virtual int EnableDenoising(const int capture_id, const bool enable);
virtual int EnableColorEnhancement(const int video_channel,
const bool enable);
virtual void RegisterPreEncodeCallback(
int video_channel,
I420FrameCallback* pre_encode_callback) OVERRIDE;
virtual void DeRegisterPreEncodeCallback(int video_channel) OVERRIDE;
virtual void RegisterPreRenderCallback(
int video_channel,
I420FrameCallback* pre_render_callback) OVERRIDE;
virtual void DeRegisterPreRenderCallback(int video_channel) OVERRIDE;
protected:
explicit ViEImageProcessImpl(ViESharedData* shared_data);