Run FullStack tests without render windows.

Also disables the tests on valgrind platforms, since valgrind has no chance of keeping up.

BUG=2278
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2159008

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4972 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pbos@webrtc.org
Date:   2013-10-16 11:05:37 +00:00
Parent: 5ed4f46af1
Commit: 9401524211

3 changed files with 164 additions and 52 deletions

View File

@@ -0,0 +1,2 @@
+# Tests that are too slow.
+FullStack/*
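For context: a FullStack/* entry in a gtest exclude list is typically translated by the valgrind test wrappers into a negative --gtest_filter, so every test whose full name starts with "FullStack/" is skipped. A minimal illustrative runner applying the same exclusion directly through the standard googletest API (not part of this change; names are placeholders):

// Illustration only: skip every parameterized FullStack test, mirroring
// the FullStack/* exclude entry above.
#include "gtest/gtest.h"

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // A leading '-' turns the pattern into an exclusion; '*' matches the rest
  // of the test name (e.g. FullStack/FullStackTest.NoPacketLoss/0).
  ::testing::GTEST_FLAG(filter) = "-FullStack/*";
  return RUN_ALL_TESTS();
}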

View File

@@ -82,6 +82,11 @@ FrameGeneratorCapturer::~FrameGeneratorCapturer() {
 }
 
 bool FrameGeneratorCapturer::Init() {
+  // This check is added because frame_generator_ might be file based and should
+  // not crash because a file moved.
+  if (frame_generator_.get() == NULL)
+    return false;
+
   if (!tick_->StartTimer(true, 1000 / target_fps_))
     return false;
   thread_.reset(ThreadWrapper::CreateThread(FrameGeneratorCapturer::Run,

View File

@@ -21,6 +21,7 @@
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
 #include "webrtc/test/testsupport/fileutils.h"
 #include "webrtc/typedefs.h"
 #include "webrtc/video_engine/new_include/call.h"
@@ -69,16 +70,15 @@ class VideoAnalyzer : public PacketReceiver,
  public:
   VideoAnalyzer(VideoSendStreamInput* input,
                 Transport* transport,
-                VideoRenderer* loopback_video,
                 const char* test_label,
                 double avg_psnr_threshold,
                 double avg_ssim_threshold,
-                uint64_t duration_frames)
+                int duration_frames)
       : input_(input),
         transport_(transport),
-        renderer_(loopback_video),
         receiver_(NULL),
         test_label_(test_label),
+        dropped_frames_(0),
         rtp_timestamp_delta_(0),
         first_send_frame_(NULL),
         last_render_time_(0),
@@ -86,9 +86,17 @@ class VideoAnalyzer : public PacketReceiver,
         avg_ssim_threshold_(avg_ssim_threshold),
         frames_left_(duration_frames),
         crit_(CriticalSectionWrapper::CreateCriticalSection()),
-        trigger_(EventWrapper::Create()) {}
+        comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+        comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread,
+                                                       this)),
+        trigger_(EventWrapper::Create()) {
+    unsigned int id;
+    EXPECT_TRUE(comparison_thread_->Start(id));
+  }
 
   ~VideoAnalyzer() {
+    EXPECT_TRUE(comparison_thread_->Stop());
+
     while (!frames_.empty()) {
       delete frames_.back();
       frames_.pop_back();
@@ -99,6 +107,8 @@ class VideoAnalyzer : public PacketReceiver,
     }
   }
 
+  virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
+
   virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE {
     scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
     RTPHeader header;
@@ -164,12 +174,15 @@ class VideoAnalyzer : public PacketReceiver,
 
   virtual void RenderFrame(const I420VideoFrame& video_frame,
                            int time_to_render_ms) OVERRIDE {
+    int64_t render_time_ms =
+        Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
     uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;
 
     {
       CriticalSectionScoped cs(crit_.get());
       while (frames_.front()->timestamp() < send_timestamp) {
-        AddFrameComparison(frames_.front(), &last_rendered_frame_, true);
+        AddFrameComparison(
+            frames_.front(), &last_rendered_frame_, true, render_time_ms);
         frame_pool_.push_back(frames_.front());
         frames_.pop_front();
       }
@@ -177,56 +190,150 @@ class VideoAnalyzer : public PacketReceiver,
       I420VideoFrame* reference_frame = frames_.front();
       frames_.pop_front();
      assert(reference_frame != NULL);
+      EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
       assert(reference_frame->timestamp() == send_timestamp);
 
-      AddFrameComparison(reference_frame, &video_frame, false);
+      AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
       frame_pool_.push_back(reference_frame);
-
-      if (--frames_left_ == 0) {
-        PrintResult("psnr", psnr_, " dB");
-        PrintResult("ssim", ssim_, "");
-        PrintResult("sender_time", sender_time_, " ms");
-        PrintResult("receiver_time", receiver_time_, " ms");
-        PrintResult("total_delay_incl_network", end_to_end_, " ms");
-        PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
-        EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
-        EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
-        trigger_->Set();
-      }
     }
 
-    renderer_->RenderFrame(video_frame, time_to_render_ms);
     last_rendered_frame_.CopyFrame(video_frame);
   }
 
-  void Wait() { trigger_->Wait(WEBRTC_EVENT_INFINITE); }
+  void Wait() { trigger_->Wait(120 * 1000); }
 
   VideoSendStreamInput* input_;
   Transport* transport_;
-  VideoRenderer* renderer_;
   PacketReceiver* receiver_;
 
  private:
-  void AddFrameComparison(const I420VideoFrame* reference_frame,
-                          const I420VideoFrame* render,
-                          bool dropped) {
-    int64_t render_time = Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
-    psnr_.AddSample(I420PSNR(reference_frame, render));
-    ssim_.AddSample(I420SSIM(reference_frame, render));
-    if (dropped)
-      return;
-    if (last_render_time_ != 0)
-      rendered_delta_.AddSample(render_time - last_render_time_);
-    last_render_time_ = render_time;
-
-    int64_t input_time = reference_frame->render_time_ms();
-    int64_t send_time = send_times_[reference_frame->timestamp()];
-    send_times_.erase(reference_frame->timestamp());
-    sender_time_.AddSample(send_time - input_time);
-    int64_t recv_time = recv_times_[reference_frame->timestamp()];
-    recv_times_.erase(reference_frame->timestamp());
-    receiver_time_.AddSample(render_time - recv_time);
-    end_to_end_.AddSample(render_time - input_time);
+  struct FrameComparison {
+    FrameComparison(const I420VideoFrame* reference,
+                    const I420VideoFrame* render,
+                    bool dropped,
+                    int64_t send_time_ms,
+                    int64_t recv_time_ms,
+                    int64_t render_time_ms)
+        : dropped(dropped),
+          send_time_ms(send_time_ms),
+          recv_time_ms(recv_time_ms),
+          render_time_ms(render_time_ms) {
+      this->reference.CopyFrame(*reference);
+      this->render.CopyFrame(*render);
+    }
+
+    FrameComparison(const FrameComparison& compare)
+        : dropped(compare.dropped),
+          send_time_ms(compare.send_time_ms),
+          recv_time_ms(compare.recv_time_ms),
+          render_time_ms(compare.render_time_ms) {
+      this->reference.CopyFrame(compare.reference);
+      this->render.CopyFrame(compare.render);
+    }
+
+    ~FrameComparison() {}
+
+    I420VideoFrame reference;
+    I420VideoFrame render;
+    bool dropped;
+    int64_t send_time_ms;
+    int64_t recv_time_ms;
+    int64_t render_time_ms;
+  };
+
+  void AddFrameComparison(const I420VideoFrame* reference,
+                          const I420VideoFrame* render,
+                          bool dropped,
+                          int64_t render_time_ms) {
+    int64_t send_time_ms = send_times_[reference->timestamp()];
+    send_times_.erase(reference->timestamp());
+    int64_t recv_time_ms = recv_times_[reference->timestamp()];
+    recv_times_.erase(reference->timestamp());
+
+    CriticalSectionScoped crit(comparison_lock_.get());
+    comparisons_.push_back(FrameComparison(reference,
+                                           render,
+                                           dropped,
+                                           send_time_ms,
+                                           recv_time_ms,
+                                           render_time_ms));
+  }
+
+  static bool FrameComparisonThread(void* obj) {
+    return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
+  }
+
+  bool CompareFrames() {
+    assert(frames_left_ > 0);
+
+    I420VideoFrame reference;
+    I420VideoFrame render;
+    bool dropped;
+    int64_t send_time_ms;
+    int64_t recv_time_ms;
+    int64_t render_time_ms;
+
+    SleepMs(10);
+
+    while (true) {
+      {
+        CriticalSectionScoped crit(comparison_lock_.get());
+        if (comparisons_.empty())
+          return true;
+        reference.SwapFrame(&comparisons_.front().reference);
+        render.SwapFrame(&comparisons_.front().render);
+        dropped = comparisons_.front().dropped;
+        send_time_ms = comparisons_.front().send_time_ms;
+        recv_time_ms = comparisons_.front().recv_time_ms;
+        render_time_ms = comparisons_.front().render_time_ms;
+        comparisons_.pop_front();
+      }
+
+      PerformFrameComparison(&reference,
+                             &render,
+                             dropped,
+                             send_time_ms,
+                             recv_time_ms,
+                             render_time_ms);
+
+      if (--frames_left_ == 0) {
+        PrintResult("psnr", psnr_, " dB");
+        PrintResult("ssim", ssim_, "");
+        PrintResult("sender_time", sender_time_, " ms");
+        printf(
+            "RESULT dropped_frames: %s = %d\n", test_label_, dropped_frames_);
+        PrintResult("receiver_time", receiver_time_, " ms");
+        PrintResult("total_delay_incl_network", end_to_end_, " ms");
+        PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
+        EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
+        EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
+        trigger_->Set();
+        return false;
+      }
+    }
+  }
+
+  void PerformFrameComparison(const I420VideoFrame* reference,
+                              const I420VideoFrame* render,
+                              bool dropped,
+                              int64_t send_time_ms,
+                              int64_t recv_time_ms,
+                              int64_t render_time_ms) {
+    psnr_.AddSample(I420PSNR(reference, render));
+    ssim_.AddSample(I420SSIM(reference, render));
+    if (dropped) {
+      ++dropped_frames_;
+      return;
+    }
+    if (last_render_time_ != 0)
+      rendered_delta_.AddSample(render_time_ms - last_render_time_);
+    last_render_time_ = render_time_ms;
+
+    int64_t input_time_ms = reference->render_time_ms();
+    sender_time_.AddSample(send_time_ms - input_time_ms);
+    receiver_time_.AddSample(render_time_ms - recv_time_ms);
+    end_to_end_.AddSample(render_time_ms - input_time_ms);
   }
 
   void PrintResult(const char* result_type,
@@ -248,6 +355,7 @@ class VideoAnalyzer : public PacketReceiver,
   test::Statistics end_to_end_;
   test::Statistics rendered_delta_;
+  int dropped_frames_;
   std::deque<I420VideoFrame*> frames_;
   std::deque<I420VideoFrame*> frame_pool_;
   I420VideoFrame last_rendered_frame_;
@@ -258,40 +366,34 @@ class VideoAnalyzer : public PacketReceiver,
   int64_t last_render_time_;
   double avg_psnr_threshold_;
   double avg_ssim_threshold_;
-  uint32_t frames_left_;
+  int frames_left_;
   scoped_ptr<CriticalSectionWrapper> crit_;
+  scoped_ptr<CriticalSectionWrapper> comparison_lock_;
+  scoped_ptr<ThreadWrapper> comparison_thread_;
+  std::deque<FrameComparison> comparisons_;
   scoped_ptr<EventWrapper> trigger_;
 };
 
-TEST_P(FullStackTest, DISABLED_NoPacketLoss) {
+TEST_P(FullStackTest, NoPacketLoss) {
   FullStackTestParams params = GetParam();
 
-  scoped_ptr<test::VideoRenderer> local_preview(test::VideoRenderer::Create(
-      "Local Preview", params.clip.width, params.clip.height));
-  scoped_ptr<test::VideoRenderer> loopback_video(test::VideoRenderer::Create(
-      "Loopback Video", params.clip.width, params.clip.height));
-
   test::DirectTransport transport;
-  VideoAnalyzer analyzer(
-      NULL,
-      &transport,
-      loopback_video.get(),
-      params.test_label,
-      params.avg_psnr_threshold,
-      params.avg_ssim_threshold,
-      static_cast<uint64_t>(FLAGS_seconds * params.clip.fps));
+  VideoAnalyzer analyzer(NULL,
+                         &transport,
+                         params.test_label,
+                         params.avg_psnr_threshold,
+                         params.avg_ssim_threshold,
+                         FLAGS_seconds * params.clip.fps);
 
   Call::Config call_config(&analyzer);
   scoped_ptr<Call> call(Call::Create(call_config));
-  analyzer.receiver_ = call->Receiver();
+  analyzer.SetReceiver(call->Receiver());
   transport.SetReceiver(&analyzer);
 
   VideoSendStream::Config send_config = call->GetDefaultSendConfig();
   test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_);
 
-  send_config.local_renderer = local_preview.get();
-
   // TODO(pbos): static_cast shouldn't be required after mflodman refactors the
   // VideoCodec struct.
   send_config.codec.width = static_cast<uint16_t>(params.clip.width);
@@ -311,6 +413,9 @@ TEST_P(FullStackTest, DISABLED_NoPacketLoss) {
                                           params.clip.height,
                                           params.clip.fps,
                                           Clock::GetRealTimeClock()));
+  ASSERT_TRUE(file_capturer.get() != NULL)
+      << "Could not create capturer for " << params.clip.name
+      << ".yuv. Is this resource file present?";
 
   VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
   receive_config.rtp.ssrc = send_config.rtp.ssrcs[0];
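The full_stack.cc change above moves the PSNR/SSIM work off the rendering callback: RenderFrame now only copies the reference and rendered frames into the comparisons_ queue, and a dedicated comparison thread drains that queue, computes the metrics, and prints the results. A stripped-down sketch of the same producer/consumer pattern using standard C++ threading instead of WebRTC's CriticalSectionWrapper/ThreadWrapper (class and field names below are placeholders, and the comparison body is stubbed out):

#include <condition_variable>
#include <cstdint>
#include <cstdio>
#include <deque>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

// Placeholder for a queued frame pair; the real analyzer stores I420 frames
// plus send/receive/render timestamps.
struct Comparison {
  std::vector<uint8_t> reference;
  std::vector<uint8_t> render;
  bool dropped;
};

class AnalyzerSketch {
 public:
  AnalyzerSketch() : done_(false), worker_(&AnalyzerSketch::CompareLoop, this) {}

  ~AnalyzerSketch() {
    {
      std::lock_guard<std::mutex> lock(lock_);
      done_ = true;
    }
    wake_.notify_one();
    worker_.join();  // Remaining queued comparisons are drained before exit.
  }

  // Called from the render path: enqueue only, no expensive work here.
  void AddComparison(Comparison comparison) {
    {
      std::lock_guard<std::mutex> lock(lock_);
      queue_.push_back(std::move(comparison));
    }
    wake_.notify_one();
  }

 private:
  void CompareLoop() {
    for (;;) {
      Comparison comparison;
      {
        std::unique_lock<std::mutex> lock(lock_);
        wake_.wait(lock, [this] { return done_ || !queue_.empty(); });
        if (queue_.empty())
          return;  // done_ was set and there is nothing left to compare.
        comparison = std::move(queue_.front());
        queue_.pop_front();
      }
      // Stand-in for the expensive PSNR/SSIM computation and stats update.
      std::printf("compared %zu bytes (dropped=%d)\n",
                  comparison.reference.size(),
                  static_cast<int>(comparison.dropped));
    }
  }

  std::mutex lock_;
  std::condition_variable wake_;
  std::deque<Comparison> queue_;
  bool done_;
  std::thread worker_;
};

int main() {
  AnalyzerSketch analyzer;
  analyzer.AddComparison({std::vector<uint8_t>(100), std::vector<uint8_t>(100), false});
  analyzer.AddComparison({std::vector<uint8_t>(100), std::vector<uint8_t>(100), true});
  return 0;
}

The actual analyzer polls with SleepMs(10) from a ThreadWrapper loop rather than blocking on a condition variable, and it counts dropped frames and prints its RESULT lines once frames_left_ reaches zero, but the queueing structure is the same.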