Offline screenshare quality test, plus loopback.

BUG=4171
R=mflodman@webrtc.org, pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/34109004

Cr-Commit-Position: refs/heads/master@{#8408}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8408 4adac7df-926f-26a2-2b94-8c16560cd09d
sprang@webrtc.org 2015-02-18 12:46:06 +00:00
parent 0521127779
commit 131bea89d6
17 changed files with 836 additions and 278 deletions


@ -0,0 +1 @@
6006efc8095f2aa4b30879ec37d5baad261f8ab0


@ -0,0 +1 @@
c43aac0ccbf1b6f0922d1db5925d9e05d263a360


@ -0,0 +1 @@
9f1112d34e6a6e3039c3564bdbdf0913a9f62688


@ -0,0 +1 @@
74c974b0ed28003c3a3b6a2923e5373478dfd145


@ -320,7 +320,8 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
const int width = 352;
const int height = 288;
generator_.reset(FrameGenerator::CreateFromYuvFile(
test::ResourcePath(input_video, "yuv").c_str(), width, height));
std::vector<std::string>(1, test::ResourcePath(input_video, "yuv")),
width, height, 1));
codec_ = MakeVp8VideoCodec(width, height, 3);
codec_.minBitrate = 10;


@ -13,6 +13,7 @@
#include <stdio.h>
#include <string.h>
#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc {
@ -52,66 +53,98 @@ class ChromaGenerator : public FrameGenerator {
class YuvFileGenerator : public FrameGenerator {
public:
YuvFileGenerator(FILE* file, size_t width, size_t height)
: file_(file), width_(width), height_(height) {
assert(file);
YuvFileGenerator(std::vector<FILE*> files,
size_t width,
size_t height,
int frame_repeat_count)
: file_index_(0),
files_(files),
width_(width),
height_(height),
frame_size_(CalcBufferSize(kI420,
static_cast<int>(width_),
static_cast<int>(height_))),
frame_buffer_(new uint8_t[frame_size_]),
frame_display_count_(frame_repeat_count),
current_display_count_(0) {
assert(width > 0);
assert(height > 0);
frame_size_ = CalcBufferSize(
kI420, static_cast<int>(width_), static_cast<int>(height_));
frame_buffer_ = new uint8_t[frame_size_];
ReadNextFrame();
}
virtual ~YuvFileGenerator() {
fclose(file_);
delete[] frame_buffer_;
for (FILE* file : files_)
fclose(file);
}
virtual I420VideoFrame* NextFrame() OVERRIDE {
size_t count = fread(frame_buffer_, 1, frame_size_, file_);
if (count < frame_size_) {
rewind(file_);
return NextFrame();
if (frame_display_count_ > 0) {
if (current_display_count_ < frame_display_count_) {
++current_display_count_;
} else {
ReadNextFrame();
// Count this display of the newly read frame as the first, so each frame
// is shown exactly frame_repeat_count times (not one extra).
current_display_count_ = 1;
}
}
frame_.CreateEmptyFrame(static_cast<int>(width_),
static_cast<int>(height_),
static_cast<int>(width_),
static_cast<int>((width_ + 1) / 2),
static_cast<int>((width_ + 1) / 2));
current_frame_.CopyFrame(last_read_frame_);
return &current_frame_;
}
ConvertToI420(kI420,
frame_buffer_,
0,
0,
static_cast<int>(width_),
static_cast<int>(height_),
0,
kRotateNone,
&frame_);
return &frame_;
void ReadNextFrame() {
size_t bytes_read =
fread(frame_buffer_.get(), 1, frame_size_, files_[file_index_]);
if (bytes_read < frame_size_) {
// No more frames to read in this file, rewind and move to next file.
rewind(files_[file_index_]);
file_index_ = (file_index_ + 1) % files_.size();
bytes_read = fread(frame_buffer_.get(), 1, frame_size_,
files_[file_index_]);
assert(bytes_read >= frame_size_);
}
last_read_frame_.CreateEmptyFrame(
static_cast<int>(width_), static_cast<int>(height_),
static_cast<int>(width_), static_cast<int>((width_ + 1) / 2),
static_cast<int>((width_ + 1) / 2));
ConvertToI420(kI420, frame_buffer_.get(), 0, 0, static_cast<int>(width_),
static_cast<int>(height_), 0, kRotateNone, &last_read_frame_);
}
private:
FILE* file_;
size_t width_;
size_t height_;
size_t frame_size_;
uint8_t* frame_buffer_;
I420VideoFrame frame_;
size_t file_index_;
const std::vector<FILE*> files_;
const size_t width_;
const size_t height_;
const size_t frame_size_;
const scoped_ptr<uint8_t[]> frame_buffer_;
const int frame_display_count_;
int current_display_count_;
I420VideoFrame current_frame_;
I420VideoFrame last_read_frame_;
};
} // namespace
FrameGenerator* FrameGenerator::Create(size_t width, size_t height) {
FrameGenerator* FrameGenerator::CreateChromaGenerator(size_t width,
size_t height) {
return new ChromaGenerator(width, height);
}
FrameGenerator* FrameGenerator::CreateFromYuvFile(const char* file,
size_t width,
size_t height) {
FILE* file_handle = fopen(file, "rb");
assert(file_handle);
return new YuvFileGenerator(file_handle, width, height);
FrameGenerator* FrameGenerator::CreateFromYuvFile(
std::vector<std::string> filenames,
size_t width,
size_t height,
int frame_repeat_count) {
assert(!filenames.empty());
std::vector<FILE*> files;
for (const std::string& filename : filenames) {
FILE* file = fopen(filename.c_str(), "rb");
DCHECK(file != nullptr);
files.push_back(file);
}
return new YuvFileGenerator(files, width, height, frame_repeat_count);
}
} // namespace test


@ -10,6 +10,9 @@
#ifndef WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
#define WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
#include <string>
#include <vector>
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/typedefs.h"
@ -24,10 +27,17 @@ class FrameGenerator {
// Returns video frame that remains valid until next call.
virtual I420VideoFrame* NextFrame() = 0;
static FrameGenerator* Create(size_t width, size_t height);
static FrameGenerator* CreateFromYuvFile(const char* file,
// Creates a test frame generator that produces fully saturated frames with
// U, V values that vary over time.
static FrameGenerator* CreateChromaGenerator(size_t width, size_t height);
// Creates a frame generator that repeatedly plays a set of yuv files.
// The frame_repeat_count determines how many times each frame is shown,
// with 0 = show the first frame indefinitely, 1 = show each frame once, etc.
static FrameGenerator* CreateFromYuvFile(std::vector<std::string> files,
size_t width,
size_t height);
size_t height,
int frame_repeat_count);
};
} // namespace test
} // namespace webrtc
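
A hedged usage sketch of the new factory (not part of the change; "clip_a" and
"clip_b" are hypothetical resource names):

std::vector<std::string> files;
files.push_back(test::ResourcePath("clip_a", "yuv"));  // Hypothetical resource.
files.push_back(test::ResourcePath("clip_b", "yuv"));  // Hypothetical resource.
// Show each frame three times before advancing; cycles over both files.
scoped_ptr<test::FrameGenerator> generator(
    test::FrameGenerator::CreateFromYuvFile(files, 352, 288, 3));
I420VideoFrame* frame = generator->NextFrame();  // Valid until the next call.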


@ -28,7 +28,8 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::Create(
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock, input, FrameGenerator::Create(width, height), target_fps);
clock, input, FrameGenerator::CreateChromaGenerator(width, height),
target_fps);
if (!capturer->Init()) {
delete capturer;
return NULL;
@ -39,15 +40,15 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::Create(
FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
VideoSendStreamInput* input,
const char* file_name,
const std::string& file_name,
size_t width,
size_t height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock,
input,
FrameGenerator::CreateFromYuvFile(file_name, width, height),
clock, input,
FrameGenerator::CreateFromYuvFile(std::vector<std::string>(1, file_name),
width, height, 1),
target_fps);
if (!capturer->Init()) {
delete capturer;


@ -10,6 +10,8 @@
#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_FRAME_GENERATOR_CAPTURER_H_
#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_FRAME_GENERATOR_CAPTURER_H_
#include <string>
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/typedefs.h"
@ -33,7 +35,7 @@ class FrameGeneratorCapturer : public VideoCapturer {
Clock* clock);
static FrameGeneratorCapturer* CreateFromYuvFile(VideoSendStreamInput* input,
const char* file_name,
const std::string& file_name,
size_t width,
size_t height,
int target_fps,
@ -45,12 +47,13 @@ class FrameGeneratorCapturer : public VideoCapturer {
int64_t first_frame_capture_time() const { return first_frame_capture_time_; }
private:
FrameGeneratorCapturer(Clock* clock,
VideoSendStreamInput* input,
FrameGenerator* frame_generator,
int target_fps);
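// When a capturer is constructed directly instead of via Create() or
// CreateFromYuvFile() (as the screenshare code in this change does),
// Init() must be called before the capturer is started.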
bool Init();
private:
void InsertFrame();
static bool Run(void* obj);


@ -169,7 +169,7 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
// Create frames that are smaller than the send width/height; this is done
// to check that the callbacks are done after processing video.
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::Create(kWidth, kHeight));
test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
@ -215,8 +215,9 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
CreateStreams();
Start();
scoped_ptr<test::FrameGenerator> frame_generator(test::FrameGenerator::Create(
encoder_config_.streams[0].width, encoder_config_.streams[0].height));
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
encoder_config_.streams[0].width, encoder_config_.streams[0].height));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, renderer.Wait())
@ -834,7 +835,7 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
// Create frames that are smaller than the send width/height; this is done
// to check that the callbacks are done after processing video.
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::Create(kWidth / 2, kHeight / 2));
test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
@ -1265,8 +1266,9 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
CreateStreams();
Start();
scoped_ptr<test::FrameGenerator> frame_generator(test::FrameGenerator::Create(
encoder_config_.streams[0].width, encoder_config_.streams[0].height));
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
encoder_config_.streams[0].width, encoder_config_.streams[0].height));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())


@ -19,6 +19,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/cpu_info.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -27,6 +28,7 @@
#include "webrtc/test/direct_transport.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/statistics.h"
#include "webrtc/test/testsupport/fileutils.h"
@ -43,11 +45,13 @@ struct FullStackTestParams {
size_t width, height;
int fps;
} clip;
bool screenshare;
int min_bitrate_bps;
int target_bitrate_bps;
int max_bitrate_bps;
double avg_psnr_threshold;
double avg_ssim_threshold;
int test_durations_secs;
FakeNetworkPipe::Config link;
};
@ -71,7 +75,9 @@ class VideoAnalyzer : public PacketReceiver,
transport_(transport),
receiver_(NULL),
test_label_(test_label),
frames_left_(duration_frames),
frames_to_process_(duration_frames),
frames_recorded_(0),
frames_processed_(0),
dropped_frames_(0),
last_render_time_(0),
rtp_timestamp_delta_(0),
@ -80,24 +86,47 @@ class VideoAnalyzer : public PacketReceiver,
avg_psnr_threshold_(avg_psnr_threshold),
avg_ssim_threshold_(avg_ssim_threshold),
comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()),
comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread,
this)),
comparison_available_event_(EventWrapper::Create()),
done_(EventWrapper::Create()) {
unsigned int id;
EXPECT_TRUE(comparison_thread_->Start(id));
// Create thread pool for CPU-expensive PSNR/SSIM calculations.
// Try to use about as many threads as cores, but leave kMinCoresLeft cores
// free, so that we don't accidentally starve "real" worker threads (codec
// etc).
// Also, don't allocate more than kMaxComparisonThreads, even if there are
// spare cores.
uint32_t num_cores = CpuInfo::DetectNumberOfCores();
assert(num_cores >= 1);
static const uint32_t kMinCoresLeft = 4;
static const uint32_t kMaxComparisonThreads = 12;
if (num_cores <= kMinCoresLeft) {
num_cores = 1;
} else {
num_cores -= kMinCoresLeft;
num_cores = std::min(num_cores, kMaxComparisonThreads);
}
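// Examples: 4 cores -> 1 comparison thread, 8 cores -> 4 threads,
// 20 cores -> 12 threads (capped by kMaxComparisonThreads).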
for (uint32_t i = 0; i < num_cores; ++i) {
ThreadWrapper* thread =
ThreadWrapper::CreateThread(&FrameComparisonThread, this);
comparison_thread_pool_.push_back(thread);
unsigned int id;
EXPECT_TRUE(thread->Start(id));
}
}
~VideoAnalyzer() {
EXPECT_TRUE(comparison_thread_->Stop());
for (ThreadWrapper* thread : comparison_thread_pool_) {
EXPECT_TRUE(thread->Stop());
delete thread;
}
while (!frames_.empty()) {
delete frames_.back();
frames_.pop_back();
}
while (!frame_pool_.empty()) {
delete frame_pool_.back();
frame_pool_.pop_back();
}
for (I420VideoFrame* frame : frames_)
delete frame;
for (I420VideoFrame* frame : frame_pool_)
delete frame;
}
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
@ -194,7 +223,27 @@ class VideoAnalyzer : public PacketReceiver,
virtual bool IsTextureSupported() const override { return false; }
void Wait() {
EXPECT_EQ(kEventSignaled, done_->Wait(FullStackTest::kLongTimeoutMs));
// Frame comparisons can be very expensive. Wait for the test to finish, but
// on time-out check whether frames_processed_ is still increasing. If so,
// give the test more time; otherwise fail. Hopefully this reduces test
// flakiness.
int last_frames_processed = -1;
EventTypeWrapper eventType;
while ((eventType = done_->Wait(FullStackTest::kDefaultTimeoutMs)) !=
kEventSignaled) {
int frames_processed;
{
CriticalSectionScoped crit(comparison_lock_.get());
frames_processed = frames_processed_;
}
if (last_frames_processed == -1) {
last_frames_processed = frames_processed;
continue;
}
ASSERT_GT(frames_processed, last_frames_processed)
<< "Analyzer stalled while waiting for test to finish.";
last_frames_processed = frames_processed;
}
}
VideoSendStreamInput* input_;
@ -203,6 +252,9 @@ class VideoAnalyzer : public PacketReceiver,
private:
struct FrameComparison {
FrameComparison()
: dropped(false), send_time_ms(0), recv_time_ms(0), render_time_ms(0) {}
FrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
@ -226,6 +278,15 @@ class VideoAnalyzer : public PacketReceiver,
this->render.CopyFrame(compare.render);
}
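// Copies the timing stats but swaps the (large) frame buffers into this
// comparison instead of copying them, keeping the queue handoff cheap.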
void CloneStatsAndSwapFrames(FrameComparison* comparison) {
reference.SwapFrame(&comparison->reference);
render.SwapFrame(&comparison->render);
dropped = comparison->dropped;
send_time_ms = comparison->send_time_ms;
recv_time_ms = comparison->recv_time_ms;
render_time_ms = comparison->render_time_ms;
}
~FrameComparison() {}
I420VideoFrame reference;
@ -253,6 +314,7 @@ class VideoAnalyzer : public PacketReceiver,
send_time_ms,
recv_time_ms,
render_time_ms));
comparison_available_event_->Set();
}
static bool FrameComparisonThread(void* obj) {
@ -260,76 +322,110 @@ class VideoAnalyzer : public PacketReceiver,
}
bool CompareFrames() {
assert(frames_left_ > 0);
if (AllFramesRecorded())
return false;
I420VideoFrame reference;
I420VideoFrame render;
bool dropped;
int64_t send_time_ms;
int64_t recv_time_ms;
int64_t render_time_ms;
SleepMs(10);
while (true) {
{
CriticalSectionScoped crit(comparison_lock_.get());
if (comparisons_.empty())
return true;
reference.SwapFrame(&comparisons_.front().reference);
render.SwapFrame(&comparisons_.front().render);
dropped = comparisons_.front().dropped;
send_time_ms = comparisons_.front().send_time_ms;
recv_time_ms = comparisons_.front().recv_time_ms;
render_time_ms = comparisons_.front().render_time_ms;
comparisons_.pop_front();
}
PerformFrameComparison(&reference,
&render,
dropped,
send_time_ms,
recv_time_ms,
render_time_ms);
if (--frames_left_ == 0) {
PrintResult("psnr", psnr_, " dB");
PrintResult("ssim", ssim_, "");
PrintResult("sender_time", sender_time_, " ms");
printf("RESULT dropped_frames: %s = %d frames\n", test_label_,
dropped_frames_);
PrintResult("receiver_time", receiver_time_, " ms");
PrintResult("total_delay_incl_network", end_to_end_, " ms");
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
done_->Set();
FrameComparison comparison;
if (!PopComparison(&comparison)) {
// Wait until a new comparison task is available, or the test is done.
// If done, wake up the remaining waiting threads.
comparison_available_event_->Wait(1000);
if (AllFramesRecorded()) {
comparison_available_event_->Set();
return false;
}
return true; // Try again.
}
PerformFrameComparison(comparison);
if (FrameProcessed()) {
PrintResults();
done_->Set();
comparison_available_event_->Set();
return false;
}
return true;
}
void PerformFrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t send_time_ms,
int64_t recv_time_ms,
int64_t render_time_ms) {
psnr_.AddSample(I420PSNR(reference, render));
ssim_.AddSample(I420SSIM(reference, render));
if (dropped) {
bool PopComparison(FrameComparison* comparison) {
CriticalSectionScoped crit(comparison_lock_.get());
// If AllFramesRecorded() is true, we have already popped frames_to_process_
// frames from comparisons_, so there is no more work for this thread to do.
// frames_processed_ might still be lower if not all comparisons are done,
// but those frames are currently being worked on by other threads.
if (comparisons_.empty() || AllFramesRecorded())
return false;
comparison->CloneStatsAndSwapFrames(&comparisons_.front());
comparisons_.pop_front();
FrameRecorded();
return true;
}
// Increments the count of frames taken from the queue for comparison.
void FrameRecorded() {
CriticalSectionScoped crit(comparison_lock_.get());
++frames_recorded_;
}
// Returns true if all frames to be compared have been taken from the queue.
bool AllFramesRecorded() {
CriticalSectionScoped crit(comparison_lock_.get());
assert(frames_recorded_ <= frames_to_process_);
return frames_recorded_ == frames_to_process_;
}
// Increments the count of processed frames. Returns true if this was the
// last frame to be processed.
bool FrameProcessed() {
CriticalSectionScoped crit(comparison_lock_.get());
++frames_processed_;
assert(frames_processed_ <= frames_to_process_);
return frames_processed_ == frames_to_process_;
}
void PrintResults() {
CriticalSectionScoped crit(comparison_lock_.get());
PrintResult("psnr", psnr_, " dB");
PrintResult("ssim", ssim_, "");
PrintResult("sender_time", sender_time_, " ms");
printf("RESULT dropped_frames: %s = %d frames\n", test_label_,
dropped_frames_);
PrintResult("receiver_time", receiver_time_, " ms");
PrintResult("total_delay_incl_network", end_to_end_, " ms");
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
}
void PerformFrameComparison(const FrameComparison& comparison) {
// Perform the expensive PSNR and SSIM calculations while not holding the lock.
double psnr = I420PSNR(&comparison.reference, &comparison.render);
double ssim = I420SSIM(&comparison.reference, &comparison.render);
CriticalSectionScoped crit(comparison_lock_.get());
psnr_.AddSample(psnr);
ssim_.AddSample(ssim);
if (comparison.dropped) {
++dropped_frames_;
return;
}
if (last_render_time_ != 0)
rendered_delta_.AddSample(render_time_ms - last_render_time_);
last_render_time_ = render_time_ms;
rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_);
last_render_time_ = comparison.render_time_ms;
int64_t input_time_ms = reference->render_time_ms();
sender_time_.AddSample(send_time_ms - input_time_ms);
receiver_time_.AddSample(render_time_ms - recv_time_ms);
end_to_end_.AddSample(render_time_ms - input_time_ms);
int64_t input_time_ms = comparison.reference.render_time_ms();
sender_time_.AddSample(comparison.send_time_ms - input_time_ms);
receiver_time_.AddSample(comparison.render_time_ms -
comparison.recv_time_ms);
end_to_end_.AddSample(comparison.render_time_ms - input_time_ms);
}
void PrintResult(const char* result_type,
@ -350,7 +446,9 @@ class VideoAnalyzer : public PacketReceiver,
test::Statistics ssim_;
test::Statistics end_to_end_;
test::Statistics rendered_delta_;
int frames_left_;
const int frames_to_process_;
int frames_recorded_;
int frames_processed_;
int dropped_frames_;
int64_t last_render_time_;
uint32_t rtp_timestamp_delta_;
@ -366,7 +464,8 @@ class VideoAnalyzer : public PacketReceiver,
const double avg_ssim_threshold_;
const scoped_ptr<CriticalSectionWrapper> comparison_lock_;
const scoped_ptr<ThreadWrapper> comparison_thread_;
std::vector<ThreadWrapper*> comparison_thread_pool_;
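// Signaled whenever a comparison is queued, and re-signaled at shutdown so
// that every pooled thread wakes up and can exit.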
const scoped_ptr<EventWrapper> comparison_available_event_;
std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_);
const scoped_ptr<EventWrapper> done_;
};
@ -374,12 +473,9 @@ class VideoAnalyzer : public PacketReceiver,
void FullStackTest::RunTest(const FullStackTestParams& params) {
test::DirectTransport send_transport(params.link);
test::DirectTransport recv_transport(params.link);
VideoAnalyzer analyzer(NULL,
&send_transport,
params.test_label,
params.avg_psnr_threshold,
params.avg_ssim_threshold,
kFullStackTestDurationSecs * params.clip.fps);
VideoAnalyzer analyzer(NULL, &send_transport, params.test_label,
params.avg_psnr_threshold, params.avg_ssim_threshold,
params.test_durations_secs * params.clip.fps);
CreateCalls(Call::Config(&analyzer), Call::Config(&recv_transport));
@ -395,6 +491,8 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
send_config_.encoder_settings.payload_name = "VP8";
send_config_.encoder_settings.payload_type = 124;
send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
VideoStream* stream = &encoder_config_.streams[0];
stream->width = params.clip.width;
@ -404,25 +502,56 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
stream->max_bitrate_bps = params.max_bitrate_bps;
stream->max_framerate = params.clip.fps;
if (params.screenshare) {
encoder_config_.content_type = VideoEncoderConfig::kScreenshare;
encoder_config_.min_transmit_bitrate_bps = 400 * 1000;
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers = 2;
encoder_config_.encoder_specific_settings = &vp8_settings;
stream->temporal_layer_thresholds_bps.clear();
stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
}
CreateMatchingReceiveConfigs();
receive_configs_[0].renderer = &analyzer;
receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0];
receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type =
kSendRtxPayloadType;
CreateStreams();
analyzer.input_ = send_stream_->Input();
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::CreateFromYuvFile(
&analyzer,
test::ResourcePath(params.clip.name, "yuv").c_str(),
params.clip.width,
params.clip.height,
params.clip.fps,
Clock::GetRealTimeClock()));
if (params.screenshare) {
std::vector<std::string> slides;
slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
ASSERT_TRUE(frame_generator_capturer_.get() != NULL)
<< "Could not create capturer for " << params.clip.name
<< ".yuv. Is this resource file present?";
scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateFromYuvFile(
slides, 1850, 1110,
10 * params.clip.fps) // Cycle image every 10 seconds.
);
frame_generator_capturer_.reset(new test::FrameGeneratorCapturer(
Clock::GetRealTimeClock(), &analyzer, frame_generator.release(),
params.clip.fps));
ASSERT_TRUE(frame_generator_capturer_->Init());
} else {
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::CreateFromYuvFile(
&analyzer, test::ResourcePath(params.clip.name, "yuv"),
params.clip.width, params.clip.height, params.clip.fps,
Clock::GetRealTimeClock()));
ASSERT_TRUE(frame_generator_capturer_.get() != NULL)
<< "Could not create capturer for " << params.clip.name
<< ".yuv. Is this resource file present?";
}
Start();
@ -439,12 +568,13 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
FullStackTestParams paris_qcif = {"net_delay_0_0_plr_0",
{"paris_qcif", 176, 144, 30},
false,
300000,
300000,
300000,
36.0,
0.96
};
0.96,
kFullStackTestDurationSecs};
RunTest(paris_qcif);
}
@ -452,24 +582,26 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) {
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
FullStackTestParams foreman_cif = {"foreman_cif_net_delay_0_0_plr_0",
{"foreman_cif", 352, 288, 30},
false,
700000,
700000,
700000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);
}
TEST_F(FullStackTest, ForemanCifPlr5) {
FullStackTestParams foreman_cif = {"foreman_cif_delay_50_0_plr_5",
{"foreman_cif", 352, 288, 30},
false,
30000,
500000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.loss_percent = 5;
foreman_cif.link.queue_delay_ms = 50;
RunTest(foreman_cif);
@ -478,12 +610,13 @@ TEST_F(FullStackTest, ForemanCifPlr5) {
TEST_F(FullStackTest, ForemanCif500kbps) {
FullStackTestParams foreman_cif = {"foreman_cif_500kbps",
{"foreman_cif", 352, 288, 30},
false,
30000,
500000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.queue_length_packets = 0;
foreman_cif.link.queue_delay_ms = 0;
foreman_cif.link.link_capacity_kbps = 500;
@ -493,12 +626,13 @@ TEST_F(FullStackTest, ForemanCif500kbps) {
TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) {
FullStackTestParams foreman_cif = {"foreman_cif_500kbps_32pkts_queue",
{"foreman_cif", 352, 288, 30},
false,
30000,
500000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.queue_length_packets = 32;
foreman_cif.link.queue_delay_ms = 0;
foreman_cif.link.link_capacity_kbps = 500;
@ -508,12 +642,13 @@ TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) {
TEST_F(FullStackTest, ForemanCif500kbps100ms) {
FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms",
{"foreman_cif", 352, 288, 30},
false,
30000,
500000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.queue_length_packets = 0;
foreman_cif.link.queue_delay_ms = 100;
foreman_cif.link.link_capacity_kbps = 500;
@ -523,12 +658,13 @@ TEST_F(FullStackTest, ForemanCif500kbps100ms) {
TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) {
FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms_32pkts_queue",
{"foreman_cif", 352, 288, 30},
false,
30000,
500000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.queue_length_packets = 32;
foreman_cif.link.queue_delay_ms = 100;
foreman_cif.link.link_capacity_kbps = 500;
@ -538,15 +674,30 @@ TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) {
TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
FullStackTestParams foreman_cif = {"foreman_cif_1000kbps_100ms_32pkts_queue",
{"foreman_cif", 352, 288, 30},
false,
30000,
2000000,
2000000,
0.0,
0.0
};
0.0,
kFullStackTestDurationSecs};
foreman_cif.link.queue_length_packets = 32;
foreman_cif.link.queue_delay_ms = 100;
foreman_cif.link.link_capacity_kbps = 1000;
RunTest(foreman_cif);
}
TEST_F(FullStackTest, ScreenshareSlides) {
FullStackTestParams screenshare_params = {
"screenshare_slides",
{"screenshare_slides", 1850, 1110, 5},
true,
50000,
100000,
1000000,
0.0,
0.0,
kFullStackTestDurationSecs};
RunTest(screenshare_params);
}
} // namespace webrtc


@ -12,7 +12,8 @@
#include <map>
#include "gflags/gflags.h"
#include "webrtc/video/loopback.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/call.h"
@ -22,113 +23,55 @@
#include "webrtc/test/direct_transport.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/run_loop.h"
#include "webrtc/test/run_test.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/test/video_renderer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
namespace test {
static const int kAbsSendTimeExtensionId = 7;
namespace flags {
DEFINE_int32(width, 640, "Video width.");
size_t Width() { return static_cast<size_t>(FLAGS_width); }
DEFINE_int32(height, 480, "Video height.");
size_t Height() { return static_cast<size_t>(FLAGS_height); }
DEFINE_int32(fps, 30, "Frames per second.");
int Fps() { return static_cast<int>(FLAGS_fps); }
DEFINE_int32(min_bitrate, 50, "Minimum video bitrate.");
size_t MinBitrate() { return static_cast<size_t>(FLAGS_min_bitrate); }
DEFINE_int32(start_bitrate, 300, "Video starting bitrate.");
size_t StartBitrate() { return static_cast<size_t>(FLAGS_start_bitrate); }
DEFINE_int32(max_bitrate, 800, "Maximum video bitrate.");
size_t MaxBitrate() { return static_cast<size_t>(FLAGS_max_bitrate); }
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() { return static_cast<std::string>(FLAGS_codec); }
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAGS_loss_percent);
}
DEFINE_int32(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacity() {
return static_cast<int>(FLAGS_link_capacity);
}
DEFINE_int32(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAGS_queue_size);
}
DEFINE_int32(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAGS_avg_propagation_delay_ms);
}
DEFINE_int32(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
} // namespace flags
static const uint32_t kSendSsrc = 0x654321;
static const uint32_t kSendRtxSsrc = 0x654322;
static const uint32_t kReceiverLocalSsrc = 0x123456;
static const uint8_t kRtxPayloadType = 96;
void Loopback() {
Loopback::Loopback(const Config& config)
: config_(config), clock_(Clock::GetRealTimeClock()) {
}
Loopback::~Loopback() {
}
void Loopback::Run() {
scoped_ptr<test::TraceToStderr> trace_to_stderr_;
if (webrtc::flags::FLAGS_logs)
if (config_.logs)
trace_to_stderr_.reset(new test::TraceToStderr);
scoped_ptr<test::VideoRenderer> local_preview(test::VideoRenderer::Create(
"Local Preview", flags::Width(), flags::Height()));
"Local Preview", config_.width, config_.height));
scoped_ptr<test::VideoRenderer> loopback_video(test::VideoRenderer::Create(
"Loopback Video", flags::Width(), flags::Height()));
"Loopback Video", config_.width, config_.height));
FakeNetworkPipe::Config pipe_config;
pipe_config.loss_percent = flags::LossPercent();
pipe_config.link_capacity_kbps = flags::LinkCapacity();
pipe_config.queue_length_packets = flags::QueueSize();
pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
pipe_config.loss_percent = config_.loss_percent;
pipe_config.link_capacity_kbps = config_.link_capacity_kbps;
pipe_config.queue_length_packets = config_.queue_size;
pipe_config.queue_delay_ms = config_.avg_propagation_delay_ms;
pipe_config.delay_standard_deviation_ms = config_.std_propagation_delay_ms;
test::DirectTransport transport(pipe_config);
Call::Config call_config(&transport);
call_config.stream_bitrates.min_bitrate_bps =
static_cast<int>(flags::MinBitrate()) * 1000;
static_cast<int>(config_.min_bitrate_kbps) * 1000;
call_config.stream_bitrates.start_bitrate_bps =
static_cast<int>(flags::StartBitrate()) * 1000;
static_cast<int>(config_.start_bitrate_kbps) * 1000;
call_config.stream_bitrates.max_bitrate_bps =
static_cast<int>(flags::MaxBitrate()) * 1000;
static_cast<int>(config_.max_bitrate_kbps) * 1000;
scoped_ptr<Call> call(Call::Create(call_config));
// Loopback, call sends to itself.
@ -144,9 +87,9 @@ void Loopback() {
send_config.local_renderer = local_preview.get();
scoped_ptr<VideoEncoder> encoder;
if (flags::Codec() == "VP8") {
if (config_.codec == "VP8") {
encoder.reset(VideoEncoder::Create(VideoEncoder::kVp8));
} else if (flags::Codec() == "VP9") {
} else if (config_.codec == "VP9") {
encoder.reset(VideoEncoder::Create(VideoEncoder::kVp9));
} else {
// Codec not supported.
@ -154,30 +97,15 @@ void Loopback() {
return;
}
send_config.encoder_settings.encoder = encoder.get();
send_config.encoder_settings.payload_name = flags::Codec();
send_config.encoder_settings.payload_name = config_.codec;
send_config.encoder_settings.payload_type = 124;
VideoEncoderConfig encoder_config;
encoder_config.streams = test::CreateVideoStreams(1);
VideoStream* stream = &encoder_config.streams[0];
stream->width = flags::Width();
stream->height = flags::Height();
stream->min_bitrate_bps = call_config.stream_bitrates.min_bitrate_bps;
stream->target_bitrate_bps = call_config.stream_bitrates.max_bitrate_bps;
stream->max_bitrate_bps = call_config.stream_bitrates.max_bitrate_bps;
stream->max_framerate = 30;
stream->max_qp = 56;
VideoEncoderConfig encoder_config(CreateEncoderConfig());
VideoSendStream* send_stream =
call->CreateVideoSendStream(send_config, encoder_config);
Clock* test_clock = Clock::GetRealTimeClock();
scoped_ptr<test::VideoCapturer> camera(
test::VideoCapturer::Create(send_stream->Input(),
flags::Width(),
flags::Height(),
flags::Fps(),
test_clock));
scoped_ptr<test::VideoCapturer> capturer(CreateCapturer(send_stream));
VideoReceiveStream::Config receive_config;
receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
@ -197,11 +125,11 @@ void Loopback() {
receive_stream->Start();
send_stream->Start();
camera->Start();
capturer->Start();
test::PressEnterToContinue();
camera->Stop();
capturer->Stop();
send_stream->Stop();
receive_stream->Stop();
@ -212,13 +140,26 @@ void Loopback() {
transport.StopSending();
}
} // namespace webrtc
int main(int argc, char* argv[]) {
::testing::InitGoogleTest(&argc, argv);
google::ParseCommandLineFlags(&argc, &argv, true);
webrtc::test::InitFieldTrialsFromString(
webrtc::flags::FLAGS_force_fieldtrials);
webrtc::test::RunTest(webrtc::Loopback);
return 0;
VideoEncoderConfig Loopback::CreateEncoderConfig() {
VideoEncoderConfig encoder_config;
encoder_config.streams = test::CreateVideoStreams(1);
VideoStream* stream = &encoder_config.streams[0];
stream->width = config_.width;
stream->height = config_.height;
stream->min_bitrate_bps = static_cast<int>(config_.min_bitrate_kbps) * 1000;
stream->max_bitrate_bps = static_cast<int>(config_.max_bitrate_kbps) * 1000;
stream->target_bitrate_bps =
static_cast<int>(config_.max_bitrate_kbps) * 1000;
stream->max_framerate = config_.fps;
stream->max_qp = 56;
return encoder_config;
}
test::VideoCapturer* Loopback::CreateCapturer(VideoSendStream* send_stream) {
return test::VideoCapturer::Create(send_stream->Input(), config_.width,
config_.height, config_.fps, clock_);
}
} // namespace test
} // namespace webrtc

webrtc/video/loopback.h

@ -0,0 +1,57 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string>
#include "webrtc/config.h"
namespace webrtc {
class VideoSendStream;
class Clock;
namespace test {
class VideoCapturer;
class Loopback {
public:
struct Config {
size_t width;
size_t height;
int32_t fps;
size_t min_bitrate_kbps;
size_t start_bitrate_kbps;
size_t max_bitrate_kbps;
int32_t min_transmit_bitrate_kbps;
std::string codec;
int32_t loss_percent;
int32_t link_capacity_kbps;
int32_t queue_size;
int32_t avg_propagation_delay_ms;
int32_t std_propagation_delay_ms;
bool logs;
};
explicit Loopback(const Config& config);
virtual ~Loopback();
void Run();
protected:
virtual VideoEncoderConfig CreateEncoderConfig();
virtual VideoCapturer* CreateCapturer(VideoSendStream* send_stream);
const Config config_;
Clock* const clock_;
};
} // namespace test
} // namespace webrtc
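
A hedged sketch of the intended extension pattern (mirroring ScreenshareLoopback
below; MyLoopback and MyCapturer are hypothetical names):

class MyLoopback : public test::Loopback {
 public:
  explicit MyLoopback(const Config& config) : Loopback(config) {}
  virtual ~MyLoopback() {}

 protected:
  // Supply a custom frame source; the base class handles call setup,
  // transport and the default encoder configuration.
  test::VideoCapturer* CreateCapturer(VideoSendStream* send_stream) override {
    return new MyCapturer(send_stream->Input(), clock_);  // Hypothetical.
  }
};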


@ -0,0 +1,177 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <map>
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/run_test.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
#include "webrtc/video/loopback.h"
#include "webrtc/video/video_send_stream.h"
namespace webrtc {
namespace flags {
// Fixed for prerecorded screenshare content.
size_t Width() {
return 1850;
}
size_t Height() {
return 1110;
}
DEFINE_int32(fps, 5, "Frames per second.");
int Fps() {
return static_cast<int>(FLAGS_fps);
}
DEFINE_int32(min_bitrate, 50, "Minimum video bitrate.");
size_t MinBitrate() {
return static_cast<size_t>(FLAGS_min_bitrate);
}
DEFINE_int32(tl0_bitrate, 100, "Temporal layer 0 target bitrate.");
size_t StartBitrate() {
return static_cast<size_t>(FLAGS_tl0_bitrate);
}
DEFINE_int32(tl1_bitrate, 1000, "Temporal layer 1 target bitrate.");
size_t MaxBitrate() {
return static_cast<size_t>(FLAGS_tl1_bitrate);
}
DEFINE_int32(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
int MinTransmitBitrate() {
return FLAGS_min_transmit_bitrate;
}
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAGS_codec);
}
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAGS_loss_percent);
}
DEFINE_int32(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacity() {
return static_cast<int>(FLAGS_link_capacity);
}
DEFINE_int32(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAGS_queue_size);
}
DEFINE_int32(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAGS_avg_propagation_delay_ms);
}
DEFINE_int32(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
} // namespace flags
class ScreenshareLoopback : public test::Loopback {
public:
explicit ScreenshareLoopback(const Config& config) : Loopback(config) {}
virtual ~ScreenshareLoopback() {}
protected:
VideoEncoderConfig CreateEncoderConfig() override {
VideoEncoderConfig encoder_config(test::Loopback::CreateEncoderConfig());
VideoStream* stream = &encoder_config.streams[0];
encoder_config.content_type = VideoEncoderConfig::kScreenshare;
// Note: the flag is in kbps, the config field is in bps.
encoder_config.min_transmit_bitrate_bps = flags::MinTransmitBitrate() * 1000;
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers = 2;
encoder_config.encoder_specific_settings = &vp8_settings;
stream->temporal_layer_thresholds_bps.clear();
stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
stream->target_bitrate_bps =
static_cast<int>(config_.start_bitrate_kbps) * 1000;
return encoder_config;
}
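// Worked example with the default flags (a reading of the code as written,
// not a spec): the base config gives min 50 kbps and target = max = 1000 kbps
// (tl1_bitrate). The layer threshold is pushed before the target is lowered,
// so the single threshold ends up at 1000 kbps and the TL0 target at 100 kbps
// (tl0_bitrate).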
test::VideoCapturer* CreateCapturer(VideoSendStream* send_stream) override {
std::vector<std::string> slides;
slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
test::FrameGenerator* frame_generator =
test::FrameGenerator::CreateFromYuvFile(
slides, flags::Width(), flags::Height(), 10 * flags::Fps());
test::FrameGeneratorCapturer* capturer(new test::FrameGeneratorCapturer(
clock_, send_stream->Input(), frame_generator, flags::Fps()));
EXPECT_TRUE(capturer->Init());
return capturer;
}
};
void Loopback() {
test::Loopback::Config config{flags::Width(),
flags::Height(),
flags::Fps(),
flags::MinBitrate(),
flags::StartBitrate(),
flags::MaxBitrate(),
flags::MinTransmitBitrate(),
flags::Codec(),
flags::LossPercent(),
flags::LinkCapacity(),
flags::QueueSize(),
flags::AvgPropagationDelayMs(),
flags::StdPropagationDelayMs(),
flags::FLAGS_logs};
ScreenshareLoopback loopback(config);
loopback.Run();
}
} // namespace webrtc
int main(int argc, char* argv[]) {
::testing::InitGoogleTest(&argc, argv);
google::ParseCommandLineFlags(&argc, &argv, true);
webrtc::test::InitFieldTrialsFromString(
webrtc::flags::FLAGS_force_fieldtrials);
webrtc::test::RunTest(webrtc::Loopback);
return 0;
}


@ -0,0 +1,135 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <map>
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/run_test.h"
#include "webrtc/typedefs.h"
#include "webrtc/video/loopback.h"
namespace webrtc {
namespace flags {
DEFINE_int32(width, 640, "Video width.");
size_t Width() {
return static_cast<size_t>(FLAGS_width);
}
DEFINE_int32(height, 480, "Video height.");
size_t Height() {
return static_cast<size_t>(FLAGS_height);
}
DEFINE_int32(fps, 30, "Frames per second.");
int Fps() {
return static_cast<int>(FLAGS_fps);
}
DEFINE_int32(min_bitrate, 50, "Minimum video bitrate.");
size_t MinBitrate() {
return static_cast<size_t>(FLAGS_min_bitrate);
}
DEFINE_int32(start_bitrate, 300, "Video starting bitrate.");
size_t StartBitrate() {
return static_cast<size_t>(FLAGS_start_bitrate);
}
DEFINE_int32(max_bitrate, 800, "Maximum video bitrate.");
size_t MaxBitrate() {
return static_cast<size_t>(FLAGS_max_bitrate);
}
int MinTransmitBitrate() {
return 0;
} // No min padding for regular video.
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAGS_codec);
}
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAGS_loss_percent);
}
DEFINE_int32(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacity() {
return static_cast<int>(FLAGS_link_capacity);
}
DEFINE_int32(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAGS_queue_size);
}
DEFINE_int32(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAGS_avg_propagation_delay_ms);
}
DEFINE_int32(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
} // namespace flags
void Loopback() {
test::Loopback::Config config{flags::Width(),
flags::Height(),
flags::Fps(),
flags::MinBitrate(),
flags::StartBitrate(),
flags::MaxBitrate(),
0, // No min transmit bitrate.
flags::Codec(),
flags::LossPercent(),
flags::LinkCapacity(),
flags::QueueSize(),
flags::AvgPropagationDelayMs(),
flags::StdPropagationDelayMs(),
flags::FLAGS_logs};
test::Loopback loopback(config);
loopback.Run();
}
} // namespace webrtc
int main(int argc, char* argv[]) {
::testing::InitGoogleTest(&argc, argv);
google::ParseCommandLineFlags(&argc, &argv, true);
webrtc::test::InitFieldTrialsFromString(
webrtc::flags::FLAGS_force_fieldtrials);
webrtc::test::RunTest(webrtc::Loopback);
return 0;
}


@ -13,6 +13,10 @@
'<(DEPTH)/resources/foreman_cif.yuv',
'<(DEPTH)/resources/paris_qcif.yuv',
'<(DEPTH)/resources/voice_engine/audio_long16.pcm',
'<(DEPTH)/resources/difficult_photo_1850_1110.yuv',
'<(DEPTH)/resources/photo_1850_1110.yuv',
'<(DEPTH)/resources/presentation_1850_1110.yuv',
'<(DEPTH)/resources/web_screenshot_1850_1110.yuv',
],
},
}],


@ -35,6 +35,21 @@
'webrtc_perf_tests',
],
},
{
'target_name': 'loopback_base',
'type': 'static_library',
'sources': [
'video/loopback.cc',
'video/loopback.h',
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'webrtc',
],
},
{
'target_name': 'video_loopback',
'type': 'executable',
@ -42,7 +57,7 @@
'test/mac/run_test.mm',
'test/run_test.cc',
'test/run_test.h',
'video/loopback.cc',
'video/video_loopback.cc',
],
'conditions': [
['OS=="mac"', {
@ -52,13 +67,37 @@
}],
],
'dependencies': [
'loopback_base',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/webrtc_test_common.gyp:webrtc_test_renderer',
'test/test.gyp:test_main',
'webrtc',
],
},
{
'target_name': 'screenshare_loopback',
'type': 'executable',
'sources': [
'test/mac/run_test.mm',
'test/run_test.cc',
'test/run_test.h',
'video/screenshare_loopback.cc',
],
'conditions': [
['OS=="mac"', {
'sources!': [
'test/run_test.cc',
],
}],
],
'dependencies': [
'loopback_base',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'test/webrtc_test_common.gyp:webrtc_test_common',
'test/webrtc_test_common.gyp:webrtc_test_renderer',
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'test/test.gyp:test_main',
'webrtc',
],