Added a delay measurement that measures the time from when a frame is captured until the frame starts being processed. The delay is reported in ms of delay per second.

R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/4249004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5212 4adac7df-926f-26a2-2b94-8c16560cd09d
asapersson@webrtc.org 2013-12-04 13:47:44 +00:00
parent 79b63206b9
commit 9e5b0342f6
8 changed files with 331 additions and 29 deletions

View File

@@ -120,12 +120,30 @@ class WEBRTC_DLLEXPORT ViEBase {
virtual int RegisterCpuOveruseObserver(int channel,
CpuOveruseObserver* observer) = 0;
// Gets the last cpu overuse measure.
// Gets cpu overuse measures.
// capture_jitter_ms: The current estimated jitter in ms based on incoming
// captured frames.
// avg_encode_time_ms: The average encode time in ms.
// encode_usage_percent: The average encode time divided by the average time
// difference between incoming captured frames.
// capture_queue_delay_ms_per_s: The current delay from when a frame is
// captured until the frame starts being
// processed, expressed in ms of delay per
// second.
// TODO(asapersson): Remove default implementation.
// TODO(asapersson): Remove this function.
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms) { return -1; }
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
int* encode_usage_percent,
int* capture_queue_delay_ms_per_s) {
return -1;
}
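For context, a sketch of how a client might query the new measures through this interface; vie_base (a ViEBase pointer obtained elsewhere) and channel (an open video channel id) are assumed, and error handling is elided:

int capture_jitter_ms = 0;
int avg_encode_time_ms = 0;
int encode_usage_percent = 0;
int capture_queue_delay_ms_per_s = 0;
// Assumed: returns 0 on success, -1 on failure (e.g. unknown channel).
if (vie_base->CpuOveruseMeasures(channel,
                                 &capture_jitter_ms,
                                 &avg_encode_time_ms,
                                 &encode_usage_percent,
                                 &capture_queue_delay_ms_per_s) == 0) {
  // All four measures are populated and can be reported as stats.
}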
// Specifies the VoiceEngine and VideoEngine channel pair to use for
// audio/video synchronization.
virtual int ConnectAudioChannel(const int video_channel,

View File

@@ -10,10 +10,12 @@
#include "webrtc/video_engine/overuse_frame_detector.h"
#include <algorithm>
#include <assert.h>
#include <math.h>
#include <algorithm>
#include <list>
#include "webrtc/modules/video_coding/utility/include/exp_filter.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -49,6 +51,11 @@ const double kRampUpBackoffFactor = 2.0;
// The initial average encode time (set to a fairly small value).
const float kInitialAvgEncodeTimeMs = 5.0f;
// The expected time in ms between two captured frames (~30 fps).
const float kSampleDiffMs = 33.0f;
// The maximum exponent to use in VCMExpFilter.
const float kMaxExp = 7.0f;
} // namespace
Statistics::Statistics() :
@@ -76,8 +83,8 @@ void Statistics::AddSample(float sample_ms) {
return;
}
float exp = sample_ms/33.0f;
exp = std::min(exp, 7.0f);
float exp = sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_samples_->Apply(exp, sample_ms);
filtered_variance_->Apply(exp, (sample_ms - filtered_samples_->Value()) *
(sample_ms - filtered_samples_->Value()));
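The exponent passed to VCMExpFilter scales the forgetting factor by the elapsed time between samples. As a sketch, assuming the standard exponential-filter form (the base weight raised to the exponent):

// One update step of a time-scaled exponential filter (assumed form of
// VCMExpFilter::Apply): more time between samples => older state decays more.
float ApplyFilter(float alpha, float exp, float filtered, float sample) {
  float weight = powf(alpha, exp);  // powf from <math.h>, included above
  return weight * filtered + (1.0f - weight) * sample;
}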
@@ -103,6 +110,128 @@ float Statistics::StdDev() const {
uint64_t Statistics::Count() const { return count_; }
// Class for calculating the average encode time.
class OveruseFrameDetector::EncodeTimeAvg {
public:
EncodeTimeAvg()
: kWeightFactor(0.5f),
filtered_encode_time_ms_(new VCMExpFilter(kWeightFactor)) {
filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
}
~EncodeTimeAvg() {}
void AddEncodeSample(float encode_time_ms, int64_t diff_last_sample_ms) {
float exp = diff_last_sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_encode_time_ms_->Apply(exp, encode_time_ms);
}
int filtered_encode_time_ms() const {
return static_cast<int>(filtered_encode_time_ms_->Value() + 0.5);
}
private:
const float kWeightFactor;
scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
};
// Class for calculating the encode usage.
class OveruseFrameDetector::EncodeUsage {
public:
EncodeUsage()
: kWeightFactorFrameDiff(0.998f),
kWeightFactorEncodeTime(0.995f),
filtered_encode_time_ms_(new VCMExpFilter(kWeightFactorEncodeTime)),
filtered_frame_diff_ms_(new VCMExpFilter(kWeightFactorFrameDiff)) {
filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
filtered_frame_diff_ms_->Apply(1.0f, kSampleDiffMs);
}
~EncodeUsage() {}
void AddSample(float sample_ms) {
float exp = sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_frame_diff_ms_->Apply(exp, sample_ms);
}
void AddEncodeSample(float encode_time_ms, int64_t diff_last_sample_ms) {
float exp = diff_last_sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_encode_time_ms_->Apply(exp, encode_time_ms);
}
int UsageInPercent() const {
float frame_diff_ms = std::max(filtered_frame_diff_ms_->Value(), 1.0f);
float encode_usage_percent =
100.0f * filtered_encode_time_ms_->Value() / frame_diff_ms;
return static_cast<int>(encode_usage_percent + 0.5);
}
private:
const float kWeightFactorFrameDiff;
const float kWeightFactorEncodeTime;
scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
scoped_ptr<VCMExpFilter> filtered_frame_diff_ms_;
};
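A worked example of UsageInPercent(), with steady-state filter values assumed: an average encode time of 5 ms against an average frame diff of 33 ms gives 100 * 5 / 33 ≈ 15.15, reported as 15 after rounding (this matches the EncodedUsage unit test below):

// Assumed steady-state values for illustration.
float filtered_encode_time_ms = 5.0f;
float filtered_frame_diff_ms = 33.0f;
float usage = 100.0f * filtered_encode_time_ms / filtered_frame_diff_ms;
int reported = static_cast<int>(usage + 0.5);  // 15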
// Class for calculating the capture queue delay change.
class OveruseFrameDetector::CaptureQueueDelay {
public:
CaptureQueueDelay()
: kWeightFactor(0.5f),
delay_ms_(0),
filtered_delay_ms_per_s_(new VCMExpFilter(kWeightFactor)) {
filtered_delay_ms_per_s_->Apply(1.0f, 0.0f);
}
~CaptureQueueDelay() {}
void FrameCaptured(int64_t now) {
const size_t kMaxSize = 200;
if (frames_.size() > kMaxSize) {
frames_.pop_front();
}
frames_.push_back(now);
}
void FrameProcessingStarted(int64_t now) {
if (frames_.empty()) {
return;
}
delay_ms_ = now - frames_.front();
frames_.pop_front();
}
void CalculateDelayChange(int64_t diff_last_sample_ms) {
if (diff_last_sample_ms <= 0) {
return;
}
float exp = static_cast<float>(diff_last_sample_ms) / kProcessIntervalMs;
exp = std::min(exp, kMaxExp);
filtered_delay_ms_per_s_->Apply(exp,
delay_ms_ * 1000.0f / diff_last_sample_ms);
ClearFrames();
}
void ClearFrames() {
frames_.clear();
}
int delay_ms() const {
return delay_ms_;
}
int filtered_delay_ms_per_s() const {
return static_cast<int>(filtered_delay_ms_per_s_->Value() + 0.5);
}
private:
const float kWeightFactor;
std::list<int64_t> frames_;
int delay_ms_;
scoped_ptr<VCMExpFilter> filtered_delay_ms_per_s_;
};
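A worked example of the ms-per-second normalization in CalculateDelayChange(), with values assumed: if the last frame waited 50 ms between capture and processing start, and 1000 ms passed since the previous sample, the filter input is 50 * 1000 / 1000 = 50 ms of delay per second:

// Assumed values for illustration.
int delay_ms = 50;                    // capture -> processing-start delay
int64_t diff_last_sample_ms = 1000;   // time since the previous sample
float sample = delay_ms * 1000.0f / diff_last_sample_ms;  // 50.0f ms/s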
OveruseFrameDetector::OveruseFrameDetector(Clock* clock,
float normaluse_stddev_ms,
float overuse_stddev_ms)
@@ -119,7 +248,12 @@ OveruseFrameDetector::OveruseFrameDetector(Clock* clock,
in_quick_rampup_(false),
current_rampup_delay_ms_(kStandardRampUpDelayMs),
num_pixels_(0),
avg_encode_time_ms_(kInitialAvgEncodeTimeMs) {}
last_capture_jitter_ms_(-1),
last_encode_sample_ms_(0),
encode_time_(new EncodeTimeAvg()),
encode_usage_(new EncodeUsage()),
capture_queue_delay_(new CaptureQueueDelay()) {
}
OveruseFrameDetector::~OveruseFrameDetector() {
}
@@ -129,6 +263,31 @@ void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
observer_ = observer;
}
int OveruseFrameDetector::AvgEncodeTimeMs() const {
CriticalSectionScoped cs(crit_.get());
return encode_time_->filtered_encode_time_ms();
}
int OveruseFrameDetector::EncodeUsagePercent() const {
CriticalSectionScoped cs(crit_.get());
return encode_usage_->UsageInPercent();
}
int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const {
CriticalSectionScoped cs(crit_.get());
return capture_queue_delay_->filtered_delay_ms_per_s();
}
int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
CriticalSectionScoped cs(crit_.get());
return capture_queue_delay_->delay_ms();
}
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
CriticalSectionScoped cs(crit_.get());
return next_process_time_ - clock_->TimeInMilliseconds();
}
void OveruseFrameDetector::FrameCaptured(int width, int height) {
CriticalSectionScoped cs(crit_.get());
@@ -138,35 +297,38 @@ void OveruseFrameDetector::FrameCaptured(int width, int height) {
num_pixels_ = num_pixels;
capture_deltas_.Reset();
last_capture_time_ = 0;
capture_queue_delay_->ClearFrames();
}
int64_t time = clock_->TimeInMilliseconds();
if (last_capture_time_ != 0) {
capture_deltas_.AddSample(time - last_capture_time_);
encode_usage_->AddSample(time - last_capture_time_);
}
last_capture_time_ = time;
capture_queue_delay_->FrameCaptured(time);
}
void OveruseFrameDetector::FrameProcessingStarted() {
CriticalSectionScoped cs(crit_.get());
capture_queue_delay_->FrameProcessingStarted(clock_->TimeInMilliseconds());
}
void OveruseFrameDetector::FrameEncoded(int encode_time_ms) {
CriticalSectionScoped cs(crit_.get());
const float kWeight = 0.1f;
avg_encode_time_ms_ = kWeight * encode_time_ms +
(1.0f - kWeight) * avg_encode_time_ms_;
int64_t time = clock_->TimeInMilliseconds();
if (last_encode_sample_ms_ != 0) {
int64_t diff_ms = time - last_encode_sample_ms_;
encode_time_->AddEncodeSample(encode_time_ms, diff_ms);
encode_usage_->AddEncodeSample(encode_time_ms, diff_ms);
}
last_encode_sample_ms_ = time;
}
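The per-frame call order the detector expects, as a sketch (frame size, encode time, and threading are assumed for illustration):

// Capture thread: a new frame arrives.
overuse_detector_->FrameCaptured(640, 360);
// Processing thread: the frame is picked up; the capture-queue delay for
// this frame is measured at this point.
overuse_detector_->FrameProcessingStarted();
// After encoding completes: report the measured encode time in ms.
overuse_detector_->FrameEncoded(12);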
int OveruseFrameDetector::last_capture_jitter_ms() const {
CriticalSectionScoped cs(crit_.get());
return static_cast<int>(capture_deltas_.StdDev() + 0.5);
}
int OveruseFrameDetector::avg_encode_time_ms() const {
CriticalSectionScoped cs(crit_.get());
return static_cast<int>(avg_encode_time_ms_ + 0.5);
}
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
CriticalSectionScoped cs(crit_.get());
return next_process_time_ - clock_->TimeInMilliseconds();
return last_capture_jitter_ms_;
}
int32_t OveruseFrameDetector::Process() {
@@ -178,12 +340,15 @@ int32_t OveruseFrameDetector::Process() {
if (now < next_process_time_)
return 0;
int64_t diff_ms = now - next_process_time_ + kProcessIntervalMs;
next_process_time_ = now + kProcessIntervalMs;
// Don't trigger overuse unless we've seen a certain number of frames.
if (capture_deltas_.Count() < kMinFrameSampleCount)
return 0;
capture_queue_delay_->CalculateDelayChange(diff_ms);
if (IsOverusing()) {
// If the last thing we did was going up, and now have to back down, we need
// to check if this peak was short. If so we should back off to avoid going
@@ -228,6 +393,7 @@ int32_t OveruseFrameDetector::Process() {
overuse_stddev_ms_,
normaluse_stddev_ms_);
last_capture_jitter_ms_ = static_cast<int>(capture_deltas_.StdDev() + 0.5);
return 0;
}

View File

@@ -71,19 +71,43 @@ class OveruseFrameDetector : public Module {
// Called for each captured frame.
void FrameCaptured(int width, int height);
// Called when the processing of a captured frame is started.
void FrameProcessingStarted();
// Called for each encoded frame.
void FrameEncoded(int encode_time_ms);
// Accessors.
// The last estimated jitter based on the incoming captured frames.
int last_capture_jitter_ms() const;
// Running average of reported encode time (FrameEncoded()).
// Only used for stats.
int avg_encode_time_ms() const;
int AvgEncodeTimeMs() const;
// The average encode time divided by the average time difference between
// incoming captured frames.
// This value is currently only used for statistics.
int EncodeUsagePercent() const;
// The current delay from when a frame is captured (FrameCaptured()) until
// the frame starts being processed (FrameProcessingStarted()).
// (Note: if a new frame is captured before the previous frame has been
// processed, the previous frame is skipped.)
// The delay is reported in ms of delay per second.
// This value is currently only used for statistics.
int AvgCaptureQueueDelayMsPerS() const;
int CaptureQueueDelayMsPerS() const;
// Implements Module.
virtual int32_t TimeUntilNextProcess() OVERRIDE;
virtual int32_t Process() OVERRIDE;
private:
class EncodeTimeAvg;
class EncodeUsage;
class CaptureQueueDelay;
bool IsOverusing();
bool IsUnderusing(int64_t time_now);
@@ -113,7 +137,13 @@ class OveruseFrameDetector : public Module {
// Number of pixels of last captured frame.
int num_pixels_;
float avg_encode_time_ms_;
int last_capture_jitter_ms_;
int64_t last_encode_sample_ms_;
scoped_ptr<EncodeTimeAvg> encode_time_;
scoped_ptr<EncodeUsage> encode_usage_;
scoped_ptr<CaptureQueueDelay> capture_queue_delay_;
DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
};

View File

@@ -96,17 +96,75 @@ TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
}
TEST_F(OveruseFrameDetectorTest, LastCaptureJitter) {
EXPECT_EQ(0, overuse_detector_->last_capture_jitter_ms());
EXPECT_EQ(-1, overuse_detector_->last_capture_jitter_ms());
TriggerOveruse();
EXPECT_GT(overuse_detector_->last_capture_jitter_ms(), 0);
}
TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
overuse_detector_->FrameCaptured(320, 180);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelay) {
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(100);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayMultipleFrames) {
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(10);
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(20);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 30);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayResetAtResolutionSwitch) {
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(10);
overuse_detector_->FrameCaptured(321, 180);
clock_->AdvanceTimeMilliseconds(20);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(100);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
// No new captured frame. The last delay should be reported.
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
}
TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
const int kInitialAvgEncodeTimeInMs = 5;
EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->avg_encode_time_ms());
for (int i = 0; i < 30; i++)
EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
for (int i = 0; i < 30; i++) {
clock_->AdvanceTimeMilliseconds(33);
overuse_detector_->FrameEncoded(2);
EXPECT_EQ(2, overuse_detector_->avg_encode_time_ms());
}
EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
}
TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
for (int i = 0; i < 30; i++) {
overuse_detector_->FrameCaptured(320, 180);
clock_->AdvanceTimeMilliseconds(5);
overuse_detector_->FrameEncoded(5);
clock_->AdvanceTimeMilliseconds(33 - 5);
}
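// Each frame reports a 5 ms encode time over a ~33 ms interval, so the
// filtered usage converges to 100 * 5 / 33, i.e. 15 percent after rounding.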
EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
}
} // namespace webrtc

View File

@@ -120,6 +120,20 @@ int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel,
int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
int* capture_jitter_ms,
int* avg_encode_time_ms) {
int encode_usage_percent;
int capture_queue_delay_ms_per_s;
return CpuOveruseMeasures(video_channel,
capture_jitter_ms,
avg_encode_time_ms,
&encode_usage_percent,
&capture_queue_delay_ms_per_s);
}
int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
int* encode_usage_percent,
int* capture_queue_delay_ms_per_s) {
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
@@ -140,7 +154,10 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
if (provider) {
ViECapturer* capturer = is.Capture(provider->Id());
if (capturer) {
capturer->CpuOveruseMeasures(capture_jitter_ms, avg_encode_time_ms);
capturer->CpuOveruseMeasures(capture_jitter_ms,
avg_encode_time_ms,
encode_usage_percent,
capture_queue_delay_ms_per_s);
return 0;
}
}

View File

@@ -36,6 +36,11 @@ class ViEBaseImpl
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms);
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
int* encode_usage_percent,
int* capture_queue_delay_ms_per_s);
virtual int CreateChannel(int& video_channel); // NOLINT
virtual int CreateChannel(int& video_channel, // NOLINT
int original_channel);

View File

@@ -268,9 +268,14 @@ void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
}
void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
int* avg_encode_time_ms) const {
int* avg_encode_time_ms,
int* encode_usage_percent,
int* capture_queue_delay_ms_per_s) const {
*capture_jitter_ms = overuse_detector_->last_capture_jitter_ms();
*avg_encode_time_ms = overuse_detector_->avg_encode_time_ms();
*avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
*encode_usage_percent = overuse_detector_->EncodeUsagePercent();
*capture_queue_delay_ms_per_s =
overuse_detector_->AvgCaptureQueueDelayMsPerS();
}
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
@@ -534,6 +539,7 @@ bool ViECapturer::ViECaptureThreadFunction(void* obj) {
bool ViECapturer::ViECaptureProcess() {
if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
overuse_detector_->FrameProcessingStarted();
int64_t encode_start_time = -1;
deliver_cs_->Enter();
if (SwapCapturedAndDeliverFrameIfAvailable()) {

View File

@@ -105,7 +105,9 @@ class ViECapturer
void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
void CpuOveruseMeasures(int* capture_jitter_ms,
int* avg_encode_time_ms) const;
int* avg_encode_time_ms,
int* encode_usage_percent,
int* capture_queue_delay_ms_per_s) const;
protected:
ViECapturer(int capture_id,