Add the possibility to use encoding time when triggering underuse for frame-based overuse detection.

BUG=
TEST=Added unittest.
R=asapersson@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1885004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4452 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2013-07-31 16:42:21 +00:00
parent 09e8c47ee5
commit d4412feeb0
4 changed files with 333 additions and 39 deletions

View File

@ -10,24 +10,48 @@
#include "webrtc/video_engine/overuse_frame_detector.h"
#include <assert.h>
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/video_engine/include/vie_base.h"
namespace webrtc {
// TODO(mflodman) Test different thresholds.
// TODO(mflodman) Test different values for all of these to trigger correctly,
// avoid fluctuations etc.
namespace {
// Interval for 'Process' to be called.
const int64_t kProcessIntervalMs = 2000;
// Duration capture and encode samples are valid.
// All durations below are in milliseconds and are compared against
// Clock::TimeInMilliseconds() (int64_t), so they are consistently int64_t.
const int64_t kOveruseHistoryMs = 5000;
// The minimum history to trigger an overuse or underuse.
const int64_t kMinValidHistoryMs = kOveruseHistoryMs / 2;
// Encode / capture ratio to decide an overuse.
const float kMinEncodeRatio = 29 / 30.0f;
// Minimum time between two callbacks.
const int64_t kMinCallbackDeltaMs = 30000;
// Safety margin between encode time for different resolutions to decide if we
// can trigger an underuse callback.
// TODO(mflodman): This should be improved, e.g. test time per pixel?
const float kIncreaseThreshold = 1.5f;
}  // namespace
OveruseFrameDetector::OveruseFrameDetector(Clock* clock)
: crit_(CriticalSectionWrapper::CreateCriticalSection()),
observer_(NULL),
clock_(clock),
last_process_time_(clock->TimeInMilliseconds()),
last_callback_time_(clock->TimeInMilliseconds()) {
last_callback_time_(clock->TimeInMilliseconds()),
underuse_encode_timing_enabled_(false),
num_pixels_(0),
max_num_pixels_(0) {
}
OveruseFrameDetector::~OveruseFrameDetector() {
@ -38,18 +62,30 @@ void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
observer_ = observer;
}
void OveruseFrameDetector::CapturedFrame() {
// Enables/disables taking the measured encode time into account when deciding
// whether an underuse (NormalUsage) callback may be triggered; the flag is
// read by IsUnderusing(). Protected by 'crit_'.
void OveruseFrameDetector::set_underuse_encode_timing_enabled(bool enable) {
  CriticalSectionScoped cs(crit_.get());
  underuse_encode_timing_enabled_ = enable;
}
void OveruseFrameDetector::FrameCaptured() {
CriticalSectionScoped cs(crit_.get());
CleanOldSamples();
capture_times_.push_back(clock_->TimeInMilliseconds());
}
void OveruseFrameDetector::EncodedFrame() {
void OveruseFrameDetector::FrameEncoded(int64_t encode_time, size_t width,
size_t height) {
assert(encode_time >= 0);
CriticalSectionScoped cs(crit_.get());
encode_times_.push_back(clock_->TimeInMilliseconds());
// The frame is disregarded in case of a reset, to startup in a fresh state.
if (MaybeResetResolution(width, height))
return;
encode_times_.push_back(std::make_pair(clock_->TimeInMilliseconds(),
encode_time));
}
// Implements Module: returns the number of milliseconds remaining until
// Process() should run again (non-positive when it is already due).
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
  CriticalSectionScoped cs(crit_.get());
  const int64_t now = clock_->TimeInMilliseconds();
  return last_process_time_ + kProcessIntervalMs - now;
}
@ -60,40 +96,112 @@ int32_t OveruseFrameDetector::Process() {
return 0;
last_process_time_ = now;
if (!observer_ || encode_times_.size() == 0 || capture_times_.size() == 0)
RemoveOldSamples();
// Don't trigger an overuse unless we've encoded at least one frame.
if (!observer_ || encode_times_.empty() || capture_times_.empty())
return 0;
CleanOldSamples();
if (encode_times_.front() > now - kOveruseHistoryMs / 2) {
// At least half the maximum history should be filled before we trigger an
// overuse.
// TODO(mflodman) Shall the time difference between the first and the last
// sample be checked instead?
if (encode_times_.front().first > now - kMinValidHistoryMs) {
return 0;
}
float encode_ratio = encode_times_.size() /
static_cast<float>(capture_times_.size());
if (encode_ratio < kMinEncodeRatio) {
if (IsOverusing()) {
// Overuse detected.
// Remember the average encode time for this overuse, as a help to trigger
// normal usage.
encode_overuse_times_[num_pixels_] = CalculateAverageEncodeTime();
RemoveAllSamples();
observer_->OveruseDetected();
capture_times_.clear();
encode_times_.clear();
last_callback_time_ = now;
} else if (last_callback_time_ < now - kMinCallbackDeltaMs) {
// TODO(mflodman) This should only be triggered if we have a good reason to
// believe we can increase the resolution again.
} else if (IsUnderusing(now)) {
RemoveAllSamples();
observer_->NormalUsage();
last_callback_time_ = now;
capture_times_.clear();
encode_times_.clear();
}
return 0;
}
void OveruseFrameDetector::CleanOldSamples() {
void OveruseFrameDetector::RemoveOldSamples() {
int64_t time_now = clock_->TimeInMilliseconds();
while (!capture_times_.empty() &&
capture_times_.front() < time_now - kOveruseHistoryMs) {
capture_times_.pop_front();
}
while (!encode_times_.empty() &&
encode_times_.front() < time_now - kOveruseHistoryMs) {
encode_times_.front().first < time_now - kOveruseHistoryMs) {
encode_times_.pop_front();
}
}
// Drops the complete sample history, used after a callback has been fired or
// the resolution changed so subsequent decisions start from a clean slate.
void OveruseFrameDetector::RemoveAllSamples() {
  encode_times_.clear();
  capture_times_.clear();
}
int64_t OveruseFrameDetector::CalculateAverageEncodeTime() const {
if (encode_times_.empty())
return 0;
int64_t total_encode_time = 0;
for (std::list<EncodeTime>::const_iterator it = encode_times_.begin();
it != encode_times_.end(); ++it) {
total_encode_time += it->second;
}
return total_encode_time / encode_times_.size();
}
// Detects a change of encode resolution. Returns true (and clears all history,
// since samples from the old resolution would skew the calculations) when the
// pixel count differs from the last seen frame, false otherwise. Also tracks
// the maximum resolution ever encoded, used to cap underuse callbacks.
bool OveruseFrameDetector::MaybeResetResolution(size_t width, size_t height) {
  // Explicit narrowing cast: frame dimensions are far below the range where
  // width * height could exceed int.
  const int num_pixels = static_cast<int>(width * height);
  if (num_pixels == num_pixels_)
    return false;
  RemoveAllSamples();
  num_pixels_ = num_pixels;
  if (num_pixels > max_num_pixels_)
    max_num_pixels_ = num_pixels;
  return true;
}
// An overuse is declared when the ratio of encoded to captured frames in the
// history window drops below kMinEncodeRatio, i.e. frames are being skipped.
bool OveruseFrameDetector::IsOverusing() {
  if (encode_times_.empty())
    return false;
  const float ratio =
      encode_times_.size() / static_cast<float>(capture_times_.size());
  return ratio < kMinEncodeRatio;
}
// Decides whether a NormalUsage (underuse) callback may be fired: enough time
// must have passed since the last callback and the current resolution must be
// below the maximum seen. When 'underuse_encode_timing_enabled_' is set, the
// current average encode time must additionally be comfortably below the
// encode time that triggered the overuse at the next higher resolution.
bool OveruseFrameDetector::IsUnderusing(int64_t time_now) {
  if (time_now < last_callback_time_ + kMinCallbackDeltaMs ||
      num_pixels_ >= max_num_pixels_) {
    return false;
  }
  if (!underuse_encode_timing_enabled_)
    return true;
  // 'encode_overuse_times_' is keyed on pixel count, so upper_bound() yields
  // the smallest resolution above the current one with a recorded overuse
  // encode time.
  std::map<int, int64_t>::const_iterator it =
      encode_overuse_times_.upper_bound(num_pixels_);
  if (it == encode_overuse_times_.end()) {
    // No overuse recorded for a higher resolution. This can happen if the
    // resolution was decreased by the user before an overuse was triggered,
    // so don't assert; be conservative and don't signal an underuse.
    return false;
  }
  const int64_t prev_overuse_encode_time = it->second;
  // TODO(mflodman) Use some other way to guess if an increased resolution
  // might work or not, e.g. encode time per pixel?
  return CalculateAverageEncodeTime() * kIncreaseThreshold <=
         prev_overuse_encode_time;
}
} // namespace webrtc

View File

@ -12,6 +12,8 @@
#define WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
#include <list>
#include <map>
#include <utility>
#include "webrtc/modules/interface/module.h"
#include "webrtc/system_wrappers/interface/constructor_magic.h"
@ -30,20 +32,39 @@ class OveruseFrameDetector : public Module {
explicit OveruseFrameDetector(Clock* clock);
~OveruseFrameDetector();
// Registers an observer receiving overuse and underuse callbacks. Set
// 'observer' to NULL to disable callbacks.
void SetObserver(CpuOveruseObserver* observer);
// Called for each new captured frame.
void CapturedFrame();
// TODO(mflodman): Move to another API?
// Enables usage of encode time to trigger normal usage after an overuse,
// default false.
void set_underuse_encode_timing_enabled(bool enable);
// Called for each captured frame.
void FrameCaptured();
// Called for every encoded frame.
void EncodedFrame();
void FrameEncoded(int64_t encode_time, size_t width, size_t height);
// Implements Module.
virtual int32_t TimeUntilNextProcess();
virtual int32_t Process();
private:
void CleanOldSamples();
// All private functions are assumed to be critical section protected.
// Clear samples older than the overuse history.
void RemoveOldSamples();
// Clears the entire history, including samples still affecting the
// calculations.
void RemoveAllSamples();
int64_t CalculateAverageEncodeTime() const;
// Returns true and resets calculations and history if a new resolution is
// discovered, false otherwise.
bool MaybeResetResolution(size_t width, size_t height);
bool IsOverusing();
bool IsUnderusing(int64_t time_now);
// Protecting all members.
scoped_ptr<CriticalSectionWrapper> crit_;
@ -55,11 +76,21 @@ class OveruseFrameDetector : public Module {
int64_t last_process_time_;
int64_t last_callback_time_;
// Capture time for frames.
// Sorted list of times captured frames were delivered, oldest frame first.
std::list<int64_t> capture_times_;
// <Encode report time, time spent encoding the frame>.
typedef std::pair<int64_t, int64_t> EncodeTime;
// Sorted list with oldest frame first.
std::list<EncodeTime> encode_times_;
// Start encode time for a frame.
std::list<int64_t> encode_times_;
// True if encode time should be considered to trigger an underuse.
bool underuse_encode_timing_enabled_;
// Number of pixels in the currently encoded resolution.
int num_pixels_;
// Maximum resolution encoded.
int max_num_pixels_;
// <number of pixels, average encode time triggering an overuse>.
std::map<int, int64_t> encode_overuse_times_;
DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
};

View File

@ -23,6 +23,9 @@ using ::testing::Return;
namespace webrtc {
// These mirror the constants in overuse_frame_detector.cc and must be kept in
// sync with them.
const int kProcessIntervalMs = 2000;
const int kOveruseHistoryMs = 5000;
const int kMinCallbackDeltaMs = 30000;
const int64_t kMinValidHistoryMs = kOveruseHistoryMs / 2;
class MockCpuOveruseObserver : public CpuOveruseObserver {
public:
@ -41,6 +44,32 @@ class OveruseFrameDetectorTest : public ::testing::Test {
overuse_detector_.reset(new OveruseFrameDetector(clock_.get()));
overuse_detector_->SetObserver(observer_.get());
}
// Feeds the detector 'num_frames' captured-and-encoded frames of the given
// resolution, advancing the simulated clock 'frame_interval_ms' per frame.
void CaptureAndEncodeFrames(int num_frames, int64_t frame_interval_ms,
                            int encode_time_ms, size_t width, size_t height) {
  for (int i = 0; i < num_frames; ++i) {
    overuse_detector_->FrameCaptured();
    overuse_detector_->FrameEncoded(encode_time_ms, width, height);
    clock_->AdvanceTimeMilliseconds(frame_interval_ms);
  }
}
// Runs capture/encode traffic for 'overuse_time_ms' where every 15th captured
// frame is never encoded, giving an encode/capture ratio of 14/15 (~0.933),
// below kMinEncodeRatio (29/30), which the detector reads as an overuse.
void CaptureAndEncodeWithOveruse(int overuse_time_ms,
                                 int64_t frame_interval_ms,
                                 int64_t encode_time_ms, size_t width,
                                 size_t height) {
  // 'encodes_before_dropping' is derived from 'kMinEncodeRatio' in
  // 'overuse_frame_detector.cc'.
  const int encodes_before_dropping = 14;
  for (int time_ms = 0; time_ms < overuse_time_ms;
       time_ms += frame_interval_ms * (1 + encodes_before_dropping)) {
    CaptureAndEncodeFrames(encodes_before_dropping, frame_interval_ms,
                           encode_time_ms, width, height);
    // Capture one frame that is never reported as encoded.
    overuse_detector_->FrameCaptured();
    clock_->AdvanceTimeMilliseconds(frame_interval_ms);
  }
}
scoped_ptr<SimulatedClock> clock_;
scoped_ptr<MockCpuOveruseObserver> observer_;
scoped_ptr<OveruseFrameDetector> overuse_detector_;
@ -48,21 +77,139 @@ class OveruseFrameDetectorTest : public ::testing::Test {
TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
EXPECT_EQ(overuse_detector_->TimeUntilNextProcess(), kProcessIntervalMs);
overuse_detector_->CapturedFrame();
overuse_detector_->EncodedFrame();
clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
// Enough history to trigger an overuse, but life is good so far.
int frame_interval_ms = 33;
int num_frames = kMinValidHistoryMs / frame_interval_ms + 1;
CaptureAndEncodeFrames(num_frames, frame_interval_ms, 2, 2, 2);
EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
overuse_detector_->Process();
overuse_detector_->CapturedFrame();
clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
// Trigger an overuse.
CaptureAndEncodeWithOveruse(kOveruseHistoryMs, frame_interval_ms, 2, 2, 2);
EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
overuse_detector_->Process();
}
TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
overuse_detector_->set_underuse_encode_timing_enabled(true);
// Start with triggering an overuse.
// A new resolution will trigger a reset, so add one frame to get going.
int frame_interval_ms = 33;
CaptureAndEncodeWithOveruse(kMinValidHistoryMs, frame_interval_ms, 2, 2, 2);
EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
overuse_detector_->Process();
clock_->AdvanceTimeMilliseconds(5000);
overuse_detector_->CapturedFrame();
overuse_detector_->EncodedFrame();
// Make everything good again, but don't advance time long enough to trigger
// an underuse.
int num_frames = kOveruseHistoryMs / frame_interval_ms;
CaptureAndEncodeFrames(num_frames, frame_interval_ms, 1, 1, 1);
EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
overuse_detector_->Process();
// Advance time long enough to trigger an increase callback.
num_frames = (kMinCallbackDeltaMs - kOveruseHistoryMs + 1) /
(frame_interval_ms - 0.5f);
CaptureAndEncodeFrames(num_frames, frame_interval_ms, 1, 1, 1);
EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(1);
overuse_detector_->Process();
}
// Two consecutive overuses (4x4 then 2x2) followed by two step-wise underuse
// recoveries back up to the maximum resolution. EXPECT_CALL expectations are
// set immediately before each Process() call they apply to.
TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
  overuse_detector_->set_underuse_encode_timing_enabled(true);
  // Start with triggering an overuse. 16 ms encode time is recorded as the
  // overuse level for the 4x4 resolution.
  int frame_interval_ms = 33;
  CaptureAndEncodeWithOveruse(kMinValidHistoryMs, frame_interval_ms, 16, 4, 4);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
  overuse_detector_->Process();
  // A second overuse at the lower 2x2 resolution, recording 4 ms for it.
  CaptureAndEncodeWithOveruse(kOveruseHistoryMs, frame_interval_ms, 4, 2, 2);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
  overuse_detector_->Process();
  // Let life be good again and wait for an underuse callback.
  int num_frames = kMinCallbackDeltaMs / (frame_interval_ms - 0.5f);
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 1, 1, 1);
  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(1);
  overuse_detector_->Process();
  // And one more.
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 4, 2, 2);
  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(1);
  overuse_detector_->Process();
  // But no more since we're at the max resolution.
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 4, 4, 4);
  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
  overuse_detector_->Process();
}
// An overuse followed by an encode time that never drops enough to allow an
// underuse callback, even after the minimum callback interval has elapsed.
TEST_F(OveruseFrameDetectorTest, OveruseAndNoRecovery) {
  overuse_detector_->set_underuse_encode_timing_enabled(true);
  // Start with triggering an overuse; 4 ms is recorded as the overuse encode
  // time for the 2x2 resolution.
  int frame_interval_ms = 33;
  CaptureAndEncodeWithOveruse(kMinValidHistoryMs, frame_interval_ms, 4, 2, 2);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
  overuse_detector_->Process();
  // Everything is fine, but we haven't waited long enough to trigger an
  // increase callback.
  CaptureAndEncodeFrames(30, frame_interval_ms, 3, 1, 1);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
  overuse_detector_->Process();
  // Advance time enough to trigger an increase callback, but encode time
  // shouldn't have decreased enough to try an increase (3 ms * 1.5 > 4 ms).
  int num_frames = kMinCallbackDeltaMs / (frame_interval_ms - 0.5f);
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 3, 1, 1);
  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
  overuse_detector_->Process();
}
// With encode timing disabled, an underuse callback fires purely on elapsed
// time — even though the encode time stays high enough that the timing-aware
// path (see 'OveruseAndNoRecovery') would have suppressed it.
TEST_F(OveruseFrameDetectorTest, NoEncodeTimeForUnderuse) {
  overuse_detector_->set_underuse_encode_timing_enabled(false);
  // Start with triggering an overuse.
  int frame_interval_ms = 33;
  CaptureAndEncodeWithOveruse(kMinValidHistoryMs, frame_interval_ms, 4, 2, 2);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
  overuse_detector_->Process();
  // Everything is fine, but we haven't waited long enough to trigger an
  // increase callback.
  int num_frames = 1000 / (frame_interval_ms - 0.5f);
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 3, 1, 1);
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
  overuse_detector_->Process();
  // Advance time enough to allow underuse, but keep encode time too high to
  // trigger an underuse if accounted for, see 'OveruseAndNoRecovery' test case.
  num_frames = kMinCallbackDeltaMs / (frame_interval_ms - 0.5f);
  CaptureAndEncodeFrames(num_frames, frame_interval_ms, 3, 1, 1);
  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(1);
  overuse_detector_->Process();
}
// A resolution change mid-stream must reset the gathered history so that
// samples from the old resolution can't trigger an overuse.
TEST_F(OveruseFrameDetectorTest, ResolutionChange) {
  overuse_detector_->set_underuse_encode_timing_enabled(true);
  int frame_interval_ms = 33;
  CaptureAndEncodeWithOveruse(kMinValidHistoryMs / 2, frame_interval_ms, 3, 1,
                              1);
  // Keep overusing, but with a new resolution.
  CaptureAndEncodeWithOveruse(kMinValidHistoryMs - frame_interval_ms,
                              frame_interval_ms, 4, 2, 2);
  // Enough samples and time to trigger an overuse, but resolution reset should
  // prevent this.
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
  overuse_detector_->Process();
  // Fill the history. Two frames 2500 ms apart span the minimum valid history
  // without needing many samples.
  CaptureAndEncodeFrames(2, kOveruseHistoryMs / 2, 3, 1, 1);
  // Capture a frame without finish encoding to trigger an overuse.
  overuse_detector_->FrameCaptured();
  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
  overuse_detector_->Process();
}
} // namespace webrtc

View File

@ -16,6 +16,7 @@
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
@ -349,7 +350,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
captured_frame_.SwapFrame(&video_frame);
capture_event_.Set();
overuse_detector_->CapturedFrame();
overuse_detector_->FrameCaptured();
return;
}
@ -513,12 +514,19 @@ bool ViECapturer::ViECaptureProcess() {
if (!captured_frame_.IsZeroSize()) {
// New I420 frame.
capture_cs_->Enter();
// The frame sent for encoding, update the overuse detector.
overuse_detector_->EncodedFrame();
deliver_frame_.SwapFrame(&captured_frame_);
captured_frame_.ResetSize();
capture_cs_->Leave();
int64_t encode_start_time =
Clock::GetRealTimeClock()->TimeInMilliseconds();
DeliverI420Frame(&deliver_frame_);
// The frame has been encoded, update the overuse detector with the
// duration.
overuse_detector_->FrameEncoded(
Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time,
deliver_frame_.width(), deliver_frame_.height());
}
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) {