Send black frame with previous size when muting.
Instead of sending a black frame that's the size of the VideoFormat, send a black frame in the format we're already sending. This prevents expensive encoder reconfiguration when the sending format is a different resolution, and it significantly speeds up setting a null capturer (removing the capturer) since that no longer entails an encoder reconfiguration.

R=mflodman@webrtc.org, pthatcher@webrtc.org
BUG=1788

Review URL: https://webrtc-codereview.appspot.com/39179004

Cr-Commit-Position: refs/heads/master@{#8405}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8405 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent 7d721eea14
commit b4987bfc24
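As a rough illustration of the idea behind this change (a minimal sketch only, not the actual WebRTC code; the I420Frame, LastSentDimensions, SetFrameToBlack, and MakeMutedBlackFrame names below are hypothetical), the black frame sent on mute is sized from the dimensions of the last frame that was actually sent, rather than from the configured capture VideoFormat, so the encoder never sees a resolution change:

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Hypothetical minimal I420 frame type, for illustration only.
struct I420Frame {
  int width = 0;
  int height = 0;
  std::vector<uint8_t> y, u, v;

  void CreateEmpty(int w, int h) {
    width = w;
    height = h;
    const int half_w = (w + 1) / 2;
    const int half_h = (h + 1) / 2;
    y.assign(static_cast<size_t>(w) * h, 0);
    u.assign(static_cast<size_t>(half_w) * half_h, 0);
    v.assign(static_cast<size_t>(half_w) * half_h, 0);
  }
};

// Fill an I420 frame with black: Y = 16, U = V = 128 (limited-range black).
void SetFrameToBlack(I420Frame* frame) {
  std::memset(frame->y.data(), 16, frame->y.size());
  std::memset(frame->u.data(), 128, frame->u.size());
  std::memset(frame->v.data(), 128, frame->v.size());
}

// Hypothetical record of the dimensions of the last frame we actually sent.
struct LastSentDimensions {
  int width = 176;
  int height = 144;
};

// On mute (capturer removed), build the black frame from the last sent
// dimensions instead of the configured capture format, so feeding it to the
// encoder does not trigger a reconfiguration to a new resolution.
I420Frame MakeMutedBlackFrame(const LastSentDimensions& last) {
  I420Frame black;
  black.CreateEmpty(last.width, last.height);
  SetFrameToBlack(&black);
  return black;
}

int main() {
  LastSentDimensions last;
  last.width = 640;   // pretend we were sending 640x360
  last.height = 360;
  I420Frame black = MakeMutedBlackFrame(last);
  std::cout << "black frame: " << black.width << "x" << black.height << "\n";
}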
@@ -1350,14 +1350,15 @@ class VideoMediaChannelTest : public testing::Test,
     EXPECT_FALSE(renderer_.black_frame());
     EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
     // Make sure a black frame is generated within the specified timeout.
-    // The black frame should be the resolution of the send codec.
+    // The black frame should be the resolution of the previous frame to
+    // prevent expensive encoder reconfigurations.
     EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
-                     codec.width == renderer_.width() &&
-                     codec.height == renderer_.height() &&
+                     format.width == renderer_.width() &&
+                     format.height == renderer_.height() &&
                      renderer_.black_frame(), kTimeout);
     EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
-    EXPECT_EQ(codec.width, renderer_.width());
-    EXPECT_EQ(codec.height, renderer_.height());
+    EXPECT_EQ(format.width, renderer_.width());
+    EXPECT_EQ(format.height, renderer_.height());
     EXPECT_TRUE(renderer_.black_frame());
 
     // The black frame has the same timestamp as the next frame since it's
@@ -574,22 +574,35 @@ class WebRtcEncoderObserver : public webrtc::ViEEncoderObserver {
   bool suspended_;
 };
 
+struct CapturedFrameInfo {
+  CapturedFrameInfo()
+      : width(0),
+        height(0),
+        screencast(false),
+        elapsed_time(-1),
+        timestamp(-1) {}
+  CapturedFrameInfo(size_t width,
+                    size_t height,
+                    bool screencast,
+                    int64_t elapsed_time,
+                    int64_t timestamp)
+      : width(width),
+        height(height),
+        screencast(screencast),
+        elapsed_time(elapsed_time),
+        timestamp(timestamp) {}
+
+  size_t width;
+  size_t height;
+  bool screencast;
+
+  int64_t elapsed_time;
+  int64_t timestamp;
+};
+
 class WebRtcLocalStreamInfo {
  public:
-  WebRtcLocalStreamInfo()
-      : width_(0), height_(0), elapsed_time_(-1), time_stamp_(-1) {}
-  size_t width() const {
-    rtc::CritScope cs(&crit_);
-    return width_;
-  }
-  size_t height() const {
-    rtc::CritScope cs(&crit_);
-    return height_;
-  }
-  int64 elapsed_time() const {
-    rtc::CritScope cs(&crit_);
-    return elapsed_time_;
-  }
+  WebRtcLocalStreamInfo() : time_stamp_(-1) {}
   int64 time_stamp() const {
     rtc::CritScope cs(&crit_);
     return time_stamp_;
@@ -598,30 +611,15 @@ class WebRtcLocalStreamInfo {
     rtc::CritScope cs(&crit_);
     return static_cast<int>(rate_tracker_.units_second());
   }
-  void GetLastFrameInfo(
-      size_t* width, size_t* height, int64* elapsed_time) const {
-    rtc::CritScope cs(&crit_);
-    *width = width_;
-    *height = height_;
-    *elapsed_time = elapsed_time_;
-  }
 
   void UpdateFrame(const VideoFrame* frame) {
     rtc::CritScope cs(&crit_);
-
-    width_ = frame->GetWidth();
-    height_ = frame->GetHeight();
-    elapsed_time_ = frame->GetElapsedTime();
     time_stamp_ = frame->GetTimeStamp();
-
     rate_tracker_.Update(1);
   }
 
  private:
   mutable rtc::CriticalSection crit_;
-  size_t width_;
-  size_t height_;
-  int64 elapsed_time_;
   int64 time_stamp_;
   rtc::RateTracker rate_tracker_;
 
@@ -771,17 +769,14 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> {
   void SetLastCapturedFrameInfo(
       const VideoFrame* frame, bool screencast, bool* changed) {
     CapturedFrameInfo last;
-    if (last_captured_frame_info_.Get(&last) &&
-        frame->GetWidth() == last.width &&
-        frame->GetHeight() == last.height &&
-        screencast == last.screencast) {
-      *changed = false;
-      return;
-    }
+    *changed =
+        !(last_captured_frame_info_.Get(&last) &&
+          frame->GetWidth() == last.width &&
+          frame->GetHeight() == last.height && screencast == last.screencast);
 
-    last_captured_frame_info_.Set(CapturedFrameInfo(
-        frame->GetWidth(), frame->GetHeight(), screencast));
-    *changed = true;
+    last_captured_frame_info_.Set(
+        CapturedFrameInfo(frame->GetWidth(), frame->GetHeight(), screencast,
+                          frame->GetElapsedTime(), frame->GetTimeStamp()));
   }
 
   // Tells the video adapter to adapt down to a given format. The
@@ -4140,33 +4135,23 @@ void WebRtcVideoMediaChannel::QueueBlackFrame(uint32 ssrc, int64 timestamp,
 }
 
 void WebRtcVideoMediaChannel::FlushBlackFrame(
-    uint32 ssrc, int64 timestamp, int interval) {
+    uint32 ssrc, int64 timestamp, int timestamp_delta) {
   WebRtcVideoChannelSendInfo* send_channel = GetSendChannelBySsrc(ssrc);
   if (!send_channel) {
     return;
   }
-  rtc::scoped_ptr<const VideoFrame> black_frame_ptr;
 
-  const WebRtcLocalStreamInfo* channel_stream_info =
-      send_channel->local_stream_info();
-  int64 last_frame_time_stamp = channel_stream_info->time_stamp();
-  if (last_frame_time_stamp == timestamp) {
-    size_t last_frame_width = 0;
-    size_t last_frame_height = 0;
-    int64 last_frame_elapsed_time = 0;
-    channel_stream_info->GetLastFrameInfo(&last_frame_width, &last_frame_height,
-                                          &last_frame_elapsed_time);
-    if (!last_frame_width || !last_frame_height) {
-      return;
-    }
+  CapturedFrameInfo last_frame_info;
+  if (!send_channel->last_captured_frame_info().Get(&last_frame_info))
+    return;
+
+  if (last_frame_info.timestamp == timestamp) {
     WebRtcVideoFrame black_frame;
-    // Black frame is not screencast.
-    const bool screencasting = false;
-    const int64 timestamp_delta = interval;
-    if (!black_frame.InitToBlack(send_codec_->width, send_codec_->height, 1, 1,
-                                 last_frame_elapsed_time + timestamp_delta,
-                                 last_frame_time_stamp + timestamp_delta) ||
-        !SendFrame(send_channel, &black_frame, screencasting)) {
+    if (!black_frame.InitToBlack(static_cast<int>(last_frame_info.width),
+                                 static_cast<int>(last_frame_info.height), 1, 1,
+                                 last_frame_info.elapsed_time + timestamp_delta,
+                                 last_frame_info.timestamp + timestamp_delta) ||
+        !SendFrame(send_channel, &black_frame, last_frame_info.screencast)) {
       LOG(LS_ERROR) << "Failed to send black frame.";
     }
   }
@@ -236,15 +236,7 @@ class WebRtcVideoEngine : public sigslot::has_slots<>,
   rtc::scoped_ptr<rtc::CpuMonitor> cpu_monitor_;
 };
 
-struct CapturedFrameInfo {
-  CapturedFrameInfo() : width(0), height(0), screencast(false) {}
-  CapturedFrameInfo(size_t width, size_t height, bool screencast) :
-      width(width), height(height), screencast(screencast) {}
-
-  size_t width;
-  size_t height;
-  bool screencast;
-};
+struct CapturedFrameInfo;
 
 // TODO(pthatcher): Add VideoOptions.
 struct VideoSendParams {
@@ -1356,25 +1356,17 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
   DestroyVideoEncoder(&allocated_encoder_);
 }
 
-static void SetWebRtcFrameToBlack(webrtc::I420VideoFrame* video_frame) {
-  assert(video_frame != NULL);
-  memset(video_frame->buffer(webrtc::kYPlane),
-         16,
-         video_frame->allocated_size(webrtc::kYPlane));
-  memset(video_frame->buffer(webrtc::kUPlane),
-         128,
-         video_frame->allocated_size(webrtc::kUPlane));
-  memset(video_frame->buffer(webrtc::kVPlane),
-         128,
-         video_frame->allocated_size(webrtc::kVPlane));
-}
-
 static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
                              int width,
                              int height) {
-  video_frame->CreateEmptyFrame(
-      width, height, width, (width + 1) / 2, (width + 1) / 2);
-  SetWebRtcFrameToBlack(video_frame);
+  video_frame->CreateEmptyFrame(width, height, width, (width + 1) / 2,
+                                (width + 1) / 2);
+  memset(video_frame->buffer(webrtc::kYPlane), 16,
+         video_frame->allocated_size(webrtc::kYPlane));
+  memset(video_frame->buffer(webrtc::kUPlane), 128,
+         video_frame->allocated_size(webrtc::kUPlane));
+  memset(video_frame->buffer(webrtc::kVPlane), 128,
+         video_frame->allocated_size(webrtc::kVPlane));
 }
 
 static void ConvertToI420VideoFrame(const VideoFrame& frame,
@@ -1439,6 +1431,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
 
 bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
     VideoCapturer* capturer) {
+  TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetCapturer");
   if (!DisconnectCapturer() && capturer == NULL) {
     return false;
   }
@@ -1451,16 +1444,8 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
       LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
       webrtc::I420VideoFrame black_frame;
 
-      // TODO(pbos): Base width/height on last_dimensions_. This will however
-      // fail the test AddRemoveCapturer which needs to be fixed to permit
-      // sending black frames in the same size that was previously sent.
-      int width = format_.width;
-      int height = format_.height;
-      int half_width = (width + 1) / 2;
-      black_frame.CreateEmptyFrame(
-          width, height, width, half_width, half_width);
-      SetWebRtcFrameToBlack(&black_frame);
-      SetDimensions(width, height, last_dimensions_.is_screencast);
+      CreateBlackFrame(&black_frame, last_dimensions_.width,
+                       last_dimensions_.height);
       stream_->Input()->SwapFrame(&black_frame);
     }
 
@@ -319,9 +319,14 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
   };
 
   struct Dimensions {
-    // Use low width/height to make encoder creation (before first frame)
-    // cheap.
-    Dimensions() : width(16), height(16), is_screencast(false) {}
+    // Initial encoder configuration (QCIF, 176x144) frame (to ensure that
+    // hardware encoders can be initialized). This gives us low memory usage
+    // but also makes it so configuration errors are discovered at the time we
+    // apply the settings rather than when we get the first frame (waiting for
+    // the first frame to know that you gave a bad codec parameter could make
+    // debugging hard).
+    // TODO(pbos): Consider setting up encoders lazily.
+    Dimensions() : width(176), height(144), is_screencast(false) {}
     int width;
     int height;
     bool is_screencast;