Revert 8139 "Implement elapsed time and capture start NTP time e..."
> Implement elapsed time and capture start NTP time estimation.
>
> These two elements are required for end-to-end delay estimation.
>
> BUG=1788
> R=stefan@webrtc.org
> TBR=pthatcher@webrtc.org
>
> Review URL: https://webrtc-codereview.appspot.com/36909004

TBR=pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/41619004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@8143 4adac7df-926f-26a2-2b94-8c16560cd09d
parent dacdd9403d
commit 0f98844749
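The reverted change (r8139) estimated the remote capture start NTP time by subtracting the stream's elapsed time, derived from RTP timestamps on the 90 kHz video clock (cricket::kVideoCodecClockrate in the diff below), from a frame's remote NTP time once RTCP made it available. The following is a minimal standalone sketch of that arithmetic, with hypothetical names and sample values taken from the deleted unit test; it is illustrative only and does not use the WebRTC classes touched by this revert.

    #include <cstdint>
    #include <iostream>

    // 90 kHz video RTP clock, same value as cricket::kVideoCodecClockrate.
    const int kVideoClockrateHz = 90000;

    // Elapsed stream time in milliseconds from an (already unwrapped) RTP delta.
    int64_t ElapsedMsFromRtpDelta(int64_t rtp_delta) {
      return rtp_delta / (kVideoClockrateHz / 1000);  // 90 ticks per millisecond.
    }

    int main() {
      const int64_t rtp_delta = 20 * 90;        // Two frames 20 ms apart.
      const int64_t frame_ntp_ms = 1247891250;  // Remote NTP time of the later frame.
      const int64_t elapsed_ms = ElapsedMsFromRtpDelta(rtp_delta);
      // Start NTP time = current remote NTP time minus the elapsed time.
      const int64_t capture_start_ntp_ms = frame_ntp_ms - elapsed_ms;
      std::cout << elapsed_ms << " " << capture_start_ntp_ms << "\n";  // 20 1247891230
      return 0;
    }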
@@ -44,8 +44,7 @@ class FakeVideoRenderer : public VideoRenderer {
         height_(0),
         num_set_sizes_(0),
         num_rendered_frames_(0),
-        black_frame_(false),
-        last_frame_elapsed_time_ns_(-1) {
+        black_frame_(false) {
   }
 
   virtual bool SetSize(int width, int height, int reserved) {
@@ -76,7 +75,6 @@ class FakeVideoRenderer : public VideoRenderer {
       ++errors_;
       return false;
     }
-    last_frame_elapsed_time_ns_ = frame->GetElapsedTime();
    ++num_rendered_frames_;
    SignalRenderFrame(frame);
    return true;
@@ -104,11 +102,6 @@ class FakeVideoRenderer : public VideoRenderer {
     return black_frame_;
   }
 
-  int64_t last_frame_elapsed_time_ns() const {
-    rtc::CritScope cs(&crit_);
-    return last_frame_elapsed_time_ns_;
-  }
-
   sigslot::signal3<int, int, int> SignalSetSize;
   sigslot::signal1<const VideoFrame*> SignalRenderFrame;
 
@@ -167,7 +160,6 @@ class FakeVideoRenderer : public VideoRenderer {
   int num_set_sizes_;
   int num_rendered_frames_;
   bool black_frame_;
-  int64_t last_frame_elapsed_time_ns_;
   mutable rtc::CriticalSection crit_;
 };
 
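The fake renderer above guards its counters (and, before this revert, the last elapsed time) behind a critical section, taking an RAII scoped lock in every accessor; the same locking discipline shows up as GUARDED_BY annotations later in the diff. Below is a standalone sketch of that pattern using the standard library; rtc::CriticalSection and rtc::CritScope are WebRTC's counterparts of std::mutex and std::lock_guard, and the class name here is made up.

    #include <cassert>
    #include <mutex>

    class FrameCounter {
     public:
      void OnFrame() {
        std::lock_guard<std::mutex> lock(mutex_);  // Like rtc::CritScope cs(&crit_);
        ++num_rendered_frames_;
      }
      int num_rendered_frames() const {
        std::lock_guard<std::mutex> lock(mutex_);
        return num_rendered_frames_;
      }

     private:
      mutable std::mutex mutex_;  // mutable so const accessors can lock, as with crit_.
      int num_rendered_frames_ = 0;
    };

    int main() {
      FrameCounter counter;
      counter.OnFrame();
      assert(counter.num_rendered_frames() == 1);
      return 0;
    }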
@@ -436,14 +436,18 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
     if (!renderer_)
       return 0;
 
+    const int64 elapsed_time_ns =
+        elapsed_time_ms * rtc::kNumNanosecsPerMillisec;
+    const int64 render_time_ns =
+        webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec;
+
     if (!webrtc_frame->native_handle()) {
-      const WebRtcVideoRenderFrame cricket_frame(webrtc_frame, elapsed_time_ms);
+      WebRtcVideoRenderFrame cricket_frame(webrtc_frame, render_time_ns,
+                                           elapsed_time_ns);
       return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
     } else {
-      return DeliverTextureFrame(
-          webrtc_frame->native_handle(),
-          webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec,
-          elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
+      return DeliverTextureFrame(webrtc_frame->native_handle(), render_time_ns,
+                                 elapsed_time_ns);
     }
   }
 
@@ -1870,9 +1870,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
       external_decoder_factory_(external_decoder_factory),
       renderer_(NULL),
       last_width_(-1),
-      last_height_(-1),
-      first_frame_timestamp_(-1),
-      estimated_remote_start_ntp_time_ms_(0) {
+      last_height_(-1) {
   config_.renderer = this;
   // SetRecvCodecs will also reset (start) the VideoReceiveStream.
   SetRecvCodecs(recv_codecs);
@@ -1975,17 +1973,6 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
     const webrtc::I420VideoFrame& frame,
     int time_to_render_ms) {
   rtc::CritScope crit(&renderer_lock_);
-
-  if (first_frame_timestamp_ < 0)
-    first_frame_timestamp_ = frame.timestamp();
-  int64_t rtp_time_elapsed_since_first_frame =
-      (timestamp_wraparound_handler_.Unwrap(frame.timestamp()) -
-       first_frame_timestamp_);
-  int64_t elapsed_time_ms = rtp_time_elapsed_since_first_frame /
-                            (cricket::kVideoCodecClockrate / 1000);
-  if (frame.ntp_time_ms() > 0)
-    estimated_remote_start_ntp_time_ms_ = frame.ntp_time_ms() - elapsed_time_ms;
-
   if (renderer_ == NULL) {
     LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoRenderer.";
     return;
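The block removed here turned the RTP timestamp delta since the first rendered frame into elapsed wall-clock milliseconds by dividing by the 90 kHz video clock, and then derived the start NTP time from it whenever the frame carried a remote NTP time. A simplified, self-contained sketch of that per-frame update follows; the struct and function names are hypothetical, and the real code relies on rtc::TimestampWrapAroundHandler and cricket::kVideoCodecClockrate rather than bare constants.

    #include <cassert>
    #include <cstdint>

    struct ReceiveState {
      int64_t first_rtp = -1;              // Unwrapped RTP timestamp of the first frame.
      int64_t estimated_start_ntp_ms = 0;  // 0 means "not known yet".
    };

    // Called per rendered frame with an unwrapped RTP timestamp and the frame's
    // remote NTP time (0 if not yet available from RTCP).
    void OnFrame(ReceiveState* state, int64_t unwrapped_rtp, int64_t ntp_time_ms) {
      if (state->first_rtp < 0)
        state->first_rtp = unwrapped_rtp;
      const int64_t elapsed_ms = (unwrapped_rtp - state->first_rtp) / (90000 / 1000);
      if (ntp_time_ms > 0)
        state->estimated_start_ntp_ms = ntp_time_ms - elapsed_ms;
    }

    int main() {
      ReceiveState state;
      OnFrame(&state, /*unwrapped_rtp=*/0, /*ntp_time_ms=*/0);  // NTP not known yet.
      assert(state.estimated_start_ntp_ms == 0);
      OnFrame(&state, /*unwrapped_rtp=*/20 * 90, /*ntp_time_ms=*/1247891250);
      assert(state.estimated_start_ntp_ms == 1247891230);       // 20 ms earlier.
      return 0;
    }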
@@ -1998,7 +1985,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
   LOG(LS_VERBOSE) << "RenderFrame: (" << frame.width() << "x" << frame.height()
                   << ")";
 
-  const WebRtcVideoRenderFrame render_frame(&frame, elapsed_time_ms);
+  const WebRtcVideoRenderFrame render_frame(&frame);
   renderer_->RenderFrame(&render_frame);
 }
 
@@ -2045,7 +2032,6 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetVideoReceiverInfo() {
   rtc::CritScope frame_cs(&renderer_lock_);
   info.frame_width = last_width_;
   info.frame_height = last_height_;
-  info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_;
 
   // TODO(pbos): Support or remove the following stats.
   info.packets_concealed = -1;
@@ -48,8 +48,12 @@
 #include "webrtc/video_send_stream.h"
 
 namespace webrtc {
 class VideoCaptureModule;
 class VideoDecoder;
 class VideoEncoder;
 class VideoRender;
 class VideoSendStreamInput;
 class VideoReceiveStream;
 }
 
 namespace rtc {
@@ -75,6 +79,8 @@ class WebRtcVoiceEngine;
 struct CapturedFrame;
 struct Device;
 
+class WebRtcVideoRenderer;
+
 class UnsignalledSsrcHandler {
  public:
   enum Action {
@@ -111,6 +117,7 @@ class WebRtcCallFactory {
 // WebRtcVideoEngine2 is used for the new native WebRTC Video API (webrtc:1667).
 class WebRtcVideoEngine2 : public sigslot::has_slots<> {
  public:
   // Creates the WebRtcVideoEngine2 with internal VideoCaptureModule.
   WebRtcVideoEngine2();
   virtual ~WebRtcVideoEngine2();
 
@@ -437,14 +444,6 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
     cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
     int last_width_ GUARDED_BY(renderer_lock_);
     int last_height_ GUARDED_BY(renderer_lock_);
-    // Expands remote RTP timestamps to int64_t to be able to estimate how long
-    // the stream has been running.
-    rtc::TimestampWrapAroundHandler timestamp_wraparound_handler_
-        GUARDED_BY(renderer_lock_);
-    int64_t first_frame_timestamp_ GUARDED_BY(renderer_lock_);
-    // Start NTP time is estimated as current remote NTP time (estimated from
-    // RTCP) minus the elapsed time, as soon as remote NTP time is available.
-    int64_t estimated_remote_start_ntp_time_ms_ GUARDED_BY(renderer_lock_);
   };
 
   void Construct(webrtc::Call* call, WebRtcVideoEngine2* engine);
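The removed timestamp_wraparound_handler_ member exists because RTP timestamps are 32-bit and, at 90 kHz, wrap roughly every 13 hours; they have to be expanded to 64 bits before a long-running elapsed time can be computed from them, as the removed comment says. Below is a simplified unwrapping sketch conveying the idea; it is not the rtc::TimestampWrapAroundHandler implementation, and it assumes successive timestamps never jump by more than half the 32-bit range.

    #include <cassert>
    #include <cstdint>

    // Simplified 32-bit timestamp unwrapper: keeps a running 64-bit value that
    // keeps increasing across 2^32 wraparounds.
    class SimpleUnwrapper {
     public:
      int64_t Unwrap(uint32_t ts) {
        if (!initialized_) {
          last_unwrapped_ = ts;
          initialized_ = true;
        } else {
          // Interpret the difference as a signed 32-bit step (handles wraparound).
          const int32_t step =
              static_cast<int32_t>(ts - static_cast<uint32_t>(last_unwrapped_));
          last_unwrapped_ += step;
        }
        return last_unwrapped_;
      }

     private:
      bool initialized_ = false;
      int64_t last_unwrapped_ = 0;
    };

    int main() {
      SimpleUnwrapper unwrapper;
      const int64_t first = unwrapper.Unwrap(0xFFFFFFFFu);  // Last value before wrap.
      const int64_t next = unwrapper.Unwrap(20 * 90 - 1);   // 20 ms later at 90 kHz, wrapped.
      assert(next - first == 20 * 90);  // Delta is still 1800 ticks.
      return 0;
    }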
@@ -73,19 +73,6 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
       cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
 }
 
-static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
-                             int width,
-                             int height) {
-  video_frame->CreateEmptyFrame(
-      width, height, width, (width + 1) / 2, (width + 1) / 2);
-  memset(video_frame->buffer(webrtc::kYPlane), 16,
-         video_frame->allocated_size(webrtc::kYPlane));
-  memset(video_frame->buffer(webrtc::kUPlane), 128,
-         video_frame->allocated_size(webrtc::kUPlane));
-  memset(video_frame->buffer(webrtc::kVPlane), 128,
-         video_frame->allocated_size(webrtc::kVPlane));
-}
-
 }  // namespace
 
 namespace cricket {
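The deleted CreateBlackFrame helper filled an I420 frame with luma 16 and chroma 128, the conventional "black" in limited-range YUV. A standalone sketch of the same fill on plain buffers (std::vector instead of webrtc::I420VideoFrame; sizes rather than strides, so this is an approximation of the helper, not a drop-in replacement):

    #include <cassert>
    #include <vector>

    // Fills separate I420 planes with black: luma 16, both chroma planes 128.
    void FillI420Black(std::vector<unsigned char>* y,
                       std::vector<unsigned char>* u,
                       std::vector<unsigned char>* v,
                       int width, int height) {
      y->assign(static_cast<size_t>(width) * height, 16);
      const size_t chroma_size =
          static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
      u->assign(chroma_size, 128);
      v->assign(chroma_size, 128);
    }

    int main() {
      std::vector<unsigned char> y, u, v;
      FillI420Black(&y, &u, &v, 4, 4);
      assert(y.size() == 16 && u.size() == 4 && v.size() == 4);
      assert(y[0] == 16 && u[0] == 128 && v[0] == 128);
      return 0;
    }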
@@ -189,11 +176,6 @@ bool FakeVideoReceiveStream::IsReceiving() const {
   return receiving_;
 }
 
-void FakeVideoReceiveStream::InjectFrame(const webrtc::I420VideoFrame& frame,
-                                         int time_to_render_ms) {
-  config_.renderer->RenderFrame(frame, time_to_render_ms);
-}
-
 webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const {
   return webrtc::VideoReceiveStream::Stats();
 }
@@ -1601,44 +1583,12 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse) {
   EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
 }
 
-TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
-  // Start at last timestamp to verify that wraparounds are estimated correctly.
-  static const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
-  static const int64_t kInitialNtpTimeMs = 1247891230;
-  static const int kFrameOffsetMs = 20;
-  EXPECT_TRUE(channel_->SetRecvCodecs(engine_.codecs()));
+TEST_F(WebRtcVideoChannel2Test, DISABLED_WebRtcShouldLog) {
+  FAIL() << "Not implemented.";  // TODO(pbos): Implement.
+}
 
-  FakeVideoReceiveStream* stream = AddRecvStream();
-  cricket::FakeVideoRenderer renderer;
-  EXPECT_TRUE(channel_->SetRenderer(last_ssrc_, &renderer));
-  EXPECT_TRUE(channel_->SetRender(true));
-
-  webrtc::I420VideoFrame video_frame;
-  CreateBlackFrame(&video_frame, 4, 4);
-  video_frame.set_timestamp(kInitialTimestamp);
-  // Initial NTP time is not available on the first frame, but should still be
-  // able to be estimated.
-  stream->InjectFrame(video_frame, 0);
-
-  EXPECT_EQ(1, renderer.num_rendered_frames());
-  EXPECT_EQ(0, renderer.last_frame_elapsed_time_ns());
-
-  // This timestamp is kInitialTimestamp (-1) + kFrameOffsetMs * 90, which
-  // triggers a constant-overflow warning, hence we're calculating it explicitly
-  // here.
-  video_frame.set_timestamp(kFrameOffsetMs * 90 - 1);
-  video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs);
-  stream->InjectFrame(video_frame, 0);
-
-  EXPECT_EQ(2, renderer.num_rendered_frames());
-  EXPECT_EQ(kFrameOffsetMs * rtc::kNumNanosecsPerMillisec,
-            renderer.last_frame_elapsed_time_ns());
-
-  // Verify that NTP time has been correctly deduced.
-  cricket::VideoMediaInfo info;
-  ASSERT_TRUE(channel_->GetStats(cricket::StatsOptions(), &info));
-  ASSERT_EQ(1u, info.receivers.size());
-  EXPECT_EQ(kInitialNtpTimeMs, info.receivers[0].capture_start_ntp_time_ms);
+TEST_F(WebRtcVideoChannel2Test, DISABLED_WebRtcShouldNotLog) {
+  FAIL() << "Not implemented.";  // TODO(pbos): Implement.
 }
 
 TEST_F(WebRtcVideoChannel2Test, SetDefaultSendCodecs) {
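The deleted test starts at RTP timestamp 0xFFFFFFFF precisely so that the second frame's timestamp wraps around; its comment notes that writing kInitialTimestamp + kFrameOffsetMs * 90 directly would trigger a constant-overflow warning, hence the explicit kFrameOffsetMs * 90 - 1. A few lines of standalone arithmetic showing why the two expressions denote the same wrapped value, and why the elapsed delta is still 20 ms:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
      const int kFrameOffsetMs = 20;
      // Advancing 20 ms at the 90 kHz RTP clock is 1800 ticks; unsigned arithmetic
      // wraps modulo 2^32, landing on 20 * 90 - 1.
      const uint32_t advanced =
          kInitialTimestamp + static_cast<uint32_t>(kFrameOffsetMs * 90);
      assert(advanced == static_cast<uint32_t>(kFrameOffsetMs * 90 - 1));
      // The unwrapped delta is still the full 1800 ticks, i.e. 20 ms of elapsed time.
      assert(static_cast<uint32_t>(advanced - kInitialTimestamp) == 1800u);
      return 0;
    }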
@@ -84,8 +84,6 @@ class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
 
   bool IsReceiving() const;
 
-  void InjectFrame(const webrtc::I420VideoFrame& frame, int time_to_render_ms);
-
  private:
   virtual webrtc::VideoReceiveStream::Stats GetStats() const OVERRIDE;
 
@@ -359,10 +359,20 @@ void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                     elapsed_time, time_stamp, 0);
 }
 
+WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
+    const webrtc::I420VideoFrame* frame)
+    : frame_(frame),
+      // Convert millisecond render time to ns timestamp.
+      // Convert 90K rtp timestamp to ns timestamp.
+      timestamp_((frame_->timestamp() / 90) * rtc::kNumNanosecsPerMillisec),
+      elapsed_time_(frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec) {
+}
+
 WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
     const webrtc::I420VideoFrame* frame,
-    int64_t elapsed_time_ms)
-    : frame_(frame), elapsed_time_ms_(elapsed_time_ms) {
+    int64_t timestamp,
+    int64_t elapsed_time)
+    : frame_(frame), timestamp_(timestamp), elapsed_time_(elapsed_time) {
 }
 
 bool WebRtcVideoRenderFrame::InitToBlack(int w,
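The restored single-argument constructor derives both values from the frame itself: the 90 kHz RTP timestamp divided by 90 gives whole milliseconds, which are then scaled to nanoseconds, and the millisecond render time is scaled the same way. A small standalone illustration of those two conversions (the constant has the same value as rtc::kNumNanosecsPerMillisec; the function names are made up):

    #include <cassert>
    #include <cstdint>

    const int64_t kNumNanosecsPerMillisec = 1000000;

    // 90 kHz RTP timestamp -> nanoseconds (via whole milliseconds, as in the diff).
    int64_t RtpTimestampToNs(int64_t rtp_timestamp) {
      return (rtp_timestamp / 90) * kNumNanosecsPerMillisec;
    }

    // Millisecond render time -> nanoseconds.
    int64_t RenderTimeMsToNs(int64_t render_time_ms) {
      return render_time_ms * kNumNanosecsPerMillisec;
    }

    int main() {
      assert(RtpTimestampToNs(1800) == 20 * kNumNanosecsPerMillisec);  // 1800 ticks = 20 ms.
      assert(RenderTimeMsToNs(33) == 33000000);
      return 0;
    }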
@@ -443,18 +453,16 @@ size_t WebRtcVideoRenderFrame::GetPixelHeight() const {
 }
 
 int64_t WebRtcVideoRenderFrame::GetElapsedTime() const {
-  return elapsed_time_ms_ * rtc::kNumNanosecsPerMillisec;
+  return elapsed_time_;
 }
-
 int64_t WebRtcVideoRenderFrame::GetTimeStamp() const {
-  return frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec;
+  return timestamp_;
 }
-
 void WebRtcVideoRenderFrame::SetElapsedTime(int64_t elapsed_time) {
-  UNIMPLEMENTED;
+  elapsed_time_ = elapsed_time;
 }
 void WebRtcVideoRenderFrame::SetTimeStamp(int64_t time_stamp) {
-  UNIMPLEMENTED;
+  timestamp_ = time_stamp;
 }
 
 int WebRtcVideoRenderFrame::GetRotation() const {
@@ -139,8 +139,10 @@ class WebRtcVideoFrame : public VideoFrame {
 // be written to.
 class WebRtcVideoRenderFrame : public VideoFrame {
  public:
+  explicit WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame);
   WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame,
-                         int64_t elapsed_time_ms);
+                         int64_t timestamp,
+                         int64_t elapsed_time);
 
   virtual bool InitToBlack(int w,
                            int h,
@@ -193,7 +195,8 @@ class WebRtcVideoRenderFrame : public VideoFrame {
 
  private:
   const webrtc::I420VideoFrame* const frame_;
-  const int64_t elapsed_time_ms_;
+  int64_t timestamp_;
+  int64_t elapsed_time_;
 };
 
 }  // namespace cricket