(Auto)update libjingle 66303009 -> 66322380

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6065 4adac7df-926f-26a2-2b94-8c16560cd09d
buildbot@webrtc.org committed 2014-05-06 21:36:31 +00:00
parent a18b4c96af
commit 0581f0ba0a
4 changed files with 51 additions and 8 deletions

@@ -105,6 +105,9 @@ const char StatsReport::kStatsValueNameMinPlayoutDelayMs[] =
"googMinPlayoutDelayMs"; "googMinPlayoutDelayMs";
const char StatsReport::kStatsValueNameRenderDelayMs[] = "googRenderDelayMs"; const char StatsReport::kStatsValueNameRenderDelayMs[] = "googRenderDelayMs";
const char StatsReport::kStatsValueNameCaptureStartNtpTimeMs[] =
"googCaptureStartNtpTimeMs";
const char StatsReport::kStatsValueNameFrameRateInput[] = "googFrameRateInput"; const char StatsReport::kStatsValueNameFrameRateInput[] = "googFrameRateInput";
const char StatsReport::kStatsValueNameFrameRateSent[] = "googFrameRateSent"; const char StatsReport::kStatsValueNameFrameRateSent[] = "googFrameRateSent";
const char StatsReport::kStatsValueNameFrameWidthInput[] = const char StatsReport::kStatsValueNameFrameWidthInput[] =
@@ -338,6 +341,9 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
                    info.min_playout_delay_ms);
   report->AddValue(StatsReport::kStatsValueNameRenderDelayMs,
                    info.render_delay_ms);
+  report->AddValue(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+                   info.capture_start_ntp_time_ms);
 }

 void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {

@@ -161,6 +161,7 @@ class StatsReport {
   static const char kStatsValueNameJitterBufferMs[];
   static const char kStatsValueNameMinPlayoutDelayMs[];
   static const char kStatsValueNameRenderDelayMs[];
+  static const char kStatsValueNameCaptureStartNtpTimeMs[];
   static const char kStatsValueNameFrameRateInput[];
   static const char kStatsValueNameFrameRateSent[];
   static const char kStatsValueNameFrameWidthInput[];

@@ -911,7 +911,8 @@ struct VideoReceiverInfo : public MediaReceiverInfo {
         min_playout_delay_ms(0),
         render_delay_ms(0),
         target_delay_ms(0),
-        current_delay_ms(0) {
+        current_delay_ms(0),
+        capture_start_ntp_time_ms(0) {
   }

   std::vector<SsrcGroup> ssrc_groups;
@@ -948,6 +949,9 @@ struct VideoReceiverInfo : public MediaReceiverInfo {
   int target_delay_ms;
   // Current overall delay, possibly ramping towards target_delay_ms.
   int current_delay_ms;
+  // Estimated capture start time in NTP time in ms.
+  int64 capture_start_ntp_time_ms;
 };

 struct DataSenderInfo : public MediaSenderInfo {

@@ -81,6 +81,8 @@ void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine) {
 }
 #endif

+static const int kVideoCodecClockratekHz = cricket::kVideoCodecClockrate / 1000;
+
 namespace cricket {
@@ -169,7 +171,13 @@ struct FlushBlackFrameData : public talk_base::MessageData {
 class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
  public:
   WebRtcRenderAdapter(VideoRenderer* renderer, int channel_id)
-      : renderer_(renderer), channel_id_(channel_id), width_(0), height_(0) {
+      : renderer_(renderer),
+        channel_id_(channel_id),
+        width_(0),
+        height_(0),
+        first_frame_arrived_(false),
+        capture_start_rtp_time_stamp_(0),
+        capture_start_ntp_time_ms_(0) {
   }

   virtual ~WebRtcRenderAdapter() {
@@ -213,19 +221,31 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   virtual int DeliverFrame(unsigned char* buffer,
                            int buffer_size,
-                           uint32_t time_stamp,
+                           uint32_t rtp_time_stamp,
 #ifdef USE_WEBRTC_DEV_BRANCH
                            int64_t ntp_time_ms,
 #endif
                            int64_t render_time,
                            void* handle) {
     talk_base::CritScope cs(&crit_);
+    if (!first_frame_arrived_) {
+      first_frame_arrived_ = true;
+      capture_start_rtp_time_stamp_ = rtp_time_stamp;
+    }
+#ifdef USE_WEBRTC_DEV_BRANCH
+    if (ntp_time_ms > 0) {
+      uint32 elapsed_time_ms =
+          (rtp_time_stamp - capture_start_rtp_time_stamp_) /
+              kVideoCodecClockratekHz;
+      capture_start_ntp_time_ms_ = ntp_time_ms - elapsed_time_ms;
+    }
+#endif
     frame_rate_tracker_.Update(1);
     if (renderer_ == NULL) {
       return 0;
     }
     // Convert 90K rtp timestamp to ns timestamp.
-    int64 rtp_time_stamp_in_ns = (time_stamp / 90) *
+    int64 rtp_time_stamp_in_ns = (rtp_time_stamp / kVideoCodecClockratekHz) *
         talk_base::kNumNanosecsPerMillisec;
     // Convert milisecond render time to ns timestamp.
     int64 render_time_stamp_in_ns = render_time *
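
In short, the adapter pins the RTP timestamp of the first delivered frame and, once a frame carries a sender NTP timestamp, subtracts the RTP-derived elapsed time to back-date the capture start. A minimal standalone sketch of that arithmetic, assuming the usual 90 kHz video RTP clock; the concrete values below are illustrative, not taken from this commit:

#include <cstdint>
#include <cstdio>

int main() {
  // Assumed 90 kHz video RTP clock, i.e. kVideoCodecClockrate / 1000 == 90.
  const int kClockRateKHz = 90;
  const uint32_t capture_start_rtp = 900000;       // RTP timestamp of the first frame
  const uint32_t current_rtp = 900000 + 90 * 500;  // a frame captured 500 ms later
  const int64_t current_ntp_ms = 3900000000500LL;  // that frame's sender NTP time (ms)

  // Elapsed capture time derived from the RTP clock.
  const uint32_t elapsed_ms = (current_rtp - capture_start_rtp) / kClockRateKHz;
  // Back-date the NTP timestamp to estimate when capture started.
  const int64_t capture_start_ntp_ms = current_ntp_ms - elapsed_ms;

  std::printf("estimated capture start: %lld ms NTP\n",
              static_cast<long long>(capture_start_ntp_ms));  // prints 3900000000000
  return 0;
}
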
@@ -244,10 +264,10 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   virtual bool IsTextureSupported() { return true; }

   int DeliverBufferFrame(unsigned char* buffer, int buffer_size,
-                         int64 elapsed_time, int64 time_stamp) {
+                         int64 elapsed_time, int64 rtp_time_stamp_in_ns) {
     WebRtcVideoFrame video_frame;
     video_frame.Alias(buffer, buffer_size, width_, height_,
-                      1, 1, elapsed_time, time_stamp, 0);
+                      1, 1, elapsed_time, rtp_time_stamp_in_ns, 0);
     // Sanity check on decoded frame size.
     if (buffer_size != static_cast<int>(VideoFrame::SizeOf(width_, height_))) {
@@ -260,10 +280,12 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
     return ret;
   }

-  int DeliverTextureFrame(void* handle, int64 elapsed_time, int64 time_stamp) {
+  int DeliverTextureFrame(void* handle,
+                          int64 elapsed_time,
+                          int64 rtp_time_stamp_in_ns) {
     WebRtcTextureVideoFrame video_frame(
         static_cast<webrtc::NativeHandle*>(handle), width_, height_,
-        elapsed_time, time_stamp);
+        elapsed_time, rtp_time_stamp_in_ns);
     return renderer_->RenderFrame(&video_frame);
   }
@@ -287,6 +309,11 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
     return renderer_;
   }

+  int64 capture_start_ntp_time_ms() {
+    talk_base::CritScope cs(&crit_);
+    return capture_start_ntp_time_ms_;
+  }
+
  private:
   talk_base::CriticalSection crit_;
   VideoRenderer* renderer_;
@@ -294,6 +321,9 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   unsigned int width_;
   unsigned int height_;
   talk_base::RateTracker frame_rate_tracker_;
+  bool first_frame_arrived_;
+  uint32 capture_start_rtp_time_stamp_;
+  int64 capture_start_ntp_time_ms_;
 };

 class WebRtcDecoderObserver : public webrtc::ViEDecoderObserver {
class WebRtcDecoderObserver : public webrtc::ViEDecoderObserver { class WebRtcDecoderObserver : public webrtc::ViEDecoderObserver {
@@ -2538,6 +2568,8 @@ bool WebRtcVideoMediaChannel::GetStats(const StatsOptions& options,
       int fps = channel->render_adapter()->framerate();
       rinfo.framerate_decoded = fps;
       rinfo.framerate_output = fps;
+      rinfo.capture_start_ntp_time_ms =
+          channel->render_adapter()->capture_start_ntp_time_ms();
       channel->decoder_observer()->ExportTo(&rinfo);

       webrtc::RtcpPacketTypeCounter rtcp_sent;