Propagate capture NTP timestamp from RTP to renderer.

This is mostly interface changes; the real implementation of the NTP timestamp will come in a follow-up CL.
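For reference, the intended flow is: the RTP module stamps WebRtcRTPHeader::ntp_time_ms, which travels through VCMPacket and VCMFrameBuffer into EncodedImage::ntp_time_ms_, is copied by the decoder onto I420VideoFrame, and finally reaches ExternalRenderer::DeliverFrame(). A minimal sketch of tagging a frame with the new accessor (the TagFrame helper is hypothetical, for illustration only; the include path assumes the 2014-era source layout):

    #include "webrtc/common_video/interface/i420_video_frame.h"

    // Hypothetical helper: stamps a frame with the three time values this CL
    // distinguishes: the 90 kHz RTP timestamp, the capture time in NTP
    // milliseconds, and the wallclock render time in milliseconds.
    void TagFrame(webrtc::I420VideoFrame* frame,
                  uint32_t rtp_timestamp,
                  int64_t capture_ntp_time_ms,
                  int64_t render_time_ms) {
      frame->set_timestamp(rtp_timestamp);
      frame->set_ntp_time_ms(capture_ntp_time_ms);  // New in this CL.
      frame->set_render_time_ms(render_time_ms);
    }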

TEST=new tests and try bots
BUG=3111
R=niklas.enbom@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/11469004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5911 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: wu@webrtc.org
Date:   2014-04-15 17:46:33 +00:00
Parent: 1fd5b45a0e
Commit: 6c75c98964

27 changed files with 125 additions and 40 deletions

View File

@@ -18,6 +18,7 @@ I420VideoFrame::I420VideoFrame()
     : width_(0),
       height_(0),
       timestamp_(0),
+      ntp_time_ms_(0),
       render_time_ms_(0) {}
 
 I420VideoFrame::~I420VideoFrame() {}
@@ -37,6 +38,7 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
   v_plane_.CreateEmptyPlane(size_v, stride_v, size_v);
   // Creating empty frame - reset all values.
   timestamp_ = 0;
+  ntp_time_ms_ = 0;
   render_time_ms_ = 0;
   return 0;
 }
@@ -71,6 +73,7 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
   if (ret < 0)
     return ret;
   timestamp_ = videoFrame.timestamp_;
+  ntp_time_ms_ = videoFrame.ntp_time_ms_;
   render_time_ms_ = videoFrame.render_time_ms_;
   return 0;
 }
@@ -82,6 +85,7 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   std::swap(width_, videoFrame->width_);
   std::swap(height_, videoFrame->height_);
   std::swap(timestamp_, videoFrame->timestamp_);
+  std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
   std::swap(render_time_ms_, videoFrame->render_time_ms_);
 }

View File

@@ -49,10 +49,12 @@ TEST(TestI420VideoFrame, WidthHeightValues) {
   EXPECT_EQ(valid_value, frame.height());
   EXPECT_EQ(invalid_value, frame.set_height(0));
   EXPECT_EQ(valid_value, frame.height());
-  frame.set_timestamp(100u);
-  EXPECT_EQ(100u, frame.timestamp());
-  frame.set_render_time_ms(100);
-  EXPECT_EQ(100, frame.render_time_ms());
+  frame.set_timestamp(123u);
+  EXPECT_EQ(123u, frame.timestamp());
+  frame.set_ntp_time_ms(456);
+  EXPECT_EQ(456, frame.ntp_time_ms());
+  frame.set_render_time_ms(789);
+  EXPECT_EQ(789, frame.render_time_ms());
 }
 
 TEST(TestI420VideoFrame, SizeAllocation) {
@@ -82,7 +84,8 @@ TEST(TestI420VideoFrame, ResetSize) {
 TEST(TestI420VideoFrame, CopyFrame) {
   I420VideoFrame frame1, frame2;
   uint32_t timestamp = 1;
-  int64_t render_time_ms = 1;
+  int64_t ntp_time_ms = 2;
+  int64_t render_time_ms = 3;
   int stride_y = 15;
   int stride_u = 10;
   int stride_v = 10;
@@ -92,6 +95,7 @@ TEST(TestI420VideoFrame, CopyFrame) {
   EXPECT_EQ(0, frame1.CreateEmptyFrame(width, height,
                                        stride_y, stride_u, stride_v));
   frame1.set_timestamp(timestamp);
+  frame1.set_ntp_time_ms(ntp_time_ms);
   frame1.set_render_time_ms(render_time_ms);
   const int kSizeY = 225;
   const int kSizeU = 80;
@@ -151,7 +155,8 @@ TEST(TestI420VideoFrame, CopyBuffer) {
 TEST(TestI420VideoFrame, FrameSwap) {
   I420VideoFrame frame1, frame2;
   uint32_t timestamp1 = 1;
-  int64_t render_time_ms1 = 1;
+  int64_t ntp_time_ms1 = 2;
+  int64_t render_time_ms1 = 3;
   int stride_y1 = 15;
   int stride_u1 = 10;
   int stride_v1 = 10;
@@ -160,8 +165,9 @@ TEST(TestI420VideoFrame, FrameSwap) {
   const int kSizeY1 = 225;
   const int kSizeU1 = 80;
   const int kSizeV1 = 80;
-  uint32_t timestamp2 = 2;
-  int64_t render_time_ms2 = 4;
+  uint32_t timestamp2 = 4;
+  int64_t ntp_time_ms2 = 5;
+  int64_t render_time_ms2 = 6;
   int stride_y2 = 30;
   int stride_u2 = 20;
   int stride_v2 = 20;
@@ -174,6 +180,7 @@ TEST(TestI420VideoFrame, FrameSwap) {
   EXPECT_EQ(0, frame1.CreateEmptyFrame(width1, height1,
                                        stride_y1, stride_u1, stride_v1));
   frame1.set_timestamp(timestamp1);
+  frame1.set_ntp_time_ms(ntp_time_ms1);
   frame1.set_render_time_ms(render_time_ms1);
   // Set memory for frame1.
   uint8_t buffer_y1[kSizeY1];
@@ -190,6 +197,7 @@ TEST(TestI420VideoFrame, FrameSwap) {
   EXPECT_EQ(0, frame2.CreateEmptyFrame(width2, height2,
                                        stride_y2, stride_u2, stride_v2));
   frame2.set_timestamp(timestamp2);
+  frame2.set_ntp_time_ms(ntp_time_ms2);
   frame2.set_render_time_ms(render_time_ms2);
   // Set memory for frame2.
   uint8_t buffer_y2[kSizeY2];
@@ -245,6 +253,7 @@ bool EqualFramesExceptSize(const I420VideoFrame& frame1,
   ret |= (frame1.stride(kUPlane) == frame2.stride(kUPlane));
   ret |= (frame1.stride(kVPlane) == frame2.stride(kVPlane));
   ret |= (frame1.timestamp() == frame2.timestamp());
+  ret |= (frame1.ntp_time_ms() == frame2.ntp_time_ms());
   ret |= (frame1.render_time_ms() == frame2.render_time_ms());
   if (!ret)
     return false;

View File

@@ -99,6 +99,14 @@ class I420VideoFrame {
   // Get frame timestamp (90kHz).
   virtual uint32_t timestamp() const {return timestamp_;}
 
+  // Set capture NTP time in milliseconds.
+  virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
+    ntp_time_ms_ = ntp_time_ms;
+  }
+
+  // Get capture NTP time in milliseconds.
+  virtual int64_t ntp_time_ms() const {return ntp_time_ms_;}
+
   // Set render time in milliseconds.
   virtual void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
       render_time_ms;}
@@ -136,6 +144,7 @@ class I420VideoFrame {
   int width_;
   int height_;
   uint32_t timestamp_;
+  int64_t ntp_time_ms_;
   int64_t render_time_ms_;
 };  // I420VideoFrame

View File

@@ -46,6 +46,7 @@ public:
     : _encodedWidth(0),
       _encodedHeight(0),
       _timeStamp(0),
+      ntp_time_ms_(0),
       capture_time_ms_(0),
       _frameType(kDeltaFrame),
       _buffer(buffer),
@@ -56,6 +57,8 @@ public:
   uint32_t _encodedWidth;
   uint32_t _encodedHeight;
   uint32_t _timeStamp;
+  // NTP time of the capture time in local timebase in milliseconds.
+  int64_t ntp_time_ms_;
   int64_t capture_time_ms_;
   VideoFrameType _frameType;
   uint8_t* _buffer;

View File

@@ -103,6 +103,8 @@ struct WebRtcRTPHeader {
   RTPHeader header;
   FrameType frameType;
   RTPTypeHeader type;
+  // NTP time of the capture time in local timebase in milliseconds.
+  int64_t ntp_time_ms;
 };
 
 class RTPFragmentationHeader {

View File

@@ -22,6 +22,8 @@ namespace webrtc {
 
 enum { kMaxWaitEncTimeMs = 100 };
 enum { kMaxWaitDecTimeMs = 25 };
+static const uint32_t kTestTimestamp = 123;
+static const int64_t kTestNtpTimeMs = 456;
 
 // TODO(mikhal): Replace these with mocks.
 class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
@@ -128,6 +130,7 @@ class TestVp8Impl : public ::testing::Test {
     input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
                                   stride_y, stride_uv, stride_uv);
+    input_frame_.set_timestamp(kTestTimestamp);
     // Using ConvertToI420 to add stride to the image.
     EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
                                codec_inst_.width, codec_inst_.height,
@@ -235,10 +238,13 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
   VideoFrameToEncodedImage(encoded_video_frame_, encodedImage);
   // First frame should be a key frame.
   encodedImage._frameType = kKeyFrame;
+  encodedImage.ntp_time_ms_ = kTestNtpTimeMs;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encodedImage, false, NULL));
   EXPECT_GT(WaitForDecodedFrame(), 0);
   // Compute PSNR on all planes (faster than SSIM).
   EXPECT_GT(I420PSNR(&input_frame_, &decoded_video_frame_), 36);
+  EXPECT_EQ(kTestTimestamp, decoded_video_frame_.timestamp());
+  EXPECT_EQ(kTestNtpTimeMs, decoded_video_frame_.ntp_time_ms());
 }
 
 TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) {

View File

@@ -710,7 +710,7 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
   }
 
   img = vpx_codec_get_frame(decoder_, &iter);
-  ret = ReturnFrame(img, input_image._timeStamp);
+  ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
   if (ret != 0) {
     // Reset to avoid requesting key frames too often.
     if (ret < 0 && propagation_cnt_ > 0)
@@ -790,7 +790,9 @@ int VP8DecoderImpl::DecodePartitions(
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
+                                uint32_t timestamp,
+                                int64_t ntp_time_ms) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -808,6 +810,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
                              img->stride[VPX_PLANE_U],
                              img->stride[VPX_PLANE_V]);
   decoded_image_.set_timestamp(timestamp);
+  decoded_image_.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image_);
   if (ret != 0)
     return ret;

View File

@@ -214,7 +214,9 @@ class VP8DecoderImpl : public VP8Decoder {
   int DecodePartitions(const EncodedImage& input_image,
                        const RTPFragmentationHeader* fragmentation);
 
-  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
+  int ReturnFrame(const vpx_image_t* img,
+                  uint32_t timeStamp,
+                  int64_t ntp_time_ms);
 
   I420VideoFrame decoded_image_;
   DecodedImageCallback* decode_complete_callback_;

View File

@@ -96,6 +96,8 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
     // First packet (empty and/or media) inserted into this frame.
     // store some info and set some initial values.
     _timeStamp = packet.timestamp;
+    // We only take the NTP timestamp of the first packet of a frame.
+    ntp_time_ms_ = packet.ntp_time_ms_;
     _codec = packet.codec;
     if (packet.frameType != kFrameEmpty) {
       // first media packet

View File

@@ -19,6 +19,7 @@ VCMPacket::VCMPacket()
     :
     payloadType(0),
     timestamp(0),
+    ntp_time_ms_(0),
    seqNum(0),
     dataPtr(NULL),
     sizeBytes(0),
@@ -38,6 +39,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
                      const WebRtcRTPHeader& rtpHeader) :
     payloadType(rtpHeader.header.payloadType),
     timestamp(rtpHeader.header.timestamp),
+    ntp_time_ms_(rtpHeader.ntp_time_ms),
     seqNum(rtpHeader.header.sequenceNumber),
     dataPtr(ptr),
     sizeBytes(size),
@@ -58,6 +60,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
 VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t ts, bool mBit) :
     payloadType(0),
     timestamp(ts),
+    ntp_time_ms_(0),
     seqNum(seq),
     dataPtr(ptr),
     sizeBytes(size),
@@ -76,6 +79,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t t
 void VCMPacket::Reset() {
   payloadType = 0;
   timestamp = 0;
+  ntp_time_ms_ = 0;
   seqNum = 0;
   dataPtr = NULL;
   sizeBytes = 0;

View File

@@ -33,6 +33,8 @@ public:
 
   uint8_t payloadType;
   uint32_t timestamp;
+  // NTP time of the capture time in local timebase in milliseconds.
+  int64_t ntp_time_ms_;
   uint16_t seqNum;
   const uint8_t* dataPtr;
   uint32_t sizeBytes;

View File

@@ -33,8 +33,11 @@ class WEBRTC_DLLEXPORT ViEEffectFilter {
  public:
   // This method is called with an I420 video frame allowing the user to
   // modify the video frame.
-  virtual int Transform(int size, unsigned char* frameBuffer,
-                        unsigned int timeStamp90KHz, unsigned int width,
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
+                        unsigned int width,
                         unsigned int height) = 0;
 
  protected:
   ViEEffectFilter() {}

View File

@@ -38,10 +38,13 @@ class ExternalRenderer {
   virtual int DeliverFrame(unsigned char* buffer,
                            int buffer_size,
                            // RTP timestamp in 90kHz.
-                           uint32_t time_stamp,
-                           // Wallclock render time in miliseconds
-                           int64_t render_time,
-                           // Handle of the underlying video frame,
+                           uint32_t timestamp,
+                           // NTP time of the capture time in local timebase
+                           // in milliseconds.
+                           int64_t ntp_time_ms,
+                           // Wallclock render time in milliseconds.
+                           int64_t render_time_ms,
+                           // Handle of the underlying video frame.
                            void* handle) = 0;
 
   // Returns true if the renderer supports textures. DeliverFrame can be called
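A renderer picks the value up through this widened signature. A minimal sketch of an implementation (LoggingRenderer and its logging are illustrative only; it assumes the interface's remaining pure virtuals are FrameSizeChange() and IsTextureSupported(), as the surrounding code suggests):

    #include <cstdint>
    #include <cstdio>

    // Illustrative ExternalRenderer that just logs the three time values.
    class LoggingRenderer : public webrtc::ExternalRenderer {
     public:
      virtual int FrameSizeChange(unsigned int width,
                                  unsigned int height,
                                  unsigned int number_of_streams) {
        return 0;
      }
      virtual int DeliverFrame(unsigned char* buffer,
                               int buffer_size,
                               uint32_t timestamp,
                               int64_t ntp_time_ms,
                               int64_t render_time_ms,
                               void* handle) {
        printf("rtp=%u capture_ntp_ms=%lld render_ms=%lld\n",
               timestamp,
               static_cast<long long>(ntp_time_ms),
               static_cast<long long>(render_time_ms));
        return 0;
      }
      virtual bool IsTextureSupported() { return false; }
    };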

View File

@@ -40,15 +40,18 @@ class LocalRendererEffectFilter : public webrtc::ExternalRendererEffectFilter {
                             FrameDropDetector* frame_drop_detector)
       : ExternalRendererEffectFilter(renderer),
         frame_drop_detector_(frame_drop_detector) {}
-  int Transform(int size, unsigned char* frameBuffer,
-                unsigned int timeStamp90KHz, unsigned int width,
+  int Transform(int size,
+                unsigned char* frame_buffer,
+                int64_t ntp_time_ms,
+                unsigned int timestamp,
+                unsigned int width,
                 unsigned int height) {
     frame_drop_detector_->ReportFrameState(
         FrameDropDetector::kCreated,
-        timeStamp90KHz,
+        timestamp,
        webrtc::TickTime::MicrosecondTimestamp());
     return webrtc::ExternalRendererEffectFilter::Transform(
-        size, frameBuffer, timeStamp90KHz, width, height);
+        size, frame_buffer, ntp_time_ms, timestamp, width, height);
   }
 
  private:
   FrameDropDetector* frame_drop_detector_;
@@ -97,12 +100,15 @@ class DecodedTimestampEffectFilter : public webrtc::ViEEffectFilter {
   explicit DecodedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
       : frame_drop_detector_(frame_drop_detector) {}
   virtual ~DecodedTimestampEffectFilter() {}
-  virtual int Transform(int size, unsigned char* frameBuffer,
-                        unsigned int timeStamp90KHz, unsigned int width,
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
+                        unsigned int width,
                         unsigned int height) {
     frame_drop_detector_->ReportFrameState(
         FrameDropDetector::kDecoded,
-        timeStamp90KHz,
+        timestamp,
         webrtc::TickTime::MicrosecondTimestamp());
     return 0;
   }
@@ -588,7 +594,7 @@ int FrameDropDetector::GetNumberOfFramesDroppedAt(State state) {
 int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
     unsigned char *buffer, int buffer_size, uint32_t time_stamp,
-    int64_t render_time, void* /*handle*/) {
+    int64_t ntp_time_ms, int64_t render_time, void* /*handle*/) {
   // |render_time| provides the ideal render time for this frame. If that time
   // has already passed we will render it immediately.
   int64_t report_render_time_us = render_time * 1000;
@@ -600,7 +606,8 @@ int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
   frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
                                          time_stamp, report_render_time_us);
   return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
-                                         time_stamp, render_time, NULL);
+                                         time_stamp, ntp_time_ms,
+                                         render_time, NULL);
 }
 
 int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(

View File

@@ -226,6 +226,7 @@ class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
                       unsigned int number_of_streams) OVERRIDE;
   int DeliverFrame(unsigned char* buffer, int buffer_size,
                    uint32_t time_stamp,
+                   int64_t ntp_time_ms,
                    int64_t render_time,
                    void* handle) OVERRIDE;
 
  private:

View File

@@ -83,8 +83,11 @@ class CaptureEffectFilter : public webrtc::ViEEffectFilter {
   }
 
   // Implements ViEEffectFilter.
-  virtual int Transform(int size, unsigned char* frame_buffer,
-                        unsigned int timeStamp90KHz, unsigned int width,
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
+                        unsigned int width,
                         unsigned int height) {
     EXPECT_TRUE(frame_buffer != NULL);
     EXPECT_EQ(expected_width_, width);

View File

@@ -124,7 +124,8 @@ class RenderFilter : public webrtc::ViEEffectFilter {
   }
 
   virtual int Transform(int size,
                         unsigned char* frame_buffer,
-                        unsigned int time_stamp90KHz,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
                         unsigned int width,
                         unsigned int height) {
     num_frames_++;

View File

@@ -28,12 +28,15 @@ public:
   ~MyEffectFilter() {}
 
-  virtual int Transform(int size, unsigned char* frameBuffer,
-                        unsigned int timeStamp90KHz, unsigned int width,
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
+                        unsigned int width,
                         unsigned int height)
   {
     // Black and white
-    memset(frameBuffer + (2 * size) / 3, 0x7f, size / 3);
+    memset(frame_buffer + (2 * size) / 3, 0x7f, size / 3);
     return 0;
   }
 };

View File

@@ -56,8 +56,10 @@ public:
     return 0;
   }
 
-  virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
+  virtual int DeliverFrame(unsigned char* buffer,
+                           int bufferSize,
                            uint32_t time_stamp,
+                           int64_t ntp_time_ms,
                            int64_t render_time,
                            void* /*handle*/) {
     if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {

View File

@@ -123,6 +123,7 @@ void ViEToFileRenderer::ForgetOutputFile() {
 int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
                                     int buffer_size,
                                     uint32_t time_stamp,
+                                    int64_t ntp_time_ms,
                                     int64_t render_time,
                                     void* /*handle*/) {
   webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());

View File

@@ -24,8 +24,11 @@ class ExternalRendererEffectFilter : public webrtc::ViEEffectFilter {
   explicit ExternalRendererEffectFilter(webrtc::ExternalRenderer* renderer)
       : width_(0), height_(0), renderer_(renderer) {}
   virtual ~ExternalRendererEffectFilter() {}
-  virtual int Transform(int size, unsigned char* frame_buffer,
-                        unsigned int time_stamp90KHz, unsigned int width,
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        int64_t ntp_time_ms,
+                        unsigned int timestamp,
+                        unsigned int width,
                         unsigned int height) {
     if (width != width_ || height_ != height) {
       renderer_->FrameSizeChange(width, height, 1);
@@ -34,7 +37,8 @@ class ExternalRendererEffectFilter : public webrtc::ViEEffectFilter {
     }
     return renderer_->DeliverFrame(frame_buffer,
                                    size,
-                                   time_stamp90KHz,
+                                   timestamp,
+                                   ntp_time_ms,
                                    webrtc::TickTime::MillisecondTimestamp(),
                                    NULL);
   }

View File

@@ -60,6 +60,7 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
   int DeliverFrame(unsigned char* buffer,
                    int buffer_size,
                    uint32_t time_stamp,
+                   int64_t ntp_time_ms,
                    int64_t render_time,
                    void* handle) OVERRIDE;

View File

@@ -537,8 +537,11 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
                                 video_frame->height());
     scoped_array<uint8_t> video_buffer(new uint8_t[length]);
     ExtractBuffer(*video_frame, length, video_buffer.get());
-    effect_filter_->Transform(length, video_buffer.get(),
-                              video_frame->timestamp(), video_frame->width(),
+    effect_filter_->Transform(length,
+                              video_buffer.get(),
+                              video_frame->ntp_time_ms(),
+                              video_frame->timestamp(),
+                              video_frame->width(),
                               video_frame->height());
   }
   // Deliver the captured frame to all observers (channels, renderer or file).

View File

@@ -1403,8 +1403,11 @@ int32_t ViEChannel::FrameToRender(
                                 video_frame.height());
     scoped_array<uint8_t> video_buffer(new uint8_t[length]);
     ExtractBuffer(video_frame, length, video_buffer.get());
-    effect_filter_->Transform(length, video_buffer.get(),
-                              video_frame.timestamp(), video_frame.width(),
+    effect_filter_->Transform(length,
+                              video_buffer.get(),
+                              video_frame.ntp_time_ms(),
+                              video_frame.timestamp(),
+                              video_frame.width(),
                               video_frame.height());
   }
   if (color_enhancement_) {

View File

@@ -507,6 +507,7 @@ void ViEEncoder::DeliverFrame(int id,
     ExtractBuffer(*video_frame, length, video_buffer.get());
     effect_filter_->Transform(length,
                               video_buffer.get(),
+                              video_frame->ntp_time_ms(),
                               video_frame->timestamp(),
                               video_frame->width(),
                               video_frame->height());

View File

@@ -171,6 +171,7 @@ int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
 int32_t ViEReceiver::OnReceivedPayloadData(
     const uint8_t* payload_data, const uint16_t payload_size,
     const WebRtcRTPHeader* rtp_header) {
+  // TODO(wu): Calculate ntp_time_ms.
   if (vcm_->IncomingPacket(payload_data, payload_size, *rtp_header) != 0) {
     // Check this...
     return -1;
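The TODO above marks where the follow-up CL is expected to compute the value. For intuition, the usual derivation maps an RTP timestamp to NTP time by extrapolating from the (NTP time, RTP timestamp) reference pair carried in the most recent RTCP sender report; a hedged sketch of that arithmetic (the function and parameter names are illustrative, not this codebase's API):

    #include <cstdint>

    // Illustrative only: estimate a packet's capture time in NTP milliseconds
    // from its RTP timestamp, given the reference pair reported in the most
    // recent RTCP sender report. Assumes the 90 kHz video clock.
    int64_t EstimateCaptureNtpMs(uint32_t rtp_timestamp,
                                 uint32_t last_sr_rtp_timestamp,
                                 int64_t last_sr_ntp_ms) {
      // Wraparound-safe signed difference in 90 kHz ticks.
      int32_t diff_ticks =
          static_cast<int32_t>(rtp_timestamp - last_sr_rtp_timestamp);
      return last_sr_ntp_ms + diff_ticks / 90;  // 90 ticks per millisecond.
    }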

View File

@@ -181,6 +181,7 @@ int32_t ViEExternalRendererImpl::RenderFrame(
     external_renderer_->DeliverFrame(NULL,
                                      0,
                                      video_frame.timestamp(),
+                                     video_frame.ntp_time_ms(),
                                      video_frame.render_time_ms(),
                                      video_frame.native_handle());
   } else {
@@ -244,6 +245,7 @@ int32_t ViEExternalRendererImpl::RenderFrame(
     external_renderer_->DeliverFrame(out_frame->Buffer(),
                                      out_frame->Length(),
                                      video_frame.timestamp(),
+                                     video_frame.ntp_time_ms(),
                                      video_frame.render_time_ms(),
                                      NULL);
   }
} }