Make the entry point for VideoFrames into WebRTC a const ref I420VideoFrame.

This removes the non-const pointer entry point and SwapFrame.

Since frames delivered through VideoSendStream no longer pass through the external capture module, VideoSendStream will not receive the incoming framerate callback. VideoSendStream now measures the input frame rate with an rtc::RateTracker instead.
Also, the video engine must now ensure that time stamps are always increasing.
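
In effect (a condensed sketch of the SendStatisticsProxy and VideoSendStream changes in the diff below, not verbatim code), every delivered frame ticks the tracker and GetStats() derives the rate from it:

  // VideoSendStream::IncomingCapturedFrame():
  stats_proxy_.OnIncomingFrame();  // internally: input_frame_rate_tracker_.Update(1)
  // SendStatisticsProxy::GetStats():
  stats_.input_frame_rate =
      static_cast<int>(input_frame_rate_tracker_.units_second());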

With this change, time stamps (NTP, render time, and RTP timestamps) are checked and set in ViECapturer::OnIncomingCapturedFrame.
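
Simplified sketch of that logic (condensed from the vie_capturer.cc hunk below; delta_ntp_internal_ms_ is the constant NTP-to-internal-clock offset sampled at construction):

  if (frame->ntp_time_ms() != 0) {
    // An explicit NTP time stamp wins; derive the render time from it.
    frame->set_render_time_ms(frame->ntp_time_ms() - delta_ntp_internal_ms_);
  } else {
    // Otherwise use the render time (or "now"), compensated for capture
    // delay, and derive the NTP time stamp from it.
    int64_t render_time = frame->render_time_ms() != 0 ?
        frame->render_time_ms() : TickTime::MillisecondTimestamp();
    render_time -= FrameDelay();
    frame->set_render_time_ms(render_time);
    frame->set_ntp_time_ms(render_time + delta_ntp_internal_ms_);
  }
  if (frame->ntp_time_ms() <= last_captured_timestamp_)
    return;  // Capture time must be strictly increasing; drop this frame.
  last_captured_timestamp_ = frame->ntp_time_ms();
  frame->set_timestamp(90 * static_cast<uint32_t>(frame->ntp_time_ms()));  // ms -> 90 kHz RTP.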

BUG=1128
R=magjed@webrtc.org, mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/46429004

Cr-Commit-Position: refs/heads/master@{#8633}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8633 4adac7df-926f-26a2-2b94-8c16560cd09d
perkj@webrtc.org
2015-03-06 12:37:19 +00:00
parent 75e850e192
commit bcead305a2
29 changed files with 221 additions and 281 deletions

View File

@@ -353,8 +353,9 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(
   return true;
 }
-void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
-                                                  webrtc::I420VideoFrame& sample) {
+void WebRtcVideoCapturer::OnIncomingCapturedFrame(
+    const int32_t id,
+    const webrtc::I420VideoFrame& sample) {
   // This would be a normal CritScope, except that it's possible that:
   // (1) whatever system component producing this frame has taken a lock, and
   // (2) Stop() probably calls back into that system component, which may take
@@ -395,7 +396,7 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,
 }
 void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread(
-    webrtc::I420VideoFrame* frame) {
+    const webrtc::I420VideoFrame* frame) {
   DCHECK(start_thread_->IsCurrent());
   // Signal down stream components on captured frame.
   // The CapturedFrame class doesn't support planes. We have to ExtractBuffer

View File

@@ -81,7 +81,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
  private:
   // Callback when a frame is captured by camera.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame& frame);
+                                       const webrtc::I420VideoFrame& frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
@@ -91,7 +91,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
   // directly from OnIncomingCapturedFrame.
   // TODO(tommi): Remove this workaround when we've updated the WebRTC capturers
   // to follow the same contract.
-  void SignalFrameCapturedOnStartThread(webrtc::I420VideoFrame* frame);
+  void SignalFrameCapturedOnStartThread(const webrtc::I420VideoFrame* frame);
   rtc::scoped_ptr<WebRtcVcmFactoryInterface> factory_;
   webrtc::VideoCaptureModule* module_;

View File

@@ -1412,11 +1412,11 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
   SetDimensions(
       video_frame_.width(), video_frame_.height(), capturer->IsScreencast());
-  LOG(LS_VERBOSE) << "SwapFrame: " << video_frame_.width() << "x"
+  LOG(LS_VERBOSE) << "IncomingCapturedFrame: " << video_frame_.width() << "x"
                   << video_frame_.height() << " -> (codec) "
                   << parameters_.encoder_config.streams.back().width << "x"
                   << parameters_.encoder_config.streams.back().height;
-  stream_->Input()->SwapFrame(&video_frame_);
+  stream_->Input()->IncomingCapturedFrame(video_frame_);
 }
 bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
@@ -1436,7 +1436,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
     CreateBlackFrame(&black_frame, last_dimensions_.width,
                      last_dimensions_.height);
-    stream_->Input()->SwapFrame(&black_frame);
+    stream_->Input()->IncomingCapturedFrame(black_frame);
   }
   capturer_ = NULL;

View File

@@ -139,9 +139,10 @@ int FakeVideoSendStream::GetLastHeight() const {
   return last_frame_.height();
 }
-void FakeVideoSendStream::SwapFrame(webrtc::I420VideoFrame* frame) {
+void FakeVideoSendStream::IncomingCapturedFrame(
+    const webrtc::I420VideoFrame& frame) {
   ++num_swapped_frames_;
-  last_frame_.SwapFrame(frame);
+  last_frame_.ShallowCopy(frame);
 }
 void FakeVideoSendStream::SetStats(

View File

@@ -54,7 +54,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
   void SetStats(const webrtc::VideoSendStream::Stats& stats);
  private:
-  void SwapFrame(webrtc::I420VideoFrame* frame) override;
+  void IncomingCapturedFrame(const webrtc::I420VideoFrame& frame) override;
   webrtc::VideoSendStream::Stats GetStats() override;
   bool ReconfigureVideoEncoder(

View File

@@ -139,6 +139,14 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
   return 0;
 }
+void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) {
+  video_frame_buffer_ = videoFrame.video_frame_buffer();
+  timestamp_ = videoFrame.timestamp_;
+  ntp_time_ms_ = videoFrame.ntp_time_ms_;
+  render_time_ms_ = videoFrame.render_time_ms_;
+  rotation_ = videoFrame.rotation_;
+}
 I420VideoFrame* I420VideoFrame::CloneFrame() const {
   rtc::scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
   if (new_frame->CopyFrame(*this) == -1) {

View File

@@ -130,6 +130,63 @@ TEST(TestI420VideoFrame, CopyFrame) {
   EXPECT_TRUE(EqualFrames(small_frame, big_frame));
 }
+TEST(TestI420VideoFrame, ShallowCopy) {
+  uint32_t timestamp = 1;
+  int64_t ntp_time_ms = 2;
+  int64_t render_time_ms = 3;
+  int stride_y = 15;
+  int stride_u = 10;
+  int stride_v = 10;
+  int width = 15;
+  int height = 15;
+  const int kSizeY = 400;
+  const int kSizeU = 100;
+  const int kSizeV = 100;
+  const VideoRotation kRotation = kVideoRotation_270;
+  uint8_t buffer_y[kSizeY];
+  uint8_t buffer_u[kSizeU];
+  uint8_t buffer_v[kSizeV];
+  memset(buffer_y, 16, kSizeY);
+  memset(buffer_u, 8, kSizeU);
+  memset(buffer_v, 4, kSizeV);
+  I420VideoFrame frame1;
+  EXPECT_EQ(0, frame1.CreateFrame(kSizeY, buffer_y, kSizeU, buffer_u, kSizeV,
+                                  buffer_v, width, height, stride_y, stride_u,
+                                  stride_v, kRotation));
+  frame1.set_timestamp(timestamp);
+  frame1.set_ntp_time_ms(ntp_time_ms);
+  frame1.set_render_time_ms(render_time_ms);
+  I420VideoFrame frame2;
+  frame2.ShallowCopy(frame1);
+  // To be able to access the buffers, we need const pointers to the frames.
+  const I420VideoFrame* const_frame1_ptr = &frame1;
+  const I420VideoFrame* const_frame2_ptr = &frame2;
+  EXPECT_TRUE(const_frame1_ptr->buffer(kYPlane) ==
+              const_frame2_ptr->buffer(kYPlane));
+  EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) ==
+              const_frame2_ptr->buffer(kUPlane));
+  EXPECT_TRUE(const_frame1_ptr->buffer(kVPlane) ==
+              const_frame2_ptr->buffer(kVPlane));
+  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
+  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
+  EXPECT_EQ(frame2.render_time_ms(), frame1.render_time_ms());
+  EXPECT_EQ(frame2.rotation(), frame1.rotation());
+  frame2.set_timestamp(timestamp + 1);
+  frame2.set_ntp_time_ms(ntp_time_ms + 1);
+  frame2.set_render_time_ms(render_time_ms + 1);
+  frame2.set_rotation(kVideoRotation_90);
+  EXPECT_NE(frame2.timestamp(), frame1.timestamp());
+  EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
+  EXPECT_NE(frame2.render_time_ms(), frame1.render_time_ms());
+  EXPECT_NE(frame2.rotation(), frame1.rotation());
+}
 TEST(TestI420VideoFrame, CloneFrame) {
   I420VideoFrame frame1;
   rtc::scoped_ptr<I420VideoFrame> frame2;

View File

@@ -86,9 +86,6 @@ public:
                                   size_t videoFrameLength,
                                   const VideoCaptureCapability& frameInfo,
                                   int64_t captureTime = 0) = 0;
-    virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                           int64_t captureTime = 0) = 0;
 protected:
     ~VideoCaptureExternal() {}
 };
@@ -98,7 +95,7 @@ class VideoCaptureDataCallback
 {
 public:
     virtual void OnIncomingCapturedFrame(const int32_t id,
-                                         I420VideoFrame& videoFrame) = 0;
+                                         const I420VideoFrame& videoFrame) = 0;
     virtual void OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) = 0;
 protected:

View File

@@ -104,8 +104,9 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
     printf("No of timing warnings %d\n", timing_warnings_);
   }
-  virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame& videoFrame) {
+  virtual void OnIncomingCapturedFrame(
+      const int32_t id,
+      const webrtc::I420VideoFrame& videoFrame) {
     CriticalSectionScoped cs(capture_cs_.get());
     int height = videoFrame.height();
     int width = videoFrame.width();
@@ -479,76 +480,6 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
   EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
 }
-// Test input of planar I420 frames.
-// NOTE: flaky, sometimes fails on the last CompareLastFrame.
-// http://code.google.com/p/webrtc/issues/detail?id=777
-TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
-  webrtc::I420VideoFrame frame_i420;
-  frame_i420.CopyFrame(test_frame_);
-  EXPECT_EQ(0,
-            capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420));
-  // Test with a frame with pitch not equal to width
-  memset(test_frame_.buffer(webrtc::kYPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kYPlane));
-  memset(test_frame_.buffer(webrtc::kUPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kUPlane));
-  memset(test_frame_.buffer(webrtc::kVPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kVPlane));
-  webrtc::I420VideoFrame aligned_test_frame;
-  int y_pitch = kTestWidth + 2;
-  int u_pitch = kTestWidth / 2 + 1;
-  int v_pitch = u_pitch;
-  aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight,
-                                      y_pitch, u_pitch, v_pitch);
-  memset(aligned_test_frame.buffer(webrtc::kYPlane), 0,
-         kTestWidth * kTestHeight);
-  memset(aligned_test_frame.buffer(webrtc::kUPlane), 0,
-         (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2);
-  memset(aligned_test_frame.buffer(webrtc::kVPlane), 0,
-         (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2);
-  // Copy the test_frame_ to aligned_test_frame.
-  int y_width = kTestWidth;
-  int uv_width = kTestWidth / 2;
-  int y_rows = kTestHeight;
-  int uv_rows = kTestHeight / 2;
-  const webrtc::I420VideoFrame& const_test_frame = test_frame_;
-  const unsigned char* y_plane = const_test_frame.buffer(webrtc::kYPlane);
-  const unsigned char* u_plane = const_test_frame.buffer(webrtc::kUPlane);
-  const unsigned char* v_plane = const_test_frame.buffer(webrtc::kVPlane);
-  // Copy Y
-  unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane);
-  for (int i = 0; i < y_rows; ++i) {
-    memcpy(current_pointer, y_plane, y_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += y_pitch;
-    y_plane += y_width;
-  }
-  // Copy U
-  current_pointer = aligned_test_frame.buffer(webrtc::kUPlane);
-  for (int i = 0; i < uv_rows; ++i) {
-    memcpy(current_pointer, u_plane, uv_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += u_pitch;
-    u_plane += uv_width;
-  }
-  // Copy V
-  current_pointer = aligned_test_frame.buffer(webrtc::kVPlane);
-  for (int i = 0; i < uv_rows; ++i) {
-    memcpy(current_pointer, v_plane, uv_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += v_pitch;
-    v_plane += uv_width;
-  }
-  frame_i420.CopyFrame(aligned_test_frame);
-  EXPECT_EQ(0,
-            capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
-}
 // Test frame rate and no picture alarm.
 // Flaky on Win32, see webrtc:3270.
 TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) {

View File

@@ -173,10 +173,6 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
       _captureCallBack(NULL),
       _lastProcessFrameCount(TickTime::Now()),
       _rotateFrame(kRotateNone),
-      last_capture_time_(0),
-      delta_ntp_internal_ms_(
-          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
-          TickTime::MillisecondTimestamp()),
       apply_rotation_(true) {
     _requestedCapability.width = kDefaultWidth;
     _requestedCapability.height = kDefaultHeight;
@@ -231,8 +227,7 @@ int32_t VideoCaptureImpl::CaptureDelay()
     return _setCaptureDelay;
 }
-int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
-                                               int64_t capture_time) {
+int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame) {
   UpdateFrameCount();  // frame count used for local frame rate callback.
   const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
@@ -241,19 +236,6 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
     _setCaptureDelay = _captureDelay;
   }
-  // Set the capture time
-  if (capture_time != 0) {
-    captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_);
-  } else {
-    captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
-  }
-  if (captureFrame.render_time_ms() == last_capture_time_) {
-    // We don't allow the same capture time for two frames, drop this one.
-    return -1;
-  }
-  last_capture_time_ = captureFrame.render_time_ms();
   if (_dataCallBack) {
     if (callOnCaptureDelayChanged) {
       _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
@@ -337,8 +319,10 @@ int32_t VideoCaptureImpl::IncomingFrame(
         } else {
           _captureFrame.set_rotation(kVideoRotation_0);
         }
-        DeliverCapturedFrame(_captureFrame, captureTime);
+        _captureFrame.set_ntp_time_ms(captureTime);
+        _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
+        DeliverCapturedFrame(_captureFrame);
     }
     else // Encoded format
     {
@@ -349,16 +333,6 @@ int32_t VideoCaptureImpl::IncomingFrame(
     return 0;
 }
-int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                                 int64_t captureTime) {
-  CriticalSectionScoped cs(&_apiCs);
-  CriticalSectionScoped cs2(&_callBackCs);
-  DeliverCapturedFrame(*video_frame, captureTime);
-  return 0;
-}
 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
   CriticalSectionScoped cs(&_apiCs);
   CriticalSectionScoped cs2(&_callBackCs);

View File

@@ -81,15 +81,12 @@ public:
     virtual int32_t Process();
     // Implement VideoCaptureExternal
-    // |capture_time| must be specified in the NTP time format in milliseconds.
+    // |capture_time| must be specified in NTP time format in milliseconds.
     virtual int32_t IncomingFrame(uint8_t* videoFrame,
                                   size_t videoFrameLength,
                                   const VideoCaptureCapability& frameInfo,
                                   int64_t captureTime = 0);
-    virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                           int64_t captureTime = 0);
     // Platform dependent
     virtual int32_t StartCapture(const VideoCaptureCapability& capability)
     {
@@ -106,8 +103,7 @@ public:
 protected:
     VideoCaptureImpl(const int32_t id);
     virtual ~VideoCaptureImpl();
-    int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame,
-                                 int64_t capture_time);
+    int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame);
     int32_t _id;  // Module ID
     char* _deviceUniqueId;  // current Device unique name;
@@ -136,12 +132,6 @@ private:
     I420VideoFrame _captureFrame;
-    // Used to make sure incoming timestamp is increasing for every frame.
-    int64_t last_capture_time_;
-    // Delta used for translating between NTP and internal timestamps.
-    const int64_t delta_ntp_internal_ms_;
     // Indicate whether rotation should be applied before delivered externally.
     bool apply_rotation_;
 };

View File

@@ -114,11 +114,11 @@ void FrameGeneratorCapturer::InsertFrame() {
     CriticalSectionScoped cs(lock_.get());
     if (sending_) {
       I420VideoFrame* frame = frame_generator_->NextFrame();
-      frame->set_render_time_ms(clock_->CurrentNtpInMilliseconds());
+      frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
       if (first_frame_capture_time_ == -1) {
-        first_frame_capture_time_ = frame->render_time_ms();
+        first_frame_capture_time_ = frame->ntp_time_ms();
       }
-      input_->SwapFrame(frame);
+      input_->IncomingCapturedFrame(*frame);
     }
   }
   tick_->Wait(WEBRTC_EVENT_INFINITE);

View File

@@ -87,9 +87,9 @@ void VcmCapturer::Destroy() {
 VcmCapturer::~VcmCapturer() { Destroy(); }
 void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
-                                          I420VideoFrame& frame) {
+                                          const I420VideoFrame& frame) {
   if (started_)
-    input_->SwapFrame(&frame);
+    input_->IncomingCapturedFrame(frame);
 }
 void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {

View File

@@ -28,7 +28,7 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
   void Stop() override;
   void OnIncomingCapturedFrame(const int32_t id,
-                               I420VideoFrame& frame) override;  // NOLINT
+                               const I420VideoFrame& frame) override;  // NOLINT
   void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
  private:

View File

@@ -170,7 +170,7 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
   // check that the callbacks are done after processing video.
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
       << "Timed out while waiting for pre-render callback.";
   EXPECT_EQ(kEventSignaled, renderer.Wait())
@@ -218,7 +218,7 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(
           encoder_config_.streams[0].width, encoder_config_.streams[0].height));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_EQ(kEventSignaled, renderer.Wait())
       << "Timed out while waiting for the frame to render.";
@@ -833,7 +833,7 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
   // check that the callbacks are done after processing video.
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
       << "Timed out while waiting for pre-encode callback.";
@@ -1263,7 +1263,7 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(
           encoder_config_.streams[0].width, encoder_config_.streams[0].height));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())
       << "Timed out while waiting for send-side encoded-frame callback.";

View File

@@ -144,7 +144,7 @@ class VideoAnalyzer : public PacketReceiver,
     return receiver_->DeliverPacket(packet, length);
   }
-  void SwapFrame(I420VideoFrame* video_frame) override {
+  void IncomingCapturedFrame(const I420VideoFrame& video_frame) override {
     I420VideoFrame* copy = NULL;
     {
       CriticalSectionScoped lock(crit_.get());
@@ -156,8 +156,8 @@ class VideoAnalyzer : public PacketReceiver,
     if (copy == NULL)
       copy = new I420VideoFrame();
-    copy->CopyFrame(*video_frame);
-    copy->set_timestamp(copy->render_time_ms() * 90);
+    copy->CopyFrame(video_frame);
+    copy->set_timestamp(copy->ntp_time_ms() * 90);
     {
       CriticalSectionScoped lock(crit_.get());
@@ -167,7 +167,7 @@ class VideoAnalyzer : public PacketReceiver,
       frames_.push_back(copy);
     }
-    input_->SwapFrame(video_frame);
+    input_->IncomingCapturedFrame(video_frame);
   }
   bool SendRtp(const uint8_t* packet, size_t length) override {

View File

@@ -50,15 +50,11 @@ void SendStatisticsProxy::SuspendChange(int video_channel, bool is_suspended) {
   stats_.suspended = is_suspended;
 }
-void SendStatisticsProxy::CapturedFrameRate(const int capture_id,
-                                            const unsigned char frame_rate) {
-  CriticalSectionScoped lock(crit_.get());
-  stats_.input_frame_rate = frame_rate;
-}
 VideoSendStream::Stats SendStatisticsProxy::GetStats() {
   CriticalSectionScoped lock(crit_.get());
   PurgeOldStats();
+  stats_.input_frame_rate =
+      static_cast<int>(input_frame_rate_tracker_.units_second());
   return stats_;
 }
@@ -122,6 +118,11 @@ void SendStatisticsProxy::OnSendEncodedImage(
   update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
 }
+void SendStatisticsProxy::OnIncomingFrame() {
+  CriticalSectionScoped lock(crit_.get());
+  input_frame_rate_tracker_.Update(1);
+}
 void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
     uint32_t ssrc,
     const RtcpPacketTypeCounter& packet_counter) {

View File

@@ -13,6 +13,7 @@
 #include <string>
+#include "webrtc/base/ratetracker.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/base/thread_annotations.h"
 #include "webrtc/common_types.h"
@@ -35,7 +36,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
                             public BitrateStatisticsObserver,
                             public FrameCountObserver,
                             public ViEEncoderObserver,
-                            public ViECaptureObserver,
                             public VideoEncoderRateObserver,
                             public SendSideDelayObserver {
  public:
@@ -48,6 +48,8 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
   virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
                                   const RTPVideoHeader* rtp_video_header);
+  // Used to update incoming frame rate.
+  void OnIncomingFrame();
   // From VideoEncoderRateObserver.
   void OnSetRates(uint32_t bitrate_bps, int framerate) override;
@@ -83,16 +85,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
   void SuspendChange(int video_channel, bool is_suspended) override;
-  // From ViECaptureObserver.
-  void BrightnessAlarm(const int capture_id,
-                       const Brightness brightness) override {}
-  void CapturedFrameRate(const int capture_id,
-                         const unsigned char frame_rate) override;
-  void NoPictureAlarm(const int capture_id, const CaptureAlarm alarm) override {
-  }
   void SendSideDelayUpdated(int avg_delay_ms,
                             int max_delay_ms,
                             uint32_t ssrc) override;
@@ -110,6 +102,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
   const VideoSendStream::Config config_;
   rtc::scoped_ptr<CriticalSectionWrapper> crit_;
   VideoSendStream::Stats stats_ GUARDED_BY(crit_);
+  rtc::RateTracker input_frame_rate_tracker_ GUARDED_BY(crit_);
   std::map<uint32_t, StatsUpdateTimes> update_times_ GUARDED_BY(crit_);
 };

View File

@@ -128,16 +128,6 @@ TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
   ExpectEqual(expected_, stats);
 }
-TEST_F(SendStatisticsProxyTest, CaptureFramerate) {
-  const int capture_fps = 31;
-  ViECaptureObserver* capture_observer = statistics_proxy_.get();
-  capture_observer->CapturedFrameRate(0, capture_fps);
-  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
-  EXPECT_EQ(capture_fps, stats.input_frame_rate);
-}
 TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) {
   const int media_bitrate_bps = 500;
   const int encode_fps = 29;

View File

@@ -237,7 +237,6 @@ VideoSendStream::VideoSendStream(
   rtp_rtcp_->RegisterSendFrameCountObserver(channel_, &stats_proxy_);
   codec_->RegisterEncoderObserver(channel_, stats_proxy_);
-  capture_->RegisterObserver(capture_id_, stats_proxy_);
 }
 VideoSendStream::~VideoSendStream() {
@@ -274,12 +273,13 @@ VideoSendStream::~VideoSendStream() {
   rtp_rtcp_->Release();
 }
-void VideoSendStream::SwapFrame(I420VideoFrame* frame) {
+void VideoSendStream::IncomingCapturedFrame(const I420VideoFrame& frame) {
   // TODO(pbos): Local rendering should not be done on the capture thread.
   if (config_.local_renderer != NULL)
-    config_.local_renderer->RenderFrame(*frame, 0);
-  external_capture_->SwapFrame(frame);
+    config_.local_renderer->RenderFrame(frame, 0);
+  stats_proxy_.OnIncomingFrame();
+  external_capture_->IncomingFrame(frame);
 }
 VideoSendStreamInput* VideoSendStream::Input() { return this; }

View File

@@ -63,7 +63,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
   bool DeliverRtcp(const uint8_t* packet, size_t length);
   // From VideoSendStreamInput.
-  void SwapFrame(I420VideoFrame* frame) override;
+  void IncomingCapturedFrame(const I420VideoFrame& frame) override;
   // From webrtc::VideoSendStream.
   VideoSendStreamInput* Input() override;

View File

@@ -266,35 +266,6 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
   StatisticianMap stats_map_;
 };
-TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
-  static const size_t kWidth = 320;
-  static const size_t kHeight = 240;
-  test::NullTransport transport;
-  Call::Config call_config(&transport);
-  CreateSenderCall(call_config);
-  CreateSendConfig(1);
-  CreateStreams();
-  send_stream_->Start();
-  I420VideoFrame frame;
-  const int stride_uv = (kWidth + 1) / 2;
-  frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv);
-  uint8_t* old_y_buffer = frame.buffer(kYPlane);
-  // Initialize memory to avoid DrMemory errors.
-  const int half_height = (kHeight + 1) / 2;
-  memset(frame.buffer(kYPlane), 0, kWidth * kHeight);
-  memset(frame.buffer(kUPlane), 0, stride_uv * half_height);
-  memset(frame.buffer(kVPlane), 0, stride_uv * half_height);
-  send_stream_->Input()->SwapFrame(&frame);
-  EXPECT_NE(frame.buffer(kYPlane), old_y_buffer);
-  DestroyStreams();
-}
 TEST_F(VideoSendStreamTest, SupportsFec) {
   class FecObserver : public test::SendTest {
    public:
@@ -1044,15 +1015,13 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndI420VideoFrames) {
       new webrtc::RefCountImpl<FakeNativeHandle>();
   input_frames.push_back(new I420VideoFrame(handle1, width, height, 1, 1));
   input_frames.push_back(new I420VideoFrame(handle2, width, height, 2, 2));
-  input_frames.push_back(CreateI420VideoFrame(width, height, 1));
-  input_frames.push_back(CreateI420VideoFrame(width, height, 2));
-  input_frames.push_back(new I420VideoFrame(handle3, width, height, 3, 3));
+  input_frames.push_back(CreateI420VideoFrame(width, height, 3));
+  input_frames.push_back(CreateI420VideoFrame(width, height, 4));
+  input_frames.push_back(new I420VideoFrame(handle3, width, height, 5, 5));
   send_stream_->Start();
   for (size_t i = 0; i < input_frames.size(); i++) {
-    // Make a copy of the input frame because the buffer will be swapped.
-    rtc::scoped_ptr<I420VideoFrame> frame(input_frames[i]->CloneFrame());
-    send_stream_->Input()->SwapFrame(frame.get());
+    send_stream_->Input()->IncomingCapturedFrame(*input_frames[i]);
     // Do not send the next frame too fast, so the frame dropper won't drop it.
     if (i < input_frames.size() - 1)
       SleepMs(1000 / encoder_config_.streams[0].max_framerate);
@@ -1082,6 +1051,7 @@ void ExpectEqualTextureFrames(const I420VideoFrame& frame1,
   EXPECT_EQ(frame1.native_handle(), frame2.native_handle());
   EXPECT_EQ(frame1.width(), frame2.width());
   EXPECT_EQ(frame1.height(), frame2.height());
+  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
 }
 void ExpectEqualBufferFrames(const I420VideoFrame& frame1,
@@ -1091,7 +1061,7 @@ void ExpectEqualBufferFrames(const I420VideoFrame& frame1,
   EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane));
   EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane));
   EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane));
-  EXPECT_EQ(frame1.ntp_time_ms(), frame2.ntp_time_ms());
+  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
   ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane));
   EXPECT_EQ(0,
             memcmp(frame1.buffer(kYPlane),
@@ -1134,7 +1104,6 @@ I420VideoFrame* CreateI420VideoFrame(int width, int height, uint8_t data) {
                      width / 2,
                      width / 2);
   frame->set_timestamp(data);
-  frame->set_ntp_time_ms(data);
   frame->set_render_time_ms(data);
   return frame;
 }

View File

@@ -113,7 +113,7 @@ class WEBRTC_DLLEXPORT ViEExternalCapture {
       const ViEVideoFrameI420& video_frame,
       unsigned long long capture_time = 0) = 0;
-  virtual void SwapFrame(I420VideoFrame* frame) {}
+  virtual void IncomingFrame(const I420VideoFrame& frame) {}
 };
 // This class declares an abstract interface for a user defined observer. It is

View File

@@ -21,6 +21,7 @@
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace_event.h"
 #include "webrtc/video_engine/include/vie_image_process.h"
 #include "webrtc/video_engine/overuse_frame_detector.h"
@@ -78,6 +79,10 @@ ViECapturer::ViECapturer(int capture_id,
       capture_event_(*EventWrapper::Create()),
       deliver_event_(*EventWrapper::Create()),
       stop_(0),
+      last_captured_timestamp_(0),
+      delta_ntp_internal_ms_(
+          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
+          TickTime::MillisecondTimestamp()),
       effect_filter_(NULL),
       image_proc_module_(NULL),
       image_proc_module_ref_counter_(0),
@@ -310,10 +315,6 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
 int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                                    unsigned long long capture_time) {  // NOLINT
-  if (!external_capture_module_) {
-    return -1;
-  }
   int size_y = video_frame.height * video_frame.y_pitch;
   int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
   int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
@@ -329,46 +330,61 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                              video_frame.y_pitch,
                              video_frame.u_pitch,
                              video_frame.v_pitch);
   if (ret < 0) {
     LOG_F(LS_ERROR) << "Could not create I420Frame.";
     return -1;
   }
-  return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
-                                                          capture_time);
+  incoming_frame_.set_ntp_time_ms(capture_time);
+  OnIncomingCapturedFrame(-1, incoming_frame_);
+  return 0;
 }
-void ViECapturer::SwapFrame(I420VideoFrame* frame) {
-  external_capture_module_->IncomingI420VideoFrame(frame,
-                                                   frame->render_time_ms());
-  frame->set_timestamp(0);
-  frame->set_ntp_time_ms(0);
-  frame->set_render_time_ms(0);
+void ViECapturer::IncomingFrame(const I420VideoFrame& frame) {
+  OnIncomingCapturedFrame(-1, frame);
 }
 void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
-                                          I420VideoFrame& video_frame) {
+                                          const I420VideoFrame& video_frame) {
   CriticalSectionScoped cs(capture_cs_.get());
-  // Make sure we render this frame earlier since we know the render time set
-  // is slightly off since it's being set when the frame has been received from
-  // the camera, and not when the camera actually captured the frame.
-  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
-  overuse_detector_->FrameCaptured(video_frame.width(),
-                                   video_frame.height(),
-                                   video_frame.render_time_ms());
+  captured_frame_.reset(new I420VideoFrame());
+  captured_frame_->ShallowCopy(video_frame);
+  if (captured_frame_->ntp_time_ms() != 0) {
+    // If a ntp time stamp is set, this is the time stamp we will use.
+    captured_frame_->set_render_time_ms(
+        captured_frame_->ntp_time_ms() - delta_ntp_internal_ms_);
+  } else {  // ntp time stamp not set.
+    int64_t render_time = captured_frame_->render_time_ms() != 0 ?
+        captured_frame_->render_time_ms() : TickTime::MillisecondTimestamp();
+    // Make sure we render this frame earlier since we know the render time
+    // set is slightly off since it's being set when the frame was received
+    // from the camera, and not when the camera actually captured the frame.
+    render_time -= FrameDelay();
+    captured_frame_->set_render_time_ms(render_time);
+    captured_frame_->set_ntp_time_ms(render_time + delta_ntp_internal_ms_);
+  }
+  if (captured_frame_->ntp_time_ms() <= last_captured_timestamp_) {
+    // We don't allow the same capture time for two frames, drop this one.
+    return;
+  }
+  last_captured_timestamp_ = captured_frame_->ntp_time_ms();
+  // Convert ntp time, in ms, to RTP timestamp.
+  const int kMsToRtpTimestamp = 90;
+  captured_frame_->set_timestamp(kMsToRtpTimestamp *
+      static_cast<uint32_t>(captured_frame_->ntp_time_ms()));
+  overuse_detector_->FrameCaptured(captured_frame_->width(),
+                                   captured_frame_->height(),
+                                   captured_frame_->render_time_ms());
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
-  if (video_frame.native_handle() != NULL) {
-    captured_frame_.reset(video_frame.CloneFrame());
-  } else {
-    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
-      captured_frame_.reset(new I420VideoFrame());
-    captured_frame_->SwapFrame(&video_frame);
-  }
   capture_event_.Set();
 }

View File

@@ -69,17 +69,17 @@ class ViECapturer
   int FrameCallbackChanged();
   // Implements ExternalCapture.
-  virtual int IncomingFrame(unsigned char* video_frame,
-                            size_t video_frame_length,
-                            uint16_t width,
-                            uint16_t height,
-                            RawVideoType video_type,
-                            unsigned long long capture_time = 0);  // NOLINT
+  int IncomingFrame(unsigned char* video_frame,
+                    size_t video_frame_length,
+                    uint16_t width,
+                    uint16_t height,
+                    RawVideoType video_type,
+                    unsigned long long capture_time = 0) override;
-  virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
-                                unsigned long long capture_time = 0);  // NOLINT
+  int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                        unsigned long long capture_time = 0) override;
-  void SwapFrame(I420VideoFrame* frame) override;
+  void IncomingFrame(const I420VideoFrame& frame) override;
   // Start/Stop.
   int32_t Start(
@@ -123,7 +123,7 @@ class ViECapturer
   // Implements VideoCaptureDataCallback.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       I420VideoFrame& video_frame);
+                                       const I420VideoFrame& video_frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
@@ -172,6 +172,10 @@ class ViECapturer
   volatile int stop_;
   rtc::scoped_ptr<I420VideoFrame> captured_frame_;
+  // Used to make sure incoming time stamp is increasing for every frame.
+  int64_t last_captured_timestamp_;
+  // Delta used for translating between NTP and internal timestamps.
+  const int64_t delta_ntp_internal_ms_;
   rtc::scoped_ptr<I420VideoFrame> deliver_frame_;
   // Image processing.
// Image processing. // Image processing.

View File

@@ -129,13 +129,37 @@ class ViECapturerTest : public ::testing::Test {
   std::vector<const uint8_t*> output_frame_ybuffers_;
 };
+TEST_F(ViECapturerTest, TestNtpTimeStampSetIfRenderTimeSet) {
+  input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(0)));
+  input_frames_[0]->set_render_time_ms(5);
+  input_frames_[0]->set_ntp_time_ms(0);
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+  EXPECT_GT(output_frames_[0]->ntp_time_ms(),
+            input_frames_[0]->render_time_ms());
+}
+TEST_F(ViECapturerTest, TestRtpTimeStampSet) {
+  input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(0)));
+  input_frames_[0]->set_render_time_ms(0);
+  input_frames_[0]->set_ntp_time_ms(1);
+  input_frames_[0]->set_timestamp(0);
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+  EXPECT_EQ(output_frames_[0]->timestamp(),
+            input_frames_[0]->ntp_time_ms() * 90);
+}
 TEST_F(ViECapturerTest, TestTextureFrames) {
   const int kNumFrame = 3;
   for (int i = 0 ; i < kNumFrame; ++i) {
     webrtc::RefCountImpl<FakeNativeHandle>* handle =
         new webrtc::RefCountImpl<FakeNativeHandle>();
     // Add one to |i| so that width/height > 0.
-    input_frames_.push_back(new I420VideoFrame(handle, i + 1, i + 1, i, i));
+    input_frames_.push_back(
+        new I420VideoFrame(handle, i + 1, i + 1, i + 1, i + 1));
     AddInputFrame(input_frames_[i]);
     WaitOutputFrame();
   }
@@ -145,20 +169,17 @@ TEST_F(ViECapturerTest, TestTextureFrames) {
 TEST_F(ViECapturerTest, TestI420Frames) {
   const int kNumFrame = 4;
-  ScopedVector<I420VideoFrame> copied_input_frames;
   std::vector<const uint8_t*> ybuffer_pointers;
   for (int i = 0; i < kNumFrame; ++i) {
     input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
     const I420VideoFrame* const_input_frame = input_frames_[i];
     ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane));
-    // Copy input frames because the buffer data will be swapped.
-    copied_input_frames.push_back(input_frames_[i]->CloneFrame());
     AddInputFrame(input_frames_[i]);
     WaitOutputFrame();
   }
-  EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
-  // Make sure the buffer is swapped and not copied.
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+  // Make sure the buffer is not copied.
   for (int i = 0; i < kNumFrame; ++i)
     EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
 }
@@ -170,10 +191,8 @@ TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
   AddInputFrame(input_frames_[0]);
   WaitOutputFrame();
-  input_frames_.push_back(CreateI420VideoFrame(1));
-  rtc::scoped_ptr<I420VideoFrame> copied_input_frame(
-      input_frames_[1]->CloneFrame());
-  AddInputFrame(copied_input_frame.get());
+  input_frames_.push_back(CreateI420VideoFrame(2));
+  AddInputFrame(input_frames_[1]);
   WaitOutputFrame();
   EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
@@ -181,14 +200,12 @@ TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
 TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
   input_frames_.push_back(CreateI420VideoFrame(1));
-  rtc::scoped_ptr<I420VideoFrame> copied_input_frame(
-      input_frames_[0]->CloneFrame());
-  AddInputFrame(copied_input_frame.get());
+  AddInputFrame(input_frames_[0]);
   WaitOutputFrame();
   webrtc::RefCountImpl<FakeNativeHandle>* handle =
       new webrtc::RefCountImpl<FakeNativeHandle>();
-  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 1, 1));
+  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 2, 2));
   AddInputFrame(input_frames_[1]);
   WaitOutputFrame();
@@ -207,7 +224,6 @@ bool EqualTextureFrames(const I420VideoFrame& frame1,
   return ((frame1.native_handle() == frame2.native_handle()) &&
           (frame1.width() == frame2.width()) &&
           (frame1.height() == frame2.height()) &&
-          (frame1.timestamp() == frame2.timestamp()) &&
           (frame1.render_time_ms() == frame2.render_time_ms()));
 }
@@ -218,8 +234,6 @@ bool EqualBufferFrames(const I420VideoFrame& frame1,
           (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
           (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
          (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
-          (frame1.timestamp() == frame2.timestamp()) &&
-          (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
          (frame1.render_time_ms() == frame2.render_time_ms()) &&
          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
@@ -254,8 +268,6 @@ I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
   frame->CreateFrame(
       kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
       width / 2, width / 2);
-  frame->set_timestamp(data);
-  frame->set_ntp_time_ms(data);
   frame->set_render_time_ms(data);
   return frame;
 }

View File

@@ -582,16 +582,8 @@ void ViEEncoder::DeliverFrame(int id,
     TraceFrameDropEnd();
   }
-  // Convert render time, in ms, to RTP timestamp.
-  const int kMsToRtpTimestamp = 90;
-  const uint32_t time_stamp =
-      kMsToRtpTimestamp *
-      static_cast<uint32_t>(video_frame->render_time_ms());
   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame->render_time_ms(),
                           "Encode");
-  video_frame->set_timestamp(time_stamp);
   I420VideoFrame* decimated_frame = NULL;
   // TODO(wuchengli): support texture frames.
   if (video_frame->native_handle() == NULL) {

View File

@@ -73,11 +73,15 @@ class I420VideoFrame {
                   int stride_v,
                   VideoRotation rotation);
-  // Copy frame: If required size is bigger than allocated one, new buffers of
-  // adequate size will be allocated.
+  // Deep copy frame: If required size is bigger than allocated one, new
+  // buffers of adequate size will be allocated.
   // Return value: 0 on success, -1 on error.
   int CopyFrame(const I420VideoFrame& videoFrame);
+  // Creates a shallow copy of |videoFrame|, i.e., this object will retain a
+  // reference to the video buffer also retained by |videoFrame|.
+  void ShallowCopy(const I420VideoFrame& videoFrame);
   // Make a copy of |this|. The caller owns the returned frame.
   // Return value: a new frame on success, NULL on error.
   I420VideoFrame* CloneFrame() const;

View File

@@ -29,7 +29,7 @@ class VideoSendStreamInput {
   // These methods do not lock internally and must be called sequentially.
   // If your application switches input sources synchronization must be done
   // externally to make sure that any old frames are not delivered concurrently.
-  virtual void SwapFrame(I420VideoFrame* video_frame) = 0;
+  virtual void IncomingCapturedFrame(const I420VideoFrame& video_frame) = 0;
  protected:
   virtual ~VideoSendStreamInput() {}
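
Taken together, a caller-side sketch of the new contract (modeled on the FrameGeneratorCapturer usage above; frame_generator_, clock_, and send_stream_ stand in for the caller's own objects):

  I420VideoFrame* frame = frame_generator_->NextFrame();
  frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
  // Unlike the old SwapFrame(), the caller's frame is left intact; the stream
  // takes a shallow (refcounted) copy, so the pixel buffer must not be
  // modified after this call.
  send_stream_->Input()->IncomingCapturedFrame(*frame);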