CVO capturer feature: allow unrotated frames to flow through the capture pipeline.

Split from https://webrtc-codereview.appspot.com/37029004/

This is based on the cleanup change at https://webrtc-codereview.appspot.com/37129004

BUG=4145
R=perkj@webrtc.org, pthatcher@webrtc.org, stefan@webrtc.org, tommi@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/39799004

Cr-Commit-Position: refs/heads/master@{#8337}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8337 4adac7df-926f-26a2-2b94-8c16560cd09d
guoweis@webrtc.org 2015-02-11 17:51:17 +00:00
parent 073dd7b423
commit 20e8f22766
23 changed files with 370 additions and 32 deletions
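
A minimal usage sketch of the new switch (illustration only, not part of the diff); `capturer` stands for any concrete cricket::VideoCapturer, such as the FakeVideoCapturer or WebRtcVideoCapturer touched below.

// Illustration only; assumes a concrete cricket::VideoCapturer* (e.g. a
// FakeVideoCapturer or WebRtcVideoCapturer from this CL).
void ConfigureRotationHandling(cricket::VideoCapturer* capturer,
                               bool sink_handles_rotation) {
  // true  -> the capturer rotates the pixels itself (previous behavior).
  // false -> frames flow through unrotated and carry the rotation as metadata
  //          (VideoFrame::GetVideoRotation()) for a CVO-aware sink.
  capturer->SetApplyRotation(!sink_handles_rotation);
}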

View File

@ -49,7 +49,8 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
: running_(false),
initial_unix_timestamp_(time(NULL) * rtc::kNumNanosecsPerSec),
next_timestamp_(rtc::kNumNanosecsPerMillisec),
is_screencast_(false) {
is_screencast_(false),
rotation_(webrtc::kVideoRotation_0) {
#ifdef HAVE_WEBRTC_VIDEO
set_frame_factory(new cricket::WebRtcVideoFrameFactory());
#endif
@ -115,6 +116,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
memset(reinterpret_cast<uint8*>(frame.data) + (size / 2), 2,
size - (size / 2));
memcpy(frame.data, reinterpret_cast<const uint8*>(&fourcc), 4);
frame.rotation = rotation_;
// TODO(zhurunz): Make SignalFrameCaptured carry a return value so that
// capture results can be propagated back from downstream.
SignalFrameCaptured(this, &frame);
@ -148,11 +150,18 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
return true;
}
void SetRotation(webrtc::VideoRotation rotation) {
rotation_ = rotation;
}
webrtc::VideoRotation GetRotation() { return rotation_; }
private:
bool running_;
int64 initial_unix_timestamp_;
int64 next_timestamp_;
bool is_screencast_;
webrtc::VideoRotation rotation_;
};
} // namespace cricket
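
A short usage sketch for the test helper above (illustration only; the capturer unit tests later in this CL exercise the same path).

// Illustration only: frames generated after SetRotation() carry that rotation
// in CapturedFrame::rotation until a downstream stage applies it.
cricket::FakeVideoCapturer capturer;
capturer.SetRotation(webrtc::kVideoRotation_90);
// capturer.GetRotation() now returns webrtc::kVideoRotation_90, and the next
// CaptureFrame() stamps the generated frame accordingly.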

View File

@ -110,7 +110,8 @@ VideoCapturer::VideoCapturer()
: thread_(rtc::Thread::Current()),
adapt_frame_drops_data_(kMaxAccumulatorSize),
effect_frame_drops_data_(kMaxAccumulatorSize),
frame_time_data_(kMaxAccumulatorSize) {
frame_time_data_(kMaxAccumulatorSize),
apply_rotation_(true) {
Construct();
}
@ -118,7 +119,8 @@ VideoCapturer::VideoCapturer(rtc::Thread* thread)
: thread_(thread),
adapt_frame_drops_data_(kMaxAccumulatorSize),
effect_frame_drops_data_(kMaxAccumulatorSize),
frame_time_data_(kMaxAccumulatorSize) {
frame_time_data_(kMaxAccumulatorSize),
apply_rotation_(true) {
Construct();
}
@ -254,6 +256,14 @@ bool VideoCapturer::MuteToBlackThenPause(bool muted) {
return Pause(false);
}
bool VideoCapturer::SetApplyRotation(bool enable) {
apply_rotation_ = enable;
if (frame_factory_) {
frame_factory_->SetApplyRotation(apply_rotation_);
}
return true;
}
void VideoCapturer::SetSupportedFormats(
const std::vector<VideoFormat>& formats) {
supported_formats_ = formats;
@ -340,6 +350,13 @@ std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
return ss.str();
}
void VideoCapturer::set_frame_factory(VideoFrameFactory* frame_factory) {
frame_factory_.reset(frame_factory);
if (frame_factory) {
frame_factory->SetApplyRotation(apply_rotation_);
}
}
void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
VariableInfo<int>* effect_drops_stats,
VariableInfo<double>* frame_time_stats,

View File

@ -225,6 +225,13 @@ class VideoCapturer
return capture_state_;
}
// Tells the video capturer whether to apply the pending rotation. By default,
// the rotation is applied and the generated frame is upright. When set to
// false, generated frames will carry the rotation information from
// SetCaptureRotation. The return value indicates whether the operation
// succeeded.
virtual bool SetApplyRotation(bool enable);
virtual bool GetApplyRotation() { return apply_rotation_; }
// Adds a video processor that will be applied on VideoFrames returned by
// |SignalVideoFrame|. Multiple video processors can be added. The video
// processors will be applied in the order they were added.
@ -301,9 +308,7 @@ class VideoCapturer
}
// Takes ownership.
void set_frame_factory(VideoFrameFactory* frame_factory) {
frame_factory_.reset(frame_factory);
}
void set_frame_factory(VideoFrameFactory* frame_factory);
// Gets statistics for tracked variables recorded since the last call to
// GetStats. Note that calling GetStats resets any gathered data so it
@ -412,6 +417,9 @@ class VideoCapturer
rtc::CriticalSection crit_;
VideoProcessors video_processors_;
// Whether the capturer should apply rotation before signaling the frame.
bool apply_rotation_;
DISALLOW_COPY_AND_ASSIGN(VideoCapturer);
};
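
A minimal sketch of the new accessors (illustration only), using the FakeVideoCapturer from the first file.

// Illustration only: keep the rotation pending so a rotation-aware sink can
// apply it later.
cricket::FakeVideoCapturer capturer;
capturer.SetApplyRotation(false);
bool applied = capturer.GetApplyRotation();  // false
// The flag is also forwarded to the frame factory (see set_frame_factory() in
// videocapturer.cc above), so CreateAliasedFrame() can skip the rotation step.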

View File

@ -83,16 +83,26 @@ class VideoCapturerTest
: capture_state_(cricket::CS_STOPPED),
num_state_changes_(0),
video_frames_received_(0),
last_frame_elapsed_time_(0) {
last_frame_elapsed_time_(0),
expects_rotation_applied_(true) {
capturer_.SignalVideoFrame.connect(this, &VideoCapturerTest::OnVideoFrame);
capturer_.SignalStateChange.connect(this,
&VideoCapturerTest::OnStateChange);
}
void set_expected_compensation(bool compensation) {
expects_rotation_applied_ = compensation;
}
protected:
void OnVideoFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
++video_frames_received_;
last_frame_elapsed_time_ = frame->GetElapsedTime();
if (expects_rotation_applied_) {
EXPECT_EQ(webrtc::kVideoRotation_0, frame->GetRotation());
} else {
EXPECT_EQ(capturer_.GetRotation(), frame->GetRotation());
}
renderer_.RenderFrame(frame);
}
void OnStateChange(cricket::VideoCapturer*,
@ -113,6 +123,7 @@ class VideoCapturerTest
int video_frames_received_;
int64 last_frame_elapsed_time_;
cricket::FakeVideoRenderer renderer_;
bool expects_rotation_applied_;
};
TEST_F(VideoCapturerTest, CaptureState) {
@ -276,6 +287,107 @@ TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
EXPECT_EQ(1, renderer_.num_rendered_frames());
}
TEST_F(VideoCapturerTest, TestRotationPending) {
int kWidth = 800;
int kHeight = 400;
int frame_count = 0;
std::vector<cricket::VideoFormat> formats;
formats.push_back(cricket::VideoFormat(kWidth, kHeight,
cricket::VideoFormat::FpsToInterval(5),
cricket::FOURCC_I420));
capturer_.ResetSupportedFormats(formats);
// capturer_ should compensate for rotation by default.
capturer_.UpdateAspectRatio(400, 200);
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ(0, renderer_.num_rendered_frames());
// If the frame's rotation is compensated anywhere in the pipeline based on
// the rotation information, the renderer should be given the swapped
// dimensions so that the frame can be rendered.
// Swap the dimensions for the next 2 frames, which are rotated by 90 and 270
// degrees.
int expected_width = kHeight;
int expected_height = kWidth;
NormalizeVideoSize(&expected_width, &expected_height);
renderer_.SetSize(expected_width, expected_height, 0);
capturer_.SetRotation(webrtc::kVideoRotation_90);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
capturer_.SetRotation(webrtc::kVideoRotation_270);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
// Reset the renderer to have corresponding width and height.
expected_width = kWidth;
expected_height = kHeight;
NormalizeVideoSize(&expected_width, &expected_height);
renderer_.SetSize(expected_width, expected_height, 0);
capturer_.SetRotation(webrtc::kVideoRotation_180);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
}
TEST_F(VideoCapturerTest, TestRotationApplied) {
int kWidth = 800;
int kHeight = 400;
std::vector<cricket::VideoFormat> formats;
formats.push_back(cricket::VideoFormat(kWidth, kHeight,
cricket::VideoFormat::FpsToInterval(5),
cricket::FOURCC_I420));
capturer_.ResetSupportedFormats(formats);
// capturer_ should not compensate for rotation.
capturer_.SetApplyRotation(false);
capturer_.UpdateAspectRatio(400, 200);
set_expected_compensation(false);
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ(0, renderer_.num_rendered_frames());
int expected_width = kWidth;
int expected_height = kHeight;
NormalizeVideoSize(&expected_width, &expected_height);
renderer_.SetSize(expected_width, expected_height, 0);
// If the frame's rotation were compensated anywhere in the pipeline, the
// frame coming out of the capturer would no longer have its original
// dimensions. Since the renderer here expects the same dimensions as the
// capturer, it would skip such a frame because the resolutions would no
// longer match.
int frame_count = 0;
capturer_.SetRotation(webrtc::kVideoRotation_0);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
capturer_.SetRotation(webrtc::kVideoRotation_90);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
capturer_.SetRotation(webrtc::kVideoRotation_180);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
capturer_.SetRotation(webrtc::kVideoRotation_270);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
}
TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
capturer_.SetScreencast(true);

View File

@ -118,11 +118,13 @@ class VideoFrame {
// Indicates the rotation angle in degrees.
// TODO(guoweis): Remove this function, rename GetVideoRotation and remove the
// skeleton implementation to GetRotation once chrome is updated.
// skeleton implementation of GetRotation once chrome is updated.
virtual int GetRotation() const { return GetVideoRotation(); }
virtual webrtc::VideoRotation GetVideoRotation() const {
return webrtc::kVideoRotation_0;
}
// TODO(guoweis): Remove the skeleton implementation once chrome is updated.
virtual void SetRotation(webrtc::VideoRotation rotation) {}
// Make a shallow copy of the frame. The frame buffer itself is not copied.
// Both the current and new VideoFrame will share a single reference-counted

View File

@ -40,7 +40,7 @@ class VideoFrame;
// depending on the subclass of VideoFrameFactory.
class VideoFrameFactory {
public:
VideoFrameFactory() {}
VideoFrameFactory() : apply_rotation_(true) {}
virtual ~VideoFrameFactory() {}
// The returned frame aliases the aliased_frame if the input color
@ -65,6 +65,11 @@ class VideoFrameFactory {
int output_width,
int output_height) const;
void SetApplyRotation(bool enable) { apply_rotation_ = enable; }
protected:
bool apply_rotation_;
private:
// An internal frame buffer to avoid reallocations. It is mutable because it
// does not affect behaviour, only performance.

View File

@ -100,6 +100,12 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
webrtc::VideoCaptureRotation rotation) OVERRIDE {
return -1; // not implemented
}
virtual bool SetApplyRotation(bool enable) OVERRIDE {
return false; // not implemented
}
virtual bool GetApplyRotation() OVERRIDE {
return true; // Rotation compensation is turned on.
}
virtual VideoCaptureEncodeInterface* GetEncodeInterface(
const webrtc::VideoCodec& codec) OVERRIDE {
return NULL; // not implemented

View File

@ -213,6 +213,10 @@ bool WebRtcVideoCapturer::Init(const Device& device) {
module_->AddRef();
SetId(device.id);
SetSupportedFormats(supported);
// Keep the capturer's apply_rotation flag in sync with the module's.
SetApplyRotation(module_->GetApplyRotation());
return true;
}
@ -249,6 +253,16 @@ bool WebRtcVideoCapturer::GetBestCaptureFormat(const VideoFormat& desired,
}
return true;
}
bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
rtc::CritScope cs(&critical_section_stopping_);
assert(module_);
if (!VideoCapturer::SetApplyRotation(enable)) {
return false;
}
return module_->SetApplyRotation(enable);
}
CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
if (!module_) {
@ -381,6 +395,7 @@ WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
time_stamp = elapsed_time;
data_size = rtc::checked_cast<uint32>(length);
data = buffer;
rotation = sample.rotation();
}
} // namespace cricket

View File

@ -72,6 +72,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
virtual void Stop();
virtual bool IsRunning();
virtual bool IsScreencast() const { return false; }
virtual bool SetApplyRotation(bool enable);
protected:
// Override virtual methods of the parent class VideoCapturer.

View File

@ -394,6 +394,24 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
if (!renderer_) {
return 0;
}
if (!webrtc_frame->native_handle()) {
WebRtcVideoRenderFrame cricket_frame(webrtc_frame, elapsed_time_ms);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
} else {
return DeliverTextureFrame(
webrtc_frame->native_handle(),
webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec,
elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
}
}
virtual bool IsTextureSupported() { return true; }
int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
int64 time_stamp, int64 elapsed_time) {
WebRtcVideoFrame video_frame;
video_frame.Alias(buffer, buffer_size, width_, height_, 1, 1, elapsed_time,
time_stamp, webrtc::kVideoRotation_0);
WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;

View File

@ -139,9 +139,13 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
frame->time_stamp, frame->GetRotation());
}
bool WebRtcVideoFrame::Alias(const CapturedFrame* frame, int dw, int dh) {
bool WebRtcVideoFrame::Alias(const CapturedFrame* frame,
int dw,
int dh,
bool apply_rotation) {
if (CanonicalFourCC(frame->fourcc) != FOURCC_I420 ||
(frame->GetRotation() != webrtc::kVideoRotation_0) ||
(apply_rotation &&
frame->GetRotation() != webrtc::kVideoRotation_0) ||
frame->width != dw || frame->height != dh) {
// TODO(fbarchard): Enable aliasing of more formats.
return Init(frame, dw, dh);

View File

@ -72,7 +72,10 @@ class WebRtcVideoFrame : public VideoFrame {
// Aliases this WebRtcVideoFrame to a CapturedFrame. |frame| must outlive
// this WebRtcVideoFrame.
bool Alias(const CapturedFrame* frame, int dw, int dh);
bool Alias(const CapturedFrame* frame,
int dw,
int dh,
bool apply_rotation);
bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time, int64_t time_stamp);
@ -129,6 +132,9 @@ class WebRtcVideoFrame : public VideoFrame {
virtual void SetTimeStamp(int64_t time_stamp) { time_stamp_ = time_stamp; }
virtual webrtc::VideoRotation GetVideoRotation() const { return rotation_; }
virtual void SetRotation(webrtc::VideoRotation rotation) {
rotation_ = rotation;
}
virtual VideoFrame* Copy() const;
virtual bool MakeExclusive();

View File

@ -36,7 +36,7 @@ VideoFrame* WebRtcVideoFrameFactory::CreateAliasedFrame(
// TODO(pthatcher): Move Alias logic into the VideoFrameFactory and
// out of the VideoFrame.
rtc::scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame());
if (!frame->Alias(aliased_frame, width, height)) {
if (!frame->Alias(aliased_frame, width, height, apply_rotation_)) {
LOG(LS_ERROR) <<
"Failed to create WebRtcVideoFrame in CreateAliasedFrame.";
return NULL;
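
A hedged usage sketch for the factory change above (illustration only); `captured` is a hypothetical cricket::CapturedFrame* holding an I420 buffer of matching dimensions with a pending 90-degree rotation.

// Illustration only; `captured` is assumed, not defined in this CL.
cricket::WebRtcVideoFrameFactory factory;
factory.SetApplyRotation(false);
rtc::scoped_ptr<cricket::VideoFrame> frame(
    factory.CreateAliasedFrame(captured, captured->width, captured->height));
// With apply_rotation disabled, Alias() can keep the I420 buffer as-is and the
// resulting frame still reports kVideoRotation_90 via GetVideoRotation().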

View File

@ -21,7 +21,9 @@ I420VideoFrame::I420VideoFrame()
height_(0),
timestamp_(0),
ntp_time_ms_(0),
render_time_ms_(0) {}
render_time_ms_(0),
rotation_(kVideoRotation_0) {
}
I420VideoFrame::~I420VideoFrame() {}
@ -42,6 +44,7 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
timestamp_ = 0;
ntp_time_ms_ = 0;
render_time_ms_ = 0;
rotation_ = kVideoRotation_0;
return 0;
}
@ -50,6 +53,23 @@ int I420VideoFrame::CreateFrame(int size_y, const uint8_t* buffer_y,
int size_v, const uint8_t* buffer_v,
int width, int height,
int stride_y, int stride_u, int stride_v) {
return CreateFrame(size_y, buffer_y, size_u, buffer_u, size_v, buffer_v,
width, height, stride_y, stride_u, stride_v,
kVideoRotation_0);
}
int I420VideoFrame::CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
VideoRotation rotation) {
if (size_y < 1 || size_u < 1 || size_v < 1)
return -1;
if (CheckDimensions(width, height, stride_y, stride_u, stride_v) < 0)
@ -59,6 +79,7 @@ int I420VideoFrame::CreateFrame(int size_y, const uint8_t* buffer_y,
v_plane_.Copy(size_v, stride_v, buffer_v);
width_ = width;
height_ = height;
rotation_ = rotation;
return 0;
}
@ -77,6 +98,7 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
timestamp_ = videoFrame.timestamp_;
ntp_time_ms_ = videoFrame.ntp_time_ms_;
render_time_ms_ = videoFrame.render_time_ms_;
rotation_ = videoFrame.rotation_;
return 0;
}
@ -98,6 +120,7 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
std::swap(timestamp_, videoFrame->timestamp_);
std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
std::swap(render_time_ms_, videoFrame->render_time_ms_);
std::swap(rotation_, videoFrame->rotation_);
}
uint8_t* I420VideoFrame::buffer(PlaneType type) {

View File

@ -29,6 +29,7 @@ TEST(TestI420VideoFrame, InitialValues) {
I420VideoFrame frame;
// Invalid arguments - one call for each variable.
EXPECT_TRUE(frame.IsZeroSize());
EXPECT_EQ(kVideoRotation_0, frame.rotation());
EXPECT_EQ(-1, frame.CreateEmptyFrame(0, 10, 10, 14, 14));
EXPECT_EQ(-1, frame.CreateEmptyFrame(10, -1, 10, 90, 14));
EXPECT_EQ(-1, frame.CreateEmptyFrame(10, 10, 0, 14, 18));
@ -100,22 +101,23 @@ TEST(TestI420VideoFrame, CopyFrame) {
const int kSizeY = 225;
const int kSizeU = 80;
const int kSizeV = 80;
const VideoRotation kRotation = kVideoRotation_270;
uint8_t buffer_y[kSizeY];
uint8_t buffer_u[kSizeU];
uint8_t buffer_v[kSizeV];
memset(buffer_y, 16, kSizeY);
memset(buffer_u, 8, kSizeU);
memset(buffer_v, 4, kSizeV);
frame2.CreateFrame(kSizeY, buffer_y,
kSizeU, buffer_u,
kSizeV, buffer_v,
width + 5, height + 5, stride_y + 5, stride_u, stride_v);
frame2.CreateFrame(kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v,
width + 5, height + 5, stride_y + 5, stride_u, stride_v,
kRotation);
// Frame of smaller dimensions - allocated sizes should not vary.
EXPECT_EQ(0, frame1.CopyFrame(frame2));
EXPECT_TRUE(EqualFramesExceptSize(frame1, frame2));
EXPECT_EQ(kSizeY, frame1.allocated_size(kYPlane));
EXPECT_EQ(kSizeU, frame1.allocated_size(kUPlane));
EXPECT_EQ(kSizeV, frame1.allocated_size(kVPlane));
EXPECT_EQ(kRotation, frame1.rotation());
// Verify copy of all parameters.
// Frame of larger dimensions - update allocated sizes.
EXPECT_EQ(0, frame2.CopyFrame(frame1));

View File

@ -48,6 +48,18 @@ class TextureVideoFrame : public I420VideoFrame {
int stride_y,
int stride_u,
int stride_v) OVERRIDE;
virtual int CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
webrtc::VideoRotation rotation) OVERRIDE;
virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
virtual I420VideoFrame* CloneFrame() const OVERRIDE;
virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;

View File

@ -11,8 +11,6 @@
#ifndef WEBRTC_COMMON_VIDEO_ROTATION_H_
#define WEBRTC_COMMON_VIDEO_ROTATION_H_
#include "webrtc/base/common.h"
namespace webrtc {
// enum for clockwise rotation.

View File

@ -52,6 +52,22 @@ int TextureVideoFrame::CreateFrame(int size_y,
return -1;
}
int TextureVideoFrame::CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
webrtc::VideoRotation rotation) {
assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
assert(false); // Should not be called.
return -1;

View File

@ -39,6 +39,8 @@ class MockVideoCaptureModule : public VideoCaptureModule {
MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
MOCK_METHOD0(CaptureDelay, int32_t());
MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
MOCK_METHOD1(SetApplyRotation, bool(bool));
MOCK_METHOD0(GetApplyRotation, bool());
MOCK_METHOD1(GetEncodeInterface,
VideoCaptureEncodeInterface*(const VideoCodec& codec));
MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));

View File

@ -135,6 +135,15 @@ class VideoCaptureModule: public RefCountedModule {
// displayed correctly if rendered.
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation) = 0;
// Tells the capture module whether to apply the pending rotation. By default,
// the rotation is applied and the generated frame is upright. When set to
// false, generated frames will carry the rotation information from
// SetCaptureRotation. The return value indicates whether the operation
// succeeded.
virtual bool SetApplyRotation(bool enable) = 0;
// Return whether the rotation is applied or left pending.
virtual bool GetApplyRotation() = 0;
// Gets a pointer to an encode interface if the capture device supports the
// requested type and size. NULL otherwise.
virtual VideoCaptureEncodeInterface* GetEncodeInterface(

View File

@ -24,6 +24,23 @@
namespace webrtc
{
// Converts the capture module's rotation mode to I420VideoFrame's
// VideoRotation.
VideoRotation ConvertRotation(VideoRotationMode rotation) {
switch (rotation) {
case kRotateNone:
return kVideoRotation_0;
case kRotate90:
return kVideoRotation_90;
case kRotate180:
return kVideoRotation_180;
case kRotate270:
return kVideoRotation_270;
}
assert(false);
return kVideoRotation_0;
}
namespace videocapturemodule
{
VideoCaptureModule* VideoCaptureImpl::Create(
@ -159,7 +176,8 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
last_capture_time_(0),
delta_ntp_internal_ms_(
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
TickTime::MillisecondTimestamp()) {
TickTime::MillisecondTimestamp()),
apply_rotation_(true) {
_requestedCapability.width = kDefaultWidth;
_requestedCapability.height = kDefaultHeight;
_requestedCapability.maxFPS = 30;
@ -278,11 +296,15 @@ int32_t VideoCaptureImpl::IncomingFrame(
int stride_uv = (width + 1) / 2;
int target_width = width;
int target_height = height;
if (apply_rotation_) {
// Swap the target resolution for 90/270 degree rotations.
if (_rotateFrame == kRotate90 || _rotateFrame == kRotate270) {
target_width = abs(height);
target_height = width;
}
}
// TODO(mikhal): Update correct aligned stride values.
//Calc16ByteAlignedStride(target_width, &stride_y, &stride_uv);
// Setting absolute height (in case it was negative).
@ -298,12 +320,10 @@ int32_t VideoCaptureImpl::IncomingFrame(
"happen due to bad parameters.";
return -1;
}
const int conversionResult = ConvertToI420(commonVideoType,
videoFrame,
0, 0, // No cropping
width, height,
videoFrameLength,
_rotateFrame,
const int conversionResult = ConvertToI420(
commonVideoType, videoFrame, 0, 0, // No cropping
width, height, videoFrameLength,
apply_rotation_ ? _rotateFrame : kRotateNone,
&_captureFrame);
if (conversionResult < 0)
{
@ -311,6 +331,13 @@ int32_t VideoCaptureImpl::IncomingFrame(
<< frameInfo.rawType << "to I420.";
return -1;
}
if (!apply_rotation_) {
_captureFrame.set_rotation(ConvertRotation(_rotateFrame));
} else {
_captureFrame.set_rotation(kVideoRotation_0);
}
DeliverCapturedFrame(_captureFrame, captureTime);
}
else // Encoded format
@ -364,6 +391,14 @@ void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
}
}
bool VideoCaptureImpl::SetApplyRotation(bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
// The effect of this is the last caller wins.
apply_rotation_ = enable;
return true;
}
void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
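
For clarity, a small sketch of the decision IncomingFrame() makes above (illustration only; assumes it sits next to the file-local ConvertRotation() helper inside namespace webrtc).

// Illustration only, not part of the diff.
VideoRotation DeliveredRotation(bool apply_rotation,
                                VideoRotationMode rotate_frame) {
  // apply_rotation == true:  ConvertToI420() already rotated the pixels, so
  //                          the delivered frame is upright.
  // apply_rotation == false: the pixels stay unrotated and the rotation is
  //                          kept as metadata for a downstream consumer.
  return apply_rotation ? kVideoRotation_0 : ConvertRotation(rotate_frame);
}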

View File

@ -68,6 +68,10 @@ public:
virtual void SetCaptureDelay(int32_t delayMS);
virtual int32_t CaptureDelay();
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
virtual bool SetApplyRotation(bool enable);
virtual bool GetApplyRotation() {
return apply_rotation_;
}
virtual void EnableFrameRateCallback(const bool enable);
virtual void EnableNoPictureAlarm(const bool enable);
@ -140,6 +144,9 @@ private:
// Delta used for translating between NTP and internal timestamps.
const int64_t delta_ntp_internal_ms_;
// Indicates whether rotation should be applied before the frame is delivered
// externally.
bool apply_rotation_;
};
} // namespace videocapturemodule
} // namespace webrtc

View File

@ -18,6 +18,7 @@
// used).
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/typedefs.h"
#include "webrtc/common_video/rotation.h"
namespace webrtc {
@ -73,6 +74,20 @@ class I420VideoFrame {
int stride_u,
int stride_v);
// TODO(guoweis): remove the previous CreateFrame when chromium has this code.
virtual int CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
VideoRotation rotation);
// Copy frame: If required size is bigger than allocated one, new buffers of
// adequate size will be allocated.
// Return value: 0 on success, -1 on error.
@ -122,6 +137,21 @@ class I420VideoFrame {
// Get capture ntp time in milliseconds.
virtual int64_t ntp_time_ms() const { return ntp_time_ms_; }
// Naming convention for Coordination of Video Orientation. Please see
// http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
//
// "pending rotation" or "pending" = a frame that has a VideoRotation > 0.
//
// "not pending" = a frame that has a VideoRotation == 0.
//
// "apply rotation" = modify a frame from being "pending" to being "not
// pending" rotation (a no-op for "unrotated").
//
virtual VideoRotation rotation() const { return rotation_; }
virtual void set_rotation(VideoRotation rotation) {
rotation_ = rotation;
}
// Set render time in milliseconds.
virtual void set_render_time_ms(int64_t render_time_ms) {
render_time_ms_ = render_time_ms;
@ -165,6 +195,7 @@ class I420VideoFrame {
uint32_t timestamp_;
int64_t ntp_time_ms_;
int64_t render_time_ms_;
VideoRotation rotation_;
};
enum VideoFrameType {
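
A hypothetical helper illustrating the rotation naming convention documented in I420VideoFrame above (illustration only; the pixel rotation itself is omitted).

// Hypothetical helper, not part of this CL.
void ApplyPendingRotation(webrtc::I420VideoFrame* frame) {
  if (frame->rotation() == webrtc::kVideoRotation_0)
    return;                                        // Already "not pending".
  // ... rotate the pixel planes by frame->rotation() degrees (omitted) ...
  frame->set_rotation(webrtc::kVideoRotation_0);   // Now "not pending".
}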