Revert "Enable CVO by default through webrtc pipeline."

This reverts commit 1b1c15cad16de57053bb6aa8a916079e0534bdae.

Due to a failure on
http://build.chromium.org/p/client.webrtc/builders/Linux64%20Release%20%5Blarge%20tests%5D/builds/4092
and the following builds (the test hangs and never finishes).
R=kjellander@webrtc.org
TBR=guoweis@chromium.org
TESTED=A local revert plus a run of libjingle_peerconnection_java_unittest shows that this is the culprit.

Review URL: https://webrtc-codereview.appspot.com/47909004

Cr-Commit-Position: refs/heads/master@{#8911}
Author: Minyue
Date:   2015-04-01 16:19:58 +02:00
parent d91cb5d5fb
commit 31331cfd2d
21 changed files with 83 additions and 447 deletions

View File

@@ -271,15 +271,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     // Mapping array from original UV mapping to the rotated mapping. The number
     // is the position where the original UV coordination should be mapped
-    // to. (0,1) is the top left coord. (2,3) is the bottom left. (4,5) is the
-    // top right. (6,7) is the bottom right.
+    // to. (0,1) is the left top coord. (2,3) is the left bottom. (4,5) is the
+    // right top. (6,7) is the right bottom. Note that this is the coordination
+    // that got rotated. For example, using the original left bottom (2,3) as
+    // the top left (0,1) means 90 degree clockwise rotation.
     private static int rotation_matrix[][] =
-        { {4, 5, 0, 1, 6, 7, 2, 3},  // 90 degree (clockwise)
+        { {0, 1, 2, 3, 4, 5, 6, 7},  // 0 degree
+          {2, 3, 6, 7, 0, 1, 4, 5},  // 90 degree (clockwise)
           {6, 7, 4, 5, 2, 3, 0, 1},  // 180 degree (clockwise)
-          {2, 3, 6, 7, 0, 1, 4, 5} };  // 270 degree (clockwise)
-    private static int mirror_matrix[] =
-        {4, 1, 6, 3, 0, 5, 2, 7};  // mirrored
+          {4, 5, 0, 1, 6, 7, 2, 3} };  // 270 degree (clockwise)

     private YuvImageRenderer(
         GLSurfaceView surface, int id,
@@ -381,21 +381,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
        }
        if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
          // Need to re-adjust UV coordinates to match display AR.
-          boolean adjustU = true;
-          float ratio = 0;
          if (displayAspectRatio > videoAspectRatio) {
-            ratio = (1.0f - videoAspectRatio / displayAspectRatio) /
+            texOffsetV = (1.0f - videoAspectRatio / displayAspectRatio) /
                2.0f;
-            adjustU = (rotationDegree == 90 || rotationDegree == 270);
          } else {
-            ratio = (1.0f - displayAspectRatio / videoAspectRatio) /
+            texOffsetU = (1.0f - displayAspectRatio / videoAspectRatio) /
                2.0f;
-            adjustU = (rotationDegree == 0 || rotationDegree == 180);
-          }
-          if (adjustU) {
-            texOffsetU = ratio;
-          } else {
-            texOffsetV = ratio;
          }
        }
        Log.d(TAG, "  Texture vertices: (" + texLeft + "," + texBottom +
@@ -411,59 +402,37 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
        Log.d(TAG, "  Texture UV offsets: " + texOffsetU + ", " + texOffsetV);
        float uLeft = texOffsetU;
        float uRight = 1.0f - texOffsetU;
-        if (mirror) {
-          // Swap U coordinates for mirror image.
-          uLeft = 1.0f - texOffsetU;
-          uRight = texOffsetU;
-        }
        float textureCoordinatesFloat[] = new float[] {
-          uLeft, texOffsetV,          // top left
-          uLeft, 1.0f - texOffsetV,   // bottom left
-          uRight, texOffsetV,         // top right
-          uRight, 1.0f - texOffsetV   // bottom right
+          uLeft, texOffsetV,          // left top
+          uLeft, 1.0f - texOffsetV,   // left bottom
+          uRight, texOffsetV,         // right top
+          uRight, 1.0f - texOffsetV   // right bottom
        };
-        // Rotation needs to be done before mirroring.
-        textureCoordinatesFloat = applyRotation(textureCoordinatesFloat,
-                                                rotationDegree);
-        textureCoordinatesFloat = applyMirror(textureCoordinatesFloat,
-                                              mirror);
+        float textureCoordinatesRotatedFloat[];
+        if (rotationDegree == 0) {
+          textureCoordinatesRotatedFloat = textureCoordinatesFloat;
+        } else {
+          textureCoordinatesRotatedFloat =
+              new float[textureCoordinatesFloat.length];
+          int index = rotationDegree / 90;
+          for(int i = 0; i < textureCoordinatesFloat.length; i++) {
+            textureCoordinatesRotatedFloat[rotation_matrix[index][i]] =
+                textureCoordinatesFloat[i];
+          }
+        }
        textureCoords =
-            directNativeFloatBuffer(textureCoordinatesFloat);
+            directNativeFloatBuffer(textureCoordinatesRotatedFloat);
        }
        updateTextureProperties = false;
      }
    }

-    private float[] applyMirror(float textureCoordinatesFloat[],
-                                boolean mirror) {
-      if (!mirror) {
-        return textureCoordinatesFloat;
-      }
-      return applyMatrixOperation(textureCoordinatesFloat,
-                                  mirror_matrix);
-    }
-
-    private float[] applyRotation(float textureCoordinatesFloat[],
-                                  int rotationDegree) {
-      if (rotationDegree == 0) {
-        return textureCoordinatesFloat;
-      }
-      int index = rotationDegree / 90 - 1;
-      return applyMatrixOperation(textureCoordinatesFloat,
-                                  rotation_matrix[index]);
-    }
-
-    private float[] applyMatrixOperation(float textureCoordinatesFloat[],
-                                         int matrix_operation[]) {
-      float textureCoordinatesModifiedFloat[] =
-          new float[textureCoordinatesFloat.length];
-      for(int i = 0; i < textureCoordinatesFloat.length; i++) {
-        textureCoordinatesModifiedFloat[matrix_operation[i]] =
-            textureCoordinatesFloat[i];
-      }
-      return textureCoordinatesModifiedFloat;
-    }

    private void draw() {
      if (!seenFrame) {
        // No frame received yet - nothing to render.

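The restored rotation_matrix is a plain index permutation: the restored loop writes source value i into slot rotation_matrix[rotationDegree / 90][i]. Below is a small standalone sketch of that remapping (C++ rather than the Java renderer code; the table is copied from the hunk above, everything else is illustrative), including a quick consistency check that applying the 90-degree row twice matches the 180-degree row.

#include <array>
#include <cstdio>

// Permutation table copied from the restored VideoRendererGui code:
// kRotationMatrix[r][i] is the destination index of source value i,
// where r = rotationDegree / 90.
static const int kRotationMatrix[4][8] = {
    {0, 1, 2, 3, 4, 5, 6, 7},  // 0 degrees
    {2, 3, 6, 7, 0, 1, 4, 5},  // 90 degrees (clockwise)
    {6, 7, 4, 5, 2, 3, 0, 1},  // 180 degrees (clockwise)
    {4, 5, 0, 1, 6, 7, 2, 3},  // 270 degrees (clockwise)
};

// Mirrors the restored Java loop: out[matrix[r][i]] = in[i].
std::array<float, 8> RotateUv(const std::array<float, 8>& uv, int degrees) {
  std::array<float, 8> out{};
  const int r = (degrees / 90) % 4;
  for (int i = 0; i < 8; ++i)
    out[kRotationMatrix[r][i]] = uv[i];
  return out;
}

int main() {
  // (u, v) pairs in renderer order: left top, left bottom, right top, right bottom.
  const std::array<float, 8> uv = {0, 0, 0, 1, 1, 0, 1, 1};
  // Composing the 90-degree mapping with itself should equal the 180-degree
  // mapping; a cheap sanity check on the table.
  const bool ok = RotateUv(RotateUv(uv, 90), 90) == RotateUv(uv, 180);
  std::printf("90+90 == 180: %s\n", ok ? "yes" : "no");
  return 0;
}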
View File

@@ -254,8 +254,6 @@ bool VideoCapturer::MuteToBlackThenPause(bool muted) {
   return Pause(false);
 }

-// Note that the last caller decides whether rotation should be applied if there
-// are multiple send streams using the same camera.
 bool VideoCapturer::SetApplyRotation(bool enable) {
   apply_rotation_ = enable;
   if (frame_factory_) {

View File

@@ -83,10 +83,8 @@ class VideoFrame {
   }

   // Basic accessors.
-  // Note this is the width and height without rotation applied.
   virtual size_t GetWidth() const = 0;
   virtual size_t GetHeight() const = 0;
   size_t GetChromaWidth() const { return (GetWidth() + 1) / 2; }
   size_t GetChromaHeight() const { return (GetHeight() + 1) / 2; }
   size_t GetChromaSize() const { return GetUPitch() * GetChromaHeight(); }

View File

@@ -44,7 +44,6 @@
 #include "webrtc/base/win32.h"  // Need this to #include the impl files.
 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
-#include "webrtc/system_wrappers/interface/field_trial.h"

 namespace cricket {
@@ -265,13 +264,6 @@ bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
   assert(module_);

-  const std::string group_name =
-      webrtc::field_trial::FindFullName("WebRTC-CVO");
-
-  if (group_name == "Disabled") {
-    return true;
-  }
-
   if (!VideoCapturer::SetApplyRotation(enable)) {
     return false;
   }

View File

@@ -425,8 +425,7 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
     WebRtcVideoFrame cricket_frame(
         webrtc_frame.video_frame_buffer(),
         elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
-        webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec,
-        webrtc_frame.rotation());
+        webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
     return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
   }
@@ -1162,9 +1161,6 @@ void WebRtcVideoEngine::Construct(ViEWrapper* vie_wrapper,
   rtp_header_extensions_.push_back(
       RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
                          kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
-  rtp_header_extensions_.push_back(
-      RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
-                         kRtpVideoRotationHeaderExtensionDefaultId));
 }

 WebRtcVideoEngine::~WebRtcVideoEngine() {
@@ -2836,10 +2832,6 @@ bool WebRtcVideoMediaChannel::SetCapturer(uint32 ssrc,
     QueueBlackFrame(ssrc, timestamp,
                     VideoFormat::FpsToInterval(send_codec_->maxFramerate));
   }
-
-  capturer->SetApplyRotation(
-      !FindHeaderExtension(send_extensions_, kRtpVideoRotationHeaderExtension));
-
   return true;
 }
@@ -2937,8 +2929,6 @@ bool WebRtcVideoMediaChannel::SetRecvRtpHeaderExtensions(
       FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
   const RtpHeaderExtension* send_time_extension =
       FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
-  const RtpHeaderExtension* cvo_extension =
-      FindHeaderExtension(extensions, kRtpVideoRotationHeaderExtension);

   // Loop through all receive channels and enable/disable the extensions.
   for (RecvChannelMap::iterator channel_it = recv_channels_.begin();
@@ -2954,10 +2944,6 @@ bool WebRtcVideoMediaChannel::SetRecvRtpHeaderExtensions(
                             send_time_extension)) {
       return false;
     }
-    if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetReceiveVideoRotationStatus,
-                            channel_id, cvo_extension)) {
-      return false;
-    }
   }

   receive_extensions_ = extensions;
@@ -2974,8 +2960,6 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
       FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
   const RtpHeaderExtension* send_time_extension =
       FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
-  const RtpHeaderExtension* cvo_extension =
-      FindHeaderExtension(extensions, kRtpVideoRotationHeaderExtension);

   // Loop through all send channels and enable/disable the extensions.
   for (SendChannelMap::iterator channel_it = send_channels_.begin();
@@ -2991,10 +2975,6 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
                             send_time_extension)) {
       return false;
     }
-    if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendVideoRotationStatus,
-                            channel_id, cvo_extension)) {
-      return false;
-    }
   }

   if (send_time_extension) {
@@ -3006,14 +2986,6 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
                             send_time_extension->id);
   }

-  // For now assume that all streams want the same CVO setting.
-  // TODO(guoweis): Remove the need for this assumption.
-  for (const auto& kv : send_channels_) {
-    if (kv.second->video_capturer()) {
-      kv.second->video_capturer()->SetApplyRotation(!cvo_extension);
-    }
-  }
-
   send_extensions_ = extensions;
   return true;
 }
@@ -3512,11 +3484,6 @@ bool WebRtcVideoMediaChannel::ConfigureReceiving(int channel_id,
                           receive_extensions_, kRtpAbsoluteSenderTimeHeaderExtension)) {
     return false;
   }
-  if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetReceiveVideoRotationStatus,
-                          channel_id, receive_extensions_,
-                          kRtpVideoRotationHeaderExtension)) {
-    return false;
-  }

   if (receiver_report_ssrc_ != kSsrcUnset) {
     if (engine()->vie()->rtp()->SetLocalSSRC(
@@ -3627,12 +3594,6 @@ bool WebRtcVideoMediaChannel::ConfigureSending(int channel_id,
     return false;
   }

-  if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendVideoRotationStatus,
-                          channel_id, send_extensions_,
-                          kRtpVideoRotationHeaderExtension)) {
-    return false;
-  }
-
   if (engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id,
                                                              true) != 0) {
     LOG_RTCERR2(SetTransmissionSmoothingStatus, channel_id, true);

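A pattern removed throughout this file (and from the WebRtcVideoEngine2 file below) couples the capturer's local rotation to CVO negotiation: when the video-rotation header extension is present among the send extensions, the capturer stops rotating frames itself and rotation travels in the RTP header instead. The following is a minimal sketch of that toggle, reusing only names that appear in the diff; the small structs and the extension URI are stand-ins/assumptions, not the real cricket classes.

#include <cstdio>
#include <string>
#include <vector>

// Stand-in for cricket::RtpHeaderExtension.
struct RtpHeaderExtension {
  std::string uri;
  int id;
};

// Stand-in for a capturer exposing SetApplyRotation().
struct Capturer {
  bool apply_rotation = true;
  void SetApplyRotation(bool enable) { apply_rotation = enable; }
};

// Assumed URI for the CVO extension (3GPP video orientation).
const char kRtpVideoRotationHeaderExtension[] = "urn:3gpp:video-orientation";

const RtpHeaderExtension* FindHeaderExtension(
    const std::vector<RtpHeaderExtension>& extensions, const std::string& uri) {
  for (const auto& extension : extensions) {
    if (extension.uri == uri)
      return &extension;
  }
  return nullptr;
}

// Mirrors the removed call sites: rotate locally only when CVO is absent.
void ConfigureCapturer(Capturer* capturer,
                       const std::vector<RtpHeaderExtension>& send_extensions) {
  capturer->SetApplyRotation(
      !FindHeaderExtension(send_extensions, kRtpVideoRotationHeaderExtension));
}

int main() {
  Capturer capturer;
  ConfigureCapturer(&capturer, {{kRtpVideoRotationHeaderExtension, 4}});
  std::printf("apply rotation with CVO negotiated: %d\n", capturer.apply_rotation);
  ConfigureCapturer(&capturer, {});
  std::printf("apply rotation without CVO:         %d\n", capturer.apply_rotation);
  return 0;
}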
View File

@@ -134,17 +134,6 @@ static std::string RtpExtensionsToString(
   return out.str();
 }

-inline const webrtc::RtpExtension* FindHeaderExtension(
-    const std::vector<webrtc::RtpExtension>& extensions,
-    const std::string& name) {
-  for (const auto& kv : extensions) {
-    if (kv.name == name) {
-      return &kv;
-    }
-  }
-  return NULL;
-}
-
 // Merges two fec configs and logs an error if a conflict arises
 // such that merging in diferent order would trigger a diferent output.
 static void MergeFecConfig(const webrtc::FecConfig& other,
@@ -379,9 +368,6 @@ WebRtcVideoEngine2::WebRtcVideoEngine2(WebRtcVoiceEngine* voice_engine)
   rtp_header_extensions_.push_back(
       RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
                          kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
-  rtp_header_extensions_.push_back(
-      RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
-                         kRtpVideoRotationHeaderExtensionDefaultId));
 }

 WebRtcVideoEngine2::~WebRtcVideoEngine2() {
@@ -1149,16 +1135,7 @@ bool WebRtcVideoChannel2::SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
     LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
     return false;
   }
-  if (!send_streams_[ssrc]->SetCapturer(capturer)) {
-    return false;
-  }
-  if (capturer) {
-    capturer->SetApplyRotation(
-        !FindHeaderExtension(send_rtp_extensions_,
-                             kRtpVideoRotationHeaderExtension));
-  }
-  return true;
+  return send_streams_[ssrc]->SetCapturer(capturer);
 }

 bool WebRtcVideoChannel2::SendIntraFrame() {
@@ -1281,16 +1258,12 @@ bool WebRtcVideoChannel2::SetSendRtpHeaderExtensions(
   send_rtp_extensions_ = filtered_extensions;

-  const webrtc::RtpExtension* cvo_extension = FindHeaderExtension(
-      send_rtp_extensions_, kRtpVideoRotationHeaderExtension);
-
   rtc::CritScope stream_lock(&stream_crit_);
   for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
            send_streams_.begin();
        it != send_streams_.end();
        ++it) {
     it->second->SetRtpExtensions(send_rtp_extensions_);
-    it->second->SetApplyRotation(!cvo_extension);
   }
   return true;
 }
@@ -1607,15 +1580,6 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetSsrcs() const {
   return ssrcs_;
 }

-void WebRtcVideoChannel2::WebRtcVideoSendStream::SetApplyRotation(
-    bool apply_rotation) {
-  rtc::CritScope cs(&lock_);
-  if (capturer_ == NULL)
-    return;
-  capturer_->SetApplyRotation(apply_rotation);
-}
-
 void WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions(
     const VideoOptions& options) {
   rtc::CritScope cs(&lock_);
@@ -2157,7 +2121,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
   const WebRtcVideoFrame render_frame(
       frame.video_frame_buffer(),
       elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
-      frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
+      frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
   renderer_->RenderFrame(&render_frame);
 }

View File

@@ -284,8 +284,6 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
     void MuteStream(bool mute);
     bool DisconnectCapturer();

-    void SetApplyRotation(bool apply_rotation);
-
     void Start();
     void Stop();

View File

@@ -530,82 +530,6 @@ TEST_F(WebRtcVideoEngine2Test, SupportsAbsoluteSenderTimeHeaderExtension) {
   FAIL() << "Absolute Sender Time extension not in header-extension list.";
 }

-TEST_F(WebRtcVideoEngine2Test, SupportsVideoRotationHeaderExtension) {
-  std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
-  ASSERT_FALSE(extensions.empty());
-  for (size_t i = 0; i < extensions.size(); ++i) {
-    if (extensions[i].uri == kRtpVideoRotationHeaderExtension) {
-      EXPECT_EQ(kRtpVideoRotationHeaderExtensionDefaultId, extensions[i].id);
-      return;
-    }
-  }
-  FAIL() << "Video Rotation extension not in header-extension list.";
-}
-
-TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionBeforeCapturer) {
-  // Allocate the capturer first to prevent early destruction before channel's
-  // dtor is called.
-  cricket::FakeVideoCapturer capturer;
-
-  cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
-  encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
-  std::vector<cricket::VideoCodec> codecs;
-  codecs.push_back(kVp8Codec);
-
-  rtc::scoped_ptr<VideoMediaChannel> channel(
-      SetUpForExternalEncoderFactory(&encoder_factory, codecs));
-  EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
-
-  // Add CVO extension.
-  const int id = 1;
-  std::vector<cricket::RtpHeaderExtension> extensions;
-  extensions.push_back(
-      cricket::RtpHeaderExtension(kRtpVideoRotationHeaderExtension, id));
-  EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
-
-  // Set capturer.
-  EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
-
-  // Verify capturer has turned off applying rotation.
-  EXPECT_FALSE(capturer.GetApplyRotation());
-
-  // Verify removing header extension turns on applying rotation.
-  extensions.clear();
-  EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
-  EXPECT_TRUE(capturer.GetApplyRotation());
-}
-
-TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionAfterCapturer) {
-  cricket::FakeVideoCapturer capturer;
-
-  cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
-  encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
-  std::vector<cricket::VideoCodec> codecs;
-  codecs.push_back(kVp8Codec);
-
-  rtc::scoped_ptr<VideoMediaChannel> channel(
-      SetUpForExternalEncoderFactory(&encoder_factory, codecs));
-  EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
-
-  // Set capturer.
-  EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
-
-  // Add CVO extension.
-  const int id = 1;
-  std::vector<cricket::RtpHeaderExtension> extensions;
-  extensions.push_back(
-      cricket::RtpHeaderExtension(kRtpVideoRotationHeaderExtension, id));
-  EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
-
-  // Verify capturer has turned off applying rotation.
-  EXPECT_FALSE(capturer.GetApplyRotation());
-
-  // Verify removing header extension turns on applying rotation.
-  extensions.clear();
-  EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
-  EXPECT_TRUE(capturer.GetApplyRotation());
-}
-
 TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {
   engine_.Init(rtc::Thread::Current());
   rtc::scoped_ptr<VideoMediaChannel> channel(
@@ -1274,34 +1198,21 @@ TEST_F(WebRtcVideoChannel2Test, RecvAbsoluteSendTimeHeaderExtensions) {
                                  webrtc::RtpExtension::kAbsSendTime);
 }

-// Test support for video rotation header extension.
-TEST_F(WebRtcVideoChannel2Test, SendVideoRotationHeaderExtensions) {
-  TestSetSendRtpHeaderExtensions(kRtpVideoRotationHeaderExtension,
-                                 webrtc::RtpExtension::kVideoRotation);
-}
-TEST_F(WebRtcVideoChannel2Test, RecvVideoRotationHeaderExtensions) {
-  TestSetRecvRtpHeaderExtensions(kRtpVideoRotationHeaderExtension,
-                                 webrtc::RtpExtension::kVideoRotation);
-}
-
 TEST_F(WebRtcVideoChannel2Test, IdenticalSendExtensionsDoesntRecreateStream) {
   const int kTOffsetId = 1;
   const int kAbsSendTimeId = 2;
-  const int kVideoRotationId = 3;
   std::vector<cricket::RtpHeaderExtension> extensions;
   extensions.push_back(cricket::RtpHeaderExtension(
       kRtpAbsoluteSenderTimeHeaderExtension, kAbsSendTimeId));
   extensions.push_back(cricket::RtpHeaderExtension(
       kRtpTimestampOffsetHeaderExtension, kTOffsetId));
-  extensions.push_back(cricket::RtpHeaderExtension(
-      kRtpVideoRotationHeaderExtension, kVideoRotationId));

   EXPECT_TRUE(channel_->SetSendRtpHeaderExtensions(extensions));
   FakeVideoSendStream* send_stream =
       AddSendStream(cricket::StreamParams::CreateLegacy(123));

   EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
-  ASSERT_EQ(3u, send_stream->GetConfig().rtp.extensions.size());
+  ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size());

   // Setting the same extensions (even if in different order) shouldn't
   // reallocate the stream.
@@ -1320,21 +1231,18 @@ TEST_F(WebRtcVideoChannel2Test, IdenticalSendExtensionsDoesntRecreateStream) {
 TEST_F(WebRtcVideoChannel2Test, IdenticalRecvExtensionsDoesntRecreateStream) {
   const int kTOffsetId = 1;
   const int kAbsSendTimeId = 2;
-  const int kVideoRotationId = 3;
   std::vector<cricket::RtpHeaderExtension> extensions;
   extensions.push_back(cricket::RtpHeaderExtension(
       kRtpAbsoluteSenderTimeHeaderExtension, kAbsSendTimeId));
   extensions.push_back(cricket::RtpHeaderExtension(
       kRtpTimestampOffsetHeaderExtension, kTOffsetId));
-  extensions.push_back(cricket::RtpHeaderExtension(
-      kRtpVideoRotationHeaderExtension, kVideoRotationId));

   EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(extensions));
   FakeVideoReceiveStream* send_stream =
       AddRecvStream(cricket::StreamParams::CreateLegacy(123));

   EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
-  ASSERT_EQ(3u, send_stream->GetConfig().rtp.extensions.size());
+  ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size());

   // Setting the same extensions (even if in different order) shouldn't
   // reallocate the stream.

View File

@@ -50,7 +50,6 @@
 using cricket::kRtpTimestampOffsetHeaderExtension;
 using cricket::kRtpAbsoluteSenderTimeHeaderExtension;
-using cricket::kRtpVideoRotationHeaderExtension;

 static const cricket::VideoCodec kVP8Codec720p(100, "VP8", 1280, 720, 30, 0);
 static const cricket::VideoCodec kVP8Codec360p(100, "VP8", 640, 360, 30, 0);
@@ -1006,14 +1005,6 @@ TEST_F(WebRtcVideoEngineTestFake, RecvAbsoluteSendTimeHeaderExtensions) {
   TestSetRecvRtpHeaderExtensions(kRtpAbsoluteSenderTimeHeaderExtension);
 }

-// Test support for Coordination of Video Orientation (CVO) header extension.
-TEST_F(WebRtcVideoEngineTestFake, SendVideoRotationHeaderExtensions) {
-  TestSetSendRtpHeaderExtensions(kRtpVideoRotationHeaderExtension);
-}
-
-TEST_F(WebRtcVideoEngineTestFake, RecvVideoRotationHeaderExtensions) {
-  TestSetRecvRtpHeaderExtensions(kRtpVideoRotationHeaderExtension);
-}
-
 TEST_F(WebRtcVideoEngineTestFake, LeakyBucketTest) {
   EXPECT_TRUE(SetupEngine());

View File

@@ -46,19 +46,6 @@ WebRtcVideoFrame::WebRtcVideoFrame():
     time_stamp_ns_(0),
     rotation_(webrtc::kVideoRotation_0) {}

-WebRtcVideoFrame::WebRtcVideoFrame(
-    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-    int64_t elapsed_time_ns,
-    int64_t time_stamp_ns,
-    webrtc::VideoRotation rotation)
-    : video_frame_buffer_(buffer),
-      pixel_width_(1),
-      pixel_height_(1),
-      elapsed_time_ns_(elapsed_time_ns),
-      time_stamp_ns_(time_stamp_ns),
-      rotation_(rotation) {
-}
-
 WebRtcVideoFrame::WebRtcVideoFrame(
     const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
     int64_t elapsed_time_ns,
@@ -75,8 +62,7 @@ WebRtcVideoFrame::WebRtcVideoFrame(webrtc::NativeHandle* handle,
                                    int width,
                                    int height,
                                    int64_t elapsed_time_ns,
-                                   int64_t time_stamp_ns,
-                                   webrtc::VideoRotation rotation)
+                                   int64_t time_stamp_ns)
     : video_frame_buffer_(
           new rtc::RefCountedObject<webrtc::TextureBuffer>(handle,
                                                            width,
@@ -85,7 +71,7 @@ WebRtcVideoFrame::WebRtcVideoFrame(webrtc::NativeHandle* handle,
       pixel_height_(1),
       elapsed_time_ns_(elapsed_time_ns),
       time_stamp_ns_(time_stamp_ns),
-      rotation_(rotation) {
+      rotation_(webrtc::kVideoRotation_0) {
 }

 WebRtcVideoFrame::~WebRtcVideoFrame() {}
@@ -190,9 +176,10 @@ WebRtcVideoFrame::GetVideoFrameBuffer() const {
 VideoFrame* WebRtcVideoFrame::Copy() const {
   WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
-      video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_, rotation_);
+      video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_);
   new_frame->pixel_width_ = pixel_width_;
   new_frame->pixel_height_ = pixel_height_;
+  new_frame->rotation_ = rotation_;

   return new_frame;
 }

View File

@@ -42,22 +42,14 @@ struct CapturedFrame;
 class WebRtcVideoFrame : public VideoFrame {
  public:
   WebRtcVideoFrame();
-  WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-                   int64_t elapsed_time_ns,
-                   int64_t time_stamp_ns,
-                   webrtc::VideoRotation rotation);
-
-  // TODO(guoweis): Remove this when chrome code base is updated.
   WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
                    int64_t elapsed_time_ns,
                    int64_t time_stamp_ns);
   WebRtcVideoFrame(webrtc::NativeHandle* handle,
                    int width,
                    int height,
                    int64_t elapsed_time_ns,
-                   int64_t time_stamp_ns,
-                   webrtc::VideoRotation rotation);
+                   int64_t time_stamp_ns);
   ~WebRtcVideoFrame();

   // Creates a frame from a raw sample with FourCC "format" and size "w" x "h".

View File

@@ -340,8 +340,7 @@ TEST_F(WebRtcVideoFrameTest, InitRotated90DontApplyRotation) {
 TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
   NativeHandleImpl handle;
-  cricket::WebRtcVideoFrame frame(&handle, 640, 480, 100, 200,
-                                  webrtc::kVideoRotation_0);
+  cricket::WebRtcVideoFrame frame(&handle, 640, 480, 100, 200);
   EXPECT_EQ(&handle, frame.GetNativeHandle());
   EXPECT_EQ(640u, frame.GetWidth());
   EXPECT_EQ(480u, frame.GetHeight());
@@ -355,8 +354,7 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
 TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
   NativeHandleImpl handle;
-  cricket::WebRtcVideoFrame frame1(&handle, 640, 480, 100, 200,
-                                   webrtc::kVideoRotation_0);
+  cricket::WebRtcVideoFrame frame1(&handle, 640, 480, 100, 200);
   cricket::VideoFrame* frame2 = frame1.Copy();
   EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
   EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth());

View File

@@ -34,17 +34,6 @@ void RtpHeaderExtensionMap::Erase() {
 int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
                                         const uint8_t id) {
-  return Register(type, id, true);
-}
-
-int32_t RtpHeaderExtensionMap::RegisterInactive(const RTPExtensionType type,
-                                                const uint8_t id) {
-  return Register(type, id, false);
-}
-
-int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
-                                        const uint8_t id,
-                                        bool active) {
   if (id < 1 || id > 14) {
     return -1;
   }
@@ -58,24 +47,12 @@ int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
     }
     // This extension type is already registered with this id,
     // so return success.
-    it->second->active = active;
     return 0;
   }
-  extensionMap_[id] = new HeaderExtension(type, active);
+  extensionMap_[id] = new HeaderExtension(type);
   return 0;
 }

-bool RtpHeaderExtensionMap::SetActive(const RTPExtensionType type,
-                                      bool active) {
-  for (auto& kv : extensionMap_) {
-    if (kv.second->type == type) {
-      kv.second->active = active;
-      return true;
-    }
-  }
-  return false;
-}
-
 int32_t RtpHeaderExtensionMap::Deregister(const RTPExtensionType type) {
   uint8_t id;
   if (GetId(type, &id) != 0) {
@@ -136,9 +113,7 @@ size_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
       extensionMap_.begin();
   while (it != extensionMap_.end()) {
     HeaderExtension* extension = it->second;
-    if (extension->active) {
-      length += extension->length;
-    }
+    length += extension->length;
     it++;
   }
   // Add RTP extension header length.
@@ -165,11 +140,8 @@ int32_t RtpHeaderExtensionMap::GetLengthUntilBlockStartInBytes(
   while (it != extensionMap_.end()) {
     HeaderExtension* extension = it->second;
     if (extension->type == type) {
-      if (!extension->active) {
-        return -1;
-      }
       break;
-    } else if (extension->active) {
+    } else {
       length += extension->length;
     }
     it++;
@@ -178,24 +150,18 @@
 }

 int32_t RtpHeaderExtensionMap::Size() const {
-  int32_t count = 0;
-  for (auto& kv : extensionMap_) {
-    if (kv.second->active) {
-      count++;
-    }
-  }
-  return count;
+  return extensionMap_.size();
 }

 RTPExtensionType RtpHeaderExtensionMap::First() const {
-  for (auto& kv : extensionMap_) {
-    if (kv.second->active) {
-      return kv.second->type;
-    }
-  }
-  return kRtpExtensionNone;
-}
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}

 RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
   uint8_t id;
@@ -204,17 +170,16 @@ RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
   }
   std::map<uint8_t, HeaderExtension*>::const_iterator it =
       extensionMap_.find(id);
-  if (it == extensionMap_.end() || !it->second->active) {
+  if (it == extensionMap_.end()) {
     return kRtpExtensionNone;
   }
-  while ((++it) != extensionMap_.end()) {
-    if (it->second->active) {
-      return it->second->type;
-    }
-  }
-  return kRtpExtensionNone;
-}
+  it++;
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}

 void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
   assert(map);
@@ -222,7 +187,7 @@ void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
       extensionMap_.begin();
   while (it != extensionMap_.end()) {
     HeaderExtension* extension = it->second;
-    map->Register(extension->type, it->first, extension->active);
+    map->Register(extension->type, it->first);
     it++;
   }
 }

View File

@@ -29,16 +29,8 @@ const size_t kTransportSequenceNumberLength = 3;
 struct HeaderExtension {
   HeaderExtension(RTPExtensionType extension_type)
-      : type(extension_type), length(0), active(true) {
-    Init();
-  }
-
-  HeaderExtension(RTPExtensionType extension_type, bool active)
-      : type(extension_type), length(0), active(active) {
-    Init();
-  }
-
-  void Init() {
+      : type(extension_type),
+        length(0) {
     // TODO(solenberg): Create handler classes for header extensions so we can
     // get rid of switches like these as well as handling code spread out all
     // over.
@@ -65,7 +57,6 @@ struct HeaderExtension {
   const RTPExtensionType type;
   uint8_t length;
-  bool active;
 };

 class RtpHeaderExtensionMap {
@@ -77,13 +68,6 @@ class RtpHeaderExtensionMap {
   int32_t Register(const RTPExtensionType type, const uint8_t id);

-  // Active is a concept for a registered rtp header extension which doesn't
-  // take effect yet until being activated. Inactive RTP header extensions do
-  // not take effect and should not be included in size calculations until they
-  // are activated.
-  int32_t RegisterInactive(const RTPExtensionType type, const uint8_t id);
-  bool SetActive(const RTPExtensionType type, bool active);
-
   int32_t Deregister(const RTPExtensionType type);

   bool IsRegistered(RTPExtensionType type) const;
@@ -92,10 +76,6 @@ class RtpHeaderExtensionMap {
   int32_t GetId(const RTPExtensionType type, uint8_t* id) const;

-  //
-  // Methods below ignore any inactive rtp header extensions.
-  //
-
   size_t GetTotalLengthInBytes() const;

   int32_t GetLengthUntilBlockStartInBytes(const RTPExtensionType type) const;
@@ -109,7 +89,6 @@ class RtpHeaderExtensionMap {
   RTPExtensionType Next(RTPExtensionType type) const;

  private:
-  int32_t Register(const RTPExtensionType type, const uint8_t id, bool active);
   std::map<uint8_t, HeaderExtension*> extensionMap_;
 };
 }

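The header above also drops the "inactive registration" concept that the rest of this commit relied on: an extension id could be reserved with RegisterInactive() but contributed nothing to length calculations or iteration until SetActive() flipped it on (the sender used this to reserve the CVO id and only activate it once a rotated frame arrived). The sketch below models that behaviour on a toy map, borrowing the method names from the diff but none of the real RtpHeaderExtensionMap code; the types and lengths are illustrative.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <map>

enum RTPExtensionType { kRtpExtensionNone, kRtpExtensionVideoRotation };

// Toy stand-in for the removed active/inactive bookkeeping.
struct ToyExtensionMap {
  struct Entry {
    RTPExtensionType type;
    size_t length;
    bool active;
  };
  std::map<uint8_t, Entry> entries;

  // Registered and immediately counted.
  void Register(RTPExtensionType type, uint8_t id, size_t length) {
    entries[id] = {type, length, true};
  }
  // Id reserved, but ignored until activated.
  void RegisterInactive(RTPExtensionType type, uint8_t id, size_t length) {
    entries[id] = {type, length, false};
  }
  bool SetActive(RTPExtensionType type, bool active) {
    for (auto& kv : entries) {
      if (kv.second.type == type) {
        kv.second.active = active;
        return true;
      }
    }
    return false;
  }
  // Like GetTotalLengthInBytes(): inactive entries contribute nothing.
  size_t TotalLength() const {
    size_t length = 0;
    for (const auto& kv : entries) {
      if (kv.second.active)
        length += kv.second.length;
    }
    return length;
  }
};

int main() {
  ToyExtensionMap map;
  map.RegisterInactive(kRtpExtensionVideoRotation, 4, 2);
  std::printf("length while inactive: %zu\n", map.TotalLength());  // 0
  map.SetActive(kRtpExtensionVideoRotation, true);
  std::printf("length once active:    %zu\n", map.TotalLength());  // 2
  return 0;
}

After this revert the real map goes back to plain Register(), so the activation step above no longer exists in the code base.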
View File

@@ -35,16 +35,9 @@ const uint8_t RtpHeaderExtensionTest::kId = 3;
 TEST_F(RtpHeaderExtensionTest, Register) {
   EXPECT_EQ(0, map_.Size());
   EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
-  EXPECT_TRUE(map_.IsRegistered(kRtpExtensionTransmissionTimeOffset));
   EXPECT_EQ(1, map_.Size());
   EXPECT_EQ(0, map_.Deregister(kRtpExtensionTransmissionTimeOffset));
   EXPECT_EQ(0, map_.Size());
-
-  EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
-  EXPECT_EQ(0, map_.Size());
-  EXPECT_TRUE(map_.IsRegistered(kRtpExtensionTransmissionTimeOffset));
-  EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
-  EXPECT_EQ(1, map_.Size());
 }

 TEST_F(RtpHeaderExtensionTest, RegisterIllegalArg) {
@@ -63,14 +56,10 @@ TEST_F(RtpHeaderExtensionTest, Idempotent) {
 TEST_F(RtpHeaderExtensionTest, NonUniqueId) {
   EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
   EXPECT_EQ(-1, map_.Register(kRtpExtensionAudioLevel, kId));
-  EXPECT_EQ(-1, map_.RegisterInactive(kRtpExtensionAudioLevel, kId));
 }

 TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
   EXPECT_EQ(0u, map_.GetTotalLengthInBytes());
-  EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
-  EXPECT_EQ(0u, map_.GetTotalLengthInBytes());
-
   EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
   EXPECT_EQ(kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength,
             map_.GetTotalLengthInBytes());
@@ -79,11 +68,7 @@ TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
 TEST_F(RtpHeaderExtensionTest, GetLengthUntilBlockStart) {
   EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
                     kRtpExtensionTransmissionTimeOffset));
-  EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
-  EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
-                    kRtpExtensionTransmissionTimeOffset));
-  EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
   EXPECT_EQ(static_cast<int>(kRtpOneByteHeaderLength),
             map_.GetLengthUntilBlockStartInBytes(
                 kRtpExtensionTransmissionTimeOffset));
@@ -111,11 +96,7 @@ TEST_F(RtpHeaderExtensionTest, IterateTypes) {
   EXPECT_EQ(kRtpExtensionNone, map_.First());
   EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
-  EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
-  EXPECT_EQ(kRtpExtensionNone, map_.First());
-  EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
   EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, map_.First());
   EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));

View File

@@ -63,6 +63,13 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
   const size_t payload_data_length =
       payload_length - rtp_header->header.paddingLength;

+  // Retrieve the video rotation information.
+  rtp_header->type.Video.rotation = kVideoRotation_0;
+  if (rtp_header->header.extension.hasVideoRotation) {
+    rtp_header->type.Video.rotation = ConvertCVOByteToVideoRotation(
+        rtp_header->header.extension.videoRotation);
+  }
+
   if (payload == NULL || payload_data_length == 0) {
     return data_callback_->OnReceivedPayloadData(NULL, 0, rtp_header) == 0 ? 0
                                                                            : -1;
@@ -83,14 +90,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
   rtp_header->frameType = parsed_payload.frame_type;
   rtp_header->type = parsed_payload.type;
-  rtp_header->type.Video.rotation = kVideoRotation_0;
-
-  // Retrieve the video rotation information.
-  if (rtp_header->header.extension.hasVideoRotation) {
-    rtp_header->type.Video.rotation = ConvertCVOByteToVideoRotation(
-        rtp_header->header.extension.videoRotation);
-  }
-
   return data_callback_->OnReceivedPayloadData(parsed_payload.payload,
                                                parsed_payload.payload_length,
                                                rtp_header) == 0

View File

@@ -129,7 +129,6 @@ RTPSender::RTPSender(int32_t id,
       transmission_time_offset_(0),
       absolute_send_time_(0),
       rotation_(kVideoRotation_0),
-      cvo_mode_(kCVONone),
       transport_sequence_number_(0),
       // NACK.
       nack_byte_count_times_(),
@@ -267,10 +266,6 @@ int32_t RTPSender::SetTransportSequenceNumber(uint16_t sequence_number) {
 int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type,
                                               uint8_t id) {
   CriticalSectionScoped cs(send_critsect_.get());
-  if (type == kRtpExtensionVideoRotation) {
-    cvo_mode_ = kCVOInactive;
-    return rtp_header_extension_map_.RegisterInactive(type, id);
-  }
   return rtp_header_extension_map_.Register(type, id);
 }
@@ -467,16 +462,6 @@ int32_t RTPSender::CheckPayloadType(int8_t payload_type,
   return 0;
 }

-RTPSenderInterface::CVOMode RTPSender::ActivateCVORtpHeaderExtension() {
-  if (cvo_mode_ == kCVOInactive) {
-    CriticalSectionScoped cs(send_critsect_.get());
-    if (rtp_header_extension_map_.SetActive(kRtpExtensionVideoRotation, true)) {
-      cvo_mode_ = kCVOActivated;
-    }
-  }
-  return cvo_mode_;
-}
-
 int32_t RTPSender::SendOutgoingData(FrameType frame_type,
                                     int8_t payload_type,
                                     uint32_t capture_timestamp,
@@ -1216,6 +1201,7 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer,
         block_length = BuildAbsoluteSendTimeExtension(extension_data);
         break;
       case kRtpExtensionVideoRotation:
-        block_length = BuildVideoRotationExtension(extension_data);
+        if (marker_bit)
+          block_length = BuildVideoRotationExtension(extension_data);
         break;
       case kRtpExtensionTransportSequenceNumber:
case kRtpExtensionTransportSequenceNumber: case kRtpExtensionTransportSequenceNumber:

View File

@@ -41,14 +41,6 @@ class RTPSenderInterface {
   RTPSenderInterface() {}
   virtual ~RTPSenderInterface() {}

-  enum CVOMode {
-    kCVONone,
-    kCVOInactive,  // CVO rtp header extension is registered but haven't
-                   // received any frame with rotation pending.
-    kCVOActivated,  // CVO rtp header extension will be present in the rtp
-                    // packets.
-  };
-
   virtual uint32_t SSRC() const = 0;
   virtual uint32_t Timestamp() const = 0;
@@ -78,7 +70,6 @@ class RTPSenderInterface {
                                   const RTPHeader& rtp_header,
                                   VideoRotation rotation) const = 0;
   virtual bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) = 0;
-  virtual CVOMode ActivateCVORtpHeaderExtension() = 0;
 };

 class RTPSender : public RTPSenderInterface {
@@ -294,7 +285,6 @@ class RTPSender : public RTPSenderInterface {
   RtpState GetRtpState() const;
   void SetRtxRtpState(const RtpState& rtp_state);
   RtpState GetRtxRtpState() const;
-  CVOMode ActivateCVORtpHeaderExtension() override;

  protected:
   int32_t CheckPayloadType(int8_t payload_type, RtpVideoCodecTypes* video_type);
@@ -388,7 +378,6 @@ class RTPSender : public RTPSenderInterface {
   int32_t transmission_time_offset_;
   uint32_t absolute_send_time_;
   VideoRotation rotation_;
-  CVOMode cvo_mode_;
   uint16_t transport_sequence_number_;

   // NACK

View File

@@ -186,6 +186,7 @@ class RtpSenderVideoTest : public RtpSenderTest {
     }
     ASSERT_TRUE(rtp_parser.Parse(rtp_header, map));
     ASSERT_FALSE(rtp_parser.RTCP());
+    EXPECT_EQ(expect_cvo, rtp_header.markerBit);
     EXPECT_EQ(payload_, rtp_header.payloadType);
     EXPECT_EQ(seq_num, rtp_header.sequenceNumber);
     EXPECT_EQ(kTimestamp, rtp_header.timestamp);
@@ -253,7 +254,6 @@ TEST_F(RtpSenderTest, RegisterRtpHeaderExtensions) {
                 rtp_sender_->RtpHeaderExtensionTotalLength());
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
-  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
   EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
                                     kTransmissionTimeOffsetLength +
                                     kAbsoluteSendTimeLength +
@@ -286,9 +286,6 @@ TEST_F(RtpSenderTest, RegisterRtpVideoRotationHeaderExtension) {
   EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
-  EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
-  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
-
   EXPECT_EQ(
       RtpUtility::Word32Align(kRtpOneByteHeaderLength + kVideoRotationLength),
       rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -427,7 +424,6 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_MarkerBit) {
   rtp_sender_->SetVideoRotation(kRotation);
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
-  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());

   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);
@@ -451,11 +447,10 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_MarkerBit) {
 }

 // Test CVO header extension is not set when marker bit is false.
-TEST_F(RtpSenderTest, DISABLED_BuildRTPPacketWithVideoRotation_NoMarkerBit) {
+TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_NoMarkerBit) {
   rtp_sender_->SetVideoRotation(kRotation);
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
-  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());

   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);
@@ -1338,15 +1333,13 @@ TEST_F(RtpSenderTest, BytesReportedCorrectly) {
             rtx_stats.transmitted.TotalBytes());
 }

-// Verify that all packets of a frame have CVO byte set.
+// Verify that only the last packet of a frame has CVO byte set.
 TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
   RTPVideoHeader hdr = {0};
   hdr.rotation = kVideoRotation_90;

   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
-  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());

   EXPECT_EQ(
       RtpUtility::Word32Align(kRtpOneByteHeaderLength + kVideoRotationLength),
       rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -1358,12 +1351,13 @@ TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);

-  // Verify that this packet does have CVO byte.
+  // Verify that this packet doesn't have CVO byte.
   VerifyCVOPacket(
       reinterpret_cast<uint8_t*>(transport_.sent_packets_[0]->data()),
-      transport_.sent_packets_[0]->length(), true, &map, kSeqNum, hdr.rotation);
+      transport_.sent_packets_[0]->size(), false, &map, kSeqNum,
+      kVideoRotation_0);

   // Verify that this packet does have CVO byte.
   VerifyCVOPacket(
       reinterpret_cast<uint8_t*>(transport_.sent_packets_[1]->data()),
       transport_.sent_packets_[1]->size(), true, &map, kSeqNum + 1,

View File

@@ -301,13 +301,6 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
                           const size_t payloadSize,
                           const RTPFragmentationHeader* fragmentation,
                           const RTPVideoHeader* rtpHdr) {
-  // Register CVO rtp header extension at the first time when we receive a frame
-  // with pending rotation.
-  RTPSenderInterface::CVOMode cvo_mode = RTPSenderInterface::kCVONone;
-  if (rtpHdr && rtpHdr->rotation != kVideoRotation_0) {
-    cvo_mode = _rtpSender.ActivateCVORtpHeaderExtension();
-  }
-
   uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
   size_t payload_bytes_to_send = payloadSize;
   const uint8_t* data = payloadData;
@@ -348,16 +341,13 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
   // packet in each group of packets which make up another type of frame
   // (e.g. a P-Frame) only if the current value is different from the previous
   // value sent.
-  // Here we are adding it to every packet of every frame at this point.
+  // Here we are adding it to the last packet of every frame at this point.
   if (!rtpHdr) {
     assert(!_rtpSender.IsRtpHeaderExtensionRegistered(
         kRtpExtensionVideoRotation));
-  } else if (cvo_mode == RTPSenderInterface::kCVOActivated) {
+  } else if (last) {
     // Checking whether CVO header extension is registered will require taking
     // a lock. It'll be a no-op if it's not registered.
-    // TODO(guoweis): For now, all packets sent will carry the CVO such that
-    // the RTP header length is consistent, although the receiver side will
-    // only exam the packets with market bit set.
     size_t packetSize = payloadSize + rtp_header_length;
     RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
     RTPHeader rtp_header;

View File

@@ -99,9 +99,6 @@ void VCMPacket::Reset() {
 }

 void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader) {
-  if (markerBit) {
-    codecSpecificHeader.rotation = videoHeader.rotation;
-  }
   switch (videoHeader.codec) {
     case kRtpVideoVp8:
       // Handle all packets within a frame as depending on the previous packet