Enable CVO by default through the WebRTC pipeline.

All RTP packets from the sender side will carry the rotation info (will file a bug to track this). On the receiving side, only packets with the marker bit set will be examined.

Tests completed:
1. Android standalone to Android standalone.
2. Android standalone to Chrome (with and without this change).
3. Android on Chrome.

BUG=4145
R=glaznev@webrtc.org, mflodman@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org

Committed: https://crrev.com/1b1c15cad16de57053bb6aa8a916079e0534bdae
Cr-Commit-Position: refs/heads/master@{#8905}

Review URL: https://webrtc-codereview.appspot.com/47399004

Cr-Commit-Position: refs/heads/master@{#8917}
Commit: 64c1e8cda5 (parent: aaf61e460b)
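Not part of the change itself, but as orientation for the diff below: a minimal C++ sketch of the receive-side rule described in the commit message, where rotation is only read from packets carrying the RTP marker bit (the last packet of a frame). The RtpPacketInfo struct and RotationFromPacket function are hypothetical names for illustration; the CVO byte layout (rotation in the two low-order bits) follows the video-orientation header extension.

#include <cstdint>

enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270
};

// Hypothetical, flattened view of the fields a depacketizer would see.
struct RtpPacketInfo {
  bool marker_bit;          // Set on the last packet of a frame.
  bool has_video_rotation;  // CVO header extension present in this packet.
  uint8_t cvo_byte;         // Raw byte carried by the extension.
};

VideoRotation RotationFromPacket(const RtpPacketInfo& packet) {
  // Non-final packets are not examined; they report the default rotation.
  if (!packet.marker_bit || !packet.has_video_rotation)
    return kVideoRotation_0;
  switch (packet.cvo_byte & 0x3) {  // Low two bits: rotation in 90-degree steps.
    case 1:  return kVideoRotation_90;
    case 2:  return kVideoRotation_180;
    case 3:  return kVideoRotation_270;
    default: return kVideoRotation_0;
  }
}

In the actual pipeline the corresponding logic lives in RTPReceiverVideo::ParseRtpPacket (see the hunks below), and the renderer applies the reported rotation when the send side is not already rotating the frame.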
@@ -271,15 +271,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
 
 // Mapping array from original UV mapping to the rotated mapping. The number
 // is the position where the original UV coordination should be mapped
-// to. (0,1) is the left top coord. (2,3) is the top bottom. (4,5) is the
-// right top. (6,7) is the right bottom. Note that this is the coordination
-// that got rotated. For example, using the original left bottom (2,3) as
-// the top left (0,1) means 90 degree clockwise rotation.
+// to. (0,1) is the top left coord. (2,3) is the bottom left. (4,5) is the
+// top right. (6,7) is the bottom right.
 private static int rotation_matrix[][] =
-{ {0, 1, 2, 3, 4, 5, 6, 7}, // 0 degree
-{2, 3, 6, 7, 0, 1, 4, 5}, // 90 degree (clockwise)
-{6, 7, 4, 5, 2, 3, 0, 1}, // 180 degree (clockwise)
-{4, 5, 0, 1, 6, 7, 2, 3} };// 270 degree (clockwise)
+{ {4, 5, 0, 1, 6, 7, 2, 3}, // 90 degree (clockwise)
+{6, 7, 4, 5, 2, 3, 0, 1}, // 180 degree (clockwise)
+{2, 3, 6, 7, 0, 1, 4, 5} }; // 270 degree (clockwise)
+
+private static int mirror_matrix[] =
+{4, 1, 6, 3, 0, 5, 2, 7}; // mirrored
 
 private YuvImageRenderer(
 GLSurfaceView surface, int id,
@@ -381,12 +381,21 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
 }
 if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
 // Need to re-adjust UV coordinates to match display AR.
+boolean adjustU = true;
+float ratio = 0;
 if (displayAspectRatio > videoAspectRatio) {
-texOffsetV = (1.0f - videoAspectRatio / displayAspectRatio) /
+ratio = (1.0f - videoAspectRatio / displayAspectRatio) /
 2.0f;
+adjustU = (rotationDegree == 90 || rotationDegree == 270);
 } else {
-texOffsetU = (1.0f - displayAspectRatio / videoAspectRatio) /
+ratio = (1.0f - displayAspectRatio / videoAspectRatio) /
 2.0f;
+adjustU = (rotationDegree == 0 || rotationDegree == 180);
+}
+if (adjustU) {
+texOffsetU = ratio;
+} else {
+texOffsetV = ratio;
 }
 }
 Log.d(TAG, " Texture vertices: (" + texLeft + "," + texBottom +
@@ -402,37 +411,59 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
 Log.d(TAG, " Texture UV offsets: " + texOffsetU + ", " + texOffsetV);
 float uLeft = texOffsetU;
 float uRight = 1.0f - texOffsetU;
-if (mirror) {
-// Swap U coordinates for mirror image.
-uLeft = 1.0f - texOffsetU;
-uRight = texOffsetU;
-}
 float textureCoordinatesFloat[] = new float[] {
-uLeft, texOffsetV, // left top
-uLeft, 1.0f - texOffsetV, // left bottom
-uRight, texOffsetV, // right top
-uRight, 1.0f - texOffsetV // right bottom
+uLeft, texOffsetV, // top left
+uLeft, 1.0f - texOffsetV, // bottom left
+uRight, texOffsetV, // top right
+uRight, 1.0f - texOffsetV // bottom right
 };
 
-float textureCoordinatesRotatedFloat[];
-if (rotationDegree == 0) {
-textureCoordinatesRotatedFloat = textureCoordinatesFloat;
-} else {
-textureCoordinatesRotatedFloat =
-new float[textureCoordinatesFloat.length];
-int index = rotationDegree / 90;
-for(int i = 0; i < textureCoordinatesFloat.length; i++) {
-textureCoordinatesRotatedFloat[rotation_matrix[index][i]] =
-textureCoordinatesFloat[i];
-}
-}
+// Rotation needs to be done before mirroring.
+textureCoordinatesFloat = applyRotation(textureCoordinatesFloat,
+rotationDegree);
+textureCoordinatesFloat = applyMirror(textureCoordinatesFloat,
+mirror);
 textureCoords =
-directNativeFloatBuffer(textureCoordinatesRotatedFloat);
+directNativeFloatBuffer(textureCoordinatesFloat);
 }
 updateTextureProperties = false;
 }
 }
 
+
+private float[] applyMirror(float textureCoordinatesFloat[],
+boolean mirror) {
+if (!mirror) {
+return textureCoordinatesFloat;
+}
+
+return applyMatrixOperation(textureCoordinatesFloat,
+mirror_matrix);
+}
+
+private float[] applyRotation(float textureCoordinatesFloat[],
+int rotationDegree) {
+if (rotationDegree == 0) {
+return textureCoordinatesFloat;
+}
+
+int index = rotationDegree / 90 - 1;
+return applyMatrixOperation(textureCoordinatesFloat,
+rotation_matrix[index]);
+}
+
+private float[] applyMatrixOperation(float textureCoordinatesFloat[],
+int matrix_operation[]) {
+float textureCoordinatesModifiedFloat[] =
+new float[textureCoordinatesFloat.length];
+
+for(int i = 0; i < textureCoordinatesFloat.length; i++) {
+textureCoordinatesModifiedFloat[matrix_operation[i]] =
+textureCoordinatesFloat[i];
+}
+return textureCoordinatesModifiedFloat;
+}
+
 private void draw() {
 if (!seenFrame) {
 // No frame received yet - nothing to render.
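As an aside between the renderer and capturer hunks: a small standalone C++ sketch of the index-table remapping that the Java renderer change above performs in applyMatrixOperation. The table value at position i is the destination slot of element i in the interleaved (u,v) coordinate array; the names below are illustrative only.

#include <array>
#include <cstdio>

// Same shape as rotation_matrix[0] in the patch: 90 degree (clockwise).
constexpr std::array<int, 8> kRotate90 = {4, 5, 0, 1, 6, 7, 2, 3};

std::array<float, 8> ApplyMapping(const std::array<float, 8>& coords,
                                  const std::array<int, 8>& mapping) {
  std::array<float, 8> out{};
  for (int i = 0; i < 8; ++i) {
    out[mapping[i]] = coords[i];  // Element i moves to slot mapping[i].
  }
  return out;
}

int main() {
  // (0,1) top left, (2,3) bottom left, (4,5) top right, (6,7) bottom right.
  const std::array<float, 8> uv = {0.f, 0.f, 0.f, 1.f, 1.f, 0.f, 1.f, 1.f};
  const std::array<float, 8> rotated = ApplyMapping(uv, kRotate90);
  // The original top-left coordinate now occupies the top-right slot; the
  // other corners shift around the quad accordingly.
  for (float v : rotated) {
    std::printf("%.0f ", v);
  }
  std::printf("\n");
  return 0;
}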
@@ -254,6 +254,8 @@ bool VideoCapturer::MuteToBlackThenPause(bool muted) {
 return Pause(false);
 }
 
+// Note that the last caller decides whether rotation should be applied if there
+// are multiple send streams using the same camera.
 bool VideoCapturer::SetApplyRotation(bool enable) {
 apply_rotation_ = enable;
 if (frame_factory_) {
@@ -83,8 +83,10 @@ class VideoFrame {
 }
 
 // Basic accessors.
+// Note this is the width and height without rotation applied.
 virtual size_t GetWidth() const = 0;
 virtual size_t GetHeight() const = 0;
 
 size_t GetChromaWidth() const { return (GetWidth() + 1) / 2; }
 size_t GetChromaHeight() const { return (GetHeight() + 1) / 2; }
 size_t GetChromaSize() const { return GetUPitch() * GetChromaHeight(); }
@@ -44,6 +44,7 @@
 
 #include "webrtc/base/win32.h" // Need this to #include the impl files.
 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/system_wrappers/interface/field_trial.h"
 
 namespace cricket {
 
@@ -260,10 +261,18 @@ bool WebRtcVideoCapturer::GetBestCaptureFormat(const VideoFormat& desired,
 return true;
 }
 bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
-rtc::CritScope cs(&critical_section_stopping_);
+// Can't take lock here as this will cause deadlock with
+// OnIncomingCapturedFrame. In fact, the whole method, including methods it
+// calls, can't take lock.
 assert(module_);
 
+const std::string group_name =
+webrtc::field_trial::FindFullName("WebRTC-CVO");
+
+if (group_name == "Disabled") {
+return true;
+}
+
 if (!VideoCapturer::SetApplyRotation(enable)) {
 return false;
 }
@@ -425,7 +425,8 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
 WebRtcVideoFrame cricket_frame(
 webrtc_frame.video_frame_buffer(),
 elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
-webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
+webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec,
+webrtc_frame.rotation());
 return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
 }
 
@@ -1161,6 +1162,9 @@ void WebRtcVideoEngine::Construct(ViEWrapper* vie_wrapper,
 rtp_header_extensions_.push_back(
 RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
 kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
+rtp_header_extensions_.push_back(
+RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
+kRtpVideoRotationHeaderExtensionDefaultId));
 }
 
 WebRtcVideoEngine::~WebRtcVideoEngine() {
@@ -2832,6 +2836,10 @@ bool WebRtcVideoMediaChannel::SetCapturer(uint32 ssrc,
 QueueBlackFrame(ssrc, timestamp,
 VideoFormat::FpsToInterval(send_codec_->maxFramerate));
 }
 
+capturer->SetApplyRotation(
+!FindHeaderExtension(send_extensions_, kRtpVideoRotationHeaderExtension));
+
 return true;
 }
 
@@ -2929,6 +2937,8 @@ bool WebRtcVideoMediaChannel::SetRecvRtpHeaderExtensions(
 FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
 const RtpHeaderExtension* send_time_extension =
 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
+const RtpHeaderExtension* cvo_extension =
+FindHeaderExtension(extensions, kRtpVideoRotationHeaderExtension);
 
 // Loop through all receive channels and enable/disable the extensions.
 for (RecvChannelMap::iterator channel_it = recv_channels_.begin();
@@ -2944,6 +2954,10 @@ bool WebRtcVideoMediaChannel::SetRecvRtpHeaderExtensions(
 send_time_extension)) {
 return false;
 }
+if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetReceiveVideoRotationStatus,
+channel_id, cvo_extension)) {
+return false;
+}
 }
 
 receive_extensions_ = extensions;
@@ -2960,6 +2974,8 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
 FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
 const RtpHeaderExtension* send_time_extension =
 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
+const RtpHeaderExtension* cvo_extension =
+FindHeaderExtension(extensions, kRtpVideoRotationHeaderExtension);
 
 // Loop through all send channels and enable/disable the extensions.
 for (SendChannelMap::iterator channel_it = send_channels_.begin();
@@ -2975,6 +2991,10 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
 send_time_extension)) {
 return false;
 }
+if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendVideoRotationStatus,
+channel_id, cvo_extension)) {
+return false;
+}
 }
 
 if (send_time_extension) {
@@ -2986,6 +3006,14 @@ bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
 send_time_extension->id);
 }
 
+// For now assume that all streams want the same CVO setting.
+// TODO(guoweis): Remove the need for this assumption.
+for (const auto& kv : send_channels_) {
+if (kv.second->video_capturer()) {
+kv.second->video_capturer()->SetApplyRotation(!cvo_extension);
+}
+}
+
 send_extensions_ = extensions;
 return true;
 }
@@ -3484,6 +3512,11 @@ bool WebRtcVideoMediaChannel::ConfigureReceiving(int channel_id,
 receive_extensions_, kRtpAbsoluteSenderTimeHeaderExtension)) {
 return false;
 }
+if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetReceiveVideoRotationStatus,
+channel_id, receive_extensions_,
+kRtpVideoRotationHeaderExtension)) {
+return false;
+}
 
 if (receiver_report_ssrc_ != kSsrcUnset) {
 if (engine()->vie()->rtp()->SetLocalSSRC(
@@ -3594,6 +3627,12 @@ bool WebRtcVideoMediaChannel::ConfigureSending(int channel_id,
 return false;
 }
 
+if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendVideoRotationStatus,
+channel_id, send_extensions_,
+kRtpVideoRotationHeaderExtension)) {
+return false;
+}
+
 if (engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id,
 true) != 0) {
 LOG_RTCERR2(SetTransmissionSmoothingStatus, channel_id, true);
@@ -134,6 +134,17 @@ static std::string RtpExtensionsToString(
 return out.str();
 }
 
+inline const webrtc::RtpExtension* FindHeaderExtension(
+const std::vector<webrtc::RtpExtension>& extensions,
+const std::string& name) {
+for (const auto& kv : extensions) {
+if (kv.name == name) {
+return &kv;
+}
+}
+return NULL;
+}
+
 // Merges two fec configs and logs an error if a conflict arises
 // such that merging in diferent order would trigger a diferent output.
 static void MergeFecConfig(const webrtc::FecConfig& other,
@@ -368,6 +379,9 @@ WebRtcVideoEngine2::WebRtcVideoEngine2(WebRtcVoiceEngine* voice_engine)
 rtp_header_extensions_.push_back(
 RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
 kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
+rtp_header_extensions_.push_back(
+RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
+kRtpVideoRotationHeaderExtensionDefaultId));
 }
 
 WebRtcVideoEngine2::~WebRtcVideoEngine2() {
@@ -1135,7 +1149,16 @@ bool WebRtcVideoChannel2::SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
 LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
 return false;
 }
-return send_streams_[ssrc]->SetCapturer(capturer);
+if (!send_streams_[ssrc]->SetCapturer(capturer)) {
+return false;
+}
+
+if (capturer) {
+capturer->SetApplyRotation(
+!FindHeaderExtension(send_rtp_extensions_,
+kRtpVideoRotationHeaderExtension));
+}
+return true;
 }
 
 bool WebRtcVideoChannel2::SendIntraFrame() {
@@ -1258,12 +1281,16 @@ bool WebRtcVideoChannel2::SetSendRtpHeaderExtensions(
 
 send_rtp_extensions_ = filtered_extensions;
 
+const webrtc::RtpExtension* cvo_extension = FindHeaderExtension(
+send_rtp_extensions_, kRtpVideoRotationHeaderExtension);
+
 rtc::CritScope stream_lock(&stream_crit_);
 for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
 send_streams_.begin();
 it != send_streams_.end();
 ++it) {
 it->second->SetRtpExtensions(send_rtp_extensions_);
+it->second->SetApplyRotation(!cvo_extension);
 }
 return true;
 }
@@ -1580,6 +1607,15 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetSsrcs() const {
 return ssrcs_;
 }
 
+void WebRtcVideoChannel2::WebRtcVideoSendStream::SetApplyRotation(
+bool apply_rotation) {
+rtc::CritScope cs(&lock_);
+if (capturer_ == NULL)
+return;
+
+capturer_->SetApplyRotation(apply_rotation);
+}
+
 void WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions(
 const VideoOptions& options) {
 rtc::CritScope cs(&lock_);
@@ -2121,7 +2157,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
 const WebRtcVideoFrame render_frame(
 frame.video_frame_buffer(),
 elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
-frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
+frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
 renderer_->RenderFrame(&render_frame);
 }
 
@@ -284,6 +284,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
 void MuteStream(bool mute);
 bool DisconnectCapturer();
 
+void SetApplyRotation(bool apply_rotation);
+
 void Start();
 void Stop();
 
@@ -530,6 +530,82 @@ TEST_F(WebRtcVideoEngine2Test, SupportsAbsoluteSenderTimeHeaderExtension) {
 FAIL() << "Absolute Sender Time extension not in header-extension list.";
 }
 
+TEST_F(WebRtcVideoEngine2Test, SupportsVideoRotationHeaderExtension) {
+std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
+ASSERT_FALSE(extensions.empty());
+for (size_t i = 0; i < extensions.size(); ++i) {
+if (extensions[i].uri == kRtpVideoRotationHeaderExtension) {
+EXPECT_EQ(kRtpVideoRotationHeaderExtensionDefaultId, extensions[i].id);
+return;
+}
+}
+FAIL() << "Video Rotation extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionBeforeCapturer) {
+// Allocate the capturer first to prevent early destruction before channel's
+// dtor is called.
+cricket::FakeVideoCapturer capturer;
+
+cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
+encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
+std::vector<cricket::VideoCodec> codecs;
+codecs.push_back(kVp8Codec);
+
+rtc::scoped_ptr<VideoMediaChannel> channel(
+SetUpForExternalEncoderFactory(&encoder_factory, codecs));
+EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
+
+// Add CVO extension.
+const int id = 1;
+std::vector<cricket::RtpHeaderExtension> extensions;
+extensions.push_back(
+cricket::RtpHeaderExtension(kRtpVideoRotationHeaderExtension, id));
+EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
+
+// Set capturer.
+EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+
+// Verify capturer has turned off applying rotation.
+EXPECT_FALSE(capturer.GetApplyRotation());
+
+// Verify removing header extension turns on applying rotation.
+extensions.clear();
+EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
+EXPECT_TRUE(capturer.GetApplyRotation());
+}
+
+TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionAfterCapturer) {
+cricket::FakeVideoCapturer capturer;
+
+cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
+encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
+std::vector<cricket::VideoCodec> codecs;
+codecs.push_back(kVp8Codec);
+
+rtc::scoped_ptr<VideoMediaChannel> channel(
+SetUpForExternalEncoderFactory(&encoder_factory, codecs));
+EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
+
+// Set capturer.
+EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+
+// Add CVO extension.
+const int id = 1;
+std::vector<cricket::RtpHeaderExtension> extensions;
+extensions.push_back(
+cricket::RtpHeaderExtension(kRtpVideoRotationHeaderExtension, id));
+EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
+
+// Verify capturer has turned off applying rotation.
+EXPECT_FALSE(capturer.GetApplyRotation());
+
+// Verify removing header extension turns on applying rotation.
+extensions.clear();
+EXPECT_TRUE(channel->SetSendRtpHeaderExtensions(extensions));
+EXPECT_TRUE(capturer.GetApplyRotation());
+}
+
 TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {
 engine_.Init(rtc::Thread::Current());
 rtc::scoped_ptr<VideoMediaChannel> channel(
@@ -1198,21 +1274,34 @@ TEST_F(WebRtcVideoChannel2Test, RecvAbsoluteSendTimeHeaderExtensions) {
 webrtc::RtpExtension::kAbsSendTime);
 }
 
+// Test support for video rotation header extension.
+TEST_F(WebRtcVideoChannel2Test, SendVideoRotationHeaderExtensions) {
+TestSetSendRtpHeaderExtensions(kRtpVideoRotationHeaderExtension,
+webrtc::RtpExtension::kVideoRotation);
+}
+TEST_F(WebRtcVideoChannel2Test, RecvVideoRotationHeaderExtensions) {
+TestSetRecvRtpHeaderExtensions(kRtpVideoRotationHeaderExtension,
+webrtc::RtpExtension::kVideoRotation);
+}
+
 TEST_F(WebRtcVideoChannel2Test, IdenticalSendExtensionsDoesntRecreateStream) {
 const int kTOffsetId = 1;
 const int kAbsSendTimeId = 2;
+const int kVideoRotationId = 3;
 std::vector<cricket::RtpHeaderExtension> extensions;
 extensions.push_back(cricket::RtpHeaderExtension(
 kRtpAbsoluteSenderTimeHeaderExtension, kAbsSendTimeId));
 extensions.push_back(cricket::RtpHeaderExtension(
 kRtpTimestampOffsetHeaderExtension, kTOffsetId));
+extensions.push_back(cricket::RtpHeaderExtension(
+kRtpVideoRotationHeaderExtension, kVideoRotationId));
 
 EXPECT_TRUE(channel_->SetSendRtpHeaderExtensions(extensions));
 FakeVideoSendStream* send_stream =
 AddSendStream(cricket::StreamParams::CreateLegacy(123));
 
 EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
-ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size());
+ASSERT_EQ(3u, send_stream->GetConfig().rtp.extensions.size());
 
 // Setting the same extensions (even if in different order) shouldn't
 // reallocate the stream.
@@ -1231,18 +1320,21 @@ TEST_F(WebRtcVideoChannel2Test, IdenticalSendExtensionsDoesntRecreateStream) {
 TEST_F(WebRtcVideoChannel2Test, IdenticalRecvExtensionsDoesntRecreateStream) {
 const int kTOffsetId = 1;
 const int kAbsSendTimeId = 2;
+const int kVideoRotationId = 3;
 std::vector<cricket::RtpHeaderExtension> extensions;
 extensions.push_back(cricket::RtpHeaderExtension(
 kRtpAbsoluteSenderTimeHeaderExtension, kAbsSendTimeId));
 extensions.push_back(cricket::RtpHeaderExtension(
 kRtpTimestampOffsetHeaderExtension, kTOffsetId));
+extensions.push_back(cricket::RtpHeaderExtension(
+kRtpVideoRotationHeaderExtension, kVideoRotationId));
 
 EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(extensions));
 FakeVideoReceiveStream* send_stream =
 AddRecvStream(cricket::StreamParams::CreateLegacy(123));
 
 EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
-ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size());
+ASSERT_EQ(3u, send_stream->GetConfig().rtp.extensions.size());
 
 // Setting the same extensions (even if in different order) shouldn't
 // reallocate the stream.
@@ -50,6 +50,7 @@
 
 using cricket::kRtpTimestampOffsetHeaderExtension;
 using cricket::kRtpAbsoluteSenderTimeHeaderExtension;
+using cricket::kRtpVideoRotationHeaderExtension;
 
 static const cricket::VideoCodec kVP8Codec720p(100, "VP8", 1280, 720, 30, 0);
 static const cricket::VideoCodec kVP8Codec360p(100, "VP8", 640, 360, 30, 0);
@@ -1005,6 +1006,14 @@ TEST_F(WebRtcVideoEngineTestFake, RecvAbsoluteSendTimeHeaderExtensions) {
 TestSetRecvRtpHeaderExtensions(kRtpAbsoluteSenderTimeHeaderExtension);
 }
 
+// Test support for Coordination of Video Orientation (CVO) header extension.
+TEST_F(WebRtcVideoEngineTestFake, SendVideoRotationHeaderExtensions) {
+TestSetSendRtpHeaderExtensions(kRtpVideoRotationHeaderExtension);
+}
+TEST_F(WebRtcVideoEngineTestFake, RecvVideoRotationHeaderExtensions) {
+TestSetRecvRtpHeaderExtensions(kRtpVideoRotationHeaderExtension);
+}
+
 TEST_F(WebRtcVideoEngineTestFake, LeakyBucketTest) {
 EXPECT_TRUE(SetupEngine());
 
@@ -46,6 +46,19 @@ WebRtcVideoFrame::WebRtcVideoFrame():
 time_stamp_ns_(0),
 rotation_(webrtc::kVideoRotation_0) {}
 
+WebRtcVideoFrame::WebRtcVideoFrame(
+const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+int64_t elapsed_time_ns,
+int64_t time_stamp_ns,
+webrtc::VideoRotation rotation)
+: video_frame_buffer_(buffer),
+pixel_width_(1),
+pixel_height_(1),
+elapsed_time_ns_(elapsed_time_ns),
+time_stamp_ns_(time_stamp_ns),
+rotation_(rotation) {
+}
+
 WebRtcVideoFrame::WebRtcVideoFrame(
 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
 int64_t elapsed_time_ns,
@@ -62,7 +75,8 @@ WebRtcVideoFrame::WebRtcVideoFrame(webrtc::NativeHandle* handle,
 int width,
 int height,
 int64_t elapsed_time_ns,
-int64_t time_stamp_ns)
+int64_t time_stamp_ns,
+webrtc::VideoRotation rotation)
 : video_frame_buffer_(
 new rtc::RefCountedObject<webrtc::TextureBuffer>(handle,
 width,
@@ -71,7 +85,7 @@ WebRtcVideoFrame::WebRtcVideoFrame(webrtc::NativeHandle* handle,
 pixel_height_(1),
 elapsed_time_ns_(elapsed_time_ns),
 time_stamp_ns_(time_stamp_ns),
-rotation_(webrtc::kVideoRotation_0) {
+rotation_(rotation) {
 }
 
 WebRtcVideoFrame::~WebRtcVideoFrame() {}
@@ -176,10 +190,9 @@ WebRtcVideoFrame::GetVideoFrameBuffer() const {
 
 VideoFrame* WebRtcVideoFrame::Copy() const {
 WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
-video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_);
+video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_, rotation_);
 new_frame->pixel_width_ = pixel_width_;
 new_frame->pixel_height_ = pixel_height_;
-new_frame->rotation_ = rotation_;
 return new_frame;
 }
 
@@ -42,14 +42,22 @@ struct CapturedFrame;
 class WebRtcVideoFrame : public VideoFrame {
 public:
 WebRtcVideoFrame();
+WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+int64_t elapsed_time_ns,
+int64_t time_stamp_ns,
+webrtc::VideoRotation rotation);
+
+// TODO(guoweis): Remove this when chrome code base is updated.
 WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
 int64_t elapsed_time_ns,
 int64_t time_stamp_ns);
 
 WebRtcVideoFrame(webrtc::NativeHandle* handle,
 int width,
 int height,
 int64_t elapsed_time_ns,
-int64_t time_stamp_ns);
+int64_t time_stamp_ns,
+webrtc::VideoRotation rotation);
 ~WebRtcVideoFrame();
 
 // Creates a frame from a raw sample with FourCC "format" and size "w" x "h".
@@ -340,7 +340,8 @@ TEST_F(WebRtcVideoFrameTest, InitRotated90DontApplyRotation) {
 
 TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
 NativeHandleImpl handle;
-cricket::WebRtcVideoFrame frame(&handle, 640, 480, 100, 200);
+cricket::WebRtcVideoFrame frame(&handle, 640, 480, 100, 200,
+webrtc::kVideoRotation_0);
 EXPECT_EQ(&handle, frame.GetNativeHandle());
 EXPECT_EQ(640u, frame.GetWidth());
 EXPECT_EQ(480u, frame.GetHeight());
@@ -354,7 +355,8 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
 
 TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
 NativeHandleImpl handle;
-cricket::WebRtcVideoFrame frame1(&handle, 640, 480, 100, 200);
+cricket::WebRtcVideoFrame frame1(&handle, 640, 480, 100, 200,
+webrtc::kVideoRotation_0);
 cricket::VideoFrame* frame2 = frame1.Copy();
 EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
 EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth());
@@ -34,6 +34,17 @@ void RtpHeaderExtensionMap::Erase() {
 
 int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
 const uint8_t id) {
+return Register(type, id, true);
+}
+
+int32_t RtpHeaderExtensionMap::RegisterInactive(const RTPExtensionType type,
+const uint8_t id) {
+return Register(type, id, false);
+}
+
+int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
+const uint8_t id,
+bool active) {
 if (id < 1 || id > 14) {
 return -1;
 }
@@ -47,12 +58,24 @@ int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
 }
 // This extension type is already registered with this id,
 // so return success.
+it->second->active = active;
 return 0;
 }
-extensionMap_[id] = new HeaderExtension(type);
+extensionMap_[id] = new HeaderExtension(type, active);
 return 0;
 }
 
+bool RtpHeaderExtensionMap::SetActive(const RTPExtensionType type,
+bool active) {
+for (auto& kv : extensionMap_) {
+if (kv.second->type == type) {
+kv.second->active = active;
+return true;
+}
+}
+return false;
+}
+
 int32_t RtpHeaderExtensionMap::Deregister(const RTPExtensionType type) {
 uint8_t id;
 if (GetId(type, &id) != 0) {
@@ -113,7 +136,9 @@ size_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
 extensionMap_.begin();
 while (it != extensionMap_.end()) {
 HeaderExtension* extension = it->second;
-length += extension->length;
+if (extension->active) {
+length += extension->length;
+}
 it++;
 }
 // Add RTP extension header length.
@@ -140,8 +165,11 @@ int32_t RtpHeaderExtensionMap::GetLengthUntilBlockStartInBytes(
 while (it != extensionMap_.end()) {
 HeaderExtension* extension = it->second;
 if (extension->type == type) {
+if (!extension->active) {
+return -1;
+}
 break;
-} else {
+} else if (extension->active) {
 length += extension->length;
 }
 it++;
@@ -150,17 +178,23 @@ int32_t RtpHeaderExtensionMap::GetLengthUntilBlockStartInBytes(
 }
 
 int32_t RtpHeaderExtensionMap::Size() const {
-return extensionMap_.size();
+int32_t count = 0;
+for (auto& kv : extensionMap_) {
+if (kv.second->active) {
+count++;
+}
+}
+return count;
 }
 
 RTPExtensionType RtpHeaderExtensionMap::First() const {
-std::map<uint8_t, HeaderExtension*>::const_iterator it =
-extensionMap_.begin();
-if (it == extensionMap_.end()) {
-return kRtpExtensionNone;
+for (auto& kv : extensionMap_) {
+if (kv.second->active) {
+return kv.second->type;
+}
 }
-HeaderExtension* extension = it->second;
-return extension->type;
+return kRtpExtensionNone;
 }
 
 RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
@@ -170,15 +204,16 @@ RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
 }
 std::map<uint8_t, HeaderExtension*>::const_iterator it =
 extensionMap_.find(id);
-if (it == extensionMap_.end()) {
+if (it == extensionMap_.end() || !it->second->active) {
 return kRtpExtensionNone;
 }
-it++;
-if (it == extensionMap_.end()) {
-return kRtpExtensionNone;
+while ((++it) != extensionMap_.end()) {
+if (it->second->active) {
+return it->second->type;
+}
 }
-HeaderExtension* extension = it->second;
-return extension->type;
+return kRtpExtensionNone;
 }
 
 void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
@@ -187,7 +222,7 @@ void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
 extensionMap_.begin();
 while (it != extensionMap_.end()) {
 HeaderExtension* extension = it->second;
-map->Register(extension->type, it->first);
+map->Register(extension->type, it->first, extension->active);
 it++;
 }
 }
@@ -29,8 +29,16 @@ const size_t kTransportSequenceNumberLength = 3;
 
 struct HeaderExtension {
 HeaderExtension(RTPExtensionType extension_type)
-: type(extension_type),
-length(0) {
+: type(extension_type), length(0), active(true) {
+Init();
+}
+
+HeaderExtension(RTPExtensionType extension_type, bool active)
+: type(extension_type), length(0), active(active) {
+Init();
+}
+
+void Init() {
 // TODO(solenberg): Create handler classes for header extensions so we can
 // get rid of switches like these as well as handling code spread out all
 // over.
@@ -57,6 +65,7 @@ struct HeaderExtension {
 
 const RTPExtensionType type;
 uint8_t length;
+bool active;
 };
 
 class RtpHeaderExtensionMap {
@@ -68,6 +77,13 @@ class RtpHeaderExtensionMap {
 
 int32_t Register(const RTPExtensionType type, const uint8_t id);
 
+// Active is a concept for a registered rtp header extension which doesn't
+// take effect yet until being activated. Inactive RTP header extensions do
+// not take effect and should not be included in size calculations until they
+// are activated.
+int32_t RegisterInactive(const RTPExtensionType type, const uint8_t id);
+bool SetActive(const RTPExtensionType type, bool active);
+
 int32_t Deregister(const RTPExtensionType type);
 
 bool IsRegistered(RTPExtensionType type) const;
@@ -76,6 +92,10 @@ class RtpHeaderExtensionMap {
 
 int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
 
+//
+// Methods below ignore any inactive rtp header extensions.
+//
+
 size_t GetTotalLengthInBytes() const;
 
 int32_t GetLengthUntilBlockStartInBytes(const RTPExtensionType type) const;
@@ -89,6 +109,7 @@ class RtpHeaderExtensionMap {
 RTPExtensionType Next(RTPExtensionType type) const;
 
 private:
+int32_t Register(const RTPExtensionType type, const uint8_t id, bool active);
 std::map<uint8_t, HeaderExtension*> extensionMap_;
 };
 }
@@ -35,9 +35,16 @@ const uint8_t RtpHeaderExtensionTest::kId = 3;
 TEST_F(RtpHeaderExtensionTest, Register) {
 EXPECT_EQ(0, map_.Size());
 EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_TRUE(map_.IsRegistered(kRtpExtensionTransmissionTimeOffset));
 EXPECT_EQ(1, map_.Size());
 EXPECT_EQ(0, map_.Deregister(kRtpExtensionTransmissionTimeOffset));
 EXPECT_EQ(0, map_.Size());
+
+EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_EQ(0, map_.Size());
+EXPECT_TRUE(map_.IsRegistered(kRtpExtensionTransmissionTimeOffset));
+EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
+EXPECT_EQ(1, map_.Size());
 }
 
 TEST_F(RtpHeaderExtensionTest, RegisterIllegalArg) {
@@ -56,10 +63,14 @@ TEST_F(RtpHeaderExtensionTest, Idempotent) {
 TEST_F(RtpHeaderExtensionTest, NonUniqueId) {
 EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
 EXPECT_EQ(-1, map_.Register(kRtpExtensionAudioLevel, kId));
+EXPECT_EQ(-1, map_.RegisterInactive(kRtpExtensionAudioLevel, kId));
 }
 
 TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
 EXPECT_EQ(0u, map_.GetTotalLengthInBytes());
+EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_EQ(0u, map_.GetTotalLengthInBytes());
+
 EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
 EXPECT_EQ(kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength,
 map_.GetTotalLengthInBytes());
@@ -68,7 +79,11 @@ TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
 TEST_F(RtpHeaderExtensionTest, GetLengthUntilBlockStart) {
 EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
 kRtpExtensionTransmissionTimeOffset));
-EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
+kRtpExtensionTransmissionTimeOffset));
+
+EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
 EXPECT_EQ(static_cast<int>(kRtpOneByteHeaderLength),
 map_.GetLengthUntilBlockStartInBytes(
 kRtpExtensionTransmissionTimeOffset));
@@ -96,7 +111,11 @@ TEST_F(RtpHeaderExtensionTest, IterateTypes) {
 EXPECT_EQ(kRtpExtensionNone, map_.First());
 EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
 
-EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+EXPECT_EQ(0, map_.RegisterInactive(kRtpExtensionTransmissionTimeOffset, kId));
+
+EXPECT_EQ(kRtpExtensionNone, map_.First());
+
+EXPECT_TRUE(map_.SetActive(kRtpExtensionTransmissionTimeOffset, true));
+
 EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, map_.First());
 EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
@@ -63,13 +63,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
 const size_t payload_data_length =
 payload_length - rtp_header->header.paddingLength;
 
-// Retrieve the video rotation information.
-rtp_header->type.Video.rotation = kVideoRotation_0;
-if (rtp_header->header.extension.hasVideoRotation) {
-rtp_header->type.Video.rotation = ConvertCVOByteToVideoRotation(
-rtp_header->header.extension.videoRotation);
-}
-
 if (payload == NULL || payload_data_length == 0) {
 return data_callback_->OnReceivedPayloadData(NULL, 0, rtp_header) == 0 ? 0
 : -1;
@@ -90,6 +83,14 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
 
 rtp_header->frameType = parsed_payload.frame_type;
 rtp_header->type = parsed_payload.type;
+rtp_header->type.Video.rotation = kVideoRotation_0;
+
+// Retrieve the video rotation information.
+if (rtp_header->header.extension.hasVideoRotation) {
+rtp_header->type.Video.rotation = ConvertCVOByteToVideoRotation(
+rtp_header->header.extension.videoRotation);
+}
+
 return data_callback_->OnReceivedPayloadData(parsed_payload.payload,
 parsed_payload.payload_length,
 rtp_header) == 0
@@ -129,6 +129,7 @@ RTPSender::RTPSender(int32_t id,
 transmission_time_offset_(0),
 absolute_send_time_(0),
 rotation_(kVideoRotation_0),
+cvo_mode_(kCVONone),
 transport_sequence_number_(0),
 // NACK.
 nack_byte_count_times_(),
@@ -266,6 +267,10 @@ int32_t RTPSender::SetTransportSequenceNumber(uint16_t sequence_number) {
 int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type,
 uint8_t id) {
 CriticalSectionScoped cs(send_critsect_.get());
+if (type == kRtpExtensionVideoRotation) {
+cvo_mode_ = kCVOInactive;
+return rtp_header_extension_map_.RegisterInactive(type, id);
+}
 return rtp_header_extension_map_.Register(type, id);
 }
 
@@ -462,6 +467,16 @@ int32_t RTPSender::CheckPayloadType(int8_t payload_type,
 return 0;
 }
 
+RTPSenderInterface::CVOMode RTPSender::ActivateCVORtpHeaderExtension() {
+if (cvo_mode_ == kCVOInactive) {
+CriticalSectionScoped cs(send_critsect_.get());
+if (rtp_header_extension_map_.SetActive(kRtpExtensionVideoRotation, true)) {
+cvo_mode_ = kCVOActivated;
+}
+}
+return cvo_mode_;
+}
+
 int32_t RTPSender::SendOutgoingData(FrameType frame_type,
 int8_t payload_type,
 uint32_t capture_timestamp,
@@ -1201,8 +1216,7 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer,
 block_length = BuildAbsoluteSendTimeExtension(extension_data);
 break;
 case kRtpExtensionVideoRotation:
-if (marker_bit)
-block_length = BuildVideoRotationExtension(extension_data);
+block_length = BuildVideoRotationExtension(extension_data);
 break;
 case kRtpExtensionTransportSequenceNumber:
 block_length = BuildTransportSequenceNumberExtension(extension_data);
@@ -41,6 +41,14 @@ class RTPSenderInterface {
 RTPSenderInterface() {}
 virtual ~RTPSenderInterface() {}
 
+enum CVOMode {
+kCVONone,
+kCVOInactive, // CVO rtp header extension is registered but haven't
+// received any frame with rotation pending.
+kCVOActivated, // CVO rtp header extension will be present in the rtp
+// packets.
+};
+
 virtual uint32_t SSRC() const = 0;
 virtual uint32_t Timestamp() const = 0;
 
@@ -70,6 +78,7 @@ class RTPSenderInterface {
 const RTPHeader& rtp_header,
 VideoRotation rotation) const = 0;
 virtual bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) = 0;
+virtual CVOMode ActivateCVORtpHeaderExtension() = 0;
 };
 
 class RTPSender : public RTPSenderInterface {
@ -285,6 +294,7 @@ class RTPSender : public RTPSenderInterface {
|
|||||||
RtpState GetRtpState() const;
|
RtpState GetRtpState() const;
|
||||||
void SetRtxRtpState(const RtpState& rtp_state);
|
void SetRtxRtpState(const RtpState& rtp_state);
|
||||||
RtpState GetRtxRtpState() const;
|
RtpState GetRtxRtpState() const;
|
||||||
|
CVOMode ActivateCVORtpHeaderExtension() override;
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
int32_t CheckPayloadType(int8_t payload_type, RtpVideoCodecTypes* video_type);
|
int32_t CheckPayloadType(int8_t payload_type, RtpVideoCodecTypes* video_type);
|
||||||
@ -378,6 +388,7 @@ class RTPSender : public RTPSenderInterface {
|
|||||||
int32_t transmission_time_offset_;
|
int32_t transmission_time_offset_;
|
||||||
uint32_t absolute_send_time_;
|
uint32_t absolute_send_time_;
|
||||||
VideoRotation rotation_;
|
VideoRotation rotation_;
|
||||||
|
CVOMode cvo_mode_;
|
||||||
uint16_t transport_sequence_number_;
|
uint16_t transport_sequence_number_;
|
||||||
|
|
||||||
// NACK
|
// NACK
|
||||||
|
@@ -186,7 +186,6 @@ class RtpSenderVideoTest : public RtpSenderTest {
     }
     ASSERT_TRUE(rtp_parser.Parse(rtp_header, map));
     ASSERT_FALSE(rtp_parser.RTCP());
-    EXPECT_EQ(expect_cvo, rtp_header.markerBit);
     EXPECT_EQ(payload_, rtp_header.payloadType);
     EXPECT_EQ(seq_num, rtp_header.sequenceNumber);
     EXPECT_EQ(kTimestamp, rtp_header.timestamp);
@@ -254,6 +253,7 @@ TEST_F(RtpSenderTest, RegisterRtpHeaderExtensions) {
             rtp_sender_->RtpHeaderExtensionTotalLength());
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
+  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
   EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
                                     kTransmissionTimeOffsetLength +
                                     kAbsoluteSendTimeLength +
@@ -286,6 +286,9 @@ TEST_F(RtpSenderTest, RegisterRtpVideoRotationHeaderExtension) {
   EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
+  EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
+
+  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
   EXPECT_EQ(
       RtpUtility::Word32Align(kRtpOneByteHeaderLength + kVideoRotationLength),
       rtp_sender_->RtpHeaderExtensionTotalLength());
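The length check right above relies on 32-bit alignment of the extension block. A hedged worked example follows; the concrete sizes (a 4-byte one-byte-header preamble and a 2-byte rotation element) are stated assumptions, not values read from the real WebRTC headers.

#include <cstddef>

// Round a byte count up to the next multiple of four, since RTP header
// extensions are carried in 32-bit words.
constexpr size_t Word32AlignSketch(size_t bytes) {
  return (bytes + 3) & ~static_cast<size_t>(3);
}

// Assumed sizes: 4 bytes of extension preamble + 2 bytes for the CVO element
// (ID/length byte plus one data byte) pads out to 8 bytes on the wire.
static_assert(Word32AlignSketch(4 + 2) == 8, "CVO-only extension block");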
@@ -424,6 +427,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_MarkerBit) {
   rtp_sender_->SetVideoRotation(kRotation);
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
+  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
 
   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);
@@ -447,10 +451,11 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_MarkerBit) {
 }
 
 // Test CVO header extension is not set when marker bit is false.
-TEST_F(RtpSenderTest, BuildRTPPacketWithVideoRotation_NoMarkerBit) {
+TEST_F(RtpSenderTest, DISABLED_BuildRTPPacketWithVideoRotation_NoMarkerBit) {
   rtp_sender_->SetVideoRotation(kRotation);
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
+  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
 
   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);
@@ -1333,13 +1338,15 @@ TEST_F(RtpSenderTest, BytesReportedCorrectly) {
             rtx_stats.transmitted.TotalBytes());
 }
 
-// Verify that only the last packet of a frame has CVO byte set.
+// Verify that all packets of a frame have CVO byte set.
 TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
   RTPVideoHeader hdr = {0};
   hdr.rotation = kVideoRotation_90;
 
   EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
                    kRtpExtensionVideoRotation, kVideoRotationExtensionId));
+  EXPECT_TRUE(rtp_sender_->ActivateCVORtpHeaderExtension());
+
   EXPECT_EQ(
       RtpUtility::Word32Align(kRtpOneByteHeaderLength + kVideoRotationLength),
       rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -1351,13 +1358,12 @@ TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);
 
-  // Verify that this packet doesn't have CVO byte.
+  // Verify that this packet does have CVO byte.
   VerifyCVOPacket(
       reinterpret_cast<uint8_t*>(transport_.sent_packets_[0]->data()),
-      transport_.sent_packets_[0]->size(), false, &map, kSeqNum,
-      kVideoRotation_0);
+      transport_.sent_packets_[0]->length(), true, &map, kSeqNum, hdr.rotation);
 
-  // Verify that this packet doesn't have CVO byte.
+  // Verify that this packet does have CVO byte.
   VerifyCVOPacket(
       reinterpret_cast<uint8_t*>(transport_.sent_packets_[1]->data()),
       transport_.sent_packets_[1]->size(), true, &map, kSeqNum + 1,
@@ -301,6 +301,13 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
                           const size_t payloadSize,
                           const RTPFragmentationHeader* fragmentation,
                           const RTPVideoHeader* rtpHdr) {
+  // Activate the CVO rtp header extension the first time a frame with pending
+  // rotation is received.
+  RTPSenderInterface::CVOMode cvo_mode = RTPSenderInterface::kCVONone;
+  if (rtpHdr && rtpHdr->rotation != kVideoRotation_0) {
+    cvo_mode = _rtpSender.ActivateCVORtpHeaderExtension();
+  }
+
   uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
   size_t payload_bytes_to_send = payloadSize;
   const uint8_t* data = payloadData;
@@ -341,13 +348,16 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
   // packet in each group of packets which make up another type of frame
   // (e.g. a P-Frame) only if the current value is different from the previous
   // value sent.
-  // Here we are adding it to the last packet of every frame at this point.
+  // Here we are adding it to every packet of every frame at this point.
   if (!rtpHdr) {
     assert(!_rtpSender.IsRtpHeaderExtensionRegistered(
         kRtpExtensionVideoRotation));
-  } else if (last) {
+  } else if (cvo_mode == RTPSenderInterface::kCVOActivated) {
     // Checking whether CVO header extension is registered will require taking
     // a lock. It'll be a no-op if it's not registered.
+    // TODO(guoweis): For now, all packets sent will carry the CVO such that
+    // the RTP header length is consistent, although the receiver side will
+    // only examine the packets with the marker bit set.
     size_t packetSize = payloadSize + rtp_header_length;
     RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
     RTPHeader rtp_header;
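The TODO above reflects a packetization constraint: the frame's payload is split against a single precomputed RTP header length, so adding the extension to only some packets of the frame would change how much payload each packet may carry. A hedged sketch of that arithmetic; the function name and parameters are illustrative, not WebRTC's:

#include <cstddef>

// With the CVO extension active on every packet, the header length is the same
// for the whole frame, so the split below stays valid for each packet.
size_t PacketsNeededSketch(size_t payload_size,
                           size_t max_packet_size,
                           size_t rtp_header_length) {
  const size_t capacity = max_packet_size - rtp_header_length;
  return (payload_size + capacity - 1) / capacity;  // Ceiling division.
}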
@@ -263,7 +263,10 @@ int32_t VideoCaptureImpl::IncomingFrame(
   int target_width = width;
   int target_height = height;
 
-  if (apply_rotation_) {
+  // SetApplyRotation doesn't take any lock. Make a local copy here.
+  bool apply_rotation = apply_rotation_;
+
+  if (apply_rotation) {
     // Rotating resolution when for 90/270 degree rotations.
     if (_rotateFrame == kVideoRotation_90 ||
         _rotateFrame == kVideoRotation_270) {
@@ -290,7 +293,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
   const int conversionResult = ConvertToI420(
       commonVideoType, videoFrame, 0, 0,  // No cropping
       width, height, videoFrameLength,
-      apply_rotation_ ? _rotateFrame : kVideoRotation_0, &_captureFrame);
+      apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
   if (conversionResult < 0)
   {
     LOG(LS_ERROR) << "Failed to convert capture frame from type "
@@ -298,7 +301,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
     return -1;
   }
 
-  if (!apply_rotation_) {
+  if (!apply_rotation) {
     _captureFrame.set_rotation(_rotateFrame);
   } else {
     _captureFrame.set_rotation(kVideoRotation_0);
@@ -335,8 +338,8 @@ void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
 }
 
 bool VideoCaptureImpl::SetApplyRotation(bool enable) {
-  CriticalSectionScoped cs(&_apiCs);
-  CriticalSectionScoped cs2(&_callBackCs);
+  // We can't take any lock here as it'll cause deadlock with IncomingFrame.
+
   // The effect of this is the last caller wins.
   apply_rotation_ = enable;
   return true;
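SetApplyRotation now writes the flag without holding the capture locks, and IncomingFrame samples it once into a local so a mid-frame change cannot split one frame across two behaviors. A hedged sketch of that pattern in isolation, using std::atomic<bool> for a well-defined version of the idea; the class and member here are illustrative stand-ins, not the actual capture module:

#include <atomic>

// Illustrative stand-in: the setter stores the flag without a lock, and the
// frame path reads it exactly once so the whole frame is handled consistently.
class RotationFlagSketch {
 public:
  void SetApplyRotation(bool enable) { apply_rotation_.store(enable); }

  void IncomingFrame() {
    const bool apply_rotation = apply_rotation_.load();
    if (apply_rotation) {
      // ... rotate the pixels before handing the frame on ...
    } else {
      // ... leave the pixels alone and attach rotation metadata instead ...
    }
  }

 private:
  std::atomic<bool> apply_rotation_{false};
};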
@@ -99,6 +99,9 @@ void VCMPacket::Reset() {
 }
 
 void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader) {
+  if (markerBit) {
+    codecSpecificHeader.rotation = videoHeader.rotation;
+  }
   switch (videoHeader.codec) {
     case kRtpVideoVp8:
       // Handle all packets within a frame as depending on the previous packet
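On the receive side, rotation is only copied from the packet when the marker bit is set, that is, on the last packet of a frame, which matches the sender-side note that receivers only examine marker-bit packets. A hedged, self-contained restatement of that rule with simplified types, not WebRTC's VCMPacket:

// Simplified stand-ins for the parsed packet and the per-frame rotation state.
struct ParsedPacketSketch {
  bool marker_bit;
  int rotation_degrees;  // From the CVO extension, if present.
};

// Only the marker-bit packet updates the frame's rotation; earlier packets of
// the frame leave the previously known value untouched.
int FrameRotationSketch(const ParsedPacketSketch& packet, int current_rotation) {
  return packet.marker_bit ? packet.rotation_degrees : current_rotation;
}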