Make RTPSender/RTPReceiver generic.

Changes include:
1) Introduce class RtpPacketizerGeneric & RtpDePacketizerGeneric.
2) Introduce class RtpDepacketizerVp8.
3) Make RTPSenderVideo::SendH264 generic and used by all packetizers.
4) Move codec specific functions from RTPSenderVideo/RTPReceiverVideo to
RtpPacketizer/RtpDePacketizer sub-classes.

R=pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/26399004

Patch from Changbin Shao <changbin.shao@intel.com>.

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7163 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pbos@webrtc.org 2014-09-12 11:05:55 +00:00
parent 6071b0636d
commit b5e6bfc76a
17 changed files with 722 additions and 663 deletions

View File

@ -85,6 +85,7 @@ source_set("rtp_rtcp") {
"source/rtp_format_h264.h",
"source/rtp_format_vp8.cc",
"source/rtp_format_vp8.h",
"source/rtp_format_video_generic.cc",
"source/rtp_format_video_generic.h",
"source/vp8_partition_aggregator.cc",
"source/vp8_partition_aggregator.h",

View File

@ -63,6 +63,11 @@ enum RTPAliveType
kRtpAlive = 2
};
enum ProtectionType {
kUnprotectedPacket,
kProtectedPacket
};
enum StorageType {
kDontStore,
kDontRetransmit,

View File

@ -11,16 +11,23 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_h264.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
namespace webrtc {
RtpPacketizer* RtpPacketizer::Create(RtpVideoCodecTypes type,
size_t max_payload_len) {
size_t max_payload_len,
const RTPVideoTypeHeader* rtp_type_header,
FrameType frame_type) {
switch (type) {
case kRtpVideoH264:
return new RtpPacketizerH264(max_payload_len);
case kRtpVideoNone:
case kRtpVideoGeneric:
return new RtpPacketizerH264(frame_type, max_payload_len);
case kRtpVideoVp8:
assert(rtp_type_header != NULL);
return new RtpPacketizerVp8(rtp_type_header->VP8, max_payload_len);
case kRtpVideoGeneric:
return new RtpPacketizerGeneric(frame_type, max_payload_len);
case kRtpVideoNone:
assert(false);
}
return NULL;
@ -31,9 +38,11 @@ RtpDepacketizer* RtpDepacketizer::Create(RtpVideoCodecTypes type,
switch (type) {
case kRtpVideoH264:
return new RtpDepacketizerH264(callback);
case kRtpVideoNone:
case kRtpVideoGeneric:
case kRtpVideoVp8:
return new RtpDepacketizerVp8(callback);
case kRtpVideoGeneric:
return new RtpDepacketizerGeneric(callback);
case kRtpVideoNone:
assert(false);
}
return NULL;

View File

@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H_
#include <string>
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
@ -19,7 +21,10 @@ namespace webrtc {
class RtpPacketizer {
public:
static RtpPacketizer* Create(RtpVideoCodecTypes type, size_t max_payload_len);
static RtpPacketizer* Create(RtpVideoCodecTypes type,
size_t max_payload_len,
const RTPVideoTypeHeader* rtp_type_header,
FrameType frame_type);
virtual ~RtpPacketizer() {}
@ -37,6 +42,12 @@ class RtpPacketizer {
virtual bool NextPacket(uint8_t* buffer,
size_t* bytes_to_send,
bool* last_packet) = 0;
virtual ProtectionType GetProtectionType() = 0;
virtual StorageType GetStorageType(uint32_t retransmission_settings) = 0;
virtual std::string ToString() = 0;
};
class RtpDepacketizer {

View File

@ -94,8 +94,12 @@ void ParseFuaNalu(WebRtcRTPHeader* rtp_header,
}
} // namespace
RtpPacketizerH264::RtpPacketizerH264(size_t max_payload_len)
: payload_data_(NULL), payload_size_(0), max_payload_len_(max_payload_len) {
RtpPacketizerH264::RtpPacketizerH264(FrameType frame_type,
size_t max_payload_len)
: payload_data_(NULL),
payload_size_(0),
max_payload_len_(max_payload_len),
frame_type_(frame_type) {
}
RtpPacketizerH264::~RtpPacketizerH264() {
@ -272,6 +276,20 @@ void RtpPacketizerH264::NextFragmentPacket(uint8_t* buffer,
packets_.pop();
}
// For H264, only key frames are marked for FEC protection; delta frames are
// sent unprotected.
ProtectionType RtpPacketizerH264::GetProtectionType() {
return (frame_type_ == kVideoFrameKey) ? kProtectedPacket
: kUnprotectedPacket;
}
// Retransmission settings are ignored for H264; packets are always eligible
// for retransmission.
StorageType RtpPacketizerH264::GetStorageType(
uint32_t retransmission_settings) {
return kAllowRetransmission;
}
// Human-readable name of this packetizer, used for logging/diagnostics.
std::string RtpPacketizerH264::ToString() {
return "RtpPacketizerH264";
}
RtpDepacketizerH264::RtpDepacketizerH264(RtpData* const callback)
: callback_(callback) {
}

View File

@ -21,7 +21,7 @@ class RtpPacketizerH264 : public RtpPacketizer {
public:
// Initialize with payload from encoder.
// The payload_data must be exactly one encoded H264 frame.
explicit RtpPacketizerH264(size_t max_payload_len);
RtpPacketizerH264(FrameType frame_type, size_t max_payload_len);
virtual ~RtpPacketizerH264();
@ -41,6 +41,12 @@ class RtpPacketizerH264 : public RtpPacketizer {
size_t* bytes_to_send,
bool* last_packet) OVERRIDE;
virtual ProtectionType GetProtectionType() OVERRIDE;
virtual StorageType GetStorageType(uint32_t retrasmission_settings) OVERRIDE;
virtual std::string ToString() OVERRIDE;
private:
struct Packet {
Packet(size_t offset,
@ -78,6 +84,7 @@ class RtpPacketizerH264 : public RtpPacketizer {
const size_t max_payload_len_;
RTPFragmentationHeader fragmentation_;
PacketQueue packets_;
FrameType frame_type_;
DISALLOW_COPY_AND_ASSIGN(RtpPacketizerH264);
};

View File

@ -88,8 +88,8 @@ void TestFua(size_t frame_size,
fragmentation.VerifyAndAllocateFragmentationHeader(1);
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = frame_size;
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, max_payload_size));
scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
kRtpVideoH264, max_payload_size, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame.get(), frame_size, &fragmentation);
scoped_ptr<uint8_t[]> packet(new uint8_t[max_payload_size]);
@ -163,7 +163,7 @@ TEST(RtpPacketizerH264Test, TestSingleNalu) {
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = sizeof(frame);
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize));
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame, sizeof(frame), &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
size_t length = 0;
@ -192,7 +192,7 @@ TEST(RtpPacketizerH264Test, TestSingleNaluTwoPackets) {
frame[fragmentation.fragmentationOffset[1]] = 0x01;
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize));
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
@ -229,7 +229,7 @@ TEST(RtpPacketizerH264Test, TestStapA) {
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize));
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
@ -264,7 +264,7 @@ TEST(RtpPacketizerH264Test, TestTooSmallForStapAHeaders) {
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize));
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
@ -312,7 +312,7 @@ TEST(RtpPacketizerH264Test, TestMixedStapA_FUA) {
}
}
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize));
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kFrameEmpty));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
// First expecting two FU-A packets.

View File

@ -0,0 +1,112 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string>
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
namespace webrtc {
static const size_t kGenericHeaderLength = 1;
// Packetizer for the generic video payload format: each packet carries a
// one-byte generic header followed by a chunk of raw payload. frame_type is
// latched so key frames can be flagged in the header.
RtpPacketizerGeneric::RtpPacketizerGeneric(FrameType frame_type,
size_t max_payload_len)
: payload_data_(NULL),
payload_size_(0),
// Reserve room for the one-byte generic header in every packet.
max_payload_len_(max_payload_len - kGenericHeaderLength),
frame_type_(frame_type) {
}
RtpPacketizerGeneric::~RtpPacketizerGeneric() {
}
// Stores a pointer to one encoded frame and precomputes an even split of the
// frame across packets: rather than N-1 full packets plus one small
// remainder, the frame is divided into num_packets chunks of nearly equal
// size. The fragmentation argument is unused by the generic format.
void RtpPacketizerGeneric::SetPayloadData(
const uint8_t* payload_data,
size_t payload_size,
const RTPFragmentationHeader* fragmentation) {
payload_data_ = payload_data;
payload_size_ = payload_size;
// Fragment packets more evenly by splitting the payload up evenly.
uint32_t num_packets =
(payload_size_ + max_payload_len_ - 1) / max_payload_len_;  // Ceiling division.
payload_length_ = (payload_size_ + num_packets - 1) / num_packets;
assert(payload_length_ <= max_payload_len_);
// The first packet of the frame carries the first-packet bit.
generic_header_ = RtpFormatVideoGeneric::kFirstPacketBit;
}
// Writes the next packet (generic header byte + payload chunk) into buffer.
// bytes_to_send receives the total number of bytes written; last_packet is
// set when the whole frame has been consumed. Always returns true.
bool RtpPacketizerGeneric::NextPacket(uint8_t* buffer,
size_t* bytes_to_send,
bool* last_packet) {
// Clamp the final chunk to whatever payload remains.
if (payload_size_ < payload_length_) {
payload_length_ = payload_size_;
}
payload_size_ -= payload_length_;
*bytes_to_send = payload_length_ + kGenericHeaderLength;
assert(payload_length_ <= max_payload_len_);
uint8_t* out_ptr = buffer;
// Put generic header in packet
if (frame_type_ == kVideoFrameKey) {
generic_header_ |= RtpFormatVideoGeneric::kKeyFrameBit;
}
*out_ptr++ = generic_header_;
// Remove first-packet bit, following packets are intermediate
generic_header_ &= ~RtpFormatVideoGeneric::kFirstPacketBit;
// Put payload in packet
memcpy(out_ptr, payload_data_, payload_length_);
payload_data_ += payload_length_;
// NOTE: payload_size_ is unsigned, so "<= 0" is effectively "== 0" here.
*last_packet = payload_size_ <= 0;
return true;
}
// The generic format carries no layer information, so every packet is
// FEC-protected.
ProtectionType RtpPacketizerGeneric::GetProtectionType() {
return kProtectedPacket;
}
// Retransmission settings are ignored; generic packets may always be resent.
StorageType RtpPacketizerGeneric::GetStorageType(
uint32_t retransmission_settings) {
return kAllowRetransmission;
}
// Human-readable name of this packetizer, used for logging/diagnostics.
std::string RtpPacketizerGeneric::ToString() {
return "RtpPacketizerGeneric";
}
RtpDepacketizerGeneric::RtpDepacketizerGeneric(RtpData* const callback)
: callback_(callback) {
}
// Strips the one-byte generic header, translates its bits into the RTP
// header (frame type and first-packet flag), and hands the remaining payload
// to the callback. Returns false if the callback rejects the data.
// NOTE(review): assumes payload_data_length >= 1 — an empty payload would
// underflow the length below; confirm callers guarantee a non-empty payload.
bool RtpDepacketizerGeneric::Parse(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
size_t payload_data_length) {
uint8_t generic_header = *payload_data++;
--payload_data_length;
// Key-frame bit set => key frame; otherwise a delta frame.
rtp_header->frameType =
((generic_header & RtpFormatVideoGeneric::kKeyFrameBit) != 0)
? kVideoFrameKey
: kVideoFrameDelta;
rtp_header->type.Video.isFirstPacket =
(generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
if (callback_->OnReceivedPayloadData(
payload_data, payload_data_length, rtp_header) != 0) {
return false;
}
return true;
}
} // namespace webrtc

View File

@ -10,13 +10,72 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/typedefs.h"
namespace webrtc {
namespace RtpFormatVideoGeneric {
static const uint8_t kKeyFrameBit = 0x01;
static const uint8_t kFirstPacketBit = 0x02;
static const uint8_t kKeyFrameBit = 0x01;
static const uint8_t kFirstPacketBit = 0x02;
} // namespace RtpFormatVideoGeneric
} // namespace webrtc
// Packetizer for the generic video payload format: prefixes each packet with
// a one-byte header carrying the key-frame and first-packet bits.
class RtpPacketizerGeneric : public RtpPacketizer {
public:
// Initialize with payload from encoder.
// The payload_data must be exactly one encoded generic frame.
RtpPacketizerGeneric(FrameType frametype, size_t max_payload_len);
virtual ~RtpPacketizerGeneric();
virtual void SetPayloadData(
const uint8_t* payload_data,
size_t payload_size,
const RTPFragmentationHeader* fragmentation) OVERRIDE;
// Get the next payload with generic payload header.
// buffer is a pointer to where the output will be written.
// bytes_to_send is an output variable that will contain number of bytes
// written to buffer. The parameter last_packet is true for the last packet of
// the frame, false otherwise (i.e., call the function again to get the
// next packet).
// Returns true on success or false if there was no payload to packetize.
virtual bool NextPacket(uint8_t* buffer,
size_t* bytes_to_send,
bool* last_packet) OVERRIDE;
virtual ProtectionType GetProtectionType() OVERRIDE;
virtual StorageType GetStorageType(uint32_t retransmission_settings) OVERRIDE;
virtual std::string ToString() OVERRIDE;
private:
const uint8_t* payload_data_;  // Unsent remainder of the current frame.
size_t payload_size_;  // Bytes of the frame not yet packetized.
const size_t max_payload_len_;  // Max payload bytes per packet, header excluded.
FrameType frame_type_;  // Frame type latched at construction.
uint32_t payload_length_;  // Target payload size of the next packet.
uint8_t generic_header_;  // Header byte to write into the next packet.
DISALLOW_COPY_AND_ASSIGN(RtpPacketizerGeneric);
};
// Depacketizer for generic codec. Parses the one-byte generic header and
// forwards the remaining payload to the supplied RtpData callback.
class RtpDepacketizerGeneric : public RtpDepacketizer {
public:
explicit RtpDepacketizerGeneric(RtpData* const callback);
virtual ~RtpDepacketizerGeneric() {}
virtual bool Parse(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
size_t payload_data_length) OVERRIDE;
private:
RtpData* const callback_;  // Not owned; receives depacketized payloads.
DISALLOW_COPY_AND_ASSIGN(RtpDepacketizerGeneric);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_

View File

@ -10,11 +10,12 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include <assert.h> // assert
#include <assert.h> // assert
#include <string.h> // memcpy
#include <vector>
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h"
namespace webrtc {
@ -107,6 +108,29 @@ bool RtpPacketizerVp8::NextPacket(uint8_t* buffer,
return true;
}
// Only the temporal base layer (or streams without temporal layers,
// temporalIdx == kNoTemporalIdx) is FEC-protected; higher temporal layers
// can be dropped without protection.
ProtectionType RtpPacketizerVp8::GetProtectionType() {
bool protect =
hdr_info_.temporalIdx == 0 || hdr_info_.temporalIdx == kNoTemporalIdx;
return protect ? kProtectedPacket : kUnprotectedPacket;
}
// Maps the packet's temporal layer against retransmission_settings:
// base-layer packets are not stored unless kRetransmitBaseLayer is set,
// higher layers unless kRetransmitHigherLayers is set.
StorageType RtpPacketizerVp8::GetStorageType(uint32_t retransmission_settings) {
StorageType storage = kAllowRetransmission;
if (hdr_info_.temporalIdx == 0 &&
!(retransmission_settings & kRetransmitBaseLayer)) {
storage = kDontRetransmit;
} else if (hdr_info_.temporalIdx != kNoTemporalIdx &&
hdr_info_.temporalIdx > 0 &&
!(retransmission_settings & kRetransmitHigherLayers)) {
storage = kDontRetransmit;
}
return storage;
}
// Human-readable name of this packetizer, used for logging/diagnostics.
std::string RtpPacketizerVp8::ToString() {
return "RtpPacketizerVp8";
}
int RtpPacketizerVp8::CalcNextSize(int max_payload_len,
int remaining_bytes,
bool split_payload) const {
@ -123,17 +147,17 @@ int RtpPacketizerVp8::CalcNextSize(int max_payload_len,
// Number of fragments for remaining_bytes:
int num_frags = remaining_bytes / max_payload_len + 1;
// Number of bytes in this fragment:
return static_cast<int>(static_cast<double>(remaining_bytes)
/ num_frags + 0.5);
return static_cast<int>(static_cast<double>(remaining_bytes) / num_frags +
0.5);
} else {
return max_payload_len >= remaining_bytes ? remaining_bytes
: max_payload_len;
: max_payload_len;
}
}
int RtpPacketizerVp8::GeneratePackets() {
if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+ PayloadDescriptorExtraLength() + 1) {
if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_ +
PayloadDescriptorExtraLength() + 1) {
// The provided payload length is not long enough for the payload
// descriptor and one payload byte. Return an error.
return -1;
@ -143,16 +167,18 @@ int RtpPacketizerVp8::GeneratePackets() {
bool beginning = true;
int part_ix = 0;
while (total_bytes_processed < payload_size_) {
int packet_bytes = 0; // How much data to send in this packet.
int packet_bytes = 0; // How much data to send in this packet.
bool split_payload = true; // Splitting of partitions is initially allowed.
int remaining_in_partition = part_info_.fragmentationOffset[part_ix] -
total_bytes_processed + part_info_.fragmentationLength[part_ix];
int rem_payload_len = max_payload_len_ -
total_bytes_processed +
part_info_.fragmentationLength[part_ix];
int rem_payload_len =
max_payload_len_ -
(vp8_fixed_payload_descriptor_bytes_ + PayloadDescriptorExtraLength());
int first_partition_in_packet = part_ix;
while (int next_size = CalcNextSize(rem_payload_len, remaining_in_partition,
split_payload)) {
while (int next_size = CalcNextSize(
rem_payload_len, remaining_in_partition, split_payload)) {
packet_bytes += next_size;
rem_payload_len -= next_size;
remaining_in_partition -= next_size;
@ -165,7 +191,7 @@ int RtpPacketizerVp8::GeneratePackets() {
// with an intact partition (indicated by first_fragment_ == true).
if (part_ix + 1 < num_partitions_ &&
((aggr_mode_ == kAggrFragments) ||
(aggr_mode_ == kAggrPartitions && start_on_new_fragment))) {
(aggr_mode_ == kAggrPartitions && start_on_new_fragment))) {
assert(part_ix < num_partitions_);
remaining_in_partition = part_info_.fragmentationLength[++part_ix];
// Disallow splitting unless kAggrFragments. In kAggrPartitions,
@ -181,7 +207,9 @@ int RtpPacketizerVp8::GeneratePackets() {
}
assert(packet_bytes > 0);
QueuePacket(total_bytes_processed, packet_bytes, first_partition_in_packet,
QueuePacket(total_bytes_processed,
packet_bytes,
first_partition_in_packet,
start_on_new_fragment);
total_bytes_processed += packet_bytes;
start_on_new_fragment = (remaining_in_partition == 0);
@ -193,15 +221,15 @@ int RtpPacketizerVp8::GeneratePackets() {
}
int RtpPacketizerVp8::GeneratePacketsBalancedAggregates() {
if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+ PayloadDescriptorExtraLength() + 1) {
if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_ +
PayloadDescriptorExtraLength() + 1) {
// The provided payload length is not long enough for the payload
// descriptor and one payload byte. Return an error.
return -1;
}
std::vector<int> partition_decision;
const int overhead = vp8_fixed_payload_descriptor_bytes_ +
PayloadDescriptorExtraLength();
const int overhead =
vp8_fixed_payload_descriptor_bytes_ + PayloadDescriptorExtraLength();
const uint32_t max_payload_len = max_payload_len_ - overhead;
int min_size, max_size;
AggregateSmallPartitions(&partition_decision, &min_size, &max_size);
@ -217,10 +245,11 @@ int RtpPacketizerVp8::GeneratePacketsBalancedAggregates() {
const int packet_bytes =
(remaining_partition + num_fragments - 1) / num_fragments;
for (int n = 0; n < num_fragments; ++n) {
const int this_packet_bytes = packet_bytes < remaining_partition ?
packet_bytes : remaining_partition;
QueuePacket(total_bytes_processed, this_packet_bytes, part_ix,
(n == 0));
const int this_packet_bytes = packet_bytes < remaining_partition
? packet_bytes
: remaining_partition;
QueuePacket(
total_bytes_processed, this_packet_bytes, part_ix, (n == 0));
remaining_partition -= this_packet_bytes;
total_bytes_processed += this_packet_bytes;
if (this_packet_bytes < min_size) {
@ -237,13 +266,15 @@ int RtpPacketizerVp8::GeneratePacketsBalancedAggregates() {
const int first_partition_in_packet = part_ix;
const int aggregation_index = partition_decision[part_ix];
while (static_cast<size_t>(part_ix) < partition_decision.size() &&
partition_decision[part_ix] == aggregation_index) {
partition_decision[part_ix] == aggregation_index) {
// Collect all partitions that were aggregated into the same packet.
this_packet_bytes += part_info_.fragmentationLength[part_ix];
++part_ix;
}
QueuePacket(total_bytes_processed, this_packet_bytes,
first_partition_in_packet, true);
QueuePacket(total_bytes_processed,
this_packet_bytes,
first_partition_in_packet,
true);
total_bytes_processed += this_packet_bytes;
}
}
@ -259,8 +290,8 @@ void RtpPacketizerVp8::AggregateSmallPartitions(std::vector<int>* partition_vec,
*max_size = -1;
assert(partition_vec);
partition_vec->assign(num_partitions_, -1);
const int overhead = vp8_fixed_payload_descriptor_bytes_ +
PayloadDescriptorExtraLength();
const int overhead =
vp8_fixed_payload_descriptor_bytes_ + PayloadDescriptorExtraLength();
const uint32_t max_payload_len = max_payload_len_ - overhead;
int first_in_set = 0;
int last_in_set = 0;
@ -271,12 +302,12 @@ void RtpPacketizerVp8::AggregateSmallPartitions(std::vector<int>* partition_vec,
// Found start of a set.
last_in_set = first_in_set;
while (last_in_set + 1 < num_partitions_ &&
part_info_.fragmentationLength[last_in_set + 1] < max_payload_len) {
part_info_.fragmentationLength[last_in_set + 1] <
max_payload_len) {
++last_in_set;
}
// Found end of a set. Run optimized aggregator. It is ok if start == end.
Vp8PartitionAggregator aggregator(part_info_, first_in_set,
last_in_set);
Vp8PartitionAggregator aggregator(part_info_, first_in_set, last_in_set);
if (*min_size >= 0 && *max_size >= 0) {
aggregator.SetPriorMinMax(*min_size, *max_size);
}
@ -328,19 +359,23 @@ int RtpPacketizerVp8::WriteHeaderAndPayload(const InfoStruct& packet_info,
assert(packet_info.size > 0);
buffer[0] = 0;
if (XFieldPresent()) buffer[0] |= kXBit;
if (hdr_info_.nonReference) buffer[0] |= kNBit;
if (packet_info.first_fragment) buffer[0] |= kSBit;
if (XFieldPresent())
buffer[0] |= kXBit;
if (hdr_info_.nonReference)
buffer[0] |= kNBit;
if (packet_info.first_fragment)
buffer[0] |= kSBit;
buffer[0] |= (packet_info.first_partition_ix & kPartIdField);
const int extension_length = WriteExtensionFields(buffer, buffer_length);
memcpy(&buffer[vp8_fixed_payload_descriptor_bytes_ + extension_length],
&payload_data_[packet_info.payload_start_pos], packet_info.size);
&payload_data_[packet_info.payload_start_pos],
packet_info.size);
// Return total length of written data.
return packet_info.size + vp8_fixed_payload_descriptor_bytes_
+ extension_length;
return packet_info.size + vp8_fixed_payload_descriptor_bytes_ +
extension_length;
}
int RtpPacketizerVp8::WriteExtensionFields(uint8_t* buffer,
@ -351,20 +386,20 @@ int RtpPacketizerVp8::WriteExtensionFields(uint8_t* buffer,
*x_field = 0;
extension_length = 1; // One octet for the X field.
if (PictureIdPresent()) {
if (WritePictureIDFields(x_field, buffer, buffer_length,
&extension_length) < 0) {
if (WritePictureIDFields(
x_field, buffer, buffer_length, &extension_length) < 0) {
return -1;
}
}
if (TL0PicIdxFieldPresent()) {
if (WriteTl0PicIdxFields(x_field, buffer, buffer_length,
&extension_length) < 0) {
if (WriteTl0PicIdxFields(
x_field, buffer, buffer_length, &extension_length) < 0) {
return -1;
}
}
if (TIDFieldPresent() || KeyIdxFieldPresent()) {
if (WriteTIDAndKeyIdxFields(x_field, buffer, buffer_length,
&extension_length) < 0) {
if (WriteTIDAndKeyIdxFields(
x_field, buffer, buffer_length, &extension_length) < 0) {
return -1;
}
}
@ -380,18 +415,18 @@ int RtpPacketizerVp8::WritePictureIDFields(uint8_t* x_field,
*x_field |= kIBit;
const int pic_id_length = WritePictureID(
buffer + vp8_fixed_payload_descriptor_bytes_ + *extension_length,
buffer_length - vp8_fixed_payload_descriptor_bytes_
- *extension_length);
if (pic_id_length < 0) return -1;
buffer_length - vp8_fixed_payload_descriptor_bytes_ - *extension_length);
if (pic_id_length < 0)
return -1;
*extension_length += pic_id_length;
return 0;
}
int RtpPacketizerVp8::WritePictureID(uint8_t* buffer, int buffer_length) const {
const uint16_t pic_id =
static_cast<uint16_t> (hdr_info_.pictureId);
const uint16_t pic_id = static_cast<uint16_t>(hdr_info_.pictureId);
int picture_id_len = PictureIdLength();
if (picture_id_len > buffer_length) return -1;
if (picture_id_len > buffer_length)
return -1;
if (picture_id_len == 2) {
buffer[0] = 0x80 | ((pic_id >> 8) & 0x7F);
buffer[1] = pic_id & 0xFF;
@ -405,13 +440,13 @@ int RtpPacketizerVp8::WriteTl0PicIdxFields(uint8_t* x_field,
uint8_t* buffer,
int buffer_length,
int* extension_length) const {
if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+ 1) {
if (buffer_length <
vp8_fixed_payload_descriptor_bytes_ + *extension_length + 1) {
return -1;
}
*x_field |= kLBit;
buffer[vp8_fixed_payload_descriptor_bytes_
+ *extension_length] = hdr_info_.tl0PicIdx;
buffer[vp8_fixed_payload_descriptor_bytes_ + *extension_length] =
hdr_info_.tl0PicIdx;
++*extension_length;
return 0;
}
@ -420,8 +455,8 @@ int RtpPacketizerVp8::WriteTIDAndKeyIdxFields(uint8_t* x_field,
uint8_t* buffer,
int buffer_length,
int* extension_length) const {
if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+ 1) {
if (buffer_length <
vp8_fixed_payload_descriptor_bytes_ + *extension_length + 1) {
return -1;
}
uint8_t* data_field =
@ -443,9 +478,12 @@ int RtpPacketizerVp8::WriteTIDAndKeyIdxFields(uint8_t* x_field,
int RtpPacketizerVp8::PayloadDescriptorExtraLength() const {
int length_bytes = PictureIdLength();
if (TL0PicIdxFieldPresent()) ++length_bytes;
if (TIDFieldPresent() || KeyIdxFieldPresent()) ++length_bytes;
if (length_bytes > 0) ++length_bytes; // Include the extension field.
if (TL0PicIdxFieldPresent())
++length_bytes;
if (TIDFieldPresent() || KeyIdxFieldPresent())
++length_bytes;
if (length_bytes > 0)
++length_bytes; // Include the extension field.
return length_bytes;
}
@ -460,8 +498,8 @@ int RtpPacketizerVp8::PictureIdLength() const {
}
bool RtpPacketizerVp8::XFieldPresent() const {
return (TIDFieldPresent() || TL0PicIdxFieldPresent() || PictureIdPresent()
|| KeyIdxFieldPresent());
return (TIDFieldPresent() || TL0PicIdxFieldPresent() || PictureIdPresent() ||
KeyIdxFieldPresent());
}
bool RtpPacketizerVp8::TIDFieldPresent() const {
@ -477,4 +515,58 @@ bool RtpPacketizerVp8::KeyIdxFieldPresent() const {
bool RtpPacketizerVp8::TL0PicIdxFieldPresent() const {
return (hdr_info_.tl0PicIdx != kNoTl0PicIdx);
}
RtpDepacketizerVp8::RtpDepacketizerVp8(RtpData* const callback)
: callback_(callback) {
}
// Parses a VP8 payload via RtpUtility::RTPPayloadParser, copies the VP8
// payload-descriptor fields into the RTP header's codec-specific VP8 struct,
// and forwards the raw VP8 data to the callback. Returns false on parse
// failure or if the callback rejects the data; a successfully parsed packet
// with zero data length is treated as success with nothing delivered.
bool RtpDepacketizerVp8::Parse(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
size_t payload_data_length) {
RtpUtility::RTPPayload parsed_packet;
RtpUtility::RTPPayloadParser rtp_payload_parser(
kRtpVideoVp8, payload_data, payload_data_length);
if (!rtp_payload_parser.Parse(parsed_packet))
return false;
// Nothing to deliver (e.g. descriptor-only packet).
if (parsed_packet.info.VP8.dataLength == 0)
return true;
rtp_header->frameType = (parsed_packet.frameType == RtpUtility::kIFrame)
? kVideoFrameKey
: kVideoFrameDelta;
RTPVideoHeaderVP8* to_header = &rtp_header->type.Video.codecHeader.VP8;
RtpUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;
// First packet of a frame = beginning of partition 0.
rtp_header->type.Video.isFirstPacket =
from_header->beginningOfPartition && (from_header->partitionID == 0);
to_header->nonReference = from_header->nonReferenceFrame;
// Optional descriptor fields fall back to their kNo* sentinels when absent.
to_header->pictureId =
from_header->hasPictureID ? from_header->pictureID : kNoPictureId;
to_header->tl0PicIdx =
from_header->hasTl0PicIdx ? from_header->tl0PicIdx : kNoTl0PicIdx;
if (from_header->hasTID) {
to_header->temporalIdx = from_header->tID;
to_header->layerSync = from_header->layerSync;
} else {
to_header->temporalIdx = kNoTemporalIdx;
to_header->layerSync = false;
}
to_header->keyIdx = from_header->hasKeyIdx ? from_header->keyIdx : kNoKeyIdx;
rtp_header->type.Video.width = from_header->frameWidth;
rtp_header->type.Video.height = from_header->frameHeight;
to_header->partitionId = from_header->partitionID;
to_header->beginningOfPartition = from_header->beginningOfPartition;
if (callback_->OnReceivedPayloadData(parsed_packet.info.VP8.data,
parsed_packet.info.VP8.dataLength,
rtp_header) != 0) {
return false;
}
return true;
}
} // namespace webrtc

View File

@ -79,6 +79,12 @@ class RtpPacketizerVp8 : public RtpPacketizer {
size_t* bytes_to_send,
bool* last_packet) OVERRIDE;
virtual ProtectionType GetProtectionType() OVERRIDE;
virtual StorageType GetStorageType(uint32_t retransmission_settings) OVERRIDE;
virtual std::string ToString() OVERRIDE;
private:
typedef struct {
int payload_start_pos;
@ -96,19 +102,20 @@ class RtpPacketizerVp8 : public RtpPacketizer {
static const AggregationMode aggr_modes_[kNumModes];
static const bool balance_modes_[kNumModes];
static const bool separate_first_modes_[kNumModes];
static const int kXBit = 0x80;
static const int kNBit = 0x20;
static const int kSBit = 0x10;
static const int kXBit = 0x80;
static const int kNBit = 0x20;
static const int kSBit = 0x10;
static const int kPartIdField = 0x0F;
static const int kKeyIdxField = 0x1F;
static const int kIBit = 0x80;
static const int kLBit = 0x40;
static const int kTBit = 0x20;
static const int kKBit = 0x10;
static const int kYBit = 0x20;
static const int kIBit = 0x80;
static const int kLBit = 0x40;
static const int kTBit = 0x20;
static const int kKBit = 0x10;
static const int kYBit = 0x20;
// Calculate size of next chunk to send. Returns 0 if none can be sent.
int CalcNextSize(int max_payload_len, int remaining_bytes,
int CalcNextSize(int max_payload_len,
int remaining_bytes,
bool split_payload) const;
// Calculate all packet sizes and load to packet info queue.
@ -144,7 +151,6 @@ class RtpPacketizerVp8 : public RtpPacketizer {
uint8_t* buffer,
int buffer_length) const;
// Write the X field and the appropriate extension fields to buffer.
// The function returns the extension length (including X field), or -1
// on error.
@ -152,19 +158,25 @@ class RtpPacketizerVp8 : public RtpPacketizer {
// Set the I bit in the x_field, and write PictureID to the appropriate
// position in buffer. The function returns 0 on success, -1 otherwise.
int WritePictureIDFields(uint8_t* x_field, uint8_t* buffer,
int buffer_length, int* extension_length) const;
int WritePictureIDFields(uint8_t* x_field,
uint8_t* buffer,
int buffer_length,
int* extension_length) const;
// Set the L bit in the x_field, and write Tl0PicIdx to the appropriate
// position in buffer. The function returns 0 on success, -1 otherwise.
int WriteTl0PicIdxFields(uint8_t* x_field, uint8_t* buffer,
int buffer_length, int* extension_length) const;
int WriteTl0PicIdxFields(uint8_t* x_field,
uint8_t* buffer,
int buffer_length,
int* extension_length) const;
// Set the T and K bits in the x_field, and write TID, Y and KeyIdx to the
// appropriate position in buffer. The function returns 0 on success,
// -1 otherwise.
int WriteTIDAndKeyIdxFields(uint8_t* x_field, uint8_t* buffer,
int buffer_length, int* extension_length) const;
int WriteTIDAndKeyIdxFields(uint8_t* x_field,
uint8_t* buffer,
int buffer_length,
int* extension_length) const;
// Write the PictureID from codec_specific_info_ to buffer. One or two
// bytes are written, depending on magnitude of PictureID. The function
@ -202,5 +214,22 @@ class RtpPacketizerVp8 : public RtpPacketizer {
DISALLOW_COPY_AND_ASSIGN(RtpPacketizerVp8);
};
} // namespace
// Depacketizer for VP8. Parses the VP8 payload descriptor and forwards the
// VP8 bitstream data to the supplied RtpData callback.
class RtpDepacketizerVp8 : public RtpDepacketizer {
public:
explicit RtpDepacketizerVp8(RtpData* const callback);
virtual ~RtpDepacketizerVp8() {}
virtual bool Parse(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
size_t payload_data_length) OVERRIDE;
private:
RtpData* const callback_;  // Not owned; receives depacketized payloads.
DISALLOW_COPY_AND_ASSIGN(RtpDepacketizerVp8);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_

View File

@ -297,5 +297,4 @@ TEST_F(RtpPacketizerVp8Test, TestTIDAndKeyIdx) {
helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
kExpectedFragStart, kExpectedNum);
}
} // namespace

View File

@ -29,13 +29,13 @@ RTPReceiverStrategy* RTPReceiverStrategy::CreateVideoStrategy(
}
RTPReceiverVideo::RTPReceiverVideo(RtpData* data_callback)
: RTPReceiverStrategy(data_callback) {}
: RTPReceiverStrategy(data_callback) {
}
RTPReceiverVideo::~RTPReceiverVideo() {
}
bool RTPReceiverVideo::ShouldReportCsrcChanges(
uint8_t payload_type) const {
bool RTPReceiverVideo::ShouldReportCsrcChanges(uint8_t payload_type) const {
// Always do this for video packets.
return true;
}
@ -47,17 +47,19 @@ int32_t RTPReceiverVideo::OnNewPayloadTypeCreated(
return 0;
}
int32_t RTPReceiverVideo::ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* payload,
uint16_t payload_length,
int64_t timestamp_ms,
bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp", "Video::ParseRtp",
"seqnum", rtp_header->header.sequenceNumber,
"timestamp", rtp_header->header.timestamp);
int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* payload,
uint16_t payload_length,
int64_t timestamp_ms,
bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp",
"Video::ParseRtp",
"seqnum",
rtp_header->header.sequenceNumber,
"timestamp",
rtp_header->header.timestamp);
rtp_header->type.Video.codec = specific_payload.Video.videoCodecType;
const uint16_t payload_data_length =
@ -67,12 +69,16 @@ int32_t RTPReceiverVideo::ParseRtpPacket(
return data_callback_->OnReceivedPayloadData(NULL, 0, rtp_header) == 0 ? 0
: -1;
return ParseVideoCodecSpecific(rtp_header,
payload,
payload_data_length,
specific_payload.Video.videoCodecType,
timestamp_ms,
is_first_packet);
// We are not allowed to hold a critical section when calling below functions.
scoped_ptr<RtpDepacketizer> depacketizer(
RtpDepacketizer::Create(rtp_header->type.Video.codec, data_callback_));
if (depacketizer.get() == NULL) {
LOG(LS_ERROR) << "Failed to create depacketizer.";
return -1;
}
rtp_header->type.Video.isFirstPacket = is_first_packet;
return depacketizer->Parse(rtp_header, payload, payload_data_length) ? 0 : -1;
}
int RTPReceiverVideo::GetPayloadTypeFrequency() const {
@ -91,8 +97,9 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const {
// For video we just go with default values.
if (-1 == callback->OnInitializeDecoder(
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
if (-1 ==
callback->OnInitializeDecoder(
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
LOG(LS_ERROR) << "Failed to created decoder for payload type: "
<< payload_type;
return -1;
@ -100,36 +107,8 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
return 0;
}
// We are not allowed to hold a critical section when calling this function.
int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet) {
switch (rtp_header->type.Video.codec) {
case kRtpVideoGeneric:
rtp_header->type.Video.isFirstPacket = is_first_packet;
return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
case kRtpVideoVp8:
return ReceiveVp8Codec(rtp_header, payload_data, payload_data_length);
case kRtpVideoH264: {
scoped_ptr<RtpDepacketizer> depacketizer(RtpDepacketizer::Create(
rtp_header->type.Video.codec, data_callback_));
return depacketizer->Parse(rtp_header, payload_data, payload_data_length)
? 0
: -1;
}
case kRtpVideoNone:
break;
}
return -1;
}
int32_t RTPReceiverVideo::BuildRTPheader(
const WebRtcRTPHeader* rtp_header,
uint8_t* data_buffer) const {
int32_t RTPReceiverVideo::BuildRTPheader(const WebRtcRTPHeader* rtp_header,
uint8_t* data_buffer) const {
data_buffer[0] = static_cast<uint8_t>(0x80); // version 2
data_buffer[1] = static_cast<uint8_t>(rtp_header->header.payloadType);
if (rtp_header->header.markerBit) {
@ -161,73 +140,4 @@ int32_t RTPReceiverVideo::BuildRTPheader(
return rtp_header_length;
}
int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
RtpUtility::RTPPayload parsed_packet;
RtpUtility::RTPPayloadParser rtp_payload_parser(
kRtpVideoVp8, payload_data, payload_data_length);
if (!rtp_payload_parser.Parse(parsed_packet))
return -1;
if (parsed_packet.info.VP8.dataLength == 0)
return 0;
rtp_header->frameType = (parsed_packet.frameType == RtpUtility::kIFrame)
? kVideoFrameKey
: kVideoFrameDelta;
RTPVideoHeaderVP8* to_header = &rtp_header->type.Video.codecHeader.VP8;
RtpUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;
rtp_header->type.Video.isFirstPacket =
from_header->beginningOfPartition && (from_header->partitionID == 0);
to_header->nonReference = from_header->nonReferenceFrame;
to_header->pictureId =
from_header->hasPictureID ? from_header->pictureID : kNoPictureId;
to_header->tl0PicIdx =
from_header->hasTl0PicIdx ? from_header->tl0PicIdx : kNoTl0PicIdx;
if (from_header->hasTID) {
to_header->temporalIdx = from_header->tID;
to_header->layerSync = from_header->layerSync;
} else {
to_header->temporalIdx = kNoTemporalIdx;
to_header->layerSync = false;
}
to_header->keyIdx = from_header->hasKeyIdx ? from_header->keyIdx : kNoKeyIdx;
rtp_header->type.Video.width = from_header->frameWidth;
rtp_header->type.Video.height = from_header->frameHeight;
to_header->partitionId = from_header->partitionID;
to_header->beginningOfPartition = from_header->beginningOfPartition;
if (data_callback_->OnReceivedPayloadData(parsed_packet.info.VP8.data,
parsed_packet.info.VP8.dataLength,
rtp_header) != 0) {
return -1;
}
return 0;
}
int32_t RTPReceiverVideo::ReceiveGenericCodec(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
uint8_t generic_header = *payload_data++;
--payload_data_length;
rtp_header->frameType =
((generic_header & RtpFormatVideoGeneric::kKeyFrameBit) != 0) ?
kVideoFrameKey : kVideoFrameDelta;
rtp_header->type.Video.isFirstPacket =
(generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
if (data_callback_->OnReceivedPayloadData(
payload_data, payload_data_length, rtp_header) != 0) {
return -1;
}
return 0;
}
} // namespace webrtc

View File

@ -22,27 +22,24 @@ namespace webrtc {
class RTPReceiverVideo : public RTPReceiverStrategy {
public:
RTPReceiverVideo(RtpData* data_callback);
explicit RTPReceiverVideo(RtpData* data_callback);
virtual ~RTPReceiverVideo();
virtual int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp,
bool is_first_packet) OVERRIDE;
virtual int32_t ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp,
bool is_first_packet) OVERRIDE;
TelephoneEventHandler* GetTelephoneEventHandler() {
return NULL;
}
TelephoneEventHandler* GetTelephoneEventHandler() { return NULL; }
int GetPayloadTypeFrequency() const OVERRIDE;
virtual RTPAliveType ProcessDeadOrAlive(uint16_t last_payload_length) const
OVERRIDE;
virtual RTPAliveType ProcessDeadOrAlive(
uint16_t last_payload_length) const OVERRIDE;
virtual bool ShouldReportCsrcChanges(uint8_t payload_type) const OVERRIDE;
@ -61,28 +58,8 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
void SetPacketOverHead(uint16_t packet_over_head);
private:
int32_t ReceiveGenericCodec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length);
int32_t ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length);
int32_t ReceiveH264Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length);
int32_t BuildRTPheader(const WebRtcRTPHeader* rtp_header,
uint8_t* data_buffer) const;
int32_t ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet);
};
} // namespace webrtc

View File

@ -92,6 +92,7 @@
'rtp_format_h264.h',
'rtp_format_vp8.cc',
'rtp_format_vp8.h',
'rtp_format_video_generic.cc',
'rtp_format_video_generic.h',
'vp8_partition_aggregator.cc',
'vp8_partition_aggregator.h',

View File

@ -31,8 +31,7 @@ struct RtpPacket {
ForwardErrorCorrection::Packet* pkt;
};
RTPSenderVideo::RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender)
RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
: _rtpSender(*rtpSender),
_sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
@ -55,29 +54,23 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock,
memset(&key_fec_params_, 0, sizeof(key_fec_params_));
delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
kFecMaskRandom;
kFecMaskRandom;
}
RTPSenderVideo::~RTPSenderVideo()
{
if(_videoCodecInformation)
{
delete _videoCodecInformation;
}
delete _sendVideoCritsect;
RTPSenderVideo::~RTPSenderVideo() {
if (_videoCodecInformation) {
delete _videoCodecInformation;
}
delete _sendVideoCritsect;
}
void
RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType)
{
CriticalSectionScoped cs(_sendVideoCritsect);
_videoType = videoType;
void RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType) {
CriticalSectionScoped cs(_sendVideoCritsect);
_videoType = videoType;
}
RtpVideoCodecTypes
RTPSenderVideo::VideoCodecType() const
{
return _videoType;
RtpVideoCodecTypes RTPSenderVideo::VideoCodecType() const {
return _videoType;
}
int32_t RTPSenderVideo::RegisterVideoPayload(
@ -106,34 +99,34 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
return 0;
}
int32_t
RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
const uint16_t payload_length,
const uint16_t rtp_header_length,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect) {
if(_fecEnabled) {
int32_t RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
const uint16_t payload_length,
const uint16_t rtp_header_length,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect) {
if (_fecEnabled) {
int ret = 0;
int fec_overhead_sent = 0;
int video_sent = 0;
RedPacket* red_packet = producer_fec_.BuildRedPacket(data_buffer,
payload_length,
rtp_header_length,
_payloadTypeRED);
TRACE_EVENT_INSTANT2("webrtc_rtp", "Video::PacketRed",
"timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
RedPacket* red_packet = producer_fec_.BuildRedPacket(
data_buffer, payload_length, rtp_header_length, _payloadTypeRED);
TRACE_EVENT_INSTANT2("webrtc_rtp",
"Video::PacketRed",
"timestamp",
capture_timestamp,
"seqnum",
_rtpSender.SequenceNumber());
// Sending the media packet with RED header.
int packet_success = _rtpSender.SendToNetwork(
red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
int packet_success =
_rtpSender.SendToNetwork(red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
@ -144,34 +137,36 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
red_packet = NULL;
if (protect) {
ret = producer_fec_.AddRtpPacketAndGenerateFec(data_buffer,
payload_length,
rtp_header_length);
ret = producer_fec_.AddRtpPacketAndGenerateFec(
data_buffer, payload_length, rtp_header_length);
if (ret != 0)
return ret;
}
while (producer_fec_.FecAvailable()) {
red_packet = producer_fec_.GetFecPacket(
_payloadTypeRED,
_payloadTypeFEC,
_rtpSender.IncrementSequenceNumber(),
rtp_header_length);
red_packet =
producer_fec_.GetFecPacket(_payloadTypeRED,
_payloadTypeFEC,
_rtpSender.IncrementSequenceNumber(),
rtp_header_length);
StorageType storage = kDontRetransmit;
if (_retransmissionSettings & kRetransmitFECPackets) {
storage = kAllowRetransmission;
}
TRACE_EVENT_INSTANT2("webrtc_rtp", "Video::PacketFec",
"timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
TRACE_EVENT_INSTANT2("webrtc_rtp",
"Video::PacketFec",
"timestamp",
capture_timestamp,
"seqnum",
_rtpSender.SequenceNumber());
// Sending FEC packet with RED header.
int packet_success = _rtpSender.SendToNetwork(
red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
int packet_success =
_rtpSender.SendToNetwork(red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
@ -185,9 +180,12 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
_fecOverheadRate.Update(fec_overhead_sent);
return ret;
}
TRACE_EVENT_INSTANT2("webrtc_rtp", "Video::PacketNormal",
"timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
TRACE_EVENT_INSTANT2("webrtc_rtp",
"Video::PacketNormal",
"timestamp",
capture_timestamp,
"seqnum",
_rtpSender.SequenceNumber());
int ret = _rtpSender.SendToNetwork(data_buffer,
payload_length,
rtp_header_length,
@ -200,69 +198,61 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
return ret;
}
int32_t
RTPSenderVideo::SendRTPIntraRequest()
{
// RFC 2032
// 5.2.1. Full intra-frame Request (FIR) packet
int32_t RTPSenderVideo::SendRTPIntraRequest() {
// RFC 2032
// 5.2.1. Full intra-frame Request (FIR) packet
uint16_t length = 8;
uint8_t data[8];
data[0] = 0x80;
data[1] = 192;
data[2] = 0;
data[3] = 1; // length
uint16_t length = 8;
uint8_t data[8];
data[0] = 0x80;
data[1] = 192;
data[2] = 0;
data[3] = 1; // length
RtpUtility::AssignUWord32ToBuffer(data + 4, _rtpSender.SSRC());
RtpUtility::AssignUWord32ToBuffer(data + 4, _rtpSender.SSRC());
TRACE_EVENT_INSTANT1("webrtc_rtp",
"Video::IntraRequest",
"seqnum", _rtpSender.SequenceNumber());
return _rtpSender.SendToNetwork(data, 0, length, -1, kDontStore,
PacedSender::kNormalPriority);
TRACE_EVENT_INSTANT1("webrtc_rtp",
"Video::IntraRequest",
"seqnum",
_rtpSender.SequenceNumber());
return _rtpSender.SendToNetwork(
data, 0, length, -1, kDontStore, PacedSender::kNormalPriority);
}
int32_t
RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC)
{
_fecEnabled = enable;
_payloadTypeRED = payloadTypeRED;
_payloadTypeFEC = payloadTypeFEC;
memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
memset(&key_fec_params_, 0, sizeof(key_fec_params_));
delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
kFecMaskRandom;
return 0;
int32_t RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
_fecEnabled = enable;
_payloadTypeRED = payloadTypeRED;
_payloadTypeFEC = payloadTypeFEC;
memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
memset(&key_fec_params_, 0, sizeof(key_fec_params_));
delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
kFecMaskRandom;
return 0;
}
int32_t
RTPSenderVideo::GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const
{
enable = _fecEnabled;
payloadTypeRED = _payloadTypeRED;
payloadTypeFEC = _payloadTypeFEC;
return 0;
int32_t RTPSenderVideo::GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const {
enable = _fecEnabled;
payloadTypeRED = _payloadTypeRED;
payloadTypeFEC = _payloadTypeFEC;
return 0;
}
uint16_t
RTPSenderVideo::FECPacketOverhead() const
{
if (_fecEnabled)
{
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12 bytes base header (CSRC list, extensions...)
// This reason for the header extensions to be included here is that
// from an FEC viewpoint, they are part of the payload to be protected.
// (The base RTP header is already protected by the FEC header.)
return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
(_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
}
return 0;
uint16_t RTPSenderVideo::FECPacketOverhead() const {
if (_fecEnabled) {
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12 bytes base header (CSRC list, extensions...)
// This reason for the header extensions to be included here is that
// from an FEC viewpoint, they are part of the payload to be protected.
// (The base RTP header is already protected by the FEC header.)
return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
(_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
}
return 0;
}
int32_t RTPSenderVideo::SetFecParameters(
@ -299,206 +289,58 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
// Will be extracted in SendVP8 for VP8 codec; other codecs use 0
_numberFirstPartition = 0;
switch (videoType) {
case kRtpVideoGeneric:
return SendGeneric(frameType,
payloadType,
captureTimeStamp,
capture_time_ms,
payloadData,
payloadSize);
case kRtpVideoVp8:
return SendVP8(frameType,
payloadType,
captureTimeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
rtpTypeHdr);
case kRtpVideoH264:
return SendH264(frameType,
payloadType,
captureTimeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
rtpTypeHdr)
? 0
: -1;
default:
assert(false);
break;
}
return 0;
return Send(videoType,
frameType,
payloadType,
captureTimeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
rtpTypeHdr)
? 0
: -1;
}
int32_t RTPSenderVideo::SendGeneric(const FrameType frame_type,
const int8_t payload_type,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
const uint8_t* payload,
uint32_t size) {
assert(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
VideoCodecInformation* RTPSenderVideo::CodecInformationVideo() {
return _videoCodecInformation;
}
void RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate) {
_maxBitrate = maxBitrate;
}
uint32_t RTPSenderVideo::MaxConfiguredBitrateVideo() const {
return _maxBitrate;
}
bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr) {
uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
uint16_t max_length = _rtpSender.MaxPayloadLength() - FECPacketOverhead() -
rtp_header_length - (1 /* generic header length */);
// Fragment packets more evenly by splitting the payload up evenly.
uint32_t num_packets = (size + max_length - 1) / max_length;
uint32_t payload_length = (size + num_packets - 1) / num_packets;
assert(payload_length <= max_length);
// Fragment packet into packets of max MaxPayloadLength bytes payload.
uint8_t buffer[IP_PACKET_SIZE];
uint8_t generic_header = RtpFormatVideoGeneric::kFirstPacketBit;
if (frame_type == kVideoFrameKey) {
generic_header |= RtpFormatVideoGeneric::kKeyFrameBit;
}
while (size > 0) {
if (size < payload_length) {
payload_length = size;
}
size -= payload_length;
// MarkerBit is 1 on final packet (bytes_to_send == 0)
if (_rtpSender.BuildRTPheader(buffer, payload_type, size == 0,
capture_timestamp,
capture_time_ms) != rtp_header_length) {
return -1;
}
uint8_t* out_ptr = &buffer[rtp_header_length];
// Put generic header in packet
*out_ptr++ = generic_header;
// Remove first-packet bit, following packets are intermediate
generic_header &= ~RtpFormatVideoGeneric::kFirstPacketBit;
// Put payload in packet
memcpy(out_ptr, payload, payload_length);
payload += payload_length;
if (SendVideoPacket(buffer, payload_length + 1, rtp_header_length,
capture_timestamp, capture_time_ms,
kAllowRetransmission, true)) {
return -1;
}
}
return 0;
}
VideoCodecInformation*
RTPSenderVideo::CodecInformationVideo()
{
return _videoCodecInformation;
}
void
RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate)
{
_maxBitrate = maxBitrate;
}
uint32_t
RTPSenderVideo::MaxConfiguredBitrateVideo() const
{
return _maxBitrate;
}
int32_t
RTPSenderVideo::SendVP8(const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr)
{
const uint16_t rtpHeaderLength = _rtpSender.RTPHeaderLength();
int32_t payloadBytesToSend = payloadSize;
const uint8_t* data = payloadData;
uint16_t maxPayloadLengthVP8 = _rtpSender.MaxDataPayloadLength();
assert(rtpTypeHdr);
// Initialize disregarding partition boundaries: this will use kEqualSize
// packetization mode, which produces ~equal size packets for each frame.
RtpPacketizerVp8 packetizer(rtpTypeHdr->VP8, maxPayloadLengthVP8);
packetizer.SetPayloadData(data, payloadBytesToSend, NULL);
StorageType storage = kAllowRetransmission;
if (rtpTypeHdr->VP8.temporalIdx == 0 &&
!(_retransmissionSettings & kRetransmitBaseLayer)) {
storage = kDontRetransmit;
} else if (rtpTypeHdr->VP8.temporalIdx != kNoTemporalIdx &&
rtpTypeHdr->VP8.temporalIdx > 0 &&
!(_retransmissionSettings & kRetransmitHigherLayers)) {
storage = kDontRetransmit;
}
bool last = false;
_numberFirstPartition = 0;
// |rtpTypeHdr->VP8.temporalIdx| is zero for base layers, or kNoTemporalIdx
// if the field isn't used (so all layers are the base layer). We currently
// only protect base layers, so look for these two cases.
bool protect = rtpTypeHdr->VP8.temporalIdx == 0 ||
rtpTypeHdr->VP8.temporalIdx == kNoTemporalIdx;
while (!last)
{
// Write VP8 Payload Descriptor and VP8 payload.
uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
size_t payloadBytesInPacket = 0;
if (!packetizer.NextPacket(
&dataBuffer[rtpHeaderLength], &payloadBytesInPacket, &last))
return -1;
// Write RTP header.
// Set marker bit true if this is the last packet in frame.
_rtpSender.BuildRTPheader(dataBuffer, payloadType, last,
captureTimeStamp, capture_time_ms);
if (-1 == SendVideoPacket(dataBuffer, payloadBytesInPacket,
rtpHeaderLength, captureTimeStamp,
capture_time_ms, storage, protect))
{
LOG(LS_WARNING)
<< "RTPSenderVideo::SendVP8 failed to send packet number "
<< _rtpSender.SequenceNumber();
}
}
TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms,
"timestamp", _rtpSender.Timestamp());
return 0;
}
bool RTPSenderVideo::SendH264(const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr) {
size_t rtp_header_length = _rtpSender.RTPHeaderLength();
int32_t payload_bytes_to_send = payloadSize;
const uint8_t* data = payloadData;
size_t max_payload_length = _rtpSender.MaxDataPayloadLength();
scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, max_payload_length));
packetizer->SetPayloadData(data, payload_bytes_to_send, fragmentation);
scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
videoType, max_payload_length, rtpTypeHdr, frameType));
// TODO(changbin): we currently don't support to configure the codec to
// output multiple partitions for VP8. Should remove below check after the
// issue is fixed.
const RTPFragmentationHeader* frag =
(videoType == kRtpVideoVp8) ? NULL : fragmentation;
packetizer->SetPayloadData(data, payload_bytes_to_send, frag);
StorageType storage = kAllowRetransmission;
bool protect = (frameType == kVideoFrameKey);
bool last = false;
while (!last) {
// Write H264 payload.
uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
size_t payload_bytes_in_packet = 0;
if (!packetizer->NextPacket(
@ -515,13 +357,16 @@ bool RTPSenderVideo::SendH264(const FrameType frameType,
rtp_header_length,
captureTimeStamp,
capture_time_ms,
storage,
protect)) {
LOG(LS_WARNING)
<< "RTPSenderVideo::SendH264 failed to send packet number "
<< _rtpSender.SequenceNumber();
packetizer->GetStorageType(_retransmissionSettings),
packetizer->GetProtectionType() == kProtectedPacket)) {
LOG(LS_WARNING) << packetizer->ToString()
<< " failed to send packet number "
<< _rtpSender.SequenceNumber();
}
}
TRACE_EVENT_ASYNC_END1(
"webrtc", "Video", capture_time_ms, "timestamp", _rtpSender.Timestamp());
return true;
}

View File

@ -28,122 +28,106 @@ namespace webrtc {
class CriticalSectionWrapper;
struct RtpPacket;
class RTPSenderVideo
{
public:
RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender);
virtual ~RTPSenderVideo();
class RTPSenderVideo {
public:
RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender);
virtual ~RTPSenderVideo();
virtual RtpVideoCodecTypes VideoCodecType() const;
virtual RtpVideoCodecTypes VideoCodecType() const;
uint16_t FECPacketOverhead() const;
uint16_t FECPacketOverhead() const;
int32_t RegisterVideoPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate,
RtpUtility::Payload*& payload);
int32_t RegisterVideoPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate,
RtpUtility::Payload*& payload);
int32_t SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codecInfo,
const RTPVideoTypeHeader* rtpTypeHdr);
int32_t SendRTPIntraRequest();
void SetVideoCodecType(RtpVideoCodecTypes type);
VideoCodecInformation* CodecInformationVideo();
void SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate);
uint32_t MaxConfiguredBitrateVideo() const;
// FEC
int32_t SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC);
int32_t GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const;
int32_t SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
void ProcessBitrate();
uint32_t VideoBitrateSent() const;
uint32_t FecOverheadRate() const;
int SelectiveRetransmissions() const;
int SetSelectiveRetransmissions(uint8_t settings);
protected:
virtual int32_t SendVideoPacket(uint8_t* dataBuffer,
const uint16_t payloadLength,
const uint16_t rtpHeaderLength,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect);
private:
int32_t SendGeneric(const FrameType frame_type,
const int8_t payload_type,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
const uint8_t* payload, const uint32_t size);
int32_t SendVP8(const FrameType frameType,
int32_t SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codecInfo,
const RTPVideoTypeHeader* rtpTypeHdr);
bool SendH264(const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr);
int32_t SendRTPIntraRequest();
private:
RTPSenderInterface& _rtpSender;
void SetVideoCodecType(RtpVideoCodecTypes type);
CriticalSectionWrapper* _sendVideoCritsect;
RtpVideoCodecTypes _videoType;
VideoCodecInformation* _videoCodecInformation;
uint32_t _maxBitrate;
int32_t _retransmissionSettings;
VideoCodecInformation* CodecInformationVideo();
// FEC
ForwardErrorCorrection _fec;
bool _fecEnabled;
int8_t _payloadTypeRED;
int8_t _payloadTypeFEC;
unsigned int _numberFirstPartition;
FecProtectionParams delta_fec_params_;
FecProtectionParams key_fec_params_;
ProducerFec producer_fec_;
void SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate);
// Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
// and any padding overhead.
Bitrate _fecOverheadRate;
// Bitrate used for video payload and RTP headers
Bitrate _videoBitrate;
uint32_t MaxConfiguredBitrateVideo() const;
// FEC
int32_t SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC);
int32_t GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const;
int32_t SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
void ProcessBitrate();
uint32_t VideoBitrateSent() const;
uint32_t FecOverheadRate() const;
int SelectiveRetransmissions() const;
int SetSelectiveRetransmissions(uint8_t settings);
protected:
virtual int32_t SendVideoPacket(uint8_t* dataBuffer,
const uint16_t payloadLength,
const uint16_t rtpHeaderLength,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect);
private:
bool Send(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const uint32_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr);
private:
RTPSenderInterface& _rtpSender;
CriticalSectionWrapper* _sendVideoCritsect;
RtpVideoCodecTypes _videoType;
VideoCodecInformation* _videoCodecInformation;
uint32_t _maxBitrate;
int32_t _retransmissionSettings;
// FEC
ForwardErrorCorrection _fec;
bool _fecEnabled;
int8_t _payloadTypeRED;
int8_t _payloadTypeFEC;
unsigned int _numberFirstPartition;
FecProtectionParams delta_fec_params_;
FecProtectionParams key_fec_params_;
ProducerFec producer_fec_;
// Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
// and any padding overhead.
Bitrate _fecOverheadRate;
// Bitrate used for video payload and RTP headers
Bitrate _videoBitrate;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_