Remove partially defined WebRtcRTPHeader from Parse().

It's a bit ugly that RtpDepacketizer::ParsedPayload partially defines a WebRtcRTPHeader which is then passed to the Parse() function to be modified internally.
To make this clearer, this CL stops using the partially defined WebRtcRTPHeader and lets ParsedPayload carry the parsed fields itself.
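
For reference, a sketch of the ParsedPayload shape before and after this change, summarized from the rtp_format.h diff below:

  // Before: callers handed Parse() a pointer to a zeroed WebRtcRTPHeader.
  struct ParsedPayload {
    explicit ParsedPayload(WebRtcRTPHeader* rtp_header);
    const uint8_t* payload;
    size_t payload_length;
    WebRtcRTPHeader* header;
  };

  // After: Parse() fills in only the fields it actually parses.
  struct ParsedPayload {
    const uint8_t* payload;
    size_t payload_length;
    FrameType frame_type;
    RTPTypeHeader type;
  };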

BUG=
R=pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/28919004

Patch from Changbin Shao <changbin.shao@intel.com>.

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7660 4adac7df-926f-26a2-2b94-8c16560cd09d
pbos@webrtc.org 2014-11-07 11:02:12 +00:00
parent a2ef4fe9c3
commit d42a3adf42
7 changed files with 141 additions and 156 deletions

View File

@@ -53,12 +53,10 @@ class RtpPacketizer {
 class RtpDepacketizer {
  public:
   struct ParsedPayload {
-    explicit ParsedPayload(WebRtcRTPHeader* rtp_header)
-        : payload(NULL), payload_length(0), header(rtp_header) {}
     const uint8_t* payload;
     size_t payload_length;
-    WebRtcRTPHeader* header;
+    FrameType frame_type;
+    RTPTypeHeader type;
   };
 
   static RtpDepacketizer* Create(RtpVideoCodecTypes type);
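
A minimal usage sketch of the new interface, assuming a caller that still needs a filled-in WebRtcRTPHeader. It mirrors the RTPReceiverVideo::ParseRtpPacket change at the end of this commit; the wrapper function itself is hypothetical:

// Sketch only: depacketize a payload and copy the parsed fields into a
// caller-owned WebRtcRTPHeader.
bool DepacketizeInto(RtpDepacketizer* depacketizer,
                     const uint8_t* payload,
                     size_t payload_length,
                     WebRtcRTPHeader* rtp_header) {
  RtpDepacketizer::ParsedPayload parsed_payload;
  if (!depacketizer->Parse(&parsed_payload, payload, payload_length))
    return false;
  // The caller, not Parse(), now decides where the parsed values go.
  rtp_header->frameType = parsed_payload.frame_type;
  rtp_header->type = parsed_payload.type;
  return true;
}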

View File

@@ -37,12 +37,15 @@ enum NalDefs { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F };
 // Bit masks for FU (A and B) headers.
 enum FuDefs { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 };
 
-void ParseSingleNalu(WebRtcRTPHeader* rtp_header,
+void ParseSingleNalu(RtpDepacketizer::ParsedPayload* parsed_payload,
                      const uint8_t* payload_data,
                      size_t payload_data_length) {
-  rtp_header->type.Video.codec = kRtpVideoH264;
-  rtp_header->type.Video.isFirstPacket = true;
-  RTPVideoHeaderH264* h264_header = &rtp_header->type.Video.codecHeader.H264;
+  parsed_payload->type.Video.width = 0;
+  parsed_payload->type.Video.height = 0;
+  parsed_payload->type.Video.codec = kRtpVideoH264;
+  parsed_payload->type.Video.isFirstPacket = true;
+  RTPVideoHeaderH264* h264_header =
+      &parsed_payload->type.Video.codecHeader.H264;
   h264_header->single_nalu = true;
   h264_header->stap_a = false;

@@ -56,15 +59,15 @@ void ParseSingleNalu(WebRtcRTPHeader* rtp_header,
     case kSps:
     case kPps:
     case kIdr:
-      rtp_header->frameType = kVideoFrameKey;
+      parsed_payload->frame_type = kVideoFrameKey;
       break;
     default:
-      rtp_header->frameType = kVideoFrameDelta;
+      parsed_payload->frame_type = kVideoFrameDelta;
       break;
   }
 }
 
-void ParseFuaNalu(WebRtcRTPHeader* rtp_header,
+void ParseFuaNalu(RtpDepacketizer::ParsedPayload* parsed_payload,
                   const uint8_t* payload_data,
                   size_t payload_data_length,
                   size_t* offset) {

@@ -82,13 +85,16 @@ void ParseFuaNalu(WebRtcRTPHeader* rtp_header,
   }
 
   if (original_nal_type == kIdr) {
-    rtp_header->frameType = kVideoFrameKey;
+    parsed_payload->frame_type = kVideoFrameKey;
   } else {
-    rtp_header->frameType = kVideoFrameDelta;
+    parsed_payload->frame_type = kVideoFrameDelta;
   }
-  rtp_header->type.Video.codec = kRtpVideoH264;
-  rtp_header->type.Video.isFirstPacket = first_fragment;
-  RTPVideoHeaderH264* h264_header = &rtp_header->type.Video.codecHeader.H264;
+  parsed_payload->type.Video.width = 0;
+  parsed_payload->type.Video.height = 0;
+  parsed_payload->type.Video.codec = kRtpVideoH264;
+  parsed_payload->type.Video.isFirstPacket = first_fragment;
+  RTPVideoHeaderH264* h264_header =
+      &parsed_payload->type.Video.codecHeader.H264;
   h264_header->single_nalu = false;
   h264_header->stap_a = false;
 }

@@ -298,12 +304,11 @@ bool RtpDepacketizerH264::Parse(ParsedPayload* parsed_payload,
   size_t offset = 0;
   if (nal_type == kFuA) {
     // Fragmented NAL units (FU-A).
-    ParseFuaNalu(
-        parsed_payload->header, payload_data, payload_data_length, &offset);
+    ParseFuaNalu(parsed_payload, payload_data, payload_data_length, &offset);
   } else {
     // We handle STAP-A and single NALU's the same way here. The jitter buffer
     // will depacketize the STAP-A into NAL units later.
-    ParseSingleNalu(parsed_payload->header, payload_data, payload_data_length);
+    ParseSingleNalu(parsed_payload, payload_data, payload_data_length);
   }
 
   parsed_payload->payload = payload_data + offset;

View File

@@ -399,17 +399,15 @@ class RtpDepacketizerH264Test : public ::testing::Test {
 TEST_F(RtpDepacketizerH264Test, TestSingleNalu) {
   uint8_t packet[2] = {0x05, 0xFF};  // F=0, NRI=0, Type=5.
-  WebRtcRTPHeader expected_header;
-  memset(&expected_header, 0, sizeof(expected_header));
-  RtpDepacketizer::ParsedPayload payload(&expected_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(&payload, packet, sizeof(packet));
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  EXPECT_TRUE(payload.header->type.Video.isFirstPacket);
-  EXPECT_TRUE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_TRUE(payload.type.Video.isFirstPacket);
+  EXPECT_TRUE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.stap_a);
 }
 
 TEST_F(RtpDepacketizerH264Test, TestStapAKey) {

@@ -417,17 +415,15 @@ TEST_F(RtpDepacketizerH264Test, TestStapAKey) {
       // Length, nal header, payload.
       0, 0x02, kIdr, 0xFF, 0, 0x03, kIdr, 0xFF,
      0x00, 0, 0x04, kIdr, 0xFF, 0x00, 0x11};
-  WebRtcRTPHeader expected_header;
-  memset(&expected_header, 0, sizeof(expected_header));
-  RtpDepacketizer::ParsedPayload payload(&expected_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(&payload, packet, sizeof(packet));
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  EXPECT_TRUE(payload.header->type.Video.isFirstPacket);
-  EXPECT_TRUE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_TRUE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_TRUE(payload.type.Video.isFirstPacket);
+  EXPECT_TRUE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_TRUE(payload.type.Video.codecHeader.H264.stap_a);
 }
 
 TEST_F(RtpDepacketizerH264Test, TestStapADelta) {

@@ -435,17 +431,15 @@ TEST_F(RtpDepacketizerH264Test, TestStapADelta) {
       // Length, nal header, payload.
       0, 0x02, kSlice, 0xFF, 0, 0x03, kSlice, 0xFF,
      0x00, 0, 0x04, kSlice, 0xFF, 0x00, 0x11};
-  WebRtcRTPHeader expected_header;
-  memset(&expected_header, 0, sizeof(expected_header));
-  RtpDepacketizer::ParsedPayload payload(&expected_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(&payload, packet, sizeof(packet));
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  EXPECT_TRUE(payload.header->type.Video.isFirstPacket);
-  EXPECT_TRUE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_TRUE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_TRUE(payload.type.Video.isFirstPacket);
+  EXPECT_TRUE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_TRUE(payload.type.Video.codecHeader.H264.stap_a);
 }
 
 TEST_F(RtpDepacketizerH264Test, TestFuA) {

@@ -470,33 +464,36 @@ TEST_F(RtpDepacketizerH264Test, TestFuA) {
   };
   const uint8_t kExpected3[1] = {0x03};
 
-  WebRtcRTPHeader expected_header;
-  memset(&expected_header, 0, sizeof(expected_header));
-  RtpDepacketizer::ParsedPayload payload(&expected_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   // We expect that the first packet is one byte shorter since the FU-A header
   // has been replaced by the original nal header.
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet1, sizeof(packet1)));
   ExpectPacket(&payload, kExpected1, sizeof(kExpected1));
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  EXPECT_TRUE(payload.header->type.Video.isFirstPacket);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_TRUE(payload.type.Video.isFirstPacket);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.stap_a);
 
   // Following packets will be 2 bytes shorter since they will only be appended
   // onto the first packet.
+  payload = RtpDepacketizer::ParsedPayload();
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet2, sizeof(packet2)));
   ExpectPacket(&payload, kExpected2, sizeof(kExpected2));
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  EXPECT_FALSE(payload.header->type.Video.isFirstPacket);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_FALSE(payload.type.Video.isFirstPacket);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.stap_a);
 
+  payload = RtpDepacketizer::ParsedPayload();
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet3, sizeof(packet3)));
   ExpectPacket(&payload, kExpected3, sizeof(kExpected3));
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  EXPECT_FALSE(payload.header->type.Video.isFirstPacket);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.single_nalu);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.H264.stap_a);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
+  EXPECT_FALSE(payload.type.Video.isFirstPacket);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.single_nalu);
+  EXPECT_FALSE(payload.type.Video.codecHeader.H264.stap_a);
 }
 
 }  // namespace webrtc

View File

@@ -90,17 +90,19 @@ bool RtpDepacketizerGeneric::Parse(ParsedPayload* parsed_payload,
                                    const uint8_t* payload_data,
                                    size_t payload_data_length) {
   assert(parsed_payload != NULL);
-  assert(parsed_payload->header != NULL);
 
   uint8_t generic_header = *payload_data++;
   --payload_data_length;
 
-  parsed_payload->header->frameType =
+  parsed_payload->frame_type =
       ((generic_header & RtpFormatVideoGeneric::kKeyFrameBit) != 0)
           ? kVideoFrameKey
          : kVideoFrameDelta;
-  parsed_payload->header->type.Video.isFirstPacket =
+  parsed_payload->type.Video.isFirstPacket =
       (generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
+  parsed_payload->type.Video.codec = kRtpVideoGeneric;
+  parsed_payload->type.Video.width = 0;
+  parsed_payload->type.Video.height = 0;
 
   parsed_payload->payload = payload_data;
   parsed_payload->payload_length = payload_data_length;

View File

@@ -121,11 +121,11 @@ int ParseVP8Extension(RTPVideoHeaderVP8* vp8,
   return parsed_bytes;
 }
 
-int ParseVP8FrameSize(WebRtcRTPHeader* rtp_header,
+int ParseVP8FrameSize(RtpDepacketizer::ParsedPayload* parsed_payload,
                       const uint8_t* data,
                       int data_length) {
-  assert(rtp_header != NULL);
-  if (rtp_header->frameType != kVideoFrameKey) {
+  assert(parsed_payload != NULL);
+  if (parsed_payload->frame_type != kVideoFrameKey) {
     // Included in payload header for I-frames.
     return 0;
   }

@@ -134,8 +134,8 @@ int ParseVP8FrameSize(WebRtcRTPHeader* rtp_header,
     // in the beginning of the partition.
     return -1;
   }
-  rtp_header->type.Video.width = ((data[7] << 8) + data[6]) & 0x3FFF;
-  rtp_header->type.Video.height = ((data[9] << 8) + data[8]) & 0x3FFF;
+  parsed_payload->type.Video.width = ((data[7] << 8) + data[6]) & 0x3FFF;
+  parsed_payload->type.Video.height = ((data[9] << 8) + data[8]) & 0x3FFF;
   return 0;
 }
 }  // namespace

@@ -664,27 +664,27 @@ bool RtpDepacketizerVp8::Parse(ParsedPayload* parsed_payload,
                                const uint8_t* payload_data,
                                size_t payload_data_length) {
   assert(parsed_payload != NULL);
-  assert(parsed_payload->header != NULL);
 
   // Parse mandatory first byte of payload descriptor.
   bool extension = (*payload_data & 0x80) ? true : false;               // X bit
   bool beginning_of_partition = (*payload_data & 0x10) ? true : false;  // S bit
   int partition_id = (*payload_data & 0x0F);  // PartID field
 
-  parsed_payload->header->type.Video.isFirstPacket =
+  parsed_payload->type.Video.width = 0;
+  parsed_payload->type.Video.height = 0;
+  parsed_payload->type.Video.isFirstPacket =
       beginning_of_partition && (partition_id == 0);
+  parsed_payload->type.Video.codec = kRtpVideoVp8;
 
-  parsed_payload->header->type.Video.codecHeader.VP8.nonReference =
+  parsed_payload->type.Video.codecHeader.VP8.nonReference =
       (*payload_data & 0x20) ? true : false;  // N bit
-  parsed_payload->header->type.Video.codecHeader.VP8.partitionId = partition_id;
-  parsed_payload->header->type.Video.codecHeader.VP8.beginningOfPartition =
+  parsed_payload->type.Video.codecHeader.VP8.partitionId = partition_id;
+  parsed_payload->type.Video.codecHeader.VP8.beginningOfPartition =
       beginning_of_partition;
-  parsed_payload->header->type.Video.codecHeader.VP8.pictureId = kNoPictureId;
-  parsed_payload->header->type.Video.codecHeader.VP8.tl0PicIdx = kNoTl0PicIdx;
-  parsed_payload->header->type.Video.codecHeader.VP8.temporalIdx =
-      kNoTemporalIdx;
-  parsed_payload->header->type.Video.codecHeader.VP8.layerSync = false;
-  parsed_payload->header->type.Video.codecHeader.VP8.keyIdx = kNoKeyIdx;
+  parsed_payload->type.Video.codecHeader.VP8.pictureId = kNoPictureId;
+  parsed_payload->type.Video.codecHeader.VP8.tl0PicIdx = kNoTl0PicIdx;
+  parsed_payload->type.Video.codecHeader.VP8.temporalIdx = kNoTemporalIdx;
+  parsed_payload->type.Video.codecHeader.VP8.layerSync = false;
+  parsed_payload->type.Video.codecHeader.VP8.keyIdx = kNoKeyIdx;
 
   if (partition_id > 8) {
     // Weak check for corrupt payload_data: PartID MUST NOT be larger than 8.

@@ -697,7 +697,7 @@ bool RtpDepacketizerVp8::Parse(ParsedPayload* parsed_payload,
   if (extension) {
     const int parsed_bytes =
-        ParseVP8Extension(&parsed_payload->header->type.Video.codecHeader.VP8,
+        ParseVP8Extension(&parsed_payload->type.Video.codecHeader.VP8,
                           payload_data,
                           payload_data_length);
     if (parsed_bytes < 0)

@@ -713,14 +713,14 @@ bool RtpDepacketizerVp8::Parse(ParsedPayload* parsed_payload,
   // Read P bit from payload header (only at beginning of first partition).
   if (payload_data_length > 0 && beginning_of_partition && partition_id == 0) {
-    parsed_payload->header->frameType =
+    parsed_payload->frame_type =
         (*payload_data & 0x01) ? kVideoFrameDelta : kVideoFrameKey;
   } else {
-    parsed_payload->header->frameType = kVideoFrameDelta;
+    parsed_payload->frame_type = kVideoFrameDelta;
   }
 
-  if (0 != ParseVP8FrameSize(
-          parsed_payload->header, payload_data, payload_data_length)) {
+  if (ParseVP8FrameSize(parsed_payload, payload_data, payload_data_length) !=
+      0) {
     return false;
   }

View File

@@ -56,24 +56,23 @@ namespace {
 //      |    padding    |
 //      :               :
 //      +-+-+-+-+-+-+-+-+
-
-void VerifyBasicHeader(WebRtcRTPHeader* header, bool N, bool S, int part_id) {
-  ASSERT_TRUE(header != NULL);
-  EXPECT_EQ(N, header->type.Video.codecHeader.VP8.nonReference);
-  EXPECT_EQ(S, header->type.Video.codecHeader.VP8.beginningOfPartition);
-  EXPECT_EQ(part_id, header->type.Video.codecHeader.VP8.partitionId);
+void VerifyBasicHeader(RTPTypeHeader* type, bool N, bool S, int part_id) {
+  ASSERT_TRUE(type != NULL);
+  EXPECT_EQ(N, type->Video.codecHeader.VP8.nonReference);
+  EXPECT_EQ(S, type->Video.codecHeader.VP8.beginningOfPartition);
+  EXPECT_EQ(part_id, type->Video.codecHeader.VP8.partitionId);
 }
 
-void VerifyExtensions(WebRtcRTPHeader* header,
+void VerifyExtensions(RTPTypeHeader* type,
                       int16_t picture_id,   /* I */
                       int16_t tl0_pic_idx,  /* L */
                       uint8_t temporal_idx, /* T */
                       int key_idx /* K */) {
-  ASSERT_TRUE(header != NULL);
-  EXPECT_EQ(picture_id, header->type.Video.codecHeader.VP8.pictureId);
-  EXPECT_EQ(tl0_pic_idx, header->type.Video.codecHeader.VP8.tl0PicIdx);
-  EXPECT_EQ(temporal_idx, header->type.Video.codecHeader.VP8.temporalIdx);
-  EXPECT_EQ(key_idx, header->type.Video.codecHeader.VP8.keyIdx);
+  ASSERT_TRUE(type != NULL);
+  EXPECT_EQ(picture_id, type->Video.codecHeader.VP8.pictureId);
+  EXPECT_EQ(tl0_pic_idx, type->Video.codecHeader.VP8.tl0PicIdx);
+  EXPECT_EQ(temporal_idx, type->Video.codecHeader.VP8.temporalIdx);
+  EXPECT_EQ(key_idx, type->Video.codecHeader.VP8.keyIdx);
 }
 
 }  // namespace

@@ -405,18 +404,16 @@ TEST_F(RtpDepacketizerVp8Test, BasicHeader) {
   uint8_t packet[4] = {0};
   packet[0] = 0x14;  // Binary 0001 0100; S = 1, PartID = 4.
   packet[1] = 0x01;  // P frame.
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
       &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 0, 1, 4);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 0, 1, 4);
   VerifyExtensions(
-      payload.header, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+      &payload.type, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
 }
 
 TEST_F(RtpDepacketizerVp8Test, PictureID) {

@@ -427,29 +424,27 @@ TEST_F(RtpDepacketizerVp8Test, PictureID) {
   packet[0] = 0xA0;
   packet[1] = 0x80;
   packet[2] = kPictureId;
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
       &payload, packet + kHeaderLength1, sizeof(packet) - kHeaderLength1);
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 1, 0, 0);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 1, 0, 0);
   VerifyExtensions(
-      payload.header, kPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+      &payload.type, kPictureId, kNoTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
 
   // Re-use packet, but change to long PictureID.
   packet[2] = 0x80 | kPictureId;
   packet[3] = kPictureId;
-  memset(payload.header, 0, sizeof(rtp_header));
+  payload = RtpDepacketizer::ParsedPayload();
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
       &payload, packet + kHeaderLength2, sizeof(packet) - kHeaderLength2);
-  VerifyBasicHeader(payload.header, 1, 0, 0);
-  VerifyExtensions(payload.header,
+  VerifyBasicHeader(&payload.type, 1, 0, 0);
+  VerifyExtensions(&payload.type,
                    (kPictureId << 8) + kPictureId,
                    kNoTl0PicIdx,
                    kNoTemporalIdx,

@@ -463,18 +458,16 @@ TEST_F(RtpDepacketizerVp8Test, Tl0PicIdx) {
   packet[0] = 0x90;
   packet[1] = 0x40;
   packet[2] = kTl0PicIdx;
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
       &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 0, 1, 0);
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 0, 1, 0);
   VerifyExtensions(
-      payload.header, kNoPictureId, kTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
+      &payload.type, kNoPictureId, kTl0PicIdx, kNoTemporalIdx, kNoKeyIdx);
 }
 
 TEST_F(RtpDepacketizerVp8Test, TIDAndLayerSync) {

@@ -483,18 +476,16 @@ TEST_F(RtpDepacketizerVp8Test, TIDAndLayerSync) {
   packet[0] = 0x88;
   packet[1] = 0x20;
   packet[2] = 0x80;  // TID(2) + LayerSync(false)
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
       &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 0, 0, 8);
-  VerifyExtensions(payload.header, kNoPictureId, kNoTl0PicIdx, 2, kNoKeyIdx);
-  EXPECT_FALSE(payload.header->type.Video.codecHeader.VP8.layerSync);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 0, 0, 8);
+  VerifyExtensions(&payload.type, kNoPictureId, kNoTl0PicIdx, 2, kNoKeyIdx);
+  EXPECT_FALSE(payload.type.Video.codecHeader.VP8.layerSync);
 }
 
 TEST_F(RtpDepacketizerVp8Test, KeyIdx) {

@@ -504,18 +495,16 @@ TEST_F(RtpDepacketizerVp8Test, KeyIdx) {
   packet[0] = 0x88;
   packet[1] = 0x10;  // K = 1.
   packet[2] = kKeyIdx;
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
      &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 0, 0, 8);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 0, 0, 8);
   VerifyExtensions(
-      payload.header, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kKeyIdx);
+      &payload.type, kNoPictureId, kNoTl0PicIdx, kNoTemporalIdx, kKeyIdx);
 }
 
 TEST_F(RtpDepacketizerVp8Test, MultipleExtensions) {

@@ -527,17 +516,15 @@ TEST_F(RtpDepacketizerVp8Test, MultipleExtensions) {
   packet[3] = 17;                  // PictureID, low 8 bits.
   packet[4] = 42;                  // Tl0PicIdx.
   packet[5] = 0x40 | 0x20 | 0x11;  // TID(1) + LayerSync(true) + KEYIDX(17).
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
      &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameDelta, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 0, 0, 8);
-  VerifyExtensions(payload.header, (17 << 8) + 17, 42, 1, 17);
+  EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 0, 0, 8);
+  VerifyExtensions(&payload.type, (17 << 8) + 17, 42, 1, 17);
 }
 
 TEST_F(RtpDepacketizerVp8Test, TooShortHeader) {

@@ -546,10 +533,7 @@ TEST_F(RtpDepacketizerVp8Test, TooShortHeader) {
   packet[1] = 0x80 | 0x40 | 0x20 | 0x10;  // All extensions are enabled...
   packet[2] = 0x80 | 17;  // ... but only 2 bytes PictureID is provided.
   packet[3] = 17;  // PictureID, low 8 bits.
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   EXPECT_FALSE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
 }

@@ -571,23 +555,20 @@ TEST_F(RtpDepacketizerVp8Test, TestWithPacketizer) {
   size_t send_bytes;
   ASSERT_TRUE(packetizer.NextPacket(packet, &send_bytes, &last));
   ASSERT_TRUE(last);
-  WebRtcRTPHeader rtp_header;
-  memset(&rtp_header, 0, sizeof(rtp_header));
-  RtpDepacketizer::ParsedPayload payload(&rtp_header);
+  RtpDepacketizer::ParsedPayload payload;
 
   ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
   ExpectPacket(
      &payload, packet + kHeaderLength, sizeof(packet) - kHeaderLength);
-  EXPECT_EQ(kVideoFrameKey, payload.header->frameType);
-  VerifyBasicHeader(payload.header, 1, 1, 0);
-  VerifyExtensions(payload.header,
+  EXPECT_EQ(kVideoFrameKey, payload.frame_type);
+  EXPECT_EQ(kRtpVideoVp8, payload.type.Video.codec);
+  VerifyBasicHeader(&payload.type, 1, 1, 0);
+  VerifyExtensions(&payload.type,
                    input_header.pictureId,
                    input_header.tl0PicIdx,
                    input_header.temporalIdx,
                    input_header.keyIdx);
-  EXPECT_EQ(payload.header->type.Video.codecHeader.VP8.layerSync,
+  EXPECT_EQ(payload.type.Video.codecHeader.VP8.layerSync,
             input_header.layerSync);
 }
 
 }  // namespace webrtc

View File

@@ -79,13 +79,15 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
   }
 
   rtp_header->type.Video.isFirstPacket = is_first_packet;
-  RtpDepacketizer::ParsedPayload parsed_payload(rtp_header);
+  RtpDepacketizer::ParsedPayload parsed_payload;
   if (!depacketizer->Parse(&parsed_payload, payload, payload_data_length))
     return -1;
 
+  rtp_header->frameType = parsed_payload.frame_type;
+  rtp_header->type = parsed_payload.type;
   return data_callback_->OnReceivedPayloadData(parsed_payload.payload,
                                                parsed_payload.payload_length,
-                                               parsed_payload.header) == 0
+                                               rtp_header) == 0
              ? 0
             : -1;
 }