Generic video-codec support.
Labels frames as key/delta and also marks the first RTP packet of each frame, so that frames can be reconstructed correctly even when packets are received out of order.

BUG=1442
TBR=ajm@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1207004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3680 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in: parent 3cb42b11bf, commit 8911ce46a4
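The packetization format itself is minimal: the sender prepends a single descriptor byte to the payload of every RTP packet, with bit 0x01 signalling a key frame and bit 0x02 signalling the first packet of a frame (the constants live in the new rtp_format_video_generic.h below). As a rough standalone sketch of that byte layout, not the RTPSenderVideo/RTPReceiverVideo API itself, building and parsing the header could look like this (BuildGenericHeader and ParseGenericHeader are hypothetical helpers):

#include <cstdint>

// Same bit values as RtpFormatVideoGeneric in the new header below.
static const uint8_t kKeyFrameBit = 0x01;
static const uint8_t kFirstPacketBit = 0x02;

// Hypothetical sender-side helper: build the one-byte generic header.
uint8_t BuildGenericHeader(bool is_key_frame, bool is_first_packet) {
  uint8_t header = 0;
  if (is_key_frame) header |= kKeyFrameBit;
  if (is_first_packet) header |= kFirstPacketBit;
  return header;
}

// Hypothetical receiver-side helper: recover the two flags.
void ParseGenericHeader(uint8_t header, bool* is_key_frame,
                        bool* is_first_packet) {
  *is_key_frame = (header & kKeyFrameBit) != 0;
  *is_first_packet = (header & kFirstPacketBit) != 0;
}

Only the first packet of a frame carries the first-packet bit; the sender clears it on the remaining fragments, which is what lets the receiver re-establish frame boundaries when packets arrive out of order.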
@@ -526,6 +526,7 @@ enum VideoCodecType
     kVideoCodecI420,
     kVideoCodecRED,
     kVideoCodecULPFEC,
+    kVideoCodecGeneric,
     kVideoCodecUnknown
 };
webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h (new file, 22 lines)
@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace RtpFormatVideoGeneric {
+static const uint8_t kKeyFrameBit = 0x01;
+static const uint8_t kFirstPacketBit = 0x02;
+}  // namespace RtpFormatVideoGeneric
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_
@@ -313,11 +313,13 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
       const WebRtc_UWord32 frequency,
       const WebRtc_UWord8 channels,
       const WebRtc_UWord32 rate) const {
-    RtpVideoCodecTypes videoType = kRtpNoVideo;
+    RtpVideoCodecTypes videoType = kRtpGenericVideo;
     if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
       videoType = kRtpVp8Video;
     } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
-      videoType = kRtpNoVideo;
+      videoType = kRtpGenericVideo;
+    } else if (ModuleRTPUtility::StringCompare(payloadName, "GENERIC", 7)) {
+      videoType = kRtpGenericVideo;
     } else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
       videoType = kRtpFecVideo;
     } else {
@@ -16,6 +16,7 @@
 #include <cstring>  // memcpy()
 
 #include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -261,7 +262,7 @@ WebRtc_Word32 RTPReceiverVideo::SetCodecType(
     const RtpVideoCodecTypes video_type,
     WebRtcRTPHeader* rtp_header) const {
   switch (video_type) {
-    case kRtpNoVideo:
+    case kRtpGenericVideo:
       rtp_header->type.Video.codec = kRTPVideoGeneric;
       break;
     case kRtpVp8Video:
@@ -295,7 +296,7 @@ WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
   // All receive functions release critical_section_receiver_video_ before
   // returning.
   switch (video_type) {
-    case kRtpNoVideo:
+    case kRtpGenericVideo:
       rtp_header->type.Video.isFirstPacket = is_first_packet;
       return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
     case kRtpVp8Video:
@@ -376,8 +377,15 @@ WebRtc_Word32 RTPReceiverVideo::ReceiveVp8Codec(
 WebRtc_Word32 RTPReceiverVideo::ReceiveGenericCodec(
     WebRtcRTPHeader* rtp_header,
     const WebRtc_UWord8* payload_data,
-    const WebRtc_UWord16 payload_data_length) {
-  rtp_header->frameType = kVideoFrameKey;
+    WebRtc_UWord16 payload_data_length) {
+  uint8_t generic_header = *payload_data++;
+  --payload_data_length;
+
+  rtp_header->frameType =
+      ((generic_header & RtpFormatVideoGeneric::kKeyFrameBit) != 0) ?
+          kVideoFrameKey : kVideoFrameDelta;
+  rtp_header->type.Video.isFirstPacket =
+      (generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
 
   critical_section_receiver_video_->Leave();
 
@@ -86,6 +86,7 @@
         'video_codec_information.h',
         'rtp_format_vp8.cc',
         'rtp_format_vp8.h',
+        'rtp_format_video_generic.h',
        'vp8_partition_aggregator.cc',
        'vp8_partition_aggregator.h',
        # Mocks
@@ -312,7 +312,7 @@ WebRtc_Word32 RTPSender::SendOutgoingData(
       return 0;
     }
   }
-  RtpVideoCodecTypes video_type = kRtpNoVideo;
+  RtpVideoCodecTypes video_type = kRtpGenericVideo;
   if (CheckPayloadType(payload_type, &video_type) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
                  "%s invalid argument failed to find payload_type:%d",
@@ -1133,9 +1133,7 @@ VideoCodecInformation *RTPSender::CodecInformationVideo() {
 }
 
 RtpVideoCodecTypes RTPSender::VideoCodecType() const {
-  if (audio_configured_) {
-    return kRtpNoVideo;
-  }
+  assert(!audio_configured_ && "Sender is an audio stream!");
   return video_->VideoCodecType();
 }
 
@@ -15,6 +15,7 @@
 #include <gtest/gtest.h>
 
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -220,4 +221,63 @@ TEST_F(RtpSenderTest, DISABLED_TrafficSmoothing) {
   // Verify transmission time offset.
   EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
 }
+
+TEST_F(RtpSenderTest, SendGenericVideo) {
+  char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
+  const uint8_t payload_type = 127;
+  ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
+                                            0, 1500));
+  uint8_t payload[] = {47, 11, 32, 93, 89};
+
+  // Send keyframe
+  ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234,
+                                             4321, payload, sizeof(payload),
+                                             NULL));
+
+  ModuleRTPUtility::RTPHeaderParser rtp_parser(transport_.last_sent_packet_,
+      transport_.last_sent_packet_len_);
+  webrtc::WebRtcRTPHeader rtp_header;
+  ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+
+  const uint8_t* payload_data = ModuleRTPUtility::GetPayloadData(&rtp_header,
+      transport_.last_sent_packet_);
+  uint8_t generic_header = *payload_data++;
+
+  ASSERT_EQ(sizeof(payload) + sizeof(generic_header),
+            ModuleRTPUtility::GetPayloadDataLength(&rtp_header,
+                transport_.last_sent_packet_len_));
+
+  EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
+  EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
+
+  EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload)));
+
+  // Send delta frame
+  payload[0] = 13;
+  payload[1] = 42;
+  payload[4] = 13;
+
+  ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta, payload_type,
+                                             1234, 4321, payload,
+                                             sizeof(payload), NULL));
+
+  ModuleRTPUtility::RTPHeaderParser rtp_parser2(transport_.last_sent_packet_,
+      transport_.last_sent_packet_len_);
+  ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+
+  payload_data = ModuleRTPUtility::GetPayloadData(&rtp_header,
+      transport_.last_sent_packet_);
+  generic_header = *payload_data++;
+
+  EXPECT_FALSE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
+  EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
+
+  ASSERT_EQ(sizeof(payload) + sizeof(generic_header),
+            ModuleRTPUtility::GetPayloadDataLength(&rtp_header,
+                transport_.last_sent_packet_len_));
+
+  EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload)));
+}
+
 }  // namespace webrtc
@@ -21,6 +21,7 @@
 
 #include "producer_fec.h"
 #include "rtp_format_vp8.h"
+#include "rtp_format_video_generic.h"
 
 namespace webrtc {
 enum { REDForFECHeaderLength = 1 };
@@ -37,7 +38,7 @@ RTPSenderVideo::RTPSenderVideo(const WebRtc_Word32 id,
     _rtpSender(*rtpSender),
     _sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
-    _videoType(kRtpNoVideo),
+    _videoType(kRtpGenericVideo),
     _videoCodecInformation(NULL),
     _maxBitrate(0),
     _retransmissionSettings(kRetransmitBaseLayer),
@@ -89,11 +90,13 @@ WebRtc_Word32 RTPSenderVideo::RegisterVideoPayload(
     ModuleRTPUtility::Payload*& payload) {
   CriticalSectionScoped cs(_sendVideoCritsect);
 
-  RtpVideoCodecTypes videoType = kRtpNoVideo;
+  RtpVideoCodecTypes videoType = kRtpGenericVideo;
   if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3)) {
     videoType = kRtpVp8Video;
   } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
-    videoType = kRtpNoVideo;
+    videoType = kRtpGenericVideo;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "GENERIC", 7)) {
+    videoType = kRtpGenericVideo;
   } else {
     return -1;
   }
@@ -285,9 +288,9 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
     WebRtc_Word32 retVal = -1;
     switch(videoType)
     {
-    case kRtpNoVideo:
-        retVal = SendGeneric(payloadType, captureTimeStamp, capture_time_ms,
-                             payloadData, payloadSize);
+    case kRtpGenericVideo:
+        retVal = SendGeneric(frameType, payloadType, captureTimeStamp,
+                             capture_time_ms, payloadData, payloadSize);
         break;
     case kRtpVp8Video:
         retVal = SendVP8(frameType,
@@ -312,67 +315,59 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
     return 0;
 }
 
-WebRtc_Word32
-RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
-                            const uint32_t captureTimeStamp,
-                            int64_t capture_time_ms,
-                            const WebRtc_UWord8* payloadData,
-                            const WebRtc_UWord32 payloadSize)
-{
-    WebRtc_UWord16 payloadBytesInPacket = 0;
-    WebRtc_UWord32 bytesSent = 0;
-    WebRtc_Word32 payloadBytesToSend = payloadSize;
-
-    const WebRtc_UWord8* data = payloadData;
-    WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
-    WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() -
-        FECPacketOverhead() - rtpHeaderLength;
-    WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
-
-    // Fragment packet into packets of max MaxPayloadLength bytes payload.
-    while (payloadBytesToSend > 0)
-    {
-        if (payloadBytesToSend > maxLength)
-        {
-            payloadBytesInPacket = maxLength;
-            payloadBytesToSend -= payloadBytesInPacket;
-            // MarkerBit is 0
-            if(_rtpSender.BuildRTPheader(dataBuffer,
-                                         payloadType,
-                                         false,
-                                         captureTimeStamp) != rtpHeaderLength)
-            {
-                return -1;
-            }
-        }
-        else
-        {
-            payloadBytesInPacket = (WebRtc_UWord16)payloadBytesToSend;
-            payloadBytesToSend = 0;
-            // MarkerBit is 1
-            if(_rtpSender.BuildRTPheader(dataBuffer, payloadType, true,
-                                         captureTimeStamp) != rtpHeaderLength)
-            {
-                return -1;
-            }
-        }
-
-        // Put payload in packet
-        memcpy(&dataBuffer[rtpHeaderLength], &data[bytesSent],
-               payloadBytesInPacket);
-        bytesSent += payloadBytesInPacket;
-
-        if(-1 == SendVideoPacket(dataBuffer,
-                                 payloadBytesInPacket,
-                                 rtpHeaderLength,
-                                 capture_time_ms,
-                                 kAllowRetransmission,
-                                 true))
-        {
-            return -1;
-        }
-    }
-    return 0;
+int32_t RTPSenderVideo::SendGeneric(const FrameType frame_type,
+                                    const int8_t payload_type,
+                                    const uint32_t capture_timestamp,
+                                    int64_t capture_time_ms,
+                                    const uint8_t* payload,
+                                    uint32_t size) {
+  assert(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
+  uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
+  uint16_t max_length = _rtpSender.MaxPayloadLength() - FECPacketOverhead() -
+      rtp_header_length - (1 /* generic header length */);
+
+  // Fragment packets more evenly by splitting the payload up evenly.
+  uint32_t num_packets = (size + max_length - 1) / max_length;
+  uint32_t payload_length = (size + num_packets - 1) / num_packets;
+  assert(payload_length <= max_length);
+
+  // Fragment packet into packets of max MaxPayloadLength bytes payload.
+  uint8_t buffer[IP_PACKET_SIZE];
+
+  uint8_t generic_header = RtpFormatVideoGeneric::kFirstPacketBit;
+  if (frame_type == kVideoFrameKey) {
+    generic_header |= RtpFormatVideoGeneric::kKeyFrameBit;
+  }
+
+  while (size > 0) {
+    if (size < payload_length) {
+      payload_length = size;
+    }
+    size -= payload_length;
+
+    // MarkerBit is 1 on final packet (bytes_to_send == 0)
+    if (_rtpSender.BuildRTPheader(buffer, payload_type, size == 0,
+                                  capture_timestamp) != rtp_header_length) {
+      return -1;
+    }
+
+    uint8_t* out_ptr = &buffer[rtp_header_length];
+
+    // Put generic header in packet
+    *out_ptr++ = generic_header;
+    // Remove first-packet bit, following packets are intermediate
+    generic_header &= ~RtpFormatVideoGeneric::kFirstPacketBit;
+
+    // Put payload in packet
+    memcpy(out_ptr, payload, payload_length);
+    payload += payload_length;
+
+    if (SendVideoPacket(buffer, payload_length + 1, rtp_header_length,
+                        capture_time_ms, kAllowRetransmission, true)) {
+      return -1;
+    }
+  }
+  return 0;
 }
 
 VideoCodecInformation*
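The even split in the new SendGeneric is worth spelling out: num_packets is the ceiling of size / max_length, and payload_length is then the ceiling of size / num_packets, so no fragment exceeds max_length and the fragments differ in size by at most one byte. A self-contained sketch of just that arithmetic (EvenFragmentSize is a hypothetical helper, not part of the patch):

#include <cassert>
#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the split used by the rewritten SendGeneric.
uint32_t EvenFragmentSize(uint32_t size, uint32_t max_length) {
  assert(size > 0 && max_length > 0);
  uint32_t num_packets = (size + max_length - 1) / max_length;       // ceil(size / max_length)
  uint32_t payload_length = (size + num_packets - 1) / num_packets;  // ceil(size / num_packets)
  assert(payload_length <= max_length);
  return payload_length;
}

int main() {
  // 2500 payload bytes with room for 1200 per packet: three fragments of
  // 834, 834 and 832 bytes instead of 1200 + 1200 + 100.
  printf("%u\n", EvenFragmentSize(2500, 1200));  // prints 834
  return 0;
}

The last fragment simply takes whatever remains (the loop clamps payload_length to the remaining size), and the RTP marker bit is set only on that final packet.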
@@ -97,11 +97,11 @@ protected:
                            bool protect);
 
 private:
-  WebRtc_Word32 SendGeneric(const WebRtc_Word8 payloadType,
-                            const uint32_t captureTimeStamp,
-                            int64_t capture_time_ms,
-                            const WebRtc_UWord8* payloadData,
-                            const WebRtc_UWord32 payloadSize);
+  WebRtc_Word32 SendGeneric(const FrameType frame_type,
+                            const int8_t payload_type,
+                            const uint32_t capture_timestamp,
+                            int64_t capture_time_ms,
+                            const uint8_t* payload, const uint32_t size);
 
   WebRtc_Word32 SendVP8(const FrameType frameType,
                         const WebRtc_Word8 payloadType,
@@ -197,7 +197,7 @@ void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
   type = videoType;
 
   switch (type) {
-    case kRtpNoVideo:
+    case kRtpGenericVideo:
       break;
     case kRtpVp8Video: {
       info.VP8.nonReferenceFrame = false;
@@ -520,7 +520,7 @@ bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
   parsedPacket.SetType(_videoType);
 
   switch (_videoType) {
-    case kRtpNoVideo:
+    case kRtpGenericVideo:
       return ParseGeneric(parsedPacket);
     case kRtpVp8Video:
       return ParseVP8(parsedPacket);
@@ -21,7 +21,7 @@
 namespace webrtc {
 enum RtpVideoCodecTypes
 {
-    kRtpNoVideo = 0,
+    kRtpGenericVideo = 0,
     kRtpFecVideo = 10,
     kRtpVp8Video = 11
 };
@@ -8,17 +8,18 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
 
-#include "video_codec_interface.h"
-#include "typedefs.h"
+#include <vector>
+
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/typedefs.h"
 
 namespace webrtc {
 
 class I420Encoder : public VideoEncoder {
-public:
-
+ public:
   I420Encoder();
 
   virtual ~I420Encoder();
@@ -64,25 +65,29 @@ public:
   // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
   virtual int Release();
 
-  virtual int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/)
-  {return WEBRTC_VIDEO_CODEC_OK;}
+  virtual int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
 
-  virtual int SetChannelParameters(uint32_t /*packetLoss*/, int /*rtt*/)
-  {return WEBRTC_VIDEO_CODEC_OK;}
+  virtual int SetChannelParameters(uint32_t /*packetLoss*/, int /*rtt*/) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
 
-  virtual int CodecConfigParameters(uint8_t* /*buffer*/, int /*size*/)
-  {return WEBRTC_VIDEO_CODEC_OK;}
+  virtual int CodecConfigParameters(uint8_t* /*buffer*/, int /*size*/) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
 
-private:
+ private:
+  static uint8_t* InsertHeader(uint8_t* buffer, uint16_t width,
+                               uint16_t height);
+
   bool _inited;
   EncodedImage _encodedImage;
   EncodedImageCallback* _encodedCompleteCallback;
-
-};  // end of WebRtcI420DEncoder class
+};  // class I420Encoder
 
 class I420Decoder : public VideoDecoder {
-public:
-
+ public:
   I420Decoder();
 
   virtual ~I420Decoder();
@@ -95,8 +100,10 @@ public:
   virtual int InitDecode(const VideoCodec* codecSettings,
                          int /*numberOfCores*/);
 
-  virtual int SetCodecConfigParameters(const uint8_t* /*buffer*/, int /*size*/)
-  {return WEBRTC_VIDEO_CODEC_OK;};
+  virtual int SetCodecConfigParameters(const uint8_t* /*buffer*/,
+                                       int /*size*/) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
 
   // Decode encoded image (as a part of a video stream). The decoded image
   // will be returned to the user through the decode complete callback.
@@ -136,16 +143,17 @@ public:
   //          <0 - Error
   virtual int Reset();
 
-private:
+ private:
+  static const uint8_t* ExtractHeader(const uint8_t* buffer, uint16_t* width,
+                                      uint16_t* height);
+
   I420VideoFrame _decodedImage;
   int _width;
   int _height;
   bool _inited;
   DecodedImageCallback* _decodeCompleteCallback;
-
-};  // End of WebRtcI420Decoder class.
+};  // class I420Decoder
 
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
@@ -10,25 +10,20 @@
 
 #include "modules/video_coding/codecs/i420/main/interface/i420.h"
 
-#include <string.h>
+#include <string>
+#include <limits>
 
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 
-namespace webrtc
-{
+namespace webrtc {
 
-I420Encoder::I420Encoder():
-    _inited(false),
-    _encodedImage(),
-    _encodedCompleteCallback(NULL)
-{}
+I420Encoder::I420Encoder() : _inited(false), _encodedImage(),
+    _encodedCompleteCallback(NULL) {
+}
 
 I420Encoder::~I420Encoder() {
   _inited = false;
-  if (_encodedImage._buffer != NULL) {
-    delete [] _encodedImage._buffer;
-    _encodedImage._buffer = NULL;
-  }
+  delete [] _encodedImage._buffer;
 }
 
 int I420Encoder::Release() {
@@ -85,37 +80,53 @@ int I420Encoder::Encode(const I420VideoFrame& inputImage,
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
 
-  _encodedImage._frameType = kKeyFrame; // No coding.
+  _encodedImage._frameType = kKeyFrame;
   _encodedImage._timeStamp = inputImage.timestamp();
   _encodedImage._encodedHeight = inputImage.height();
   _encodedImage._encodedWidth = inputImage.width();
 
-  int req_length = CalcBufferSize(kI420, inputImage.width(),
-                                  inputImage.height());
-  if (_encodedImage._size > static_cast<unsigned int>(req_length)) {
-    // Allocating encoded memory.
-    if (_encodedImage._buffer != NULL) {
-      delete [] _encodedImage._buffer;
-      _encodedImage._buffer = NULL;
-      _encodedImage._size = 0;
-    }
-    uint8_t* newBuffer = new uint8_t[req_length];
-    if (newBuffer == NULL) {
-      return WEBRTC_VIDEO_CODEC_MEMORY;
-    }
-    _encodedImage._size = req_length;
-    _encodedImage._buffer = newBuffer;
+  int width = inputImage.width();
+  if (width > std::numeric_limits<uint16_t>::max()) {
+    return WEBRTC_VIDEO_CODEC_ERR_SIZE;
+  }
+  int height = inputImage.height();
+  if (height > std::numeric_limits<uint16_t>::max()) {
+    return WEBRTC_VIDEO_CODEC_ERR_SIZE;
   }
 
-  int ret_length = ExtractBuffer(inputImage, req_length, _encodedImage._buffer);
+  const size_t kI420HeaderSize = 4;
+  int req_length = CalcBufferSize(kI420, inputImage.width(),
+                                  inputImage.height()) + kI420HeaderSize;
+  if (_encodedImage._size > static_cast<unsigned int>(req_length)) {
+    // Reallocate buffer.
+    delete [] _encodedImage._buffer;
+
+    _encodedImage._buffer = new uint8_t[req_length];
+    _encodedImage._size = req_length;
+  }
+
+  uint8_t *buffer = _encodedImage._buffer;
+
+  buffer = InsertHeader(buffer, width, height);
+
+  int ret_length = ExtractBuffer(inputImage, req_length - kI420HeaderSize,
+                                 buffer);
   if (ret_length < 0)
     return WEBRTC_VIDEO_CODEC_MEMORY;
-  _encodedImage._length = ret_length;
+  _encodedImage._length = ret_length + kI420HeaderSize;
 
   _encodedCompleteCallback->Encoded(_encodedImage);
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
+uint8_t* I420Encoder::InsertHeader(uint8_t *buffer, uint16_t width,
+                                   uint16_t height) {
+  *buffer++ = static_cast<uint8_t>(width >> 8);
+  *buffer++ = static_cast<uint8_t>(width & 0xFF);
+  *buffer++ = static_cast<uint8_t>(height >> 8);
+  *buffer++ = static_cast<uint8_t>(height & 0xFF);
+  return buffer;
+}
+
 int
 I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
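The reworked I420 wrapper prepends a four-byte size header to every encoded frame: width high byte, width low byte, height high byte, height low byte, which is what InsertHeader writes above and ExtractHeader reads back in the decoder further down. A minimal standalone round trip of that layout (WriteSizeHeader and ReadSizeHeader are illustrative stand-ins, not the class methods themselves):

#include <cassert>
#include <cstdint>

// Illustrative stand-in for I420Encoder::InsertHeader: big-endian 16-bit
// width followed by big-endian 16-bit height.
uint8_t* WriteSizeHeader(uint8_t* buffer, uint16_t width, uint16_t height) {
  *buffer++ = static_cast<uint8_t>(width >> 8);
  *buffer++ = static_cast<uint8_t>(width & 0xFF);
  *buffer++ = static_cast<uint8_t>(height >> 8);
  *buffer++ = static_cast<uint8_t>(height & 0xFF);
  return buffer;
}

// Illustrative stand-in for I420Decoder::ExtractHeader.
const uint8_t* ReadSizeHeader(const uint8_t* buffer, uint16_t* width,
                              uint16_t* height) {
  *width = static_cast<uint16_t>(*buffer++) << 8;
  *width |= *buffer++;
  *height = static_cast<uint16_t>(*buffer++) << 8;
  *height |= *buffer++;
  return buffer;
}

int main() {
  uint8_t header[4];
  WriteSizeHeader(header, 640, 480);
  uint16_t w = 0;
  uint16_t h = 0;
  ReadSizeHeader(header, &w, &h);
  assert(w == 640 && h == 480);
  return 0;
}

This is also why the encoder now rejects frames wider or taller than std::numeric_limits<uint16_t>::max(): the dimensions have to fit in those two 16-bit fields.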
@@ -124,13 +135,9 @@ I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
 }
 
 
-I420Decoder::I420Decoder():
-    _decodedImage(),
-    _width(0),
-    _height(0),
-    _inited(false),
-    _decodeCompleteCallback(NULL)
-{}
+I420Decoder::I420Decoder() : _decodedImage(), _width(0), _height(0),
+    _inited(false), _decodeCompleteCallback(NULL) {
+}
 
 I420Decoder::~I420Decoder() {
   Release();
@@ -156,12 +163,10 @@ I420Decoder::InitDecode(const VideoCodec* codecSettings,
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int
-I420Decoder::Decode(const EncodedImage& inputImage,
-                    bool /*missingFrames*/,
-                    const RTPFragmentationHeader* /*fragmentation*/,
-                    const CodecSpecificInfo* /*codecSpecificInfo*/,
-                    int64_t /*renderTimeMs*/) {
+int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
+                        const RTPFragmentationHeader* /*fragmentation*/,
+                        const CodecSpecificInfo* /*codecSpecificInfo*/,
+                        int64_t /*renderTimeMs*/) {
   if (inputImage._buffer == NULL) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
@@ -175,8 +180,14 @@ I420Decoder::Decode(const EncodedImage& inputImage,
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (!_inited) {
-  return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
+  const uint8_t* buffer = inputImage._buffer;
+  uint16_t width, height;
+
+  buffer = ExtractHeader(buffer, &width, &height);
+  _width = width;
+  _height = height;
+
   // Verify that the available length is sufficient:
   int req_length = CalcBufferSize(kI420, _width, _height);
@@ -187,9 +198,9 @@ I420Decoder::Decode(const EncodedImage& inputImage,
   int half_width = (_width + 1) / 2;
   _decodedImage.CreateEmptyFrame(_width, _height,
                                  _width, half_width, half_width);
-  // Converting from buffer to plane representation.
-  int ret = ConvertToI420(kI420, inputImage._buffer, 0, 0, _width, _height,
-                          0, kRotateNone, &_decodedImage);
+  // Converting from buffer to plane representation.
+  int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0, kRotateNone,
+                          &_decodedImage);
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
   }
@@ -199,16 +210,24 @@ I420Decoder::Decode(const EncodedImage& inputImage,
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int
-I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
+const uint8_t* I420Decoder::ExtractHeader(const uint8_t* buffer,
+                                          uint16_t* width, uint16_t* height) {
+  *width = static_cast<uint16_t>(*buffer++) << 8;
+  *width |= *buffer++;
+  *height = static_cast<uint16_t>(*buffer++) << 8;
+  *height |= *buffer++;
+
+  return buffer;
+}
+
+int I420Decoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
   _decodeCompleteCallback = callback;
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int
-I420Decoder::Release() {
+int I420Decoder::Release() {
   _inited = false;
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-}
+}  // namespace webrtc
@@ -107,10 +107,6 @@ void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
         case webrtc::kVideoCodecVP8:
            ViETest::Log("\tcodecType: VP8");
            break;
-        // TODO(sh): keep or remove MPEG4?
-        // case webrtc::kVideoCodecMPEG4:
-        //     ViETest::Log("\tcodecType: MPEG4");
-        //     break;
        case webrtc::kVideoCodecI420:
            ViETest::Log("\tcodecType: I420");
            break;
@@ -120,8 +116,11 @@ void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
        case webrtc::kVideoCodecULPFEC:
            ViETest::Log("\tcodecType: ULPFEC");
            break;
+        case webrtc::kVideoCodecGeneric:
+            ViETest::Log("\tcodecType: GENERIC");
+            break;
        case webrtc::kVideoCodecUnknown:
-            ViETest::Log("\tcodecType: ????");
+            ViETest::Log("\tcodecType: UNKNOWN");
            break;
    }
@@ -21,17 +21,19 @@
 
 #include <iostream>
 
-#include "common_types.h"
-#include "tb_external_transport.h"
-#include "voe_base.h"
-#include "vie_autotest_defines.h"
-#include "vie_autotest.h"
-#include "vie_base.h"
-#include "vie_capture.h"
-#include "vie_codec.h"
-#include "vie_network.h"
-#include "vie_render.h"
-#include "vie_rtp_rtcp.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_external_codec.h"
+#include "webrtc/video_engine/include/vie_network.h"
+#include "webrtc/video_engine/include/vie_render.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
+#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
+#include "webrtc/voice_engine/include/voe_base.h"
 
 #define VCM_RED_PAYLOAD_TYPE 96
 #define VCM_ULPFEC_PAYLOAD_TYPE 97
@@ -323,6 +325,8 @@ int VideoEngineSampleCode(void* window1, void* window2)
            printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
        }
    }
+    printf("%d. VP8 over Generic.\n", ptrViECodec->NumberOfCodecs() + 1);
+
    printf("Choose codec: ");
 #ifdef WEBRTC_ANDROID
    codecIdx = 0;
@@ -336,12 +340,33 @@ int VideoEngineSampleCode(void* window1, void* window2)
    getchar();
    codecIdx = codecIdx - 1;  // Compensate for idx start at 1.
 #endif
 
-    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
-    if (error == -1)
-    {
+    if (codecIdx == ptrViECodec->NumberOfCodecs()) {
+      for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); ++codecIdx) {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        assert(error != -1);
+        if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+          break;
+      }
+      assert(videoCodec.codecType == webrtc::kVideoCodecVP8);
+      videoCodec.codecType = webrtc::kVideoCodecGeneric;
+      strcpy(videoCodec.plName, "GENERIC");
+      uint8_t pl_type = 127;
+      videoCodec.plType = pl_type;
+      webrtc::ViEExternalCodec* external_codec = webrtc::ViEExternalCodec
+          ::GetInterface(ptrViE);
+      assert(external_codec != NULL);
+      error = external_codec->RegisterExternalSendCodec(videoChannel, pl_type,
+          webrtc::VP8Encoder::Create(), false);
+      assert(error != -1);
+      error = external_codec->RegisterExternalReceiveCodec(videoChannel,
+          pl_type, webrtc::VP8Decoder::Create(), false);
+      assert(error != -1);
+    } else {
+      error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+      if (error == -1) {
        printf("ERROR in ViECodec::GetCodec\n");
        return -1;
+      }
    }
 
    // Set spatial resolution option
@@ -352,13 +377,9 @@ int VideoEngineSampleCode(void* window1, void* window2)
    std::cout << "3. VGA (640X480) " << std::endl;
    std::cout << "4. 4CIF (704X576) " << std::endl;
    std::cout << "5. WHD (1280X720) " << std::endl;
+    std::cout << "6. FHD (1920X1080) " << std::endl;
    std::getline(std::cin, str);
    int resolnOption = atoi(str.c_str());
-    // Try to keep the test frame size small when I420
-    if (videoCodec.codecType == webrtc::kVideoCodecI420)
-    {
-        resolnOption = 1;
-    }
    switch (resolnOption)
    {
        case 1:
@@ -381,6 +402,10 @@ int VideoEngineSampleCode(void* window1, void* window2)
            videoCodec.width = 1280;
            videoCodec.height = 720;
            break;
+        case 6:
+            videoCodec.width = 1920;
+            videoCodec.height = 1080;
+            break;
    }
 
    // Set number of temporal layers.
@@ -405,11 +430,9 @@ int VideoEngineSampleCode(void* window1, void* window2)
    }
 
    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
-    if (error == -1)
-    {
-        printf("ERROR in ViECodec::SetSendCodec\n");
-        return -1;
-    }
+    assert(error != -1);
+    error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+    assert(error != -1);
 
    //
    // Choose Protection Mode
@@ -158,7 +158,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
     shared_data_->SetLastError(kViECodecReceiveOnlyChannel);
     return -1;
   }
-
   // Set a max_bitrate if the user hasn't set one.
   VideoCodec video_codec_internal;
   memcpy(&video_codec_internal, &video_codec, sizeof(VideoCodec));
@@ -740,9 +739,11 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
                  "Codec type doesn't match pl_name", video_codec.plType);
     return false;
   } else if ((video_codec.codecType == kVideoCodecVP8 &&
-              strncmp(video_codec.plName, "VP8", 4) == 0) ||
-             (video_codec.codecType == kVideoCodecI420 &&
-              strncmp(video_codec.plName, "I420", 4) == 0)) {
+                 strncmp(video_codec.plName, "VP8", 4) == 0) ||
+             (video_codec.codecType == kVideoCodecI420 &&
+                 strncmp(video_codec.plName, "I420", 4) == 0) ||
+             (video_codec.codecType == kVideoCodecGeneric &&
+                 strncmp(video_codec.plName, "GENERIC", 7) == 0)) {
     // OK.
   } else {
     WEBRTC_TRACE(kTraceError, kTraceVideo, -1,