Lint-cleaned video and audio receivers.

BUG=
TESTED=trybots

Review URL: https://webrtc-codereview.appspot.com/1093004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@3471 4adac7df-926f-26a2-2b94-8c16560cd09d

commit a7303bdfb5
parent c4e45f67c0
webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -8,144 +8,125 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"

#include <math.h>   // pow()

#include <cassert>  // assert
#include <cstring>  // memcpy()

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc {
RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id,
                                   RtpData* data_callback,
                                   RtpAudioFeedback* incoming_messages_callback)
    : RTPReceiverStrategy(data_callback),
      id_(id),
      critical_section_rtp_receiver_audio_(
          CriticalSectionWrapper::CreateCriticalSection()),
      last_received_frequency_(8000),
      telephone_event_(false),
      telephone_event_forward_to_decoder_(false),
      telephone_event_detect_end_of_tone_(false),
      telephone_event_payload_type_(-1),
      cng_nb_payload_type_(-1),
      cng_wb_payload_type_(-1),
      cng_swb_payload_type_(-1),
      cng_fb_payload_type_(-1),
      cng_payload_type_(-1),
      g722_payload_type_(-1),
      last_received_g722_(false),
      cb_audio_feedback_(incoming_messages_callback) {
  last_payload_.Audio.channels = 1;
}

WebRtc_UWord32 RTPReceiverAudio::AudioFrequency() const {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  if (last_received_g722_) {
    return 8000;
  }
  return last_received_frequency_;
}

// Outband TelephoneEvent(DTMF) detection
WebRtc_Word32 RTPReceiverAudio::SetTelephoneEventStatus(
    const bool enable,
    const bool forward_to_decoder,
    const bool detect_end_of_tone) {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  telephone_event_ = enable;
  telephone_event_detect_end_of_tone_ = detect_end_of_tone;
  telephone_event_forward_to_decoder_ = forward_to_decoder;
  return 0;
}

// Is outband TelephoneEvent(DTMF) turned on/off?
bool RTPReceiverAudio::TelephoneEvent() const {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  return telephone_event_;
}

// Is forwarding of outband telephone events turned on/off?
bool RTPReceiverAudio::TelephoneEventForwardToDecoder() const {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  return telephone_event_forward_to_decoder_;
}

bool RTPReceiverAudio::TelephoneEventPayloadType(
    const WebRtc_Word8 payload_type) const {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  return (telephone_event_payload_type_ == payload_type) ? true : false;
}

bool RTPReceiverAudio::CNGPayloadType(const WebRtc_Word8 payload_type,
                                      WebRtc_UWord32* frequency,
                                      bool* cng_payload_type_has_changed) {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
  *cng_payload_type_has_changed = false;

  // We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
  if (cng_nb_payload_type_ == payload_type) {
    *frequency = 8000;
    if (cng_payload_type_ != -1 && cng_payload_type_ != cng_nb_payload_type_)
      *cng_payload_type_has_changed = true;

    cng_payload_type_ = cng_nb_payload_type_;
    return true;
  } else if (cng_wb_payload_type_ == payload_type) {
    // if last received codec is G.722 we must use frequency 8000
    if (last_received_g722_) {
      *frequency = 8000;
    } else {
      *frequency = 16000;
    }
    if (cng_payload_type_ != -1 && cng_payload_type_ != cng_wb_payload_type_)
      *cng_payload_type_has_changed = true;
    cng_payload_type_ = cng_wb_payload_type_;
    return true;
  } else if (cng_swb_payload_type_ == payload_type) {
    *frequency = 32000;
    if ((cng_payload_type_ != -1) &&
        (cng_payload_type_ != cng_swb_payload_type_))
      *cng_payload_type_has_changed = true;
    cng_payload_type_ = cng_swb_payload_type_;
    return true;
  } else if (cng_fb_payload_type_ == payload_type) {
    *frequency = 48000;
    if (cng_payload_type_ != -1 && cng_payload_type_ != cng_fb_payload_type_)
      *cng_payload_type_has_changed = true;
    cng_payload_type_ = cng_fb_payload_type_;
    return true;
  } else {
    // not CNG
    if (g722_payload_type_ == payload_type) {
      last_received_g722_ = true;
    } else {
      last_received_g722_ = false;
    }
  }
  return false;
}

bool RTPReceiverAudio::ShouldReportCsrcChanges(
@@ -154,59 +135,57 @@ bool RTPReceiverAudio::ShouldReportCsrcChanges(
  return !TelephoneEventPayloadType(payload_type);
}

// -   Sample based or frame based codecs based on RFC 3551
// -
// -   NOTE! There is one error in the RFC, stating G.722 uses 8 bits/samples.
// -   The correct rate is 4 bits/sample.
// -
// -   name of                              sampling              default
// -   encoding  sample/frame  bits/sample      rate  ms/frame  ms/packet
// -
// -   Sample based audio codecs
// -   DVI4      sample        4                var.                   20
// -   G722      sample        4              16,000                   20
// -   G726-40   sample        5               8,000                   20
// -   G726-32   sample        4               8,000                   20
// -   G726-24   sample        3               8,000                   20
// -   G726-16   sample        2               8,000                   20
// -   L8        sample        8                var.                   20
// -   L16       sample        16               var.                   20
// -   PCMA      sample        8                var.                   20
// -   PCMU      sample        8                var.                   20
// -
// -   Frame based audio codecs
// -   G723      frame         N/A             8,000        30         30
// -   G728      frame         N/A             8,000       2.5         20
// -   G729      frame         N/A             8,000        10         20
// -   G729D     frame         N/A             8,000        10         20
// -   G729E     frame         N/A             8,000        10         20
// -   GSM       frame         N/A             8,000        20         20
// -   GSM-EFR   frame         N/A             8,000        20         20
// -   LPC       frame         N/A             8,000        20         20
// -   MPA       frame         N/A              var.       var.
// -
// -   G7221     frame         N/A
WebRtc_Word32 RTPReceiverAudio::OnNewPayloadTypeCreated(
    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
    const WebRtc_Word8 payload_type,
    const WebRtc_UWord32 frequency) {
  CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());

  if (ModuleRTPUtility::StringCompare(payload_name, "telephone-event", 15)) {
    telephone_event_payload_type_ = payload_type;
  }
  if (ModuleRTPUtility::StringCompare(payload_name, "cn", 2)) {
    // we can have three CNG on 8000Hz, 16000Hz and 32000Hz
    if (frequency == 8000) {
      cng_nb_payload_type_ = payload_type;
    } else if (frequency == 16000) {
      cng_wb_payload_type_ = payload_type;
    } else if (frequency == 32000) {
      cng_swb_payload_type_ = payload_type;
    } else if (frequency == 48000) {
      cng_fb_payload_type_ = payload_type;
    } else {
      assert(false);
      return -1;
@@ -216,50 +195,52 @@ WebRtc_Word32 RTPReceiverAudio::OnNewPayloadTypeCreated(
}

void RTPReceiverAudio::SendTelephoneEvents(
    WebRtc_UWord8 number_of_new_events,
    WebRtc_UWord8 new_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS],
    WebRtc_UWord8 number_of_removed_events,
    WebRtc_UWord8 removed_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS]) {

  // Copy these variables since we can't hold the critsect when we call the
  // callback. cb_audio_feedback_ and id_ are immutable though.
  bool telephone_event;
  bool telephone_event_detect_end_of_tone;
  {
    CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
    telephone_event = telephone_event_;
    telephone_event_detect_end_of_tone = telephone_event_detect_end_of_tone_;
  }
  if (telephone_event) {
    for (int n = 0; n < number_of_new_events; ++n) {
      cb_audio_feedback_->OnReceivedTelephoneEvent(id_, new_events[n], false);
    }
    if (telephone_event_detect_end_of_tone) {
      for (int n = 0; n < number_of_removed_events; ++n) {
        cb_audio_feedback_->OnReceivedTelephoneEvent(
            id_, removed_events[n], true);
      }
    }
  }
}

WebRtc_Word32 RTPReceiverAudio::ParseRtpPacket(
    WebRtcRTPHeader* rtp_header,
    const ModuleRTPUtility::PayloadUnion& specific_payload,
    const bool is_red,
    const WebRtc_UWord8* packet,
    const WebRtc_UWord16 packet_length,
    const WebRtc_Word64 timestamp_ms,
    const bool is_first_packet) {

  const WebRtc_UWord8* payload_data =
      ModuleRTPUtility::GetPayloadData(rtp_header, packet);
  const WebRtc_UWord16 payload_data_length =
      ModuleRTPUtility::GetPayloadDataLength(rtp_header, packet_length);

  return ParseAudioCodecSpecific(rtp_header,
                                 payload_data,
                                 payload_data_length,
                                 specific_payload.Audio,
                                 is_red);
}

WebRtc_Word32 RTPReceiverAudio::GetFrequencyHz() const {
@@ -267,197 +248,185 @@ WebRtc_Word32 RTPReceiverAudio::GetFrequencyHz() const {
}

RTPAliveType RTPReceiverAudio::ProcessDeadOrAlive(
    WebRtc_UWord16 last_payload_length) const {

  // Our CNG is 9 bytes; if it's a likely CNG the receiver needs to check
  // kRtpNoRtp against NetEq speech_type kOutputPLCtoCNG.
  if (last_payload_length < 10) {  // our CNG is 9 bytes
    return kRtpNoRtp;
  } else {
    return kRtpDead;
  }
}

void RTPReceiverAudio::CheckPayloadChanged(
    const WebRtc_Word8 payload_type,
    ModuleRTPUtility::PayloadUnion* specific_payload,
    bool* should_reset_statistics,
    bool* should_discard_changes) {
  *should_discard_changes = false;
  *should_reset_statistics = false;

  if (TelephoneEventPayloadType(payload_type)) {
    // Don't do callbacks for DTMF packets.
    *should_discard_changes = true;
    return;
  }
  // frequency is updated for CNG
  bool cng_payload_type_has_changed = false;
  bool is_cng_payload_type = CNGPayloadType(payload_type,
                                            &specific_payload->Audio.frequency,
                                            &cng_payload_type_has_changed);

  *should_reset_statistics = cng_payload_type_has_changed;

  if (is_cng_payload_type) {
    // Don't do callbacks for DTMF packets.
    *should_discard_changes = true;
    return;
  }
}

WebRtc_Word32 RTPReceiverAudio::InvokeOnInitializeDecoder(
    RtpFeedback* callback,
    const WebRtc_Word32 id,
    const WebRtc_Word8 payload_type,
    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
    const ModuleRTPUtility::PayloadUnion& specific_payload) const {
  if (-1 == callback->OnInitializeDecoder(id,
                                          payload_type,
                                          payload_name,
                                          specific_payload.Audio.frequency,
                                          specific_payload.Audio.channels,
                                          specific_payload.Audio.rate)) {
    WEBRTC_TRACE(kTraceError,
                 kTraceRtpRtcp,
                 id,
                 "Failed to create video decoder for payload type:%d",
                 payload_type);
    return -1;
  }
  return 0;
}

// We are not allowed to have any critsects when calling data_callback.
WebRtc_Word32 RTPReceiverAudio::ParseAudioCodecSpecific(
    WebRtcRTPHeader* rtp_header,
    const WebRtc_UWord8* payload_data,
    const WebRtc_UWord16 payload_length,
    const ModuleRTPUtility::AudioPayload& audio_specific,
    const bool is_red) {
  WebRtc_UWord8 new_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
  WebRtc_UWord8 removed_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
  WebRtc_UWord8 number_of_new_events = 0;
  WebRtc_UWord8 number_of_removed_events = 0;

  if (payload_length == 0) {
    return 0;
  }

  bool telephone_event_packet =
      TelephoneEventPayloadType(rtp_header->header.payloadType);
  if (telephone_event_packet) {
    CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());

    // RFC 4733 2.3
    //  0                   1                   2                   3
    //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    // |     event     |E|R| volume    |          duration             |
    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    //
    if (payload_length % 4 != 0) {
      return -1;
    }
    WebRtc_UWord8 number_of_events = payload_length / 4;

    // sanity
    if (number_of_events >= MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS) {
      number_of_events = MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS;
    }
    for (int n = 0; n < number_of_events; ++n) {
      bool end = (payload_data[(4 * n) + 1] & 0x80) ? true : false;

      std::set<WebRtc_UWord8>::iterator event =
          telephone_event_reported_.find(payload_data[4 * n]);

      if (event != telephone_event_reported_.end()) {
        // we have already seen this event
        if (end) {
          removed_events[number_of_removed_events] = payload_data[4 * n];
          number_of_removed_events++;
          telephone_event_reported_.erase(payload_data[4 * n]);
        }
      } else {
        if (end) {
          // don't add if it's a end of a tone
        } else {
          new_events[number_of_new_events] = payload_data[4 * n];
          number_of_new_events++;
          telephone_event_reported_.insert(payload_data[4 * n]);
        }
      }
    }

    // RFC 4733 2.5.1.3 & 2.5.2.3 Long-Duration Events
    // should not be a problem since we don't care about the duration

    // RFC 4733 See 2.5.1.5. & 2.5.2.4.  Multiple Events in a Packet
  }

  // This needs to be called without locks held.
  SendTelephoneEvents(number_of_new_events,
                      new_events,
                      number_of_removed_events,
                      removed_events);

  {
    CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());

    if (!telephone_event_packet) {
      last_received_frequency_ = audio_specific.frequency;
    }

    // Check if this is a CNG packet, receiver might want to know
    WebRtc_UWord32 ignored;
    bool also_ignored;
    if (CNGPayloadType(rtp_header->header.payloadType,
                       &ignored,
                       &also_ignored)) {
      rtp_header->type.Audio.isCNG = true;
      rtp_header->frameType = kAudioFrameCN;
    } else {
      rtp_header->frameType = kAudioFrameSpeech;
      rtp_header->type.Audio.isCNG = false;
    }

    // check if it's a DTMF event, hence something we can playout
    if (telephone_event_packet) {
      if (!telephone_event_forward_to_decoder_) {
        // don't forward event to decoder
        return 0;
      }
      std::set<WebRtc_UWord8>::iterator first =
          telephone_event_reported_.begin();
      if (first != telephone_event_reported_.end() && *first > 15) {
        // don't forward non DTMF events
        return 0;
      }
    }
  }
  if (is_red && !(payload_data[0] & 0x80)) {
    // we recive only one frame packed in a RED packet remove the RED wrapper
    rtp_header->header.payloadType = payload_data[0];

    // only one frame in the RED strip the one byte to help NetEq
    return data_callback_->OnReceivedPayloadData(
        payload_data + 1, payload_length - 1, rtp_header);
  }

  rtp_header->type.Audio.channel = audio_specific.channels;
  return data_callback_->OnReceivedPayloadData(
      payload_data, payload_length, rtp_header);
}
}  // namespace webrtc
webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -13,126 +13,128 @@

#include <set>

#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"

namespace webrtc {

class CriticalSectionWrapper;

// Handles audio RTP packets. This class is thread-safe.
class RTPReceiverAudio : public RTPReceiverStrategy {
 public:
  RTPReceiverAudio(const WebRtc_Word32 id,
                   RtpData* data_callback,
                   RtpAudioFeedback* incoming_messages_callback);

  WebRtc_UWord32 AudioFrequency() const;

  // Outband TelephoneEvent (DTMF) detection
  WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
                                        const bool forward_to_decoder,
                                        const bool detect_end_of_tone);

  // Is outband DTMF(AVT) turned on/off?
  bool TelephoneEvent() const;

  // Is forwarding of outband telephone events turned on/off?
  bool TelephoneEventForwardToDecoder() const;

  // Is TelephoneEvent configured with payload type payload_type
  bool TelephoneEventPayloadType(const WebRtc_Word8 payload_type) const;

  // Returns true if CNG is configured with payload type payload_type. If so,
  // the frequency and cng_payload_type_has_changed are filled in.
  bool CNGPayloadType(const WebRtc_Word8 payload_type,
                      WebRtc_UWord32* frequency,
                      bool* cng_payload_type_has_changed);

  WebRtc_Word32 ParseRtpPacket(
      WebRtcRTPHeader* rtp_header,
      const ModuleRTPUtility::PayloadUnion& specific_payload,
      const bool is_red,
      const WebRtc_UWord8* packet,
      const WebRtc_UWord16 packet_length,
      const WebRtc_Word64 timestamp_ms,
      const bool is_first_packet);

  WebRtc_Word32 GetFrequencyHz() const;

  RTPAliveType ProcessDeadOrAlive(WebRtc_UWord16 last_payload_length) const;

  bool ShouldReportCsrcChanges(WebRtc_UWord8 payload_type) const;

  WebRtc_Word32 OnNewPayloadTypeCreated(
      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
      const WebRtc_Word8 payload_type,
      const WebRtc_UWord32 frequency);

  WebRtc_Word32 InvokeOnInitializeDecoder(
      RtpFeedback* callback,
      const WebRtc_Word32 id,
      const WebRtc_Word8 payload_type,
      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
      const ModuleRTPUtility::PayloadUnion& specific_payload) const;

  // We do not allow codecs to have multiple payload types for audio, so we
  // need to override the default behavior (which is to do nothing).
  void PossiblyRemoveExistingPayloadType(
      ModuleRTPUtility::PayloadTypeMap* payload_type_map,
      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
      const size_t payload_name_length,
      const WebRtc_UWord32 frequency,
      const WebRtc_UWord8 channels,
      const WebRtc_UWord32 rate) const;

  // We need to look out for special payload types here and sometimes reset
  // statistics. In addition we sometimes need to tweak the frequency.
  void CheckPayloadChanged(const WebRtc_Word8 payload_type,
                           ModuleRTPUtility::PayloadUnion* specific_payload,
                           bool* should_reset_statistics,
                           bool* should_discard_changes);

 private:
  void SendTelephoneEvents(
      WebRtc_UWord8 number_of_new_events,
      WebRtc_UWord8 new_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS],
      WebRtc_UWord8 number_of_removed_events,
      WebRtc_UWord8 removed_events[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS]);

  WebRtc_Word32 ParseAudioCodecSpecific(
      WebRtcRTPHeader* rtp_header,
      const WebRtc_UWord8* payload_data,
      const WebRtc_UWord16 payload_length,
      const ModuleRTPUtility::AudioPayload& audio_specific,
      const bool is_red);

  WebRtc_Word32 id_;
  scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_audio_;

  WebRtc_UWord32 last_received_frequency_;

  bool telephone_event_;
  bool telephone_event_forward_to_decoder_;
  bool telephone_event_detect_end_of_tone_;
  WebRtc_Word8 telephone_event_payload_type_;
  std::set<WebRtc_UWord8> telephone_event_reported_;

  WebRtc_Word8 cng_nb_payload_type_;
  WebRtc_Word8 cng_wb_payload_type_;
  WebRtc_Word8 cng_swb_payload_type_;
  WebRtc_Word8 cng_fb_payload_type_;
  WebRtc_Word8 cng_payload_type_;

  // G722 is special since it use the wrong number of RTP samples in timestamp
  // VS. number of samples in the frame
  WebRtc_Word8 g722_payload_type_;
  bool last_received_g722_;

  RtpAudioFeedback* cb_audio_feedback_;
};
}  // namespace webrtc

#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -8,41 +8,41 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"

#include <math.h>

#include <cassert>  // assert
#include <cstring>  // memcpy()

#include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc {
WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x) {
  return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f, (2 + (x >> 14))));
}

RTPReceiverVideo::RTPReceiverVideo(
    const WebRtc_Word32 id,
    const RTPPayloadRegistry* rtp_rtp_payload_registry,
    RtpData* data_callback)
    : RTPReceiverStrategy(data_callback),
      id_(id),
      rtp_rtp_payload_registry_(rtp_rtp_payload_registry),
      critical_section_receiver_video_(
          CriticalSectionWrapper::CreateCriticalSection()),
      current_fec_frame_decoded_(false),
      receive_fec_(NULL) {
}

RTPReceiverVideo::~RTPReceiverVideo() {
  delete critical_section_receiver_video_;
  delete receive_fec_;
}

bool RTPReceiverVideo::ShouldReportCsrcChanges(
@@ -52,35 +52,40 @@ bool RTPReceiverVideo::ShouldReportCsrcChanges(
}

WebRtc_Word32 RTPReceiverVideo::OnNewPayloadTypeCreated(
    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
    const WebRtc_Word8 payload_type,
    const WebRtc_UWord32 frequency) {
  if (ModuleRTPUtility::StringCompare(payload_name, "ULPFEC", 6)) {
    // Enable FEC if not enabled.
    if (receive_fec_ == NULL) {
      receive_fec_ = new ReceiverFEC(id_, this);
    }
    receive_fec_->SetPayloadTypeFEC(payload_type);
  }
  return 0;
}

WebRtc_Word32 RTPReceiverVideo::ParseRtpPacket(
    WebRtcRTPHeader* rtp_header,
    const ModuleRTPUtility::PayloadUnion& specific_payload,
    const bool is_red,
    const WebRtc_UWord8* packet,
    const WebRtc_UWord16 packet_length,
    const WebRtc_Word64 timestamp_ms,
    const bool is_first_packet) {
  const WebRtc_UWord8* payload_data =
      ModuleRTPUtility::GetPayloadData(rtp_header, packet);
  const WebRtc_UWord16 payload_data_length =
      ModuleRTPUtility::GetPayloadDataLength(rtp_header, packet_length);
  return ParseVideoCodecSpecific(rtp_header,
                                 payload_data,
                                 payload_data_length,
                                 specific_payload.Video.videoCodecType,
                                 is_red,
                                 packet,
                                 packet_length,
                                 timestamp_ms,
                                 is_first_packet);
}

WebRtc_Word32 RTPReceiverVideo::GetFrequencyHz() const {
@@ -88,22 +93,24 @@ WebRtc_Word32 RTPReceiverVideo::GetFrequencyHz() const {
}

RTPAliveType RTPReceiverVideo::ProcessDeadOrAlive(
    WebRtc_UWord16 last_payload_length) const {
  return kRtpDead;
}

WebRtc_Word32 RTPReceiverVideo::InvokeOnInitializeDecoder(
    RtpFeedback* callback,
    const WebRtc_Word32 id,
    const WebRtc_Word8 payload_type,
    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
    const ModuleRTPUtility::PayloadUnion& specific_payload) const {
  // For video we just go with default values.
  if (-1 == callback->OnInitializeDecoder(
          id, payload_type, payload_name, kDefaultVideoFrequency, 1, 0)) {
    WEBRTC_TRACE(kTraceError,
                 kTraceRtpRtcp,
                 id,
                 "Failed to create video decoder for payload type:%d",
                 payload_type);
    return -1;
  }
  return 0;
@ -113,271 +120,271 @@ WebRtc_Word32 RTPReceiverVideo::InvokeOnInitializeDecoder(
|
|||||||
// we are not allowed to have any critsects when calling
|
// we are not allowed to have any critsects when calling
|
||||||
// CallbackOfReceivedPayloadData
|
// CallbackOfReceivedPayloadData
|
||||||
WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecific(
|
WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecific(
|
||||||
WebRtcRTPHeader* rtpHeader,
|
WebRtcRTPHeader* rtp_header,
|
||||||
const WebRtc_UWord8* payloadData,
|
const WebRtc_UWord8* payload_data,
|
||||||
const WebRtc_UWord16 payloadDataLength,
|
const WebRtc_UWord16 payload_data_length,
|
||||||
const RtpVideoCodecTypes videoType,
|
const RtpVideoCodecTypes video_type,
|
||||||
const bool isRED,
|
const bool is_red,
|
||||||
const WebRtc_UWord8* incomingRtpPacket,
|
const WebRtc_UWord8* incoming_rtp_packet,
|
||||||
const WebRtc_UWord16 incomingRtpPacketSize,
|
const WebRtc_UWord16 incoming_rtp_packet_size,
|
||||||
const WebRtc_Word64 nowMS,
|
const WebRtc_Word64 now_ms,
|
||||||
const bool isFirstPacket) {
|
const bool is_first_packet) {
|
||||||
WebRtc_Word32 retVal = 0;
|
WebRtc_Word32 ret_val = 0;
|
||||||
|
|
||||||
_criticalSectionReceiverVideo->Enter();
|
critical_section_receiver_video_->Enter();
|
||||||
|
|
||||||
if (isRED) {
|
if (is_red) {
|
||||||
if(_receiveFEC == NULL) {
|
if (receive_fec_ == NULL) {
|
||||||
_criticalSectionReceiverVideo->Leave();
|
critical_section_receiver_video_->Leave();
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
bool FECpacket = false;
|
bool FECpacket = false;
|
||||||
retVal = _receiveFEC->AddReceivedFECPacket(
|
ret_val = receive_fec_->AddReceivedFECPacket(
|
||||||
rtpHeader,
|
rtp_header, incoming_rtp_packet, payload_data_length, FECpacket);
|
||||||
incomingRtpPacket,
|
if (ret_val != -1) {
|
||||||
payloadDataLength,
|
ret_val = receive_fec_->ProcessReceivedFEC();
|
||||||
FECpacket);
|
|
||||||
if (retVal != -1) {
|
|
||||||
retVal = _receiveFEC->ProcessReceivedFEC();
|
|
||||||
}
|
}
|
||||||
_criticalSectionReceiverVideo->Leave();
|
critical_section_receiver_video_->Leave();
|
||||||
|
|
||||||
if(retVal == 0 && FECpacket) {
|
if (ret_val == 0 && FECpacket) {
|
||||||
// Callback with the received FEC packet.
|
// Callback with the received FEC packet.
|
||||||
// The normal packets are delivered after parsing.
|
// The normal packets are delivered after parsing.
|
||||||
// This contains the original RTP packet header but with
|
// This contains the original RTP packet header but with
|
||||||
// empty payload and data length.
|
// empty payload and data length.
|
||||||
rtpHeader->frameType = kFrameEmpty;
|
rtp_header->frameType = kFrameEmpty;
|
||||||
// We need this for the routing.
|
// We need this for the routing.
|
||||||
WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
|
WebRtc_Word32 ret_val = SetCodecType(video_type, rtp_header);
|
||||||
if(retVal != 0) {
|
if (ret_val != 0) {
|
||||||
return retVal;
|
return ret_val;
|
||||||
}
|
}
|
||||||
// Pass the length of FEC packets so that they can be accounted for in
|
// Pass the length of FEC packets so that they can be accounted for in
|
||||||
// the bandwidth estimator.
|
// the bandwidth estimator.
|
||||||
retVal = data_callback_->OnReceivedPayloadData(NULL, payloadDataLength,
|
ret_val = data_callback_->OnReceivedPayloadData(
|
||||||
rtpHeader);
|
NULL, payload_data_length, rtp_header);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// will leave the _criticalSectionReceiverVideo critsect
|
// will leave the critical_section_receiver_video_ critsect
|
||||||
retVal = ParseVideoCodecSpecificSwitch(rtpHeader,
|
ret_val = ParseVideoCodecSpecificSwitch(rtp_header,
|
||||||
payloadData,
|
payload_data,
|
||||||
payloadDataLength,
|
payload_data_length,
|
||||||
videoType,
|
video_type,
|
||||||
isFirstPacket);
|
is_first_packet);
|
||||||
}
|
}
|
||||||
return retVal;
|
return ret_val;
|
||||||
}
|
}
|
||||||
|
|
||||||
WebRtc_Word32 RTPReceiverVideo::BuildRTPheader(
|
WebRtc_Word32 RTPReceiverVideo::BuildRTPheader(
|
||||||
const WebRtcRTPHeader* rtpHeader,
|
const WebRtcRTPHeader* rtp_header,
|
||||||
WebRtc_UWord8* dataBuffer) const {
|
WebRtc_UWord8* data_buffer) const {
|
||||||
dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80); // version 2
|
data_buffer[0] = static_cast<WebRtc_UWord8>(0x80); // version 2
|
||||||
dataBuffer[1] = static_cast<WebRtc_UWord8>(rtpHeader->header.payloadType);
|
data_buffer[1] = static_cast<WebRtc_UWord8>(rtp_header->header.payloadType);
|
||||||
if (rtpHeader->header.markerBit) {
|
if (rtp_header->header.markerBit) {
|
||||||
dataBuffer[1] |= kRtpMarkerBitMask; // MarkerBit is 1
|
data_buffer[1] |= kRtpMarkerBitMask; // MarkerBit is 1
|
||||||
}
|
}
|
||||||
ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + 2,
|
ModuleRTPUtility::AssignUWord16ToBuffer(data_buffer + 2,
|
||||||
rtpHeader->header.sequenceNumber);
|
rtp_header->header.sequenceNumber);
|
||||||
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 4,
|
ModuleRTPUtility::AssignUWord32ToBuffer(data_buffer + 4,
|
||||||
rtpHeader->header.timestamp);
|
rtp_header->header.timestamp);
|
||||||
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 8,
|
ModuleRTPUtility::AssignUWord32ToBuffer(data_buffer + 8,
|
||||||
rtpHeader->header.ssrc);
|
rtp_header->header.ssrc);
|
||||||
|
|
||||||
WebRtc_Word32 rtpHeaderLength = 12;
|
WebRtc_Word32 rtp_header_length = 12;
|
||||||
|
|
||||||
// Add the CSRCs if any
|
// Add the CSRCs if any
|
||||||
if (rtpHeader->header.numCSRCs > 0) {
|
if (rtp_header->header.numCSRCs > 0) {
|
||||||
if (rtpHeader->header.numCSRCs > 16) {
|
if (rtp_header->header.numCSRCs > 16) {
|
||||||
// error
|
// error
|
||||||
assert(false);
|
assert(false);
|
||||||
}
|
}
|
||||||
WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
|
WebRtc_UWord8* ptr = &data_buffer[rtp_header_length];
|
||||||
for (WebRtc_UWord32 i = 0; i < rtpHeader->header.numCSRCs; ++i) {
|
for (WebRtc_UWord32 i = 0; i < rtp_header->header.numCSRCs; ++i) {
|
||||||
ModuleRTPUtility::AssignUWord32ToBuffer(ptr,
|
ModuleRTPUtility::AssignUWord32ToBuffer(ptr,
|
||||||
rtpHeader->header.arrOfCSRCs[i]);
|
rtp_header->header.arrOfCSRCs[i]);
|
||||||
ptr +=4;
|
ptr += 4;
|
||||||
}
|
}
|
||||||
dataBuffer[0] = (dataBuffer[0]&0xf0) | rtpHeader->header.numCSRCs;
|
data_buffer[0] = (data_buffer[0] & 0xf0) | rtp_header->header.numCSRCs;
|
||||||
// Update length of header
|
// Update length of header
|
||||||
rtpHeaderLength += sizeof(WebRtc_UWord32)*rtpHeader->header.numCSRCs;
|
rtp_header_length += sizeof(WebRtc_UWord32) * rtp_header->header.numCSRCs;
|
||||||
}
|
}
|
||||||
return rtpHeaderLength;
|
return rtp_header_length;
|
||||||
}
|
}
|
||||||
|
|
 WebRtc_Word32 RTPReceiverVideo::ReceiveRecoveredPacketCallback(
-    WebRtcRTPHeader* rtpHeader,
-    const WebRtc_UWord8* payloadData,
-    const WebRtc_UWord16 payloadDataLength) {
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord16 payload_data_length) {
   // TODO(pwestin) Re-factor this to avoid the messy critsect handling.
-  _criticalSectionReceiverVideo->Enter();
+  critical_section_receiver_video_->Enter();

-  _currentFecFrameDecoded = true;
+  current_fec_frame_decoded_ = true;

   ModuleRTPUtility::Payload* payload = NULL;
-  if (_rtpRtpPayloadRegistry->PayloadTypeToPayload(
-      rtpHeader->header.payloadType, payload) != 0) {
-    _criticalSectionReceiverVideo->Leave();
+  if (rtp_rtp_payload_registry_->PayloadTypeToPayload(
+      rtp_header->header.payloadType, payload) != 0) {
+    critical_section_receiver_video_->Leave();
     return -1;
   }
   // here we can re-create the original lost packet so that we can use it for
   // the relay we need to re-create the RED header too
-  WebRtc_UWord8 recoveredPacket[IP_PACKET_SIZE];
-  WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)BuildRTPheader(
-      rtpHeader, recoveredPacket);
+  WebRtc_UWord8 recovered_packet[IP_PACKET_SIZE];
+  WebRtc_UWord16 rtp_header_length =
+      (WebRtc_UWord16) BuildRTPheader(rtp_header, recovered_packet);

-  const WebRtc_UWord8 REDForFECHeaderLength = 1;
+  const WebRtc_UWord8 kREDForFECHeaderLength = 1;

   // replace pltype
-  recoveredPacket[1] &= 0x80; // Reset.
-  recoveredPacket[1] += _rtpRtpPayloadRegistry->red_payload_type();
+  recovered_packet[1] &= 0x80; // Reset.
+  recovered_packet[1] += rtp_rtp_payload_registry_->red_payload_type();

   // add RED header
-  recoveredPacket[rtpHeaderLength] = rtpHeader->header.payloadType;
+  recovered_packet[rtp_header_length] = rtp_header->header.payloadType;
   // f-bit always 0

-  memcpy(recoveredPacket + rtpHeaderLength + REDForFECHeaderLength, payloadData,
-         payloadDataLength);
+  memcpy(recovered_packet + rtp_header_length + kREDForFECHeaderLength,
+         payload_data,
+         payload_data_length);

   // A recovered packet can be the first packet, but we lack the ability to
   // detect it at the moment since we do not store the history of recently
   // received packets. Most codecs like VP8 deal with this in other ways.
-  bool isFirstPacket = false;
+  bool is_first_packet = false;

   return ParseVideoCodecSpecificSwitch(
-      rtpHeader,
-      payloadData,
-      payloadDataLength,
+      rtp_header,
+      payload_data,
+      payload_data_length,
       payload->typeSpecific.Video.videoCodecType,
-      isFirstPacket);
+      is_first_packet);
 }

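The recovered-packet path above is a RED (RFC 2198) re-encapsulation: the rebuilt RTP header gets the RED payload type (keeping the marker bit), a single one-byte RED header with the F bit cleared carries the original payload type, and the recovered media payload follows. A minimal sketch under those assumptions, with hypothetical names:

#include <stdint.h>
#include <string.h>

// 'packet' already holds a freshly built RTP header of 'rtp_header_length'
// bytes. Returns the total length after adding the RED header and payload.
static size_t WrapInRed(uint8_t* packet,
                        size_t rtp_header_length,
                        uint8_t red_payload_type,
                        uint8_t original_payload_type,
                        const uint8_t* payload,
                        size_t payload_length) {
  // Put the RED payload type into the RTP header, keeping the marker bit.
  packet[1] = static_cast<uint8_t>((packet[1] & 0x80) | (red_payload_type & 0x7f));
  // One-byte RED header: F bit = 0 (last block), low 7 bits = original PT.
  packet[rtp_header_length] = static_cast<uint8_t>(original_payload_type & 0x7f);
  // The media payload follows directly after the RED header.
  memcpy(packet + rtp_header_length + 1, payload, payload_length);
  return rtp_header_length + 1 + payload_length;
}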
-WebRtc_Word32 RTPReceiverVideo::SetCodecType(const RtpVideoCodecTypes videoType,
-                                             WebRtcRTPHeader* rtpHeader) const {
-  switch (videoType) {
+WebRtc_Word32 RTPReceiverVideo::SetCodecType(
+    const RtpVideoCodecTypes video_type,
+    WebRtcRTPHeader* rtp_header) const {
+  switch (video_type) {
     case kRtpNoVideo:
-      rtpHeader->type.Video.codec = kRTPVideoGeneric;
+      rtp_header->type.Video.codec = kRTPVideoGeneric;
       break;
     case kRtpVp8Video:
-      rtpHeader->type.Video.codec = kRTPVideoVP8;
+      rtp_header->type.Video.codec = kRTPVideoVP8;
       break;
     case kRtpFecVideo:
-      rtpHeader->type.Video.codec = kRTPVideoFEC;
+      rtp_header->type.Video.codec = kRTPVideoFEC;
       break;
   }
   return 0;
 }

 WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
-    WebRtcRTPHeader* rtpHeader,
-    const WebRtc_UWord8* payloadData,
-    const WebRtc_UWord16 payloadDataLength,
-    const RtpVideoCodecTypes videoType,
-    const bool isFirstPacket) {
-  WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
-  if (retVal != 0) {
-    _criticalSectionReceiverVideo->Leave();
-    return retVal;
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord16 payload_data_length,
+    const RtpVideoCodecTypes video_type,
+    const bool is_first_packet) {
+  WebRtc_Word32 ret_val = SetCodecType(video_type, rtp_header);
+  if (ret_val != 0) {
+    critical_section_receiver_video_->Leave();
+    return ret_val;
   }
-  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
-               __FUNCTION__, rtpHeader->header.timestamp);
+  WEBRTC_TRACE(kTraceStream,
+               kTraceRtpRtcp,
+               id_,
+               "%s(timestamp:%u)",
+               __FUNCTION__,
+               rtp_header->header.timestamp);

-  // All receive functions release _criticalSectionReceiverVideo before
+  // All receive functions release critical_section_receiver_video_ before
   // returning.
-  switch (videoType) {
+  switch (video_type) {
     case kRtpNoVideo:
-      rtpHeader->type.Video.isFirstPacket = isFirstPacket;
-      return ReceiveGenericCodec(rtpHeader, payloadData, payloadDataLength);
+      rtp_header->type.Video.isFirstPacket = is_first_packet;
+      return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
     case kRtpVp8Video:
-      return ReceiveVp8Codec(rtpHeader, payloadData, payloadDataLength);
+      return ReceiveVp8Codec(rtp_header, payload_data, payload_data_length);
     case kRtpFecVideo:
       break;
   }
-  _criticalSectionReceiverVideo->Leave();
+  critical_section_receiver_video_->Leave();
   return -1;
 }

 WebRtc_Word32 RTPReceiverVideo::ReceiveVp8Codec(
-    WebRtcRTPHeader* rtpHeader,
-    const WebRtc_UWord8* payloadData,
-    const WebRtc_UWord16 payloadDataLength) {
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord16 payload_data_length) {
   bool success;
-  ModuleRTPUtility::RTPPayload parsedPacket;
-  if (payloadDataLength == 0) {
+  ModuleRTPUtility::RTPPayload parsed_packet;
+  if (payload_data_length == 0) {
     success = true;
-    parsedPacket.info.VP8.dataLength = 0;
+    parsed_packet.info.VP8.dataLength = 0;
   } else {
-    ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpVp8Video,
-                                                        payloadData,
-                                                        payloadDataLength,
-                                                        _id);
+    ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
+        kRtpVp8Video, payload_data, payload_data_length, id_);

-    success = rtpPayloadParser.Parse(parsedPacket);
+    success = rtp_payload_parser.Parse(parsed_packet);
   }
   // from here down we only work on local data
-  _criticalSectionReceiverVideo->Leave();
+  critical_section_receiver_video_->Leave();

   if (!success) {
     return -1;
   }
-  if (parsedPacket.info.VP8.dataLength == 0) {
+  if (parsed_packet.info.VP8.dataLength == 0) {
     // we have an "empty" VP8 packet, it's ok, could be one way video
     // Inform the jitter buffer about this packet.
-    rtpHeader->frameType = kFrameEmpty;
-    if (data_callback_->OnReceivedPayloadData(NULL, 0, rtpHeader) != 0) {
+    rtp_header->frameType = kFrameEmpty;
+    if (data_callback_->OnReceivedPayloadData(NULL, 0, rtp_header) != 0) {
       return -1;
     }
     return 0;
   }
-  rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ?
-      kVideoFrameKey : kVideoFrameDelta;
+  rtp_header->frameType = (parsed_packet.frameType == ModuleRTPUtility::kIFrame)
+      ? kVideoFrameKey : kVideoFrameDelta;

-  RTPVideoHeaderVP8 *toHeader = &rtpHeader->type.Video.codecHeader.VP8;
-  ModuleRTPUtility::RTPPayloadVP8 *fromHeader = &parsedPacket.info.VP8;
+  RTPVideoHeaderVP8* to_header = &rtp_header->type.Video.codecHeader.VP8;
+  ModuleRTPUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;

-  rtpHeader->type.Video.isFirstPacket = fromHeader->beginningOfPartition
-      && (fromHeader->partitionID == 0);
-  toHeader->nonReference = fromHeader->nonReferenceFrame;
-  toHeader->pictureId = fromHeader->hasPictureID ? fromHeader->pictureID :
-      kNoPictureId;
-  toHeader->tl0PicIdx = fromHeader->hasTl0PicIdx ? fromHeader->tl0PicIdx :
-      kNoTl0PicIdx;
-  if (fromHeader->hasTID) {
-    toHeader->temporalIdx = fromHeader->tID;
-    toHeader->layerSync = fromHeader->layerSync;
+  rtp_header->type.Video.isFirstPacket =
+      from_header->beginningOfPartition && (from_header->partitionID == 0);
+  to_header->nonReference = from_header->nonReferenceFrame;
+  to_header->pictureId =
+      from_header->hasPictureID ? from_header->pictureID : kNoPictureId;
+  to_header->tl0PicIdx =
+      from_header->hasTl0PicIdx ? from_header->tl0PicIdx : kNoTl0PicIdx;
+  if (from_header->hasTID) {
+    to_header->temporalIdx = from_header->tID;
+    to_header->layerSync = from_header->layerSync;
   } else {
-    toHeader->temporalIdx = kNoTemporalIdx;
-    toHeader->layerSync = false;
+    to_header->temporalIdx = kNoTemporalIdx;
+    to_header->layerSync = false;
   }
-  toHeader->keyIdx = fromHeader->hasKeyIdx ? fromHeader->keyIdx : kNoKeyIdx;
+  to_header->keyIdx = from_header->hasKeyIdx ? from_header->keyIdx : kNoKeyIdx;

-  toHeader->frameWidth = fromHeader->frameWidth;
-  toHeader->frameHeight = fromHeader->frameHeight;
+  to_header->frameWidth = from_header->frameWidth;
+  to_header->frameHeight = from_header->frameHeight;

-  toHeader->partitionId = fromHeader->partitionID;
-  toHeader->beginningOfPartition = fromHeader->beginningOfPartition;
+  to_header->partitionId = from_header->partitionID;
+  to_header->beginningOfPartition = from_header->beginningOfPartition;

-  if(data_callback_->OnReceivedPayloadData(parsedPacket.info.VP8.data,
-                                           parsedPacket.info.VP8.dataLength,
-                                           rtpHeader) != 0) {
+  if (data_callback_->OnReceivedPayloadData(parsed_packet.info.VP8.data,
+                                            parsed_packet.info.VP8.dataLength,
+                                            rtp_header) != 0) {
     return -1;
   }
   return 0;
 }


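ReceiveVp8Codec leans on ModuleRTPUtility::RTPPayloadParser to strip the VP8 payload descriptor before the data reaches the callback. As a rough guide to where fields such as nonReferenceFrame, beginningOfPartition and partitionID presumably come from, this hypothetical sketch decodes the descriptor's mandatory first byte (layout per the VP8 RTP payload format, later published as RFC 7741):

#include <stdint.h>

//   0 1 2 3 4 5 6 7
//  +-+-+-+-+-+-+-+-+
//  |X|R|N|S|R| PID |   (first, required byte of the VP8 payload descriptor)
//  +-+-+-+-+-+-+-+-+
struct Vp8DescriptorFirstByte {
  bool extension_present;       // X: extension byte (I/L/T/K flags) follows.
  bool non_reference_frame;     // N: frame is not used as a reference.
  bool beginning_of_partition;  // S: first packet of a VP8 partition.
  uint8_t partition_id;         // PID: 3-bit partition index.
};

static Vp8DescriptorFirstByte ParseVp8FirstByte(uint8_t byte) {
  Vp8DescriptorFirstByte d;
  d.extension_present = (byte & 0x80) != 0;
  d.non_reference_frame = (byte & 0x20) != 0;
  d.beginning_of_partition = (byte & 0x10) != 0;
  d.partition_id = byte & 0x07;
  return d;
}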
 WebRtc_Word32 RTPReceiverVideo::ReceiveGenericCodec(
-    WebRtcRTPHeader* rtpHeader,
-    const WebRtc_UWord8* payloadData,
-    const WebRtc_UWord16 payloadDataLength) {
-  rtpHeader->frameType = kVideoFrameKey;
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord16 payload_data_length) {
+  rtp_header->frameType = kVideoFrameKey;

-  _criticalSectionReceiverVideo->Leave();
+  critical_section_receiver_video_->Leave();

-  if (data_callback_->OnReceivedPayloadData(payloadData, payloadDataLength,
-                                            rtpHeader) != 0) {
+  if (data_callback_->OnReceivedPayloadData(
+      payload_data, payload_data_length, rtp_header) != 0) {
     return -1;
   }
   return 0;
 }
 } // namespace webrtc
@@ -11,12 +11,12 @@
 #ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
 #define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_

-#include "bitrate.h"
-#include "rtp_receiver_strategy.h"
-#include "rtp_rtcp_defines.h"
-#include "rtp_utility.h"
-#include "scoped_ptr.h"
-#include "typedefs.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"

 namespace webrtc {
 class CriticalSectionWrapper;
@@ -35,7 +35,7 @@ class RTPReceiverVideo : public RTPReceiverStrategy {

   WebRtc_Word32 ParseRtpPacket(
       WebRtcRTPHeader* rtp_header,
-      const ModuleRTPUtility::PayloadUnion& specificPayload,
+      const ModuleRTPUtility::PayloadUnion& specific_payload,
       const bool is_red,
       const WebRtc_UWord8* packet,
       const WebRtc_UWord16 packet_length,
@@ -44,71 +44,72 @@ class RTPReceiverVideo : public RTPReceiverStrategy {

   WebRtc_Word32 GetFrequencyHz() const;

-  RTPAliveType ProcessDeadOrAlive(WebRtc_UWord16 lastPayloadLength) const;
+  RTPAliveType ProcessDeadOrAlive(WebRtc_UWord16 last_payload_length) const;

   bool ShouldReportCsrcChanges(WebRtc_UWord8 payload_type) const;

   WebRtc_Word32 OnNewPayloadTypeCreated(
-      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
-      const WebRtc_Word8 payloadType,
+      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+      const WebRtc_Word8 payload_type,
       const WebRtc_UWord32 frequency);

   WebRtc_Word32 InvokeOnInitializeDecoder(
       RtpFeedback* callback,
       const WebRtc_Word32 id,
-      const WebRtc_Word8 payloadType,
-      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
-      const ModuleRTPUtility::PayloadUnion& specificPayload) const;
+      const WebRtc_Word8 payload_type,
+      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+      const ModuleRTPUtility::PayloadUnion& specific_payload) const;

   virtual WebRtc_Word32 ReceiveRecoveredPacketCallback(
-      WebRtcRTPHeader* rtpHeader,
-      const WebRtc_UWord8* payloadData,
-      const WebRtc_UWord16 payloadDataLength);
+      WebRtcRTPHeader* rtp_header,
+      const WebRtc_UWord8* payload_data,
+      const WebRtc_UWord16 payload_data_length);

-  void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
+  void SetPacketOverHead(WebRtc_UWord16 packet_over_head);

  protected:
-  WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
-                             WebRtcRTPHeader* rtpHeader) const;
+  WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes video_type,
+                             WebRtcRTPHeader* rtp_header) const;

   WebRtc_Word32 ParseVideoCodecSpecificSwitch(
-      WebRtcRTPHeader* rtpHeader,
-      const WebRtc_UWord8* payloadData,
-      const WebRtc_UWord16 payloadDataLength,
-      const RtpVideoCodecTypes videoType,
-      const bool isFirstPacket);
+      WebRtcRTPHeader* rtp_header,
+      const WebRtc_UWord8* payload_data,
+      const WebRtc_UWord16 payload_data_length,
+      const RtpVideoCodecTypes video_type,
+      const bool is_first_packet);

-  WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
-                                    const WebRtc_UWord8* payloadData,
-                                    const WebRtc_UWord16 payloadDataLength);
+  WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader* rtp_header,
+                                    const WebRtc_UWord8* payload_data,
+                                    const WebRtc_UWord16 payload_data_length);

-  WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
-                                const WebRtc_UWord8* payloadData,
-                                const WebRtc_UWord16 payloadDataLength);
+  WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
+                                const WebRtc_UWord8* payload_data,
+                                const WebRtc_UWord16 payload_data_length);

-  WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
-                               WebRtc_UWord8* dataBuffer) const;
+  WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtp_header,
+                               WebRtc_UWord8* data_buffer) const;

  private:
   WebRtc_Word32 ParseVideoCodecSpecific(
-      WebRtcRTPHeader* rtpHeader,
-      const WebRtc_UWord8* payloadData,
-      const WebRtc_UWord16 payloadDataLength,
-      const RtpVideoCodecTypes videoType,
-      const bool isRED,
-      const WebRtc_UWord8* incomingRtpPacket,
-      const WebRtc_UWord16 incomingRtpPacketSize,
-      const WebRtc_Word64 nowMS,
-      const bool isFirstPacket);
+      WebRtcRTPHeader* rtp_header,
+      const WebRtc_UWord8* payload_data,
+      const WebRtc_UWord16 payload_data_length,
+      const RtpVideoCodecTypes video_type,
+      const bool is_red,
+      const WebRtc_UWord8* incoming_rtp_packet,
+      const WebRtc_UWord16 incoming_rtp_packet_size,
+      const WebRtc_Word64 now_ms,
+      const bool is_first_packet);

-  WebRtc_Word32 _id;
-  const RTPPayloadRegistry* _rtpRtpPayloadRegistry;
+  WebRtc_Word32 id_;
+  const RTPPayloadRegistry* rtp_rtp_payload_registry_;

-  CriticalSectionWrapper* _criticalSectionReceiverVideo;
+  CriticalSectionWrapper* critical_section_receiver_video_;

   // FEC
-  bool _currentFecFrameDecoded;
-  ReceiverFEC* _receiveFEC;
+  bool current_fec_frame_decoded_;
+  ReceiverFEC* receive_fec_;
 };
 } // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_