Decoupled RTP audio processor from RTP receiver.

BUG=
TEST=Ran vie_auto_test, rtp_rtcp_unittests, voe_auto_test

Review URL: https://webrtc-codereview.appspot.com/979004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3279 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: phoglund@webrtc.org
Date:   2012-12-13 10:48:24 +00:00
Commit: 92bb417cb1 (parent 5b689efe8e)
5 changed files with 229 additions and 187 deletions
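The heart of the change is a switch from inheritance to composition: RTPReceiver no longer derives from RTPReceiverAudio but owns one, and the audio object reaches back into its parent for payload delivery. A minimal, self-contained sketch of that ownership shape (simplified stand-in classes, not the actual WebRTC types):

#include <cstdio>
#include <memory>

class Receiver;  // forward declaration, as the audio header does for RTPReceiver

// Stand-in for RTPReceiverAudio: no longer a base class, just a helper that
// keeps a non-owning back-pointer to its parent receiver.
class AudioPart {
 public:
  explicit AudioPart(Receiver* parent) : parent_(parent) {}
  void ParseAudioPacket(const char* payload);  // defined after Receiver below
 private:
  Receiver* parent_;  // like _parent in the patch
};

// Stand-in for RTPReceiver: owns the audio helper instead of inheriting from it.
class Receiver {
 public:
  Receiver() : audio_(new AudioPart(this)) {}
  AudioPart* GetAudioPart() const { return audio_.get(); }  // like GetAudioReceiver()
  // Shared delivery path the helper calls back into
  // (like CallbackOfReceivedPayloadData).
  void DeliverPayload(const char* payload) { std::printf("deliver: %s\n", payload); }
 private:
  std::unique_ptr<AudioPart> audio_;
};

void AudioPart::ParseAudioPacket(const char* payload) {
  // ...audio-specific parsing (DTMF, CNG, RED) would go here...
  parent_->DeliverPayload(payload);  // hand the stripped payload back to the owner
}

int main() {
  Receiver receiver;
  receiver.GetAudioPart()->ParseAudioPacket("hello");
  return 0;
}

The back-pointer stays non-owning, so the audio object's lifetime is controlled entirely by the receiver, mirroring the new/delete pair added to RTPReceiver's constructor and destructor in the diff below.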

rtp_receiver.cc

@@ -32,8 +32,8 @@ using ModuleRTPUtility::VideoPayload;
 RTPReceiver::RTPReceiver(const WebRtc_Word32 id,
                          const bool audio,
                          RtpRtcpClock* clock,
-                         ModuleRtpRtcpImpl* owner) :
-    RTPReceiverAudio(id),
+                         ModuleRtpRtcpImpl* owner,
+                         RtpAudioFeedback* incomingMessagesCallback) :
     RTPReceiverVideo(id, owner),
     Bitrate(clock),
     _id(id),
@@ -96,6 +96,8 @@ RTPReceiver::RTPReceiver(const WebRtc_Word32 id,
     _nackMethod(kNackOff),
     _RTX(false),
     _ssrcRTX(0) {
+  _rtpReceiverAudio = new RTPReceiverAudio(id, this,
+                                           incomingMessagesCallback);
   memset(_currentRemoteCSRC, 0, sizeof(_currentRemoteCSRC));
   memset(_currentRemoteEnergy, 0, sizeof(_currentRemoteEnergy));
   memset(&_lastReceivedAudioSpecific, 0, sizeof(_lastReceivedAudioSpecific));
@@ -121,6 +123,7 @@ RTPReceiver::~RTPReceiver() {
     delete it->second;
     _payloadTypeMap.erase(it);
   }
+  delete _rtpReceiverAudio;
   WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
 }
@@ -392,8 +395,8 @@ WebRtc_Word32 RTPReceiver::RegisterReceivePayload(
     strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
   } else {
     if (_audio) {
-      payload = RegisterReceiveAudioPayload(payloadName, payloadType,
-                                            frequency, channels, rate);
+      payload = _rtpReceiverAudio->RegisterReceiveAudioPayload(
+          payloadName, payloadType, frequency, channels, rate);
     } else {
       payload = RegisterReceiveVideoPayload(payloadName, payloadType, rate);
     }
@@ -752,11 +755,11 @@ WebRtc_Word32 RTPReceiver::IncomingRTPPacket(
   WebRtc_Word32 retVal = 0;
   if(_audio) {
-    retVal = ParseAudioCodecSpecific(rtp_header,
-                                     payload_data,
-                                     payload_data_length,
-                                     audio_specific,
-                                     is_red);
+    retVal = _rtpReceiverAudio->ParseAudioCodecSpecific(rtp_header,
+                                                        payload_data,
+                                                        payload_data_length,
+                                                        audio_specific,
+                                                        is_red);
   } else {
     retVal = ParseVideoCodecSpecific(rtp_header,
                                      payload_data,
@@ -821,7 +824,7 @@ RTPReceiver::UpdateStatistics(const WebRtcRTPHeader* rtpHeader,
     WebRtc_UWord32 freq = 90000;
     if(_audio)
     {
-        freq = AudioFrequency();
+        freq = _rtpReceiverAudio->AudioFrequency();
     }

     Bitrate::Update(bytes);
@@ -922,7 +925,7 @@ bool RTPReceiver::RetransmitOfOldPacket(
   }
   WebRtc_UWord32 frequencyKHz = 90;  // Video frequency.
   if (_audio) {
-    frequencyKHz = AudioFrequency() / 1000;
+    frequencyKHz = _rtpReceiverAudio->AudioFrequency() / 1000;
   }
   WebRtc_Word64 timeDiffMS = _clock.GetTimeInMS() - _lastReceiveTime;
   // Diff in time stamp since last received in order.
@@ -1030,7 +1033,7 @@ RTPReceiver::EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const
     WebRtc_UWord32 freq = 90000;
     if(_audio)
     {
-        freq = AudioFrequency();
+        freq = _rtpReceiverAudio->AudioFrequency();
     }
     if(_localTimeLastReceivedTimestamp == 0)
     {
@@ -1192,13 +1195,22 @@ WebRtc_Word32 RTPReceiver::CheckPayloadChanged(
     }
   }
   if (_audio) {
-    if (TelephoneEventPayloadType(payloadType)) {
+    if (_rtpReceiverAudio->TelephoneEventPayloadType(payloadType)) {
       // don't do callbacks for DTMF packets
       isRED = false;
       return 0;
     }
     // frequency is updated for CNG
-    if (CNGPayloadType(payloadType, audioSpecificPayload.frequency)) {
+    bool cngPayloadTypeHasChanged = false;
+    bool isCngPayloadType = _rtpReceiverAudio->CNGPayloadType(
+        payloadType, &audioSpecificPayload.frequency,
+        &cngPayloadTypeHasChanged);
+    if (cngPayloadTypeHasChanged) {
+      ResetStatistics();
+    }
+    if (isCngPayloadType) {
       // don't do callbacks for DTMF packets
       isRED = false;
       return 0;
@@ -1295,7 +1307,8 @@ void RTPReceiver::CheckCSRC(const WebRtcRTPHeader* rtpHeader) {
   {
     CriticalSectionScoped lock(_criticalSectionRTPReceiver);
-    if (TelephoneEventPayloadType(rtpHeader->header.payloadType)) {
+    if (_rtpReceiverAudio->TelephoneEventPayloadType(
+        rtpHeader->header.payloadType)) {
       // Don't do this for DTMF packets
       return;
     }
@@ -1625,4 +1638,5 @@ RTPReceiver::ProcessBitrate()
     Bitrate::Process();
 }
 }  // namespace webrtc

rtp_receiver.h

@@ -29,13 +29,14 @@ class RtpRtcpFeedback;
 class ModuleRtpRtcpImpl;
 class Trace;

-class RTPReceiver : public RTPReceiverAudio, public RTPReceiverVideo, public Bitrate
+class RTPReceiver : public RTPReceiverVideo, public Bitrate
 {
 public:
     RTPReceiver(const WebRtc_Word32 id,
                 const bool audio,
                 RtpRtcpClock* clock,
-                ModuleRtpRtcpImpl* owner);
+                ModuleRtpRtcpImpl* owner,
+                RtpAudioFeedback* incomingMessagesCallback);

     virtual ~RTPReceiver();
@@ -154,10 +155,13 @@ public:
     void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;

+    RTPReceiverAudio* GetAudioReceiver() const { return _rtpReceiverAudio; }
+
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadSize,
+        const WebRtcRTPHeader* rtpHeader);
+
 protected:
-    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
-                                                        const WebRtc_UWord16 payloadSize,
-                                                        const WebRtcRTPHeader* rtpHeader);
-
     virtual bool RetransmitOfOldPacket(const WebRtc_UWord16 sequenceNumber,
                                        const WebRtc_UWord32 rtpTimeStamp) const;
@@ -189,6 +193,8 @@ private:
     bool ProcessNACKBitRate(WebRtc_UWord32 now);

 private:
+    RTPReceiverAudio*       _rtpReceiverAudio;
+
     WebRtc_Word32           _id;
     const bool              _audio;
     ModuleRtpRtcpImpl&      _rtpRtcp;

rtp_receiver_audio.cc

@@ -15,10 +15,16 @@
 #include <math.h>    // pow()

 #include "critical_section_wrapper.h"
+#include "rtp_receiver.h"

 namespace webrtc {
-RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id):
-    _id(id),
+RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id,
+                                   RTPReceiver* parent,
+                                   RtpAudioFeedback* incomingMessagesCallback)
+    : _id(id),
+      _parent(parent),
+      _criticalSectionRtpReceiverAudio(
+          CriticalSectionWrapper::CreateCriticalSection()),
     _lastReceivedFrequency(8000),
     _telephoneEvent(false),
     _telephoneEventForwardToDecoder(false),
@@ -31,27 +37,14 @@ RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id):
     _cngPayloadType(-1),
     _G722PayloadType(-1),
     _lastReceivedG722(false),
-    _criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
-    _cbAudioFeedback(NULL)
+    _cbAudioFeedback(incomingMessagesCallback)
 {
 }

-RTPReceiverAudio::~RTPReceiverAudio()
-{
-    delete _criticalSectionFeedback;
-}
-
-WebRtc_Word32
-RTPReceiverAudio::RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback)
-{
-    CriticalSectionScoped lock(_criticalSectionFeedback);
-    _cbAudioFeedback = incomingMessagesCallback;
-    return 0;
-}
-
 WebRtc_UWord32
 RTPReceiverAudio::AudioFrequency() const
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
     if(_lastReceivedG722)
     {
         return 8000;
@@ -65,6 +58,7 @@ RTPReceiverAudio::SetTelephoneEventStatus(const bool enable,
                                           const bool forwardToDecoder,
                                           const bool detectEndOfTone)
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
     _telephoneEvent= enable;
     _telephoneEventDetectEndOfTone = detectEndOfTone;
     _telephoneEventForwardToDecoder = forwardToDecoder;
@@ -75,6 +69,7 @@ RTPReceiverAudio::SetTelephoneEventStatus(const bool enable,
 bool
 RTPReceiverAudio::TelephoneEvent() const
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
     return _telephoneEvent;
 }
@@ -82,27 +77,32 @@ RTPReceiverAudio::TelephoneEvent() const
 bool
 RTPReceiverAudio::TelephoneEventForwardToDecoder() const
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
     return _telephoneEventForwardToDecoder;
 }

 bool
 RTPReceiverAudio::TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
     return (_telephoneEventPayloadType == payloadType)?true:false;
 }

 bool
 RTPReceiverAudio::CNGPayloadType(const WebRtc_Word8 payloadType,
-                                 WebRtc_UWord32& frequency)
+                                 WebRtc_UWord32* frequency,
+                                 bool* cngPayloadTypeHasChanged)
 {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
+    *cngPayloadTypeHasChanged = false;
+
     // We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
     if(_cngNBPayloadType == payloadType)
     {
-        frequency = 8000;
+        *frequency = 8000;
         if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngNBPayloadType))
-        {
-            ResetStatistics();
-        }
+            *cngPayloadTypeHasChanged = true;
+
         _cngPayloadType = _cngNBPayloadType;
         return true;
     } else if(_cngWBPayloadType == payloadType)
@@ -110,33 +110,27 @@ RTPReceiverAudio::CNGPayloadType(const WebRtc_Word8 payloadType,
         // if last received codec is G.722 we must use frequency 8000
         if(_lastReceivedG722)
         {
-            frequency = 8000;
+            *frequency = 8000;
         } else
         {
-            frequency = 16000;
+            *frequency = 16000;
         }
         if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngWBPayloadType))
-        {
-            ResetStatistics();
-        }
+            *cngPayloadTypeHasChanged = true;
         _cngPayloadType = _cngWBPayloadType;
         return true;
     }else if(_cngSWBPayloadType == payloadType)
     {
-        frequency = 32000;
+        *frequency = 32000;
         if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngSWBPayloadType))
-        {
-            ResetStatistics();
-        }
+            *cngPayloadTypeHasChanged = true;
         _cngPayloadType = _cngSWBPayloadType;
         return true;
     }else if(_cngFBPayloadType == payloadType)
     {
-        frequency = 48000;
+        *frequency = 48000;
         if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngFBPayloadType))
-        {
-            ResetStatistics();
-        }
+            *cngPayloadTypeHasChanged = true;
         _cngPayloadType = _cngFBPayloadType;
         return true;
     }else
@@ -194,6 +188,8 @@ ModuleRTPUtility::Payload* RTPReceiverAudio::RegisterReceiveAudioPayload(
     const WebRtc_UWord32 frequency,
     const WebRtc_UWord8 channels,
     const WebRtc_UWord32 rate) {
+  CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
+
   if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
     _telephoneEventPayloadType = payloadType;
   }
@@ -223,6 +219,35 @@ ModuleRTPUtility::Payload* RTPReceiverAudio::RegisterReceiveAudioPayload(
   return payload;
 }

+void RTPReceiverAudio::SendTelephoneEvents(
+    WebRtc_UWord8 numberOfNewEvents,
+    WebRtc_UWord8 newEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS],
+    WebRtc_UWord8 numberOfRemovedEvents,
+    WebRtc_UWord8 removedEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS]) {
+  // Copy these variables since we can't hold the critsect when we call the
+  // callback. _cbAudioFeedback and _id are immutable though.
+  bool telephoneEvent;
+  bool telephoneEventDetectEndOfTone;
+  {
+    CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
+    telephoneEvent = _telephoneEvent;
+    telephoneEventDetectEndOfTone = _telephoneEventDetectEndOfTone;
+  }
+  if (telephoneEvent) {
+    for (int n = 0; n < numberOfNewEvents; ++n) {
+      _cbAudioFeedback->OnReceivedTelephoneEvent(
+          _id, newEvents[n], false);
+    }
+    if (telephoneEventDetectEndOfTone) {
+      for (int n = 0; n < numberOfRemovedEvents; ++n) {
+        _cbAudioFeedback->OnReceivedTelephoneEvent(
+            _id, removedEvents[n], true);
+      }
+    }
+  }
+}
+
 // we are not allowed to have any critsects when calling CallbackOfReceivedPayloadData
 WebRtc_Word32
 RTPReceiverAudio::ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
@@ -235,119 +260,112 @@ RTPReceiverAudio::ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
     WebRtc_UWord8 removedEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
     WebRtc_UWord8 numberOfNewEvents = 0;
     WebRtc_UWord8 numberOfRemovedEvents = 0;
-    bool telephoneEventPacket = TelephoneEventPayloadType(rtpHeader->header.payloadType);

     if(payloadLength == 0)
     {
         return 0;
     }

-    {
-        CriticalSectionScoped lock(_criticalSectionFeedback);
-
-        if(telephoneEventPacket)
-        {
-            // RFC 4733 2.3
-            /*
-                0                   1                   2                   3
-                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-                |     event     |E|R| volume    |          duration             |
-                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-            */
-            if(payloadLength % 4 != 0)
-            {
-                return -1;
-            }
-            WebRtc_UWord8 numberOfEvents = payloadLength / 4;
-
-            // sanity
-            if(numberOfEvents >= MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS)
-            {
-                numberOfEvents = MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS;
-            }
-            for (int n = 0; n < numberOfEvents; n++)
-            {
-                bool end = (payloadData[(4*n)+1] & 0x80)? true:false;
-                std::set<WebRtc_UWord8>::iterator event =
-                    _telephoneEventReported.find(payloadData[4*n]);
-                if(event != _telephoneEventReported.end())
-                {
-                    // we have already seen this event
-                    if(end)
-                    {
-                        removedEvents[numberOfRemovedEvents]= payloadData[4*n];
-                        numberOfRemovedEvents++;
-                        _telephoneEventReported.erase(payloadData[4*n]);
-                    }
-                }else
-                {
-                    if(end)
-                    {
-                        // don't add if it's a end of a tone
-                    }else
-                    {
-                        newEvents[numberOfNewEvents] = payloadData[4*n];
-                        numberOfNewEvents++;
-                        _telephoneEventReported.insert(payloadData[4*n]);
-                    }
-                }
-            }
-
-            // RFC 4733 2.5.1.3 & 2.5.2.3 Long-Duration Events
-            // should not be a problem since we don't care about the duration
-
-            // RFC 4733 See 2.5.1.5. & 2.5.2.4. Multiple Events in a Packet
-        }
-        if(_telephoneEvent && _cbAudioFeedback)
-        {
-            for (int n = 0; n < numberOfNewEvents; n++)
-            {
-                _cbAudioFeedback->OnReceivedTelephoneEvent(_id, newEvents[n], false);
-            }
-            if(_telephoneEventDetectEndOfTone)
-            {
-                for (int n = 0; n < numberOfRemovedEvents; n++)
-                {
-                    _cbAudioFeedback->OnReceivedTelephoneEvent(_id, removedEvents[n], true);
-                }
-            }
-        }
-    }
-    if(! telephoneEventPacket )
-    {
-        _lastReceivedFrequency = audioSpecific.frequency;
-    }
-
-    // Check if this is a CNG packet, receiver might want to know
-    WebRtc_UWord32 dummy;
-    if(CNGPayloadType(rtpHeader->header.payloadType, dummy))
-    {
-        rtpHeader->type.Audio.isCNG=true;
-        rtpHeader->frameType = kAudioFrameCN;
-    }else
-    {
-        rtpHeader->frameType = kAudioFrameSpeech;
-        rtpHeader->type.Audio.isCNG=false;
-    }
-
-    // check if it's a DTMF event, hence something we can playout
-    if(telephoneEventPacket)
-    {
-        if(!_telephoneEventForwardToDecoder)
-        {
-            // don't forward event to decoder
-            return 0;
-        }
-        std::set<WebRtc_UWord8>::iterator first =
-            _telephoneEventReported.begin();
-        if(first != _telephoneEventReported.end() && *first > 15)
-        {
-            // don't forward non DTMF events
-            return 0;
-        }
-    }
+    bool telephoneEventPacket = TelephoneEventPayloadType(rtpHeader->header.payloadType);
+    if(telephoneEventPacket)
+    {
+        CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
+
+        // RFC 4733 2.3
+        /*
+            0                   1                   2                   3
+            0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+            +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            |     event     |E|R| volume    |          duration             |
+            +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        */
+        if(payloadLength % 4 != 0)
+        {
+            return -1;
+        }
+        WebRtc_UWord8 numberOfEvents = payloadLength / 4;
+
+        // sanity
+        if(numberOfEvents >= MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS)
+        {
+            numberOfEvents = MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS;
+        }
+        for (int n = 0; n < numberOfEvents; n++)
+        {
+            bool end = (payloadData[(4*n)+1] & 0x80)? true:false;
+            std::set<WebRtc_UWord8>::iterator event =
+                _telephoneEventReported.find(payloadData[4*n]);
+            if(event != _telephoneEventReported.end())
+            {
+                // we have already seen this event
+                if(end)
+                {
+                    removedEvents[numberOfRemovedEvents]= payloadData[4*n];
+                    numberOfRemovedEvents++;
+                    _telephoneEventReported.erase(payloadData[4*n]);
+                }
+            }else
+            {
+                if(end)
+                {
+                    // don't add if it's a end of a tone
+                }else
+                {
+                    newEvents[numberOfNewEvents] = payloadData[4*n];
+                    numberOfNewEvents++;
+                    _telephoneEventReported.insert(payloadData[4*n]);
+                }
+            }
+        }
+
+        // RFC 4733 2.5.1.3 & 2.5.2.3 Long-Duration Events
+        // should not be a problem since we don't care about the duration
+
+        // RFC 4733 See 2.5.1.5. & 2.5.2.4. Multiple Events in a Packet
+    }
+
+    // This needs to be called without locks held.
+    SendTelephoneEvents(numberOfNewEvents, newEvents, numberOfRemovedEvents,
+                        removedEvents);
+
+    {
+        CriticalSectionScoped lock(_criticalSectionRtpReceiverAudio.get());
+
+        if(! telephoneEventPacket )
+        {
+            _lastReceivedFrequency = audioSpecific.frequency;
+        }
+
+        // Check if this is a CNG packet, receiver might want to know
+        WebRtc_UWord32 ignored;
+        bool alsoIgnored;
+        if(CNGPayloadType(rtpHeader->header.payloadType, &ignored, &alsoIgnored))
+        {
+            rtpHeader->type.Audio.isCNG=true;
+            rtpHeader->frameType = kAudioFrameCN;
+        }else
+        {
+            rtpHeader->frameType = kAudioFrameSpeech;
+            rtpHeader->type.Audio.isCNG=false;
+        }
+
+        // check if it's a DTMF event, hence something we can playout
+        if(telephoneEventPacket)
+        {
+            if(!_telephoneEventForwardToDecoder)
+            {
+                // don't forward event to decoder
+                return 0;
+            }
+            std::set<WebRtc_UWord8>::iterator first =
+                _telephoneEventReported.begin();
+            if(first != _telephoneEventReported.end() && *first > 15)
+            {
+                // don't forward non DTMF events
+                return 0;
+            }
+        }
+    }

     if(isRED && !(payloadData[0] & 0x80))
@@ -356,12 +374,13 @@ RTPReceiverAudio::ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
         rtpHeader->header.payloadType = payloadData[0];

         // only one frame in the RED strip the one byte to help NetEq
-        return CallbackOfReceivedPayloadData(payloadData+1,
-                                             payloadLength-1,
-                                             rtpHeader);
+        return _parent->CallbackOfReceivedPayloadData(payloadData+1,
+                                                      payloadLength-1,
+                                                      rtpHeader);
     }
     rtpHeader->type.Audio.channel = audioSpecific.channels;
-    return CallbackOfReceivedPayloadData(payloadData, payloadLength, rtpHeader);
+    return _parent->CallbackOfReceivedPayloadData(
+        payloadData, payloadLength, rtpHeader);
 }
 }  // namespace webrtc
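The new SendTelephoneEvents helper above follows a standard pattern for firing callbacks from a lock-protected class: copy the guarded state while holding the lock, release it, and only then invoke the callback, so the callee can re-enter the object without deadlocking on a non-recursive lock. A generic sketch of that pattern using std::mutex rather than WebRTC's CriticalSectionWrapper (EventSink, EventReporter, and PrintSink are made-up names for illustration):

#include <cstdio>
#include <mutex>
#include <vector>

// Observer interface, analogous to RtpAudioFeedback::OnReceivedTelephoneEvent.
class EventSink {
 public:
  virtual ~EventSink() = default;
  virtual void OnEvent(int event, bool ended) = 0;
};

class EventReporter {
 public:
  explicit EventReporter(EventSink* sink) : sink_(sink) {}

  void SetEnabled(bool enabled) {
    std::lock_guard<std::mutex> lock(mutex_);
    enabled_ = enabled;
  }

  // Copy the guarded flag under the lock, then call out with the lock
  // released, so the sink may call back into this object (e.g. SetEnabled)
  // without deadlocking.
  void Report(const std::vector<int>& new_events) {
    bool enabled;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      enabled = enabled_;
    }
    if (!enabled) return;
    for (int e : new_events) sink_->OnEvent(e, /*ended=*/false);
  }

 private:
  std::mutex mutex_;
  bool enabled_ = false;
  EventSink* const sink_;  // immutable after construction, like _cbAudioFeedback
};

class PrintSink : public EventSink {
 public:
  void OnEvent(int event, bool ended) override {
    std::printf("event %d ended=%d\n", event, ended);
  }
};

int main() {
  PrintSink sink;
  EventReporter reporter(&sink);
  reporter.SetEnabled(true);
  reporter.Report({1, 5, 9});
  return 0;
}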

rtp_receiver_audio.h

@@ -15,19 +15,21 @@
 #include "rtp_rtcp_defines.h"
 #include "rtp_utility.h"
+#include "scoped_ptr.h"
 #include "typedefs.h"

 namespace webrtc {
 class CriticalSectionWrapper;
+class RTPReceiver;

+// Handles audio RTP packets. This class is thread-safe.
 class RTPReceiverAudio
 {
 public:
-    RTPReceiverAudio(const WebRtc_Word32 id);
-    virtual ~RTPReceiverAudio();
+    RTPReceiverAudio(const WebRtc_Word32 id,
+                     RTPReceiver* parent,
+                     RtpAudioFeedback* incomingMessagesCallback);

-    WebRtc_Word32 RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback);
-
     ModuleRTPUtility::Payload* RegisterReceiveAudioPayload(
         const char payloadName[RTP_PAYLOAD_NAME_SIZE],
@@ -52,25 +54,29 @@ public:
     // Is TelephoneEvent configured with payload type payloadType
     bool TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const;

-    // Is CNG configured with payload type payloadType
-    bool CNGPayloadType(const WebRtc_Word8 payloadType, WebRtc_UWord32& frequency);
+    // Returns true if CNG is configured with payload type payloadType. If so,
+    // the frequency and cngPayloadTypeHasChanged are filled in.
+    bool CNGPayloadType(const WebRtc_Word8 payloadType,
+                        WebRtc_UWord32* frequency,
+                        bool* cngPayloadTypeHasChanged);

     WebRtc_Word32 ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
                                           const WebRtc_UWord8* payloadData,
                                           const WebRtc_UWord16 payloadLength,
                                           const ModuleRTPUtility::AudioPayload& audioSpecific,
                                           const bool isRED);

-    virtual WebRtc_Word32 ResetStatistics() = 0;
-
-protected:
-    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
-                                                        const WebRtc_UWord16 payloadSize,
-                                                        const WebRtcRTPHeader* rtpHeader) = 0;
-
 private:
-    WebRtc_Word32           _id;
+    void SendTelephoneEvents(
+        WebRtc_UWord8 numberOfNewEvents,
+        WebRtc_UWord8 newEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS],
+        WebRtc_UWord8 numberOfRemovedEvents,
+        WebRtc_UWord8 removedEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS]);

-    WebRtc_UWord32          _lastReceivedFrequency;
+    WebRtc_Word32           _id;
+    RTPReceiver*            _parent;
+    scoped_ptr<CriticalSectionWrapper> _criticalSectionRtpReceiverAudio;
+
+    WebRtc_UWord32          _lastReceivedFrequency;

     bool                    _telephoneEvent;
     bool                    _telephoneEventForwardToDecoder;
@@ -82,14 +88,13 @@ private:
     WebRtc_Word8            _cngWBPayloadType;
     WebRtc_Word8            _cngSWBPayloadType;
     WebRtc_Word8            _cngFBPayloadType;
     WebRtc_Word8            _cngPayloadType;

     // G722 is special since it use the wrong number of RTP samples in timestamp VS. number of samples in the frame
     WebRtc_Word8            _G722PayloadType;
     bool                    _lastReceivedG722;

-    CriticalSectionWrapper* _criticalSectionFeedback;
-    RtpAudioFeedback*       _cbAudioFeedback;
+    RtpAudioFeedback*       _cbAudioFeedback;
 };
 } // namespace webrtc

 #endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
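With the inheritance link gone, RTPReceiverAudio can no longer invoke the receiver's ResetStatistics() hook directly, so CNGPayloadType now reports a change of CNG payload type through an out-parameter and leaves the side effect to the caller (see CheckPayloadChanged in rtp_receiver.cc above). A small standalone sketch of that query-plus-change-flag shape, with hypothetical names rather than the WebRTC API:

#include <cstdio>

// Hypothetical tracker for two CNG payload types (narrowband and wideband).
// It answers "is this a CNG payload type?" and reports whether the match
// switched from the previously matched type, leaving any side effect
// (e.g. resetting receive statistics) to the caller.
class CngQuery {
 public:
  CngQuery(int nb_payload_type, int wb_payload_type)
      : nb_(nb_payload_type), wb_(wb_payload_type) {}

  bool IsCng(int payload_type, int* frequency_hz, bool* has_changed) {
    *has_changed = false;
    if (payload_type != nb_ && payload_type != wb_) return false;
    *frequency_hz = (payload_type == nb_) ? 8000 : 16000;
    if (last_matched_ != -1 && last_matched_ != payload_type)
      *has_changed = true;  // switched between CNG variants
    last_matched_ = payload_type;
    return true;
  }

 private:
  const int nb_;
  const int wb_;
  int last_matched_ = -1;
};

int main() {
  CngQuery query(13, 98);  // 13 is the static CN payload type; 98 is made up
  int freq = 0;
  bool changed = false;
  query.IsCng(13, &freq, &changed);  // first match, changed == false
  query.IsCng(98, &freq, &changed);  // switched to wideband, changed == true
  std::printf("freq=%d changed=%d\n", freq, changed);
  return 0;
}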

rtp_rtcp_impl.cc

@@ -59,7 +59,7 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
                configuration.audio_messages,
                configuration.paced_sender),
    _rtpReceiver(configuration.id, configuration.audio, configuration.clock,
-                this),
+                this, configuration.audio_messages),
    _rtcpSender(configuration.id, configuration.audio, configuration.clock,
                this),
    _rtcpReceiver(configuration.id, configuration.clock, this),
@@ -103,8 +103,6 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
  _rtcpReceiver.RegisterRtcpObservers(configuration.intra_frame_callback,
                                      configuration.bandwidth_callback,
                                      configuration.rtcp_feedback);
-  _rtpReceiver.RegisterIncomingAudioCallback(configuration.audio_messages);
-
  _rtcpSender.RegisterSendTransport(configuration.outgoing_transport);

  // make sure that RTCP objects are aware of our SSRC
@@ -1549,15 +1547,15 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SetTelephoneEventStatus(
               " detectEndOfTone:%d)", enable, forwardToDecoder,
               detectEndOfTone);

-  return _rtpReceiver.SetTelephoneEventStatus(enable, forwardToDecoder,
-                                              detectEndOfTone);
+  return _rtpReceiver.GetAudioReceiver()->SetTelephoneEventStatus(
+      enable, forwardToDecoder, detectEndOfTone);
 }

 // Is outband TelephoneEvent turned on/off?
 bool ModuleRtpRtcpImpl::TelephoneEvent() const {
   WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TelephoneEvent()");

-  return _rtpReceiver.TelephoneEvent();
+  return _rtpReceiver.GetAudioReceiver()->TelephoneEvent();
 }

 // Is forwarding of outband telephone events turned on/off?
@@ -1565,7 +1563,7 @@ bool ModuleRtpRtcpImpl::TelephoneEventForwardToDecoder() const {
   WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
               "TelephoneEventForwardToDecoder()");

-  return _rtpReceiver.TelephoneEventForwardToDecoder();
+  return _rtpReceiver.GetAudioReceiver()->TelephoneEventForwardToDecoder();
 }

 // Send a TelephoneEvent tone using RFC 2833 (4733)