Revert r4301

R=mikhal@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1809004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4357 4adac7df-926f-26a2-2b94-8c16560cd09d
tnakamura@webrtc.org 2013-07-16 19:25:04 +00:00
parent 7b2f955e56
commit aa4d96a134
98 changed files with 4306 additions and 2856 deletions

@ -44,7 +44,6 @@ struct RTPHeader
uint32_t arrOfCSRCs[kRtpCsrcSize];
uint8_t paddingLength;
uint16_t headerLength;
int payload_type_frequency;
RTPHeaderExtension extension;
};
@ -94,13 +93,13 @@ union RTPVideoTypeHeader
RTPVideoHeaderVP8 VP8;
};
enum RtpVideoCodecTypes
enum RTPVideoCodecTypes
{
kRtpVideoNone,
kRtpVideoGeneric,
kRtpVideoVp8,
kRtpVideoFec,
kRtpVideoI420
kRTPVideoGeneric = 0,
kRTPVideoVP8 = 8,
kRTPVideoNoVideo = 10,
kRTPVideoFEC = 11,
kRTPVideoI420 = 12
};
struct RTPVideoHeader
{
@ -110,7 +109,7 @@ struct RTPVideoHeader
bool isFirstPacket; // first packet in frame
uint8_t simulcastIdx; // Index of the simulcast encoder creating
// this frame, 0 if not using simulcast.
RtpVideoCodecTypes codec;
RTPVideoCodecTypes codec;
RTPVideoTypeHeader codecHeader;
};
union RTPTypeHeader

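Not part of this commit: a minimal standalone sketch of code dispatching on the reverted RTPVideoCodecTypes values. The enum is re-declared locally so the example compiles on its own, mirroring the hunk above; CodecTypeName and main are hypothetical.

#include <cstdio>

// Local copy of the reverted enum values shown above (illustrative only).
enum RTPVideoCodecTypes {
  kRTPVideoGeneric = 0,
  kRTPVideoVP8 = 8,
  kRTPVideoNoVideo = 10,
  kRTPVideoFEC = 11,
  kRTPVideoI420 = 12
};

// Hypothetical helper mapping a codec type to a printable name.
const char* CodecTypeName(RTPVideoCodecTypes type) {
  switch (type) {
    case kRTPVideoGeneric: return "Generic";
    case kRTPVideoVP8: return "VP8";
    case kRTPVideoNoVideo: return "NoVideo";
    case kRTPVideoFEC: return "FEC";
    case kRTPVideoI420: return "I420";
  }
  return "Unknown";
}

int main() {
  std::printf("%s\n", CodecTypeName(kRTPVideoVP8));  // prints "VP8"
  return 0;
}
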
@ -166,6 +166,7 @@
'remote_bitrate_estimator/bitrate_estimator_unittest.cc',
'remote_bitrate_estimator/rtp_to_ntp_unittest.cc',
'rtp_rtcp/source/mock/mock_rtp_payload_strategy.h',
'rtp_rtcp/source/mock/mock_rtp_receiver_video.h',
'rtp_rtcp/source/fec_test_helper.cc',
'rtp_rtcp/source/fec_test_helper.h',
'rtp_rtcp/source/nack_rtx_unittest.cc',

@ -1,54 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class Clock;
class ReceiveStatistics : public Module {
public:
struct RtpReceiveStatistics {
uint8_t fraction_lost;
uint32_t cumulative_lost;
uint32_t extended_max_sequence_number;
uint32_t jitter;
uint32_t max_jitter;
};
virtual ~ReceiveStatistics() {}
static ReceiveStatistics* Create(Clock* clock);
virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
bool retransmitted, bool in_order) = 0;
virtual bool Statistics(RtpReceiveStatistics* statistics, bool reset) = 0;
virtual bool Statistics(RtpReceiveStatistics* statistics, int32_t* missing,
bool reset) = 0;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const = 0;
virtual uint32_t BitrateReceived() = 0;
virtual void ResetStatistics() = 0;
virtual void ResetDataCounters() = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
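
The ReceiveStatistics interface deleted above (added by r4301) was driven roughly as sketched below. This is a hedged example against the pre-revert tree, not code from this commit; the Clock, RTPHeader and packet size are supplied by the caller, and only calls declared in the deleted header are used.

#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

// Sketch only: feeds one parsed packet and reads back a report block.
void ExampleReceiveStatistics(webrtc::Clock* clock,
                              const webrtc::RTPHeader& header,
                              size_t packet_size_bytes) {
  webrtc::scoped_ptr<webrtc::ReceiveStatistics> stats(
      webrtc::ReceiveStatistics::Create(clock));
  // |retransmitted| and |in_order| would normally come from the caller's
  // sequence-number bookkeeping; fixed values are used here for brevity.
  stats->IncomingPacket(header, packet_size_bytes,
                        false /* retransmitted */, true /* in_order */);
  webrtc::ReceiveStatistics::RtpReceiveStatistics report;
  if (stats->Statistics(&report, true /* reset */)) {
    // report.fraction_lost, report.jitter, etc. are now populated.
  }
}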

@ -1,120 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RTPPayloadRegistry;
class TelephoneEventHandler {
public:
virtual ~TelephoneEventHandler() {}
// The following three methods implement the TelephoneEventHandler interface.
// Forward DTMFs to decoder for playout.
virtual void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) = 0;
// Is forwarding of outband telephone events turned on/off?
virtual bool TelephoneEventForwardToDecoder() const = 0;
// Is TelephoneEvent configured with payload type payload_type
virtual bool TelephoneEventPayloadType(const int8_t payload_type) const = 0;
};
class RtpReceiver {
public:
// Creates a video-enabled RTP receiver.
static RtpReceiver* CreateVideoReceiver(
int id, Clock* clock,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry);
// Creates an audio-enabled RTP receiver.
static RtpReceiver* CreateAudioReceiver(
int id, Clock* clock,
RtpAudioFeedback* incoming_audio_feedback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry);
virtual ~RtpReceiver() {}
// Returns a TelephoneEventHandler if available.
virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
// Registers a receive payload in the payload registry and notifies the media
// receiver strategy.
virtual int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) = 0;
// De-registers |payload_type| from the payload registry.
virtual int32_t DeRegisterReceivePayload(const int8_t payload_type) = 0;
// Parses the media-specific parts of an RTP packet and updates the receiver
// state. This means, for instance, that any changes in SSRC and payload type
// are detected and acted upon.
virtual bool IncomingRtpPacket(RTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
int incoming_rtp_packet_length,
PayloadUnion payload_specific,
bool in_order) = 0;
// Returns the currently configured NACK method.
virtual NACKMethod NACK() const = 0;
// Turn negative acknowledgement (NACK) requests on/off.
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) = 0;
// Returns the last received timestamp.
virtual uint32_t TimeStamp() const = 0;
// Returns the time in milliseconds when the last timestamp was received.
virtual int32_t LastReceivedTimeMs() const = 0;
// Returns the remote SSRC of the currently received RTP stream.
virtual uint32_t SSRC() const = 0;
// Returns the current remote CSRCs.
virtual int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const = 0;
// Returns the current energy of the RTP stream received.
virtual int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const = 0;
// Enable/disable RTX and set the SSRC to be used.
virtual void SetRTXStatus(bool enable, uint32_t ssrc) = 0;
// Returns the current RTX status and the SSRC and payload type used.
virtual void RTXStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const = 0;
// Sets the RTX payload type.
virtual void SetRtxPayloadType(int payload_type) = 0;
// Returns true if the packet with RTP header |header| is likely to be a
// retransmitted packet, false otherwise.
virtual bool RetransmitOfOldPacket(const RTPHeader& header, int jitter,
int min_rtt) const = 0;
// Returns true if |sequence_number| is received in order, false otherwise.
virtual bool InOrderPacket(const uint16_t sequence_number) const = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
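
Likewise, a hedged sketch of how the RtpReceiver interface deleted above was wired up in the pre-revert tree; the later unit-test hunks in this commit remove call sites of exactly this shape. The payload name, payload type and callback objects are illustrative.

#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

// Sketch only: create a video receiver and register one receive payload.
void ExampleRtpReceiver(webrtc::Clock* clock,
                        webrtc::RtpData* payload_callback,
                        webrtc::RtpFeedback* feedback_callback,
                        webrtc::RTPPayloadRegistry* payload_registry) {
  webrtc::scoped_ptr<webrtc::RtpReceiver> receiver(
      webrtc::RtpReceiver::CreateVideoReceiver(
          0 /* id */, clock, payload_callback, feedback_callback,
          payload_registry));
  receiver->RegisterReceivePayload("VP8", 100 /* payload type */,
                                   90000 /* frequency */, 0 /* channels */,
                                   0 /* rate */);
  // Parsed packets are then passed to IncomingRtpPacket() together with the
  // PayloadUnion looked up from |payload_registry|.
}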

@ -19,9 +19,8 @@
namespace webrtc {
// Forward declarations.
class PacedSender;
class ReceiveStatistics;
class RemoteBitrateEstimator;
class RtpReceiver;
class RemoteBitrateObserver;
class Transport;
class RtpRtcp : public Module {
@ -58,7 +57,8 @@ class RtpRtcp : public Module {
bool audio;
Clock* clock;
RtpRtcp* default_module;
ReceiveStatistics* receive_statistics;
RtpData* incoming_data;
RtpFeedback* incoming_messages;
Transport* outgoing_transport;
RtcpFeedback* rtcp_feedback;
RtcpIntraFrameObserver* intra_frame_callback;
@ -68,7 +68,6 @@ class RtpRtcp : public Module {
RemoteBitrateEstimator* remote_bitrate_estimator;
PacedSender* paced_sender;
};
/*
* Create a RTP/RTCP module object using the system clock.
*
@ -82,11 +81,174 @@ class RtpRtcp : public Module {
*
***************************************************************************/
/*
* configure a RTP packet timeout value
*
* RTPtimeoutMS - time in milliseconds after last received RTP packet
* RTCPtimeoutMS - time in milliseconds after last received RTCP packet
*
* return -1 on failure else 0
*/
virtual int32_t SetPacketTimeout(
const uint32_t RTPtimeoutMS,
const uint32_t RTCPtimeoutMS) = 0;
/*
* Set periodic dead or alive notification
*
* enable - turn periodic dead or alive notification on/off
* sampleTimeSeconds - sample interval in seconds for dead or alive
* notifications
*
* return -1 on failure else 0
*/
virtual int32_t SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sampleTimeSeconds) = 0;
/*
* Get periodic dead or alive notification status
*
* enable - periodic dead or alive notification on/off
* sampleTimeSeconds - sample interval in seconds for dead or alive
* notifications
*
* return -1 on failure else 0
*/
virtual int32_t PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sampleTimeSeconds) = 0;
/*
* set voice codec name and payload type
*
* return -1 on failure else 0
*/
virtual int32_t RegisterReceivePayload(
const CodecInst& voiceCodec) = 0;
/*
* set video codec name and payload type
*
* return -1 on failure else 0
*/
virtual int32_t RegisterReceivePayload(
const VideoCodec& videoCodec) = 0;
/*
* get payload type for a voice codec
*
* return -1 on failure else 0
*/
virtual int32_t ReceivePayloadType(
const CodecInst& voiceCodec,
int8_t* plType) = 0;
/*
* get payload type for a video codec
*
* return -1 on failure else 0
*/
virtual int32_t ReceivePayloadType(
const VideoCodec& videoCodec,
int8_t* plType) = 0;
/*
* Remove a registered payload type from the list of accepted payloads
*
* payloadType - payload type of codec
*
* return -1 on failure else 0
*/
virtual int32_t DeRegisterReceivePayload(
const int8_t payloadType) = 0;
/*
* Get last received remote timestamp
*/
virtual uint32_t RemoteTimestamp() const = 0;
/*
* Get the local time of the last received remote timestamp
*/
virtual int64_t LocalTimeOfRemoteTimeStamp() const = 0;
/*
* Get the current estimated remote timestamp
*
* timestamp - estimated timestamp
*
* return -1 on failure else 0
*/
virtual int32_t EstimatedRemoteTimeStamp(
uint32_t& timestamp) const = 0;
/*
* Get incoming SSRC
*/
virtual uint32_t RemoteSSRC() const = 0;
/*
* Get remote CSRC
*
* arrOfCSRC - array that will receive the CSRCs
*
* return -1 on failure else the number of valid entries in the list
*/
virtual int32_t RemoteCSRCs(
uint32_t arrOfCSRC[kRtpCsrcSize]) const = 0;
/*
* get the currently configured SSRC filter
*
* allowedSSRC - SSRC that will be allowed through
*
* return -1 on failure else 0
*/
virtual int32_t SSRCFilter(uint32_t& allowedSSRC) const = 0;
/*
* set a SSRC to be used as a filter for incoming RTP streams
*
* allowedSSRC - SSRC that will be allowed through
*
* return -1 on failure else 0
*/
virtual int32_t SetSSRCFilter(const bool enable,
const uint32_t allowedSSRC) = 0;
/*
* Turn on/off receiving RTX (RFC 4588) on a specific SSRC.
*/
virtual int32_t SetRTXReceiveStatus(bool enable, uint32_t SSRC) = 0;
// Sets the payload type to expected for received RTX packets. Note
// that this doesn't enable RTX, only the payload type is set.
virtual void SetRtxReceivePayloadType(int payload_type) = 0;
/*
* Get status of receiving RTX (RFC 4588) on a specific SSRC.
*/
virtual int32_t RTXReceiveStatus(bool* enable,
uint32_t* SSRC,
int* payloadType) const = 0;
/*
* called by the network module when we receive a packet
*
* incomingPacket - incoming packet buffer
* packetLength - length of incoming buffer
* parsed_rtp_header - the parsed RTP header
*
* return -1 on failure else 0
*/
virtual int32_t IncomingRtpPacket(const uint8_t* incomingPacket,
const uint16_t packetLength,
const RTPHeader& parsed_rtp_header) = 0;
virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
uint16_t incoming_packet_length) = 0;
virtual void SetRemoteSSRC(const uint32_t ssrc) = 0;
/**************************************************************************
*
* Sender
@ -446,6 +608,32 @@ class RtpRtcp : public Module {
virtual int32_t SendRTCPSliceLossIndication(
const uint8_t pictureID) = 0;
/*
* Reset RTP statistics
*
* return -1 on failure else 0
*/
virtual int32_t ResetStatisticsRTP() = 0;
/*
* locally computed statistics of the received RTP stream
*
* return -1 on failure else 0
*/
virtual int32_t StatisticsRTP(
uint8_t* fraction_lost, // scale 0 to 255
uint32_t* cum_lost, // number of lost packets
uint32_t* ext_max, // highest sequence number received
uint32_t* jitter,
uint32_t* max_jitter = NULL) const = 0;
/*
* Reset RTP data counters for the receiving side
*
* return -1 on failure else 0
*/
virtual int32_t ResetReceiveDataCountersRTP() = 0;
/*
* Reset RTP data counters for the sending side
*
@ -460,7 +648,9 @@ class RtpRtcp : public Module {
*/
virtual int32_t DataCountersRTP(
uint32_t* bytesSent,
uint32_t* packetsSent) const = 0;
uint32_t* packetsSent,
uint32_t* bytesReceived,
uint32_t* packetsReceived) const = 0;
/*
* Get received RTCP sender info
*
@ -541,6 +731,18 @@ class RtpRtcp : public Module {
/*
* (NACK)
*/
virtual NACKMethod NACK() const = 0;
/*
* Turn negative acknowledgement requests on/off
* |max_reordering_threshold| should be set to how much a retransmitted
* packet can be expected to be reordered (in sequence numbers) compared to
* a packet which has not been retransmitted.
*
* return -1 on failure else 0
*/
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) = 0;
/*
* TODO(holmer): Propagate this API to VideoEngine.
@ -595,6 +797,19 @@ class RtpRtcp : public Module {
virtual int32_t SetAudioPacketSize(
const uint16_t packetSizeSamples) = 0;
/*
* Forward DTMF to decoder for playout.
*
* return -1 on failure else 0
*/
virtual int SetTelephoneEventForwardToDecoder(bool forwardToDecoder) = 0;
/*
* Returns true if received DTMF events are forwarded to the decoder using
* the OnPlayTelephoneEvent callback.
*/
virtual bool TelephoneEventForwardToDecoder() const = 0;
/*
* SendTelephoneEventActive
*

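To illustrate the receive-side API restored in this header, a hedged sketch of a caller polling the module-level statistics. |rtp_rtcp| is assumed to be an already-configured module; error handling is omitted and the function name is hypothetical.

#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"

// Sketch only: read loss/jitter and the combined send/receive data counters
// through the signatures shown above.
void LogRtpStatistics(webrtc::RtpRtcp* rtp_rtcp) {
  uint8_t fraction_lost = 0;  // scale 0 to 255
  uint32_t cumulative_lost = 0, extended_max = 0, jitter = 0, max_jitter = 0;
  if (rtp_rtcp->StatisticsRTP(&fraction_lost, &cumulative_lost, &extended_max,
                              &jitter, &max_jitter) == 0) {
    // Values describe the received RTP stream as seen by this module.
  }
  uint32_t bytes_sent = 0, packets_sent = 0;
  uint32_t bytes_received = 0, packets_received = 0;
  rtp_rtcp->DataCountersRTP(&bytes_sent, &packets_sent,
                            &bytes_received, &packets_received);
}
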
@ -11,39 +11,22 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
#include <stddef.h>
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/typedefs.h"
#ifndef NULL
#define NULL 0
#endif
#define RTCP_CNAME_SIZE 256 // RFC 3550 page 44, including null termination
#define IP_PACKET_SIZE 1500 // we assume ethernet
#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
#define TIMEOUT_SEI_MESSAGES_MS 30000 // in milliseconds
namespace webrtc {
namespace webrtc{
const int kVideoPayloadTypeFrequency = 90000;
struct AudioPayload
{
uint32_t frequency;
uint8_t channels;
uint32_t rate;
};
struct VideoPayload
{
RtpVideoCodecTypes videoCodecType;
uint32_t maxRate;
};
union PayloadUnion
{
AudioPayload Audio;
VideoPayload Video;
};
const int32_t kDefaultVideoFrequency = 90000;
enum RTCPMethod
{
@ -162,9 +145,6 @@ public:
const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader) = 0;
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) = 0;
protected:
virtual ~RtpData() {}
};
@ -182,6 +162,8 @@ public:
const int32_t /*id*/,
const RTCPVoIPMetric* /*metric*/) {};
virtual void OnRTCPPacketTimeout(const int32_t /*id*/) {};
virtual void OnReceiveReportReceived(const int32_t id,
const uint32_t senderSSRC) {};
@ -204,6 +186,14 @@ public:
const uint8_t channels,
const uint32_t rate) = 0;
virtual void OnPacketTimeout(const int32_t id) = 0;
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) = 0;
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) = 0;
virtual void OnIncomingSSRCChanged( const int32_t id,
const uint32_t SSRC) = 0;
@ -211,8 +201,6 @@ public:
const uint32_t CSRC,
const bool added) = 0;
virtual void OnResetStatistics() = 0;
protected:
virtual ~RtpFeedback() {}
};
@ -280,32 +268,32 @@ class NullRtpFeedback : public RtpFeedback {
return 0;
}
virtual void OnPacketTimeout(const int32_t id) {}
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) {}
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {}
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) {}
virtual void OnResetStatistics() {}
};
// Null object version of RtpData.
class NullRtpData : public RtpData {
public:
virtual ~NullRtpData() {}
virtual int32_t OnReceivedPayloadData(
const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader) {
return 0;
}
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) {
return true;
}
};
// Null object version of RtpAudioFeedback.

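With the revert, RtpData no longer declares OnRecoveredPacket(), so a payload sink implements only the single callback below. A hedged sketch; the class name is hypothetical.

#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"

// Sketch only: minimal payload sink against the reverted RtpData interface.
class ExamplePayloadSink : public webrtc::RtpData {
 public:
  virtual int32_t OnReceivedPayloadData(const uint8_t* payloadData,
                                        const uint16_t payloadSize,
                                        const webrtc::WebRtcRTPHeader* rtpHeader) {
    // Hand |payloadData| to a depacketizer / jitter buffer here.
    return 0;  // 0 tells the RTP module the payload was accepted.
  }
};
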
@ -35,9 +35,53 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(RtpRtcp* module));
MOCK_METHOD0(DeRegisterSyncModule,
int32_t());
MOCK_METHOD0(InitReceiver,
int32_t());
MOCK_METHOD1(RegisterIncomingDataCallback,
int32_t(RtpData* incomingDataCallback));
MOCK_METHOD1(RegisterIncomingRTPCallback,
int32_t(RtpFeedback* incomingMessagesCallback));
MOCK_METHOD2(SetPacketTimeout,
int32_t(const uint32_t RTPtimeoutMS, const uint32_t RTCPtimeoutMS));
MOCK_METHOD2(SetPeriodicDeadOrAliveStatus,
int32_t(const bool enable, const uint8_t sampleTimeSeconds));
MOCK_METHOD2(PeriodicDeadOrAliveStatus,
int32_t(bool &enable, uint8_t &sampleTimeSeconds));
MOCK_METHOD1(RegisterReceivePayload,
int32_t(const CodecInst& voiceCodec));
MOCK_METHOD1(RegisterReceivePayload,
int32_t(const VideoCodec& videoCodec));
MOCK_METHOD2(ReceivePayloadType,
int32_t(const CodecInst& voiceCodec, int8_t* plType));
MOCK_METHOD2(ReceivePayloadType,
int32_t(const VideoCodec& videoCodec, int8_t* plType));
MOCK_METHOD1(DeRegisterReceivePayload,
int32_t(const int8_t payloadType));
MOCK_CONST_METHOD0(RemoteTimestamp,
uint32_t());
MOCK_CONST_METHOD0(LocalTimeOfRemoteTimeStamp,
int64_t());
MOCK_CONST_METHOD1(EstimatedRemoteTimeStamp,
int32_t(uint32_t& timestamp));
MOCK_CONST_METHOD0(RemoteSSRC,
uint32_t());
MOCK_CONST_METHOD1(RemoteCSRCs,
int32_t(uint32_t arrOfCSRC[kRtpCsrcSize]));
MOCK_CONST_METHOD1(SSRCFilter,
int32_t(uint32_t& allowedSSRC));
MOCK_METHOD2(SetSSRCFilter,
int32_t(const bool enable, const uint32_t allowedSSRC));
MOCK_METHOD2(SetRTXReceiveStatus,
int32_t(bool enable, uint32_t ssrc));
MOCK_CONST_METHOD3(RTXReceiveStatus,
int32_t(bool* enable, uint32_t* ssrc, int* payload_type));
MOCK_METHOD1(SetRtxReceivePayloadType,
void(int));
MOCK_METHOD3(IncomingRtpPacket,
int32_t(const uint8_t* incomingPacket, const uint16_t packetLength,
const webrtc::RTPHeader& header));
MOCK_METHOD2(IncomingRtcpPacket,
int32_t(const uint8_t* incomingPacket, uint16_t packetLength));
MOCK_METHOD1(SetRemoteSSRC, void(const uint32_t ssrc));
MOCK_METHOD4(IncomingAudioNTP,
int32_t(const uint32_t audioReceivedNTPsecs,
const uint32_t audioReceivedNTPfrac,
@ -152,10 +196,16 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint64_t pictureID));
MOCK_METHOD1(SendRTCPSliceLossIndication,
int32_t(const uint8_t pictureID));
MOCK_METHOD0(ResetStatisticsRTP,
int32_t());
MOCK_CONST_METHOD5(StatisticsRTP,
int32_t(uint8_t *fraction_lost, uint32_t *cum_lost, uint32_t *ext_max, uint32_t *jitter, uint32_t *max_jitter));
MOCK_METHOD0(ResetReceiveDataCountersRTP,
int32_t());
MOCK_METHOD0(ResetSendDataCountersRTP,
int32_t());
MOCK_CONST_METHOD2(DataCountersRTP,
int32_t(uint32_t *bytesSent, uint32_t *packetsSent));
MOCK_CONST_METHOD4(DataCountersRTP,
int32_t(uint32_t *bytesSent, uint32_t *packetsSent, uint32_t *bytesReceived, uint32_t *packetsReceived));
MOCK_METHOD1(RemoteRTCPStat,
int32_t(RTCPSenderInfo* senderInfo));
MOCK_CONST_METHOD1(RemoteRTCPStat,
@ -174,6 +224,8 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const bool enable));
MOCK_METHOD3(SetREMBData,
int32_t(const uint32_t bitrate, const uint8_t numberOfSSRC, const uint32_t* SSRC));
MOCK_METHOD1(SetRemoteBitrateObserver,
bool(RemoteBitrateObserver*));
MOCK_CONST_METHOD0(IJ,
bool());
MOCK_METHOD1(SetIJStatus,
@ -200,6 +252,9 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(RtpAudioFeedback* messagesCallback));
MOCK_METHOD1(SetAudioPacketSize,
int32_t(const uint16_t packetSizeSamples));
MOCK_METHOD1(SetTelephoneEventForwardToDecoder, int(bool forwardToDecoder));
MOCK_CONST_METHOD0(TelephoneEventForwardToDecoder,
bool());
MOCK_CONST_METHOD1(SendTelephoneEventActive,
bool(int8_t& telephoneEvent));
MOCK_METHOD3(SendTelephoneEventOutband,

@ -57,10 +57,6 @@ uint32_t Bitrate::BitrateNow() const {
return static_cast<uint32_t>(bitrate);
}
int64_t Bitrate::time_last_rate_update() const {
return time_last_rate_update_;
}
void Bitrate::Process() {
// Triggered by timer.
int64_t now = clock_->TimeInMilliseconds();

@ -42,8 +42,6 @@ class Bitrate {
// Bitrate last second, updated now.
uint32_t BitrateNow() const;
int64_t time_last_rate_update() const;
protected:
Clock* clock_;

@ -12,7 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_REGISTRY_H_
#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
namespace webrtc {
@ -27,8 +27,6 @@ class MockRTPPayloadStrategy : public RTPPayloadStrategy {
const uint32_t rate));
MOCK_CONST_METHOD2(UpdatePayloadRate,
void(ModuleRTPUtility::Payload* payload, const uint32_t rate));
MOCK_CONST_METHOD1(GetPayloadTypeFrequency, int(
const ModuleRTPUtility::Payload& payload));
MOCK_CONST_METHOD5(CreatePayloadType,
ModuleRTPUtility::Payload*(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],

@ -0,0 +1,49 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
namespace webrtc {
class MockRTPReceiverVideo : public RTPReceiverVideo {
public:
MockRTPReceiverVideo() : RTPReceiverVideo(0, NULL, NULL) {}
MOCK_METHOD1(ChangeUniqueId,
void(const int32_t id));
MOCK_METHOD3(ReceiveRecoveredPacketCallback,
int32_t(WebRtcRTPHeader* rtpHeader,
const uint8_t* payloadData,
const uint16_t payloadDataLength));
MOCK_METHOD3(CallbackOfReceivedPayloadData,
int32_t(const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader));
MOCK_CONST_METHOD0(TimeStamp,
uint32_t());
MOCK_CONST_METHOD0(SequenceNumber,
uint16_t());
MOCK_CONST_METHOD2(PayloadTypeToPayload,
uint32_t(const uint8_t payloadType,
ModuleRTPUtility::Payload*& payload));
MOCK_CONST_METHOD2(RetransmitOfOldPacket,
bool(const uint16_t sequenceNumber,
const uint32_t rtpTimeStamp));
MOCK_CONST_METHOD0(REDPayloadType,
int8_t());
MOCK_CONST_METHOD0(HaveNotReceivedPackets,
bool());
};
} // namespace webrtc
#endif //WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_

@ -15,10 +15,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -33,7 +30,7 @@ const uint32_t kTestNumberOfPackets = 1350;
const int kTestNumberOfRtxPackets = 149;
const int kNumFrames = 30;
class VerifyingRtxReceiver : public NullRtpData
class VerifyingRtxReceiver : public RtpData
{
public:
VerifyingRtxReceiver() {}
@ -50,20 +47,6 @@ class VerifyingRtxReceiver : public NullRtpData
std::list<uint16_t> sequence_numbers_;
};
class TestRtpFeedback : public NullRtpFeedback {
public:
TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
rtp_rtcp_->SetRemoteSSRC(SSRC);
}
private:
RtpRtcp* rtp_rtcp_;
};
class RtxLoopBackTransport : public webrtc::Transport {
public:
explicit RtxLoopBackTransport(uint32_t rtx_ssrc)
@ -73,17 +56,11 @@ class RtxLoopBackTransport : public webrtc::Transport {
consecutive_drop_end_(0),
rtx_ssrc_(rtx_ssrc),
count_rtx_ssrc_(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
module_(NULL) {
}
void SetSendModule(RtpRtcp* rtpRtcpModule,
RTPPayloadRegistry* rtp_payload_registry,
RtpReceiver* receiver) {
void SetSendModule(RtpRtcp* rtpRtcpModule) {
module_ = rtpRtcpModule;
rtp_payload_registry_ = rtp_payload_registry;
rtp_receiver_ = receiver;
}
void DropEveryNthPacket(int n) {
@ -117,14 +94,8 @@ class RtxLoopBackTransport : public webrtc::Transport {
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return -1;
}
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header,
static_cast<const uint8_t*>(data),
len, payload_specific, true)) {
if (module_->IncomingRtpPacket(static_cast<const uint8_t*>(data), len,
header) < 0) {
return -1;
}
return len;
@ -142,8 +113,6 @@ class RtxLoopBackTransport : public webrtc::Transport {
int consecutive_drop_end_;
uint32_t rtx_ssrc_;
int count_rtx_ssrc_;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* module_;
std::set<uint16_t> expected_sequence_numbers_;
};
@ -151,8 +120,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
class RtpRtcpRtxNackTest : public ::testing::Test {
protected:
RtpRtcpRtxNackTest()
: rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
rtp_rtcp_module_(NULL),
: rtp_rtcp_module_(NULL),
transport_(kTestSsrc + 1),
receiver_(),
payload_data_length(sizeof(payload_data)),
@ -164,27 +132,19 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
configuration.id = kTestId;
configuration.audio = false;
configuration.clock = &fake_clock;
receive_statistics_.reset(ReceiveStatistics::Create(&fake_clock));
configuration.receive_statistics = receive_statistics_.get();
configuration.incoming_data = &receiver_;
configuration.outgoing_transport = &transport_;
rtp_rtcp_module_ = RtpRtcp::CreateRtpRtcp(configuration);
rtp_feedback_.reset(new TestRtpFeedback(rtp_rtcp_module_));
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
kTestId, &fake_clock, &receiver_, rtp_feedback_.get(),
&rtp_payload_registry_));
EXPECT_EQ(0, rtp_rtcp_module_->SetSSRC(kTestSsrc));
EXPECT_EQ(0, rtp_rtcp_module_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, rtp_rtcp_module_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, rtp_rtcp_module_->SetStorePacketsStatus(true, 600));
EXPECT_EQ(0, rtp_rtcp_module_->SetSendingStatus(true));
EXPECT_EQ(0, rtp_rtcp_module_->SetSequenceNumber(kTestSequenceNumber));
EXPECT_EQ(0, rtp_rtcp_module_->SetStartTimestamp(111111));
transport_.SetSendModule(rtp_rtcp_module_, &rtp_payload_registry_,
rtp_receiver_.get());
transport_.SetSendModule(rtp_rtcp_module_);
VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
@ -192,11 +152,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
memcpy(video_codec.plName, "I420", 5);
EXPECT_EQ(0, rtp_rtcp_module_->RegisterSendPayload(video_codec));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate));
EXPECT_EQ(0, rtp_rtcp_module_->RegisterReceivePayload(video_codec));
for (int n = 0; n < payload_data_length; n++) {
payload_data[n] = n % 10;
@ -240,7 +196,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
void RunRtxTest(RtxMode rtx_method, int loss) {
rtp_receiver_->SetRTXStatus(true, kTestSsrc + 1);
EXPECT_EQ(0, rtp_rtcp_module_->SetRTXReceiveStatus(true, kTestSsrc + 1));
EXPECT_EQ(0, rtp_rtcp_module_->SetRTXSendStatus(rtx_method, true,
kTestSsrc + 1));
transport_.DropEveryNthPacket(loss);
@ -268,11 +224,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
delete rtp_rtcp_module_;
}
scoped_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* rtp_rtcp_module_;
scoped_ptr<TestRtpFeedback> rtp_feedback_;
RtxLoopBackTransport transport_;
VerifyingRtxReceiver receiver_;
uint8_t payload_data[65000];

@ -1,289 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
enum { kRateUpdateIntervalMs = 1000 };
ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
return new ReceiveStatisticsImpl(clock);
}
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
clock_(clock),
incoming_bitrate_(clock),
ssrc_(0),
jitter_q4_(0),
jitter_max_q4_(0),
cumulative_loss_(0),
jitter_q4_transmission_time_offset_(0),
local_time_last_received_timestamp_(0),
last_received_timestamp_(0),
last_received_transmission_time_offset_(0),
received_seq_first_(0),
received_seq_max_(0),
received_seq_wraps_(0),
received_packet_oh_(12), // RTP header.
received_byte_count_(0),
received_retransmitted_packets_(0),
received_inorder_packet_count_(0),
last_report_inorder_packets_(0),
last_report_old_packets_(0),
last_report_seq_max_(0),
last_reported_statistics_() {}
void ReceiveStatisticsImpl::ResetStatistics() {
CriticalSectionScoped lock(crit_sect_.get());
last_report_inorder_packets_ = 0;
last_report_old_packets_ = 0;
last_report_seq_max_ = 0;
memset(&last_reported_statistics_, 0, sizeof(last_reported_statistics_));
jitter_q4_ = 0;
jitter_max_q4_ = 0;
cumulative_loss_ = 0;
jitter_q4_transmission_time_offset_ = 0;
received_seq_wraps_ = 0;
received_seq_max_ = 0;
received_seq_first_ = 0;
received_byte_count_ = 0;
received_retransmitted_packets_ = 0;
received_inorder_packet_count_ = 0;
}
void ReceiveStatisticsImpl::ResetDataCounters() {
CriticalSectionScoped lock(crit_sect_.get());
received_byte_count_ = 0;
received_retransmitted_packets_ = 0;
received_inorder_packet_count_ = 0;
last_report_inorder_packets_ = 0;
}
void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
size_t bytes,
bool retransmitted,
bool in_order) {
ssrc_ = header.ssrc;
incoming_bitrate_.Update(bytes);
received_byte_count_ += bytes;
if (received_seq_max_ == 0 && received_seq_wraps_ == 0) {
// This is the first received report.
received_seq_first_ = header.sequenceNumber;
received_seq_max_ = header.sequenceNumber;
received_inorder_packet_count_ = 1;
// Current time in samples.
local_time_last_received_timestamp_ =
ModuleRTPUtility::GetCurrentRTP(clock_, header.payload_type_frequency);
return;
}
// Count only the new packets received. That is, if packets 1, 2, 3, 5, 4, 6
// are received, 4 will be ignored.
if (in_order) {
// Current time in samples.
const uint32_t RTPtime =
ModuleRTPUtility::GetCurrentRTP(clock_, header.payload_type_frequency);
received_inorder_packet_count_++;
// Wrong if we use RetransmitOfOldPacket.
int32_t seq_diff =
header.sequenceNumber - received_seq_max_;
if (seq_diff < 0) {
// Wrap around detected.
received_seq_wraps_++;
}
// New max.
received_seq_max_ = header.sequenceNumber;
if (header.timestamp != last_received_timestamp_ &&
received_inorder_packet_count_ > 1) {
int32_t time_diff_samples =
(RTPtime - local_time_last_received_timestamp_) -
(header.timestamp - last_received_timestamp_);
time_diff_samples = abs(time_diff_samples);
// lib_jingle sometimes delivers large jumps in the RTP timestamp for the
// same stream. If this happens, don't update the jitter value. Use 5
// seconds at video frequency (450000 samples) as the threshold.
if (time_diff_samples < 450000) {
// Note we calculate in Q4 to avoid using float.
int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
}
// Extended jitter report, RFC 5450.
// Actual network jitter, excluding the source-introduced jitter.
int32_t time_diff_samples_ext =
(RTPtime - local_time_last_received_timestamp_) -
((header.timestamp +
header.extension.transmissionTimeOffset) -
(last_received_timestamp_ +
last_received_transmission_time_offset_));
time_diff_samples_ext = abs(time_diff_samples_ext);
if (time_diff_samples_ext < 450000) {
int32_t jitter_diffQ4TransmissionTimeOffset =
(time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
jitter_q4_transmission_time_offset_ +=
((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
}
}
last_received_timestamp_ = header.timestamp;
local_time_last_received_timestamp_ = RTPtime;
} else {
if (retransmitted) {
received_retransmitted_packets_++;
} else {
received_inorder_packet_count_++;
}
}
uint16_t packet_oh = header.headerLength + header.paddingLength;
// Our measured overhead. Filter from RFC 5104 4.2.1.2:
// avg_OH (new) = 15/16*avg_OH (old) + 1/16*pckt_OH,
received_packet_oh_ = (15 * received_packet_oh_ + packet_oh) >> 4;
}
bool ReceiveStatisticsImpl::Statistics(RtpReceiveStatistics* statistics,
bool reset) {
int32_t missing;
return Statistics(statistics, &missing, reset);
}
bool ReceiveStatisticsImpl::Statistics(RtpReceiveStatistics* statistics,
int32_t* missing, bool reset) {
CriticalSectionScoped lock(crit_sect_.get());
if (missing == NULL) {
return false;
}
if (received_seq_first_ == 0 && received_byte_count_ == 0) {
// We have not received anything.
return false;
}
if (!reset) {
if (last_report_inorder_packets_ == 0) {
// No report.
return false;
}
// Just get last report.
*statistics = last_reported_statistics_;
return true;
}
if (last_report_inorder_packets_ == 0) {
// First time we send a report.
last_report_seq_max_ = received_seq_first_ - 1;
}
// Calculate fraction lost.
uint16_t exp_since_last = (received_seq_max_ - last_report_seq_max_);
if (last_report_seq_max_ > received_seq_max_) {
// Can we assume that the sequence number can't decrease over a full RTCP period?
exp_since_last = 0;
}
// Number of received RTP packets since last report, counts all packets but
// not re-transmissions.
uint32_t rec_since_last =
received_inorder_packet_count_ - last_report_inorder_packets_;
// With NACK we don't know the expected retransmissions during the last
// second. We know how many "old" packets we have received, and we count
// those to estimate the loss, but this still does not guarantee an exact
// number since we run this based on time, triggered by the sending of an
// RTP packet. This should have a minimal effect.
// With NACK we don't count old packets as received since they are
// re-transmitted. We use RTT to decide if a packet is re-ordered or
// re-transmitted.
uint32_t retransmitted_packets =
received_retransmitted_packets_ - last_report_old_packets_;
rec_since_last += retransmitted_packets;
*missing = 0;
if (exp_since_last > rec_since_last) {
*missing = (exp_since_last - rec_since_last);
}
uint8_t local_fraction_lost = 0;
if (exp_since_last) {
// Scale 0 to 255, where 255 is 100% loss.
local_fraction_lost = (uint8_t)((255 * (*missing)) / exp_since_last);
}
statistics->fraction_lost = local_fraction_lost;
// We need a counter for cumulative loss too.
cumulative_loss_ += *missing;
if (jitter_q4_ > jitter_max_q4_) {
jitter_max_q4_ = jitter_q4_;
}
statistics->cumulative_lost = cumulative_loss_;
statistics->extended_max_sequence_number = (received_seq_wraps_ << 16) +
received_seq_max_;
// Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
statistics->jitter = jitter_q4_ >> 4;
statistics->max_jitter = jitter_max_q4_ >> 4;
if (reset) {
// Store this report.
last_reported_statistics_ = *statistics;
// Only for report blocks in RTCP SR and RR.
last_report_inorder_packets_ = received_inorder_packet_count_;
last_report_old_packets_ = received_retransmitted_packets_;
last_report_seq_max_ = received_seq_max_;
}
return true;
}
void ReceiveStatisticsImpl::GetDataCounters(
uint32_t* bytes_received, uint32_t* packets_received) const {
CriticalSectionScoped lock(crit_sect_.get());
if (bytes_received) {
*bytes_received = received_byte_count_;
}
if (packets_received) {
*packets_received =
received_retransmitted_packets_ + received_inorder_packet_count_;
}
}
uint32_t ReceiveStatisticsImpl::BitrateReceived() {
return incoming_bitrate_.BitrateNow();
}
int32_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
int time_since_last_update = clock_->TimeInMilliseconds() -
incoming_bitrate_.time_last_rate_update();
return std::max(kRateUpdateIntervalMs - time_since_last_update, 0);
}
int32_t ReceiveStatisticsImpl::Process() {
incoming_bitrate_.Process();
return 0;
}
} // namespace webrtc
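
The implementation deleted above contains two arithmetic steps worth calling out: the RFC 3550 interarrival-jitter filter kept in Q4 fixed point, and the fraction-lost scaling used for RTCP report blocks. A hedged standalone sketch of both follows; the helper names are hypothetical.

#include <cstdint>
#include <cstdlib>

// Q4 jitter filter, J += (|D| - J) / 16, computed without floating point as
// in the deleted code: ((|D| << 4) - J_q4 + 8) >> 4, with rounding.
uint32_t UpdateJitterQ4(uint32_t jitter_q4, int32_t time_diff_samples) {
  time_diff_samples = std::abs(time_diff_samples);
  int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4;
  return jitter_q4 + ((jitter_diff_q4 + 8) >> 4);
}

// Fraction lost for an RTCP report block, scaled so that 255 means 100% loss.
uint8_t FractionLost(uint32_t expected_since_last, uint32_t received_since_last) {
  if (expected_since_last == 0 || received_since_last >= expected_since_last)
    return 0;
  uint32_t missing = expected_since_last - received_since_last;
  return static_cast<uint8_t>((255 * missing) / expected_since_last);
}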

@ -1,77 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include <algorithm>
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class CriticalSectionWrapper;
class ReceiveStatisticsImpl : public ReceiveStatistics {
public:
explicit ReceiveStatisticsImpl(Clock* clock);
// Implements ReceiveStatistics.
void IncomingPacket(const RTPHeader& header, size_t bytes,
bool old_packet, bool in_order);
bool Statistics(RtpReceiveStatistics* statistics, bool reset);
bool Statistics(RtpReceiveStatistics* statistics, int32_t* missing,
bool reset);
void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const;
uint32_t BitrateReceived();
void ResetStatistics();
void ResetDataCounters();
// Implements Module.
int32_t TimeUntilNextProcess();
int32_t Process();
private:
scoped_ptr<CriticalSectionWrapper> crit_sect_;
Clock* clock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
// Stats on received RTP packets.
uint32_t jitter_q4_;
uint32_t jitter_max_q4_;
uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
uint32_t local_time_last_received_timestamp_;
uint32_t last_received_timestamp_;
int32_t last_received_transmission_time_offset_;
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
// Current counter values.
uint16_t received_packet_oh_;
uint32_t received_byte_count_;
uint32_t received_retransmitted_packets_;
uint32_t received_inorder_packet_count_;
// Counter values when we sent the last report.
uint32_t last_report_inorder_packets_;
uint32_t last_report_old_packets_;
uint16_t last_report_seq_max_;
RtpReceiveStatistics last_reported_statistics_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_

@ -14,16 +14,14 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/trace.h"
// RFC 5109
namespace webrtc {
ReceiverFEC::ReceiverFEC(const int32_t id, RtpData* callback)
ReceiverFEC::ReceiverFEC(const int32_t id, RTPReceiverVideo* owner)
: id_(id),
crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
recovered_packet_callback_(callback),
owner_(owner),
fec_(new ForwardErrorCorrection(id)),
payload_type_fec_(-1) {}
@ -44,7 +42,6 @@ ReceiverFEC::~ReceiverFEC() {
}
void ReceiverFEC::SetPayloadTypeFEC(const int8_t payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
payload_type_fec_ = payload_type;
}
// 0 1 2 3
@ -79,8 +76,6 @@ int32_t ReceiverFEC::AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
const uint16_t payload_data_length,
bool& FECpacket) {
CriticalSectionScoped cs(crit_sect_.get());
if (payload_type_fec_ == -1) {
return -1;
}
@ -226,18 +221,12 @@ int32_t ReceiverFEC::AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
}
int32_t ReceiverFEC::ProcessReceivedFEC() {
crit_sect_->Enter();
if (!received_packet_list_.empty()) {
// Send received media packet to VCM.
if (!received_packet_list_.front()->is_fec) {
ForwardErrorCorrection::Packet* packet =
received_packet_list_.front()->pkt;
crit_sect_->Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
if (ParseAndReceivePacket(received_packet_list_.front()->pkt) != 0) {
return -1;
}
crit_sect_->Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
return -1;
@ -250,16 +239,27 @@ int32_t ReceiverFEC::ProcessReceivedFEC() {
for (; it != recovered_packet_list_.end(); ++it) {
if ((*it)->returned) // Already sent to the VCM and the jitter buffer.
continue;
ForwardErrorCorrection::Packet* packet = (*it)->pkt;
crit_sect_->Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
if (ParseAndReceivePacket((*it)->pkt) != 0) {
return -1;
}
crit_sect_->Enter();
(*it)->returned = true;
}
crit_sect_->Leave();
return 0;
}
int ReceiverFEC::ParseAndReceivePacket(
const ForwardErrorCorrection::Packet* packet) {
WebRtcRTPHeader header;
memset(&header, 0, sizeof(header));
ModuleRTPUtility::RTPHeaderParser parser(packet->data, packet->length);
if (!parser.Parse(header.header)) {
return -1;
}
if (owner_->ReceiveRecoveredPacketCallback(
&header, &packet->data[header.header.headerLength],
packet->length - header.header.headerLength) != 0) {
return -1;
}
return 0;
}
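
ParseAndReceivePacket() above locates the recovered payload by parsing the RTP header and skipping headerLength bytes. A hedged sketch of that offset arithmetic as a standalone helper; the name and signature are hypothetical.

#include <cstdint>

// Sketch only: locate the payload inside a recovered packet given the parsed
// RTP header length. Returns false if the header claims more bytes than the
// packet holds.
bool LocatePayload(const uint8_t* packet, uint16_t packet_length,
                   uint16_t header_length,
                   const uint8_t** payload, uint16_t* payload_length) {
  if (header_length > packet_length)
    return false;
  *payload = packet + header_length;
  *payload_length = packet_length - header_length;
  return true;
}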

@ -15,16 +15,14 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class CriticalSectionWrapper;
class RTPReceiverVideo;
class ReceiverFEC {
public:
ReceiverFEC(const int32_t id, RtpData* callback);
ReceiverFEC(const int32_t id, RTPReceiverVideo* owner);
virtual ~ReceiverFEC();
int32_t AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
@ -37,9 +35,10 @@ class ReceiverFEC {
void SetPayloadTypeFEC(const int8_t payload_type);
private:
int ParseAndReceivePacket(const ForwardErrorCorrection::Packet* packet);
int id_;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
RtpData* recovered_packet_callback_;
RTPReceiverVideo* owner_;
ForwardErrorCorrection* fec_;
// TODO(holmer): In the current version received_packet_list_ is never more
// than one packet, since we process FEC every time a new packet

@ -16,31 +16,20 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/source/fec_test_helper.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h"
#include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
using ::testing::_;
using ::testing::Args;
using ::testing::ElementsAreArray;
using ::testing::Return;
namespace webrtc {
class MockRtpData : public RtpData {
public:
MOCK_METHOD3(OnReceivedPayloadData,
int32_t(const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader));
MOCK_METHOD2(OnRecoveredPacket,
bool(const uint8_t* packet, int packet_length));
};
class ReceiverFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
fec_ = new ForwardErrorCorrection(0);
receiver_fec_ = new ReceiverFEC(0, &rtp_data_callback_);
receiver_fec_ = new ReceiverFEC(0, &rtp_receiver_video_);
generator_ = new FrameGenerator();
receiver_fec_->SetPayloadTypeFEC(kFecPayloadType);
}
@ -75,10 +64,11 @@ class ReceiverFecTest : public ::testing::Test {
// Verify that the content of the reconstructed packet is equal to the
// content of |packet|, and that the same content is received |times| number
// of times in a row.
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, packet->length))
.With(Args<0, 1>(ElementsAreArray(packet->data,
packet->length)))
.Times(times).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(
_, _, packet->length - kRtpHeaderSize))
.With(Args<1, 2>(ElementsAreArray(packet->data + kRtpHeaderSize,
packet->length - kRtpHeaderSize)))
.Times(times);
}
void BuildAndAddRedMediaPacket(RtpPacket* packet) {
@ -102,7 +92,7 @@ class ReceiverFecTest : public ::testing::Test {
}
ForwardErrorCorrection* fec_;
MockRtpData rtp_data_callback_;
MockRTPReceiverVideo rtp_receiver_video_;
ReceiverFEC* receiver_fec_;
FrameGenerator* generator_;
};
@ -265,8 +255,8 @@ TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
delayed_fec = fec_packets.front();
@ -280,15 +270,15 @@ TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
it != media_rtp_packets_batch2.end(); ++it) {
BuildAndAddRedMediaPacket(*it);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
// Add the delayed FEC packet. One packet should be reconstructed.
BuildAndAddRedFecPacket(delayed_fec);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets_batch1);
@ -309,8 +299,8 @@ TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
delayed_fec = fec_packets.front();
@ -324,15 +314,15 @@ TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
it != media_rtp_packets_batch2.end(); ++it) {
BuildAndAddRedMediaPacket(*it);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
// Add the delayed FEC packet. No packet should be reconstructed since the
// first media packet of that frame has been dropped due to being too old.
BuildAndAddRedFecPacket(delayed_fec);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(0);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
@ -356,7 +346,7 @@ TEST_F(ReceiverFecTest, OldFecPacketDropped) {
it != fec_packets.end(); ++it) {
// Only FEC packets inserted. No packets recoverable at this time.
BuildAndAddRedFecPacket(*it);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(0);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
@ -370,8 +360,8 @@ TEST_F(ReceiverFecTest, OldFecPacketDropped) {
// and should've been dropped. Only the media packet we inserted will be
// returned.
BuildAndAddRedMediaPacket(media_rtp_packets.front());
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);

@ -115,15 +115,13 @@ TEST_F(RtcpFormatRembTest, TestNonCompund) {
uint32_t SSRC = 456789;
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpNonCompound));
EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 1, &SSRC));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb, NULL));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
}
TEST_F(RtcpFormatRembTest, TestCompund) {
uint32_t SSRCs[2] = {456789, 98765};
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 2, SSRCs));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb, &receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
}
} // namespace

@ -141,11 +141,6 @@ RTCPReceiver::SetRemoteSSRC( const uint32_t ssrc)
return 0;
}
uint32_t RTCPReceiver::RemoteSSRC() const {
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
return _remoteSSRC;
}
void RTCPReceiver::RegisterRtcpObservers(
RtcpIntraFrameObserver* intra_frame_callback,
RtcpBandwidthObserver* bandwidth_callback,
@ -188,7 +183,7 @@ int32_t RTCPReceiver::ResetRTT(const uint32_t remoteSSRC) {
return 0;
}
int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
int32_t RTCPReceiver::RTT(const uint32_t remoteSSRC,
uint16_t* RTT,
uint16_t* avgRTT,
uint16_t* minRTT,
@ -1411,4 +1406,43 @@ int32_t RTCPReceiver::TMMBRReceived(const uint32_t size,
return num;
}
int32_t
RTCPReceiver::SetPacketTimeout(const uint32_t timeoutMS)
{
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
_packetTimeOutMS = timeoutMS;
return 0;
}
void RTCPReceiver::PacketTimeout()
{
if(_packetTimeOutMS == 0)
{
// not configured
return;
}
bool packetTimeOut = false;
{
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
if(_lastReceived == 0)
{
// not active
return;
}
int64_t now = _clock->TimeInMilliseconds();
if(now - _lastReceived > _packetTimeOutMS)
{
packetTimeOut = true;
_lastReceived = 0; // only one callback
}
}
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if(packetTimeOut && _cbRtcpFeedback)
{
_cbRtcpFeedback->OnRTCPPacketTimeout(_id);
}
}
} // namespace webrtc
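
The restored SetPacketTimeout()/PacketTimeout() pair is driven by the owning module: the timeout is configured once and PacketTimeout() is polled periodically, firing RtcpFeedback::OnRTCPPacketTimeout() at most once per expiry because _lastReceived is cleared. A hedged usage sketch; the function names and the 30-second value are illustrative.

#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"

// Sketch only: configure the timeout once, then poll from the process thread.
void ConfigureRtcpTimeout(webrtc::RTCPReceiver* rtcp_receiver) {
  rtcp_receiver->SetPacketTimeout(30000 /* ms */);
}

void OnProcessTick(webrtc::RTCPReceiver* rtcp_receiver) {
  rtcp_receiver->PacketTimeout();  // invokes OnRTCPPacketTimeout() on expiry
}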

@ -42,7 +42,6 @@ public:
void SetSSRC( const uint32_t ssrc);
void SetRelaySSRC( const uint32_t ssrc);
int32_t SetRemoteSSRC( const uint32_t ssrc);
uint32_t RemoteSSRC() const;
uint32_t RelaySSRC() const;
@ -68,7 +67,7 @@ public:
uint32_t *rtcp_timestamp) const;
// get rtt
int32_t RTT(uint32_t remoteSSRC,
int32_t RTT(const uint32_t remoteSSRC,
uint16_t* RTT,
uint16_t* avgRTT,
uint16_t* minRTT,
@ -107,6 +106,9 @@ public:
int32_t UpdateTMMBR();
int32_t SetPacketTimeout(const uint32_t timeoutMS);
void PacketTimeout();
protected:
RTCPHelp::RTCPReportBlockInformation* CreateReportBlockInformation(const uint32_t remoteSSRC);
RTCPHelp::RTCPReportBlockInformation* GetReportBlockInformation(const uint32_t remoteSSRC) const;

@ -136,7 +136,7 @@ class PacketBuilder {
// This test transport verifies that no functions get called.
class TestTransport : public Transport,
public NullRtpData {
public RtpData {
public:
explicit TestTransport()
: rtcp_receiver_(NULL) {

@ -268,7 +268,7 @@ RTCPSender::SetSendingStatus(const bool sending)
}
if(sendRTCPBye)
{
return SendRTCP(kRtcpBye, NULL);
return SendRTCP(kRtcpBye);
}
return 0;
}
@ -373,10 +373,12 @@ RTCPSender::SetSSRC( const uint32_t ssrc)
_SSRC = ssrc;
}
void RTCPSender::SetRemoteSSRC(uint32_t ssrc)
int32_t
RTCPSender::SetRemoteSSRC( const uint32_t ssrc)
{
CriticalSectionScoped lock(_criticalSectionRTCPSender);
_remoteSSRC = ssrc;
return 0;
}
int32_t
@ -1531,13 +1533,11 @@ RTCPSender::BuildVoIPMetric(uint8_t* rtcpbuffer, uint32_t& pos)
}
int32_t
RTCPSender::SendRTCP(
uint32_t packetTypeFlags,
const ReceiveStatistics::RtpReceiveStatistics* receive_stats,
int32_t nackSize,
const uint16_t* nackList,
bool repeat,
uint64_t pictureID)
RTCPSender::SendRTCP(const uint32_t packetTypeFlags,
const int32_t nackSize, // NACK
const uint16_t* nackList, // NACK
const bool repeat, // FIR
const uint64_t pictureID) // SLI & RPSI
{
uint32_t rtcpPacketTypeFlags = packetTypeFlags;
uint32_t pos = 0;
@ -1569,15 +1569,13 @@ RTCPSender::SendRTCP(
rtcpPacketTypeFlags & kRtcpSr ||
rtcpPacketTypeFlags & kRtcpRr)
{
// Do we have receive statistics to send?
if (receive_stats)
// get statistics from our RTPreceiver outside critsect
if(_rtpRtcp.ReportBlockStatistics(&received.fractionLost,
&received.cumulativeLost,
&received.extendedHighSeqNum,
&received.jitter,
&jitterTransmissionOffset) == 0)
{
received.fractionLost = receive_stats->fraction_lost;
received.cumulativeLost = receive_stats->cumulative_lost;
received.extendedHighSeqNum =
receive_stats->extended_max_sequence_number;
received.jitter = receive_stats->jitter;
jitterTransmissionOffset = 0;
hasReceived = true;
uint32_t lastReceivedRRNTPsecs = 0;

@ -17,7 +17,6 @@
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@ -71,7 +70,7 @@ public:
void SetSSRC( const uint32_t ssrc);
void SetRemoteSSRC(uint32_t ssrc);
int32_t SetRemoteSSRC( const uint32_t ssrc);
int32_t SetCameraDelay(const int32_t delayMS);
@ -89,13 +88,11 @@ public:
uint32_t LastSendReport(uint32_t& lastRTCPTime);
int32_t SendRTCP(
uint32_t rtcpPacketTypeFlags,
const ReceiveStatistics::RtpReceiveStatistics* receive_stats,
int32_t nackSize = 0,
int32_t SendRTCP(const uint32_t rtcpPacketTypeFlags,
const int32_t nackSize = 0,
const uint16_t* nackList = 0,
bool repeat = false,
uint64_t pictureID = 0);
const bool repeat = false,
const uint64_t pictureID = 0);
int32_t AddReportBlock(const uint32_t SSRC,
const RTCPReportBlock* receiveBlock);

@ -20,8 +20,6 @@
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
@ -219,7 +217,7 @@ void CreateRtpPacket(const bool marker_bit, const uint8_t payload,
}
class TestTransport : public Transport,
public NullRtpData {
public RtpData {
public:
TestTransport()
: rtcp_receiver_(NULL) {
@ -279,8 +277,6 @@ class RtcpSenderTest : public ::testing::Test {
RtcpSenderTest()
: over_use_detector_options_(),
system_clock_(Clock::GetRealTimeClock()),
rtp_payload_registry_(new RTPPayloadRegistry(
0, RTPPayloadStrategy::CreateStrategy(false))),
remote_bitrate_observer_(),
remote_bitrate_estimator_(
RemoteBitrateEstimatorFactory().Create(
@ -292,12 +288,11 @@ class RtcpSenderTest : public ::testing::Test {
configuration.id = 0;
configuration.audio = false;
configuration.clock = system_clock_;
configuration.incoming_data = test_transport_;
configuration.outgoing_transport = test_transport_;
configuration.remote_bitrate_estimator = remote_bitrate_estimator_.get();
rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration);
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
0, system_clock_, test_transport_, NULL, rtp_payload_registry_.get()));
rtcp_sender_ = new RTCPSender(0, false, system_clock_, rtp_rtcp_impl_);
rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_);
test_transport_->SetRTCPReceiver(rtcp_receiver_);
@ -320,8 +315,6 @@ class RtcpSenderTest : public ::testing::Test {
OverUseDetectorOptions over_use_detector_options_;
Clock* system_clock_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
ModuleRtpRtcpImpl* rtp_rtcp_impl_;
RTCPSender* rtcp_sender_;
RTCPReceiver* rtcp_receiver_;
@ -335,7 +328,7 @@ class RtcpSenderTest : public ::testing::Test {
TEST_F(RtcpSenderTest, RtcpOff) {
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpOff));
EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr, NULL));
EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr));
}
TEST_F(RtcpSenderTest, IJStatus) {
@ -359,27 +352,18 @@ TEST_F(RtcpSenderTest, TestCompound) {
strncpy(codec_inst.plName, "VP8", webrtc::kPayloadNameSize - 1);
codec_inst.codecType = webrtc::kVideoCodecVP8;
codec_inst.plType = payload;
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(codec_inst.plName,
codec_inst.plType,
90000,
0,
codec_inst.maxBitrate));
EXPECT_EQ(0, rtp_rtcp_impl_->RegisterReceivePayload(codec_inst));
// Make sure RTP packet has been received.
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
EXPECT_TRUE(parser->Parse(packet_, packet_length, &header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific));
EXPECT_TRUE(rtp_receiver_->IncomingRtpPacket(&header, packet_, packet_length,
payload_specific, true));
EXPECT_EQ(0, rtp_rtcp_impl_->IncomingRtpPacket(packet_, packet_length,
header));
EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr, &receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
// Transmission time offset packet should be received.
ASSERT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
@ -389,9 +373,7 @@ TEST_F(RtcpSenderTest, TestCompound) {
TEST_F(RtcpSenderTest, TestCompound_NoRtpReceived) {
EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
// |receive_stats| is NULL since no data has been received.
ReceiveStatistics::RtpReceiveStatistics* receive_stats = NULL;
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr, receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
// Transmission time offset packet should not be received.
ASSERT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
@ -409,9 +391,7 @@ TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) {
TMMBRSet bounding_set;
EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr, &receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
// We now expect the packet to show up in the rtcp_packet_info_ of
// test_transport_.
ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
@ -433,9 +413,7 @@ TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndValid) {
EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr, &receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
// We now expect the packet to show up in the rtcp_packet_info_ of
// test_transport_.
ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/system_wrappers/interface/trace.h"
@ -21,7 +21,8 @@ RTPPayloadRegistry::RTPPayloadRegistry(
rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
last_received_payload_type_(-1),
last_received_media_payload_type_(-1) {}
last_received_media_payload_type_(-1) {
}
RTPPayloadRegistry::~RTPPayloadRegistry() {
while (!payload_type_map_.empty()) {
@ -225,29 +226,7 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
return -1;
}
bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
PayloadUnion* payload) const {
ModuleRTPUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
return false;
}
*payload = it->second->typeSpecific;
return true;
}
int RTPPayloadRegistry::GetPayloadTypeFrequency(
uint8_t payload_type) const {
ModuleRTPUtility::Payload* payload;
if (!PayloadTypeToPayload(payload_type, payload)) {
return -1;
}
return rtp_payload_strategy_->GetPayloadTypeFrequency(*payload);
}
bool RTPPayloadRegistry::PayloadTypeToPayload(
int32_t RTPPayloadRegistry::PayloadTypeToPayload(
const uint8_t payload_type,
ModuleRTPUtility::Payload*& payload) const {
@ -256,10 +235,10 @@ bool RTPPayloadRegistry::PayloadTypeToPayload(
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
return false;
return -1;
}
payload = it->second;
return true;
return 0;
}
bool RTPPayloadRegistry::ReportMediaPayloadType(
@ -311,11 +290,6 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
payload->audio = true;
return payload;
}
int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const {
return payload.typeSpecific.Audio.frequency;
}
};
class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
@ -342,15 +316,15 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const {
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
RtpVideoCodecTypes videoType = kRtpGenericVideo;
if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
videoType = kRtpVideoVp8;
videoType = kRtpVp8Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpVideoGeneric;
videoType = kRtpGenericVideo;
} else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
videoType = kRtpVideoFec;
videoType = kRtpFecVideo;
} else {
videoType = kRtpVideoGeneric;
videoType = kRtpGenericVideo;
}
ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
@ -361,11 +335,6 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
payload->audio = false;
return payload;
}
int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const {
return kVideoPayloadTypeFrequency;
}
};
RTPPayloadStrategy* RTPPayloadStrategy::CreateStrategy(

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@ -21,7 +21,7 @@ namespace webrtc {
// of payload handling.
class RTPPayloadStrategy {
public:
virtual ~RTPPayloadStrategy() {}
virtual ~RTPPayloadStrategy() {};
virtual bool CodecsMustBeUnique() const = 0;
@ -42,13 +42,10 @@ class RTPPayloadStrategy {
const uint8_t channels,
const uint32_t rate) const = 0;
virtual int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const = 0;
static RTPPayloadStrategy* CreateStrategy(const bool handling_audio);
protected:
RTPPayloadStrategy() {}
RTPPayloadStrategy() {};
};
class RTPPayloadRegistry {
@ -76,11 +73,7 @@ class RTPPayloadRegistry {
const uint32_t rate,
int8_t* payload_type) const;
bool GetPayloadSpecifics(uint8_t payload_type, PayloadUnion* payload) const;
int GetPayloadTypeFrequency(uint8_t payload_type) const;
bool PayloadTypeToPayload(
int32_t PayloadTypeToPayload(
const uint8_t payload_type,
ModuleRTPUtility::Payload*& payload) const;
@ -123,4 +116,4 @@ class RTPPayloadRegistry {
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@ -74,7 +74,7 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
EXPECT_TRUE(new_payload_created) << "A new payload WAS created.";
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
// We should get back the exact pointer to the payload returned by the
@ -83,7 +83,7 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
// Now forget about it and verify it's gone.
EXPECT_EQ(0, rtp_payload_registry_->DeRegisterReceivePayload(payload_type));
EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_EQ(-1, rtp_payload_registry_->PayloadTypeToPayload(
payload_type, retrieved_payload));
}
@ -101,7 +101,7 @@ TEST_F(RtpPayloadRegistryTest, DoesNotCreateNewPayloadTypeIfRed) {
ASSERT_EQ(red_type_of_the_day, rtp_payload_registry_->red_payload_type());
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(red_type_of_the_day,
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(red_type_of_the_day,
retrieved_payload));
EXPECT_FALSE(retrieved_payload->audio);
EXPECT_STRCASEEQ("red", retrieved_payload->name);
@ -131,10 +131,10 @@ TEST_F(RtpPayloadRegistryTest,
// Ensure both payloads are preserved.
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
EXPECT_EQ(first_payload_on_heap, retrieved_payload);
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1,
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1,
retrieved_payload));
EXPECT_EQ(second_payload_on_heap, retrieved_payload);
@ -170,10 +170,10 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalChannels, kTypicalRate, &ignored));
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_EQ(-1, rtp_payload_registry_->PayloadTypeToPayload(
payload_type, retrieved_payload)) << "The first payload should be "
"deregistered because the only thing that differs is payload type.";
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
payload_type - 1, retrieved_payload)) <<
"The second payload should still be registered though.";
@ -185,10 +185,10 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalPayloadName, payload_type + 1, kTypicalFrequency,
kTypicalChannels, kTypicalRate, &ignored));
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
payload_type - 1, retrieved_payload)) <<
"Not compatible; both payloads should be kept.";
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
payload_type + 1, retrieved_payload)) <<
"Not compatible; both payloads should be kept.";
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,242 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
#include <map>
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RtpRtcpFeedback;
class ModuleRtpRtcpImpl;
class Trace;
class RTPReceiverAudio;
class RTPReceiverVideo;
class RTPReceiverStrategy;
class RTPReceiver : public Bitrate {
public:
// Callbacks passed in here may not be NULL (use Null Object callbacks if you
// want callbacks to do nothing). This class takes ownership of the media
// receiver but nothing else.
RTPReceiver(const int32_t id,
Clock* clock,
ModuleRtpRtcpImpl* owner,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPReceiverStrategy* rtp_media_receiver,
RTPPayloadRegistry* rtp_payload_registry);
virtual ~RTPReceiver();
RtpVideoCodecTypes VideoCodecType() const;
uint32_t MaxConfiguredBitrate() const;
int32_t SetPacketTimeout(const uint32_t timeout_ms);
void PacketTimeout();
void ProcessDeadOrAlive(const bool RTCPalive, const int64_t now);
void ProcessBitrate();
int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate);
int32_t DeRegisterReceivePayload(const int8_t payload_type);
int32_t ReceivePayloadType(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate,
int8_t* payload_type) const;
int32_t IncomingRTPPacket(
RTPHeader* rtpheader,
const uint8_t* incoming_rtp_packet,
const uint16_t incoming_rtp_packet_length);
NACKMethod NACK() const;
// Turn negative acknowledgement requests on/off.
int32_t SetNACKStatus(const NACKMethod method, int max_reordering_threshold);
// Returns the last received timestamp.
virtual uint32_t TimeStamp() const;
int32_t LastReceivedTimeMs() const;
virtual uint16_t SequenceNumber() const;
int32_t EstimatedRemoteTimeStamp(uint32_t& timestamp) const;
uint32_t SSRC() const;
int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const;
int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
// Get the currently configured SSRC filter.
int32_t SSRCFilter(uint32_t& allowed_ssrc) const;
// Set a SSRC to be used as a filter for incoming RTP streams.
int32_t SetSSRCFilter(const bool enable, const uint32_t allowed_ssrc);
int32_t Statistics(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter, // Will be moved from JB.
uint32_t* max_jitter,
uint32_t* jitter_transmission_time_offset,
bool reset) const;
int32_t Statistics(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter, // Will be moved from JB.
uint32_t* max_jitter,
uint32_t* jitter_transmission_time_offset,
int32_t* missing,
bool reset) const;
int32_t DataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const;
int32_t ResetStatistics();
int32_t ResetDataCounters();
uint16_t PacketOHReceived() const;
uint32_t PacketCountReceived() const;
uint32_t ByteCountReceived() const;
int32_t RegisterRtpHeaderExtension(const RTPExtensionType type,
const uint8_t id);
int32_t DeregisterRtpHeaderExtension(const RTPExtensionType type);
void GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const;
// RTX.
void SetRTXStatus(bool enable, uint32_t ssrc);
void RTXStatus(bool* enable, uint32_t* ssrc, int* payload_type) const;
void SetRtxPayloadType(int payload_type);
virtual int8_t REDPayloadType() const;
bool HaveNotReceivedPackets() const;
virtual bool RetransmitOfOldPacket(const uint16_t sequence_number,
const uint32_t rtp_time_stamp) const;
void UpdateStatistics(const RTPHeader* rtp_header,
const uint16_t bytes,
const bool old_packet);
private:
// Returns whether RED is configured with payload_type.
bool REDPayloadType(const int8_t payload_type) const;
bool InOrderPacket(const uint16_t sequence_number) const;
void CheckSSRCChanged(const RTPHeader* rtp_header);
void CheckCSRC(const WebRtcRTPHeader* rtp_header);
int32_t CheckPayloadChanged(const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& isRED,
ModuleRTPUtility::PayloadUnion* payload);
void UpdateNACKBitRate(int32_t bytes, uint32_t now);
bool ProcessNACKBitRate(uint32_t now);
RTPPayloadRegistry* rtp_payload_registry_;
scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
int32_t id_;
ModuleRtpRtcpImpl& rtp_rtcp_;
RtpFeedback* cb_rtp_feedback_;
CriticalSectionWrapper* critical_section_rtp_receiver_;
mutable int64_t last_receive_time_;
uint16_t last_received_payload_length_;
uint32_t packet_timeout_ms_;
// SSRCs.
uint32_t ssrc_;
uint8_t num_csrcs_;
uint32_t current_remote_csrc_[kRtpCsrcSize];
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
bool use_ssrc_filter_;
uint32_t ssrc_filter_;
// Stats on received RTP packets.
uint32_t jitter_q4_;
mutable uint32_t jitter_max_q4_;
mutable uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
uint32_t local_time_last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint32_t last_received_timestamp_;
uint16_t last_received_sequence_number_;
int32_t last_received_transmission_time_offset_;
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
// Current counter values.
uint16_t received_packet_oh_;
uint32_t received_byte_count_;
uint32_t received_old_packet_count_;
uint32_t received_inorder_packet_count_;
// Counter values when we sent the last report.
mutable uint32_t last_report_inorder_packets_;
mutable uint32_t last_report_old_packets_;
mutable uint16_t last_report_seq_max_;
mutable uint8_t last_report_fraction_lost_;
mutable uint32_t last_report_cumulative_lost_; // 24 bits valid.
mutable uint32_t last_report_extended_high_seq_num_;
mutable uint32_t last_report_jitter_;
mutable uint32_t last_report_jitter_transmission_time_offset_;
NACKMethod nack_method_;
int max_reordering_threshold_;
bool rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_

View File

@ -20,18 +20,13 @@
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback) {
return new RTPReceiverAudio(id, data_callback, incoming_messages_callback);
}
RTPReceiverAudio::RTPReceiverAudio(const int32_t id,
RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback)
: RTPReceiverStrategy(data_callback),
TelephoneEventHandler(),
id_(id),
critical_section_rtp_receiver_audio_(
CriticalSectionWrapper::CreateCriticalSection()),
last_received_frequency_(8000),
telephone_event_forward_to_decoder_(false),
telephone_event_payload_type_(-1),
@ -42,36 +37,42 @@ RTPReceiverAudio::RTPReceiverAudio(const int32_t id,
cng_payload_type_(-1),
g722_payload_type_(-1),
last_received_g722_(false),
num_energy_(0),
current_remote_energy_(),
cb_audio_feedback_(incoming_messages_callback) {
last_payload_.Audio.channels = 1;
memset(current_remote_energy_, 0, sizeof(current_remote_energy_));
}
uint32_t RTPReceiverAudio::AudioFrequency() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
if (last_received_g722_) {
return 8000;
}
return last_received_frequency_;
}
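AudioFrequency() above pins G.722 to 8000 Hz because G.722's RTP timestamp clock runs at 8 kHz on the wire even though the codec samples at 16 kHz (the long-standing RFC 3551 convention); a minimal standalone sketch of that rule, with illustrative names:

#include <stdint.h>

// RTP timestamp clock rate to assume for the last received audio payload:
// G.722 is always clocked at 8000 Hz on the wire regardless of its 16 kHz
// sampling rate; everything else uses its registered frequency.
uint32_t RtpClockRateHz(bool last_received_was_g722,
                        uint32_t last_received_frequency) {
  return last_received_was_g722 ? 8000u : last_received_frequency;
}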
// Outband TelephoneEvent(DTMF) detection
void RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
int RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
CriticalSectionScoped lock(crit_sect_.get());
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
telephone_event_forward_to_decoder_ = forward_to_decoder;
return 0;
}
// Is forwarding of outband telephone events turned on/off?
bool RTPReceiverAudio::TelephoneEventForwardToDecoder() const {
CriticalSectionScoped lock(crit_sect_.get());
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
return telephone_event_forward_to_decoder_;
}
bool RTPReceiverAudio::TelephoneEventPayloadType(
int8_t payload_type) const {
CriticalSectionScoped lock(crit_sect_.get());
const int8_t payload_type) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
return (telephone_event_payload_type_ == payload_type) ? true : false;
}
bool RTPReceiverAudio::CNGPayloadType(int8_t payload_type,
bool RTPReceiverAudio::CNGPayloadType(const int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed) {
CriticalSectionScoped lock(crit_sect_.get());
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
*cng_payload_type_has_changed = false;
// We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
@ -117,7 +118,8 @@ bool RTPReceiverAudio::CNGPayloadType(int8_t payload_type,
return false;
}
bool RTPReceiverAudio::ShouldReportCsrcChanges(uint8_t payload_type) const {
bool RTPReceiverAudio::ShouldReportCsrcChanges(
uint8_t payload_type) const {
// Don't do this for DTMF packets, otherwise it's fine.
return !TelephoneEventPayloadType(payload_type);
}
@ -156,9 +158,9 @@ bool RTPReceiverAudio::ShouldReportCsrcChanges(uint8_t payload_type) const {
// - G7221 frame N/A
int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency) {
CriticalSectionScoped lock(crit_sect_.get());
const int8_t payload_type,
const uint32_t frequency) {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
if (ModuleRTPUtility::StringCompare(payload_name, "telephone-event", 15)) {
telephone_event_payload_type_ = payload_type;
@ -181,24 +183,18 @@ int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
return 0;
}
int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
int32_t RTPReceiverAudio::ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) {
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp", "Audio::ParseRtp",
"seqnum", rtp_header->header.sequenceNumber,
"timestamp", rtp_header->header.timestamp);
rtp_header->type.Audio.numEnergy = rtp_header->header.numCSRCs;
num_energy_ = rtp_header->type.Audio.numEnergy;
if (rtp_header->type.Audio.numEnergy > 0 &&
rtp_header->type.Audio.numEnergy <= kRtpCsrcSize) {
memcpy(current_remote_energy_,
rtp_header->type.Audio.arrOfEnergy,
rtp_header->type.Audio.numEnergy);
}
const uint8_t* payload_data =
ModuleRTPUtility::GetPayloadData(rtp_header->header, packet);
const uint16_t payload_data_length =
@ -211,12 +207,8 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
is_red);
}
int RTPReceiverAudio::GetPayloadTypeFrequency() const {
CriticalSectionScoped lock(crit_sect_.get());
if (last_received_g722_) {
return 8000;
}
return last_received_frequency_;
int32_t RTPReceiverAudio::GetFrequencyHz() const {
return AudioFrequency();
}
RTPAliveType RTPReceiverAudio::ProcessDeadOrAlive(
@ -231,8 +223,9 @@ RTPAliveType RTPReceiverAudio::ProcessDeadOrAlive(
}
}
void RTPReceiverAudio::CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
void RTPReceiverAudio::CheckPayloadChanged(
const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
*should_discard_changes = false;
@ -258,24 +251,12 @@ void RTPReceiverAudio::CheckPayloadChanged(int8_t payload_type,
}
}
int RTPReceiverAudio::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
CriticalSectionScoped cs(crit_sect_.get());
assert(num_energy_ <= kRtpCsrcSize);
if (num_energy_ > 0) {
memcpy(array_of_energy, current_remote_energy_,
sizeof(uint8_t) * num_energy_);
}
return num_energy_;
}
int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const {
const ModuleRTPUtility::PayloadUnion& specific_payload) const {
if (-1 == callback->OnInitializeDecoder(id,
payload_type,
payload_name,
@ -296,9 +277,9 @@ int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_length,
const AudioPayload& audio_specific,
bool is_red) {
const uint16_t payload_length,
const ModuleRTPUtility::AudioPayload& audio_specific,
const bool is_red) {
if (payload_length == 0) {
return 0;
@ -307,7 +288,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
bool telephone_event_packet =
TelephoneEventPayloadType(rtp_header->header.payloadType);
if (telephone_event_packet) {
CriticalSectionScoped lock(crit_sect_.get());
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
// RFC 4733 2.3
// 0 1 2 3
@ -352,7 +333,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
}
{
CriticalSectionScoped lock(crit_sect_.get());
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
if (!telephone_event_packet) {
last_received_frequency_ = audio_specific.frequency;

View File

@ -13,8 +13,8 @@
#include <set>
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -25,16 +25,16 @@ namespace webrtc {
class CriticalSectionWrapper;
// Handles audio RTP packets. This class is thread-safe.
class RTPReceiverAudio : public RTPReceiverStrategy,
public TelephoneEventHandler {
class RTPReceiverAudio : public RTPReceiverStrategy {
public:
RTPReceiverAudio(const int32_t id,
RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
// The following three methods implement the TelephoneEventHandler interface.
uint32_t AudioFrequency() const;
// Forward DTMFs to decoder for playout.
void SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
int SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
// Is forwarding of outband telephone events turned on/off?
bool TelephoneEventForwardToDecoder() const;
@ -42,25 +42,22 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
// Is TelephoneEvent configured with payload type payload_type
bool TelephoneEventPayloadType(const int8_t payload_type) const;
TelephoneEventHandler* GetTelephoneEventHandler() {
return this;
}
// Returns true if CNG is configured with payload type payload_type. If so,
// the frequency and cng_payload_type_has_changed are filled in.
bool CNGPayloadType(const int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed);
int32_t ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet);
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet);
int GetPayloadTypeFrequency() const;
int32_t GetFrequencyHz() const;
RTPAliveType ProcessDeadOrAlive(uint16_t last_payload_length) const;
@ -68,45 +65,44 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
int32_t OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency);
const int8_t payload_type,
const uint32_t frequency);
int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const;
const ModuleRTPUtility::PayloadUnion& specific_payload) const;
// We do not allow codecs to have multiple payload types for audio, so we
// need to override the default behavior (which is to do nothing).
void PossiblyRemoveExistingPayloadType(
ModuleRTPUtility::PayloadTypeMap* payload_type_map,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
size_t payload_name_length,
uint32_t frequency,
uint8_t channels,
uint32_t rate) const;
const size_t payload_name_length,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const;
// We need to look out for special payload types here and sometimes reset
// statistics. In addition we sometimes need to tweak the frequency.
void CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
void CheckPayloadChanged(const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes);
int Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
private:
int32_t ParseAudioCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_length,
const AudioPayload& audio_specific,
bool is_red);
const uint16_t payload_length,
const ModuleRTPUtility::AudioPayload& audio_specific,
const bool is_red);
int32_t id_;
scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_audio_;
uint32_t last_received_frequency_;
@ -125,9 +121,6 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
int8_t g722_payload_type_;
bool last_received_g722_;
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
RtpAudioFeedback* cb_audio_feedback_;
};
} // namespace webrtc

View File

@ -1,649 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h"
#include <cassert>
#include <math.h>
#include <stdlib.h>
#include <string.h>
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
using ModuleRTPUtility::GetCurrentRTP;
using ModuleRTPUtility::Payload;
using ModuleRTPUtility::RTPPayloadParser;
using ModuleRTPUtility::StringCompare;
RtpReceiver* RtpReceiver::CreateVideoReceiver(
int id, Clock* clock,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry) {
if (!incoming_payload_callback)
incoming_payload_callback = NullObjectRtpData();
if (!incoming_messages_callback)
incoming_messages_callback = NullObjectRtpFeedback();
return new RtpReceiverImpl(
id, clock, NullObjectRtpAudioFeedback(), incoming_messages_callback,
rtp_payload_registry,
RTPReceiverStrategy::CreateVideoStrategy(id, incoming_payload_callback));
}
RtpReceiver* RtpReceiver::CreateAudioReceiver(
int id, Clock* clock,
RtpAudioFeedback* incoming_audio_feedback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry) {
if (!incoming_audio_feedback)
incoming_audio_feedback = NullObjectRtpAudioFeedback();
if (!incoming_payload_callback)
incoming_payload_callback = NullObjectRtpData();
if (!incoming_messages_callback)
incoming_messages_callback = NullObjectRtpFeedback();
return new RtpReceiverImpl(
id, clock, incoming_audio_feedback, incoming_messages_callback,
rtp_payload_registry,
RTPReceiverStrategy::CreateAudioStrategy(id, incoming_payload_callback,
incoming_audio_feedback));
}
RtpReceiverImpl::RtpReceiverImpl(int32_t id,
Clock* clock,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry,
RTPReceiverStrategy* rtp_media_receiver)
: clock_(clock),
rtp_payload_registry_(rtp_payload_registry),
rtp_media_receiver_(rtp_media_receiver),
id_(id),
cb_rtp_feedback_(incoming_messages_callback),
critical_section_rtp_receiver_(
CriticalSectionWrapper::CreateCriticalSection()),
last_receive_time_(0),
last_received_payload_length_(0),
ssrc_(0),
num_csrcs_(0),
current_remote_csrc_(),
nack_method_(kNackOff),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
rtx_(false),
ssrc_rtx_(0),
payload_type_rtx_(-1) {
assert(incoming_audio_messages_callback);
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RtpReceiverImpl::~RtpReceiverImpl() {
for (int i = 0; i < num_csrcs_; ++i) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, current_remote_csrc_[i],
false);
}
delete critical_section_rtp_receiver_;
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
}
RTPReceiverStrategy* RtpReceiverImpl::GetMediaReceiver() const {
return rtp_media_receiver_.get();
}
RtpVideoCodecTypes RtpReceiverImpl::VideoCodecType() const {
PayloadUnion media_specific;
rtp_media_receiver_->GetLastMediaSpecificPayload(&media_specific);
return media_specific.Video.videoCodecType;
}
int32_t RtpReceiverImpl::RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
// TODO(phoglund): Try to streamline handling of the RED codec and some other
// cases which make it necessary to keep track of whether we created a
// payload or not.
bool created_new_payload = false;
int32_t result = rtp_payload_registry_->RegisterReceivePayload(
payload_name, payload_type, frequency, channels, rate,
&created_new_payload);
if (created_new_payload) {
if (rtp_media_receiver_->OnNewPayloadTypeCreated(payload_name, payload_type,
frequency) != 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s failed to register payload",
__FUNCTION__);
return -1;
}
}
return result;
}
int32_t RtpReceiverImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
return rtp_payload_registry_->DeRegisterReceivePayload(payload_type);
}
NACKMethod RtpReceiverImpl::NACK() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
return nack_method_;
}
// Turn negative acknowledgment requests on/off.
int32_t RtpReceiverImpl::SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
if (max_reordering_threshold < 0) {
return -1;
} else if (method == kNackRtcp) {
max_reordering_threshold_ = max_reordering_threshold;
} else {
max_reordering_threshold_ = kDefaultMaxReorderingThreshold;
}
nack_method_ = method;
return 0;
}
void RtpReceiverImpl::SetRTXStatus(bool enable, uint32_t ssrc) {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
rtx_ = enable;
ssrc_rtx_ = ssrc;
}
void RtpReceiverImpl::RTXStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
*enable = rtx_;
*ssrc = ssrc_rtx_;
*payload_type = payload_type_rtx_;
}
void RtpReceiverImpl::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(critical_section_rtp_receiver_);
payload_type_rtx_ = payload_type;
}
uint32_t RtpReceiverImpl::SSRC() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
return ssrc_;
}
// Get remote CSRC.
int32_t RtpReceiverImpl::CSRCs(uint32_t array_of_csrcs[kRtpCsrcSize]) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
assert(num_csrcs_ <= kRtpCsrcSize);
if (num_csrcs_ > 0) {
memcpy(array_of_csrcs, current_remote_csrc_, sizeof(uint32_t)*num_csrcs_);
}
return num_csrcs_;
}
int32_t RtpReceiverImpl::Energy(
uint8_t array_of_energy[kRtpCsrcSize]) const {
return rtp_media_receiver_->Energy(array_of_energy);
}
bool RtpReceiverImpl::IncomingRtpPacket(
RTPHeader* rtp_header,
const uint8_t* packet,
int packet_length,
PayloadUnion payload_specific,
bool in_order) {
// The rtp_header argument contains the parsed RTP header.
int length = packet_length - rtp_header->paddingLength;
// Sanity check.
if ((length - rtp_header->headerLength) < 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s invalid argument",
__FUNCTION__);
return false;
}
{
CriticalSectionScoped cs(critical_section_rtp_receiver_);
// TODO(holmer): Make rtp_header const after RTX has been broken out.
if (rtx_) {
if (ssrc_rtx_ == rtp_header->ssrc) {
// Sanity check, RTX packets have 2 extra header bytes.
if (rtp_header->headerLength + kRtxHeaderSize > packet_length) {
return false;
}
// If a specific RTX payload type is negotiated, set back to the media
// payload type and treat it like a media packet from here.
if (payload_type_rtx_ != -1) {
if (payload_type_rtx_ == rtp_header->payloadType &&
rtp_payload_registry_->last_received_media_payload_type() != -1) {
rtp_header->payloadType =
rtp_payload_registry_->last_received_media_payload_type();
} else {
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"Incorrect RTX configuration, dropping packet.");
return false;
}
}
rtp_header->ssrc = ssrc_;
rtp_header->sequenceNumber =
(packet[rtp_header->headerLength] << 8) +
packet[1 + rtp_header->headerLength];
// Count the RTX header as part of the RTP
rtp_header->headerLength += 2;
}
}
}
int8_t first_payload_byte = 0;
if (length > 0) {
first_payload_byte = packet[rtp_header->headerLength];
}
// Trigger our callbacks.
CheckSSRCChanged(rtp_header);
bool is_red = false;
bool should_reset_statistics = false;
if (CheckPayloadChanged(rtp_header,
first_payload_byte,
is_red,
&payload_specific,
&should_reset_statistics) == -1) {
if (length - rtp_header->headerLength == 0) {
// OK, keep-alive packet.
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
"%s received keepalive",
__FUNCTION__);
return true;
}
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"%s received invalid payloadtype",
__FUNCTION__);
return false;
}
if (should_reset_statistics) {
cb_rtp_feedback_->OnResetStatistics();
}
WebRtcRTPHeader webrtc_rtp_header;
memset(&webrtc_rtp_header, 0, sizeof(webrtc_rtp_header));
webrtc_rtp_header.header = *rtp_header;
CheckCSRC(&webrtc_rtp_header);
uint16_t payload_data_length =
ModuleRTPUtility::GetPayloadDataLength(*rtp_header, packet_length);
bool is_first_packet_in_frame = false;
bool is_first_packet = false;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_);
is_first_packet_in_frame =
last_received_sequence_number_ + 1 == rtp_header->sequenceNumber &&
TimeStamp() != rtp_header->timestamp;
is_first_packet = is_first_packet_in_frame || last_receive_time_ == 0;
}
int32_t ret_val = rtp_media_receiver_->ParseRtpPacket(
&webrtc_rtp_header, payload_specific, is_red, packet, packet_length,
clock_->TimeInMilliseconds(), is_first_packet);
if (ret_val < 0) {
return false;
}
{
CriticalSectionScoped lock(critical_section_rtp_receiver_);
last_receive_time_ = clock_->TimeInMilliseconds();
last_received_payload_length_ = payload_data_length;
if (in_order) {
if (last_received_timestamp_ != rtp_header->timestamp) {
last_received_timestamp_ = rtp_header->timestamp;
last_received_frame_time_ms_ = clock_->TimeInMilliseconds();
}
last_received_sequence_number_ = rtp_header->sequenceNumber;
}
}
return true;
}
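The RTX branch above rewrites the header in place: it maps a negotiated RTX payload type back to the last received media payload type, restores the media SSRC, and pulls the original sequence number out of the first two payload bytes before counting them as header. A minimal sketch of just that sequence-number recovery, under the same assumptions (illustrative name, header_length pointing just past the parsed RTP header):

#include <stdint.h>

// The original media sequence number of an RTX packet is carried big-endian
// in the first two payload bytes.
uint16_t RecoverOriginalSequenceNumber(const uint8_t* packet,
                                       uint16_t header_length) {
  return static_cast<uint16_t>((packet[header_length] << 8) |
                               packet[header_length + 1]);
}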
// Implementation note: we expect to have the critical_section_rtp_receiver_
// critsect when we call this.
bool RtpReceiverImpl::RetransmitOfOldPacket(const RTPHeader& header,
int jitter, int min_rtt) const {
if (InOrderPacket(header.sequenceNumber)) {
return false;
}
CriticalSectionScoped cs(critical_section_rtp_receiver_);
uint32_t frequency_khz = header.payload_type_frequency / 1000;
assert(frequency_khz > 0);
int64_t time_diff_ms = clock_->TimeInMilliseconds() -
last_receive_time_;
// Diff in time stamp since last received in order.
int32_t rtp_time_stamp_diff_ms =
static_cast<int32_t>(header.timestamp - last_received_timestamp_) /
frequency_khz;
int32_t max_delay_ms = 0;
if (min_rtt == 0) {
// Jitter standard deviation in samples.
float jitter_std = sqrt(static_cast<float>(jitter));
// 2 times the standard deviation => 95% confidence.
// And transform to milliseconds by dividing by the frequency in kHz.
max_delay_ms = static_cast<int32_t>((2 * jitter_std) / frequency_khz);
// Min max_delay_ms is 1.
if (max_delay_ms == 0) {
max_delay_ms = 1;
}
} else {
max_delay_ms = (min_rtt / 3) + 1;
}
if (time_diff_ms > rtp_time_stamp_diff_ms + max_delay_ms) {
return true;
}
return false;
}
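RetransmitOfOldPacket() above treats an out-of-order packet as a late retransmission when the wall-clock gap since the last in-order packet exceeds the RTP-timestamp gap by more than an allowed delay: two jitter standard deviations (at least 1 ms) when no RTT estimate exists, otherwise min_rtt/3 + 1. A compact sketch of just that arithmetic, with the locking and clock access stripped out and illustrative parameter names (frequency_khz is assumed non-zero, as the assert above requires):

#include <stdint.h>
#include <cmath>

// Mirrors the delay test above: true if the elapsed wall-clock time is larger
// than the media-time gap plus the allowed delay.
bool LooksLikeOldRetransmit(int64_t time_diff_ms,
                            int32_t rtp_time_stamp_diff_ms,
                            uint32_t jitter,          // in RTP timestamp units
                            uint32_t frequency_khz,
                            int min_rtt_ms) {
  int32_t max_delay_ms = 0;
  if (min_rtt_ms == 0) {
    // Two standard deviations of the jitter (~95% confidence), converted to
    // milliseconds by dividing by the frequency in kHz; never less than 1 ms.
    float jitter_std = std::sqrt(static_cast<float>(jitter));
    max_delay_ms = static_cast<int32_t>((2 * jitter_std) / frequency_khz);
    if (max_delay_ms == 0)
      max_delay_ms = 1;
  } else {
    max_delay_ms = (min_rtt_ms / 3) + 1;
  }
  return time_diff_ms > rtp_time_stamp_diff_ms + max_delay_ms;
}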
bool RtpReceiverImpl::InOrderPacket(const uint16_t sequence_number) const {
CriticalSectionScoped cs(critical_section_rtp_receiver_);
// First packet is always in order.
if (last_receive_time_ == 0)
return true;
if (IsNewerSequenceNumber(sequence_number, last_received_sequence_number_)) {
return true;
} else {
// If we have a restart of the remote side this packet is still in order.
return !IsNewerSequenceNumber(sequence_number,
last_received_sequence_number_ -
max_reordering_threshold_);
}
}
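InOrderPacket() above leans on a wrap-aware sequence-number comparison so that both 16-bit wrap-around and a restarted remote within max_reordering_threshold still count as in order. A minimal sketch of that comparison and check; these are standalone illustrations, not the webrtc IsNewerSequenceNumber helper itself:

#include <stdint.h>

// Wrap-aware "a is newer than b" for 16-bit RTP sequence numbers: a is newer
// if it is ahead of b by less than half the sequence space.
bool IsNewerSeq(uint16_t a, uint16_t b) {
  return a != b && static_cast<uint16_t>(a - b) < 0x8000;
}

// In order if strictly newer, or if it falls no further than max_reordering
// behind the last in-order sequence number (remote restart case).
bool InOrder(uint16_t seq, uint16_t last_seq, uint16_t max_reordering) {
  if (IsNewerSeq(seq, last_seq))
    return true;
  return !IsNewerSeq(seq, static_cast<uint16_t>(last_seq - max_reordering));
}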
TelephoneEventHandler* RtpReceiverImpl::GetTelephoneEventHandler() {
return rtp_media_receiver_->GetTelephoneEventHandler();
}
uint32_t RtpReceiverImpl::TimeStamp() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
return last_received_timestamp_;
}
int32_t RtpReceiverImpl::LastReceivedTimeMs() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_);
return last_received_frame_time_ms_;
}
// Implementation note: must not hold critsect when called.
void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader* rtp_header) {
bool new_ssrc = false;
bool re_initialize_decoder = false;
char payload_name[RTP_PAYLOAD_NAME_SIZE];
uint8_t channels = 1;
uint32_t rate = 0;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
if (ssrc_ != rtp_header->ssrc ||
(last_received_payload_type == -1 && ssrc_ == 0)) {
// We need the payload_type_ to make the call if the remote SSRC is 0.
new_ssrc = true;
cb_rtp_feedback_->OnResetStatistics();
last_received_timestamp_ = 0;
last_received_sequence_number_ = 0;
last_received_frame_time_ms_ = 0;
// Do we have a SSRC? Then the stream is restarted.
if (ssrc_) {
// Do we have the same codec? Then re-initialize coder.
if (rtp_header->payloadType == last_received_payload_type) {
re_initialize_decoder = true;
Payload* payload;
if (!rtp_payload_registry_->PayloadTypeToPayload(
rtp_header->payloadType, payload)) {
return;
}
assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
if (payload->audio) {
channels = payload->typeSpecific.Audio.channels;
rate = payload->typeSpecific.Audio.rate;
}
}
}
ssrc_ = rtp_header->ssrc;
}
}
if (new_ssrc) {
// We need to get this to our RTCP sender and receiver.
// We need to do this outside critical section.
cb_rtp_feedback_->OnIncomingSSRCChanged(id_, rtp_header->ssrc);
}
if (re_initialize_decoder) {
if (-1 == cb_rtp_feedback_->OnInitializeDecoder(
id_, rtp_header->payloadType, payload_name,
rtp_header->payload_type_frequency, channels, rate)) {
// New stream, same codec.
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"Failed to create decoder for payload type:%d",
rtp_header->payloadType);
}
}
}
// Implementation note: must not hold critsect when called.
// TODO(phoglund): Move as much as possible of this code path into the media
// specific receivers. Basically this method goes through a lot of trouble to
// compute something which is only used by the media specific parts later. If
// this code path moves we can get rid of some of the rtp_receiver ->
// media_specific interface (such as CheckPayloadChange, possibly get/set
// last known payload).
int32_t RtpReceiverImpl::CheckPayloadChanged(
const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& is_red,
PayloadUnion* specific_payload,
bool* should_reset_statistics) {
bool re_initialize_decoder = false;
char payload_name[RTP_PAYLOAD_NAME_SIZE];
int8_t payload_type = rtp_header->payloadType;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
if (payload_type != last_received_payload_type) {
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Get the real codec payload type.
payload_type = first_payload_byte & 0x7f;
is_red = true;
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Invalid payload type, traced by caller. If we proceeded here,
// this would be set as |_last_received_payload_type|, and we would no
// longer catch corrupt packets at this level.
return -1;
}
// When we receive RED we need to check the real payload type.
if (payload_type == last_received_payload_type) {
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
return 0;
}
}
*should_reset_statistics = false;
bool should_discard_changes = false;
rtp_media_receiver_->CheckPayloadChanged(
payload_type, specific_payload, should_reset_statistics,
&should_discard_changes);
if (should_discard_changes) {
is_red = false;
return 0;
}
Payload* payload;
if (!rtp_payload_registry_->PayloadTypeToPayload(payload_type, payload)) {
// Not a registered payload type.
return -1;
}
assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
rtp_payload_registry_->set_last_received_payload_type(payload_type);
re_initialize_decoder = true;
rtp_media_receiver_->SetLastMediaSpecificPayload(payload->typeSpecific);
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
if (!payload->audio) {
if (VideoCodecType() == kRtpVideoFec) {
// Only reset the decoder on media packets.
re_initialize_decoder = false;
} else {
bool media_type_unchanged =
rtp_payload_registry_->ReportMediaPayloadType(payload_type);
if (media_type_unchanged) {
// Only reset the decoder if the media codec type has changed.
re_initialize_decoder = false;
}
}
}
if (re_initialize_decoder) {
*should_reset_statistics = true;
}
} else {
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
is_red = false;
}
} // End critsect.
if (re_initialize_decoder) {
if (-1 == rtp_media_receiver_->InvokeOnInitializeDecoder(
cb_rtp_feedback_, id_, payload_type, payload_name,
*specific_payload)) {
return -1; // Wrong payload type.
}
}
return 0;
}
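When the incoming payload type matches the registered RED type, CheckPayloadChanged() above unwraps the real codec payload type from the low seven bits of the first payload byte (the RFC 2198 block header) and rejects the packet if that again resolves to RED. A one-function sketch of the extraction, with an illustrative name:

#include <stdint.h>

// The low 7 bits of a RED block header byte carry the encapsulated payload
// type; the top bit is the F (follow) flag.
int8_t RedEncapsulatedPayloadType(uint8_t first_payload_byte) {
  return static_cast<int8_t>(first_payload_byte & 0x7f);
}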
// Implementation note: must not hold critsect when called.
void RtpReceiverImpl::CheckCSRC(const WebRtcRTPHeader* rtp_header) {
int32_t num_csrcs_diff = 0;
uint32_t old_remote_csrc[kRtpCsrcSize];
uint8_t old_num_csrcs = 0;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_);
if (!rtp_media_receiver_->ShouldReportCsrcChanges(
rtp_header->header.payloadType)) {
return;
}
old_num_csrcs = num_csrcs_;
if (old_num_csrcs > 0) {
// Make a copy of old.
memcpy(old_remote_csrc, current_remote_csrc_,
num_csrcs_ * sizeof(uint32_t));
}
const uint8_t num_csrcs = rtp_header->header.numCSRCs;
if ((num_csrcs > 0) && (num_csrcs <= kRtpCsrcSize)) {
// Copy new.
memcpy(current_remote_csrc_,
rtp_header->header.arrOfCSRCs,
num_csrcs * sizeof(uint32_t));
}
if (num_csrcs > 0 || old_num_csrcs > 0) {
num_csrcs_diff = num_csrcs - old_num_csrcs;
num_csrcs_ = num_csrcs; // Update stored CSRCs.
} else {
// No change.
return;
}
} // End critsect.
bool have_called_callback = false;
// Search for new CSRC in old array.
for (uint8_t i = 0; i < rtp_header->header.numCSRCs; ++i) {
const uint32_t csrc = rtp_header->header.arrOfCSRCs[i];
bool found_match = false;
for (uint8_t j = 0; j < old_num_csrcs; ++j) {
if (csrc == old_remote_csrc[j]) { // old list
found_match = true;
break;
}
}
if (!found_match && csrc) {
// Didn't find it, report it as new.
have_called_callback = true;
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, csrc, true);
}
}
// Search for old CSRC in new array.
for (uint8_t i = 0; i < old_num_csrcs; ++i) {
const uint32_t csrc = old_remote_csrc[i];
bool found_match = false;
for (uint8_t j = 0; j < rtp_header->header.numCSRCs; ++j) {
if (csrc == rtp_header->header.arrOfCSRCs[j]) {
found_match = true;
break;
}
}
if (!found_match && csrc) {
// Did not find it, report as removed.
have_called_callback = true;
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, csrc, false);
}
}
if (!have_called_callback) {
// If the CSRC list contains non-unique entries we will end up here.
// Using CSRC 0 to signal this event, not interop safe, other
// implementations might have CSRC 0 as a valid value.
if (num_csrcs_diff > 0) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, 0, true);
} else if (num_csrcs_diff < 0) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, 0, false);
}
}
}
} // namespace webrtc

View File

@ -1,122 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RtpReceiverImpl : public RtpReceiver {
public:
// Callbacks passed in here may not be NULL (use Null Object callbacks if you
// want callbacks to do nothing). This class takes ownership of the media
// receiver but nothing else.
RtpReceiverImpl(int32_t id,
Clock* clock,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry,
RTPReceiverStrategy* rtp_media_receiver);
virtual ~RtpReceiverImpl();
RTPReceiverStrategy* GetMediaReceiver() const;
int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate);
int32_t DeRegisterReceivePayload(const int8_t payload_type);
bool IncomingRtpPacket(
RTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
int incoming_rtp_packet_length,
PayloadUnion payload_specific,
bool in_order);
NACKMethod NACK() const;
// Turn negative acknowledgement requests on/off.
int32_t SetNACKStatus(const NACKMethod method, int max_reordering_threshold);
// Returns the last received timestamp.
virtual uint32_t TimeStamp() const;
int32_t LastReceivedTimeMs() const;
uint32_t SSRC() const;
int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const;
int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
// RTX.
void SetRTXStatus(bool enable, uint32_t ssrc);
void RTXStatus(bool* enable, uint32_t* ssrc, int* payload_type) const;
void SetRtxPayloadType(int payload_type);
virtual bool RetransmitOfOldPacket(const RTPHeader& header,
int jitter, int min_rtt) const;
bool InOrderPacket(const uint16_t sequence_number) const;
TelephoneEventHandler* GetTelephoneEventHandler();
private:
RtpVideoCodecTypes VideoCodecType() const;
void CheckSSRCChanged(const RTPHeader* rtp_header);
void CheckCSRC(const WebRtcRTPHeader* rtp_header);
int32_t CheckPayloadChanged(const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& isRED,
PayloadUnion* payload,
bool* should_reset_statistics);
Clock* clock_;
RTPPayloadRegistry* rtp_payload_registry_;
scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
int32_t id_;
RtpFeedback* cb_rtp_feedback_;
CriticalSectionWrapper* critical_section_rtp_receiver_;
int64_t last_receive_time_;
uint16_t last_received_payload_length_;
// SSRCs.
uint32_t ssrc_;
uint8_t num_csrcs_;
uint32_t current_remote_csrc_[kRtpCsrcSize];
uint32_t last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint16_t last_received_sequence_number_;
NACKMethod nack_method_;
int max_reordering_threshold_;
bool rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_

View File

@ -12,30 +12,21 @@
#include <cstdlib>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
RTPReceiverStrategy::RTPReceiverStrategy(RtpData* data_callback)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
data_callback_(data_callback) {
: data_callback_(data_callback) {
memset(&last_payload_, 0, sizeof(last_payload_));
}
void RTPReceiverStrategy::GetLastMediaSpecificPayload(
PayloadUnion* payload) const {
CriticalSectionScoped cs(crit_sect_.get());
ModuleRTPUtility::PayloadUnion* payload) const {
memcpy(payload, &last_payload_, sizeof(*payload));
}
void RTPReceiverStrategy::SetLastMediaSpecificPayload(
const PayloadUnion& payload) {
CriticalSectionScoped cs(crit_sect_.get());
const ModuleRTPUtility::PayloadUnion& payload) {
memcpy(&last_payload_, &payload, sizeof(last_payload_));
}
int RTPReceiverStrategy::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
return 0;
}
} // namespace webrtc

View File

@ -14,24 +14,23 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class TelephoneEventHandler;
// This strategy deals with media-specific RTP packet processing.
// This class is not thread-safe and must be protected by its caller.
class RTPReceiverStrategy {
public:
static RTPReceiverStrategy* CreateVideoStrategy(int32_t id,
RtpData* data_callback);
static RTPReceiverStrategy* CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
// The data callback is where we should send received payload data.
// See ParseRtpPacket. This class does not claim ownership of the callback.
// Implementations must NOT hold any critical sections while calling the
// callback.
//
// Note: Implementations may call the callback for other reasons than calls
// to ParseRtpPacket, for instance if the implementation somehow recovers a
// packet.
RTPReceiverStrategy(RtpData* data_callback);
virtual ~RTPReceiverStrategy() {}
// Parses the RTP packet and calls the data callback with the payload data.
@ -40,18 +39,17 @@ class RTPReceiverStrategy {
// make changes in the data as necessary. The specific_payload argument
// provides audio or video-specific data. The is_first_packet argument is true
// if this packet is either the first packet ever or the first in its frame.
virtual int32_t ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
virtual int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) = 0;
virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) = 0;
// Retrieves the last known applicable frequency.
virtual int GetPayloadTypeFrequency() const = 0;
virtual int32_t GetFrequencyHz() const = 0;
// Computes the current dead-or-alive state.
virtual RTPAliveType ProcessDeadOrAlive(
@ -65,21 +63,22 @@ class RTPReceiverStrategy {
// the payload registry.
virtual int32_t OnNewPayloadTypeCreated(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
int8_t payloadType,
uint32_t frequency) = 0;
const int8_t payloadType,
const uint32_t frequency) = 0;
// Invokes the OnInitializeDecoder callback in a media-specific way.
virtual int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const = 0;
const ModuleRTPUtility::PayloadUnion& specific_payload) const = 0;
// Checks if the payload type has changed, and returns whether we should
// reset statistics and/or discard this packet.
virtual void CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
virtual void CheckPayloadChanged(
const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
// Default: Keep changes and don't reset statistics.
@ -88,26 +87,16 @@ class RTPReceiverStrategy {
}
// Stores / retrieves the last media specific payload for later reference.
void GetLastMediaSpecificPayload(PayloadUnion* payload) const;
void SetLastMediaSpecificPayload(const PayloadUnion& payload);
int Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
void GetLastMediaSpecificPayload(
ModuleRTPUtility::PayloadUnion* payload) const;
void SetLastMediaSpecificPayload(
const ModuleRTPUtility::PayloadUnion& payload);
protected:
// The data callback is where we should send received payload data.
// See ParseRtpPacket. This class does not claim ownership of the callback.
// Implementations must NOT hold any critical sections while calling the
// callback.
//
// Note: Implementations may call the callback for other reasons than calls
// to ParseRtpPacket, for instance if the implementation somehow recovers a
// packet.
RTPReceiverStrategy(RtpData* data_callback);
scoped_ptr<CriticalSectionWrapper> crit_sect_;
PayloadUnion last_payload_;
ModuleRTPUtility::PayloadUnion last_payload_;
RtpData* data_callback_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
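The default CheckPayloadChanged above keeps the changes and leaves the statistics alone, so callers are expected to branch on the two output flags rather than on a return value. A hedged sketch of that calling pattern (the surrounding receiver code and the ResetStatistics helper are hypothetical):

bool should_reset_statistics = false;
bool should_discard_changes = false;
strategy_->CheckPayloadChanged(payload_type, &specific_payload,
                               &should_reset_statistics,
                               &should_discard_changes);
if (should_discard_changes) {
  return 0;  // The payload change says drop this packet; keep previous state.
}
if (should_reset_statistics) {
  ResetStatistics();  // Hypothetical helper on the enclosing receiver.
}
// Otherwise continue parsing with the (possibly updated) specific_payload.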

View File

@ -15,9 +15,9 @@
#include <cassert> // assert
#include <cstring> // memcpy()
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@ -29,18 +29,21 @@ uint32_t BitRateBPS(uint16_t x) {
return (x & 0x3fff) * uint32_t(pow(10.0f, (2 + (x >> 14))));
}
RTPReceiverStrategy* RTPReceiverStrategy::CreateVideoStrategy(
int32_t id, RtpData* data_callback) {
return new RTPReceiverVideo(id, data_callback);
}
RTPReceiverVideo::RTPReceiverVideo(int32_t id, RtpData* data_callback)
RTPReceiverVideo::RTPReceiverVideo(
const int32_t id,
const RTPPayloadRegistry* rtp_rtp_payload_registry,
RtpData* data_callback)
: RTPReceiverStrategy(data_callback),
id_(id),
rtp_rtp_payload_registry_(rtp_rtp_payload_registry),
critical_section_receiver_video_(
CriticalSectionWrapper::CreateCriticalSection()),
current_fec_frame_decoded_(false),
receive_fec_(NULL) {
}
RTPReceiverVideo::~RTPReceiverVideo() {
delete critical_section_receiver_video_;
delete receive_fec_;
}
@ -52,12 +55,12 @@ bool RTPReceiverVideo::ShouldReportCsrcChanges(
int32_t RTPReceiverVideo::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency) {
const int8_t payload_type,
const uint32_t frequency) {
if (ModuleRTPUtility::StringCompare(payload_name, "ULPFEC", 6)) {
// Enable FEC if not enabled.
if (receive_fec_ == NULL) {
receive_fec_ = new ReceiverFEC(id_, data_callback_);
receive_fec_ = new ReceiverFEC(id_, this);
}
receive_fec_->SetPayloadTypeFEC(payload_type);
}
@ -66,16 +69,15 @@ int32_t RTPReceiverVideo::OnNewPayloadTypeCreated(
int32_t RTPReceiverVideo::ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) {
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp", "Video::ParseRtp",
"seqnum", rtp_header->header.sequenceNumber,
"timestamp", rtp_header->header.timestamp);
rtp_header->type.Video.codec = specific_payload.Video.videoCodecType;
const uint8_t* payload_data =
ModuleRTPUtility::GetPayloadData(rtp_header->header, packet);
const uint16_t payload_data_length =
@ -91,8 +93,8 @@ int32_t RTPReceiverVideo::ParseRtpPacket(
is_first_packet);
}
int RTPReceiverVideo::GetPayloadTypeFrequency() const {
return kVideoPayloadTypeFrequency;
int32_t RTPReceiverVideo::GetFrequencyHz() const {
return kDefaultVideoFrequency;
}
RTPAliveType RTPReceiverVideo::ProcessDeadOrAlive(
@ -102,13 +104,13 @@ RTPAliveType RTPReceiverVideo::ProcessDeadOrAlive(
int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const {
const ModuleRTPUtility::PayloadUnion& specific_payload) const {
// For video we just go with default values.
if (-1 == callback->OnInitializeDecoder(
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
id, payload_type, payload_name, kDefaultVideoFrequency, 1, 0)) {
WEBRTC_TRACE(kTraceError,
kTraceRtpRtcp,
id,
@ -125,29 +127,29 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
bool is_red,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_red,
const uint8_t* incoming_rtp_packet,
uint16_t incoming_rtp_packet_size,
int64_t now_ms,
bool is_first_packet) {
const uint16_t incoming_rtp_packet_size,
const int64_t now_ms,
const bool is_first_packet) {
int32_t ret_val = 0;
crit_sect_->Enter();
critical_section_receiver_video_->Enter();
if (is_red) {
if (receive_fec_ == NULL) {
crit_sect_->Leave();
critical_section_receiver_video_->Leave();
return -1;
}
crit_sect_->Leave();
bool FECpacket = false;
ret_val = receive_fec_->AddReceivedFECPacket(
rtp_header, incoming_rtp_packet, payload_data_length, FECpacket);
if (ret_val != -1) {
ret_val = receive_fec_->ProcessReceivedFEC();
}
critical_section_receiver_video_->Leave();
if (ret_val == 0 && FECpacket) {
// Callback with the received FEC packet.
@ -156,17 +158,21 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
// empty payload and data length.
rtp_header->frameType = kFrameEmpty;
// We need this for the routing.
rtp_header->type.Video.codec = video_type;
int32_t ret_val = SetCodecType(video_type, rtp_header);
if (ret_val != 0) {
return ret_val;
}
// Pass the length of FEC packets so that they can be accounted for in
// the bandwidth estimator.
ret_val = data_callback_->OnReceivedPayloadData(
NULL, payload_data_length, rtp_header);
}
} else {
// will leave the crit_sect_ critsect
// will leave the critical_section_receiver_video_ critsect
ret_val = ParseVideoCodecSpecificSwitch(rtp_header,
payload_data,
payload_data_length,
video_type,
is_first_packet);
}
return ret_val;
@ -208,11 +214,82 @@ int32_t RTPReceiverVideo::BuildRTPheader(
return rtp_header_length;
}
int32_t RTPReceiverVideo::ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length) {
// TODO(pwestin) Re-factor this to avoid the messy critsect handling.
critical_section_receiver_video_->Enter();
current_fec_frame_decoded_ = true;
ModuleRTPUtility::Payload* payload = NULL;
if (rtp_rtp_payload_registry_->PayloadTypeToPayload(
rtp_header->header.payloadType, payload) != 0) {
critical_section_receiver_video_->Leave();
return -1;
}
// Here we can re-create the original lost packet so that we can use it for
// the relay; we need to re-create the RED header too.
uint8_t recovered_packet[IP_PACKET_SIZE];
uint16_t rtp_header_length =
(uint16_t) BuildRTPheader(rtp_header, recovered_packet);
const uint8_t kREDForFECHeaderLength = 1;
// replace pltype
recovered_packet[1] &= 0x80; // Reset.
recovered_packet[1] += rtp_rtp_payload_registry_->red_payload_type();
// add RED header
recovered_packet[rtp_header_length] = rtp_header->header.payloadType;
// f-bit always 0
memcpy(recovered_packet + rtp_header_length + kREDForFECHeaderLength,
payload_data,
payload_data_length);
// A recovered packet can be the first packet, but we lack the ability to
// detect it at the moment since we do not store the history of recently
// received packets. Most codecs like VP8 deal with this in other ways.
bool is_first_packet = false;
return ParseVideoCodecSpecificSwitch(
rtp_header,
payload_data,
payload_data_length,
payload->typeSpecific.Video.videoCodecType,
is_first_packet);
}
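ReceiveRecoveredPacketCallback above rewrites the second RTP header byte so it carries the RED payload type while preserving the marker bit, then prepends a one-byte RED header whose F bit is zero and whose low seven bits hold the original payload type. A small standalone check of that byte arithmetic (the concrete values are only illustrative):

#include <cstdint>
#include <cstdio>

int main() {
  uint8_t byte1 = 0xE4;                      // Marker bit set, payload type 100.
  const uint8_t kRedPayloadType = 96;        // Illustrative RED payload type.
  const uint8_t original_pt = byte1 & 0x7F;  // 100 (0x64).

  byte1 &= 0x80;                             // Keep only the marker bit.
  byte1 += kRedPayloadType;                  // 0x80 + 96 == 0xE0.

  const uint8_t red_header = original_pt;    // F bit 0, original PT in low bits.

  std::printf("rewritten byte1=0x%02X, RED header=0x%02X\n", byte1, red_header);
  return 0;
}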
int32_t RTPReceiverVideo::SetCodecType(
const RtpVideoCodecTypes video_type,
WebRtcRTPHeader* rtp_header) const {
switch (video_type) {
case kRtpGenericVideo:
rtp_header->type.Video.codec = kRTPVideoGeneric;
break;
case kRtpVp8Video:
rtp_header->type.Video.codec = kRTPVideoVP8;
break;
case kRtpFecVideo:
rtp_header->type.Video.codec = kRTPVideoFEC;
break;
}
return 0;
}
int32_t RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
bool is_first_packet) {
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_first_packet) {
int32_t ret_val = SetCodecType(video_type, rtp_header);
if (ret_val != 0) {
critical_section_receiver_video_->Leave();
return ret_val;
}
WEBRTC_TRACE(kTraceStream,
kTraceRtpRtcp,
id_,
@ -220,26 +297,25 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
__FUNCTION__,
rtp_header->header.timestamp);
// Critical section has already been taken.
switch (rtp_header->type.Video.codec) {
case kRtpVideoGeneric:
// All receive functions release critical_section_receiver_video_ before
// returning.
switch (video_type) {
case kRtpGenericVideo:
rtp_header->type.Video.isFirstPacket = is_first_packet;
return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
case kRtpVideoVp8:
case kRtpVp8Video:
return ReceiveVp8Codec(rtp_header, payload_data, payload_data_length);
case kRtpVideoFec:
case kRtpFecVideo:
break;
default:
assert(false);
}
// Releasing the already taken critical section here.
crit_sect_->Leave();
critical_section_receiver_video_->Leave();
return -1;
}
int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
int32_t RTPReceiverVideo::ReceiveVp8Codec(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
const uint16_t payload_data_length) {
bool success;
ModuleRTPUtility::RTPPayload parsed_packet;
if (payload_data_length == 0) {
@ -247,12 +323,12 @@ int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
parsed_packet.info.VP8.dataLength = 0;
} else {
ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
kRtpVideoVp8, payload_data, payload_data_length, id_);
kRtpVp8Video, payload_data, payload_data_length, id_);
success = rtp_payload_parser.Parse(parsed_packet);
}
// from here down we only work on local data
crit_sect_->Leave();
critical_section_receiver_video_->Leave();
if (!success) {
return -1;
@ -315,7 +391,7 @@ int32_t RTPReceiverVideo::ReceiveGenericCodec(
rtp_header->type.Video.isFirstPacket =
(generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
crit_sect_->Leave();
critical_section_receiver_video_->Leave();
if (data_callback_->OnReceivedPayloadData(
payload_data, payload_data_length, rtp_header) != 0) {

View File

@ -22,28 +22,27 @@ namespace webrtc {
class CriticalSectionWrapper;
class ModuleRtpRtcpImpl;
class ReceiverFEC;
class RtpReceiver;
class RTPReceiver;
class RTPPayloadRegistry;
class RTPReceiverVideo : public RTPReceiverStrategy {
public:
RTPReceiverVideo(const int32_t id, RtpData* data_callback);
RTPReceiverVideo(const int32_t id,
const RTPPayloadRegistry* rtp_payload_registry,
RtpData* data_callback);
virtual ~RTPReceiverVideo();
int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp,
bool is_first_packet);
const uint16_t packet_length,
const int64_t timestamp,
const bool is_first_packet);
TelephoneEventHandler* GetTelephoneEventHandler() {
return NULL;
}
int GetPayloadTypeFrequency() const;
int32_t GetFrequencyHz() const;
RTPAliveType ProcessDeadOrAlive(uint16_t last_payload_length) const;
@ -51,32 +50,41 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
int32_t OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency);
const int8_t payload_type,
const uint32_t frequency);
int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const;
const ModuleRTPUtility::PayloadUnion& specific_payload) const;
virtual int32_t ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length);
void SetPacketOverHead(uint16_t packet_over_head);
protected:
int32_t SetCodecType(const RtpVideoCodecTypes video_type,
WebRtcRTPHeader* rtp_header) const;
int32_t ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
bool is_first_packet);
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_first_packet);
int32_t ReceiveGenericCodec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length);
const uint16_t payload_data_length);
int32_t ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length);
const uint16_t payload_data_length);
int32_t BuildRTPheader(const WebRtcRTPHeader* rtp_header,
uint8_t* data_buffer) const;
@ -85,17 +93,21 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
int32_t ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
bool is_red,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_red,
const uint8_t* incoming_rtp_packet,
uint16_t incoming_rtp_packet_size,
int64_t now_ms,
bool is_first_packet);
const uint16_t incoming_rtp_packet_size,
const int64_t now_ms,
const bool is_first_packet);
int32_t id_;
const RTPPayloadRegistry* rtp_rtp_payload_registry_;
CriticalSectionWrapper* critical_section_receiver_video_;
// FEC
bool current_fec_frame_decoded_;
ReceiverFEC* receive_fec_;
};
} // namespace webrtc

View File

@ -28,16 +28,11 @@
},
'sources': [
# Common
'../interface/receive_statistics.h',
'../interface/rtp_header_parser.h',
'../interface/rtp_payload_registry.h',
'../interface/rtp_receiver.h',
'../interface/rtp_rtcp.h',
'../interface/rtp_rtcp_defines.h',
'bitrate.cc',
'bitrate.h',
'receive_statistics_impl.cc',
'receive_statistics_impl.h',
'rtp_header_parser.cc',
'rtp_rtcp_config.h',
'rtp_rtcp_impl.cc',
@ -52,8 +47,8 @@
'rtcp_utility.h',
'rtp_header_extension.cc',
'rtp_header_extension.h',
'rtp_receiver_impl.cc',
'rtp_receiver_impl.h',
'rtp_receiver.cc',
'rtp_receiver.h',
'rtp_sender.cc',
'rtp_sender.h',
'rtp_utility.cc',
@ -80,6 +75,7 @@
'producer_fec.h',
'rtp_packet_history.cc',
'rtp_packet_history.h',
'rtp_payload_registry.h',
'rtp_payload_registry.cc',
'rtp_receiver_strategy.cc',
'rtp_receiver_strategy.h',
@ -97,7 +93,6 @@
'vp8_partition_aggregator.h',
# Mocks
'../mocks/mock_rtp_rtcp.h',
'mock/mock_rtp_payload_strategy.h',
], # source
# TODO(jschuh): Bug 1348: fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],

View File

@ -14,6 +14,8 @@
#include <string.h>
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
@ -36,12 +38,28 @@ const float kFracMs = 4.294967296E6f;
namespace webrtc {
static RtpData* NullObjectRtpData() {
static NullRtpData null_rtp_data;
return &null_rtp_data;
}
static RtpFeedback* NullObjectRtpFeedback() {
static NullRtpFeedback null_rtp_feedback;
return &null_rtp_feedback;
}
static RtpAudioFeedback* NullObjectRtpAudioFeedback() {
static NullRtpAudioFeedback null_rtp_audio_feedback;
return &null_rtp_audio_feedback;
}
RtpRtcp::Configuration::Configuration()
: id(-1),
audio(false),
clock(NULL),
default_module(NULL),
receive_statistics(NULL),
incoming_data(NullObjectRtpData()),
incoming_messages(NullObjectRtpFeedback()),
outgoing_transport(NULL),
rtcp_feedback(NULL),
intra_frame_callback(NULL),
@ -67,7 +85,10 @@ RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
}
ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
: rtp_sender_(configuration.id,
: rtp_payload_registry_(
configuration.id,
RTPPayloadStrategy::CreateStrategy(configuration.audio)),
rtp_sender_(configuration.id,
configuration.audio,
configuration.clock,
configuration.outgoing_transport,
@ -77,12 +98,14 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
this),
rtcp_receiver_(configuration.id, configuration.clock, this),
clock_(configuration.clock),
receive_statistics_(configuration.receive_statistics),
rtp_telephone_event_handler_(NULL),
id_(configuration.id),
audio_(configuration.audio),
collision_detected_(false),
last_process_time_(configuration.clock->TimeInMilliseconds()),
last_bitrate_process_time_(configuration.clock->TimeInMilliseconds()),
last_packet_timeout_process_time_(
configuration.clock->TimeInMilliseconds()),
last_rtt_process_time_(configuration.clock->TimeInMilliseconds()),
packet_overhead_(28), // IPV4 UDP.
critical_section_module_ptrs_(
@ -91,6 +114,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
CriticalSectionWrapper::CreateCriticalSection()),
default_module_(
static_cast<ModuleRtpRtcpImpl*>(configuration.default_module)),
dead_or_alive_active_(false),
dead_or_alive_timeout_ms_(0),
dead_or_alive_last_timer_(0),
nack_method_(kNackOff),
nack_last_time_sent_full_(0),
nack_last_seq_number_sent_(0),
@ -101,6 +127,26 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
, plot1_(NULL),
#endif
rtt_observer_(configuration.rtt_observer) {
RTPReceiverStrategy* rtp_receiver_strategy;
if (configuration.audio) {
// If audio, we need to be able to handle telephone events too, so stash
// away the audio receiver for those situations.
rtp_telephone_event_handler_ =
new RTPReceiverAudio(configuration.id, configuration.incoming_data,
configuration.audio_messages);
rtp_receiver_strategy = rtp_telephone_event_handler_;
} else {
rtp_receiver_strategy =
new RTPReceiverVideo(configuration.id, &rtp_payload_registry_,
configuration.incoming_data);
}
rtp_receiver_.reset(new RTPReceiver(
configuration.id, configuration.clock, this,
configuration.audio_messages, configuration.incoming_data,
configuration.incoming_messages, rtp_receiver_strategy,
&rtp_payload_registry_));
send_video_codec_.codecType = kVideoCodecUnknown;
if (default_module_) {
@ -192,11 +238,21 @@ int32_t ModuleRtpRtcpImpl::Process() {
const int64_t now = clock_->TimeInMilliseconds();
last_process_time_ = now;
if (now >=
last_packet_timeout_process_time_ + kRtpRtcpPacketTimeoutProcessTimeMs) {
rtp_receiver_->PacketTimeout();
rtcp_receiver_.PacketTimeout();
last_packet_timeout_process_time_ = now;
}
if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) {
rtp_sender_.ProcessBitrate();
rtp_receiver_->ProcessBitrate();
last_bitrate_process_time_ = now;
}
ProcessDeadOrAliveTimer();
const bool default_instance(child_modules_.empty() ? false : true);
if (!default_instance) {
if (rtcp_sender_.Sending()) {
@ -241,14 +297,8 @@ int32_t ModuleRtpRtcpImpl::Process() {
}
}
}
if (rtcp_sender_.TimeToSendRTCPReport()) {
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (receive_statistics_->Statistics(&receive_stats, true)) {
rtcp_sender_.SendRTCP(kRtcpReport, &receive_stats);
} else {
rtcp_sender_.SendRTCP(kRtcpReport, NULL);
}
}
if (rtcp_sender_.TimeToSendRTCPReport())
rtcp_sender_.SendRTCP(kRtcpReport);
}
if (UpdateRTCPReceiveInformationTimers()) {
@ -258,6 +308,230 @@ int32_t ModuleRtpRtcpImpl::Process() {
return 0;
}
void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
bool RTCPalive = false;
int64_t now = 0;
bool do_callback = false;
// Do operations on members under lock but avoid making the
// ProcessDeadOrAlive() callback under the same lock.
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
if (dead_or_alive_active_) {
now = clock_->TimeInMilliseconds();
if (now > dead_or_alive_timeout_ms_ + dead_or_alive_last_timer_) {
// RTCP is alive if we have received a report within the last 12 seconds.
dead_or_alive_last_timer_ += dead_or_alive_timeout_ms_;
if (rtcp_receiver_.LastReceived() + 12000 > now)
RTCPalive = true;
do_callback = true;
}
}
}
if (do_callback)
rtp_receiver_->ProcessDeadOrAlive(RTCPalive, now);
}
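The timer above fires once per configured period and, as the comment says, treats RTCP as alive when a report has arrived within the last 12 seconds. The check in isolation (the constant and function names are made up for this sketch):

#include <cstdint>

const int64_t kRtcpAliveWindowMs = 12000;  // 12 seconds, matching the comment.

bool RtcpAlive(int64_t now_ms, int64_t last_rtcp_received_ms) {
  return last_rtcp_received_ms + kRtcpAliveWindowMs > now_ms;
}
// Example: last report at t = 90 000 ms, now = 100 000 ms -> alive (10 s < 12 s).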
int32_t ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sample_time_seconds) {
if (enable) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPeriodicDeadOrAliveStatus(enable, %d)",
sample_time_seconds);
} else {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPeriodicDeadOrAliveStatus(disable)");
}
if (sample_time_seconds == 0) {
return -1;
}
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
dead_or_alive_active_ = enable;
dead_or_alive_timeout_ms_ = sample_time_seconds * 1000;
// Trigger the first check after one period.
dead_or_alive_last_timer_ = clock_->TimeInMilliseconds();
}
return 0;
}
int32_t ModuleRtpRtcpImpl::PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sample_time_seconds) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"PeriodicDeadOrAliveStatus()");
enable = dead_or_alive_active_;
sample_time_seconds =
static_cast<uint8_t>(dead_or_alive_timeout_ms_ / 1000);
return 0;
}
int32_t ModuleRtpRtcpImpl::SetPacketTimeout(
const uint32_t rtp_timeout_ms,
const uint32_t rtcp_timeout_ms) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPacketTimeout(%u,%u)",
rtp_timeout_ms,
rtcp_timeout_ms);
if (rtp_receiver_->SetPacketTimeout(rtp_timeout_ms) == 0) {
return rtcp_receiver_.SetPacketTimeout(rtcp_timeout_ms);
}
return -1;
}
int32_t ModuleRtpRtcpImpl::RegisterReceivePayload(
const CodecInst& voice_codec) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"RegisterReceivePayload(voice_codec)");
return rtp_receiver_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate);
}
int32_t ModuleRtpRtcpImpl::RegisterReceivePayload(
const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"RegisterReceivePayload(video_codec)");
return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
}
int32_t ModuleRtpRtcpImpl::ReceivePayloadType(
const CodecInst& voice_codec,
int8_t* pl_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"ReceivePayloadType(voice_codec)");
return rtp_receiver_->ReceivePayloadType(
voice_codec.plname,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate,
pl_type);
}
int32_t ModuleRtpRtcpImpl::ReceivePayloadType(
const VideoCodec& video_codec,
int8_t* pl_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"ReceivePayloadType(video_codec)");
return rtp_receiver_->ReceivePayloadType(video_codec.plName,
90000,
0,
video_codec.maxBitrate,
pl_type);
}
int32_t ModuleRtpRtcpImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"DeRegisterReceivePayload(%d)",
payload_type);
return rtp_receiver_->DeRegisterReceivePayload(payload_type);
}
// Get the currently configured SSRC filter.
int32_t ModuleRtpRtcpImpl::SSRCFilter(
uint32_t& allowed_ssrc) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SSRCFilter()");
return rtp_receiver_->SSRCFilter(allowed_ssrc);
}
// Set an SSRC to be used as a filter for incoming RTP streams.
int32_t ModuleRtpRtcpImpl::SetSSRCFilter(
const bool enable,
const uint32_t allowed_ssrc) {
if (enable) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetSSRCFilter(enable, 0x%x)",
allowed_ssrc);
} else {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetSSRCFilter(disable)");
}
return rtp_receiver_->SetSSRCFilter(enable, allowed_ssrc);
}
// Get last received remote timestamp.
uint32_t ModuleRtpRtcpImpl::RemoteTimestamp() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteTimestamp()");
return rtp_receiver_->TimeStamp();
}
int64_t ModuleRtpRtcpImpl::LocalTimeOfRemoteTimeStamp() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"LocalTimeOfRemoteTimeStamp()");
return rtp_receiver_->LastReceivedTimeMs();
}
// Get the current estimated remote timestamp.
int32_t ModuleRtpRtcpImpl::EstimatedRemoteTimeStamp(
uint32_t& timestamp) const {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"EstimatedRemoteTimeStamp()");
return rtp_receiver_->EstimatedRemoteTimeStamp(timestamp);
}
// Get incoming SSRC.
uint32_t ModuleRtpRtcpImpl::RemoteSSRC() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteSSRC()");
return rtp_receiver_->SSRC();
}
// Get remote CSRC
int32_t ModuleRtpRtcpImpl::RemoteCSRCs(
uint32_t arr_of_csrc[kRtpCsrcSize]) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteCSRCs()");
return rtp_receiver_->CSRCs(arr_of_csrc);
}
int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(RtxMode mode, bool set_ssrc,
uint32_t ssrc) {
rtp_sender_.SetRTXStatus(mode, set_ssrc, ssrc);
@ -270,10 +544,42 @@ int32_t ModuleRtpRtcpImpl::RTXSendStatus(RtxMode* mode, uint32_t* ssrc,
return 0;
}
int32_t ModuleRtpRtcpImpl::SetRTXReceiveStatus(bool enable,
uint32_t ssrc) {
rtp_receiver_->SetRTXStatus(enable, ssrc);
return 0;
}
int32_t ModuleRtpRtcpImpl::RTXReceiveStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const {
rtp_receiver_->RTXStatus(enable, ssrc, payload_type);
return 0;
}
void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
rtp_sender_.SetRtxPayloadType(payload_type);
}
void ModuleRtpRtcpImpl::SetRtxReceivePayloadType(int payload_type) {
rtp_receiver_->SetRtxPayloadType(payload_type);
}
// Called by the network module when we receive a packet.
int32_t ModuleRtpRtcpImpl::IncomingRtpPacket(
const uint8_t* incoming_packet,
const uint16_t incoming_packet_length,
const RTPHeader& parsed_rtp_header) {
WEBRTC_TRACE(kTraceStream,
kTraceRtpRtcp,
id_,
"IncomingRtpPacket(packet_length:%u)",
incoming_packet_length);
RTPHeader rtp_header_copy = parsed_rtp_header;
return rtp_receiver_->IncomingRTPPacket(&rtp_header_copy,
incoming_packet,
incoming_packet_length);
}
int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
const uint8_t* rtcp_packet,
const uint16_t length) {
@ -576,12 +882,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
if (!have_child_modules) {
// Don't send RTCP from default module.
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) {
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (receive_statistics_->Statistics(&receive_stats, true)) {
rtcp_sender_.SendRTCP(kRtcpReport, &receive_stats);
} else {
rtcp_sender_.SendRTCP(kRtcpReport, NULL);
}
rtcp_sender_.SendRTCP(kRtcpReport);
}
return rtp_sender_.SendOutgoingData(frame_type,
payload_type,
@ -884,6 +1185,12 @@ int32_t ModuleRtpRtcpImpl::RemoteCNAME(
return rtcp_receiver_.CNAME(remote_ssrc, c_name);
}
uint16_t ModuleRtpRtcpImpl::RemoteSequenceNumber() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteSequenceNumber()");
return rtp_receiver_->SequenceNumber();
}
int32_t ModuleRtpRtcpImpl::RemoteNTP(
uint32_t* received_ntpsecs,
uint32_t* received_ntpfrac,
@ -923,6 +1230,21 @@ void ModuleRtpRtcpImpl:: SetRtt(uint32_t rtt) {
rtcp_receiver_.SetRTT(static_cast<uint16_t>(rtt));
}
// Reset RTP statistics.
int32_t ModuleRtpRtcpImpl::ResetStatisticsRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetStatisticsRTP()");
return rtp_receiver_->ResetStatistics();
}
// Reset RTP data counters for the receiving side.
int32_t ModuleRtpRtcpImpl::ResetReceiveDataCountersRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"ResetReceiveDataCountersRTP()");
return rtp_receiver_->ResetDataCounters();
}
// Reset RTP data counters for the sending side.
int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
@ -937,18 +1259,8 @@ int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
int32_t ModuleRtpRtcpImpl::SendRTCP(uint32_t rtcp_packet_type) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendRTCP(0x%x)",
rtcp_packet_type);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound ||
(rtcp_packet_type & kRtcpReport) ||
(rtcp_packet_type & kRtcpSr) ||
(rtcp_packet_type & kRtcpRr)) {
if (receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(rtcp_packet_type, &receive_stats);
} else {
return rtcp_sender_.SendRTCP(rtcp_packet_type, NULL);
}
}
return rtcp_sender_.SendRTCP(rtcp_packet_type, NULL);
return rtcp_sender_.SendRTCP(rtcp_packet_type);
}
int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
@ -971,9 +1283,32 @@ int32_t ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
return rtcp_sender_.SetRTCPVoIPMetrics(voip_metric);
}
// Our locally created statistics of the received RTP stream.
int32_t ModuleRtpRtcpImpl::StatisticsRTP(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* max_jitter) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "StatisticsRTP()");
uint32_t jitter_transmission_time_offset = 0;
int32_t ret_val = rtp_receiver_->Statistics(
fraction_lost, cum_lost, ext_max, jitter, max_jitter,
&jitter_transmission_time_offset, (rtcp_sender_.Status() == kRtcpOff));
if (ret_val == -1) {
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"StatisticsRTP() no statistics available");
}
return ret_val;
}
int32_t ModuleRtpRtcpImpl::DataCountersRTP(
uint32_t* bytes_sent,
uint32_t* packets_sent) const {
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "DataCountersRTP()");
if (bytes_sent) {
@ -982,7 +1317,36 @@ int32_t ModuleRtpRtcpImpl::DataCountersRTP(
if (packets_sent) {
*packets_sent = rtp_sender_.Packets();
}
return 0;
return rtp_receiver_->DataCounters(bytes_received, packets_received);
}
int32_t ModuleRtpRtcpImpl::ReportBlockStatistics(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* jitter_transmission_time_offset) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ReportBlockStatistics()");
int32_t missing = 0;
int32_t ret = rtp_receiver_->Statistics(fraction_lost,
cum_lost,
ext_max,
jitter,
NULL,
jitter_transmission_time_offset,
&missing,
true);
#ifdef MATLAB
if (plot1_ == NULL) {
plot1_ = eng.NewPlot(new MatlabPlot());
plot1_->AddTimeLine(30, "b", "lost", clock_->TimeInMilliseconds());
}
plot1_->Append("lost", missing);
plot1_->Plot();
#endif
return ret;
}
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) {
@ -1097,6 +1461,52 @@ int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
return rtcp_sender_.SetTMMBN(bounding_set, max_bitrate_kbit);
}
// (NACK) Negative acknowledgment.
// Are negative acknowledgment requests on or off?
NACKMethod ModuleRtpRtcpImpl::NACK() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "NACK()");
NACKMethod child_method = kNackOff;
const bool default_instance(child_modules_.empty() ? false : true);
if (default_instance) {
// For default we need to check all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::const_iterator it =
child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
NACKMethod nackMethod = module->NACK();
if (nackMethod != kNackOff) {
child_method = nackMethod;
break;
}
}
it++;
}
}
NACKMethod method = nack_method_;
if (child_method != kNackOff) {
method = child_method;
}
return method;
}
// Turn negative acknowledgment requests on/off.
int32_t ModuleRtpRtcpImpl::SetNACKStatus(
NACKMethod method, int max_reordering_threshold) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetNACKStatus(%u)", method);
nack_method_ = method;
rtp_receiver_->SetNACKStatus(method, max_reordering_threshold);
return 0;
}
// Returns the currently configured retransmission mode.
int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
WEBRTC_TRACE(kTraceModuleCall,
@ -1126,7 +1536,7 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
"SendNACK(size:%u)", size);
uint16_t avg_rtt = 0;
rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL);
rtcp_receiver_.RTT(rtp_receiver_->SSRC(), NULL, &avg_rtt, NULL, NULL);
int64_t wait_time = 5 + ((avg_rtt * 3) >> 1); // 5 + RTT * 1.5.
if (wait_time == 5) {
@ -1165,15 +1575,13 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
}
nack_last_seq_number_sent_ = nack_list[start_id + nackLength - 1];
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpNack, &receive_stats, nackLength,
&nack_list[start_id]);
} else {
return rtcp_sender_.SendRTCP(kRtcpNack, NULL, nackLength,
&nack_list[start_id]);
}
switch (nack_method_) {
case kNackRtcp:
return rtcp_sender_.SendRTCP(kRtcpNack, nackLength, &nack_list[start_id]);
case kNackOff:
return -1;
};
return -1;
}
// Store the sent packets, needed to answer a negative acknowledgment
@ -1193,6 +1601,29 @@ int32_t ModuleRtpRtcpImpl::SetStorePacketsStatus(
return 0; // TODO(pwestin): change to void.
}
// Forward DTMFs to decoder for playout.
int ModuleRtpRtcpImpl::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"SetTelephoneEventForwardToDecoder(forward_to_decoder:%d)",
forward_to_decoder);
assert(audio_);
assert(rtp_telephone_event_handler_);
return rtp_telephone_event_handler_->SetTelephoneEventForwardToDecoder(
forward_to_decoder);
}
// Is forwarding of out-band telephone events turned on/off?
bool ModuleRtpRtcpImpl::TelephoneEventForwardToDecoder() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"TelephoneEventForwardToDecoder()");
assert(audio_);
assert(rtp_telephone_event_handler_);
return rtp_telephone_event_handler_->TelephoneEventForwardToDecoder();
}
// Send a TelephoneEvent tone using RFC 2833 (4733).
int32_t ModuleRtpRtcpImpl::SendTelephoneEventOutband(
const uint8_t key,
@ -1285,6 +1716,10 @@ int32_t ModuleRtpRtcpImpl::SendREDPayloadType(
return rtp_sender_.RED(&payload_type);
}
RtpVideoCodecTypes ModuleRtpRtcpImpl::ReceivedVideoCodec() const {
return rtp_receiver_->VideoCodecType();
}
RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
return rtp_sender_.VideoCodecType();
}
@ -1350,9 +1785,9 @@ int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
case kKeyFrameReqFirRtp:
return rtp_sender_.SendRTPIntraRequest();
case kKeyFrameReqPliRtcp:
return SendRTCP(kRtcpPli);
return rtcp_sender_.SendRTCP(kRtcpPli);
case kKeyFrameReqFirRtcp:
return SendRTCP(kRtcpFir);
return rtcp_sender_.SendRTCP(kRtcpFir);
}
return -1;
}
@ -1364,14 +1799,7 @@ int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
id_,
"SendRTCPSliceLossIndication (picture_id:%d)",
picture_id);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpSli, &receive_stats, 0, 0, false,
picture_id);
} else {
return rtcp_sender_.SendRTCP(kRtcpSli, NULL, 0, 0, false, picture_id);
}
return rtcp_sender_.SendRTCP(kRtcpSli, 0, 0, false, picture_id);
}
int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) {
@ -1495,7 +1923,7 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
}
if (kRtcpOff != rtcp_sender_.Status()) {
// Send RTCP bye on the current SSRC.
SendRTCP(kRtcpBye);
rtcp_sender_.SendRTCP(kRtcpBye);
}
// Change local SSRC and inform all objects about the new SSRC.
rtcp_sender_.SetSSRC(new_ssrc);
@ -1503,6 +1931,10 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
}
}
uint32_t ModuleRtpRtcpImpl::BitrateReceivedNow() const {
return rtp_receiver_->BitrateNow();
}
void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
@ -1564,19 +1996,12 @@ void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
}
void ModuleRtpRtcpImpl::OnRequestSendReport() {
SendRTCP(kRtcpSr);
rtcp_sender_.SendRTCP(kRtcpSr);
}
int32_t ModuleRtpRtcpImpl::SendRTCPReferencePictureSelection(
const uint64_t picture_id) {
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpRpsi, &receive_stats, 0, 0, false,
picture_id);
} else {
return rtcp_sender_.SendRTCP(kRtcpRpsi, NULL, 0, 0, false, picture_id);
}
return rtcp_sender_.SendRTCP(kRtcpRpsi, 0, 0, false, picture_id);
}
uint32_t ModuleRtpRtcpImpl::SendTimeOfSendReport(
@ -1591,7 +2016,7 @@ void ModuleRtpRtcpImpl::OnReceivedNACK(
return;
}
uint16_t avg_rtt = 0;
rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL);
rtcp_receiver_.RTT(rtp_receiver_->SSRC(), NULL, &avg_rtt, NULL, NULL);
rtp_sender_.OnReceivedNACK(nack_sequence_numbers, avg_rtt);
}
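SendNACK above derives its resend guard time from the average RTT with integer arithmetic: 5 + ((avg_rtt * 3) >> 1), i.e. roughly 5 ms plus 1.5 times the RTT, rounded down. A quick standalone check of the formula:

#include <cassert>
#include <cstdint>

int64_t NackWaitTimeMs(uint16_t avg_rtt_ms) {
  return 5 + ((avg_rtt_ms * 3) >> 1);  // 5 + RTT * 1.5, truncated.
}

int main() {
  assert(NackWaitTimeMs(0) == 5);      // No RTT estimate yet: minimum 5 ms.
  assert(NackWaitTimeMs(100) == 155);  // 5 + 150.
  assert(NackWaitTimeMs(33) == 54);    // 5 + (99 >> 1) = 5 + 49.
  return 0;
}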

View File

@ -17,6 +17,8 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -41,12 +43,72 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Receiver part.
// Configure a timeout value.
virtual int32_t SetPacketTimeout(const uint32_t rtp_timeout_ms,
const uint32_t rtcp_timeout_ms);
// Set periodic dead or alive notification.
virtual int32_t SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sample_time_seconds);
// Get periodic dead or alive notification status.
virtual int32_t PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sample_time_seconds);
virtual int32_t RegisterReceivePayload(const CodecInst& voice_codec);
virtual int32_t RegisterReceivePayload(const VideoCodec& video_codec);
virtual int32_t ReceivePayloadType(const CodecInst& voice_codec,
int8_t* pl_type);
virtual int32_t ReceivePayloadType(const VideoCodec& video_codec,
int8_t* pl_type);
virtual int32_t DeRegisterReceivePayload(
const int8_t payload_type);
// Get the currently configured SSRC filter.
virtual int32_t SSRCFilter(uint32_t& allowed_ssrc) const;
// Set an SSRC to be used as a filter for incoming RTP streams.
virtual int32_t SetSSRCFilter(const bool enable,
const uint32_t allowed_ssrc);
// Get last received remote timestamp.
virtual uint32_t RemoteTimestamp() const;
// Get the local time of the last received remote timestamp.
virtual int64_t LocalTimeOfRemoteTimeStamp() const;
// Get the current estimated remote timestamp.
virtual int32_t EstimatedRemoteTimeStamp(
uint32_t& timestamp) const;
virtual uint32_t RemoteSSRC() const;
virtual int32_t RemoteCSRCs(
uint32_t arr_of_csrc[kRtpCsrcSize]) const;
virtual int32_t SetRTXReceiveStatus(const bool enable,
const uint32_t ssrc);
virtual int32_t RTXReceiveStatus(bool* enable, uint32_t* ssrc,
int* payloadType) const;
virtual void SetRtxReceivePayloadType(int payload_type);
// Called when we receive an RTP packet.
virtual int32_t IncomingRtpPacket(const uint8_t* incoming_packet,
const uint16_t packet_length,
const RTPHeader& parsed_rtp_header);
// Called when we receive an RTCP packet.
virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
uint16_t incoming_packet_length);
virtual void SetRemoteSSRC(const uint32_t ssrc);
// Sender part.
virtual int32_t RegisterSendPayload(const CodecInst& voice_codec);
@ -177,11 +239,32 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Normal SR and RR are triggered via the process function.
virtual int32_t SendRTCP(uint32_t rtcp_packet_type = kRtcpReport);
// Our locally created statistics of the received RTP stream.
virtual int32_t StatisticsRTP(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* max_jitter = NULL) const;
// Reset RTP statistics.
virtual int32_t ResetStatisticsRTP();
virtual int32_t ResetReceiveDataCountersRTP();
virtual int32_t ResetSendDataCountersRTP();
// Statistics of the amount of data sent and received.
virtual int32_t DataCountersRTP(uint32_t* bytes_sent,
uint32_t* packets_sent) const;
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const;
virtual int32_t ReportBlockStatistics(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* jitter_transmission_time_offset);
// Get received RTCP report, sender info.
virtual int32_t RemoteRTCPStat(RTCPSenderInfo* sender_info);
@ -230,6 +313,13 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// (NACK) Negative acknowledgment part.
// Are negative acknowledgment requests on or off?
virtual NACKMethod NACK() const;
// Turn negative acknowledgment requests on/off.
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold);
virtual int SelectiveRetransmissions() const;
virtual int SetSelectiveRetransmissions(uint8_t settings);
@ -259,6 +349,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual int32_t SetAudioPacketSize(
const uint16_t packet_size_samples);
// Forward DTMFs to decoder for playout.
virtual int SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
// Is forwarding of out-band telephone events turned on/off?
virtual bool TelephoneEventForwardToDecoder() const;
virtual bool SendTelephoneEventActive(int8_t& telephone_event) const;
// Send a TelephoneEvent tone using RFC 2833 (4733).
@ -286,6 +382,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Video part.
virtual RtpVideoCodecTypes ReceivedVideoCodec() const;
virtual RtpVideoCodecTypes SendVideoCodec() const;
virtual int32_t SendRTCPSliceLossIndication(
@ -327,6 +425,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
uint32_t* fec_rate,
uint32_t* nackRate) const;
virtual void SetRemoteSSRC(const uint32_t ssrc);
virtual uint32_t SendTimeOfSendReport(const uint32_t send_report);
// Inform the sender that the RTP receiver is in a good state.
@ -356,6 +456,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
bool UpdateRTCPReceiveInformationTimers();
void ProcessDeadOrAliveTimer();
uint32_t BitrateReceivedNow() const;
// Get remote SequenceNumber.
@ -364,7 +466,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Only for internal testing.
uint32_t LastSendReport(uint32_t& last_rtcptime);
RTPPayloadRegistry rtp_payload_registry_;
RTPSender rtp_sender_;
scoped_ptr<RTPReceiver> rtp_receiver_;
RTCPSender rtcp_sender_;
RTCPReceiver rtcp_receiver_;
@ -374,13 +479,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
private:
int64_t RtcpReportInterval();
ReceiveStatistics* receive_statistics_;
RTPReceiverAudio* rtp_telephone_event_handler_;
int32_t id_;
const bool audio_;
bool collision_detected_;
int64_t last_process_time_;
int64_t last_bitrate_process_time_;
int64_t last_packet_timeout_process_time_;
int64_t last_rtt_process_time_;
uint16_t packet_overhead_;
@ -389,6 +495,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
ModuleRtpRtcpImpl* default_module_;
std::list<ModuleRtpRtcpImpl*> child_modules_;
// Dead or alive.
bool dead_or_alive_active_;
uint32_t dead_or_alive_timeout_ms_;
int64_t dead_or_alive_last_timer_;
// Send side
NACKMethod nack_method_;
uint32_t nack_last_time_sent_full_;
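The members restored above (rtp_payload_registry_, rtp_receiver_, rtp_telephone_event_handler_ and the dead-or-alive state) are wired up in the ModuleRtpRtcpImpl constructor shown earlier: an audio module builds an RTPReceiverAudio, which is also kept as the telephone-event handler, while a video module builds an RTPReceiverVideo and hands it the payload registry. A simplified sketch of that selection, with the real classes reduced to placeholders (illustrative only):

struct StrategyLike { virtual ~StrategyLike() {} };
struct AudioStrategyLike : StrategyLike {};   // Also used as the DTMF handler.
struct VideoStrategyLike : StrategyLike {
  explicit VideoStrategyLike(const void* payload_registry)
      : payload_registry_(payload_registry) {}
  const void* payload_registry_;
};

StrategyLike* CreateReceiverStrategy(bool audio, const void* payload_registry) {
  if (audio) {
    return new AudioStrategyLike();  // Stashed as the telephone-event handler too.
  }
  return new VideoStrategyLike(payload_registry);
}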

View File

@ -350,7 +350,7 @@ int32_t RTPSender::SendOutgoingData(
return 0;
}
}
RtpVideoCodecTypes video_type = kRtpVideoGeneric;
RtpVideoCodecTypes video_type = kRtpGenericVideo;
if (CheckPayloadType(payload_type, &video_type) != 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s invalid argument failed to find payload_type:%d",
@ -1178,7 +1178,7 @@ void RTPSender::SetSendingStatus(const bool enabled) {
}
frequency_hz = frequency;
} else {
frequency_hz = kVideoPayloadTypeFrequency;
frequency_hz = kDefaultVideoFrequency;
}
uint32_t RTPtime = ModuleRTPUtility::GetCurrentRTP(clock_, frequency_hz);
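The fallback restored here, kDefaultVideoFrequency, is the standard 90 kHz RTP clock for video, so RTP timestamps advance 90 ticks per millisecond of wall-clock time. A tiny worked example of that conversion (the helper name is made up):

#include <cstdint>

const uint32_t kVideoFrequencyHz = 90000;  // 90 kHz RTP clock for video.

uint32_t MsToRtpTicks(int64_t ms) {
  return static_cast<uint32_t>(ms * (kVideoFrequencyHz / 1000));
}
// Example: a 33 ms frame interval corresponds to 33 * 90 = 2970 RTP ticks.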

View File

@ -37,7 +37,7 @@ RTPSenderVideo::RTPSenderVideo(const int32_t id,
_rtpSender(*rtpSender),
_sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_videoType(kRtpGenericVideo),
_videoCodecInformation(NULL),
_maxBitrate(0),
_retransmissionSettings(kRetransmitBaseLayer),
@ -89,13 +89,13 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
ModuleRTPUtility::Payload*& payload) {
CriticalSectionScoped cs(_sendVideoCritsect);
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
RtpVideoCodecTypes videoType = kRtpGenericVideo;
if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3)) {
videoType = kRtpVideoVp8;
videoType = kRtpVp8Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpVideoGeneric;
videoType = kRtpGenericVideo;
} else {
videoType = kRtpVideoGeneric;
videoType = kRtpGenericVideo;
}
payload = new ModuleRTPUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
@ -302,11 +302,11 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
int32_t retVal = -1;
switch(videoType)
{
case kRtpVideoGeneric:
case kRtpGenericVideo:
retVal = SendGeneric(frameType, payloadType, captureTimeStamp,
capture_time_ms, payloadData, payloadSize);
break;
case kRtpVideoVp8:
case kRtpVp8Video:
retVal = SendVP8(frameType,
payloadType,
captureTimeStamp,

View File

@ -46,21 +46,6 @@
namespace webrtc {
RtpData* NullObjectRtpData() {
static NullRtpData null_rtp_data;
return &null_rtp_data;
}
RtpFeedback* NullObjectRtpFeedback() {
static NullRtpFeedback null_rtp_feedback;
return &null_rtp_feedback;
}
RtpAudioFeedback* NullObjectRtpAudioFeedback() {
static NullRtpAudioFeedback null_rtp_audio_feedback;
return &null_rtp_audio_feedback;
}
namespace ModuleRTPUtility {
enum {
@ -203,9 +188,9 @@ void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
type = videoType;
switch (type) {
case kRtpVideoGeneric:
case kRtpGenericVideo:
break;
case kRtpVideoVp8: {
case kRtpVp8Video: {
info.VP8.nonReferenceFrame = false;
info.VP8.beginningOfPartition = false;
info.VP8.partitionID = 0;
@ -582,9 +567,9 @@ bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
parsedPacket.SetType(_videoType);
switch (_videoType) {
case kRtpVideoGeneric:
case kRtpGenericVideo:
return ParseGeneric(parsedPacket);
case kRtpVideoVp8:
case kRtpVp8Video:
return ParseVP8(parsedPacket);
default:
return false;

View File

@ -19,13 +19,15 @@
#include "webrtc/typedefs.h"
namespace webrtc {
enum RtpVideoCodecTypes
{
kRtpGenericVideo = 0,
kRtpFecVideo = 10,
kRtpVp8Video = 11
};
const uint8_t kRtpMarkerBitMask = 0x80;
RtpData* NullObjectRtpData();
RtpFeedback* NullObjectRtpFeedback();
RtpAudioFeedback* NullObjectRtpAudioFeedback();
namespace ModuleRTPUtility
{
// January 1970, in NTP seconds.
@ -34,6 +36,22 @@ namespace ModuleRTPUtility
// Magic NTP fractional unit.
const double NTP_FRAC = 4.294967296E+9;
struct AudioPayload
{
uint32_t frequency;
uint8_t channels;
uint32_t rate;
};
struct VideoPayload
{
RtpVideoCodecTypes videoCodecType;
uint32_t maxRate;
};
union PayloadUnion
{
AudioPayload Audio;
VideoPayload Video;
};
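PayloadUnion above is a plain union tagged only by context: audio code reads the Audio member, video code reads the Video member and its videoCodecType. A minimal usage sketch (the rate value is illustrative):

#include <cstring>
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"

void FillVideoPayload() {
  webrtc::ModuleRTPUtility::PayloadUnion payload;
  std::memset(&payload, 0, sizeof(payload));
  payload.Video.videoCodecType = webrtc::kRtpVp8Video;  // Video side of the union.
  payload.Video.maxRate = 500;                          // Illustrative max rate.
  // Audio code would fill payload.Audio instead; the surrounding media type
  // determines which member is meaningful.
}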
struct Payload
{
char name[RTP_PAYLOAD_NAME_SIZE];

View File

@ -76,13 +76,13 @@ TEST(ParseVP8Test, BasicHeader) {
payload[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
payload[1] = 0x01; // P frame.
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
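The 0x14 test byte above decodes cleanly under the VP8 payload descriptor layout these tests imply (X in bit 7, N in bit 5, S in bit 4, PartID in the low four bits). A tiny decoder for that first descriptor byte:

#include <cstdint>
#include <cstdio>

int main() {
  const uint8_t first_byte = 0x14;                // From the test above.
  const bool x_bit = (first_byte & 0x80) != 0;    // Extension byte present.
  const bool n_bit = (first_byte & 0x20) != 0;    // Non-reference frame.
  const bool s_bit = (first_byte & 0x10) != 0;    // Start of VP8 partition.
  const uint8_t part_id = first_byte & 0x0F;      // Partition index.
  std::printf("X=%d N=%d S=%d PartID=%d\n", x_bit, n_bit, s_bit, part_id);
  // Prints X=0 N=0 S=1 PartID=4, matching the "S = 1, PartID = 4" comment.
  return 0;
}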
@ -97,13 +97,13 @@ TEST(ParseVP8Test, PictureID) {
payload[1] = 0x80;
payload[2] = 17;
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
@ -117,7 +117,7 @@ TEST(ParseVP8Test, PictureID) {
// Re-use payload, but change to long PictureID.
payload[2] = 0x80 | 17;
payload[3] = 17;
RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10, 0);
RTPPayloadParser rtpPayloadParser2(kRtpVp8Video, payload, 10, 0);
ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
@ -136,13 +136,13 @@ TEST(ParseVP8Test, Tl0PicIdx) {
payload[1] = 0x40;
payload[2] = 17;
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 13, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 1 /*L*/, 0 /*T*/, 0 /*K*/);
@ -159,13 +159,13 @@ TEST(ParseVP8Test, TIDAndLayerSync) {
payload[1] = 0x20;
payload[2] = 0x80; // TID(2) + LayerSync(false)
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 1 /*T*/, 0 /*K*/);
@ -183,13 +183,13 @@ TEST(ParseVP8Test, KeyIdx) {
payload[1] = 0x10; // K = 1.
payload[2] = 0x11; // KEYIDX = 17 decimal.
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 1 /*K*/);
@ -209,13 +209,13 @@ TEST(ParseVP8Test, MultipleExtensions) {
payload[4] = 42; // Tl0PicIdx.
payload[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 1 /*L*/, 1 /*T*/, 1 /*K*/);
@ -236,7 +236,7 @@ TEST(ParseVP8Test, TooShortHeader) {
payload[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
payload[3] = 17; // PictureID, low 8 bits.
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
RTPPayload parsedPacket;
EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
@ -258,13 +258,13 @@ TEST(ParseVP8Test, TestWithPacketizer) {
ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
ASSERT_TRUE(last);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes, 0);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, packet, send_bytes, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8,
inputHeader.nonReference /*N*/,

View File

@ -33,10 +33,6 @@ class RtpRtcpAPITest : public ::testing::Test {
configuration.audio = true;
configuration.clock = &fake_clock;
module = RtpRtcp::CreateRtpRtcp(configuration);
rtp_payload_registry_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_receiver_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, NULL, NULL, NULL, rtp_payload_registry_.get()));
}
virtual void TearDown() {
@ -44,8 +40,6 @@ class RtpRtcpAPITest : public ::testing::Test {
}
int test_id;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* module;
uint32_t test_ssrc;
uint32_t test_timestamp;
@ -109,9 +103,9 @@ TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_EQ(0, module->SetTMMBRStatus(false));
EXPECT_FALSE(module->TMMBR());
EXPECT_EQ(kNackOff, rtp_receiver_->NACK());
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(kNackRtcp, rtp_receiver_->NACK());
EXPECT_EQ(kNackOff, module->NACK());
EXPECT_EQ(0, module->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(kNackRtcp, module->NACK());
}
TEST_F(RtpRtcpAPITest, RTXSender) {
@ -135,7 +129,7 @@ TEST_F(RtpRtcpAPITest, RTXSender) {
EXPECT_EQ(0, module->SetRTXSendStatus(kRtxRetransmitted, false, 1));
EXPECT_EQ(0, module->RTXSendStatus(&rtx_mode, &ssrc, &payload_type));
EXPECT_EQ(kRtxRetransmitted, rtx_mode);
EXPECT_EQ(kRtxPayloadType, payload_type);
EXPECT_EQ(kRtxPayloadType ,payload_type);
}
TEST_F(RtpRtcpAPITest, RTXReceiver) {
@ -143,14 +137,14 @@ TEST_F(RtpRtcpAPITest, RTXReceiver) {
unsigned int ssrc = 0;
const int kRtxPayloadType = 119;
int payload_type = -1;
rtp_receiver_->SetRTXStatus(true, 1);
rtp_receiver_->SetRtxPayloadType(kRtxPayloadType);
rtp_receiver_->RTXStatus(&enable, &ssrc, &payload_type);
EXPECT_EQ(0, module->SetRTXReceiveStatus(true, 1));
module->SetRtxReceivePayloadType(kRtxPayloadType);
EXPECT_EQ(0, module->RTXReceiveStatus(&enable, &ssrc, &payload_type));
EXPECT_TRUE(enable);
EXPECT_EQ(1u, ssrc);
EXPECT_EQ(kRtxPayloadType ,payload_type);
rtp_receiver_->SetRTXStatus(false, 0);
rtp_receiver_->RTXStatus(&enable, &ssrc, &payload_type);
EXPECT_EQ(0, module->SetRTXReceiveStatus(false, 0));
EXPECT_EQ(0, module->RTXReceiveStatus(&enable, &ssrc, &payload_type));
EXPECT_FALSE(enable);
EXPECT_EQ(kRtxPayloadType ,payload_type);
}

View File

@ -10,10 +10,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -27,18 +24,10 @@ class LoopBackTransport : public webrtc::Transport {
LoopBackTransport()
: _count(0),
_packetLoss(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
_rtpRtcpModule(NULL) {
}
void SetSendModule(RtpRtcp* rtpRtcpModule,
RTPPayloadRegistry* payload_registry,
RtpReceiver* receiver,
ReceiveStatistics* receive_statistics) {
void SetSendModule(RtpRtcp* rtpRtcpModule) {
_rtpRtcpModule = rtpRtcpModule;
rtp_payload_registry_ = payload_registry;
rtp_receiver_ = receiver;
receive_statistics_ = receive_statistics;
}
void DropEveryNthPacket(int n) {
_packetLoss = n;
@ -55,15 +44,8 @@ class LoopBackTransport : public webrtc::Transport {
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return -1;
}
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
receive_statistics_->IncomingPacket(header, len, false, true);
if (!rtp_receiver_->IncomingRtpPacket(&header,
static_cast<const uint8_t*>(data),
len, payload_specific, true)) {
if (_rtpRtcpModule->IncomingRtpPacket(static_cast<const uint8_t*>(data),
len, header) < 0) {
return -1;
}
return len;
@ -77,13 +59,10 @@ class LoopBackTransport : public webrtc::Transport {
private:
int _count;
int _packetLoss;
ReceiveStatistics* receive_statistics_;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* _rtpRtcpModule;
};
class TestRtpReceiver : public NullRtpData {
class RtpReceiver : public RtpData {
public:
virtual int32_t OnReceivedPayloadData(

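The SendPacket() hunk in this file gives the clearest view of how the receive path changes shape with the revert. A condensed sketch of the two loop-back delivery paths, limited to the calls and member names shown above (parser is the RtpHeaderParser already used in that hunk); sketch only, not compiled:

    RTPHeader header;
    if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header))
      return -1;

    // r4301 (being removed): the caller resolves the payload type, updates
    // receive statistics, then hands the packet to the RtpReceiver.
    PayloadUnion payload_specific;
    if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
                                                    &payload_specific))
      return -1;
    receive_statistics_->IncomingPacket(header, len, false, true);
    rtp_receiver_->IncomingRtpPacket(&header, static_cast<const uint8_t*>(data),
                                     len, payload_specific, true);

    // This revert (being restored): a single call into the RtpRtcp module
    // replaces all three steps.
    _rtpRtcpModule->IncomingRtpPacket(static_cast<const uint8_t*>(data), len, header);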
View File

@ -17,13 +17,12 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
using namespace webrtc;
#define test_rate 64000u
class VerifyingAudioReceiver : public NullRtpData {
class VerifyingAudioReceiver : public RtpData {
public:
virtual int32_t OnReceivedPayloadData(
const uint8_t* payloadData,
@ -59,7 +58,7 @@ class VerifyingAudioReceiver : public NullRtpData {
}
};
class RTPCallback : public NullRtpFeedback {
class RTPCallback : public RtpFeedback {
public:
virtual int32_t OnInitializeDecoder(
const int32_t id,
@ -74,9 +73,24 @@ class RTPCallback : public NullRtpFeedback {
}
return 0;
}
virtual void OnPacketTimeout(const int32_t id) {
}
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) {
}
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {
}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
}
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) {
}
};
class AudioFeedback : public NullRtpAudioFeedback {
class AudioFeedback : public RtpAudioFeedback {
virtual void OnReceivedTelephoneEvent(const int32_t id,
const uint8_t event,
const bool end) {
@ -96,6 +110,11 @@ class AudioFeedback : public NullRtpAudioFeedback {
expectedEvent = 32;
}
}
virtual void OnPlayTelephoneEvent(const int32_t id,
const uint8_t event,
const uint16_t lengthMs,
const uint8_t volume) {
};
};
class RtpRtcpAudioTest : public ::testing::Test {
@ -118,41 +137,26 @@ class RtpRtcpAudioTest : public ::testing::Test {
transport1 = new LoopBackTransport();
transport2 = new LoopBackTransport();
receive_statistics1_.reset(ReceiveStatistics::Create(&fake_clock));
receive_statistics2_.reset(ReceiveStatistics::Create(&fake_clock));
rtp_payload_registry1_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_payload_registry2_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
RtpRtcp::Configuration configuration;
configuration.id = test_id;
configuration.audio = true;
configuration.clock = &fake_clock;
configuration.receive_statistics = receive_statistics1_.get();
configuration.incoming_data = data_receiver1;
configuration.outgoing_transport = transport1;
configuration.audio_messages = audioFeedback;
module1 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver1_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, audioFeedback, data_receiver1, NULL,
rtp_payload_registry1_.get()));
configuration.id = test_id + 1;
configuration.receive_statistics = receive_statistics2_.get();
configuration.incoming_data = data_receiver2;
configuration.incoming_messages = rtp_callback;
configuration.outgoing_transport = transport2;
configuration.audio_messages = audioFeedback;
module2 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver2_.reset(RtpReceiver::CreateAudioReceiver(
test_id + 1, &fake_clock, audioFeedback, data_receiver2, NULL,
rtp_payload_registry2_.get()));
transport1->SetSendModule(module2, rtp_payload_registry2_.get(),
rtp_receiver2_.get(), receive_statistics2_.get());
transport2->SetSendModule(module1, rtp_payload_registry1_.get(),
rtp_receiver1_.get(), receive_statistics1_.get());
transport1->SetSendModule(module2);
transport2->SetSendModule(module1);
}
virtual void TearDown() {
@ -169,12 +173,6 @@ class RtpRtcpAudioTest : public ::testing::Test {
int test_id;
RtpRtcp* module1;
RtpRtcp* module2;
scoped_ptr<ReceiveStatistics> receive_statistics1_;
scoped_ptr<ReceiveStatistics> receive_statistics2_;
scoped_ptr<RtpReceiver> rtp_receiver1_;
scoped_ptr<RtpReceiver> rtp_receiver2_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
VerifyingAudioReceiver* data_receiver1;
VerifyingAudioReceiver* data_receiver2;
LoopBackTransport* transport1;
@ -193,93 +191,63 @@ TEST_F(RtpRtcpAudioTest, Basic) {
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
// Test detection at the end of a DTMF tone.
//EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
EXPECT_EQ(0, module1->SetSendingStatus(true));
// Start basic RTP test.
// Send an empty RTP packet.
// Should fail since we have not registered the payload type.
// Should fail since we have not registerd the payload type.
EXPECT_EQ(-1, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
96, 0, -1, NULL, 0));
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
printf("4\n");
const uint8_t test[5] = "test";
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
0, -1, test, 4));
EXPECT_EQ(test_ssrc, rtp_receiver2_->SSRC());
EXPECT_EQ(test_timestamp, rtp_receiver2_->TimeStamp());
EXPECT_EQ(test_ssrc, module2->RemoteSSRC());
EXPECT_EQ(test_timestamp, module2->RemoteTimestamp());
}
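The Basic test above also pairs up the payload-registration and remote-state calls affected by the revert. A compact restatement, using only calls visible in this file (voice_codec and voiceCodec are the CodecInst values filled in above); sketch only:

    // r4301 (being removed): the RtpReceiver registers payloads field by field
    // and is queried directly for the remote SSRC and timestamp.
    rtp_receiver2_->RegisterReceivePayload(voice_codec.plname, voice_codec.pltype,
                                           voice_codec.plfreq, voice_codec.channels,
                                           (voice_codec.rate < 0) ? 0 : voice_codec.rate);
    uint32_t remote_ssrc = rtp_receiver2_->SSRC();
    uint32_t remote_timestamp = rtp_receiver2_->TimeStamp();

    // This revert (being restored): the module takes the whole CodecInst and
    // answers the same queries itself.
    module2->RegisterReceivePayload(voiceCodec);
    remote_ssrc = module2->RemoteSSRC();
    remote_timestamp = module2->RemoteTimestamp();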
TEST_F(RtpRtcpAudioTest, RED) {
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
EXPECT_EQ(0, module1->SetSendingStatus(true));
voice_codec.pltype = 127;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "RED", 4);
voiceCodec.pltype = 127;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "RED", 4);
EXPECT_EQ(0, module1->SetSendREDPayloadType(voice_codec.pltype));
EXPECT_EQ(0, module1->SetSendREDPayloadType(voiceCodec.pltype));
int8_t red = 0;
EXPECT_EQ(0, module1->SendREDPayloadType(red));
EXPECT_EQ(voice_codec.pltype, red);
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(voiceCodec.pltype, red);
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
RTPFragmentationHeader fragmentation;
fragmentation.fragmentationVectorSize = 2;
@ -307,43 +275,28 @@ TEST_F(RtpRtcpAudioTest, RED) {
}
TEST_F(RtpRtcpAudioTest, DTMF) {
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
EXPECT_EQ(0, module1->SetSendingStatus(true));
// Prepare for DTMF.
voice_codec.pltype = 97;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "telephone-event", 16);
voiceCodec.pltype = 97;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "telephone-event", 16);
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
// Start DTMF test.
uint32_t timeStamp = 160;

View File

@ -14,10 +14,8 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
using namespace webrtc;
@ -70,20 +68,6 @@ class RtcpCallback : public RtcpFeedback, public RtcpIntraFrameObserver {
RtpRtcp* _rtpRtcpModule;
};
class TestRtpFeedback : public NullRtpFeedback {
public:
TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
rtp_rtcp_->SetRemoteSSRC(SSRC);
}
private:
RtpRtcp* rtp_rtcp_;
};
class RtpRtcpRtcpTest : public ::testing::Test {
protected:
RtpRtcpRtcpTest() : fake_clock(123456) {
@ -97,55 +81,31 @@ class RtpRtcpRtcpTest : public ::testing::Test {
~RtpRtcpRtcpTest() {}
virtual void SetUp() {
receiver = new TestRtpReceiver();
receiver = new RtpReceiver();
transport1 = new LoopBackTransport();
transport2 = new LoopBackTransport();
myRTCPFeedback1 = new RtcpCallback();
myRTCPFeedback2 = new RtcpCallback();
receive_statistics1_.reset(ReceiveStatistics::Create(&fake_clock));
receive_statistics2_.reset(ReceiveStatistics::Create(&fake_clock));
RtpRtcp::Configuration configuration;
configuration.id = test_id;
configuration.audio = true;
configuration.clock = &fake_clock;
configuration.receive_statistics = receive_statistics1_.get();
configuration.outgoing_transport = transport1;
configuration.rtcp_feedback = myRTCPFeedback1;
configuration.intra_frame_callback = myRTCPFeedback1;
rtp_payload_registry1_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_payload_registry2_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
configuration.incoming_data = receiver;
module1 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_feedback1_.reset(new TestRtpFeedback(module1));
rtp_receiver1_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, NULL, receiver, rtp_feedback1_.get(),
rtp_payload_registry1_.get()));
configuration.receive_statistics = receive_statistics2_.get();
configuration.id = test_id + 1;
configuration.outgoing_transport = transport2;
configuration.rtcp_feedback = myRTCPFeedback2;
configuration.intra_frame_callback = myRTCPFeedback2;
module2 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_feedback2_.reset(new TestRtpFeedback(module2));
rtp_receiver2_.reset(RtpReceiver::CreateAudioReceiver(
test_id + 1, &fake_clock, NULL, receiver, rtp_feedback2_.get(),
rtp_payload_registry2_.get()));
transport1->SetSendModule(module2, rtp_payload_registry2_.get(),
rtp_receiver2_.get(), receive_statistics2_.get());
transport2->SetSendModule(module1, rtp_payload_registry1_.get(),
rtp_receiver1_.get(), receive_statistics1_.get());
transport1->SetSendModule(module2);
transport2->SetSendModule(module1);
myRTCPFeedback1->SetModule(module1);
myRTCPFeedback2->SetModule(module2);
@ -161,26 +121,16 @@ class RtpRtcpRtcpTest : public ::testing::Test {
EXPECT_EQ(0, module1->SetSendingStatus(true));
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
voice_codec.rate = 64000;
memcpy(voice_codec.plname, "PCMU", 5);
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
voiceCodec.rate = 64000;
memcpy(voiceCodec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
// We need to send one RTP packet to get the RTCP packet to be accepted by
// the receiving module.
@ -201,17 +151,9 @@ class RtpRtcpRtcpTest : public ::testing::Test {
}
int test_id;
scoped_ptr<TestRtpFeedback> rtp_feedback1_;
scoped_ptr<TestRtpFeedback> rtp_feedback2_;
scoped_ptr<ReceiveStatistics> receive_statistics1_;
scoped_ptr<ReceiveStatistics> receive_statistics2_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
scoped_ptr<RtpReceiver> rtp_receiver1_;
scoped_ptr<RtpReceiver> rtp_receiver2_;
RtpRtcp* module1;
RtpRtcp* module2;
TestRtpReceiver* receiver;
RtpReceiver* receiver;
LoopBackTransport* transport1;
LoopBackTransport* transport2;
RtcpCallback* myRTCPFeedback1;
@ -231,7 +173,7 @@ TEST_F(RtpRtcpRtcpTest, RTCP_PLI_RPSI) {
TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
uint32_t testOfCSRC[webrtc::kRtpCsrcSize];
EXPECT_EQ(2, rtp_receiver2_->CSRCs(testOfCSRC));
EXPECT_EQ(2, module2->RemoteCSRCs(testOfCSRC));
EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
@ -250,10 +192,10 @@ TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
module2->Process();
char cName[RTCP_CNAME_SIZE];
EXPECT_EQ(-1, module2->RemoteCNAME(rtp_receiver2_->SSRC() + 1, cName));
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
// Check multiple CNAME.
EXPECT_EQ(0, module2->RemoteCNAME(rtp_receiver2_->SSRC(), cName));
EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
@ -265,7 +207,7 @@ TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
EXPECT_EQ(0, module1->SetSendingStatus(false));
// Test that BYE clears the CNAME
EXPECT_EQ(-1, module2->RemoteCNAME(rtp_receiver2_->SSRC(), cName));
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
}
TEST_F(RtpRtcpRtcpTest, RTCP) {
@ -334,12 +276,20 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
EXPECT_EQ(static_cast<uint32_t>(0),
reportBlockReceived.cumulativeLost);
ReceiveStatistics::RtpReceiveStatistics stats;
EXPECT_TRUE(receive_statistics2_->Statistics(&stats, true));
EXPECT_EQ(0, stats.fraction_lost);
EXPECT_EQ((uint32_t)0, stats.cumulative_lost);
EXPECT_EQ(test_sequence_number, stats.extended_max_sequence_number);
EXPECT_EQ(reportBlockReceived.jitter, stats.jitter);
uint8_t fraction_lost = 0; // scale 0 to 255
uint32_t cum_lost = 0; // number of lost packets
uint32_t ext_max = 0; // highest sequence number received
uint32_t jitter = 0;
uint32_t max_jitter = 0;
EXPECT_EQ(0, module2->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter));
EXPECT_EQ(0, fraction_lost);
EXPECT_EQ((uint32_t)0, cum_lost);
EXPECT_EQ(test_sequence_number, ext_max);
EXPECT_EQ(reportBlockReceived.jitter, jitter);
uint16_t RTT;
uint16_t avgRTT;

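The RTCP test above ends with the receive-statistics query that the revert also reshapes. The two forms, restated with only the names shown in that hunk (receive_statistics2_ and module2 are fixture members); sketch only, not compiled:

    // r4301 (being removed): a ReceiveStatistics object fills a struct.
    ReceiveStatistics::RtpReceiveStatistics stats;
    receive_statistics2_->Statistics(&stats, true);
    // stats.fraction_lost, stats.cumulative_lost,
    // stats.extended_max_sequence_number, stats.jitter

    // This revert (being restored): the module fills individual out-parameters.
    uint8_t fraction_lost = 0;  // scale 0 to 255
    uint32_t cum_lost = 0, ext_max = 0, jitter = 0, max_jitter = 0;
    module2->StatisticsRTP(&fraction_lost, &cum_lost, &ext_max, &jitter, &max_jitter);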
View File

@ -28,7 +28,6 @@ class RtpRtcpVideoTest : public ::testing::Test {
protected:
RtpRtcpVideoTest()
: test_id_(123),
rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
test_ssrc_(3456),
test_timestamp_(4567),
test_sequence_number_(2345),
@ -38,26 +37,23 @@ class RtpRtcpVideoTest : public ::testing::Test {
virtual void SetUp() {
transport_ = new LoopBackTransport();
receiver_ = new TestRtpReceiver();
receive_statistics_.reset(ReceiveStatistics::Create(&fake_clock));
receiver_ = new RtpReceiver();
RtpRtcp::Configuration configuration;
configuration.id = test_id_;
configuration.audio = false;
configuration.clock = &fake_clock;
configuration.incoming_data = receiver_;
configuration.outgoing_transport = transport_;
video_module_ = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
test_id_, &fake_clock, receiver_, NULL, &rtp_payload_registry_));
EXPECT_EQ(0, video_module_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, video_module_->SetSSRC(test_ssrc_));
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, video_module_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, video_module_->SetStorePacketsStatus(true, 600));
EXPECT_EQ(0, video_module_->SetSendingStatus(true));
transport_->SetSendModule(video_module_, &rtp_payload_registry_,
rtp_receiver_.get(), receive_statistics_.get());
transport_->SetSendModule(video_module_);
VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
@ -65,11 +61,7 @@ class RtpRtcpVideoTest : public ::testing::Test {
memcpy(video_codec.plName, "I420", 5);
EXPECT_EQ(0, video_module_->RegisterSendPayload(video_codec));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate));
EXPECT_EQ(0, video_module_->RegisterReceivePayload(video_codec));
payload_data_length_ = sizeof(video_frame_);
@ -127,12 +119,9 @@ class RtpRtcpVideoTest : public ::testing::Test {
}
int test_id_;
scoped_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* video_module_;
LoopBackTransport* transport_;
TestRtpReceiver* receiver_;
RtpReceiver* receiver_;
uint32_t test_ssrc_;
uint32_t test_timestamp_;
uint16_t test_sequence_number_;
@ -160,11 +149,7 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
codec.codecType = kVideoCodecVP8;
codec.plType = kPayloadType;
strncpy(codec.plName, "VP8", 4);
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(codec.plName,
codec.plType,
90000,
0,
codec.maxBitrate));
EXPECT_EQ(0, video_module_->RegisterReceivePayload(codec));
for (int frame_idx = 0; frame_idx < 10; ++frame_idx) {
for (int packet_idx = 0; packet_idx < 5; ++packet_idx) {
int packet_size = PaddingPacket(padding_packet, timestamp, seq_num,
@ -173,12 +158,8 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
RTPHeader header;
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(padding_packet, packet_size, &header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
&payload_specific));
EXPECT_TRUE(rtp_receiver_->IncomingRtpPacket(&header, padding_packet,
packet_size,
payload_specific, true));
EXPECT_EQ(0, video_module_->IncomingRtpPacket(padding_packet,
packet_size, header));
EXPECT_EQ(0, receiver_->payload_size());
EXPECT_EQ(packet_size - 12, receiver_->rtp_header().header.paddingLength);
}

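The video API test above shows the same registration split for VideoCodec-based payloads. Pairing the two calls visible in this file (90000 is the clock rate passed explicitly in the removed form); sketch only:

    // r4301 (being removed): the RtpReceiver takes individual fields.
    rtp_receiver_->RegisterReceivePayload(video_codec.plName, video_codec.plType,
                                          90000, 0, video_codec.maxBitrate);

    // This revert (being restored): the module takes the VideoCodec directly.
    video_module_->RegisterReceivePayload(video_codec);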
View File

@ -119,7 +119,7 @@ class VCMSendStatisticsCallback {
// Callback class used for informing the user of the incoming bit rate and frame rate.
class VCMReceiveStatisticsCallback {
public:
virtual int32_t OnReceiveStatisticsUpdate(const uint32_t bitRate,
virtual int32_t ReceiveStatistics(const uint32_t bitRate,
const uint32_t frameRate) = 0;
protected:

View File

@ -37,7 +37,7 @@ TEST(TestDecodingState, FrameContinuity) {
packet->timestamp = 1;
packet->seqNum = 0xffff;
packet->frameType = kVideoFrameDelta;
packet->codecSpecificHeader.codec = kRtpVideoVp8;
packet->codecSpecificHeader.codec = kRTPVideoVP8;
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
frame.InsertPacket(*packet, 0, false, 0);
// Always start with a key frame.
@ -208,7 +208,7 @@ TEST(TestDecodingState, MultiLayerBehavior) {
VCMFrameBuffer frame;
VCMPacket* packet = new VCMPacket();
packet->frameType = kVideoFrameDelta;
packet->codecSpecificHeader.codec = kRtpVideoVp8;
packet->codecSpecificHeader.codec = kRTPVideoVP8;
packet->timestamp = 0;
packet->seqNum = 0;
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
@ -361,7 +361,7 @@ TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) {
VCMPacket packet;
frame.Reset();
packet.frameType = kVideoFrameKey;
packet.codecSpecificHeader.codec = kRtpVideoVp8;
packet.codecSpecificHeader.codec = kRTPVideoVP8;
packet.timestamp = 0;
packet.seqNum = 0;
packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;

View File

@ -104,7 +104,7 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
{
switch (header->codec)
{
case kRtpVideoVp8:
case kRTPVideoVP8:
{
if (_codecSpecificInfo.codecType != kVideoCodecVP8)
{

View File

@ -94,7 +94,7 @@ void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader)
{
switch(videoHeader.codec)
{
case kRtpVideoVp8:
case kRTPVideoVP8:
{
// Handle all packets within a frame as depending on the previous packet
// TODO(holmer): This should be changed to make fragments independent
@ -111,7 +111,7 @@ void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader)
codec = kVideoCodecVP8;
break;
}
case kRtpVideoI420:
case kRTPVideoI420:
{
codec = kVideoCodecI420;
break;

View File

@ -50,35 +50,35 @@ int VCMSessionInfo::HighSequenceNumber() const {
int VCMSessionInfo::PictureId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
return kNoPictureId;
return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
}
int VCMSessionInfo::TemporalId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
return kNoTemporalIdx;
return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
}
bool VCMSessionInfo::LayerSync() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
return false;
return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
}
int VCMSessionInfo::Tl0PicId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
return kNoTl0PicIdx;
return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
}
bool VCMSessionInfo::NonReference() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
return false;
return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
}

View File

@ -66,7 +66,7 @@ class TestVP8Partitions : public TestSessionInfo {
TestSessionInfo::SetUp();
vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
packet_header_.frameType = kVideoFrameDelta;
packet_header_.type.Video.codec = kRtpVideoVp8;
packet_header_.type.Video.codec = kRTPVideoVP8;
vp8_header_->InitRTPVideoHeaderVP8();
fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
}

View File

@ -157,8 +157,7 @@ VideoCodingModuleImpl::Process()
uint32_t bitRate;
uint32_t frameRate;
_receiver.ReceiveStatistics(&bitRate, &frameRate);
_receiveStatsCallback->OnReceiveStatisticsUpdate(bitRate,
frameRate);
_receiveStatsCallback->ReceiveStatistics(bitRate, frameRate);
}
// Size of render buffer.

View File

@ -192,7 +192,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyFrames) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRtpVideoVp8;
header.type.Video.codec = kRTPVideoVP8;
for (int i = 0; i < 10; ++i) {
EXPECT_CALL(packet_request_callback_, ResendPackets(_, _))
.Times(0);
@ -216,7 +216,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyFramesWithLosses) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRtpVideoVp8;
header.type.Video.codec = kRTPVideoVP8;
// Insert one video frame to get one frame decoded.
header.frameType = kVideoFrameKey;
header.type.Video.isFirstPacket = true;
@ -270,7 +270,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyAndVideo) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRtpVideoVp8;
header.type.Video.codec = kRTPVideoVP8;
header.type.Video.codecHeader.VP8.pictureId = -1;
header.type.Video.codecHeader.VP8.tl0PicIdx = -1;
for (int i = 0; i < 3; ++i) {

View File

@ -67,7 +67,7 @@ class VCMRobustnessTest : public ::testing::Test {
rtp_info.header.sequenceNumber = seq_no;
rtp_info.header.markerBit = marker_bit;
rtp_info.header.payloadType = video_codec_.plType;
rtp_info.type.Video.codec = kRtpVideoVp8;
rtp_info.type.Video.codec = kRTPVideoVP8;
rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtp_info.type.Video.isFirstPacket = first;

View File

@ -151,7 +151,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
// Testing with VP8.
VideoCodingModule::Codec(kVideoCodecVP8, &sendCodec);
_vcm->RegisterSendCodec(&sendCodec, 1, 1440);
_encodeCompleteCallback->SetCodecType(kRtpVideoVp8);
_encodeCompleteCallback->SetCodecType(kRTPVideoVP8);
_vcm->InitializeReceiver();
TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK );
_vcm->InitializeSender();
@ -196,7 +196,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
VideoCodingModule::Codec(kVideoCodecVP8, &vp8EncSettings);
_vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
_encodeCallback->RegisterReceiverVCM(_vcm);
_encodeCallback->SetCodecType(kRtpVideoVp8);
_encodeCallback->SetCodecType(kRTPVideoVP8);
TEST(_vcm->RegisterExternalEncoder(encoder, vp8EncSettings.plType) == VCM_OK);
TEST(_vcm->RegisterSendCodec(&vp8EncSettings, 4, 1440) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
@ -232,7 +232,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
TEST(_vcm->RegisterReceiveCodec(&receiveCodec, 1, true) == VCM_OK); // Require key frame
_vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
_encodeCallback->RegisterReceiverVCM(_vcm);
_encodeCallback->SetCodecType(kRtpVideoVp8);
_encodeCallback->SetCodecType(kRTPVideoVP8);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
TEST(_vcm->ResetDecoder() == VCM_OK);

View File

@ -549,7 +549,7 @@ VCMEncComplete_KeyReqTest::SendData(
WebRtcRTPHeader rtpInfo;
rtpInfo.header.markerBit = true; // end of frame
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codec = kRtpVideoVp8;
rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.header.payloadType = payloadType;
rtpInfo.header.sequenceNumber = _seqNo;
_seqNo += 2;

View File

@ -18,7 +18,6 @@
#include <time.h>
#include <vector>
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/modules/video_coding/main/test/test_util.h"
@ -203,6 +202,7 @@ MediaOptTest::GeneralSetup()
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
configuration.incoming_data = _dataCallback;
configuration.outgoing_transport = _outgoingTransport;
_rtp = RtpRtcp::CreateRtpRtcp(configuration);
@ -211,33 +211,21 @@ MediaOptTest::GeneralSetup()
// Registering codecs for the RTP module
// Register receive and send payload
VideoCodec video_codec;
strncpy(video_codec.plName, "VP8", 32);
video_codec.plType = VCM_VP8_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
VideoCodec videoCodec;
strncpy(videoCodec.plName, "VP8", 32);
videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
strncpy(video_codec.plName, "ULPFEC", 32);
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
strncpy(videoCodec.plName, "ULPFEC", 32);
videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
strncpy(video_codec.plName, "RED", 32);
video_codec.plType = VCM_RED_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
if (_nackFecEnabled == 1)
_rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE,

View File

@ -53,7 +53,6 @@ public:
private:
webrtc::VideoCodingModule* _vcm;
webrtc::RtpReceiver* rtp_receiver_;
webrtc::RtpRtcp* _rtp;
webrtc::RTPSendCompleteCallback* _outgoingTransport;
RtpDataCallback* _dataCallback;

View File

@ -16,8 +16,6 @@
#include <string.h>
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/test/media_opt_test.h"
@ -154,46 +152,29 @@ int MTRxTxTest(CmdArgs& args)
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
configuration.incoming_data = &dataCallback;
configuration.outgoing_transport = outgoingTransport;
RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(configuration);
scoped_ptr<RTPPayloadRegistry> registry(new RTPPayloadRegistry(
-1, RTPPayloadStrategy::CreateStrategy(false)));
scoped_ptr<RtpReceiver> rtp_receiver(
RtpReceiver::CreateVideoReceiver(-1, Clock::GetRealTimeClock(),
&dataCallback, NULL, registry.get()));
// registering codecs for the RTP module
VideoCodec video_codec;
strncpy(video_codec.plName, "ULPFEC", 32);
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
VideoCodec videoCodec;
strncpy(videoCodec.plName, "ULPFEC", 32);
videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
strncpy(video_codec.plName, "RED", 32);
video_codec.plType = VCM_RED_PAYLOAD_TYPE;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
strncpy(video_codec.plName, args.codecName.c_str(), 32);
video_codec.plType = VCM_VP8_PAYLOAD_TYPE;
video_codec.maxBitrate = 10000;
video_codec.codecType = args.codecType;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
TEST(rtp->RegisterSendPayload(video_codec) == 0);
strncpy(videoCodec.plName, args.codecName.c_str(), 32);
videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
videoCodec.maxBitrate = 10000;
videoCodec.codecType = args.codecType;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
TEST(rtp->RegisterSendPayload(videoCodec) == 0);
// inform RTP Module of error resilience features
TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE,
VCM_ULPFEC_PAYLOAD_TYPE) == 0);
TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0);
//VCM
if (vcm->InitializeReceiver() < 0)
@ -257,8 +238,7 @@ int MTRxTxTest(CmdArgs& args)
FecProtectionParams delta_params = protectionCallback.DeltaFecParameters();
FecProtectionParams key_params = protectionCallback.KeyFecParameters();
rtp->SetFecParameters(&delta_params, &key_params);
rtp_receiver->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff,
kMaxPacketAgeToNack);
rtp->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff, kMaxPacketAgeToNack);
vcm->SetChannelParameters(static_cast<uint32_t>(1000 * bitRate),
(uint8_t) lossRate, rttMS);

View File

@ -13,8 +13,6 @@
#include <cmath>
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/clock.h"
@ -97,14 +95,7 @@ TransportCallback::TransportPackets()
delete packet;
return -1;
}
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header, packet->data,
packet->length, payload_specific,
true))
if (_rtp->IncomingRtpPacket(packet->data, packet->length, header) < 0)
{
delete packet;
return -1;

View File

@ -95,7 +95,7 @@ VCMNTEncodeCompleteCallback::SendData(
switch (_test.VideoType())
{
case kVideoCodecVP8:
rtpInfo.type.Video.codec = kRtpVideoVp8;
rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoHdr->codecHeader.VP8.nonReference;
@ -103,7 +103,7 @@ VCMNTEncodeCompleteCallback::SendData(
videoHdr->codecHeader.VP8.pictureId;
break;
case kVideoCodecI420:
rtpInfo.type.Video.codec = kRtpVideoI420;
rtpInfo.type.Video.codec = kRTPVideoI420;
break;
default:
assert(false);

View File

@ -22,7 +22,7 @@
#include <stdio.h>
#include <string>
class RtpDataCallback : public webrtc::NullRtpData {
class RtpDataCallback : public webrtc::RtpData {
public:
RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
virtual ~RtpDataCallback() {}

View File

@ -14,8 +14,6 @@
#include <map>
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/test/pcap_file_reader.h"
@ -218,9 +216,8 @@ class SsrcHandlers {
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
handler->rtp_module_.reset(RtpReceiver::CreateVideoReceiver(
configuration.id, configuration.clock, handler->payload_sink_.get(),
NULL, handler->rtp_payload_registry_.get()));
configuration.incoming_data = handler->payload_sink_.get();
handler->rtp_module_.reset(RtpRtcp::CreateRtpRtcp(configuration));
if (handler->rtp_module_.get() == NULL) {
return -1;
}
@ -229,6 +226,9 @@ class SsrcHandlers {
kMaxPacketAgeToNack) < 0) {
return -1;
}
handler->rtp_module_->SetRTCPStatus(kRtcpNonCompound);
handler->rtp_module_->SetREMBStatus(true);
handler->rtp_module_->SetSSRCFilter(true, ssrc);
handler->rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset,
kDefaultTransmissionTimeOffsetExtensionId);
@ -240,11 +240,7 @@ class SsrcHandlers {
strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName)-1);
codec.plType = it->payload_type();
codec.codecType = it->codec_type();
if (handler->rtp_module_->RegisterReceivePayload(codec.plName,
codec.plType,
90000,
0,
codec.maxBitrate) < 0) {
if (handler->rtp_module_->RegisterReceivePayload(codec) < 0) {
return -1;
}
}
@ -253,18 +249,20 @@ class SsrcHandlers {
return 0;
}
void Process() {
for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
it->second->rtp_module_->Process();
}
}
void IncomingPacket(const uint8_t* data, uint32_t length) {
for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
if (!it->second->rtp_header_parser_->IsRtcp(data, length)) {
if (it->second->rtp_header_parser_->IsRtcp(data, length)) {
it->second->rtp_module_->IncomingRtcpPacket(data, length);
} else {
RTPHeader header;
it->second->rtp_header_parser_->Parse(data, length, &header);
PayloadUnion payload_specific;
it->second->rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific);
bool in_order =
it->second->rtp_module_->InOrderPacket(header.sequenceNumber);
it->second->rtp_module_->IncomingRtpPacket(&header, data, length,
payload_specific, in_order);
it->second->rtp_module_->IncomingRtpPacket(data, length, header);
}
}
}
@ -275,8 +273,6 @@ class SsrcHandlers {
Handler(uint32_t ssrc, const PayloadTypes& payload_types,
LostPackets* lost_packets)
: rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(new RTPPayloadRegistry(
0, RTPPayloadStrategy::CreateStrategy(false))),
rtp_module_(),
payload_sink_(),
ssrc_(ssrc),
@ -300,8 +296,7 @@ class SsrcHandlers {
}
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_module_;
scoped_ptr<RtpRtcp> rtp_module_;
scoped_ptr<PayloadSinkInterface> payload_sink_;
private:
@ -371,6 +366,8 @@ class RtpPlayerImpl : public RtpPlayerInterface {
// Send any packets from packet source.
if (!end_of_file_ && (TimeUntilNextPacket() == 0 || first_packet_)) {
ssrc_handlers_.Process();
if (first_packet_) {
next_packet_length_ = sizeof(next_packet_);
if (packet_source_->NextPacket(next_packet_, &next_packet_length_,

View File

@ -14,8 +14,6 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/system_wrappers/interface/clock.h"
@ -36,7 +34,7 @@ VCMEncodeCompleteCallback::VCMEncodeCompleteCallback(FILE* encodedFile):
_encodeComplete(false),
_width(0),
_height(0),
_codecType(kRtpVideoNone)
_codecType(kRTPVideoNoVideo)
{
//
}
@ -75,14 +73,14 @@ VCMEncodeCompleteCallback::SendData(
rtpInfo.type.Video.width = (uint16_t)_width;
switch (_codecType)
{
case webrtc::kRtpVideoVp8:
case webrtc::kRTPVideoVP8:
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoHdr->codecHeader.VP8.nonReference;
rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoHdr->codecHeader.VP8.pictureId;
break;
case webrtc::kRtpVideoI420:
case webrtc::kRTPVideoI420:
break;
default:
assert(false);
@ -211,8 +209,6 @@ RTPSendCompleteCallback::RTPSendCompleteCallback(Clock* clock,
const char* filename):
_clock(clock),
_sendCount(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
_rtp(NULL),
_lossPct(0),
_burstLength(0),
@ -303,14 +299,7 @@ RTPSendCompleteCallback::SendPacket(int channel, const void *data, int len)
delete packet;
return -1;
}
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header, packet->data,
packet->length, payload_specific,
true))
if (_rtp->IncomingRtpPacket(packet->data, packet->length, header) < 0)
{
delete packet;
return -1;

View File

@ -29,7 +29,6 @@
namespace webrtc
{
class RTPPayloadRegistry;
class RtpDump;
// Send Side - Packetization callback - send an encoded frame to the VCMReceiver
@ -60,7 +59,7 @@ public:
// Return encode complete (true/false)
bool EncodeComplete();
// Inform callback of codec used
void SetCodecType(RtpVideoCodecTypes codecType)
void SetCodecType(RTPVideoCodecTypes codecType)
{_codecType = codecType;}
// Inform callback of frame dimensions
void SetFrameDimensions(int32_t width, int32_t height)
@ -83,7 +82,7 @@ private:
bool _encodeComplete;
int32_t _width;
int32_t _height;
RtpVideoCodecTypes _codecType;
RTPVideoCodecTypes _codecType;
}; // end of VCMEncodeCompleteCallback
@ -114,7 +113,7 @@ public:
// Return encode complete (true/false)
bool EncodeComplete();
// Inform callback of codec used
void SetCodecType(RtpVideoCodecTypes codecType)
void SetCodecType(RTPVideoCodecTypes codecType)
{_codecType = codecType;}
// Inform callback of frame dimensions
@ -131,7 +130,7 @@ private:
RtpRtcp* _RTPModule;
int16_t _width;
int16_t _height;
RtpVideoCodecTypes _codecType;
RTPVideoCodecTypes _codecType;
}; // end of VCMEncodeCompleteCallback
// Decode Complete callback
@ -189,8 +188,6 @@ protected:
Clock* _clock;
uint32_t _sendCount;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* _rtp;
double _lossPct;
double _burstLength;

View File

@ -146,12 +146,12 @@ int32_t FileOutputFrameReceiver::FrameToRender(
return 0;
}
webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
webrtc::RTPVideoCodecTypes ConvertCodecType(const char* plname) {
if (strncmp(plname,"VP8" , 3) == 0) {
return webrtc::kRtpVideoVp8;
return webrtc::kRTPVideoVP8;
} else if (strncmp(plname,"I420" , 5) == 0) {
return webrtc::kRtpVideoI420;
return webrtc::kRTPVideoI420;
} else {
return webrtc::kRtpVideoNone; // Default value
return webrtc::kRTPVideoNoVideo; // Default value
}
}
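ConvertCodecType() above is a convenient summary of the identifier rename that runs through the video_coding hunks of this commit. The removed/restored name pairs visible in those hunks, plus a hypothetical call site using the signature shown above (sketch only):

    // RtpVideoCodecTypes -> RTPVideoCodecTypes   (the enum type itself)
    // kRtpVideoVp8       -> kRTPVideoVP8
    // kRtpVideoI420      -> kRTPVideoI420
    // kRtpVideoNone      -> kRTPVideoNoVideo     (used as the default value)
    webrtc::RTPVideoCodecTypes codec_type = ConvertCodecType("VP8");  // kRTPVideoVP8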

View File

@ -102,6 +102,6 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
};
// Codec type conversion
webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname);
webrtc::RTPVideoCodecTypes ConvertCodecType(const char* plname);
#endif

View File

@ -67,12 +67,6 @@ class VcmPayloadSinkFactory::VcmPayloadSink
return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
}
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) {
// We currently don't handle FEC.
return true;
}
// VCMPacketRequestCallback
virtual int32_t ResendPackets(const uint16_t* sequence_numbers,
uint16_t length) {

View File

@ -32,6 +32,24 @@ enum ViEPacketTimeout {
PacketReceived = 1
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViENetworkObserver {
public:
// This method will be called periodically delivering a dead-or-alive
// decision for a specified channel.
virtual void OnPeriodicDeadOrAlive(const int video_channel,
const bool alive) = 0;
// This method is called once if a packet timeout occurred.
virtual void PacketTimeout(const int video_channel,
const ViEPacketTimeout timeout) = 0;
protected:
virtual ~ViENetworkObserver() {}
};
class WEBRTC_DLLEXPORT ViENetwork {
public:
// Default values.
@ -78,6 +96,27 @@ class WEBRTC_DLLEXPORT ViENetwork {
// over the network.
virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
// This function enables or disables warning reports if packets have not
// been received for a specified time interval.
virtual int SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds) = 0;
// Registers an instance of a user implementation of the ViENetwork
// observer.
virtual int RegisterObserver(const int video_channel,
ViENetworkObserver& observer) = 0;
// Removes a registered instance of ViENetworkObserver.
virtual int DeregisterObserver(const int video_channel) = 0;
// This function enables or disables the periodic dead-or-alive callback
// functionality for a specified channel.
virtual int SetPeriodicDeadOrAliveStatus(
const int video_channel,
const bool enable,
const unsigned int sample_time_seconds = KDefaultSampleTimeSeconds) = 0;
protected:
ViENetwork() {}
virtual ~ViENetwork() {}

View File

@ -25,6 +25,24 @@
#include <qos.h>
#endif
class ViEAutoTestNetworkObserver: public webrtc::ViENetworkObserver
{
public:
ViEAutoTestNetworkObserver()
{
}
virtual ~ViEAutoTestNetworkObserver()
{
}
virtual void OnPeriodicDeadOrAlive(const int videoChannel, const bool alive)
{
}
virtual void PacketTimeout(const int videoChannel,
const webrtc::ViEPacketTimeout timeout)
{
}
};
void ViEAutoTest::ViENetworkStandardTest()
{
TbInterfaces ViE("ViENetworkStandardTest"); // Create VIE
@ -527,6 +545,26 @@ void ViEAutoTest::ViENetworkAPITest()
EXPECT_NE(0, ViE.network->SetMTU(tbChannel.videoChannel, 1600));
// Valid input
EXPECT_EQ(0, ViE.network->SetMTU(tbChannel.videoChannel, 800));
//
// Observer and timeout
//
ViEAutoTestNetworkObserver vieTestObserver;
EXPECT_EQ(0, ViE.network->RegisterObserver(
tbChannel.videoChannel, vieTestObserver));
EXPECT_NE(0, ViE.network->RegisterObserver(
tbChannel.videoChannel, vieTestObserver));
EXPECT_EQ(0, ViE.network->SetPeriodicDeadOrAliveStatus(
tbChannel.videoChannel, true)); // No observer
EXPECT_EQ(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->SetPeriodicDeadOrAliveStatus(
tbChannel.videoChannel, true)); // No observer
// Packet timeout notification
EXPECT_EQ(0, ViE.network->SetPacketTimeoutNotification(
tbChannel.videoChannel, true, 10));
}
//***************************************************************

View File

@ -15,7 +15,6 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
@ -75,7 +74,7 @@ ViEChannel::ViEChannel(int32_t channel_id,
default_rtp_rtcp_(default_rtp_rtcp),
rtp_rtcp_(NULL),
vcm_(*VideoCodingModule::Create(ViEModuleId(engine_id, channel_id))),
vie_receiver_(channel_id, &vcm_, remote_bitrate_estimator, this),
vie_receiver_(channel_id, &vcm_, remote_bitrate_estimator),
vie_sender_(channel_id),
vie_sync_(&vcm_, this),
stats_observer_(new ChannelStatsObserver(this)),
@ -84,13 +83,16 @@ ViEChannel::ViEChannel(int32_t channel_id,
do_key_frame_callbackRequest_(false),
rtp_observer_(NULL),
rtcp_observer_(NULL),
networkObserver_(NULL),
intra_frame_observer_(intra_frame_observer),
rtt_observer_(rtt_observer),
paced_sender_(paced_sender),
bandwidth_observer_(bandwidth_observer),
rtp_packet_timeout_(false),
send_timestamp_extension_id_(kInvalidRtpExtensionId),
absolute_send_time_extension_id_(kInvalidRtpExtensionId),
receive_absolute_send_time_enabled_(false),
using_packet_spread_(false),
external_transport_(NULL),
decoder_reset_(true),
wait_for_key_frame_(false),
@ -110,6 +112,8 @@ ViEChannel::ViEChannel(int32_t channel_id,
configuration.id = ViEModuleId(engine_id, channel_id);
configuration.audio = false;
configuration.default_module = default_rtp_rtcp;
configuration.incoming_data = &vie_receiver_;
configuration.incoming_messages = this;
configuration.outgoing_transport = &vie_sender_;
configuration.rtcp_feedback = this;
configuration.intra_frame_callback = intra_frame_observer;
@ -117,7 +121,6 @@ ViEChannel::ViEChannel(int32_t channel_id,
configuration.rtt_observer = rtt_observer;
configuration.remote_bitrate_estimator = remote_bitrate_estimator;
configuration.paced_sender = paced_sender;
configuration.receive_statistics = vie_receiver_.GetReceiveStatistics();
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
@ -129,13 +132,6 @@ int32_t ViEChannel::Init() {
"%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
engine_id_);
if (module_process_thread_.RegisterModule(
vie_receiver_.GetReceiveStatistics()) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Failed to register receive-statistics to process thread",
__FUNCTION__);
return -1;
}
// RTP/RTCP initialization.
if (rtp_rtcp_->SetSendingMediaStatus(false) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -201,10 +197,7 @@ int32_t ViEChannel::Init() {
VideoCodec video_codec;
if (vcm_.Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
rtp_rtcp_->RegisterSendPayload(video_codec);
// TODO(holmer): Can we call SetReceiveCodec() here instead?
if (!vie_receiver_.RegisterPayload(video_codec)) {
return -1;
}
rtp_rtcp_->RegisterReceivePayload(video_codec);
vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_);
vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
rtp_rtcp_->MaxDataPayloadLength());
@ -222,7 +215,6 @@ ViEChannel::~ViEChannel() {
channel_id_, engine_id_);
// Make sure we don't get more callbacks from the RTP module.
module_process_thread_.DeRegisterModule(vie_receiver_.GetReceiveStatistics());
module_process_thread_.DeRegisterModule(rtp_rtcp_.get());
module_process_thread_.DeRegisterModule(&vcm_);
module_process_thread_.DeRegisterModule(&vie_sync_);
@ -278,6 +270,7 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
(*it)->SetSendingMediaStatus(false);
}
}
NACKMethod nack_method = rtp_rtcp_->NACK();
bool fec_enabled = false;
uint8_t payload_type_red;
@ -324,6 +317,12 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::SetRTCPStatus failure", __FUNCTION__);
}
if (nack_method != kNackOff) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
rtp_rtcp->SetNACKStatus(nack_method, max_nack_reordering_threshold_);
} else if (paced_sender_) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
}
if (fec_enabled) {
rtp_rtcp->SetGenericFECStatus(fec_enabled, payload_type_red,
payload_type_fec);
@ -445,7 +444,12 @@ int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
if (!vie_receiver_.SetReceiveCodec(video_codec)) {
int8_t old_pltype = -1;
if (rtp_rtcp_->ReceivePayloadType(video_codec, &old_pltype) != -1) {
rtp_rtcp_->DeRegisterReceivePayload(old_pltype);
}
if (rtp_rtcp_->RegisterReceivePayload(video_codec) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not register receive payload type", __FUNCTION__);
return -1;
@ -655,8 +659,8 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
"%s: Could not enable NACK, RTPC not on ", __FUNCTION__);
return -1;
}
if (!vie_receiver_.SetNackStatus(true,
max_nack_reordering_threshold_)) {
if (rtp_rtcp_->SetNACKStatus(nackMethod,
max_nack_reordering_threshold_) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not set NACK method %d", __FUNCTION__,
nackMethod);
@ -674,6 +678,7 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
it != simulcast_rtp_rtcp_.end();
it++) {
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetNACKStatus(nackMethod, max_nack_reordering_threshold_);
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
}
} else {
@ -685,13 +690,14 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
if (paced_sender_ == NULL) {
rtp_rtcp->SetStorePacketsStatus(false, 0);
}
rtp_rtcp->SetNACKStatus(kNackOff, max_nack_reordering_threshold_);
}
vcm_.RegisterPacketRequestCallback(NULL);
if (paced_sender_ == NULL) {
rtp_rtcp_->SetStorePacketsStatus(false, 0);
}
if (!vie_receiver_.SetNackStatus(false,
max_nack_reordering_threshold_)) {
if (rtp_rtcp_->SetNACKStatus(kNackOff,
max_nack_reordering_threshold_) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not turn off NACK", __FUNCTION__);
return -1;
@ -971,15 +977,14 @@ int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
}
int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
const uint32_t SSRC) {
const uint32_t SSRC) const {
WEBRTC_TRACE(webrtc::kTraceInfo,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(usage:%d, SSRC: 0x%x)",
__FUNCTION__, usage, SSRC);
vie_receiver_.SetRtxStatus(true, SSRC);
return 0;
return rtp_rtcp_->SetRTXReceiveStatus(true, SSRC);
}
// TODO(mflodman) Add kViEStreamTypeRtx.
@ -1009,7 +1014,7 @@ int32_t ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
*ssrc = vie_receiver_.GetRemoteSsrc();
*ssrc = rtp_rtcp_->RemoteSSRC();
return 0;
}
@ -1020,7 +1025,7 @@ int32_t ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
uint32_t arrayCSRC[kRtpCsrcSize];
memset(arrayCSRC, 0, sizeof(arrayCSRC));
int num_csrcs = vie_receiver_.GetCsrcs(arrayCSRC);
int num_csrcs = rtp_rtcp_->RemoteCSRCs(arrayCSRC);
if (num_csrcs > 0) {
memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(uint32_t));
for (int idx = 0; idx < num_csrcs; idx++) {
@ -1050,7 +1055,12 @@ int ViEChannel::SetRtxSendPayloadType(int payload_type) {
}
void ViEChannel::SetRtxReceivePayloadType(int payload_type) {
vie_receiver_.SetRtxPayloadType(payload_type);
rtp_rtcp_->SetRtxReceivePayloadType(payload_type);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end(); it++) {
(*it)->SetRtxReceivePayloadType(payload_type);
}
}
int32_t ViEChannel::SetStartSequenceNumber(uint16_t sequence_number) {
@ -1086,7 +1096,7 @@ int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
uint32_t remoteSSRC = rtp_rtcp_->RemoteSSRC();
return rtp_rtcp_->RemoteCNAME(remoteSSRC, rtcp_cname);
}
@ -1193,7 +1203,7 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
// it++) {
// RtpRtcp* rtp_rtcp = *it;
// }
uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
// Get all RTCP receiver report blocks that have been received on this
// channel. If we receive RTP packets from a remote source we know the
@ -1236,9 +1246,6 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
return 0;
}
// TODO(holmer): This is a bad function name as it implies that it returns the
// received RTCP, while it actually returns the statistics which will be sent
// in the RTCP.
int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
@ -1248,21 +1255,15 @@ int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
"%s", __FUNCTION__);
uint8_t frac_lost = 0;
ReceiveStatistics* receive_statistics = vie_receiver_.GetReceiveStatistics();
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (!receive_statistics || !receive_statistics->Statistics(
&receive_stats, rtp_rtcp_->RTCP() == kRtcpOff)) {
if (rtp_rtcp_->StatisticsRTP(&frac_lost, cumulative_lost, extended_max,
jitter_samples) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not get received RTP statistics", __FUNCTION__);
return -1;
}
*fraction_lost = receive_stats.fraction_lost;
*cumulative_lost = receive_stats.cumulative_lost;
*extended_max = receive_stats.extended_max_sequence_number;
*jitter_samples = receive_stats.jitter;
*fraction_lost = frac_lost;
uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
uint16_t dummy = 0;
uint16_t rtt = 0;
if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
@ -1280,9 +1281,10 @@ int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
ReceiveStatistics* receive_statistics = vie_receiver_.GetReceiveStatistics();
receive_statistics->GetDataCounters(bytes_received, packets_received);
if (rtp_rtcp_->DataCountersRTP(bytes_sent, packets_sent) != 0) {
if (rtp_rtcp_->DataCountersRTP(bytes_sent,
packets_sent,
bytes_received,
packets_received) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not get counters", __FUNCTION__);
return -1;
@ -1294,7 +1296,7 @@ int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t bytes_sent_temp = 0;
uint32_t packets_sent_temp = 0;
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp);
rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp, NULL, NULL);
bytes_sent += bytes_sent_temp;
packets_sent += packets_sent_temp;
}
@ -1555,6 +1557,92 @@ uint16_t ViEChannel::MaxDataPayloadLength() const {
return rtp_rtcp_->MaxDataPayloadLength();
}
int32_t ViEChannel::SetPacketTimeoutNotification(
bool enable, uint32_t timeout_seconds) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (enable) {
uint32_t timeout_ms = 1000 * timeout_seconds;
if (rtp_rtcp_->SetPacketTimeout(timeout_ms, 0) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return -1;
}
} else {
if (rtp_rtcp_->SetPacketTimeout(0, 0) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return -1;
}
}
return 0;
}
int32_t ViEChannel::RegisterNetworkObserver(
ViENetworkObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer alread added", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer added", __FUNCTION__);
networkObserver_ = observer;
} else {
if (!networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: no observer added", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer removed", __FUNCTION__);
networkObserver_ = NULL;
}
return 0;
}
bool ViEChannel::NetworkObserverRegistered() {
CriticalSectionScoped cs(callback_cs_.get());
return networkObserver_ != NULL;
}
int32_t ViEChannel::SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
CriticalSectionScoped cs(callback_cs_.get());
if (!networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: no observer added", __FUNCTION__);
return -1;
}
bool enabled = false;
uint8_t current_sampletime_seconds = 0;
// Get old settings.
rtp_rtcp_->PeriodicDeadOrAliveStatus(enabled, current_sampletime_seconds);
// Set new settings.
if (rtp_rtcp_->SetPeriodicDeadOrAliveStatus(
enable, static_cast<uint8_t>(sample_time_seconds)) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not set periodic dead-or-alive status",
__FUNCTION__);
return -1;
}
if (!enable) {
// Restore last utilized sample time.
// Without this trick, the sample time would always be reset to the default
// (2 sec) each time dead-or-alive was disabled without a sample-time
// parameter.
rtp_rtcp_->SetPeriodicDeadOrAliveStatus(enable, current_sampletime_seconds);
}
return 0;
}
int32_t ViEChannel::EnableColorEnhancement(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -1611,9 +1699,9 @@ int32_t ViEChannel::FrameToRender(
}
uint32_t arr_ofCSRC[kRtpCsrcSize];
int32_t no_of_csrcs = vie_receiver_.GetCsrcs(arr_ofCSRC);
int32_t no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
if (no_of_csrcs <= 0) {
arr_ofCSRC[0] = rtp_rtcp_->SSRC();
arr_ofCSRC[0] = rtp_rtcp_->RemoteSSRC();
no_of_csrcs = 1;
}
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1632,7 +1720,7 @@ int32_t ViEChannel::StoreReceivedFrame(
return 0;
}
int32_t ViEChannel::OnReceiveStatisticsUpdate(const uint32_t bit_rate,
int32_t ViEChannel::ReceiveStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
@ -1786,10 +1874,8 @@ int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
} else {
module_process_thread_.DeRegisterModule(&vie_sync_);
}
return vie_sync_.ConfigureSync(ve_channel_id,
ve_sync_interface,
rtp_rtcp_.get(),
vie_receiver_.GetRtpReceiver());
return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
rtp_rtcp_.get());
}
int32_t ViEChannel::VoiceChannel() {
@ -1861,6 +1947,52 @@ int32_t ViEChannel::OnInitializeDecoder(
return 0;
}
void ViEChannel::OnPacketTimeout(const int32_t id) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
CriticalSectionScoped cs(callback_cs_.get());
if (networkObserver_) {
networkObserver_->PacketTimeout(channel_id_, NoPacket);
rtp_packet_timeout_ = true;
}
}
void ViEChannel::OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (rtp_packet_timeout_ && packet_type == kPacketRtp) {
CriticalSectionScoped cs(callback_cs_.get());
if (networkObserver_) {
networkObserver_->PacketTimeout(channel_id_, PacketReceived);
}
// Reset even if no observer set, might have been removed during timeout.
rtp_packet_timeout_ = false;
}
}
void ViEChannel::OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(id=%d, alive=%d)", __FUNCTION__, id, alive);
CriticalSectionScoped cs(callback_cs_.get());
if (!networkObserver_) {
return;
}
bool is_alive = true;
if (alive == kRtpDead) {
is_alive = false;
}
networkObserver_->OnPeriodicDeadOrAlive(channel_id_, is_alive);
return;
}
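
The three handlers above simply relay RTP-module events to whatever ViENetworkObserver the application registered through RegisterNetworkObserver. As a rough illustration (not part of this change), an application-side observer could look like the sketch below; the method signatures are inferred from the calls in this file (PacketTimeout with NoPacket/PacketReceived, OnPeriodicDeadOrAlive with a bool), and the ViEPacketTimeout enum name and header path are assumptions.

#include <cstdio>
#include "webrtc/video_engine/include/vie_network.h"  // Assumed header path.

// Sketch of an application-side observer. Signatures inferred from the
// calls above: PacketTimeout(channel, NoPacket|PacketReceived) and
// OnPeriodicDeadOrAlive(channel, is_alive).
class LoggingNetworkObserver : public webrtc::ViENetworkObserver {
 public:
  virtual void PacketTimeout(const int video_channel,
                             const webrtc::ViEPacketTimeout timeout) {
    // NoPacket signals a timeout; PacketReceived signals traffic resumed.
    std::printf("channel %d: %s\n", video_channel,
                timeout == webrtc::NoPacket ? "packet timeout"
                                            : "packets received again");
  }
  virtual void OnPeriodicDeadOrAlive(const int video_channel,
                                     const bool alive) {
    std::printf("channel %d is %s\n", video_channel, alive ? "alive" : "dead");
  }
};
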
void ViEChannel::OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
if (channel_id_ != ChannelId(id)) {
@ -1873,8 +2005,6 @@ void ViEChannel::OnIncomingSSRCChanged(const int32_t id,
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %u", __FUNCTION__, SSRC);
rtp_rtcp_->SetRemoteSSRC(SSRC);
CriticalSectionScoped cs(callback_cs_.get());
{
if (rtp_observer_) {
@ -1907,8 +2037,4 @@ void ViEChannel::OnIncomingCSRCChanged(const int32_t id,
}
}
void ViEChannel::OnResetStatistics() {
vie_receiver_.GetReceiveStatistics()->ResetStatistics();
}
} // namespace webrtc


@ -41,6 +41,7 @@ class RtpRtcp;
class ThreadWrapper;
class ViEDecoderObserver;
class ViEEffectFilter;
class ViENetworkObserver;
class ViERTCPObserver;
class ViERTPObserver;
class VideoCodingModule;
@ -208,12 +209,16 @@ class ViEChannel
const int frequency,
const uint8_t channels,
const uint32_t rate);
virtual void OnPacketTimeout(const int32_t id);
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type);
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive);
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC);
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added);
virtual void OnResetStatistics();
int32_t SetLocalReceiver(const uint16_t rtp_port,
const uint16_t rtcp_port,
@ -236,7 +241,7 @@ class ViEChannel
char* ip_address,
uint32_t ip_address_length);
int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);
int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC) const;
int32_t StartSend();
int32_t StopSend();
@ -265,6 +270,12 @@ class ViEChannel
int32_t SetMaxPacketBurstSize(uint16_t max_number_of_packets);
int32_t SetPacketBurstSpreadState(bool enable, const uint16_t frame_periodMS);
int32_t SetPacketTimeoutNotification(bool enable, uint32_t timeout_seconds);
int32_t RegisterNetworkObserver(ViENetworkObserver* observer);
bool NetworkObserverRegistered();
int32_t SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds);
int32_t EnableColorEnhancement(bool enable);
// Gets the modules used by the channel.
@ -284,7 +295,7 @@ class ViEChannel
const EncodedVideoData& frame_to_store);
// Implements VideoReceiveStatisticsCallback.
virtual int32_t OnReceiveStatisticsUpdate(const uint32_t bit_rate,
virtual int32_t ReceiveStatistics(const uint32_t bit_rate,
const uint32_t frame_rate);
// Implements VideoFrameTypeCallback.
@ -357,11 +368,13 @@ class ViEChannel
bool do_key_frame_callbackRequest_;
ViERTPObserver* rtp_observer_;
ViERTCPObserver* rtcp_observer_;
ViENetworkObserver* networkObserver_;
RtcpIntraFrameObserver* intra_frame_observer_;
RtcpRttObserver* rtt_observer_;
PacedSender* paced_sender_;
scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
bool rtp_packet_timeout_;
int send_timestamp_extension_id_;
int absolute_send_time_extension_id_;
bool receive_absolute_send_time_enabled_;


@ -247,7 +247,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
shared_data_->channel_manager()->UpdateSsrcs(video_channel, ssrcs);
// Update the protection mode, we might be switching NACK/FEC.
vie_encoder->UpdateProtectionMethod(vie_encoder->nack_enabled());
vie_encoder->UpdateProtectionMethod();
// Get new best format for frame provider.
if (frame_provider) {


@ -723,7 +723,7 @@ int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
return 0;
}
int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
int32_t ViEEncoder::UpdateProtectionMethod() {
bool fec_enabled = false;
uint8_t dummy_ptype_red = 0;
uint8_t dummy_ptypeFEC = 0;
@ -736,23 +736,25 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
if (error) {
return -1;
}
if (fec_enabled_ == fec_enabled && nack_enabled_ == enable_nack) {
bool nack_enabled = (default_rtp_rtcp_->NACK() == kNackOff) ? false : true;
if (fec_enabled_ == fec_enabled && nack_enabled_ == nack_enabled) {
// No change needed, we're already in correct state.
return 0;
}
fec_enabled_ = fec_enabled;
nack_enabled_ = enable_nack;
nack_enabled_ = nack_enabled;
// Set Video Protection for VCM.
if (fec_enabled && nack_enabled_) {
if (fec_enabled && nack_enabled) {
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, true);
} else {
vcm_.SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackSender, nack_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNack, nack_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, false);
}
if (fec_enabled_ || nack_enabled_) {
if (fec_enabled || nack_enabled) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: FEC status ",
__FUNCTION__, fec_enabled);


@ -114,8 +114,7 @@ class ViEEncoder
int CodecTargetBitrate(uint32_t* bitrate) const;
// Loss protection.
int32_t UpdateProtectionMethod(bool enable_nack);
bool nack_enabled() const { return nack_enabled_; }
int32_t UpdateProtectionMethod();
// Buffering mode.
void SetSenderBufferingMode(int target_delay_ms);


@ -196,4 +196,99 @@ int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
}
return 0;
}
int ViENetworkImpl::SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d, enable: %d, timeout_seconds: %u)",
__FUNCTION__, video_channel, enable, timeout_seconds);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->SetPacketTimeoutNotification(enable,
timeout_seconds) != 0) {
shared_data_->SetLastError(kViENetworkUnknownError);
return -1;
}
return 0;
}
int ViENetworkImpl::RegisterObserver(const int video_channel,
ViENetworkObserver& observer) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->RegisterNetworkObserver(&observer) != 0) {
shared_data_->SetLastError(kViENetworkObserverAlreadyRegistered);
return -1;
}
return 0;
}
int ViENetworkImpl::DeregisterObserver(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (!vie_channel->NetworkObserverRegistered()) {
shared_data_->SetLastError(kViENetworkObserverNotRegistered);
return -1;
}
return vie_channel->RegisterNetworkObserver(NULL);
}
int ViENetworkImpl::SetPeriodicDeadOrAliveStatus(
const int video_channel,
bool enable,
unsigned int sample_time_seconds) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d, enable: %d, sample_time_seconds: %ul)",
__FUNCTION__, video_channel, enable, sample_time_seconds);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (!vie_channel->NetworkObserverRegistered()) {
shared_data_->SetLastError(kViENetworkObserverNotRegistered);
return -1;
}
if (vie_channel->SetPeriodicDeadOrAliveStatus(enable, sample_time_seconds)
!= 0) {
shared_data_->SetLastError(kViENetworkUnknownError);
return -1;
}
return 0;
}
} // namespace webrtc


@ -37,6 +37,16 @@ class ViENetworkImpl
const void* data,
const int length);
virtual int SetMTU(int video_channel, unsigned int mtu);
virtual int SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds);
virtual int RegisterObserver(const int video_channel,
ViENetworkObserver& observer);
virtual int DeregisterObserver(const int video_channel);
virtual int SetPeriodicDeadOrAliveStatus(
const int video_channel,
const bool enable,
const unsigned int sample_time_seconds);
protected:
explicit ViENetworkImpl(ViESharedData* shared_data);


@ -13,10 +13,7 @@
#include <vector>
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
@ -28,18 +25,10 @@ namespace webrtc {
ViEReceiver::ViEReceiver(const int32_t channel_id,
VideoCodingModule* module_vcm,
RemoteBitrateEstimator* remote_bitrate_estimator,
RtpFeedback* rtp_feedback)
RemoteBitrateEstimator* remote_bitrate_estimator)
: receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
channel_id_(channel_id),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(new RTPPayloadRegistry(
channel_id, RTPPayloadStrategy::CreateStrategy(false))),
rtp_receiver_(RtpReceiver::CreateVideoReceiver(
channel_id, Clock::GetRealTimeClock(), this, rtp_feedback,
rtp_payload_registry_.get())),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
rtp_rtcp_(NULL),
vcm_(module_vcm),
remote_bitrate_estimator_(remote_bitrate_estimator),
@ -62,59 +51,6 @@ ViEReceiver::~ViEReceiver() {
}
}
bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
int8_t old_pltype = -1;
if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
kVideoPayloadTypeFrequency,
0,
video_codec.maxBitrate,
&old_pltype) != -1) {
rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
}
if (rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
kVideoPayloadTypeFrequency,
0,
video_codec.maxBitrate) != 0) {
return false;
}
return true;
}
bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
if (rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
kVideoPayloadTypeFrequency,
0,
video_codec.maxBitrate) != 0) {
return false;
}
return true;
}
bool ViEReceiver::SetNackStatus(bool enable,
int max_nack_reordering_threshold) {
return rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
max_nack_reordering_threshold) == 0;
}
void ViEReceiver::SetRtxStatus(bool enable, uint32_t ssrc) {
rtp_receiver_->SetRTXStatus(true, ssrc);
}
void ViEReceiver::SetRtxPayloadType(uint32_t payload_type) {
rtp_receiver_->SetRtxPayloadType(payload_type);
}
uint32_t ViEReceiver::GetRemoteSsrc() const {
return rtp_receiver_->SSRC();
}
int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
return rtp_receiver_->CSRCs(csrcs);
}
int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
CriticalSectionScoped cs(receive_cs_.get());
if (external_decryption_) {
@ -141,10 +77,6 @@ void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
rtp_rtcp_ = module;
}
RtpReceiver* ViEReceiver::GetRtpReceiver() const {
return rtp_receiver_.get();
}
void ViEReceiver::RegisterSimulcastRtpRtcpModules(
const std::list<RtpRtcp*>& rtp_modules) {
CriticalSectionScoped cs(receive_cs_.get());
@ -208,25 +140,6 @@ int32_t ViEReceiver::OnReceivedPayloadData(
return 0;
}
bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
int rtp_packet_length) {
RTPHeader header;
if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVideo, channel_id_,
"IncomingPacket invalid RTP header");
return false;
}
header.payload_type_frequency = kVideoPayloadTypeFrequency;
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return false;
}
return rtp_receiver_->IncomingRtpPacket(&header, rtp_packet,
rtp_packet_length,
payload_specific, false);
}
int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
int rtp_packet_length) {
// TODO(mflodman) Change decrypt to get rid of this cast.
@ -272,19 +185,9 @@ int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
const int payload_size = received_packet_length - header.headerLength;
remote_bitrate_estimator_->IncomingPacket(TickTime::MillisecondTimestamp(),
payload_size, header);
header.payload_type_frequency = kVideoPayloadTypeFrequency;
bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
bool retransmitted = !in_order && IsPacketRetransmitted(header);
rtp_receive_statistics_->IncomingPacket(header, received_packet_length,
retransmitted, in_order);
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return -1;
}
return rtp_receiver_->IncomingRtpPacket(&header, received_packet,
received_packet_length,
payload_specific, in_order) ? 0 : -1;
assert(rtp_rtcp_); // Should be set by owner at construction time.
return rtp_rtcp_->IncomingRtpPacket(received_packet, received_packet_length,
header);
}
int ViEReceiver::InsertRTCPPacket(const int8_t* rtcp_packet,
@ -393,7 +296,7 @@ void ViEReceiver::EstimatedReceiveBandwidth(
// LatestEstimate returns an error if there is no valid bitrate estimate, but
// ViEReceiver instead returns a zero estimate.
remote_bitrate_estimator_->LatestEstimate(&ssrcs, available_bandwidth);
if (std::find(ssrcs.begin(), ssrcs.end(), rtp_receiver_->SSRC()) !=
if (std::find(ssrcs.begin(), ssrcs.end(), rtp_rtcp_->RemoteSSRC()) !=
ssrcs.end()) {
*available_bandwidth /= ssrcs.size();
} else {
@ -401,25 +304,4 @@ void ViEReceiver::EstimatedReceiveBandwidth(
}
}
ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
return rtp_receive_statistics_.get();
}
bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header) const {
bool rtx_enabled = false;
uint32_t rtx_ssrc = 0;
int rtx_payload_type = 0;
rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
if (!rtx_enabled) {
// Check if this is a retransmission.
ReceiveStatistics::RtpReceiveStatistics stats;
if (rtp_receive_statistics_->Statistics(&stats, false)) {
uint16_t min_rtt = 0;
rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
min_rtt);
}
}
return false;
}
} // namespace webrtc


@ -14,7 +14,6 @@
#include <list>
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@ -24,39 +23,23 @@ namespace webrtc {
class CriticalSectionWrapper;
class Encryption;
class ReceiveStatistics;
class RemoteBitrateEstimator;
class RtpDump;
class RtpHeaderParser;
class RTPPayloadRegistry;
class RtpReceiver;
class RtpRtcp;
class VideoCodingModule;
class ViEReceiver : public RtpData {
public:
ViEReceiver(const int32_t channel_id, VideoCodingModule* module_vcm,
RemoteBitrateEstimator* remote_bitrate_estimator,
RtpFeedback* rtp_feedback);
RemoteBitrateEstimator* remote_bitrate_estimator);
~ViEReceiver();
bool SetReceiveCodec(const VideoCodec& video_codec);
bool RegisterPayload(const VideoCodec& video_codec);
bool SetNackStatus(bool enable, int max_nack_reordering_threshold);
void SetRtxStatus(bool enable, uint32_t ssrc);
void SetRtxPayloadType(uint32_t payload_type);
uint32_t GetRemoteSsrc() const;
int GetCsrcs(uint32_t* csrcs) const;
int RegisterExternalDecryption(Encryption* decryption);
int DeregisterExternalDecryption();
void SetRtpRtcpModule(RtpRtcp* module);
RtpReceiver* GetRtpReceiver() const;
void RegisterSimulcastRtpRtcpModules(const std::list<RtpRtcp*>& rtp_modules);
bool SetReceiveTimestampOffsetStatus(bool enable, int id);
@ -71,8 +54,6 @@ class ViEReceiver : public RtpData {
// Receives packets from external transport.
int ReceivedRTPPacket(const void* rtp_packet, int rtp_packet_length);
int ReceivedRTCPPacket(const void* rtcp_packet, int rtcp_packet_length);
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) OVERRIDE;
// Implements RtpData.
virtual int32_t OnReceivedPayloadData(
@ -82,19 +63,13 @@ class ViEReceiver : public RtpData {
void EstimatedReceiveBandwidth(unsigned int* available_bandwidth) const;
ReceiveStatistics* GetReceiveStatistics() const;
private:
int InsertRTPPacket(const int8_t* rtp_packet, int rtp_packet_length);
int InsertRTCPPacket(const int8_t* rtcp_packet, int rtcp_packet_length);
bool IsPacketRetransmitted(const RTPHeader& header) const;
scoped_ptr<CriticalSectionWrapper> receive_cs_;
const int32_t channel_id_;
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
RtpRtcp* rtp_rtcp_;
std::list<RtpRtcp*> rtp_rtcp_simulcast_;
VideoCodingModule* vcm_;


@ -501,7 +501,7 @@ int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod(enable);
vie_encoder->UpdateProtectionMethod();
return 0;
}
@ -542,7 +542,7 @@ int ViERTP_RTCPImpl::SetFECStatus(const int video_channel, const bool enable,
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod(false);
vie_encoder->UpdateProtectionMethod();
return 0;
}
@ -587,7 +587,7 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod(enable);
vie_encoder->UpdateProtectionMethod();
return 0;
}


@ -10,7 +10,6 @@
#include "webrtc/video_engine/vie_sync_module.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@ -25,11 +24,11 @@ namespace webrtc {
enum { kSyncInterval = 1000};
int UpdateMeasurements(StreamSynchronization::Measurements* stream,
const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
stream->latest_timestamp = receiver.TimeStamp();
stream->latest_receive_time_ms = receiver.LastReceivedTimeMs();
const RtpRtcp* rtp_rtcp) {
stream->latest_timestamp = rtp_rtcp->RemoteTimestamp();
stream->latest_receive_time_ms = rtp_rtcp->LocalTimeOfRemoteTimeStamp();
synchronization::RtcpMeasurement measurement;
if (0 != rtp_rtcp.RemoteNTP(&measurement.ntp_secs,
if (0 != rtp_rtcp->RemoteNTP(&measurement.ntp_secs,
&measurement.ntp_frac,
NULL,
NULL,
@ -61,7 +60,6 @@ ViESyncModule::ViESyncModule(VideoCodingModule* vcm,
: data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
vcm_(vcm),
vie_channel_(vie_channel),
video_receiver_(NULL),
video_rtp_rtcp_(NULL),
voe_channel_id_(-1),
voe_sync_interface_(NULL),
@ -74,12 +72,10 @@ ViESyncModule::~ViESyncModule() {
int ViESyncModule::ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module,
RtpReceiver* video_receiver) {
RtpRtcp* video_rtcp_module) {
CriticalSectionScoped cs(data_cs_.get());
voe_channel_id_ = voe_channel_id;
voe_sync_interface_ = voe_sync_interface;
video_receiver_ = video_receiver;
video_rtp_rtcp_ = video_rtcp_module;
sync_.reset(new StreamSynchronization(voe_channel_id, vie_channel_->Id()));
@ -133,21 +129,16 @@ int32_t ViESyncModule::Process() {
playout_buffer_delay_ms;
RtpRtcp* voice_rtp_rtcp = NULL;
RtpReceiver* voice_receiver = NULL;
if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
&voice_receiver)) {
if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, voice_rtp_rtcp)) {
return 0;
}
assert(voice_rtp_rtcp);
assert(voice_receiver);
if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
*video_receiver_) != 0) {
if (UpdateMeasurements(&video_measurement_, video_rtp_rtcp_) != 0) {
return 0;
}
if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
*voice_receiver) != 0) {
if (UpdateMeasurements(&audio_measurement_, voice_rtp_rtcp) != 0) {
return 0;
}


@ -36,8 +36,7 @@ class ViESyncModule : public Module {
int ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module,
RtpReceiver* video_receiver);
RtpRtcp* video_rtcp_module);
int VoiceChannel();
@ -52,7 +51,6 @@ class ViESyncModule : public Module {
scoped_ptr<CriticalSectionWrapper> data_cs_;
VideoCodingModule* vcm_;
ViEChannel* vie_channel_;
RtpReceiver* video_receiver_;
RtpRtcp* video_rtp_rtcp_;
int voe_channel_id_;
VoEVideoSync* voe_sync_interface_;


@ -12,10 +12,6 @@
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
@ -371,8 +367,8 @@ Channel::OnIncomingSSRCChanged(int32_t id,
assert(channel == _channelId);
// Reset RTP-module counters since a new incoming RTP stream is detected
rtp_receive_statistics_->ResetDataCounters();
rtp_receive_statistics_->ResetStatistics();
_rtpRtcpModule->ResetReceiveDataCountersRTP();
_rtpRtcpModule->ResetStatisticsRTP();
if (_rtpObserver)
{
@ -408,10 +404,6 @@ void Channel::OnIncomingCSRCChanged(int32_t id,
}
}
void Channel::OnResetStatistics() {
rtp_receive_statistics_->ResetStatistics();
}
void
Channel::OnApplicationDataReceived(int32_t id,
uint8_t subType,
@ -637,8 +629,9 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
UpdatePacketDelay(rtpHeader->header.timestamp,
rtpHeader->header.sequenceNumber);
if (kNackOff != _rtpRtcpModule->NACK()) { // Is NACK on?
uint16_t round_trip_time = 0;
_rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
_rtpRtcpModule->RTT(_rtpRtcpModule->RemoteSSRC(), &round_trip_time,
NULL, NULL, NULL);
std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
@ -648,6 +641,7 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
// compilers.
ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
}
}
return 0;
}
@ -889,15 +883,6 @@ Channel::Channel(int32_t channelId,
_instanceId(instanceId),
_channelId(channelId),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(
new RTPPayloadRegistry(channelId,
RTPPayloadStrategy::CreateStrategy(true))),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
rtp_receiver_(RtpReceiver::CreateAudioReceiver(
VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
this, this, rtp_payload_registry_.get())),
telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
_audioCodingModule(*AudioCodingModule::Create(
VoEModuleId(instanceId, channelId))),
_rtpDumpIn(*RtpDump::CreateRtpDump()),
@ -997,10 +982,11 @@ Channel::Channel(int32_t channelId,
RtpRtcp::Configuration configuration;
configuration.id = VoEModuleId(instanceId, channelId);
configuration.audio = true;
configuration.incoming_data = this;
configuration.incoming_messages = this;
configuration.outgoing_transport = this;
configuration.rtcp_feedback = this;
configuration.audio_messages = this;
configuration.receive_statistics = rtp_receive_statistics_.get();
_rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
@ -1148,9 +1134,12 @@ Channel::Init()
// disabled by the user.
// After StopListen (when no sockets exists), RTCP packets will no longer
// be transmitted since the Transport object will then be invalid.
telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
// RTCP is enabled by default.
if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
const bool rtpRtcpFail =
((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
// RTCP is enabled by default
(_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
if (rtpRtcpFail)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@ -1181,12 +1170,7 @@ Channel::Init()
{
// Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
(rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) == -1))
(_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId),
@ -1496,7 +1480,12 @@ Channel::StopReceiving()
}
// Recover DTMF detection status.
telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
int32_t ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
if (ret != 0) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"StopReceiving() failed to restore telephone-event status.");
}
RegisterReceiveCodecsToRTPModule();
_receiving = false;
return 0;
@ -1748,15 +1737,10 @@ Channel::SetRecPayloadType(const CodecInst& codec)
CodecInst rxCodec = codec;
// Get payload type for the given codec
rtp_payload_registry_->ReceivePayloadType(
rxCodec.plname,
rxCodec.plfreq,
rxCodec.channels,
(rxCodec.rate < 0) ? 0 : rxCodec.rate,
&pltype);
_rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
rxCodec.pltype = pltype;
if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR,
@ -1775,21 +1759,11 @@ Channel::SetRecPayloadType(const CodecInst& codec)
return 0;
}
if (rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) != 0)
if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
{
// First attempt to register failed => de-register and try again
rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
if (rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) != 0)
_rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@ -1817,12 +1791,7 @@ Channel::GetRecPayloadType(CodecInst& codec)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetRecPayloadType()");
int8_t payloadType(-1);
if (rtp_payload_registry_->ReceivePayloadType(
codec.plname,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate,
&payloadType) != 0)
if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
@ -2196,10 +2165,9 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
// Deliver RTP packet to RTP/RTCP module for parsing
// The packet will be pushed back to the channel thru the
// OnReceivedPayloadData callback so we don't push it to the ACM here
if (!rtp_receiver_->IncomingRtpPacket(&header,
reinterpret_cast<const uint8_t*>(data),
if (_rtpRtcpModule->IncomingRtpPacket(reinterpret_cast<const uint8_t*>(data),
static_cast<uint16_t>(length),
payload_specific, in_order)) {
header) == -1) {
_engineStatisticsPtr->SetLastError(
VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
"Channel::IncomingRTPPacket() RTP packet is invalid");
@ -2207,24 +2175,6 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
return 0;
}
bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
bool rtx_enabled = false;
uint32_t rtx_ssrc = 0;
int rtx_payload_type = 0;
rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
if (!rtx_enabled) {
// Check if this is a retransmission.
ReceiveStatistics::RtpReceiveStatistics stats;
if (rtp_receive_statistics_->Statistics(&stats, false)) {
uint16_t min_rtt = 0;
_rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
min_rtt);
}
}
return false;
}
int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ReceivedRTCPPacket()");
@ -2249,6 +2199,141 @@ int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
return 0;
}
int32_t
Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetPacketTimeoutNotification()");
if (enable)
{
const uint32_t RTPtimeoutMS = 1000*timeoutSeconds;
const uint32_t RTCPtimeoutMS = 0;
_rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
_rtpPacketTimeOutIsEnabled = true;
_rtpTimeOutSeconds = timeoutSeconds;
}
else
{
_rtpRtcpModule->SetPacketTimeout(0, 0);
_rtpPacketTimeOutIsEnabled = false;
_rtpTimeOutSeconds = 0;
}
return 0;
}
int32_t
Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetPacketTimeoutNotification()");
enabled = _rtpPacketTimeOutIsEnabled;
if (enabled)
{
timeoutSeconds = _rtpTimeOutSeconds;
}
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
"GetPacketTimeoutNotification() => enabled=%d,"
" timeoutSeconds=%d",
enabled, timeoutSeconds);
return 0;
}
int32_t
Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::RegisterDeadOrAliveObserver()");
CriticalSectionScoped cs(&_callbackCritSect);
if (_connectionObserverPtr)
{
_engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
"RegisterDeadOrAliveObserver() observer already enabled");
return -1;
}
_connectionObserverPtr = &observer;
_connectionObserver = true;
return 0;
}
int32_t
Channel::DeRegisterDeadOrAliveObserver()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterDeadOrAliveObserver()");
CriticalSectionScoped cs(&_callbackCritSect);
if (!_connectionObserverPtr)
{
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"DeRegisterDeadOrAliveObserver() observer already disabled");
return 0;
}
_connectionObserver = false;
_connectionObserverPtr = NULL;
return 0;
}
int32_t
Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetPeriodicDeadOrAliveStatus()");
if (!_connectionObserverPtr)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
"SetPeriodicDeadOrAliveStatus() connection observer has"
" not been registered");
}
if (enable)
{
ResetDeadOrAliveCounters();
}
bool enabled(false);
uint8_t currentSampleTimeSec(0);
// Store last state (will be used later if dead-or-alive is disabled).
_rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
// Update the dead-or-alive state.
if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
enable, (uint8_t)sampleTimeSeconds) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR,
kTraceError,
"SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
"status");
return -1;
}
if (!enable)
{
// Restore last utilized sample time.
// Without this, the sample time would always be reset to the default
// (2 sec) each time dead-or-alive was disabled without a sample-time
// parameter.
_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
currentSampleTimeSec);
}
return 0;
}
int32_t
Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
{
_rtpRtcpModule->PeriodicDeadOrAliveStatus(
enabled,
(uint8_t&)sampleTimeSeconds);
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
"GetPeriodicDeadOrAliveStatus() => enabled=%d,"
" sampleTimeSeconds=%d",
enabled, sampleTimeSeconds);
return 0;
}
int Channel::StartPlayingFileLocally(const char* fileName,
bool loop,
FileFormats format,
@ -3587,7 +3672,7 @@ Channel::GetLocalSSRC(unsigned int& ssrc)
int
Channel::GetRemoteSSRC(unsigned int& ssrc)
{
ssrc = rtp_receiver_->SSRC();
ssrc = _rtpRtcpModule->RemoteSSRC();
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"GetRemoteSSRC() => ssrc=%lu", ssrc);
@ -3735,7 +3820,7 @@ Channel::GetRemoteRTCP_CNAME(char cName[256])
return -1;
}
char cname[RTCP_CNAME_SIZE];
const uint32_t remoteSSRC = rtp_receiver_->SSRC();
const uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
{
_engineStatisticsPtr->SetLastError(
@ -3810,7 +3895,7 @@ Channel::GetRemoteRTCPData(
return -1;
}
uint32_t remoteSSRC = rtp_receiver_->SSRC();
uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
for (; it != remote_stats.end(); ++it) {
if (it->remoteSSRC == remoteSSRC)
@ -3902,11 +3987,20 @@ Channel::GetRTPStatistics(
unsigned int& maxJitterMs,
unsigned int& discardedPackets)
{
uint8_t fraction_lost(0);
uint32_t cum_lost(0);
uint32_t ext_max(0);
uint32_t jitter(0);
uint32_t max_jitter(0);
// The jitter statistics is updated for each received RTP packet and is
// based on received packets.
ReceiveStatistics::RtpReceiveStatistics statistics;
if (!rtp_receive_statistics_->Statistics(
&statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
@ -3918,8 +4012,8 @@ Channel::GetRTPStatistics(
if (playoutFrequency > 0)
{
// Scale RTP statistics given the current playout frequency
maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
maxJitterMs = max_jitter / (playoutFrequency / 1000);
averageJitterMs = jitter / (playoutFrequency / 1000);
}
discardedPackets = _numberOfDiscardedPackets;
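
For reference, the scaling above converts jitter from RTP timestamp units to milliseconds: playoutFrequency / 1000 is the number of timestamp ticks per millisecond. A stand-alone version of the same arithmetic follows; the helper name is invented for this note and is not part of the module.

#include <stdint.h>

// Same conversion as in GetRTPStatistics(): jitter is reported in RTP
// timestamp units, so dividing by ticks-per-millisecond yields milliseconds.
static unsigned int JitterToMilliseconds(uint32_t jitter_timestamp_units,
                                         int playout_frequency_hz) {
  if (playout_frequency_hz <= 0)
    return 0;
  return jitter_timestamp_units / (playout_frequency_hz / 1000);
}
// Example: with a 16 kHz playout clock, a jitter of 800 timestamp units
// maps to 800 / 16 = 50 ms.
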
@ -3995,23 +4089,32 @@ int Channel::GetRemoteRTCPReportBlocks(
int
Channel::GetRTPStatistics(CallStatistics& stats)
{
uint8_t fraction_lost(0);
uint32_t cum_lost(0);
uint32_t ext_max(0);
uint32_t jitter(0);
uint32_t max_jitter(0);
// --- Part one of the final structure (four values)
// The jitter statistics is updated for each received RTP packet and is
// based on received packets.
ReceiveStatistics::RtpReceiveStatistics statistics;
if (!rtp_receive_statistics_->Statistics(
&statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
"RTP/RTCP module");
}
stats.fractionLost = statistics.fraction_lost;
stats.cumulativeLost = statistics.cumulative_lost;
stats.extendedMax = statistics.extended_max_sequence_number;
stats.jitterSamples = statistics.jitter;
stats.fractionLost = fraction_lost;
stats.cumulativeLost = cum_lost;
stats.extendedMax = ext_max;
stats.jitterSamples = jitter;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId, _channelId),
@ -4033,7 +4136,7 @@ Channel::GetRTPStatistics(CallStatistics& stats)
} else
{
// The remote SSRC will be zero if no RTP packet has been received.
uint32_t remoteSSRC = rtp_receiver_->SSRC();
uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
if (remoteSSRC > 0)
{
uint16_t avgRTT(0);
@ -4070,10 +4173,10 @@ Channel::GetRTPStatistics(CallStatistics& stats)
uint32_t bytesReceived(0);
uint32_t packetsReceived(0);
rtp_receive_statistics_->GetDataCounters(&bytesReceived, &packetsReceived);
if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
&packetsSent) != 0)
&packetsSent,
&bytesReceived,
&packetsReceived) != 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId, _channelId),
@ -4155,7 +4258,7 @@ Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
// None of these functions can fail.
_rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
_rtpRtcpModule->SetNACKStatus(enable ? kNackRtcp : kNackOff,
maxNumberOfPackets);
if (enable)
_audioCodingModule.EnableNack(maxNumberOfPackets);
@ -4539,7 +4642,7 @@ Channel::ResetRTCPStatistics()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ResetRTCPStatistics()");
uint32_t remoteSSRC(0);
remoteSSRC = rtp_receiver_->SSRC();
remoteSSRC = _rtpRtcpModule->RemoteSSRC();
return _rtpRtcpModule->ResetRTT(remoteSSRC);
}
@ -4568,7 +4671,7 @@ Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
uint16_t maxRTT;
uint16_t minRTT;
// The remote SSRC will be zero if no RTP packet has been received.
remoteSSRC = rtp_receiver_->SSRC();
remoteSSRC = _rtpRtcpModule->RemoteSSRC();
if (remoteSSRC == 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
@ -4778,12 +4881,11 @@ Channel::SetInitSequenceNumber(short sequenceNumber)
}
int
Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetRtpRtcp()");
*rtpRtcpModule = _rtpRtcpModule.get();
*rtp_receiver = rtp_receiver_.get();
rtpRtcpModule = _rtpRtcpModule.get();
return 0;
}
@ -5003,6 +5105,15 @@ Channel::UpdateDeadOrAliveCounters(bool alive)
int
Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
{
bool enabled;
uint8_t timeSec;
_rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
if (!enabled)
return (-1);
countDead = static_cast<int> (_countDeadDetections);
countAlive = static_cast<int> (_countAliveDetections);
return 0;
}
@ -5102,12 +5213,7 @@ Channel::RegisterReceiveCodecsToRTPModule()
{
// Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
(rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) == -1))
(_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
{
WEBRTC_TRACE(
kTraceWarning,


@ -35,21 +35,16 @@
namespace webrtc
{
class AudioDeviceModule;
class CriticalSectionWrapper;
class FileWrapper;
class ProcessThread;
class ReceiveStatistics;
class RtpDump;
class RTPPayloadRegistry;
class RtpReceiver;
class RTPReceiverAudio;
class AudioDeviceModule;
class RtpRtcp;
class TelephoneEventHandler;
class VoEMediaProcess;
class VoERTCPObserver;
class VoERTPObserver;
class FileWrapper;
class RtpDump;
class VoiceEngineObserver;
class VoEMediaProcess;
class VoERTPObserver;
class VoERTCPObserver;
struct CallStatistics;
struct ReportBlock;
@ -138,6 +133,12 @@ public:
int32_t DeRegisterExternalTransport();
int32_t ReceivedRTPPacket(const int8_t* data, int32_t length);
int32_t ReceivedRTCPPacket(const int8_t* data, int32_t length);
int32_t SetPacketTimeoutNotification(bool enable, int timeoutSeconds);
int32_t GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds);
int32_t RegisterDeadOrAliveObserver(VoEConnectionObserver& observer);
int32_t DeRegisterDeadOrAliveObserver();
int32_t SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds);
int32_t GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds);
// VoEFile
int StartPlayingFileLocally(const char* fileName, bool loop,
@ -214,7 +215,7 @@ public:
int SetInitSequenceNumber(short sequenceNumber);
// VoEVideoSyncExtended
int GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const;
int GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const;
// VoEEncryption
int RegisterExternalEncryption(Encryption& encryption);
@ -306,11 +307,6 @@ public:
uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader);
bool OnRecoveredPacket(const uint8_t* packet, int packet_length) {
// Generic FEC not supported for audio.
return true;
}
public:
// From RtpFeedback in the RTP/RTCP module
int32_t OnInitializeDecoder(
@ -334,8 +330,6 @@ public:
void OnIncomingCSRCChanged(int32_t id,
uint32_t CSRC, bool added);
void OnResetStatistics();
public:
// From RtcpFeedback in the RTP/RTCP module
void OnApplicationDataReceived(int32_t id,
@ -432,7 +426,6 @@ public:
uint32_t EncodeAndSend();
private:
bool IsPacketRetransmitted(const RTPHeader& header) const;
int ResendPackets(const uint16_t* sequence_numbers, int length);
int InsertInbandDtmfTone();
int32_t MixOrReplaceAudioWithFile(int mixingFrequency);
@ -453,10 +446,6 @@ private:
private:
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
scoped_ptr<RtpReceiver> rtp_receiver_;
TelephoneEventHandler* telephone_event_handler_;
scoped_ptr<RtpRtcp> _rtpRtcpModule;
AudioCodingModule& _audioCodingModule;
RtpDump& _rtpDumpIn;


@ -89,6 +89,31 @@ public:
virtual int ReceivedRTCPPacket(
int channel, const void* data, unsigned int length) = 0;
// Enables or disables warnings that report if packets have not been
// received in |timeoutSeconds| seconds for a specific |channel|.
virtual int SetPacketTimeoutNotification(
int channel, bool enable, int timeoutSeconds = 2) = 0;
// Gets the current time-out notification status.
virtual int GetPacketTimeoutNotification(
int channel, bool& enabled, int& timeoutSeconds) = 0;
// Installs the observer class implementation for a specified |channel|.
virtual int RegisterDeadOrAliveObserver(
int channel, VoEConnectionObserver& observer) = 0;
// Removes the observer class implementation for a specified |channel|.
virtual int DeRegisterDeadOrAliveObserver(int channel) = 0;
// Enables or disables the periodic dead-or-alive callback functionality
// for a specified |channel|.
virtual int SetPeriodicDeadOrAliveStatus(
int channel, bool enable, int sampleTimeSeconds = 2) = 0;
// Gets the current dead-or-alive notification status.
virtual int GetPeriodicDeadOrAliveStatus(
int channel, bool& enabled, int& sampleTimeSeconds) = 0;
protected:
VoENetwork() {}
virtual ~VoENetwork() {}
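
The declarations above make up the packet-timeout and dead-or-alive surface of VoENetwork. A minimal usage sketch, assuming a VoiceEngine instance and a channel created by the application, plus an observer implementing VoEConnectionObserver; the GetInterface/Release pattern and the include path follow the usual VoE conventions and are not shown in this diff.

#include "webrtc/voice_engine/include/voe_network.h"  // Assumed path.

// Sketch only; error handling trimmed. |voe|, |channel| and |observer| are
// assumed to come from the application's existing setup.
void EnableConnectionMonitoring(webrtc::VoiceEngine* voe,
                                int channel,
                                webrtc::VoEConnectionObserver& observer) {
  webrtc::VoENetwork* network = webrtc::VoENetwork::GetInterface(voe);
  if (!network)
    return;
  // Report a timeout after 2 seconds without incoming RTP on this channel.
  network->SetPacketTimeoutNotification(channel, true, 2);
  // Deliver a dead-or-alive verdict to |observer| once per second.
  network->RegisterDeadOrAliveObserver(channel, observer);
  network->SetPeriodicDeadOrAliveStatus(channel, true, 1);
  // Release only drops this reference to the sub-API; the observer stays
  // registered on the channel.
  network->Release();
}
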


@ -37,7 +37,6 @@
namespace webrtc {
class RtpReceiver;
class RtpRtcp;
class VoiceEngine;
@ -93,8 +92,7 @@ public:
// Get the received RTP timestamp
virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp) = 0;
virtual int GetRtpRtcp (int channel, RtpRtcp** rtpRtcpModule,
RtpReceiver** rtp_receiver) = 0;
virtual int GetRtpRtcp (int channel, RtpRtcp* &rtpRtcpModule) = 0;
protected:
VoEVideoSync() { }


@ -54,6 +54,29 @@ TEST_F(CallReportTest, DISABLED_GetRoundTripTimesReturnsValuesIfRtcpIsOn) {
EXPECT_NE(-1, delays.max);
}
TEST_F(CallReportTest, DeadOrAliveSummaryFailsIfDeadOrAliveTrackingNotActive) {
int count_the_dead;
int count_the_living;
EXPECT_EQ(-1, voe_call_report_->GetDeadOrAliveSummary(channel_,
count_the_dead,
count_the_living));
}
TEST_F(CallReportTest,
DeadOrAliveSummarySucceedsIfDeadOrAliveTrackingIsActive) {
EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
Sleep(1200);
int count_the_dead;
int count_the_living;
EXPECT_EQ(0, voe_call_report_->GetDeadOrAliveSummary(channel_,
count_the_dead,
count_the_living));
EXPECT_GE(count_the_dead, 0);
EXPECT_GE(count_the_living, 0);
}
TEST_F(CallReportTest, WriteReportToFileFailsOnBadInput) {
EXPECT_EQ(-1, voe_call_report_->WriteReportToFile(NULL));
}


@ -23,6 +23,100 @@ class NetworkTest : public AfterStreamingFixture {
using ::testing::Between;
TEST_F(NetworkTest,
CallsObserverOnTimeoutAndRestartWhenPacketTimeoutNotificationIsEnabled) {
// First, get rid of the default, asserting observer and install our observer.
EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
webrtc::MockVoEObserver mock_observer;
EXPECT_EQ(0, voe_base_->RegisterVoiceEngineObserver(mock_observer));
// Define expectations.
int expected_error = VE_RECEIVE_PACKET_TIMEOUT;
EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
.Times(1);
expected_error = VE_PACKET_RECEIPT_RESTARTED;
EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
.Times(1);
// Get some speech going.
Sleep(500);
// Enable packet timeout.
EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
// Trigger a timeout.
EXPECT_EQ(0, voe_base_->StopSend(channel_));
Sleep(1500);
// Trigger a restart event.
EXPECT_EQ(0, voe_base_->StartSend(channel_));
Sleep(500);
}
TEST_F(NetworkTest, DoesNotCallDeRegisteredObserver) {
// De-register the default observer. This test will fail if the observer gets
// called for any reason, so if this de-register doesn't work the test will
// fail.
EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
// Get some speech going.
Sleep(500);
// Enable packet timeout.
EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
// Trigger a timeout.
EXPECT_EQ(0, voe_base_->StopSend(channel_));
Sleep(1500);
}
// TODO(phoglund): flaky on Linux
TEST_F(NetworkTest,
DISABLED_ON_LINUX(DeadOrAliveObserverSeesAliveMessagesIfEnabled)) {
if (!FLAGS_include_timing_dependent_tests) {
TEST_LOG("Skipping test - running in slow execution environment...\n");
return;
}
webrtc::MockVoeConnectionObserver mock_observer;
EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
channel_, mock_observer));
// We should be called about 4 times in four seconds, but 3 is OK too.
EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, true))
.Times(Between(3, 4));
EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
Sleep(4000);
EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
}
TEST_F(NetworkTest, DeadOrAliveObserverSeesDeadMessagesIfEnabled) {
if (!FLAGS_include_timing_dependent_tests) {
TEST_LOG("Skipping test - running in slow execution environment...\n");
return;
}
// "When do you see them?" - "All the time!"
webrtc::MockVoeConnectionObserver mock_observer;
EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
channel_, mock_observer));
Sleep(500);
// We should be called about 4 times in four seconds, but 3 is OK too.
EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, false))
.Times(Between(3, 4));
EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, false));
EXPECT_EQ(0, voe_base_->StopSend(channel_));
Sleep(4000);
EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
}
TEST_F(NetworkTest, CanSwitchToExternalTransport) {
EXPECT_EQ(0, voe_base_->StopReceive(channel_));
EXPECT_EQ(0, voe_base_->DeleteChannel(channel_));


@ -1158,6 +1158,24 @@ int VoEExtendedTest::TestCallReport() {
ANL();
*/
int nDead = 0;
int nAlive = 0;
TEST(GetDeadOrAliveSummary);
ANL();
// All results should be -1 since dead-or-alive is not active
TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive) != -1);
MARK();
TEST_MUSTPASS(voe_network ->SetPeriodicDeadOrAliveStatus(0, true, 1));
SleepMs(2000);
// All results should be >= 0 since dead-or-alive is active
TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive));
MARK();
TEST_MUSTPASS(nDead == -1);
TEST_MUSTPASS(nAlive == -1);
TEST_MUSTPASS(voe_network ->SetPeriodicDeadOrAliveStatus(0, false));
AOK();
ANL();
TEST(WriteReportToFile);
ANL();
@ -4165,8 +4183,218 @@ int VoEExtendedTest::TestNetwork() {
// >> end of SetExternalTransport
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// >> RegisterDeadOrAliveObserver
// >> DeRegisterDeadOrAliveObserver
//
// - VE initialized
// - no existing channels
// - no media
TEST(RegisterDeadOrAliveObserver);
ANL();
TEST(DeRegisterDeadOrAliveObserver);
ANL();
// call without valid channel
TEST_MUSTPASS(!voe_network ->RegisterDeadOrAliveObserver(0, *this));
MARK();
TEST_ERROR(VE_CHANNEL_NOT_VALID);
TEST_MUSTPASS(voe_base_->CreateChannel());
TEST_MUSTPASS(voe_network ->RegisterDeadOrAliveObserver(0, *this));
MARK();
TEST_MUSTPASS(!voe_network ->RegisterDeadOrAliveObserver(0, *this));
MARK(); // already registered
TEST_ERROR(VE_INVALID_OPERATION);
TEST_MUSTPASS(voe_network ->DeRegisterDeadOrAliveObserver(0));
MARK();
TEST_MUSTPASS(voe_network ->DeRegisterDeadOrAliveObserver(0));
MARK(); // OK to do it again
TEST_MUSTPASS(voe_network ->RegisterDeadOrAliveObserver(0, *this));
MARK();
TEST_MUSTPASS(voe_network ->DeRegisterDeadOrAliveObserver(0));
MARK();
TEST_MUSTPASS(voe_base_->DeleteChannel(0));
// STATE: dead-or-alive observer is disabled
// >> end of RegisterDeadOrAliveObserver
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// >> SetPeriodicDeadOrAliveStatus
// >> GetPeriodicDeadOrAliveStatus
//
// - VE initialized
// - no existing channels
// - no media
// call without valid channel
TEST_MUSTPASS(!voe_network->SetPeriodicDeadOrAliveStatus(0, false));
MARK();
TEST_ERROR(VE_CHANNEL_NOT_VALID);
TEST_MUSTPASS(voe_base_->CreateChannel());
// Invalid parameters
TEST_MUSTPASS(!voe_network->SetPeriodicDeadOrAliveStatus(0, true, 0));
MARK();
TEST_ERROR(VE_INVALID_ARGUMENT);
TEST_MUSTPASS(!voe_network->SetPeriodicDeadOrAliveStatus(0, true, 151));
MARK();
TEST_ERROR(VE_INVALID_ARGUMENT);
TEST_MUSTPASS(!voe_network->SetPeriodicDeadOrAliveStatus(1, true, 10));
MARK();
TEST_ERROR(VE_CHANNEL_NOT_VALID);
int sampleTime(0);
bool enabled;
// Valid parameters
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, true, 1));
MARK();
TEST_MUSTPASS(
voe_network->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
TEST_MUSTPASS(enabled != true);
TEST_MUSTPASS(sampleTime != 1);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, true, 150));
MARK();
TEST_MUSTPASS(
voe_network->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
TEST_MUSTPASS(enabled != true);
TEST_MUSTPASS(sampleTime != 150);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, false));
MARK();
TEST_MUSTPASS(
voe_network->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
TEST_MUSTPASS(enabled != false);
TEST_MUSTPASS(sampleTime != 150); // ensure last set time isn't modified
StartMedia(0, 2000, true, true, true);
// STATE: full duplex media is active
// test the dead-or-alive mechanism
TEST_MUSTPASS(voe_network->RegisterDeadOrAliveObserver(0, *this));
MARK();
TEST_LOG("\nVerify that Alive callbacks are received (dT=2sec): ");
fflush(NULL);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, true, 2));
SleepMs(6000);
TEST_LOG("\nChange dT to 1 second: ");
fflush(NULL);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, true, 1));
SleepMs(6000);
TEST_LOG("\nDisable dead-or-alive callbacks: ");
fflush(NULL);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, false));
SleepMs(6000);
TEST_LOG("\nStop sending and enable callbacks again.\n");
TEST_LOG("Verify that Dead callbacks are received (dT=2sec): ");
fflush(NULL);
TEST_MUSTPASS(voe_base_->StopSend(0));
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, true, 2));
SleepMs(6000);
TEST_MUSTPASS(voe_base_->StartSend(0));
TEST_LOG("\nRestart sending.\n");
TEST_LOG("Verify that Alive callbacks are received again (dT=2sec): ");
fflush(NULL);
SleepMs(6000);
TEST_LOG("\nDisable dead-or-alive callbacks.");
fflush(NULL);
TEST_MUSTPASS(voe_network->SetPeriodicDeadOrAliveStatus(0, false));
TEST_MUSTPASS(voe_network->DeRegisterDeadOrAliveObserver(0));
MARK();
StopMedia(0);
TEST_MUSTPASS(voe_base_->DeleteChannel(0));
ANL();
AOK();
ANL();
ANL();
// >> end of SetPeriodicDeadOrAliveStatus
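Condensing the Set/Get pair probed above: the sample time is accepted in the 1..150 second range, and disabling keeps the last configured value. A short sketch under those assumptions; the helper name and include path are illustrative.

#include "webrtc/voice_engine/include/voe_network.h"  // assumed include path

// Sketch: Set/GetPeriodicDeadOrAliveStatus round trip.
void DemoPeriodicStatus(webrtc::VoENetwork* voe_network) {
  bool enabled = false;
  int sample_time_s = 0;
  voe_network->SetPeriodicDeadOrAliveStatus(0, true, 150);
  voe_network->GetPeriodicDeadOrAliveStatus(0, enabled, sample_time_s);  // true, 150
  voe_network->SetPeriodicDeadOrAliveStatus(0, false);
  voe_network->GetPeriodicDeadOrAliveStatus(0, enabled, sample_time_s);  // false, still 150
}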
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// >> SetPacketTimeoutNotification
// >> GetPacketTimeoutNotification
//
// - VE initialized
// - no existing channels
// - no media
// - NOTE: dynamic tests are performed in standard test
int timeOut(0);
TEST(SetPacketTimeoutNotification);
ANL();
TEST(GetPacketTimeoutNotification);
ANL();
// call without existing valid channel
TEST_MUSTPASS(!voe_network->SetPacketTimeoutNotification(0, false));
MARK();
TEST_ERROR(VE_CHANNEL_NOT_VALID);
TEST_MUSTPASS(voe_base_->CreateChannel());
// invalid function calls
TEST_MUSTPASS(!voe_network->SetPacketTimeoutNotification(0, true, 0));
MARK();
TEST_ERROR(VE_INVALID_ARGUMENT);
TEST_MUSTPASS(!voe_network->SetPacketTimeoutNotification(0, true, 151));
MARK();
TEST_ERROR(VE_INVALID_ARGUMENT);
// valid function calls (no active media)
TEST_MUSTPASS(voe_network->SetPacketTimeoutNotification(0, true, 2));
MARK();
TEST_MUSTPASS(voe_network->GetPacketTimeoutNotification(0, enabled,
timeOut));
MARK();
TEST_MUSTPASS(enabled != true);
TEST_MUSTPASS(timeOut != 2);
TEST_MUSTPASS(voe_network->SetPacketTimeoutNotification(0, false));
MARK();
TEST_MUSTPASS(voe_network->GetPacketTimeoutNotification(0, enabled,
timeOut));
MARK();
TEST_MUSTPASS(enabled != false);
TEST_MUSTPASS(voe_network->SetPacketTimeoutNotification(0, true, 10));
MARK();
TEST_MUSTPASS(voe_network->GetPacketTimeoutNotification(0, enabled,
timeOut));
MARK();
TEST_MUSTPASS(enabled != true);
TEST_MUSTPASS(timeOut != 10);
TEST_MUSTPASS(voe_network->SetPacketTimeoutNotification(0, true, 2));
MARK();
TEST_MUSTPASS(voe_network->GetPacketTimeoutNotification(0, enabled,
timeOut));
MARK();
TEST_MUSTPASS(enabled != true);
TEST_MUSTPASS(timeOut != 2);
TEST_MUSTPASS(voe_network->SetPacketTimeoutNotification(0, false));
MARK();
TEST_MUSTPASS(voe_network->GetPacketTimeoutNotification(0, enabled,
timeOut));
MARK();
TEST_MUSTPASS(enabled != false);
TEST_MUSTPASS(voe_base_->DeleteChannel(0));
ANL();
AOK();
ANL();
ANL();
return 0;
}
// >> end of SetPacketTimeoutNotification
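The packet-timeout API follows the same shape; per the checks above, timeouts outside 1..150 seconds are rejected with VE_INVALID_ARGUMENT. A brief sketch; the helper name and include path are illustrative.

#include "webrtc/voice_engine/include/voe_network.h"  // assumed include path

// Sketch: Set/GetPacketTimeoutNotification round trip.
void DemoPacketTimeout(webrtc::VoENetwork* voe_network) {
  bool enabled = false;
  int timeout_s = 0;
  voe_network->SetPacketTimeoutNotification(0, true, 10);
  voe_network->GetPacketTimeoutNotification(0, enabled, timeout_s);  // true, 10
  voe_network->SetPacketTimeoutNotification(0, false);               // disable notifications
}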
// ------------------------------------------------------------------------
// ----------------------------------------------------------------------------
// VoEExtendedTest::TestRTP_RTCP

View File

@ -165,4 +165,158 @@ int VoENetworkImpl::ReceivedRTCPPacket(int channel, const void* data,
}
return channelPtr->ReceivedRTCPPacket((const int8_t*) data, length);
}
int VoENetworkImpl::SetPacketTimeoutNotification(int channel,
bool enable,
int timeoutSeconds)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetPacketTimeoutNotification(channel=%d, enable=%d, "
"timeoutSeconds=%d)",
channel, (int) enable, timeoutSeconds);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
if (enable &&
((timeoutSeconds < kVoiceEngineMinPacketTimeoutSec) ||
(timeoutSeconds > kVoiceEngineMaxPacketTimeoutSec)))
{
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetPacketTimeoutNotification() invalid timeout size");
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetPacketTimeoutNotification() failed to locate channel");
return -1;
}
return channelPtr->SetPacketTimeoutNotification(enable, timeoutSeconds);
}
int VoENetworkImpl::GetPacketTimeoutNotification(int channel,
bool& enabled,
int& timeoutSeconds)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetPacketTimeoutNotification(channel=%d, enabled=?,"
" timeoutSeconds=?)", channel);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetPacketTimeoutNotification() failed to locate channel");
return -1;
}
return channelPtr->GetPacketTimeoutNotification(enabled, timeoutSeconds);
}
int VoENetworkImpl::RegisterDeadOrAliveObserver(int channel,
VoEConnectionObserver&
observer)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"RegisterDeadOrAliveObserver(channel=%d, observer=0x%x)",
channel, &observer);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"RegisterDeadOrAliveObserver() failed to locate channel");
return -1;
}
return channelPtr->RegisterDeadOrAliveObserver(observer);
}
int VoENetworkImpl::DeRegisterDeadOrAliveObserver(int channel)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"DeRegisterDeadOrAliveObserver(channel=%d)", channel);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"DeRegisterDeadOrAliveObserver() failed to locate channel");
return -1;
}
return channelPtr->DeRegisterDeadOrAliveObserver();
}
int VoENetworkImpl::SetPeriodicDeadOrAliveStatus(int channel, bool enable,
int sampleTimeSeconds)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetPeriodicDeadOrAliveStatus(channel=%d, enable=%d,"
" sampleTimeSeconds=%d)",
channel, enable, sampleTimeSeconds);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
if (enable &&
((sampleTimeSeconds < kVoiceEngineMinSampleTimeSec) ||
(sampleTimeSeconds > kVoiceEngineMaxSampleTimeSec)))
{
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetPeriodicDeadOrAliveStatus() invalid sample time");
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetPeriodicDeadOrAliveStatus() failed to locate channel");
return -1;
}
return channelPtr->SetPeriodicDeadOrAliveStatus(enable, sampleTimeSeconds);
}
int VoENetworkImpl::GetPeriodicDeadOrAliveStatus(int channel,
bool& enabled,
int& sampleTimeSeconds)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetPeriodicDeadOrAliveStatus(channel=%d, enabled=?,"
" sampleTimeSeconds=?)", channel);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetPeriodicDeadOrAliveStatus() failed to locate channel");
return -1;
}
return channelPtr->GetPeriodicDeadOrAliveStatus(enabled,
sampleTimeSeconds);
}
} // namespace webrtc
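These implementation methods are reached through the public VoENetwork interface. The sketch below uses the conventional VoiceEngine Create/GetInterface/Release pattern and is illustrative only; the include paths and helper name are assumptions, not part of this diff.

#include "webrtc/voice_engine/include/voe_base.h"     // assumed include paths
#include "webrtc/voice_engine/include/voe_network.h"

// Sketch: reaching the VoENetworkImpl methods above via the public API.
void DemoVoENetworkBootstrap() {
  webrtc::VoiceEngine* voe = webrtc::VoiceEngine::Create();
  webrtc::VoEBase* base = webrtc::VoEBase::GetInterface(voe);
  webrtc::VoENetwork* network = webrtc::VoENetwork::GetInterface(voe);
  base->Init();
  int channel = base->CreateChannel();
  network->SetPeriodicDeadOrAliveStatus(channel, true, 2);  // 2 s period
  // ... run the call ...
  base->DeleteChannel(channel);
  base->Terminate();
  network->Release();
  base->Release();
  webrtc::VoiceEngine::Delete(voe);
}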

View File

@ -34,6 +34,27 @@ public:
const void* data,
unsigned int length);
virtual int SetPacketTimeoutNotification(int channel,
bool enable,
int timeoutSeconds = 2);
virtual int GetPacketTimeoutNotification(int channel,
bool& enabled,
int& timeoutSeconds);
virtual int RegisterDeadOrAliveObserver(int channel,
VoEConnectionObserver& observer);
virtual int DeRegisterDeadOrAliveObserver(int channel);
virtual int SetPeriodicDeadOrAliveStatus(int channel,
bool enable,
int sampleTimeSeconds = 2);
virtual int GetPeriodicDeadOrAliveStatus(int channel,
bool& enabled,
int& sampleTimeSeconds);
protected:
VoENetworkImpl(voe::SharedData* shared);
virtual ~VoENetworkImpl();

View File

@ -216,8 +216,7 @@ int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)
return 0;
}
int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp** rtpRtcpModule,
RtpReceiver** rtp_receiver)
int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetRtpRtcp(channel=%i)", channel);
@ -235,7 +234,7 @@ int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp** rtpRtcpModule,
"GetPlayoutTimestamp() failed to locate channel");
return -1;
}
return channelPtr->GetRtpRtcp(rtpRtcpModule, rtp_receiver);
return channelPtr->GetRtpRtcp(rtpRtcpModule);
}
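For callers of the reverted accessor, a minimal sketch of fetching the channel's RtpRtcp module; the interface pointer, channel and helper name are assumed for illustration.

#include "webrtc/voice_engine/include/voe_video_sync.h"  // assumed include path

// Sketch: the reverted signature returns the module through a reference.
void DemoGetRtpRtcp(webrtc::VoEVideoSync* voe_video_sync, int channel) {
  webrtc::RtpRtcp* rtp_rtcp_module = NULL;
  if (voe_video_sync->GetRtpRtcp(channel, rtp_rtcp_module) == 0 &&
      rtp_rtcp_module != NULL) {
    // The module can now be used, e.g. for audio/video sync bookkeeping.
  }
}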
int VoEVideoSyncImpl::GetLeastRequiredDelayMs(int channel) const {

View File

@ -38,8 +38,7 @@ public:
virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp);
virtual int GetRtpRtcp(int channel, RtpRtcp** rtpRtcpModule,
RtpReceiver** rtp_receiver);
virtual int GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule);
protected:
VoEVideoSyncImpl(voe::SharedData* shared);