Update talk to 50918584.

Together with Stefan's http://review.webrtc.org/1960004/.

R=mallinath@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2048004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4556 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
wu@webrtc.org 2013-08-15 23:38:54 +00:00
parent dde7d4c6ed
commit 822fbd8b68
108 changed files with 2926 additions and 4301 deletions

View File

@ -98,6 +98,7 @@ bool DataChannel::HasNegotiationCompleted() {
DataChannel::~DataChannel() {
ClearQueuedReceivedData();
ClearQueuedSendData();
ClearQueuedControlData();
}
void DataChannel::RegisterObserver(DataChannelObserver* observer) {
@ -250,14 +251,16 @@ void DataChannel::OnChannelReady(bool writable) {
if (!writable) {
return;
}
// Update the readyState if the channel is writable for the first time;
// otherwise it means the channel was blocked for sending and now unblocked,
// so send the queued data now.
// Update the readyState and send the queued control message if the channel
// is writable for the first time; otherwise it means the channel was blocked
// for sending and now unblocked, so send the queued data now.
if (!was_ever_writable_) {
was_ever_writable_ = true;
UpdateState();
DeliverQueuedControlData();
ASSERT(queued_send_data_.empty());
} else if (state_ == kOpen) {
SendQueuedSendData();
DeliverQueuedSendData();
}
}
@ -356,7 +359,7 @@ void DataChannel::ClearQueuedReceivedData() {
}
}
void DataChannel::SendQueuedSendData() {
void DataChannel::DeliverQueuedSendData() {
DeliverQueuedControlData();
if (!was_ever_writable_) {
return;
@ -366,7 +369,7 @@ void DataChannel::SendQueuedSendData() {
DataBuffer* buffer = queued_send_data_.front();
cricket::SendDataResult send_result;
if (!InternalSendWithoutQueueing(*buffer, &send_result)) {
LOG(LS_WARNING) << "SendQueuedSendData aborted due to send_result "
LOG(LS_WARNING) << "DeliverQueuedSendData aborted due to send_result "
<< send_result;
break;
}
@ -375,6 +378,14 @@ void DataChannel::SendQueuedSendData() {
}
}
void DataChannel::ClearQueuedControlData() {
while (!queued_control_data_.empty()) {
const talk_base::Buffer *buf = queued_control_data_.front();
queued_control_data_.pop();
delete buf;
}
}
void DataChannel::DeliverQueuedControlData() {
if (was_ever_writable_) {
while (!queued_control_data_.empty()) {

View File

@ -67,8 +67,17 @@ class DataChannel : public DataChannelInterface,
virtual void RegisterObserver(DataChannelObserver* observer);
virtual void UnregisterObserver();
virtual std::string label() const { return label_; }
virtual bool reliable() const;
virtual bool ordered() const { return config_.ordered; }
virtual uint16 maxRetransmitTime() const {
return config_.maxRetransmitTime;
}
virtual uint16 maxRetransmits() const {
return config_.maxRetransmits;
}
virtual std::string protocol() const { return config_.protocol; }
virtual bool negotiated() const { return config_.negotiated; }
virtual int id() const { return config_.id; }
virtual uint64 buffered_amount() const;
virtual void Close();
@ -116,9 +125,10 @@ class DataChannel : public DataChannelInterface,
bool IsConnectedToDataSession() { return data_session_ != NULL; }
void DeliverQueuedControlData();
void QueueControl(const talk_base::Buffer* buffer);
void ClearQueuedControlData();
void DeliverQueuedReceivedData();
void ClearQueuedReceivedData();
void SendQueuedSendData();
void DeliverQueuedSendData();
void ClearQueuedSendData();
bool InternalSendWithoutQueueing(const DataBuffer& buffer,
cricket::SendDataResult* send_result);
@ -158,6 +168,11 @@ BEGIN_PROXY_MAP(DataChannel)
PROXY_METHOD0(void, UnregisterObserver)
PROXY_CONSTMETHOD0(std::string, label)
PROXY_CONSTMETHOD0(bool, reliable)
PROXY_CONSTMETHOD0(bool, ordered)
PROXY_CONSTMETHOD0(uint16, maxRetransmitTime)
PROXY_CONSTMETHOD0(uint16, maxRetransmits)
PROXY_CONSTMETHOD0(std::string, protocol)
PROXY_CONSTMETHOD0(bool, negotiated)
PROXY_CONSTMETHOD0(int, id)
PROXY_CONSTMETHOD0(DataState, state)
PROXY_CONSTMETHOD0(uint64, buffered_amount)

View File

@ -96,7 +96,7 @@ class SctpDataChannelTest : public testing::Test {
talk_base::scoped_refptr<CreateSessionDescriptionObserverForTest> observer
= new CreateSessionDescriptionObserverForTest();
session_.CreateOffer(observer.get(), NULL);
EXPECT_TRUE_WAIT(observer->description() != NULL, 1000);
EXPECT_TRUE_WAIT(observer->description() != NULL, 2000);
ASSERT_TRUE(observer->description() != NULL);
ASSERT_TRUE(session_.SetLocalDescription(observer->ReleaseDescription(),
NULL));

View File

@ -110,6 +110,16 @@ class DataChannelInterface : public talk_base::RefCountInterface {
// DataChannel object from other DataChannel objects.
virtual std::string label() const = 0;
virtual bool reliable() const = 0;
// TODO(tommyw): Remove these dummy implementations when all classes have
// implemented these APIs. They should all just return the values the
// DataChannel was created with.
virtual bool ordered() const { return false; }
virtual uint16 maxRetransmitTime() const { return 0; }
virtual uint16 maxRetransmits() const { return 0; }
virtual std::string protocol() const { return std::string(); }
virtual bool negotiated() const { return false; }
virtual int id() const = 0;
virtual DataState state() const = 0;
// The buffered_amount returns the number of bytes of application data

View File

@ -117,6 +117,7 @@ class FakeIdentityService : public webrtc::DTLSIdentityServiceInterface,
observer->OnFailure(0);
break;
}
delete message_data;
}
void GenerateIdentity(

View File

@ -348,7 +348,7 @@ class WebRtcSessionTest : public testing::Test {
session_->CreateOffer(observer, constraints);
EXPECT_TRUE_WAIT(
observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
1000);
2000);
return observer->ReleaseDescription();
}
@ -359,7 +359,7 @@ class WebRtcSessionTest : public testing::Test {
session_->CreateAnswer(observer, constraints);
EXPECT_TRUE_WAIT(
observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
1000);
2000);
return observer->ReleaseDescription();
}

View File

@ -1373,8 +1373,10 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(0, renderer.num_rendered_frames());
EXPECT_TRUE(SendFrame());
EXPECT_FRAME_ON_RENDERER_WAIT(renderer, 1, codec.width, codec.height,
kTimeout);
EXPECT_TRUE_WAIT(renderer.num_rendered_frames() >= 1 &&
codec.width == renderer.width() &&
codec.height == renderer.height(), kTimeout);
EXPECT_EQ(0, renderer.errors());
// Registering an external capturer is currently the same as screen casting
// (update the test when this changes).
@ -1392,8 +1394,8 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(capturer->CaptureCustomFrame(kWidth, kHeight,
cricket::FOURCC_ARGB));
EXPECT_TRUE(capturer->CaptureFrame());
EXPECT_EQ_WAIT(2, renderer.num_rendered_frames(), kTimeout);
EXPECT_TRUE_WAIT(kScaledWidth == renderer.width() &&
EXPECT_TRUE_WAIT(renderer.num_rendered_frames() >= 2 &&
kScaledWidth == renderer.width() &&
kScaledHeight == renderer.height(), kTimeout);
EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
}

View File

@ -787,10 +787,12 @@ class FakeWebRtcVideoEngine
// Not using WEBRTC_STUB due to bool return value
virtual bool IsIPv6Enabled(int channel) { return true; }
WEBRTC_STUB(SetMTU, (int, unsigned int));
#ifndef USE_WEBRTC_DEV_BRANCH
WEBRTC_STUB(SetPacketTimeoutNotification, (const int, bool, int));
WEBRTC_STUB(RegisterObserver, (const int, webrtc::ViENetworkObserver&));
WEBRTC_STUB(SetPeriodicDeadOrAliveStatus, (const int, const bool,
const unsigned int));
#endif
// webrtc::ViERender
WEBRTC_STUB(RegisterVideoRenderModule, (webrtc::VideoRender&));

View File

@ -619,6 +619,7 @@ class FakeWebRtcVoiceEngine
}
WEBRTC_STUB(ReceivedRTCPPacket, (int channel, const void* data,
unsigned int length));
#ifndef USE_WEBRTC_DEV_BRANCH
// Not using WEBRTC_STUB due to bool return value
WEBRTC_STUB(SetPacketTimeoutNotification, (int channel, bool enable,
int timeoutSeconds));
@ -631,6 +632,7 @@ class FakeWebRtcVoiceEngine
int& sampleTimeSeconds));
WEBRTC_STUB(SetPeriodicDeadOrAliveStatus, (int channel, bool enable,
int sampleTimeSeconds));
#endif
// webrtc::VoERTP_RTCP
WEBRTC_STUB(RegisterRTPObserver, (int channel,
@ -751,7 +753,11 @@ class FakeWebRtcVoiceEngine
// webrtc::VoEVideoSync
WEBRTC_STUB(GetPlayoutBufferSize, (int& bufferMs));
WEBRTC_STUB(GetPlayoutTimestamp, (int channel, unsigned int& timestamp));
#ifdef USE_WEBRTC_DEV_BRANCH
WEBRTC_STUB(GetRtpRtcp, (int, webrtc::RtpRtcp**, webrtc::RtpReceiver**));
#else
WEBRTC_STUB(GetRtpRtcp, (int, webrtc::RtpRtcp*&));
#endif
WEBRTC_STUB(SetInitTimestamp, (int channel, unsigned int timestamp));
WEBRTC_STUB(SetInitSequenceNumber, (int channel, short sequenceNumber));
WEBRTC_STUB(SetMinimumPlayoutDelay, (int channel, int delayMs));

View File

@ -44,6 +44,7 @@ struct RTPHeader
uint32_t arrOfCSRCs[kRtpCsrcSize];
uint8_t paddingLength;
uint16_t headerLength;
int payload_type_frequency;
RTPHeaderExtension extension;
};
@ -93,13 +94,13 @@ union RTPVideoTypeHeader
RTPVideoHeaderVP8 VP8;
};
enum RTPVideoCodecTypes
enum RtpVideoCodecTypes
{
kRTPVideoGeneric = 0,
kRTPVideoVP8 = 8,
kRTPVideoNoVideo = 10,
kRTPVideoFEC = 11,
kRTPVideoI420 = 12
kRtpVideoNone,
kRtpVideoGeneric,
kRtpVideoVp8,
kRtpVideoFec,
kRtpVideoI420
};
struct RTPVideoHeader
{
@ -109,7 +110,7 @@ struct RTPVideoHeader
bool isFirstPacket; // first packet in frame
uint8_t simulcastIdx; // Index of the simulcast encoder creating
// this frame, 0 if not using simulcast.
RTPVideoCodecTypes codec;
RtpVideoCodecTypes codec;
RTPVideoTypeHeader codecHeader;
};
union RTPTypeHeader

View File

@ -166,7 +166,6 @@
'remote_bitrate_estimator/bitrate_estimator_unittest.cc',
'remote_bitrate_estimator/rtp_to_ntp_unittest.cc',
'rtp_rtcp/source/mock/mock_rtp_payload_strategy.h',
'rtp_rtcp/source/mock/mock_rtp_receiver_video.h',
'rtp_rtcp/source/fec_test_helper.cc',
'rtp_rtcp/source/fec_test_helper.h',
'rtp_rtcp/source/nack_rtx_unittest.cc',

View File

@ -0,0 +1,54 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class Clock;
class ReceiveStatistics : public Module {
public:
struct RtpReceiveStatistics {
uint8_t fraction_lost;
uint32_t cumulative_lost;
uint32_t extended_max_sequence_number;
uint32_t jitter;
uint32_t max_jitter;
};
virtual ~ReceiveStatistics() {}
static ReceiveStatistics* Create(Clock* clock);
virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
bool retransmitted, bool in_order) = 0;
virtual bool Statistics(RtpReceiveStatistics* statistics, bool reset) = 0;
virtual bool Statistics(RtpReceiveStatistics* statistics, int32_t* missing,
bool reset) = 0;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const = 0;
virtual uint32_t BitrateReceived() = 0;
virtual void ResetStatistics() = 0;
virtual void ResetDataCounters() = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
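For orientation, here is a minimal usage sketch of the new ReceiveStatistics interface above. It is illustrative only and not part of the change; the function name and the flag values passed are assumptions.

// Illustrative sketch: driving the ReceiveStatistics interface declared above.
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

void ExampleReceiveStatisticsUsage(Clock* clock, const RTPHeader& header,
                                   size_t packet_size) {
  // One instance per received stream, owned by the caller.
  scoped_ptr<ReceiveStatistics> statistics(ReceiveStatistics::Create(clock));

  // Feed every parsed RTP header in; the retransmitted/in_order flags are
  // decided by the caller (e.g. via RtpReceiver::RetransmitOfOldPacket()).
  statistics->IncomingPacket(header, packet_size,
                             false /* retransmitted */, true /* in_order */);

  // When building an RTCP report block, fetch the counters and reset them.
  ReceiveStatistics::RtpReceiveStatistics report;
  if (statistics->Statistics(&report, true /* reset */)) {
    // Use report.fraction_lost, report.cumulative_lost, report.jitter, ...
  }
}

}  // namespace webrtc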

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@ -21,7 +21,7 @@ namespace webrtc {
// of payload handling.
class RTPPayloadStrategy {
public:
virtual ~RTPPayloadStrategy() {};
virtual ~RTPPayloadStrategy() {}
virtual bool CodecsMustBeUnique() const = 0;
@ -42,10 +42,13 @@ class RTPPayloadStrategy {
const uint8_t channels,
const uint32_t rate) const = 0;
virtual int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const = 0;
static RTPPayloadStrategy* CreateStrategy(const bool handling_audio);
protected:
RTPPayloadStrategy() {};
RTPPayloadStrategy() {}
};
class RTPPayloadRegistry {
@ -73,7 +76,11 @@ class RTPPayloadRegistry {
const uint32_t rate,
int8_t* payload_type) const;
int32_t PayloadTypeToPayload(
bool GetPayloadSpecifics(uint8_t payload_type, PayloadUnion* payload) const;
int GetPayloadTypeFrequency(uint8_t payload_type) const;
bool PayloadTypeToPayload(
const uint8_t payload_type,
ModuleRTPUtility::Payload*& payload) const;
@ -116,4 +123,4 @@ class RTPPayloadRegistry {
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PAYLOAD_REGISTRY_H_
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_

View File

@ -0,0 +1,120 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RTPPayloadRegistry;
class TelephoneEventHandler {
public:
virtual ~TelephoneEventHandler() {}
// The following three methods implement the TelephoneEventHandler interface.
// Forward DTMFs to decoder for playout.
virtual void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) = 0;
// Is forwarding of outband telephone events turned on/off?
virtual bool TelephoneEventForwardToDecoder() const = 0;
// Is TelephoneEvent configured with payload type payload_type
virtual bool TelephoneEventPayloadType(const int8_t payload_type) const = 0;
};
class RtpReceiver {
public:
// Creates a video-enabled RTP receiver.
static RtpReceiver* CreateVideoReceiver(
int id, Clock* clock,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry);
// Creates an audio-enabled RTP receiver.
static RtpReceiver* CreateAudioReceiver(
int id, Clock* clock,
RtpAudioFeedback* incoming_audio_feedback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry);
virtual ~RtpReceiver() {}
// Returns a TelephoneEventHandler if available.
virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
// Registers a receive payload in the payload registry and notifies the media
// receiver strategy.
virtual int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) = 0;
// De-registers |payload_type| from the payload registry.
virtual int32_t DeRegisterReceivePayload(const int8_t payload_type) = 0;
// Parses the media specific parts of an RTP packet and updates the receiver
// state. This means, for instance, that any change in SSRC or payload type is
// detected and acted upon.
virtual bool IncomingRtpPacket(RTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
int incoming_rtp_packet_length,
PayloadUnion payload_specific,
bool in_order) = 0;
// Returns the currently configured NACK method.
virtual NACKMethod NACK() const = 0;
// Turn negative acknowledgement (NACK) requests on/off.
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) = 0;
// Returns the last received timestamp.
virtual uint32_t Timestamp() const = 0;
// Returns the time in milliseconds when the last timestamp was received.
virtual int32_t LastReceivedTimeMs() const = 0;
// Returns the remote SSRC of the currently received RTP stream.
virtual uint32_t SSRC() const = 0;
// Returns the current remote CSRCs.
virtual int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const = 0;
// Returns the current energy of the RTP stream received.
virtual int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const = 0;
// Enable/disable RTX and set the SSRC to be used.
virtual void SetRTXStatus(bool enable, uint32_t ssrc) = 0;
// Returns the current RTX status and the SSRC and payload type used.
virtual void RTXStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const = 0;
// Sets the RTX payload type.
virtual void SetRtxPayloadType(int payload_type) = 0;
// Returns true if the packet with RTP header |header| is likely to be a
// retransmitted packet, false otherwise.
virtual bool RetransmitOfOldPacket(const RTPHeader& header, int jitter,
int min_rtt) const = 0;
// Returns true if |sequence_number| is received in order, false otherwise.
virtual bool InOrderPacket(const uint16_t sequence_number) const = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
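The updated NACK/RTX unit test later in this change shows the intended receive path with the split-out classes; condensed here into one hedged sketch. The helper name, parameters, and payload type value are assumptions, and the registry/receiver would normally be created once per stream rather than per packet.

// Illustrative sketch, mirroring the updated rtp_rtcp unit tests below.
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

// Assumed helpers: |payload_sink| implements RtpData, |feedback| implements
// RtpFeedback, |header| was filled in by an RtpHeaderParser.
bool ExampleDeliverPacket(Clock* clock, RtpData* payload_sink,
                          RtpFeedback* feedback, const uint8_t* packet,
                          int length, RTPHeader* header) {
  // Created once per stream in real code; shown inline for brevity.
  RTPPayloadRegistry registry(0, RTPPayloadStrategy::CreateStrategy(false));
  scoped_ptr<RtpReceiver> receiver(RtpReceiver::CreateVideoReceiver(
      0, clock, payload_sink, feedback, &registry));

  // Register the codecs this receiver should accept (here: I420 at 90 kHz).
  receiver->RegisterReceivePayload("I420", 96, 90000, 0, 0);

  // Per packet: look up the payload type, then hand the packet over.
  PayloadUnion payload_specific;
  if (!registry.GetPayloadSpecifics(header->payloadType, &payload_specific))
    return false;
  return receiver->IncomingRtpPacket(header, packet, length,
                                     payload_specific, true /* in_order */);
}

}  // namespace webrtc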

View File

@ -19,8 +19,9 @@
namespace webrtc {
// Forward declarations.
class PacedSender;
class ReceiveStatistics;
class RemoteBitrateEstimator;
class RemoteBitrateObserver;
class RtpReceiver;
class Transport;
class RtpRtcp : public Module {
@ -57,8 +58,7 @@ class RtpRtcp : public Module {
bool audio;
Clock* clock;
RtpRtcp* default_module;
RtpData* incoming_data;
RtpFeedback* incoming_messages;
ReceiveStatistics* receive_statistics;
Transport* outgoing_transport;
RtcpFeedback* rtcp_feedback;
RtcpIntraFrameObserver* intra_frame_callback;
@ -68,6 +68,7 @@ class RtpRtcp : public Module {
RemoteBitrateEstimator* remote_bitrate_estimator;
PacedSender* paced_sender;
};
/*
* Create a RTP/RTCP module object using the system clock.
*
@ -81,174 +82,11 @@ class RtpRtcp : public Module {
*
***************************************************************************/
/*
* configure a RTP packet timeout value
*
* RTPtimeoutMS - time in milliseconds after last received RTP packet
* RTCPtimeoutMS - time in milliseconds after last received RTCP packet
*
* return -1 on failure else 0
*/
virtual int32_t SetPacketTimeout(
const uint32_t RTPtimeoutMS,
const uint32_t RTCPtimeoutMS) = 0;
/*
* Set periodic dead or alive notification
*
* enable - turn periodic dead or alive notification on/off
* sampleTimeSeconds - sample interval in seconds for dead or alive
* notifications
*
* return -1 on failure else 0
*/
virtual int32_t SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sampleTimeSeconds) = 0;
/*
* Get periodic dead or alive notification status
*
* enable - periodic dead or alive notification on/off
* sampleTimeSeconds - sample interval in seconds for dead or alive
* notifications
*
* return -1 on failure else 0
*/
virtual int32_t PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sampleTimeSeconds) = 0;
/*
* set voice codec name and payload type
*
* return -1 on failure else 0
*/
virtual int32_t RegisterReceivePayload(
const CodecInst& voiceCodec) = 0;
/*
* set video codec name and payload type
*
* return -1 on failure else 0
*/
virtual int32_t RegisterReceivePayload(
const VideoCodec& videoCodec) = 0;
/*
* get payload type for a voice codec
*
* return -1 on failure else 0
*/
virtual int32_t ReceivePayloadType(
const CodecInst& voiceCodec,
int8_t* plType) = 0;
/*
* get payload type for a video codec
*
* return -1 on failure else 0
*/
virtual int32_t ReceivePayloadType(
const VideoCodec& videoCodec,
int8_t* plType) = 0;
/*
* Remove a registered payload type from list of accepted payloads
*
* payloadType - payload type of codec
*
* return -1 on failure else 0
*/
virtual int32_t DeRegisterReceivePayload(
const int8_t payloadType) = 0;
/*
* Get last received remote timestamp
*/
virtual uint32_t RemoteTimestamp() const = 0;
/*
* Get the local time of the last received remote timestamp
*/
virtual int64_t LocalTimeOfRemoteTimeStamp() const = 0;
/*
* Get the current estimated remote timestamp
*
* timestamp - estimated timestamp
*
* return -1 on failure else 0
*/
virtual int32_t EstimatedRemoteTimeStamp(
uint32_t& timestamp) const = 0;
/*
* Get incoming SSRC
*/
virtual uint32_t RemoteSSRC() const = 0;
/*
* Get remote CSRC
*
* arrOfCSRC - array that will receive the CSRCs
*
* return -1 on failure else the number of valid entries in the list
*/
virtual int32_t RemoteCSRCs(
uint32_t arrOfCSRC[kRtpCsrcSize]) const = 0;
/*
* get the currently configured SSRC filter
*
* allowedSSRC - SSRC that will be allowed through
*
* return -1 on failure else 0
*/
virtual int32_t SSRCFilter(uint32_t& allowedSSRC) const = 0;
/*
* set a SSRC to be used as a filter for incoming RTP streams
*
* allowedSSRC - SSRC that will be allowed through
*
* return -1 on failure else 0
*/
virtual int32_t SetSSRCFilter(const bool enable,
const uint32_t allowedSSRC) = 0;
/*
* Turn on/off receiving RTX (RFC 4588) on a specific SSRC.
*/
virtual int32_t SetRTXReceiveStatus(bool enable, uint32_t SSRC) = 0;
// Sets the payload type to expected for received RTX packets. Note
// that this doesn't enable RTX, only the payload type is set.
virtual void SetRtxReceivePayloadType(int payload_type) = 0;
/*
* Get status of receiving RTX (RFC 4588) on a specific SSRC.
*/
virtual int32_t RTXReceiveStatus(bool* enable,
uint32_t* SSRC,
int* payloadType) const = 0;
/*
* called by the network module when we receive a packet
*
* incomingPacket - incoming packet buffer
* packetLength - length of incoming buffer
* parsed_rtp_header - the parsed RTP header
*
* return -1 on failure else 0
*/
virtual int32_t IncomingRtpPacket(const uint8_t* incomingPacket,
const uint16_t packetLength,
const RTPHeader& parsed_rtp_header) = 0;
virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
uint16_t incoming_packet_length) = 0;
virtual void SetRemoteSSRC(const uint32_t ssrc) = 0;
/**************************************************************************
*
* Sender
@ -608,32 +446,6 @@ class RtpRtcp : public Module {
virtual int32_t SendRTCPSliceLossIndication(
const uint8_t pictureID) = 0;
/*
* Reset RTP statistics
*
* return -1 on failure else 0
*/
virtual int32_t ResetStatisticsRTP() = 0;
/*
* statistics of our localy created statistics of the received RTP stream
*
* return -1 on failure else 0
*/
virtual int32_t StatisticsRTP(
uint8_t* fraction_lost, // scale 0 to 255
uint32_t* cum_lost, // number of lost packets
uint32_t* ext_max, // highest sequence number received
uint32_t* jitter,
uint32_t* max_jitter = NULL) const = 0;
/*
* Reset RTP data counters for the receiving side
*
* return -1 on failure else 0
*/
virtual int32_t ResetReceiveDataCountersRTP() = 0;
/*
* Reset RTP data counters for the sending side
*
@ -648,9 +460,7 @@ class RtpRtcp : public Module {
*/
virtual int32_t DataCountersRTP(
uint32_t* bytesSent,
uint32_t* packetsSent,
uint32_t* bytesReceived,
uint32_t* packetsReceived) const = 0;
uint32_t* packetsSent) const = 0;
/*
* Get received RTCP sender info
*
@ -731,18 +541,6 @@ class RtpRtcp : public Module {
/*
* (NACK)
*/
virtual NACKMethod NACK() const = 0;
/*
* Turn negative acknowledgement requests on/off
* |max_reordering_threshold| should be set to how much a retransmitted
* packet can be expected to be reordered (in sequence numbers) compared to
* a packet which has not been retransmitted.
*
* return -1 on failure else 0
*/
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) = 0;
/*
* TODO(holmer): Propagate this API to VideoEngine.
@ -782,6 +580,9 @@ class RtpRtcp : public Module {
const bool enable,
const uint16_t numberToStore) = 0;
// Returns true if the module is configured to store packets.
virtual bool StorePackets() const = 0;
/**************************************************************************
*
* Audio
@ -797,19 +598,6 @@ class RtpRtcp : public Module {
virtual int32_t SetAudioPacketSize(
const uint16_t packetSizeSamples) = 0;
/*
* Forward DTMF to decoder for playout.
*
* return -1 on failure else 0
*/
virtual int SetTelephoneEventForwardToDecoder(bool forwardToDecoder) = 0;
/*
* Returns true if received DTMF events are forwarded to the decoder using
* the OnPlayTelephoneEvent callback.
*/
virtual bool TelephoneEventForwardToDecoder() const = 0;
/*
* SendTelephoneEventActive
*
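With the receive-side APIs removed from RtpRtcp above, a module is now wired up roughly as in the updated test further down. The sketch below is an assumption-laden illustration; the wrapper function and variable names are not part of the change.

// Illustrative only: configuring an RtpRtcp module after this change.
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"

namespace webrtc {

RtpRtcp* ExampleCreateModule(int id, Clock* clock,
                             ReceiveStatistics* receive_statistics,
                             Transport* transport) {
  RtpRtcp::Configuration configuration;
  configuration.id = id;
  configuration.audio = false;
  configuration.clock = clock;
  // receive_statistics replaces the old incoming_data/incoming_messages
  // members; payload delivery now goes through the separate RtpReceiver.
  configuration.receive_statistics = receive_statistics;
  configuration.outgoing_transport = transport;
  return RtpRtcp::CreateRtpRtcp(configuration);
}

}  // namespace webrtc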

View File

@ -11,22 +11,39 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
#include <stddef.h>
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/typedefs.h"
#ifndef NULL
#define NULL 0
#endif
#define RTCP_CNAME_SIZE 256 // RFC 3550 page 44, including null termination
#define IP_PACKET_SIZE 1500 // we assume ethernet
#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
#define TIMEOUT_SEI_MESSAGES_MS 30000 // in milliseconds
namespace webrtc{
namespace webrtc {
const int32_t kDefaultVideoFrequency = 90000;
const int kVideoPayloadTypeFrequency = 90000;
struct AudioPayload
{
uint32_t frequency;
uint8_t channels;
uint32_t rate;
};
struct VideoPayload
{
RtpVideoCodecTypes videoCodecType;
uint32_t maxRate;
};
union PayloadUnion
{
AudioPayload Audio;
VideoPayload Video;
};
enum RTCPMethod
{
@ -145,6 +162,9 @@ public:
const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader) = 0;
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) = 0;
protected:
virtual ~RtpData() {}
};
@ -162,8 +182,6 @@ public:
const int32_t /*id*/,
const RTCPVoIPMetric* /*metric*/) {};
virtual void OnRTCPPacketTimeout(const int32_t /*id*/) {};
virtual void OnReceiveReportReceived(const int32_t id,
const uint32_t senderSSRC) {};
@ -186,14 +204,6 @@ public:
const uint8_t channels,
const uint32_t rate) = 0;
virtual void OnPacketTimeout(const int32_t id) = 0;
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) = 0;
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) = 0;
virtual void OnIncomingSSRCChanged( const int32_t id,
const uint32_t SSRC) = 0;
@ -201,6 +211,8 @@ public:
const uint32_t CSRC,
const bool added) = 0;
virtual void ResetStatistics() = 0;
protected:
virtual ~RtpFeedback() {}
};
@ -268,32 +280,32 @@ class NullRtpFeedback : public RtpFeedback {
return 0;
}
virtual void OnPacketTimeout(const int32_t id) OVERRIDE {}
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) OVERRIDE {}
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) OVERRIDE {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) OVERRIDE {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) OVERRIDE {}
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) OVERRIDE {}
virtual void ResetStatistics() OVERRIDE {}
};
// Null object version of RtpData.
class NullRtpData : public RtpData {
public:
virtual ~NullRtpData() {}
virtual int32_t OnReceivedPayloadData(
const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader) OVERRIDE {
return 0;
}
return 0;
}
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) {
return true;
}
};
// Null object version of RtpAudioFeedback.

View File

@ -35,53 +35,9 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(RtpRtcp* module));
MOCK_METHOD0(DeRegisterSyncModule,
int32_t());
MOCK_METHOD0(InitReceiver,
int32_t());
MOCK_METHOD1(RegisterIncomingDataCallback,
int32_t(RtpData* incomingDataCallback));
MOCK_METHOD1(RegisterIncomingRTPCallback,
int32_t(RtpFeedback* incomingMessagesCallback));
MOCK_METHOD2(SetPacketTimeout,
int32_t(const uint32_t RTPtimeoutMS, const uint32_t RTCPtimeoutMS));
MOCK_METHOD2(SetPeriodicDeadOrAliveStatus,
int32_t(const bool enable, const uint8_t sampleTimeSeconds));
MOCK_METHOD2(PeriodicDeadOrAliveStatus,
int32_t(bool &enable, uint8_t &sampleTimeSeconds));
MOCK_METHOD1(RegisterReceivePayload,
int32_t(const CodecInst& voiceCodec));
MOCK_METHOD1(RegisterReceivePayload,
int32_t(const VideoCodec& videoCodec));
MOCK_METHOD2(ReceivePayloadType,
int32_t(const CodecInst& voiceCodec, int8_t* plType));
MOCK_METHOD2(ReceivePayloadType,
int32_t(const VideoCodec& videoCodec, int8_t* plType));
MOCK_METHOD1(DeRegisterReceivePayload,
int32_t(const int8_t payloadType));
MOCK_CONST_METHOD0(RemoteTimestamp,
uint32_t());
MOCK_CONST_METHOD0(LocalTimeOfRemoteTimeStamp,
int64_t());
MOCK_CONST_METHOD1(EstimatedRemoteTimeStamp,
int32_t(uint32_t& timestamp));
MOCK_CONST_METHOD0(RemoteSSRC,
uint32_t());
MOCK_CONST_METHOD1(RemoteCSRCs,
int32_t(uint32_t arrOfCSRC[kRtpCsrcSize]));
MOCK_CONST_METHOD1(SSRCFilter,
int32_t(uint32_t& allowedSSRC));
MOCK_METHOD2(SetSSRCFilter,
int32_t(const bool enable, const uint32_t allowedSSRC));
MOCK_METHOD2(SetRTXReceiveStatus,
int32_t(bool enable, uint32_t ssrc));
MOCK_CONST_METHOD3(RTXReceiveStatus,
int32_t(bool* enable, uint32_t* ssrc, int* payload_type));
MOCK_METHOD1(SetRtxReceivePayloadType,
void(int));
MOCK_METHOD3(IncomingRtpPacket,
int32_t(const uint8_t* incomingPacket, const uint16_t packetLength,
const webrtc::RTPHeader& header));
MOCK_METHOD2(IncomingRtcpPacket,
int32_t(const uint8_t* incomingPacket, uint16_t packetLength));
MOCK_METHOD1(SetRemoteSSRC, void(const uint32_t ssrc));
MOCK_METHOD4(IncomingAudioNTP,
int32_t(const uint32_t audioReceivedNTPsecs,
const uint32_t audioReceivedNTPfrac,
@ -196,16 +152,10 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint64_t pictureID));
MOCK_METHOD1(SendRTCPSliceLossIndication,
int32_t(const uint8_t pictureID));
MOCK_METHOD0(ResetStatisticsRTP,
int32_t());
MOCK_CONST_METHOD5(StatisticsRTP,
int32_t(uint8_t *fraction_lost, uint32_t *cum_lost, uint32_t *ext_max, uint32_t *jitter, uint32_t *max_jitter));
MOCK_METHOD0(ResetReceiveDataCountersRTP,
int32_t());
MOCK_METHOD0(ResetSendDataCountersRTP,
int32_t());
MOCK_CONST_METHOD4(DataCountersRTP,
int32_t(uint32_t *bytesSent, uint32_t *packetsSent, uint32_t *bytesReceived, uint32_t *packetsReceived));
MOCK_CONST_METHOD2(DataCountersRTP,
int32_t(uint32_t *bytesSent, uint32_t *packetsSent));
MOCK_METHOD1(RemoteRTCPStat,
int32_t(RTCPSenderInfo* senderInfo));
MOCK_CONST_METHOD1(RemoteRTCPStat,
@ -224,8 +174,6 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const bool enable));
MOCK_METHOD3(SetREMBData,
int32_t(const uint32_t bitrate, const uint8_t numberOfSSRC, const uint32_t* SSRC));
MOCK_METHOD1(SetRemoteBitrateObserver,
bool(RemoteBitrateObserver*));
MOCK_CONST_METHOD0(IJ,
bool());
MOCK_METHOD1(SetIJStatus,
@ -248,13 +196,11 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint16_t* nackList, const uint16_t size));
MOCK_METHOD2(SetStorePacketsStatus,
int32_t(const bool enable, const uint16_t numberToStore));
MOCK_CONST_METHOD0(StorePackets, bool());
MOCK_METHOD1(RegisterAudioCallback,
int32_t(RtpAudioFeedback* messagesCallback));
MOCK_METHOD1(SetAudioPacketSize,
int32_t(const uint16_t packetSizeSamples));
MOCK_METHOD1(SetTelephoneEventForwardToDecoder, int(bool forwardToDecoder));
MOCK_CONST_METHOD0(TelephoneEventForwardToDecoder,
bool());
MOCK_CONST_METHOD1(SendTelephoneEventActive,
bool(int8_t& telephoneEvent));
MOCK_METHOD3(SendTelephoneEventOutband,

View File

@ -57,6 +57,10 @@ uint32_t Bitrate::BitrateNow() const {
return static_cast<uint32_t>(bitrate);
}
int64_t Bitrate::time_last_rate_update() const {
return time_last_rate_update_;
}
void Bitrate::Process() {
// Triggered by timer.
int64_t now = clock_->TimeInMilliseconds();

View File

@ -42,6 +42,8 @@ class Bitrate {
// Bitrate last second, updated now.
uint32_t BitrateNow() const;
int64_t time_last_rate_update() const;
protected:
Clock* clock_;

View File

@ -12,7 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_REGISTRY_H_
#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
namespace webrtc {
@ -27,6 +27,8 @@ class MockRTPPayloadStrategy : public RTPPayloadStrategy {
const uint32_t rate));
MOCK_CONST_METHOD2(UpdatePayloadRate,
void(ModuleRTPUtility::Payload* payload, const uint32_t rate));
MOCK_CONST_METHOD1(GetPayloadTypeFrequency, int(
const ModuleRTPUtility::Payload& payload));
MOCK_CONST_METHOD5(CreatePayloadType,
ModuleRTPUtility::Payload*(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],

View File

@ -1,49 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
namespace webrtc {
class MockRTPReceiverVideo : public RTPReceiverVideo {
public:
MockRTPReceiverVideo() : RTPReceiverVideo(0, NULL, NULL) {}
MOCK_METHOD1(ChangeUniqueId,
void(const int32_t id));
MOCK_METHOD3(ReceiveRecoveredPacketCallback,
int32_t(WebRtcRTPHeader* rtpHeader,
const uint8_t* payloadData,
const uint16_t payloadDataLength));
MOCK_METHOD3(CallbackOfReceivedPayloadData,
int32_t(const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader));
MOCK_CONST_METHOD0(TimeStamp,
uint32_t());
MOCK_CONST_METHOD0(SequenceNumber,
uint16_t());
MOCK_CONST_METHOD2(PayloadTypeToPayload,
uint32_t(const uint8_t payloadType,
ModuleRTPUtility::Payload*& payload));
MOCK_CONST_METHOD2(RetransmitOfOldPacket,
bool(const uint16_t sequenceNumber,
const uint32_t rtpTimeStamp));
MOCK_CONST_METHOD0(REDPayloadType,
int8_t());
MOCK_CONST_METHOD0(HaveNotReceivedPackets,
bool());
};
} // namespace webrtc
#endif //WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_

View File

@ -15,7 +15,10 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -30,7 +33,7 @@ const uint32_t kTestNumberOfPackets = 1350;
const int kTestNumberOfRtxPackets = 149;
const int kNumFrames = 30;
class VerifyingRtxReceiver : public RtpData
class VerifyingRtxReceiver : public NullRtpData
{
public:
VerifyingRtxReceiver() {}
@ -47,6 +50,20 @@ class VerifyingRtxReceiver : public RtpData
std::list<uint16_t> sequence_numbers_;
};
class TestRtpFeedback : public NullRtpFeedback {
public:
TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
rtp_rtcp_->SetRemoteSSRC(SSRC);
}
private:
RtpRtcp* rtp_rtcp_;
};
class RtxLoopBackTransport : public webrtc::Transport {
public:
explicit RtxLoopBackTransport(uint32_t rtx_ssrc)
@ -56,11 +73,17 @@ class RtxLoopBackTransport : public webrtc::Transport {
consecutive_drop_end_(0),
rtx_ssrc_(rtx_ssrc),
count_rtx_ssrc_(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
module_(NULL) {
}
void SetSendModule(RtpRtcp* rtpRtcpModule) {
void SetSendModule(RtpRtcp* rtpRtcpModule,
RTPPayloadRegistry* rtp_payload_registry,
RtpReceiver* receiver) {
module_ = rtpRtcpModule;
rtp_payload_registry_ = rtp_payload_registry;
rtp_receiver_ = receiver;
}
void DropEveryNthPacket(int n) {
@ -94,8 +117,14 @@ class RtxLoopBackTransport : public webrtc::Transport {
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return -1;
}
if (module_->IncomingRtpPacket(static_cast<const uint8_t*>(data), len,
header) < 0) {
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header,
static_cast<const uint8_t*>(data),
len, payload_specific, true)) {
return -1;
}
return len;
@ -113,6 +142,8 @@ class RtxLoopBackTransport : public webrtc::Transport {
int consecutive_drop_end_;
uint32_t rtx_ssrc_;
int count_rtx_ssrc_;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* module_;
std::set<uint16_t> expected_sequence_numbers_;
};
@ -120,7 +151,8 @@ class RtxLoopBackTransport : public webrtc::Transport {
class RtpRtcpRtxNackTest : public ::testing::Test {
protected:
RtpRtcpRtxNackTest()
: rtp_rtcp_module_(NULL),
: rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
rtp_rtcp_module_(NULL),
transport_(kTestSsrc + 1),
receiver_(),
payload_data_length(sizeof(payload_data)),
@ -132,19 +164,27 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
configuration.id = kTestId;
configuration.audio = false;
configuration.clock = &fake_clock;
configuration.incoming_data = &receiver_;
receive_statistics_.reset(ReceiveStatistics::Create(&fake_clock));
configuration.receive_statistics = receive_statistics_.get();
configuration.outgoing_transport = &transport_;
rtp_rtcp_module_ = RtpRtcp::CreateRtpRtcp(configuration);
rtp_feedback_.reset(new TestRtpFeedback(rtp_rtcp_module_));
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
kTestId, &fake_clock, &receiver_, rtp_feedback_.get(),
&rtp_payload_registry_));
EXPECT_EQ(0, rtp_rtcp_module_->SetSSRC(kTestSsrc));
EXPECT_EQ(0, rtp_rtcp_module_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtp_rtcp_module_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, rtp_rtcp_module_->SetStorePacketsStatus(true, 600));
EXPECT_EQ(0, rtp_rtcp_module_->SetSendingStatus(true));
EXPECT_EQ(0, rtp_rtcp_module_->SetSequenceNumber(kTestSequenceNumber));
EXPECT_EQ(0, rtp_rtcp_module_->SetStartTimestamp(111111));
transport_.SetSendModule(rtp_rtcp_module_);
transport_.SetSendModule(rtp_rtcp_module_, &rtp_payload_registry_,
rtp_receiver_.get());
VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
@ -152,7 +192,11 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
memcpy(video_codec.plName, "I420", 5);
EXPECT_EQ(0, rtp_rtcp_module_->RegisterSendPayload(video_codec));
EXPECT_EQ(0, rtp_rtcp_module_->RegisterReceivePayload(video_codec));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate));
for (int n = 0; n < payload_data_length; n++) {
payload_data[n] = n % 10;
@ -196,7 +240,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
void RunRtxTest(RtxMode rtx_method, int loss) {
EXPECT_EQ(0, rtp_rtcp_module_->SetRTXReceiveStatus(true, kTestSsrc + 1));
rtp_receiver_->SetRTXStatus(true, kTestSsrc + 1);
EXPECT_EQ(0, rtp_rtcp_module_->SetRTXSendStatus(rtx_method, true,
kTestSsrc + 1));
transport_.DropEveryNthPacket(loss);
@ -224,7 +268,11 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
delete rtp_rtcp_module_;
}
scoped_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* rtp_rtcp_module_;
scoped_ptr<TestRtpFeedback> rtp_feedback_;
RtxLoopBackTransport transport_;
VerifyingRtxReceiver receiver_;
uint8_t payload_data[65000];

View File

@ -0,0 +1,291 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
enum { kRateUpdateIntervalMs = 1000 };
ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
return new ReceiveStatisticsImpl(clock);
}
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
clock_(clock),
incoming_bitrate_(clock),
ssrc_(0),
jitter_q4_(0),
jitter_max_q4_(0),
cumulative_loss_(0),
jitter_q4_transmission_time_offset_(0),
local_time_last_received_timestamp_(0),
last_received_timestamp_(0),
last_received_transmission_time_offset_(0),
received_seq_first_(0),
received_seq_max_(0),
received_seq_wraps_(0),
received_packet_overhead_(12),
received_byte_count_(0),
received_retransmitted_packets_(0),
received_inorder_packet_count_(0),
last_report_inorder_packets_(0),
last_report_old_packets_(0),
last_report_seq_max_(0),
last_reported_statistics_() {}
void ReceiveStatisticsImpl::ResetStatistics() {
CriticalSectionScoped lock(crit_sect_.get());
last_report_inorder_packets_ = 0;
last_report_old_packets_ = 0;
last_report_seq_max_ = 0;
memset(&last_reported_statistics_, 0, sizeof(last_reported_statistics_));
jitter_q4_ = 0;
jitter_max_q4_ = 0;
cumulative_loss_ = 0;
jitter_q4_transmission_time_offset_ = 0;
received_seq_wraps_ = 0;
received_seq_max_ = 0;
received_seq_first_ = 0;
received_byte_count_ = 0;
received_retransmitted_packets_ = 0;
received_inorder_packet_count_ = 0;
}
void ReceiveStatisticsImpl::ResetDataCounters() {
CriticalSectionScoped lock(crit_sect_.get());
received_byte_count_ = 0;
received_retransmitted_packets_ = 0;
received_inorder_packet_count_ = 0;
last_report_inorder_packets_ = 0;
}
void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
size_t bytes,
bool retransmitted,
bool in_order) {
ssrc_ = header.ssrc;
incoming_bitrate_.Update(bytes);
received_byte_count_ += bytes;
if (received_seq_max_ == 0 && received_seq_wraps_ == 0) {
// This is the first packet received.
received_seq_first_ = header.sequenceNumber;
received_seq_max_ = header.sequenceNumber;
received_inorder_packet_count_ = 1;
// Current time in samples.
local_time_last_received_timestamp_ =
ModuleRTPUtility::GetCurrentRTP(clock_, header.payload_type_frequency);
return;
}
// Count only the new packets received. That is, if packets 1, 2, 3, 5, 4, 6
// are received, 4 will be ignored.
if (in_order) {
// Current time in samples.
const uint32_t RTPtime =
ModuleRTPUtility::GetCurrentRTP(clock_, header.payload_type_frequency);
received_inorder_packet_count_++;
// Wrong if we use RetransmitOfOldPacket.
int32_t seq_diff =
header.sequenceNumber - received_seq_max_;
if (seq_diff < 0) {
// Wrap around detected.
received_seq_wraps_++;
}
// New max.
received_seq_max_ = header.sequenceNumber;
if (header.timestamp != last_received_timestamp_ &&
received_inorder_packet_count_ > 1) {
int32_t time_diff_samples =
(RTPtime - local_time_last_received_timestamp_) -
(header.timestamp - last_received_timestamp_);
time_diff_samples = abs(time_diff_samples);
// lib_jingle sometimes delivers crazy jumps in TS for the same stream.
// If this happens, don't update the jitter value. Use 5 seconds of video
// frequency as the threshold.
if (time_diff_samples < 450000) {
// Note we calculate in Q4 to avoid using float.
int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
}
// Extended jitter report, RFC 5450.
// Actual network jitter, excluding the source-introduced jitter.
int32_t time_diff_samples_ext =
(RTPtime - local_time_last_received_timestamp_) -
((header.timestamp +
header.extension.transmissionTimeOffset) -
(last_received_timestamp_ +
last_received_transmission_time_offset_));
time_diff_samples_ext = abs(time_diff_samples_ext);
if (time_diff_samples_ext < 450000) {
int32_t jitter_diffQ4TransmissionTimeOffset =
(time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
jitter_q4_transmission_time_offset_ +=
((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
}
}
last_received_timestamp_ = header.timestamp;
local_time_last_received_timestamp_ = RTPtime;
} else {
if (retransmitted) {
received_retransmitted_packets_++;
} else {
received_inorder_packet_count_++;
}
}
uint16_t packet_oh = header.headerLength + header.paddingLength;
// Our measured overhead. Filter from RFC 5104 4.2.1.2:
// avg_OH (new) = 15/16*avg_OH (old) + 1/16*pckt_OH,
received_packet_overhead_ = (15 * received_packet_overhead_ + packet_oh) >> 4;
}
bool ReceiveStatisticsImpl::Statistics(RtpReceiveStatistics* statistics,
bool reset) {
int32_t missing;
return Statistics(statistics, &missing, reset);
}
bool ReceiveStatisticsImpl::Statistics(RtpReceiveStatistics* statistics,
int32_t* missing, bool reset) {
CriticalSectionScoped lock(crit_sect_.get());
assert(missing);
if (received_seq_first_ == 0 && received_byte_count_ == 0) {
// We have not received anything.
return false;
}
if (!reset) {
if (last_report_inorder_packets_ == 0) {
// No report.
return false;
}
// Just get last report.
*statistics = last_reported_statistics_;
return true;
}
if (last_report_inorder_packets_ == 0) {
// First time we send a report.
last_report_seq_max_ = received_seq_first_ - 1;
}
// Calculate fraction lost.
uint16_t exp_since_last = (received_seq_max_ - last_report_seq_max_);
if (last_report_seq_max_ > received_seq_max_) {
// Can we assume that the seq_num can't decrease over a full RTCP period?
exp_since_last = 0;
}
// Number of received RTP packets since last report, counts all packets but
// not re-transmissions.
uint32_t rec_since_last =
received_inorder_packet_count_ - last_report_inorder_packets_;
// With NACK we don't know the expected retransmissions during the last
// second. We know how many "old" packets we have received. We just count
// the number of old packets received to estimate the loss, but it still does
// not guarantee an exact number since we run this based on time triggered by
// sending of an RTP packet. This should have minimal effect.
// With NACK we don't count old packets as received since they are
// re-transmitted. We use RTT to decide if a packet is re-ordered or
// re-transmitted.
uint32_t retransmitted_packets =
received_retransmitted_packets_ - last_report_old_packets_;
rec_since_last += retransmitted_packets;
*missing = 0;
if (exp_since_last > rec_since_last) {
*missing = (exp_since_last - rec_since_last);
}
uint8_t local_fraction_lost = 0;
if (exp_since_last) {
// Scale 0 to 255, where 255 is 100% loss.
local_fraction_lost =
static_cast<uint8_t>((255 * (*missing)) / exp_since_last);
}
statistics->fraction_lost = local_fraction_lost;
// We need a counter for cumulative loss too.
cumulative_loss_ += *missing;
if (jitter_q4_ > jitter_max_q4_) {
jitter_max_q4_ = jitter_q4_;
}
statistics->cumulative_lost = cumulative_loss_;
statistics->extended_max_sequence_number = (received_seq_wraps_ << 16) +
received_seq_max_;
// Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
statistics->jitter = jitter_q4_ >> 4;
statistics->max_jitter = jitter_max_q4_ >> 4;
if (reset) {
// Store this report.
last_reported_statistics_ = *statistics;
// Only for report blocks in RTCP SR and RR.
last_report_inorder_packets_ = received_inorder_packet_count_;
last_report_old_packets_ = received_retransmitted_packets_;
last_report_seq_max_ = received_seq_max_;
}
return true;
}
void ReceiveStatisticsImpl::GetDataCounters(
uint32_t* bytes_received, uint32_t* packets_received) const {
CriticalSectionScoped lock(crit_sect_.get());
if (bytes_received) {
*bytes_received = received_byte_count_;
}
if (packets_received) {
*packets_received =
received_retransmitted_packets_ + received_inorder_packet_count_;
}
}
uint32_t ReceiveStatisticsImpl::BitrateReceived() {
return incoming_bitrate_.BitrateNow();
}
int32_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
int time_since_last_update = clock_->TimeInMilliseconds() -
incoming_bitrate_.time_last_rate_update();
return std::max(kRateUpdateIntervalMs - time_since_last_update, 0);
}
int32_t ReceiveStatisticsImpl::Process() {
incoming_bitrate_.Process();
return 0;
}
} // namespace webrtc
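Two pieces of arithmetic above are easy to misread, so here they are written out in isolation (an illustration, not part of the change): the jitter filter is the RFC 3550 estimator J(i) = J(i-1) + (|D(i-1,i)| - J(i-1))/16 kept in Q4 fixed point (hence the "+ 8" rounding before ">> 4"), and fraction_lost maps the loss ratio since the last report onto 0..255.

// Illustration only: the estimators above as standalone functions.
#include <stdint.h>
#include <stdlib.h>

// RFC 3550 interarrival jitter, kept in Q4 (1/16th sample units) to avoid
// floating point: J(i) = J(i-1) + (|D(i-1,i)| - J(i-1)) / 16.
uint32_t UpdateJitterQ4(uint32_t jitter_q4, int32_t time_diff_samples) {
  int32_t jitter_diff_q4 = (abs(time_diff_samples) << 4) - jitter_q4;
  return jitter_q4 + ((jitter_diff_q4 + 8) >> 4);  // +8 rounds to nearest.
}

// RTCP fraction lost: loss ratio since the last report, scaled to 0..255.
// Example: 5 missing out of 100 expected -> 255 * 5 / 100 = 12 (about 5%).
uint8_t FractionLost(uint32_t missing, uint32_t expected_since_last) {
  if (expected_since_last == 0)
    return 0;
  return static_cast<uint8_t>((255 * missing) / expected_since_last);
}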

View File

@ -0,0 +1,77 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include <algorithm>
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class CriticalSectionWrapper;
class ReceiveStatisticsImpl : public ReceiveStatistics {
public:
explicit ReceiveStatisticsImpl(Clock* clock);
// Implements ReceiveStatistics.
void IncomingPacket(const RTPHeader& header, size_t bytes,
bool old_packet, bool in_order);
bool Statistics(RtpReceiveStatistics* statistics, bool reset);
bool Statistics(RtpReceiveStatistics* statistics, int32_t* missing,
bool reset);
void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const;
uint32_t BitrateReceived();
void ResetStatistics();
void ResetDataCounters();
// Implements Module.
int32_t TimeUntilNextProcess();
int32_t Process();
private:
scoped_ptr<CriticalSectionWrapper> crit_sect_;
Clock* clock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
// Stats on received RTP packets.
uint32_t jitter_q4_;
uint32_t jitter_max_q4_;
uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
uint32_t local_time_last_received_timestamp_;
uint32_t last_received_timestamp_;
int32_t last_received_transmission_time_offset_;
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
// Current counter values.
uint16_t received_packet_overhead_;
uint32_t received_byte_count_;
uint32_t received_retransmitted_packets_;
uint32_t received_inorder_packet_count_;
// Counter values when we sent the last report.
uint32_t last_report_inorder_packets_;
uint32_t last_report_old_packets_;
uint16_t last_report_seq_max_;
RtpReceiveStatistics last_reported_statistics_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_

View File

@ -14,14 +14,16 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/trace.h"
// RFC 5109
namespace webrtc {
ReceiverFEC::ReceiverFEC(const int32_t id, RTPReceiverVideo* owner)
ReceiverFEC::ReceiverFEC(const int32_t id, RtpData* callback)
: id_(id),
owner_(owner),
crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
recovered_packet_callback_(callback),
fec_(new ForwardErrorCorrection(id)),
payload_type_fec_(-1) {}
@ -42,6 +44,7 @@ ReceiverFEC::~ReceiverFEC() {
}
void ReceiverFEC::SetPayloadTypeFEC(const int8_t payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
payload_type_fec_ = payload_type;
}
// 0 1 2 3
@ -76,6 +79,8 @@ int32_t ReceiverFEC::AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
const uint16_t payload_data_length,
bool& FECpacket) {
CriticalSectionScoped cs(crit_sect_.get());
if (payload_type_fec_ == -1) {
return -1;
}
@ -221,12 +226,18 @@ int32_t ReceiverFEC::AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
}
int32_t ReceiverFEC::ProcessReceivedFEC() {
crit_sect_->Enter();
if (!received_packet_list_.empty()) {
// Send received media packet to VCM.
if (!received_packet_list_.front()->is_fec) {
if (ParseAndReceivePacket(received_packet_list_.front()->pkt) != 0) {
ForwardErrorCorrection::Packet* packet =
received_packet_list_.front()->pkt;
crit_sect_->Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
crit_sect_->Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
return -1;
@ -239,27 +250,16 @@ int32_t ReceiverFEC::ProcessReceivedFEC() {
for (; it != recovered_packet_list_.end(); ++it) {
if ((*it)->returned) // Already sent to the VCM and the jitter buffer.
continue;
if (ParseAndReceivePacket((*it)->pkt) != 0) {
ForwardErrorCorrection::Packet* packet = (*it)->pkt;
crit_sect_->Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
crit_sect_->Enter();
(*it)->returned = true;
}
return 0;
}
int ReceiverFEC::ParseAndReceivePacket(
const ForwardErrorCorrection::Packet* packet) {
WebRtcRTPHeader header;
memset(&header, 0, sizeof(header));
ModuleRTPUtility::RTPHeaderParser parser(packet->data, packet->length);
if (!parser.Parse(header.header)) {
return -1;
}
if (owner_->ReceiveRecoveredPacketCallback(
&header, &packet->data[header.header.headerLength],
packet->length - header.header.headerLength) != 0) {
return -1;
}
crit_sect_->Leave();
return 0;
}

View File

@ -15,14 +15,16 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RTPReceiverVideo;
class CriticalSectionWrapper;
class ReceiverFEC {
public:
ReceiverFEC(const int32_t id, RTPReceiverVideo* owner);
ReceiverFEC(const int32_t id, RtpData* callback);
virtual ~ReceiverFEC();
int32_t AddReceivedFECPacket(const WebRtcRTPHeader* rtp_header,
@ -35,10 +37,9 @@ class ReceiverFEC {
void SetPayloadTypeFEC(const int8_t payload_type);
private:
int ParseAndReceivePacket(const ForwardErrorCorrection::Packet* packet);
int id_;
RTPReceiverVideo* owner_;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
RtpData* recovered_packet_callback_;
ForwardErrorCorrection* fec_;
// TODO(holmer): In the current version received_packet_list_ is never more
// than one packet, since we process FEC every time a new packet

View File

@ -16,20 +16,31 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/source/fec_test_helper.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h"
#include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
using ::testing::_;
using ::testing::Args;
using ::testing::ElementsAreArray;
using ::testing::Return;
namespace webrtc {
class MockRtpData : public RtpData {
public:
MOCK_METHOD3(OnReceivedPayloadData,
int32_t(const uint8_t* payloadData,
const uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader));
MOCK_METHOD2(OnRecoveredPacket,
bool(const uint8_t* packet, int packet_length));
};
class ReceiverFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
fec_ = new ForwardErrorCorrection(0);
receiver_fec_ = new ReceiverFEC(0, &rtp_receiver_video_);
receiver_fec_ = new ReceiverFEC(0, &rtp_data_callback_);
generator_ = new FrameGenerator();
receiver_fec_->SetPayloadTypeFEC(kFecPayloadType);
}
@ -64,11 +75,10 @@ class ReceiverFecTest : public ::testing::Test {
// Verify that the content of the reconstructed packet is equal to the
// content of |packet|, and that the same content is received |times| number
// of times in a row.
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(
_, _, packet->length - kRtpHeaderSize))
.With(Args<1, 2>(ElementsAreArray(packet->data + kRtpHeaderSize,
packet->length - kRtpHeaderSize)))
.Times(times);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, packet->length))
.With(Args<0, 1>(ElementsAreArray(packet->data,
packet->length)))
.Times(times).WillRepeatedly(Return(true));
}
void BuildAndAddRedMediaPacket(RtpPacket* packet) {
@ -92,7 +102,7 @@ class ReceiverFecTest : public ::testing::Test {
}
ForwardErrorCorrection* fec_;
MockRTPReceiverVideo rtp_receiver_video_;
MockRtpData rtp_data_callback_;
ReceiverFEC* receiver_fec_;
FrameGenerator* generator_;
};
@ -255,8 +265,8 @@ TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
delayed_fec = fec_packets.front();
@ -270,15 +280,15 @@ TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
it != media_rtp_packets_batch2.end(); ++it) {
BuildAndAddRedMediaPacket(*it);
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
// Add the delayed FEC packet. One packet should be reconstructed.
BuildAndAddRedFecPacket(delayed_fec);
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets_batch1);
@ -299,8 +309,8 @@ TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
delayed_fec = fec_packets.front();
@ -314,15 +324,15 @@ TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
it != media_rtp_packets_batch2.end(); ++it) {
BuildAndAddRedMediaPacket(*it);
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
// Add the delayed FEC packet. No packet should be reconstructed since the
// first media packet of that frame has been dropped due to being too old.
BuildAndAddRedFecPacket(delayed_fec);
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(0);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
@ -346,7 +356,7 @@ TEST_F(ReceiverFecTest, OldFecPacketDropped) {
it != fec_packets.end(); ++it) {
// Only FEC packets inserted. No packets recoverable at this time.
BuildAndAddRedFecPacket(*it);
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(0);
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
}
@ -360,8 +370,8 @@ TEST_F(ReceiverFecTest, OldFecPacketDropped) {
// and should've been dropped. Only the media packet we inserted will be
// returned.
BuildAndAddRedMediaPacket(media_rtp_packets.front());
EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_, _, _))
.Times(1);
EXPECT_CALL(rtp_data_callback_, OnRecoveredPacket(_, _))
.Times(1).WillRepeatedly(Return(true));
EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
DeletePackets(&media_packets);

View File

@ -115,13 +115,15 @@ TEST_F(RtcpFormatRembTest, TestNonCompund) {
uint32_t SSRC = 456789;
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpNonCompound));
EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 1, &SSRC));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb, NULL));
}
TEST_F(RtcpFormatRembTest, TestCompund) {
uint32_t SSRCs[2] = {456789, 98765};
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 2, SSRCs));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb, &receive_stats));
}
} // namespace
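The same change of contract shows up at every SendRTCP call site in this diff: report-block statistics are now supplied by the caller instead of being pulled from the RTP receiver. A minimal caller-side sketch, assuming the RtpReceiveStatistics fields used later in rtcp_sender.cc (fraction_lost, cumulative_lost, extended_max_sequence_number, jitter); the helper function is hypothetical:

#include <string.h>

#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"

// Hypothetical helper: send a receiver report with caller-supplied statistics.
void SendReceiverReport(webrtc::RTCPSender* rtcp_sender) {
  webrtc::ReceiveStatistics::RtpReceiveStatistics stats;
  memset(&stats, 0, sizeof(stats));
  stats.fraction_lost = 0;                    // 8-bit fixed-point loss fraction.
  stats.cumulative_lost = 0;                  // Total packets lost (24 bits valid).
  stats.extended_max_sequence_number = 4711;  // Highest extended sequence number.
  stats.jitter = 0;                           // Interarrival jitter, RTP timestamp units.

  // Build the report block from |stats|. Passing NULL instead means
  // "nothing received yet" and no report block is generated.
  rtcp_sender->SendRTCP(webrtc::kRtcpRr, &stats);
}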

View File

@ -141,6 +141,11 @@ RTCPReceiver::SetRemoteSSRC( const uint32_t ssrc)
return 0;
}
uint32_t RTCPReceiver::RemoteSSRC() const {
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
return _remoteSSRC;
}
void RTCPReceiver::RegisterRtcpObservers(
RtcpIntraFrameObserver* intra_frame_callback,
RtcpBandwidthObserver* bandwidth_callback,
@ -183,7 +188,7 @@ int32_t RTCPReceiver::ResetRTT(const uint32_t remoteSSRC) {
return 0;
}
int32_t RTCPReceiver::RTT(const uint32_t remoteSSRC,
int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
uint16_t* RTT,
uint16_t* avgRTT,
uint16_t* minRTT,
@ -1406,43 +1411,4 @@ int32_t RTCPReceiver::TMMBRReceived(const uint32_t size,
return num;
}
int32_t
RTCPReceiver::SetPacketTimeout(const uint32_t timeoutMS)
{
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
_packetTimeOutMS = timeoutMS;
return 0;
}
void RTCPReceiver::PacketTimeout()
{
if(_packetTimeOutMS == 0)
{
// not configured
return;
}
bool packetTimeOut = false;
{
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
if(_lastReceived == 0)
{
// not active
return;
}
int64_t now = _clock->TimeInMilliseconds();
if(now - _lastReceived > _packetTimeOutMS)
{
packetTimeOut = true;
_lastReceived = 0; // only one callback
}
}
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if(packetTimeOut && _cbRtcpFeedback)
{
_cbRtcpFeedback->OnRTCPPacketTimeout(_id);
}
}
} // namespace webrtc

View File

@ -42,6 +42,7 @@ public:
void SetSSRC( const uint32_t ssrc);
void SetRelaySSRC( const uint32_t ssrc);
int32_t SetRemoteSSRC( const uint32_t ssrc);
uint32_t RemoteSSRC() const;
uint32_t RelaySSRC() const;
@ -67,7 +68,7 @@ public:
uint32_t *rtcp_timestamp) const;
// get rtt
int32_t RTT(const uint32_t remoteSSRC,
int32_t RTT(uint32_t remoteSSRC,
uint16_t* RTT,
uint16_t* avgRTT,
uint16_t* minRTT,
@ -106,9 +107,6 @@ public:
int32_t UpdateTMMBR();
int32_t SetPacketTimeout(const uint32_t timeoutMS);
void PacketTimeout();
protected:
RTCPHelp::RTCPReportBlockInformation* CreateReportBlockInformation(const uint32_t remoteSSRC);
RTCPHelp::RTCPReportBlockInformation* GetReportBlockInformation(const uint32_t remoteSSRC) const;

View File

@ -136,7 +136,7 @@ class PacketBuilder {
// This test transport verifies that no functions get called.
class TestTransport : public Transport,
public RtpData {
public NullRtpData {
public:
explicit TestTransport()
: rtcp_receiver_(NULL) {

View File

@ -271,7 +271,7 @@ RTCPSender::SetSendingStatus(const bool sending)
}
if(sendRTCPBye)
{
return SendRTCP(kRtcpBye);
return SendRTCP(kRtcpBye, NULL);
}
return 0;
}
@ -376,12 +376,10 @@ RTCPSender::SetSSRC( const uint32_t ssrc)
_SSRC = ssrc;
}
int32_t
RTCPSender::SetRemoteSSRC( const uint32_t ssrc)
void RTCPSender::SetRemoteSSRC(uint32_t ssrc)
{
CriticalSectionScoped lock(_criticalSectionRTCPSender);
_remoteSSRC = ssrc;
return 0;
}
int32_t
@ -1536,11 +1534,13 @@ RTCPSender::BuildVoIPMetric(uint8_t* rtcpbuffer, uint32_t& pos)
}
int32_t
RTCPSender::SendRTCP(const uint32_t packetTypeFlags,
const int32_t nackSize, // NACK
const uint16_t* nackList, // NACK
const bool repeat, // FIR
const uint64_t pictureID) // SLI & RPSI
RTCPSender::SendRTCP(
uint32_t packetTypeFlags,
const ReceiveStatistics::RtpReceiveStatistics* receive_stats,
int32_t nackSize,
const uint16_t* nackList,
bool repeat,
uint64_t pictureID)
{
uint32_t rtcpPacketTypeFlags = packetTypeFlags;
uint32_t pos = 0;
@ -1572,13 +1572,15 @@ RTCPSender::SendRTCP(const uint32_t packetTypeFlags,
rtcpPacketTypeFlags & kRtcpSr ||
rtcpPacketTypeFlags & kRtcpRr)
{
// get statistics from our RTPreceiver outside critsect
if(_rtpRtcp.ReportBlockStatistics(&received.fractionLost,
&received.cumulativeLost,
&received.extendedHighSeqNum,
&received.jitter,
&jitterTransmissionOffset) == 0)
// Do we have receive statistics to send?
if (receive_stats)
{
received.fractionLost = receive_stats->fraction_lost;
received.cumulativeLost = receive_stats->cumulative_lost;
received.extendedHighSeqNum =
receive_stats->extended_max_sequence_number;
received.jitter = receive_stats->jitter;
jitterTransmissionOffset = 0;
hasReceived = true;
uint32_t lastReceivedRRNTPsecs = 0;

View File

@ -17,6 +17,7 @@
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@ -26,7 +27,7 @@
namespace webrtc {
class ModuleRtpRtcpImpl;
class ModuleRtpRtcpImpl;
class NACKStringBuilder
{
@ -72,7 +73,7 @@ public:
void SetSSRC( const uint32_t ssrc);
int32_t SetRemoteSSRC( const uint32_t ssrc);
void SetRemoteSSRC(uint32_t ssrc);
int32_t SetCameraDelay(const int32_t delayMS);
@ -90,11 +91,13 @@ public:
uint32_t LastSendReport(uint32_t& lastRTCPTime);
int32_t SendRTCP(const uint32_t rtcpPacketTypeFlags,
const int32_t nackSize = 0,
const uint16_t* nackList = 0,
const bool repeat = false,
const uint64_t pictureID = 0);
int32_t SendRTCP(
uint32_t rtcpPacketTypeFlags,
const ReceiveStatistics::RtpReceiveStatistics* receive_stats,
int32_t nackSize = 0,
const uint16_t* nackList = 0,
bool repeat = false,
uint64_t pictureID = 0);
int32_t AddReportBlock(const uint32_t SSRC,
const RTCPReportBlock* receiveBlock);

View File

@ -20,6 +20,8 @@
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
@ -217,7 +219,7 @@ void CreateRtpPacket(const bool marker_bit, const uint8_t payload,
}
class TestTransport : public Transport,
public RtpData {
public NullRtpData {
public:
TestTransport()
: rtcp_receiver_(NULL) {
@ -277,6 +279,8 @@ class RtcpSenderTest : public ::testing::Test {
RtcpSenderTest()
: over_use_detector_options_(),
system_clock_(Clock::GetRealTimeClock()),
rtp_payload_registry_(new RTPPayloadRegistry(
0, RTPPayloadStrategy::CreateStrategy(false))),
remote_bitrate_observer_(),
remote_bitrate_estimator_(
RemoteBitrateEstimatorFactory().Create(
@ -288,11 +292,12 @@ class RtcpSenderTest : public ::testing::Test {
configuration.id = 0;
configuration.audio = false;
configuration.clock = system_clock_;
configuration.incoming_data = test_transport_;
configuration.outgoing_transport = test_transport_;
configuration.remote_bitrate_estimator = remote_bitrate_estimator_.get();
rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration);
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
0, system_clock_, test_transport_, NULL, rtp_payload_registry_.get()));
rtcp_sender_ = new RTCPSender(0, false, system_clock_, rtp_rtcp_impl_);
rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_);
test_transport_->SetRTCPReceiver(rtcp_receiver_);
@ -315,6 +320,8 @@ class RtcpSenderTest : public ::testing::Test {
OverUseDetectorOptions over_use_detector_options_;
Clock* system_clock_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
ModuleRtpRtcpImpl* rtp_rtcp_impl_;
RTCPSender* rtcp_sender_;
RTCPReceiver* rtcp_receiver_;
@ -328,7 +335,7 @@ class RtcpSenderTest : public ::testing::Test {
TEST_F(RtcpSenderTest, RtcpOff) {
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpOff));
EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr));
EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr, NULL));
}
TEST_F(RtcpSenderTest, IJStatus) {
@ -352,18 +359,27 @@ TEST_F(RtcpSenderTest, TestCompound) {
strncpy(codec_inst.plName, "VP8", webrtc::kPayloadNameSize - 1);
codec_inst.codecType = webrtc::kVideoCodecVP8;
codec_inst.plType = payload;
EXPECT_EQ(0, rtp_rtcp_impl_->RegisterReceivePayload(codec_inst));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(codec_inst.plName,
codec_inst.plType,
90000,
0,
codec_inst.maxBitrate));
// Make sure RTP packet has been received.
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
EXPECT_TRUE(parser->Parse(packet_, packet_length, &header));
EXPECT_EQ(0, rtp_rtcp_impl_->IncomingRtpPacket(packet_, packet_length,
header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific));
EXPECT_TRUE(rtp_receiver_->IncomingRtpPacket(&header, packet_, packet_length,
payload_specific, true));
EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr, &receive_stats));
// Transmission time offset packet should be received.
ASSERT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
@ -373,7 +389,9 @@ TEST_F(RtcpSenderTest, TestCompound) {
TEST_F(RtcpSenderTest, TestCompound_NoRtpReceived) {
EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
// |receive_stats| is NULL since no data has been received.
ReceiveStatistics::RtpReceiveStatistics* receive_stats = NULL;
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr, receive_stats));
// Transmission time offset packet should not be received.
ASSERT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
@ -391,7 +409,9 @@ TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) {
TMMBRSet bounding_set;
EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr, &receive_stats));
// We now expect the packet to show up in the rtcp_packet_info_ of
// test_transport_.
ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
@ -413,7 +433,9 @@ TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndValid) {
EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
ReceiveStatistics::RtpReceiveStatistics receive_stats;
memset(&receive_stats, 0, sizeof(receive_stats));
EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr, &receive_stats));
// We now expect the packet to show up in the rtcp_packet_info_ of
// test_transport_.
ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/system_wrappers/interface/trace.h"
@ -21,8 +21,7 @@ RTPPayloadRegistry::RTPPayloadRegistry(
rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
last_received_payload_type_(-1),
last_received_media_payload_type_(-1) {
}
last_received_media_payload_type_(-1) {}
RTPPayloadRegistry::~RTPPayloadRegistry() {
while (!payload_type_map_.empty()) {
@ -104,6 +103,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
if (ModuleRTPUtility::StringCompare(payload_name, "red", 3)) {
red_payload_type_ = payload_type;
payload = new ModuleRTPUtility::Payload;
memset(payload, 0, sizeof(*payload));
payload->audio = false;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payload_name, RTP_PAYLOAD_NAME_SIZE - 1);
@ -226,7 +226,29 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
return -1;
}
int32_t RTPPayloadRegistry::PayloadTypeToPayload(
bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
PayloadUnion* payload) const {
ModuleRTPUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
return false;
}
*payload = it->second->typeSpecific;
return true;
}
int RTPPayloadRegistry::GetPayloadTypeFrequency(
uint8_t payload_type) const {
ModuleRTPUtility::Payload* payload;
if (!PayloadTypeToPayload(payload_type, payload)) {
return -1;
}
return rtp_payload_strategy_->GetPayloadTypeFrequency(*payload);
}
bool RTPPayloadRegistry::PayloadTypeToPayload(
const uint8_t payload_type,
ModuleRTPUtility::Payload*& payload) const {
@ -235,10 +257,11 @@ int32_t RTPPayloadRegistry::PayloadTypeToPayload(
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
return -1;
return false;
}
payload = it->second;
return 0;
return true;
}
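Callers of the registry now test a bool instead of comparing against 0/-1, and GetPayloadSpecifics lets them avoid touching the internal Payload struct at all. A minimal sketch of the new calling convention; the helper name is hypothetical:

#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"

// Hypothetical lookup using the bool-returning registry accessors.
bool LookUpPayload(const webrtc::RTPPayloadRegistry& registry,
                   uint8_t payload_type) {
  webrtc::PayloadUnion payload_specific;
  if (!registry.GetPayloadSpecifics(payload_type, &payload_specific)) {
    return false;  // Not a registered payload type.
  }
  // Frequency lookup follows the same convention: -1 means "unknown type".
  return registry.GetPayloadTypeFrequency(payload_type) != -1;
}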
bool RTPPayloadRegistry::ReportMediaPayloadType(
@ -283,12 +306,18 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
assert(frequency >= 1000);
payload->typeSpecific.Audio.frequency = frequency;
payload->typeSpecific.Audio.channels = channels;
payload->typeSpecific.Audio.rate = rate;
payload->audio = true;
return payload;
}
int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const {
return payload.typeSpecific.Audio.frequency;
}
};
class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
@ -315,15 +344,15 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const OVERRIDE {
RtpVideoCodecTypes videoType = kRtpGenericVideo;
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
videoType = kRtpVp8Video;
videoType = kRtpVideoVp8;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpGenericVideo;
videoType = kRtpVideoGeneric;
} else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
videoType = kRtpFecVideo;
videoType = kRtpVideoFec;
} else {
videoType = kRtpGenericVideo;
videoType = kRtpVideoGeneric;
}
ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
@ -334,6 +363,11 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
payload->audio = false;
return payload;
}
int GetPayloadTypeFrequency(
const ModuleRTPUtility::Payload& payload) const {
return kVideoPayloadTypeFrequency;
}
};
RTPPayloadStrategy* RTPPayloadStrategy::CreateStrategy(

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@ -74,8 +74,8 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
EXPECT_TRUE(new_payload_created) << "A new payload WAS created.";
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
// We should get back the exact pointer to the payload returned by the
// payload strategy.
@ -83,7 +83,7 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
// Now forget about it and verify it's gone.
EXPECT_EQ(0, rtp_payload_registry_->DeRegisterReceivePayload(payload_type));
EXPECT_EQ(-1, rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type, retrieved_payload));
}
@ -101,8 +101,8 @@ TEST_F(RtpPayloadRegistryTest, DoesNotCreateNewPayloadTypeIfRed) {
ASSERT_EQ(red_type_of_the_day, rtp_payload_registry_->red_payload_type());
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(red_type_of_the_day,
retrieved_payload));
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(red_type_of_the_day,
retrieved_payload));
EXPECT_FALSE(retrieved_payload->audio);
EXPECT_STRCASEEQ("red", retrieved_payload->name);
}
@ -131,11 +131,11 @@ TEST_F(RtpPayloadRegistryTest,
// Ensure both payloads are preserved.
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
EXPECT_EQ(first_payload_on_heap, retrieved_payload);
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1,
retrieved_payload));
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1,
retrieved_payload));
EXPECT_EQ(second_payload_on_heap, retrieved_payload);
// Ok, update the rate for one of the codecs. If either the incoming rate or
@ -170,10 +170,10 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalChannels, kTypicalRate, &ignored));
ModuleRTPUtility::Payload* retrieved_payload = NULL;
EXPECT_EQ(-1, rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type, retrieved_payload)) << "The first payload should be "
"deregistered because the only thing that differs is payload type.";
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type - 1, retrieved_payload)) <<
"The second payload should still be registered though.";
@ -185,10 +185,10 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalPayloadName, payload_type + 1, kTypicalFrequency,
kTypicalChannels, kTypicalRate, &ignored));
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type - 1, retrieved_payload)) <<
"Not compatible; both payloads should be kept.";
EXPECT_EQ(0, rtp_payload_registry_->PayloadTypeToPayload(
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type + 1, retrieved_payload)) <<
"Not compatible; both payloads should be kept.";
}

File diff suppressed because it is too large

View File

@ -1,242 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
#include <map>
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RtpRtcpFeedback;
class ModuleRtpRtcpImpl;
class Trace;
class RTPReceiverAudio;
class RTPReceiverVideo;
class RTPReceiverStrategy;
class RTPReceiver : public Bitrate {
public:
// Callbacks passed in here may not be NULL (use Null Object callbacks if you
// want callbacks to do nothing). This class takes ownership of the media
// receiver but nothing else.
RTPReceiver(const int32_t id,
Clock* clock,
ModuleRtpRtcpImpl* owner,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPReceiverStrategy* rtp_media_receiver,
RTPPayloadRegistry* rtp_payload_registry);
virtual ~RTPReceiver();
RtpVideoCodecTypes VideoCodecType() const;
uint32_t MaxConfiguredBitrate() const;
int32_t SetPacketTimeout(const uint32_t timeout_ms);
void PacketTimeout();
void ProcessDeadOrAlive(const bool RTCPalive, const int64_t now);
void ProcessBitrate();
int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate);
int32_t DeRegisterReceivePayload(const int8_t payload_type);
int32_t ReceivePayloadType(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate,
int8_t* payload_type) const;
int32_t IncomingRTPPacket(
RTPHeader* rtpheader,
const uint8_t* incoming_rtp_packet,
const uint16_t incoming_rtp_packet_length);
NACKMethod NACK() const;
// Turn negative acknowledgement requests on/off.
int32_t SetNACKStatus(const NACKMethod method, int max_reordering_threshold);
// Returns the last received timestamp.
virtual uint32_t TimeStamp() const;
int32_t LastReceivedTimeMs() const;
virtual uint16_t SequenceNumber() const;
int32_t EstimatedRemoteTimeStamp(uint32_t& timestamp) const;
uint32_t SSRC() const;
int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const;
int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
// Get the currently configured SSRC filter.
int32_t SSRCFilter(uint32_t& allowed_ssrc) const;
// Set a SSRC to be used as a filter for incoming RTP streams.
int32_t SetSSRCFilter(const bool enable, const uint32_t allowed_ssrc);
int32_t Statistics(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter, // Will be moved from JB.
uint32_t* max_jitter,
uint32_t* jitter_transmission_time_offset,
bool reset) const;
int32_t Statistics(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter, // Will be moved from JB.
uint32_t* max_jitter,
uint32_t* jitter_transmission_time_offset,
int32_t* missing,
bool reset) const;
int32_t DataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const;
int32_t ResetStatistics();
int32_t ResetDataCounters();
uint16_t PacketOHReceived() const;
uint32_t PacketCountReceived() const;
uint32_t ByteCountReceived() const;
int32_t RegisterRtpHeaderExtension(const RTPExtensionType type,
const uint8_t id);
int32_t DeregisterRtpHeaderExtension(const RTPExtensionType type);
void GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const;
// RTX.
void SetRTXStatus(bool enable, uint32_t ssrc);
void RTXStatus(bool* enable, uint32_t* ssrc, int* payload_type) const;
void SetRtxPayloadType(int payload_type);
virtual int8_t REDPayloadType() const;
bool HaveNotReceivedPackets() const;
virtual bool RetransmitOfOldPacket(const uint16_t sequence_number,
const uint32_t rtp_time_stamp) const;
void UpdateStatistics(const RTPHeader* rtp_header,
const uint16_t bytes,
const bool old_packet);
private:
// Returns whether RED is configured with payload_type.
bool REDPayloadType(const int8_t payload_type) const;
bool InOrderPacket(const uint16_t sequence_number) const;
void CheckSSRCChanged(const RTPHeader* rtp_header);
void CheckCSRC(const WebRtcRTPHeader* rtp_header);
int32_t CheckPayloadChanged(const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& isRED,
ModuleRTPUtility::PayloadUnion* payload);
void UpdateNACKBitRate(int32_t bytes, uint32_t now);
bool ProcessNACKBitRate(uint32_t now);
RTPPayloadRegistry* rtp_payload_registry_;
scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
int32_t id_;
ModuleRtpRtcpImpl& rtp_rtcp_;
RtpFeedback* cb_rtp_feedback_;
CriticalSectionWrapper* critical_section_rtp_receiver_;
mutable int64_t last_receive_time_;
uint16_t last_received_payload_length_;
uint32_t packet_timeout_ms_;
// SSRCs.
uint32_t ssrc_;
uint8_t num_csrcs_;
uint32_t current_remote_csrc_[kRtpCsrcSize];
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
bool use_ssrc_filter_;
uint32_t ssrc_filter_;
// Stats on received RTP packets.
uint32_t jitter_q4_;
mutable uint32_t jitter_max_q4_;
mutable uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
uint32_t local_time_last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint32_t last_received_timestamp_;
uint16_t last_received_sequence_number_;
int32_t last_received_transmission_time_offset_;
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
// Current counter values.
uint16_t received_packet_oh_;
uint32_t received_byte_count_;
uint32_t received_old_packet_count_;
uint32_t received_inorder_packet_count_;
// Counter values when we sent the last report.
mutable uint32_t last_report_inorder_packets_;
mutable uint32_t last_report_old_packets_;
mutable uint16_t last_report_seq_max_;
mutable uint8_t last_report_fraction_lost_;
mutable uint32_t last_report_cumulative_lost_; // 24 bits valid.
mutable uint32_t last_report_extended_high_seq_num_;
mutable uint32_t last_report_jitter_;
mutable uint32_t last_report_jitter_transmission_time_offset_;
NACKMethod nack_method_;
int max_reordering_threshold_;
bool rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_

View File

@ -19,13 +19,18 @@
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback) {
return new RTPReceiverAudio(id, data_callback, incoming_messages_callback);
}
RTPReceiverAudio::RTPReceiverAudio(const int32_t id,
RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback)
: RTPReceiverStrategy(data_callback),
TelephoneEventHandler(),
id_(id),
critical_section_rtp_receiver_audio_(
CriticalSectionWrapper::CreateCriticalSection()),
last_received_frequency_(8000),
telephone_event_forward_to_decoder_(false),
telephone_event_payload_type_(-1),
@ -36,44 +41,36 @@ RTPReceiverAudio::RTPReceiverAudio(const int32_t id,
cng_payload_type_(-1),
g722_payload_type_(-1),
last_received_g722_(false),
num_energy_(0),
current_remote_energy_(),
cb_audio_feedback_(incoming_messages_callback) {
last_payload_.Audio.channels = 1;
}
RTPReceiverAudio::~RTPReceiverAudio() {}
uint32_t RTPReceiverAudio::AudioFrequency() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
if (last_received_g722_) {
return 8000;
}
return last_received_frequency_;
memset(current_remote_energy_, 0, sizeof(current_remote_energy_));
}
// Outband TelephoneEvent(DTMF) detection
int RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
void RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
CriticalSectionScoped lock(crit_sect_.get());
telephone_event_forward_to_decoder_ = forward_to_decoder;
return 0;
}
// Is forwarding of outband telephone events turned on/off?
bool RTPReceiverAudio::TelephoneEventForwardToDecoder() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
CriticalSectionScoped lock(crit_sect_.get());
return telephone_event_forward_to_decoder_;
}
bool RTPReceiverAudio::TelephoneEventPayloadType(
const int8_t payload_type) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
int8_t payload_type) const {
CriticalSectionScoped lock(crit_sect_.get());
return (telephone_event_payload_type_ == payload_type) ? true : false;
}
bool RTPReceiverAudio::CNGPayloadType(const int8_t payload_type,
bool RTPReceiverAudio::CNGPayloadType(int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed) {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
CriticalSectionScoped lock(crit_sect_.get());
*cng_payload_type_has_changed = false;
// We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
@ -119,8 +116,7 @@ bool RTPReceiverAudio::CNGPayloadType(const int8_t payload_type,
return false;
}
bool RTPReceiverAudio::ShouldReportCsrcChanges(
uint8_t payload_type) const {
bool RTPReceiverAudio::ShouldReportCsrcChanges(uint8_t payload_type) const {
// Don't do this for DTMF packets, otherwise it's fine.
return !TelephoneEventPayloadType(payload_type);
}
@ -159,9 +155,9 @@ bool RTPReceiverAudio::ShouldReportCsrcChanges(
// - G7221 frame N/A
int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency) {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
int8_t payload_type,
uint32_t frequency) {
CriticalSectionScoped lock(crit_sect_.get());
if (ModuleRTPUtility::StringCompare(payload_name, "telephone-event", 15)) {
telephone_event_payload_type_ = payload_type;
@ -184,18 +180,24 @@ int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
return 0;
}
int32_t RTPReceiverAudio::ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) {
int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp", "Audio::ParseRtp",
"seqnum", rtp_header->header.sequenceNumber,
"timestamp", rtp_header->header.timestamp);
rtp_header->type.Audio.numEnergy = rtp_header->header.numCSRCs;
num_energy_ = rtp_header->type.Audio.numEnergy;
if (rtp_header->type.Audio.numEnergy > 0 &&
rtp_header->type.Audio.numEnergy <= kRtpCsrcSize) {
memcpy(current_remote_energy_,
rtp_header->type.Audio.arrOfEnergy,
rtp_header->type.Audio.numEnergy);
}
const uint8_t* payload_data =
ModuleRTPUtility::GetPayloadData(rtp_header->header, packet);
const uint16_t payload_data_length =
@ -208,8 +210,12 @@ int32_t RTPReceiverAudio::ParseRtpPacket(
is_red);
}
int32_t RTPReceiverAudio::GetFrequencyHz() const {
return AudioFrequency();
int RTPReceiverAudio::GetPayloadTypeFrequency() const {
CriticalSectionScoped lock(crit_sect_.get());
if (last_received_g722_) {
return 8000;
}
return last_received_frequency_;
}
RTPAliveType RTPReceiverAudio::ProcessDeadOrAlive(
@ -224,11 +230,10 @@ RTPAliveType RTPReceiverAudio::ProcessDeadOrAlive(
}
}
void RTPReceiverAudio::CheckPayloadChanged(
const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
void RTPReceiverAudio::CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
*should_discard_changes = false;
*should_reset_statistics = false;
@ -252,12 +257,24 @@ void RTPReceiverAudio::CheckPayloadChanged(
}
}
int RTPReceiverAudio::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
CriticalSectionScoped cs(crit_sect_.get());
assert(num_energy_ <= kRtpCsrcSize);
if (num_energy_ > 0) {
memcpy(array_of_energy, current_remote_energy_,
sizeof(uint8_t) * num_energy_);
}
return num_energy_;
}
int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
RtpFeedback* callback,
const int32_t id,
const int8_t payload_type,
int32_t id,
int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const ModuleRTPUtility::PayloadUnion& specific_payload) const {
const PayloadUnion& specific_payload) const {
if (-1 == callback->OnInitializeDecoder(id,
payload_type,
payload_name,
@ -278,9 +295,9 @@ int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_length,
const ModuleRTPUtility::AudioPayload& audio_specific,
const bool is_red) {
uint16_t payload_length,
const AudioPayload& audio_specific,
bool is_red) {
if (payload_length == 0) {
return 0;
@ -289,7 +306,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
bool telephone_event_packet =
TelephoneEventPayloadType(rtp_header->header.payloadType);
if (telephone_event_packet) {
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
CriticalSectionScoped lock(crit_sect_.get());
// RFC 4733 2.3
// 0 1 2 3
@ -334,7 +351,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
}
{
CriticalSectionScoped lock(critical_section_rtp_receiver_audio_.get());
CriticalSectionScoped lock(crit_sect_.get());
if (!telephone_event_packet) {
last_received_frequency_ = audio_specific.frequency;

View File

@ -13,8 +13,8 @@
#include <set>
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -25,17 +25,17 @@ namespace webrtc {
class CriticalSectionWrapper;
// Handles audio RTP packets. This class is thread-safe.
class RTPReceiverAudio : public RTPReceiverStrategy {
class RTPReceiverAudio : public RTPReceiverStrategy,
public TelephoneEventHandler {
public:
RTPReceiverAudio(const int32_t id,
RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
virtual ~RTPReceiverAudio();
uint32_t AudioFrequency() const;
virtual ~RTPReceiverAudio() {}
// The following three methods implement the TelephoneEventHandler interface.
// Forward DTMFs to decoder for playout.
int SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
void SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
// Is forwarding of outband telephone events turned on/off?
bool TelephoneEventForwardToDecoder() const;
@ -43,22 +43,25 @@ class RTPReceiverAudio : public RTPReceiverStrategy {
// Is TelephoneEvent configured with payload type payload_type
bool TelephoneEventPayloadType(const int8_t payload_type) const;
TelephoneEventHandler* GetTelephoneEventHandler() {
return this;
}
// Returns true if CNG is configured with payload type payload_type. If so,
// the frequency and cng_payload_type_has_changed are filled in.
bool CNGPayloadType(const int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed);
virtual int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) OVERRIDE;
int32_t ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet);
virtual int32_t GetFrequencyHz() const OVERRIDE;
int GetPayloadTypeFrequency() const OVERRIDE;
virtual RTPAliveType ProcessDeadOrAlive(uint16_t last_payload_length) const
OVERRIDE;
@ -67,44 +70,45 @@ class RTPReceiverAudio : public RTPReceiverStrategy {
virtual int32_t OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency) OVERRIDE;
int8_t payload_type,
uint32_t frequency) OVERRIDE;
virtual int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
const int32_t id,
const int8_t payload_type,
int32_t id,
int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const ModuleRTPUtility::PayloadUnion& specific_payload) const OVERRIDE;
const PayloadUnion& specific_payload) const OVERRIDE;
// We do not allow codecs to have multiple payload types for audio, so we
// need to override the default behavior (which is to do nothing).
void PossiblyRemoveExistingPayloadType(
ModuleRTPUtility::PayloadTypeMap* payload_type_map,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const size_t payload_name_length,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const;
size_t payload_name_length,
uint32_t frequency,
uint8_t channels,
uint32_t rate) const;
// We need to look out for special payload types here and sometimes reset
// statistics. In addition we sometimes need to tweak the frequency.
virtual void CheckPayloadChanged(const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
void CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) OVERRIDE;
int Energy(uint8_t array_of_energy[kRtpCsrcSize]) const OVERRIDE;
private:
int32_t ParseAudioCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_length,
const ModuleRTPUtility::AudioPayload& audio_specific,
const bool is_red);
uint16_t payload_length,
const AudioPayload& audio_specific,
bool is_red);
int32_t id_;
scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_audio_;
uint32_t last_received_frequency_;
@ -123,6 +127,9 @@ class RTPReceiverAudio : public RTPReceiverStrategy {
int8_t g722_payload_type_;
bool last_received_g722_;
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
RtpAudioFeedback* cb_audio_feedback_;
};
} // namespace webrtc

View File

@ -0,0 +1,653 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h"
#include <assert.h>
#include <math.h>
#include <stdlib.h>
#include <string.h>
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
using ModuleRTPUtility::GetCurrentRTP;
using ModuleRTPUtility::Payload;
using ModuleRTPUtility::RTPPayloadParser;
using ModuleRTPUtility::StringCompare;
RtpReceiver* RtpReceiver::CreateVideoReceiver(
int id, Clock* clock,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry) {
if (!incoming_payload_callback)
incoming_payload_callback = NullObjectRtpData();
if (!incoming_messages_callback)
incoming_messages_callback = NullObjectRtpFeedback();
return new RtpReceiverImpl(
id, clock, NullObjectRtpAudioFeedback(), incoming_messages_callback,
rtp_payload_registry,
RTPReceiverStrategy::CreateVideoStrategy(id, incoming_payload_callback));
}
RtpReceiver* RtpReceiver::CreateAudioReceiver(
int id, Clock* clock,
RtpAudioFeedback* incoming_audio_feedback,
RtpData* incoming_payload_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry) {
if (!incoming_audio_feedback)
incoming_audio_feedback = NullObjectRtpAudioFeedback();
if (!incoming_payload_callback)
incoming_payload_callback = NullObjectRtpData();
if (!incoming_messages_callback)
incoming_messages_callback = NullObjectRtpFeedback();
return new RtpReceiverImpl(
id, clock, incoming_audio_feedback, incoming_messages_callback,
rtp_payload_registry,
RTPReceiverStrategy::CreateAudioStrategy(id, incoming_payload_callback,
incoming_audio_feedback));
}
RtpReceiverImpl::RtpReceiverImpl(int32_t id,
Clock* clock,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry,
RTPReceiverStrategy* rtp_media_receiver)
: clock_(clock),
rtp_payload_registry_(rtp_payload_registry),
rtp_media_receiver_(rtp_media_receiver),
id_(id),
cb_rtp_feedback_(incoming_messages_callback),
critical_section_rtp_receiver_(
CriticalSectionWrapper::CreateCriticalSection()),
last_receive_time_(0),
last_received_payload_length_(0),
ssrc_(0),
num_csrcs_(0),
current_remote_csrc_(),
last_received_timestamp_(0),
last_received_frame_time_ms_(0),
last_received_sequence_number_(0),
nack_method_(kNackOff),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
rtx_(false),
ssrc_rtx_(0),
payload_type_rtx_(-1) {
assert(incoming_audio_messages_callback);
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RtpReceiverImpl::~RtpReceiverImpl() {
for (int i = 0; i < num_csrcs_; ++i) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, current_remote_csrc_[i],
false);
}
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
}
RTPReceiverStrategy* RtpReceiverImpl::GetMediaReceiver() const {
return rtp_media_receiver_.get();
}
RtpVideoCodecTypes RtpReceiverImpl::VideoCodecType() const {
PayloadUnion media_specific;
rtp_media_receiver_->GetLastMediaSpecificPayload(&media_specific);
return media_specific.Video.videoCodecType;
}
int32_t RtpReceiverImpl::RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
// TODO(phoglund): Try to streamline handling of the RED codec and some other
// cases which make it necessary to keep track of whether we created a
// payload or not.
bool created_new_payload = false;
int32_t result = rtp_payload_registry_->RegisterReceivePayload(
payload_name, payload_type, frequency, channels, rate,
&created_new_payload);
if (created_new_payload) {
if (rtp_media_receiver_->OnNewPayloadTypeCreated(payload_name, payload_type,
frequency) != 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s failed to register payload",
__FUNCTION__);
return -1;
}
}
return result;
}
int32_t RtpReceiverImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
return rtp_payload_registry_->DeRegisterReceivePayload(payload_type);
}
NACKMethod RtpReceiverImpl::NACK() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
return nack_method_;
}
// Turn negative acknowledgment requests on/off.
int32_t RtpReceiverImpl::SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
if (max_reordering_threshold < 0) {
return -1;
} else if (method == kNackRtcp) {
max_reordering_threshold_ = max_reordering_threshold;
} else {
max_reordering_threshold_ = kDefaultMaxReorderingThreshold;
}
nack_method_ = method;
return 0;
}
void RtpReceiverImpl::SetRTXStatus(bool enable, uint32_t ssrc) {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
rtx_ = enable;
ssrc_rtx_ = ssrc;
}
void RtpReceiverImpl::RTXStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
*enable = rtx_;
*ssrc = ssrc_rtx_;
*payload_type = payload_type_rtx_;
}
void RtpReceiverImpl::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(critical_section_rtp_receiver_.get());
payload_type_rtx_ = payload_type;
}
uint32_t RtpReceiverImpl::SSRC() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
return ssrc_;
}
// Get remote CSRC.
int32_t RtpReceiverImpl::CSRCs(uint32_t array_of_csrcs[kRtpCsrcSize]) const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
assert(num_csrcs_ <= kRtpCsrcSize);
if (num_csrcs_ > 0) {
memcpy(array_of_csrcs, current_remote_csrc_, sizeof(uint32_t)*num_csrcs_);
}
return num_csrcs_;
}
int32_t RtpReceiverImpl::Energy(
uint8_t array_of_energy[kRtpCsrcSize]) const {
return rtp_media_receiver_->Energy(array_of_energy);
}
bool RtpReceiverImpl::IncomingRtpPacket(
RTPHeader* rtp_header,
const uint8_t* packet,
int packet_length,
PayloadUnion payload_specific,
bool in_order) {
// The rtp_header argument contains the parsed RTP header.
int length = packet_length - rtp_header->paddingLength;
// Sanity check.
if ((length - rtp_header->headerLength) < 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s invalid argument",
__FUNCTION__);
return false;
}
{
CriticalSectionScoped cs(critical_section_rtp_receiver_.get());
// TODO(holmer): Make rtp_header const after RTX has been broken out.
if (rtx_) {
if (ssrc_rtx_ == rtp_header->ssrc) {
// Sanity check: RTX packets have 2 extra header bytes.
if (rtp_header->headerLength + kRtxHeaderSize > packet_length) {
return false;
}
// If a specific RTX payload type is negotiated, set back to the media
// payload type and treat it like a media packet from here.
if (payload_type_rtx_ != -1) {
if (payload_type_rtx_ == rtp_header->payloadType &&
rtp_payload_registry_->last_received_media_payload_type() != -1) {
rtp_header->payloadType =
rtp_payload_registry_->last_received_media_payload_type();
} else {
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"Incorrect RTX configuration, dropping packet.");
return false;
}
}
rtp_header->ssrc = ssrc_;
rtp_header->sequenceNumber =
(packet[rtp_header->headerLength] << 8) +
packet[1 + rtp_header->headerLength];
// Count the RTX header as part of the RTP header.
rtp_header->headerLength += 2;
}
}
}
int8_t first_payload_byte = 0;
if (length > 0) {
first_payload_byte = packet[rtp_header->headerLength];
}
// Trigger our callbacks.
CheckSSRCChanged(rtp_header);
bool is_red = false;
bool should_reset_statistics = false;
if (CheckPayloadChanged(rtp_header,
first_payload_byte,
is_red,
&payload_specific,
&should_reset_statistics) == -1) {
if (length - rtp_header->headerLength == 0) {
// OK, keep-alive packet.
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
"%s received keepalive",
__FUNCTION__);
return true;
}
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"%s received invalid payloadtype",
__FUNCTION__);
return false;
}
if (should_reset_statistics) {
cb_rtp_feedback_->ResetStatistics();
}
WebRtcRTPHeader webrtc_rtp_header;
memset(&webrtc_rtp_header, 0, sizeof(webrtc_rtp_header));
webrtc_rtp_header.header = *rtp_header;
CheckCSRC(&webrtc_rtp_header);
uint16_t payload_data_length =
ModuleRTPUtility::GetPayloadDataLength(*rtp_header, packet_length);
bool is_first_packet_in_frame = false;
bool is_first_packet = false;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
is_first_packet_in_frame =
last_received_sequence_number_ + 1 == rtp_header->sequenceNumber &&
Timestamp() != rtp_header->timestamp;
is_first_packet = is_first_packet_in_frame || last_receive_time_ == 0;
}
int32_t ret_val = rtp_media_receiver_->ParseRtpPacket(
&webrtc_rtp_header, payload_specific, is_red, packet, packet_length,
clock_->TimeInMilliseconds(), is_first_packet);
if (ret_val < 0) {
return false;
}
{
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
last_receive_time_ = clock_->TimeInMilliseconds();
last_received_payload_length_ = payload_data_length;
if (in_order) {
if (last_received_timestamp_ != rtp_header->timestamp) {
last_received_timestamp_ = rtp_header->timestamp;
last_received_frame_time_ms_ = clock_->TimeInMilliseconds();
}
last_received_sequence_number_ = rtp_header->sequenceNumber;
}
}
return true;
}
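To summarize the new flow that the updated unit tests also follow: header parsing and payload lookup happen outside the receiver, which is handed the pre-parsed header plus the payload specifics. A minimal sketch under that assumption; FeedPacket is a hypothetical helper:

#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

// Hypothetical feed function wiring parser, payload registry and receiver.
bool FeedPacket(webrtc::RtpReceiver* receiver,
                webrtc::RTPPayloadRegistry* registry,
                const uint8_t* packet, int packet_length) {
  webrtc::scoped_ptr<webrtc::RtpHeaderParser> parser(
      webrtc::RtpHeaderParser::Create());
  webrtc::RTPHeader header;
  if (!parser->Parse(packet, packet_length, &header))
    return false;
  webrtc::PayloadUnion payload_specific;
  if (!registry->GetPayloadSpecifics(header.payloadType, &payload_specific))
    return false;  // Payload type was never registered.
  // The final argument marks the packet as received in order.
  return receiver->IncomingRtpPacket(&header, packet, packet_length,
                                     payload_specific, true);
}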
bool RtpReceiverImpl::RetransmitOfOldPacket(const RTPHeader& header,
int jitter, int min_rtt) const {
if (InOrderPacket(header.sequenceNumber)) {
return false;
}
CriticalSectionScoped cs(critical_section_rtp_receiver_.get());
uint32_t frequency_khz = header.payload_type_frequency / 1000;
assert(frequency_khz > 0);
int64_t time_diff_ms = clock_->TimeInMilliseconds() -
last_receive_time_;
// Diff in time stamp since last received in order.
uint32_t timestamp_diff = header.timestamp - last_received_timestamp_;
int32_t rtp_time_stamp_diff_ms = static_cast<int32_t>(timestamp_diff) /
frequency_khz;
int32_t max_delay_ms = 0;
if (min_rtt == 0) {
// Jitter standard deviation in samples.
float jitter_std = sqrt(static_cast<float>(jitter));
// 2 times the standard deviation => 95% confidence.
// And transform to milliseconds by dividing by the frequency in kHz.
max_delay_ms = static_cast<int32_t>((2 * jitter_std) / frequency_khz);
// Min max_delay_ms is 1.
if (max_delay_ms == 0) {
max_delay_ms = 1;
}
} else {
max_delay_ms = (min_rtt / 3) + 1;
}
if (time_diff_ms > rtp_time_stamp_diff_ms + max_delay_ms) {
return true;
}
return false;
}
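To make the heuristic above concrete, the max_delay_ms computation is repeated here in isolation with a worked example in the comments (an illustrative sketch only):

#include <math.h>
#include <stdint.h>

// Example: for a 90 kHz video clock (frequency_khz = 90) and a jitter estimate
// of 810 RTP timestamp units, the standard deviation is sqrt(810) ~= 28.5
// units, so 2 * 28.5 / 90 rounds down to 0 ms and is clamped to the 1 ms
// minimum. With a known min RTT of 90 ms, the bound is 90 / 3 + 1 = 31 ms.
int32_t MaxDelayMs(int jitter, int min_rtt, uint32_t frequency_khz) {
  if (min_rtt != 0)
    return (min_rtt / 3) + 1;
  float jitter_std = sqrt(static_cast<float>(jitter));  // Std dev in samples.
  int32_t max_delay_ms = static_cast<int32_t>((2 * jitter_std) / frequency_khz);
  return max_delay_ms == 0 ? 1 : max_delay_ms;
}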
bool RtpReceiverImpl::InOrderPacket(const uint16_t sequence_number) const {
CriticalSectionScoped cs(critical_section_rtp_receiver_.get());
// First packet is always in order.
if (last_receive_time_ == 0)
return true;
if (IsNewerSequenceNumber(sequence_number, last_received_sequence_number_)) {
return true;
} else {
// If we have a restart of the remote side this packet is still in order.
return !IsNewerSequenceNumber(sequence_number,
last_received_sequence_number_ -
max_reordering_threshold_);
}
}
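InOrderPacket relies on IsNewerSequenceNumber for the wraparound case. The usual convention (assumed here, not quoted from the actual helper) treats a forward distance of less than half the 16-bit range as "newer":

#include <stdint.h>

// Assumed wraparound-aware comparison, shown for illustration only.
// E.g. 2 is newer than 65534 (forward distance 4), but 65534 is not newer
// than 2 (forward distance 65532, which is >= 0x8000).
bool IsNewerSequenceNumberSketch(uint16_t sequence_number,
                                 uint16_t prev_sequence_number) {
  return sequence_number != prev_sequence_number &&
         static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
}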
TelephoneEventHandler* RtpReceiverImpl::GetTelephoneEventHandler() {
return rtp_media_receiver_->GetTelephoneEventHandler();
}
uint32_t RtpReceiverImpl::Timestamp() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
return last_received_timestamp_;
}
int32_t RtpReceiverImpl::LastReceivedTimeMs() const {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
return last_received_frame_time_ms_;
}
// Implementation note: must not hold critsect when called.
void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader* rtp_header) {
bool new_ssrc = false;
bool re_initialize_decoder = false;
char payload_name[RTP_PAYLOAD_NAME_SIZE];
uint8_t channels = 1;
uint32_t rate = 0;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
if (ssrc_ != rtp_header->ssrc ||
(last_received_payload_type == -1 && ssrc_ == 0)) {
// We need the payload_type_ to make the call if the remote SSRC is 0.
new_ssrc = true;
cb_rtp_feedback_->ResetStatistics();
last_received_timestamp_ = 0;
last_received_sequence_number_ = 0;
last_received_frame_time_ms_ = 0;
// Do we have an SSRC? Then the stream is restarted.
if (ssrc_ != 0) {
// Do we have the same codec? Then re-initialize coder.
if (rtp_header->payloadType == last_received_payload_type) {
re_initialize_decoder = true;
Payload* payload;
if (!rtp_payload_registry_->PayloadTypeToPayload(
rtp_header->payloadType, payload)) {
return;
}
assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
if (payload->audio) {
channels = payload->typeSpecific.Audio.channels;
rate = payload->typeSpecific.Audio.rate;
}
}
}
ssrc_ = rtp_header->ssrc;
}
}
if (new_ssrc) {
// We need to get this to our RTCP sender and receiver.
// We need to do this outside critical section.
cb_rtp_feedback_->OnIncomingSSRCChanged(id_, rtp_header->ssrc);
}
if (re_initialize_decoder) {
if (-1 == cb_rtp_feedback_->OnInitializeDecoder(
id_, rtp_header->payloadType, payload_name,
rtp_header->payload_type_frequency, channels, rate)) {
// New stream, same codec.
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"Failed to create decoder for payload type:%d",
rtp_header->payloadType);
}
}
}
// Implementation note: must not hold critsect when called.
// TODO(phoglund): Move as much as possible of this code path into the media
// specific receivers. Basically this method goes through a lot of trouble to
// compute something which is only used by the media specific parts later. If
// this code path moves we can get rid of some of the rtp_receiver ->
// media_specific interface (such as CheckPayloadChange, possibly get/set
// last known payload).
int32_t RtpReceiverImpl::CheckPayloadChanged(
const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& is_red,
PayloadUnion* specific_payload,
bool* should_reset_statistics) {
bool re_initialize_decoder = false;
char payload_name[RTP_PAYLOAD_NAME_SIZE];
int8_t payload_type = rtp_header->payloadType;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
if (payload_type != last_received_payload_type) {
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Get the real codec payload type.
payload_type = first_payload_byte & 0x7f;
is_red = true;
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Invalid payload type, traced by caller. If we proceeded here,
// this would be set as |_last_received_payload_type|, and we would no
// longer catch corrupt packets at this level.
return -1;
}
// When we receive RED we need to check the real payload type.
if (payload_type == last_received_payload_type) {
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
return 0;
}
}
*should_reset_statistics = false;
bool should_discard_changes = false;
rtp_media_receiver_->CheckPayloadChanged(
payload_type, specific_payload, should_reset_statistics,
&should_discard_changes);
if (should_discard_changes) {
is_red = false;
return 0;
}
Payload* payload;
if (!rtp_payload_registry_->PayloadTypeToPayload(payload_type, payload)) {
// Not a registered payload type.
return -1;
}
assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
rtp_payload_registry_->set_last_received_payload_type(payload_type);
re_initialize_decoder = true;
rtp_media_receiver_->SetLastMediaSpecificPayload(payload->typeSpecific);
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
if (!payload->audio) {
if (VideoCodecType() == kRtpVideoFec) {
// Only reset the decoder on media packets.
re_initialize_decoder = false;
} else {
bool media_type_unchanged =
rtp_payload_registry_->ReportMediaPayloadType(payload_type);
if (media_type_unchanged) {
// Only reset the decoder if the media codec type has changed.
re_initialize_decoder = false;
}
}
}
if (re_initialize_decoder) {
*should_reset_statistics = true;
}
} else {
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
is_red = false;
}
} // End critsect.
if (re_initialize_decoder) {
if (-1 == rtp_media_receiver_->InvokeOnInitializeDecoder(
cb_rtp_feedback_, id_, payload_type, payload_name,
*specific_payload)) {
return -1; // Wrong payload type.
}
}
return 0;
}
// Implementation note: must not hold critsect when called.
void RtpReceiverImpl::CheckCSRC(const WebRtcRTPHeader* rtp_header) {
int32_t num_csrcs_diff = 0;
uint32_t old_remote_csrc[kRtpCsrcSize];
uint8_t old_num_csrcs = 0;
{
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
if (!rtp_media_receiver_->ShouldReportCsrcChanges(
rtp_header->header.payloadType)) {
return;
}
old_num_csrcs = num_csrcs_;
if (old_num_csrcs > 0) {
// Make a copy of old.
memcpy(old_remote_csrc, current_remote_csrc_,
num_csrcs_ * sizeof(uint32_t));
}
const uint8_t num_csrcs = rtp_header->header.numCSRCs;
if ((num_csrcs > 0) && (num_csrcs <= kRtpCsrcSize)) {
// Copy new.
memcpy(current_remote_csrc_,
rtp_header->header.arrOfCSRCs,
num_csrcs * sizeof(uint32_t));
}
if (num_csrcs > 0 || old_num_csrcs > 0) {
num_csrcs_diff = num_csrcs - old_num_csrcs;
num_csrcs_ = num_csrcs; // Update stored CSRCs.
} else {
// No change.
return;
}
} // End critsect.
bool have_called_callback = false;
// Search for new CSRC in old array.
for (uint8_t i = 0; i < rtp_header->header.numCSRCs; ++i) {
const uint32_t csrc = rtp_header->header.arrOfCSRCs[i];
bool found_match = false;
for (uint8_t j = 0; j < old_num_csrcs; ++j) {
if (csrc == old_remote_csrc[j]) { // old list
found_match = true;
break;
}
}
if (!found_match && csrc) {
// Didn't find it, report it as new.
have_called_callback = true;
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, csrc, true);
}
}
// Search for old CSRC in new array.
for (uint8_t i = 0; i < old_num_csrcs; ++i) {
const uint32_t csrc = old_remote_csrc[i];
bool found_match = false;
for (uint8_t j = 0; j < rtp_header->header.numCSRCs; ++j) {
if (csrc == rtp_header->header.arrOfCSRCs[j]) {
found_match = true;
break;
}
}
if (!found_match && csrc) {
// Did not find it, report as removed.
have_called_callback = true;
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, csrc, false);
}
}
if (!have_called_callback) {
    // If the CSRC list contains non-unique entries we will end up here.
    // We use CSRC 0 to signal this event; this is not interop safe since
    // other implementations might have CSRC 0 as a valid value.
if (num_csrcs_diff > 0) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, 0, true);
} else if (num_csrcs_diff < 0) {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, 0, false);
}
}
}
} // namespace webrtc

View File

@ -0,0 +1,122 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class RtpReceiverImpl : public RtpReceiver {
public:
// Callbacks passed in here may not be NULL (use Null Object callbacks if you
// want callbacks to do nothing). This class takes ownership of the media
// receiver but nothing else.
RtpReceiverImpl(int32_t id,
Clock* clock,
RtpAudioFeedback* incoming_audio_messages_callback,
RtpFeedback* incoming_messages_callback,
RTPPayloadRegistry* rtp_payload_registry,
RTPReceiverStrategy* rtp_media_receiver);
virtual ~RtpReceiverImpl();
RTPReceiverStrategy* GetMediaReceiver() const;
int32_t RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate);
int32_t DeRegisterReceivePayload(const int8_t payload_type);
bool IncomingRtpPacket(
RTPHeader* rtp_header,
const uint8_t* incoming_rtp_packet,
int incoming_rtp_packet_length,
PayloadUnion payload_specific,
bool in_order);
NACKMethod NACK() const;
// Turn negative acknowledgement requests on/off.
int32_t SetNACKStatus(const NACKMethod method, int max_reordering_threshold);
// Returns the last received timestamp.
virtual uint32_t Timestamp() const;
int32_t LastReceivedTimeMs() const;
uint32_t SSRC() const;
int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const;
int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
// RTX.
void SetRTXStatus(bool enable, uint32_t ssrc);
void RTXStatus(bool* enable, uint32_t* ssrc, int* payload_type) const;
void SetRtxPayloadType(int payload_type);
virtual bool RetransmitOfOldPacket(const RTPHeader& header,
int jitter, int min_rtt) const;
bool InOrderPacket(const uint16_t sequence_number) const;
TelephoneEventHandler* GetTelephoneEventHandler();
private:
RtpVideoCodecTypes VideoCodecType() const;
void CheckSSRCChanged(const RTPHeader* rtp_header);
void CheckCSRC(const WebRtcRTPHeader* rtp_header);
int32_t CheckPayloadChanged(const RTPHeader* rtp_header,
const int8_t first_payload_byte,
bool& isRED,
PayloadUnion* payload,
bool* should_reset_statistics);
Clock* clock_;
RTPPayloadRegistry* rtp_payload_registry_;
scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
int32_t id_;
RtpFeedback* cb_rtp_feedback_;
scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_;
int64_t last_receive_time_;
uint16_t last_received_payload_length_;
// SSRCs.
uint32_t ssrc_;
uint8_t num_csrcs_;
uint32_t current_remote_csrc_[kRtpCsrcSize];
uint32_t last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint16_t last_received_sequence_number_;
NACKMethod nack_method_;
int max_reordering_threshold_;
bool rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
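A minimal construction sketch for the class declared above; it is not part of this patch. It wires up a video RtpReceiverImpl with the Null Object callbacks and the CreateVideoStrategy factory introduced elsewhere in this change. The helper name is hypothetical, the NullObject* helpers are assumed to be declared in a visible header, and the caller is assumed to keep ownership of the clock and payload registry, since the receiver only takes ownership of the media receiver.
// Hypothetical wiring sketch, not part of this change.
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
namespace webrtc {
RtpReceiver* CreateVideoReceiverSketch(int32_t id,
                                       Clock* clock,
                                       RTPPayloadRegistry* registry,
                                       RtpData* data_callback) {
  // Callbacks may not be NULL; fall back to the Null Object implementation.
  if (data_callback == NULL)
    data_callback = NullObjectRtpData();
  // The receiver takes ownership of the strategy, but not of the registry,
  // the clock or the callbacks.
  RTPReceiverStrategy* media_receiver =
      RTPReceiverStrategy::CreateVideoStrategy(id, data_callback);
  return new RtpReceiverImpl(id, clock, NullObjectRtpAudioFeedback(),
                             NullObjectRtpFeedback(), registry,
                             media_receiver);
}
}  // namespace webrtc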

View File

@ -12,30 +12,39 @@
#include <stdlib.h>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
RTPReceiverStrategy::RTPReceiverStrategy(RtpData* data_callback)
: data_callback_(data_callback) {
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
data_callback_(data_callback) {
memset(&last_payload_, 0, sizeof(last_payload_));
}
void RTPReceiverStrategy::GetLastMediaSpecificPayload(
ModuleRTPUtility::PayloadUnion* payload) const {
PayloadUnion* payload) const {
CriticalSectionScoped cs(crit_sect_.get());
memcpy(payload, &last_payload_, sizeof(*payload));
}
void RTPReceiverStrategy::SetLastMediaSpecificPayload(
const ModuleRTPUtility::PayloadUnion& payload) {
const PayloadUnion& payload) {
CriticalSectionScoped cs(crit_sect_.get());
memcpy(&last_payload_, &payload, sizeof(last_payload_));
}
void RTPReceiverStrategy::CheckPayloadChanged(
const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
void RTPReceiverStrategy::CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes) {
// Default: Keep changes and don't reset statistics.
*should_discard_changes = false;
*should_reset_statistics = false;
}
int RTPReceiverStrategy::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
return -1;
}
} // namespace webrtc

View File

@ -14,23 +14,24 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class TelephoneEventHandler;
// This strategy deals with media-specific RTP packet processing.
// This class is not thread-safe and must be protected by its caller.
class RTPReceiverStrategy {
public:
// The data callback is where we should send received payload data.
// See ParseRtpPacket. This class does not claim ownership of the callback.
// Implementations must NOT hold any critical sections while calling the
// callback.
//
// Note: Implementations may call the callback for other reasons than calls
// to ParseRtpPacket, for instance if the implementation somehow recovers a
// packet.
RTPReceiverStrategy(RtpData* data_callback);
static RTPReceiverStrategy* CreateVideoStrategy(int32_t id,
RtpData* data_callback);
static RTPReceiverStrategy* CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
virtual ~RTPReceiverStrategy() {}
// Parses the RTP packet and calls the data callback with the payload data.
@ -39,21 +40,22 @@ class RTPReceiverStrategy {
// make changes in the data as necessary. The specific_payload argument
// provides audio or video-specific data. The is_first_packet argument is true
// if this packet is either the first packet ever or the first in its frame.
virtual int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const uint8_t* packet,
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) = 0;
virtual int32_t ParseRtpPacket(WebRtcRTPHeader* rtp_header,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) = 0;
virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
// Retrieves the last known applicable frequency.
virtual int32_t GetFrequencyHz() const = 0;
virtual int GetPayloadTypeFrequency() const = 0;
// Computes the current dead-or-alive state.
virtual RTPAliveType ProcessDeadOrAlive(
uint16_t last_payload_length) const = 0;
uint16_t last_payload_length) const = 0;
// Returns true if we should report CSRC changes for this payload type.
// TODO(phoglund): should move out of here along with other payload stuff.
@ -63,36 +65,45 @@ class RTPReceiverStrategy {
// the payload registry.
virtual int32_t OnNewPayloadTypeCreated(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency) = 0;
int8_t payloadType,
uint32_t frequency) = 0;
// Invokes the OnInitializeDecoder callback in a media-specific way.
virtual int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const ModuleRTPUtility::PayloadUnion& specific_payload) const = 0;
RtpFeedback* callback,
int32_t id,
int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const PayloadUnion& specific_payload) const = 0;
// Checks if the payload type has changed, and returns whether we should
// reset statistics and/or discard this packet.
virtual void CheckPayloadChanged(
const int8_t payload_type,
ModuleRTPUtility::PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes);
virtual void CheckPayloadChanged(int8_t payload_type,
PayloadUnion* specific_payload,
bool* should_reset_statistics,
bool* should_discard_changes);
virtual int Energy(uint8_t array_of_energy[kRtpCsrcSize]) const;
// Stores / retrieves the last media specific payload for later reference.
void GetLastMediaSpecificPayload(
ModuleRTPUtility::PayloadUnion* payload) const;
void SetLastMediaSpecificPayload(
const ModuleRTPUtility::PayloadUnion& payload);
void GetLastMediaSpecificPayload(PayloadUnion* payload) const;
void SetLastMediaSpecificPayload(const PayloadUnion& payload);
protected:
ModuleRTPUtility::PayloadUnion last_payload_;
// The data callback is where we should send received payload data.
// See ParseRtpPacket. This class does not claim ownership of the callback.
// Implementations must NOT hold any critical sections while calling the
// callback.
//
// Note: Implementations may call the callback for other reasons than calls
// to ParseRtpPacket, for instance if the implementation somehow recovers a
// packet.
RTPReceiverStrategy(RtpData* data_callback);
scoped_ptr<CriticalSectionWrapper> crit_sect_;
PayloadUnion last_payload_;
RtpData* data_callback_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
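The locking contract above deserves a concrete illustration: the new crit_sect_ guards last_payload_ via Get/SetLastMediaSpecificPayload, but an implementation must not hold any critical section while it invokes data_callback_. A minimal sketch of the shape a subclass's ParseRtpPacket is expected to take, assuming a hypothetical SomeReceiverStrategy whose declaration and remaining overrides are omitted:
// Sketch only: honors the "no critical section while calling the data
// callback" contract. SomeReceiverStrategy is hypothetical.
int32_t SomeReceiverStrategy::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
                                             const PayloadUnion& specific_payload,
                                             bool is_red,
                                             const uint8_t* packet,
                                             uint16_t packet_length,
                                             int64_t timestamp_ms,
                                             bool is_first_packet) {
  // Remember the media-specific payload; the base class guards this with
  // crit_sect_ internally.
  SetLastMediaSpecificPayload(specific_payload);
  const uint8_t* payload_data = packet;     // Real code strips the RTP header.
  uint16_t payload_length = packet_length;  // Real code computes the length.
  // Deliver the payload with no critical section held, per the contract on
  // data_callback_.
  return data_callback_->OnReceivedPayloadData(payload_data, payload_length,
                                               rtp_header);
}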

View File

@ -15,9 +15,9 @@
#include <assert.h> // assert
#include <string.h> // memcpy()
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/receiver_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@ -29,21 +29,18 @@ uint32_t BitRateBPS(uint16_t x) {
return (x & 0x3fff) * uint32_t(pow(10.0f, (2 + (x >> 14))));
}
RTPReceiverVideo::RTPReceiverVideo(
const int32_t id,
const RTPPayloadRegistry* rtp_rtp_payload_registry,
RtpData* data_callback)
RTPReceiverStrategy* RTPReceiverStrategy::CreateVideoStrategy(
int32_t id, RtpData* data_callback) {
return new RTPReceiverVideo(id, data_callback);
}
RTPReceiverVideo::RTPReceiverVideo(int32_t id, RtpData* data_callback)
: RTPReceiverStrategy(data_callback),
id_(id),
rtp_rtp_payload_registry_(rtp_rtp_payload_registry),
critical_section_receiver_video_(
CriticalSectionWrapper::CreateCriticalSection()),
current_fec_frame_decoded_(false),
receive_fec_(NULL) {
}
RTPReceiverVideo::~RTPReceiverVideo() {
delete critical_section_receiver_video_;
delete receive_fec_;
}
@ -55,12 +52,12 @@ bool RTPReceiverVideo::ShouldReportCsrcChanges(
int32_t RTPReceiverVideo::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency) {
int8_t payload_type,
uint32_t frequency) {
if (ModuleRTPUtility::StringCompare(payload_name, "ULPFEC", 6)) {
// Enable FEC if not enabled.
if (receive_fec_ == NULL) {
receive_fec_ = new ReceiverFEC(id_, this);
receive_fec_ = new ReceiverFEC(id_, data_callback_);
}
receive_fec_->SetPayloadTypeFEC(payload_type);
}
@ -69,15 +66,16 @@ int32_t RTPReceiverVideo::OnNewPayloadTypeCreated(
int32_t RTPReceiverVideo::ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
const uint16_t packet_length,
const int64_t timestamp_ms,
const bool is_first_packet) {
uint16_t packet_length,
int64_t timestamp_ms,
bool is_first_packet) {
TRACE_EVENT2("webrtc_rtp", "Video::ParseRtp",
"seqnum", rtp_header->header.sequenceNumber,
"timestamp", rtp_header->header.timestamp);
rtp_header->type.Video.codec = specific_payload.Video.videoCodecType;
const uint8_t* payload_data =
ModuleRTPUtility::GetPayloadData(rtp_header->header, packet);
const uint16_t payload_data_length =
@ -93,8 +91,8 @@ int32_t RTPReceiverVideo::ParseRtpPacket(
is_first_packet);
}
int32_t RTPReceiverVideo::GetFrequencyHz() const {
return kDefaultVideoFrequency;
int RTPReceiverVideo::GetPayloadTypeFrequency() const {
return kVideoPayloadTypeFrequency;
}
RTPAliveType RTPReceiverVideo::ProcessDeadOrAlive(
@ -104,13 +102,13 @@ RTPAliveType RTPReceiverVideo::ProcessDeadOrAlive(
int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
RtpFeedback* callback,
const int32_t id,
const int8_t payload_type,
int32_t id,
int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const ModuleRTPUtility::PayloadUnion& specific_payload) const {
const PayloadUnion& specific_payload) const {
// For video we just go with default values.
if (-1 == callback->OnInitializeDecoder(
id, payload_type, payload_name, kDefaultVideoFrequency, 1, 0)) {
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
WEBRTC_TRACE(kTraceError,
kTraceRtpRtcp,
id,
@ -127,29 +125,29 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_red,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
bool is_red,
const uint8_t* incoming_rtp_packet,
const uint16_t incoming_rtp_packet_size,
const int64_t now_ms,
const bool is_first_packet) {
uint16_t incoming_rtp_packet_size,
int64_t now_ms,
bool is_first_packet) {
int32_t ret_val = 0;
critical_section_receiver_video_->Enter();
crit_sect_->Enter();
if (is_red) {
if (receive_fec_ == NULL) {
critical_section_receiver_video_->Leave();
crit_sect_->Leave();
return -1;
}
crit_sect_->Leave();
bool FECpacket = false;
ret_val = receive_fec_->AddReceivedFECPacket(
rtp_header, incoming_rtp_packet, payload_data_length, FECpacket);
if (ret_val != -1) {
ret_val = receive_fec_->ProcessReceivedFEC();
}
critical_section_receiver_video_->Leave();
if (ret_val == 0 && FECpacket) {
// Callback with the received FEC packet.
@ -158,21 +156,17 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
// empty payload and data length.
rtp_header->frameType = kFrameEmpty;
// We need this for the routing.
int32_t ret_val = SetCodecType(video_type, rtp_header);
if (ret_val != 0) {
return ret_val;
}
rtp_header->type.Video.codec = video_type;
// Pass the length of FEC packets so that they can be accounted for in
// the bandwidth estimator.
ret_val = data_callback_->OnReceivedPayloadData(
NULL, payload_data_length, rtp_header);
}
} else {
// will leave the critical_section_receiver_video_ critsect
    // Will leave the crit_sect_ critical section.
ret_val = ParseVideoCodecSpecificSwitch(rtp_header,
payload_data,
payload_data_length,
video_type,
is_first_packet);
}
return ret_val;
@ -214,82 +208,11 @@ int32_t RTPReceiverVideo::BuildRTPheader(
return rtp_header_length;
}
int32_t RTPReceiverVideo::ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length) {
// TODO(pwestin) Re-factor this to avoid the messy critsect handling.
critical_section_receiver_video_->Enter();
current_fec_frame_decoded_ = true;
ModuleRTPUtility::Payload* payload = NULL;
if (rtp_rtp_payload_registry_->PayloadTypeToPayload(
rtp_header->header.payloadType, payload) != 0) {
critical_section_receiver_video_->Leave();
return -1;
}
  // Here we can re-create the original lost packet so that we can use it for
  // the relay; we need to re-create the RED header too.
uint8_t recovered_packet[IP_PACKET_SIZE];
uint16_t rtp_header_length =
(uint16_t) BuildRTPheader(rtp_header, recovered_packet);
const uint8_t kREDForFECHeaderLength = 1;
// replace pltype
recovered_packet[1] &= 0x80; // Reset.
recovered_packet[1] += rtp_rtp_payload_registry_->red_payload_type();
// add RED header
recovered_packet[rtp_header_length] = rtp_header->header.payloadType;
// f-bit always 0
memcpy(recovered_packet + rtp_header_length + kREDForFECHeaderLength,
payload_data,
payload_data_length);
// A recovered packet can be the first packet, but we lack the ability to
// detect it at the moment since we do not store the history of recently
// received packets. Most codecs like VP8 deal with this in other ways.
bool is_first_packet = false;
return ParseVideoCodecSpecificSwitch(
rtp_header,
payload_data,
payload_data_length,
payload->typeSpecific.Video.videoCodecType,
is_first_packet);
}
int32_t RTPReceiverVideo::SetCodecType(
const RtpVideoCodecTypes video_type,
WebRtcRTPHeader* rtp_header) const {
switch (video_type) {
case kRtpGenericVideo:
rtp_header->type.Video.codec = kRTPVideoGeneric;
break;
case kRtpVp8Video:
rtp_header->type.Video.codec = kRTPVideoVP8;
break;
case kRtpFecVideo:
rtp_header->type.Video.codec = kRTPVideoFEC;
break;
}
return 0;
}
int32_t RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_first_packet) {
int32_t ret_val = SetCodecType(video_type, rtp_header);
if (ret_val != 0) {
critical_section_receiver_video_->Leave();
return ret_val;
}
uint16_t payload_data_length,
bool is_first_packet) {
WEBRTC_TRACE(kTraceStream,
kTraceRtpRtcp,
id_,
@ -297,25 +220,26 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
__FUNCTION__,
rtp_header->header.timestamp);
// All receive functions release critical_section_receiver_video_ before
// returning.
switch (video_type) {
case kRtpGenericVideo:
// Critical section has already been taken.
switch (rtp_header->type.Video.codec) {
case kRtpVideoGeneric:
rtp_header->type.Video.isFirstPacket = is_first_packet;
return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
case kRtpVp8Video:
case kRtpVideoVp8:
return ReceiveVp8Codec(rtp_header, payload_data, payload_data_length);
case kRtpFecVideo:
case kRtpVideoFec:
break;
default:
assert(false);
}
critical_section_receiver_video_->Leave();
// Releasing the already taken critical section here.
crit_sect_->Leave();
return -1;
}
int32_t RTPReceiverVideo::ReceiveVp8Codec(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length) {
int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
bool success;
ModuleRTPUtility::RTPPayload parsed_packet;
if (payload_data_length == 0) {
@ -323,12 +247,12 @@ int32_t RTPReceiverVideo::ReceiveVp8Codec(
parsed_packet.info.VP8.dataLength = 0;
} else {
ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
kRtpVp8Video, payload_data, payload_data_length, id_);
kRtpVideoVp8, payload_data, payload_data_length, id_);
success = rtp_payload_parser.Parse(parsed_packet);
}
// from here down we only work on local data
critical_section_receiver_video_->Leave();
crit_sect_->Leave();
if (!success) {
return -1;
@ -391,7 +315,7 @@ int32_t RTPReceiverVideo::ReceiveGenericCodec(
rtp_header->type.Video.isFirstPacket =
(generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
critical_section_receiver_video_->Leave();
crit_sect_->Leave();
if (data_callback_->OnReceivedPayloadData(
payload_data, payload_data_length, rtp_header) != 0) {

View File

@ -22,27 +22,28 @@ namespace webrtc {
class CriticalSectionWrapper;
class ModuleRtpRtcpImpl;
class ReceiverFEC;
class RTPReceiver;
class RTPPayloadRegistry;
class RtpReceiver;
class RTPReceiverVideo : public RTPReceiverStrategy {
public:
RTPReceiverVideo(const int32_t id,
const RTPPayloadRegistry* rtp_payload_registry,
RtpData* data_callback);
RTPReceiverVideo(const int32_t id, RtpData* data_callback);
virtual ~RTPReceiverVideo();
virtual int32_t ParseRtpPacket(
WebRtcRTPHeader* rtp_header,
const ModuleRTPUtility::PayloadUnion& specific_payload,
const bool is_red,
const PayloadUnion& specific_payload,
bool is_red,
const uint8_t* packet,
const uint16_t packet_length,
const int64_t timestamp,
const bool is_first_packet) OVERRIDE;
uint16_t packet_length,
int64_t timestamp,
bool is_first_packet) OVERRIDE;
virtual int32_t GetFrequencyHz() const OVERRIDE;
TelephoneEventHandler* GetTelephoneEventHandler() {
return NULL;
}
int GetPayloadTypeFrequency() const OVERRIDE;
virtual RTPAliveType ProcessDeadOrAlive(uint16_t last_payload_length) const
OVERRIDE;
@ -51,41 +52,32 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
virtual int32_t OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency) OVERRIDE;
int8_t payload_type,
uint32_t frequency) OVERRIDE;
virtual int32_t InvokeOnInitializeDecoder(
RtpFeedback* callback,
const int32_t id,
const int8_t payload_type,
int32_t id,
int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const ModuleRTPUtility::PayloadUnion& specific_payload) const OVERRIDE;
virtual int32_t ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length);
const PayloadUnion& specific_payload) const OVERRIDE;
void SetPacketOverHead(uint16_t packet_over_head);
protected:
int32_t SetCodecType(const RtpVideoCodecTypes video_type,
WebRtcRTPHeader* rtp_header) const;
int32_t ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_first_packet);
uint16_t payload_data_length,
bool is_first_packet);
int32_t ReceiveGenericCodec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length);
uint16_t payload_data_length);
int32_t ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length);
uint16_t payload_data_length);
int32_t BuildRTPheader(const WebRtcRTPHeader* rtp_header,
uint8_t* data_buffer) const;
@ -94,21 +86,17 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
int32_t ParseVideoCodecSpecific(
WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
const uint16_t payload_data_length,
const RtpVideoCodecTypes video_type,
const bool is_red,
uint16_t payload_data_length,
RtpVideoCodecTypes video_type,
bool is_red,
const uint8_t* incoming_rtp_packet,
const uint16_t incoming_rtp_packet_size,
const int64_t now_ms,
const bool is_first_packet);
uint16_t incoming_rtp_packet_size,
int64_t now_ms,
bool is_first_packet);
int32_t id_;
const RTPPayloadRegistry* rtp_rtp_payload_registry_;
CriticalSectionWrapper* critical_section_receiver_video_;
// FEC
bool current_fec_frame_decoded_;
ReceiverFEC* receive_fec_;
};
} // namespace webrtc

View File

@ -28,11 +28,16 @@
},
'sources': [
# Common
'../interface/receive_statistics.h',
'../interface/rtp_header_parser.h',
'../interface/rtp_payload_registry.h',
'../interface/rtp_receiver.h',
'../interface/rtp_rtcp.h',
'../interface/rtp_rtcp_defines.h',
'bitrate.cc',
'bitrate.h',
'receive_statistics_impl.cc',
'receive_statistics_impl.h',
'rtp_header_parser.cc',
'rtp_rtcp_config.h',
'rtp_rtcp_impl.cc',
@ -47,8 +52,8 @@
'rtcp_utility.h',
'rtp_header_extension.cc',
'rtp_header_extension.h',
'rtp_receiver.cc',
'rtp_receiver.h',
'rtp_receiver_impl.cc',
'rtp_receiver_impl.h',
'rtp_sender.cc',
'rtp_sender.h',
'rtp_utility.cc',
@ -75,7 +80,6 @@
'producer_fec.h',
'rtp_packet_history.cc',
'rtp_packet_history.h',
'rtp_payload_registry.h',
'rtp_payload_registry.cc',
'rtp_receiver_strategy.cc',
'rtp_receiver_strategy.h',
@ -93,6 +97,7 @@
'vp8_partition_aggregator.h',
# Mocks
'../mocks/mock_rtp_rtcp.h',
'mock/mock_rtp_payload_strategy.h',
], # source
# TODO(jschuh): Bug 1348: fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],

View File

@ -14,8 +14,6 @@
#include <string.h>
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
@ -38,28 +36,12 @@ const float kFracMs = 4.294967296E6f;
namespace webrtc {
static RtpData* NullObjectRtpData() {
static NullRtpData null_rtp_data;
return &null_rtp_data;
}
static RtpFeedback* NullObjectRtpFeedback() {
static NullRtpFeedback null_rtp_feedback;
return &null_rtp_feedback;
}
static RtpAudioFeedback* NullObjectRtpAudioFeedback() {
static NullRtpAudioFeedback null_rtp_audio_feedback;
return &null_rtp_audio_feedback;
}
RtpRtcp::Configuration::Configuration()
: id(-1),
audio(false),
clock(NULL),
default_module(NULL),
incoming_data(NullObjectRtpData()),
incoming_messages(NullObjectRtpFeedback()),
receive_statistics(NULL),
outgoing_transport(NULL),
rtcp_feedback(NULL),
intra_frame_callback(NULL),
@ -85,10 +67,7 @@ RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
}
ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
: rtp_payload_registry_(
configuration.id,
RTPPayloadStrategy::CreateStrategy(configuration.audio)),
rtp_sender_(configuration.id,
: rtp_sender_(configuration.id,
configuration.audio,
configuration.clock,
configuration.outgoing_transport,
@ -98,14 +77,12 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
this),
rtcp_receiver_(configuration.id, configuration.clock, this),
clock_(configuration.clock),
rtp_telephone_event_handler_(NULL),
receive_statistics_(configuration.receive_statistics),
id_(configuration.id),
audio_(configuration.audio),
collision_detected_(false),
last_process_time_(configuration.clock->TimeInMilliseconds()),
last_bitrate_process_time_(configuration.clock->TimeInMilliseconds()),
last_packet_timeout_process_time_(
configuration.clock->TimeInMilliseconds()),
last_rtt_process_time_(configuration.clock->TimeInMilliseconds()),
packet_overhead_(28), // IPV4 UDP.
critical_section_module_ptrs_(
@ -114,9 +91,6 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
CriticalSectionWrapper::CreateCriticalSection()),
default_module_(
static_cast<ModuleRtpRtcpImpl*>(configuration.default_module)),
dead_or_alive_active_(false),
dead_or_alive_timeout_ms_(0),
dead_or_alive_last_timer_(0),
nack_method_(kNackOff),
nack_last_time_sent_full_(0),
nack_last_seq_number_sent_(0),
@ -127,26 +101,6 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
, plot1_(NULL),
#endif
rtt_observer_(configuration.rtt_observer) {
RTPReceiverStrategy* rtp_receiver_strategy;
if (configuration.audio) {
// If audio, we need to be able to handle telephone events too, so stash
// away the audio receiver for those situations.
rtp_telephone_event_handler_ =
new RTPReceiverAudio(configuration.id, configuration.incoming_data,
configuration.audio_messages);
rtp_receiver_strategy = rtp_telephone_event_handler_;
} else {
rtp_receiver_strategy =
new RTPReceiverVideo(configuration.id, &rtp_payload_registry_,
configuration.incoming_data);
}
rtp_receiver_.reset(new RTPReceiver(
configuration.id, configuration.clock, this,
configuration.audio_messages, configuration.incoming_data,
configuration.incoming_messages, rtp_receiver_strategy,
&rtp_payload_registry_));
send_video_codec_.codecType = kVideoCodecUnknown;
if (default_module_) {
@ -235,24 +189,14 @@ int32_t ModuleRtpRtcpImpl::TimeUntilNextProcess() {
// Process any pending tasks such as timeouts (non time critical events).
int32_t ModuleRtpRtcpImpl::Process() {
  const int64_t now = clock_->TimeInMilliseconds();
last_process_time_ = now;
if (now >=
last_packet_timeout_process_time_ + kRtpRtcpPacketTimeoutProcessTimeMs) {
rtp_receiver_->PacketTimeout();
rtcp_receiver_.PacketTimeout();
last_packet_timeout_process_time_ = now;
}
if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) {
rtp_sender_.ProcessBitrate();
rtp_receiver_->ProcessBitrate();
last_bitrate_process_time_ = now;
}
ProcessDeadOrAliveTimer();
const bool default_instance(child_modules_.empty() ? false : true);
if (!default_instance) {
if (rtcp_sender_.Sending()) {
@ -297,8 +241,15 @@ int32_t ModuleRtpRtcpImpl::Process() {
}
}
}
if (rtcp_sender_.TimeToSendRTCPReport())
rtcp_sender_.SendRTCP(kRtcpReport);
if (rtcp_sender_.TimeToSendRTCPReport()) {
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
rtcp_sender_.SendRTCP(kRtcpReport, &receive_stats);
} else {
rtcp_sender_.SendRTCP(kRtcpReport, NULL);
}
}
}
if (UpdateRTCPReceiveInformationTimers()) {
@ -308,230 +259,6 @@ int32_t ModuleRtpRtcpImpl::Process() {
return 0;
}
void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
bool RTCPalive = false;
int64_t now = 0;
bool do_callback = false;
// Do operations on members under lock but avoid making the
// ProcessDeadOrAlive() callback under the same lock.
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
if (dead_or_alive_active_) {
now = clock_->TimeInMilliseconds();
if (now > dead_or_alive_timeout_ms_ + dead_or_alive_last_timer_) {
        // RTCP is alive if we have received a report within the last 12 seconds.
dead_or_alive_last_timer_ += dead_or_alive_timeout_ms_;
if (rtcp_receiver_.LastReceived() + 12000 > now)
RTCPalive = true;
do_callback = true;
}
}
}
if (do_callback)
rtp_receiver_->ProcessDeadOrAlive(RTCPalive, now);
}
int32_t ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sample_time_seconds) {
if (enable) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPeriodicDeadOrAliveStatus(enable, %d)",
sample_time_seconds);
} else {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPeriodicDeadOrAliveStatus(disable)");
}
if (sample_time_seconds == 0) {
return -1;
}
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
dead_or_alive_active_ = enable;
dead_or_alive_timeout_ms_ = sample_time_seconds * 1000;
// Trigger the first after one period.
dead_or_alive_last_timer_ = clock_->TimeInMilliseconds();
}
return 0;
}
int32_t ModuleRtpRtcpImpl::PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sample_time_seconds) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"PeriodicDeadOrAliveStatus()");
enable = dead_or_alive_active_;
sample_time_seconds =
static_cast<uint8_t>(dead_or_alive_timeout_ms_ / 1000);
return 0;
}
int32_t ModuleRtpRtcpImpl::SetPacketTimeout(
const uint32_t rtp_timeout_ms,
const uint32_t rtcp_timeout_ms) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetPacketTimeout(%u,%u)",
rtp_timeout_ms,
rtcp_timeout_ms);
if (rtp_receiver_->SetPacketTimeout(rtp_timeout_ms) == 0) {
return rtcp_receiver_.SetPacketTimeout(rtcp_timeout_ms);
}
return -1;
}
int32_t ModuleRtpRtcpImpl::RegisterReceivePayload(
const CodecInst& voice_codec) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"RegisterReceivePayload(voice_codec)");
return rtp_receiver_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate);
}
int32_t ModuleRtpRtcpImpl::RegisterReceivePayload(
const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"RegisterReceivePayload(video_codec)");
return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
}
int32_t ModuleRtpRtcpImpl::ReceivePayloadType(
const CodecInst& voice_codec,
int8_t* pl_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"ReceivePayloadType(voice_codec)");
return rtp_receiver_->ReceivePayloadType(
voice_codec.plname,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate,
pl_type);
}
int32_t ModuleRtpRtcpImpl::ReceivePayloadType(
const VideoCodec& video_codec,
int8_t* pl_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"ReceivePayloadType(video_codec)");
return rtp_receiver_->ReceivePayloadType(video_codec.plName,
90000,
0,
video_codec.maxBitrate,
pl_type);
}
int32_t ModuleRtpRtcpImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"DeRegisterReceivePayload(%d)",
payload_type);
return rtp_receiver_->DeRegisterReceivePayload(payload_type);
}
// Get the currently configured SSRC filter.
int32_t ModuleRtpRtcpImpl::SSRCFilter(
uint32_t& allowed_ssrc) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SSRCFilter()");
return rtp_receiver_->SSRCFilter(allowed_ssrc);
}
// Set an SSRC to be used as a filter for incoming RTP streams.
int32_t ModuleRtpRtcpImpl::SetSSRCFilter(
const bool enable,
const uint32_t allowed_ssrc) {
if (enable) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetSSRCFilter(enable, 0x%x)",
allowed_ssrc);
} else {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetSSRCFilter(disable)");
}
return rtp_receiver_->SetSSRCFilter(enable, allowed_ssrc);
}
// Get last received remote timestamp.
uint32_t ModuleRtpRtcpImpl::RemoteTimestamp() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteTimestamp()");
return rtp_receiver_->TimeStamp();
}
int64_t ModuleRtpRtcpImpl::LocalTimeOfRemoteTimeStamp() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"LocalTimeOfRemoteTimeStamp()");
return rtp_receiver_->LastReceivedTimeMs();
}
// Get the current estimated remote timestamp.
int32_t ModuleRtpRtcpImpl::EstimatedRemoteTimeStamp(
uint32_t& timestamp) const {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"EstimatedRemoteTimeStamp()");
return rtp_receiver_->EstimatedRemoteTimeStamp(timestamp);
}
// Get incoming SSRC.
uint32_t ModuleRtpRtcpImpl::RemoteSSRC() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteSSRC()");
return rtp_receiver_->SSRC();
}
// Get remote CSRC
int32_t ModuleRtpRtcpImpl::RemoteCSRCs(
uint32_t arr_of_csrc[kRtpCsrcSize]) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteCSRCs()");
return rtp_receiver_->CSRCs(arr_of_csrc);
}
int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(RtxMode mode, bool set_ssrc,
uint32_t ssrc) {
rtp_sender_.SetRTXStatus(mode, set_ssrc, ssrc);
@ -544,42 +271,10 @@ int32_t ModuleRtpRtcpImpl::RTXSendStatus(RtxMode* mode, uint32_t* ssrc,
return 0;
}
int32_t ModuleRtpRtcpImpl::SetRTXReceiveStatus(bool enable,
uint32_t ssrc) {
rtp_receiver_->SetRTXStatus(enable, ssrc);
return 0;
}
int32_t ModuleRtpRtcpImpl::RTXReceiveStatus(bool* enable, uint32_t* ssrc,
int* payload_type) const {
rtp_receiver_->RTXStatus(enable, ssrc, payload_type);
return 0;
}
void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
rtp_sender_.SetRtxPayloadType(payload_type);
}
void ModuleRtpRtcpImpl::SetRtxReceivePayloadType(int payload_type) {
rtp_receiver_->SetRtxPayloadType(payload_type);
}
// Called by the network module when we receive a packet.
int32_t ModuleRtpRtcpImpl::IncomingRtpPacket(
const uint8_t* incoming_packet,
const uint16_t incoming_packet_length,
const RTPHeader& parsed_rtp_header) {
WEBRTC_TRACE(kTraceStream,
kTraceRtpRtcp,
id_,
"IncomingRtpPacket(packet_length:%u)",
incoming_packet_length);
RTPHeader rtp_header_copy = parsed_rtp_header;
return rtp_receiver_->IncomingRTPPacket(&rtp_header_copy,
incoming_packet,
incoming_packet_length);
}
int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
const uint8_t* rtcp_packet,
const uint16_t length) {
@ -882,7 +577,13 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
if (!have_child_modules) {
// Don't send RTCP from default module.
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) {
rtcp_sender_.SendRTCP(kRtcpReport);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
rtcp_sender_.SendRTCP(kRtcpReport, &receive_stats);
} else {
rtcp_sender_.SendRTCP(kRtcpReport, NULL);
}
}
return rtp_sender_.SendOutgoingData(frame_type,
payload_type,
@ -1171,12 +872,6 @@ int32_t ModuleRtpRtcpImpl::RemoteCNAME(
return rtcp_receiver_.CNAME(remote_ssrc, c_name);
}
uint16_t ModuleRtpRtcpImpl::RemoteSequenceNumber() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteSequenceNumber()");
return rtp_receiver_->SequenceNumber();
}
int32_t ModuleRtpRtcpImpl::RemoteNTP(
uint32_t* received_ntpsecs,
uint32_t* received_ntpfrac,
@ -1216,21 +911,6 @@ void ModuleRtpRtcpImpl:: SetRtt(uint32_t rtt) {
rtcp_receiver_.SetRTT(static_cast<uint16_t>(rtt));
}
// Reset RTP statistics.
int32_t ModuleRtpRtcpImpl::ResetStatisticsRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetStatisticsRTP()");
return rtp_receiver_->ResetStatistics();
}
// Reset RTP data counters for the receiving side.
int32_t ModuleRtpRtcpImpl::ResetReceiveDataCountersRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"ResetReceiveDataCountersRTP()");
return rtp_receiver_->ResetDataCounters();
}
// Reset RTP data counters for the sending side.
int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
@ -1245,8 +925,19 @@ int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
int32_t ModuleRtpRtcpImpl::SendRTCP(uint32_t rtcp_packet_type) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendRTCP(0x%x)",
rtcp_packet_type);
return rtcp_sender_.SendRTCP(rtcp_packet_type);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound ||
(rtcp_packet_type & kRtcpReport) ||
(rtcp_packet_type & kRtcpSr) ||
(rtcp_packet_type & kRtcpRr)) {
if (receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(rtcp_packet_type, &receive_stats);
} else {
return rtcp_sender_.SendRTCP(rtcp_packet_type, NULL);
}
}
return rtcp_sender_.SendRTCP(rtcp_packet_type, NULL);
}
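The fetch-receive-statistics-then-send pattern introduced above recurs in Process(), SendOutgoingData(), SendNACK(), SendRTCPSliceLossIndication() and SendRTCPReferencePictureSelection(). A small private helper along these lines, hypothetical and not part of this patch (it would also need a declaration in rtp_rtcp_impl.h, and the NACK/SLI/RPSI call sites pass extra arguments), would keep the report-only call sites in sync:
// Hypothetical helper, not in this patch: fetch receive statistics when
// available and forward them to the RTCP sender, mirroring the pattern
// repeated at the call sites in this file.
int32_t ModuleRtpRtcpImpl::SendRtcpWithReceiveStats(uint32_t rtcp_packet_type) {
  ReceiveStatistics::RtpReceiveStatistics receive_stats;
  if (receive_statistics_ &&
      receive_statistics_->Statistics(&receive_stats, true)) {
    return rtcp_sender_.SendRTCP(rtcp_packet_type, &receive_stats);
  }
  return rtcp_sender_.SendRTCP(rtcp_packet_type, NULL);
}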
int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
@ -1269,32 +960,9 @@ int32_t ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
return rtcp_sender_.SetRTCPVoIPMetrics(voip_metric);
}
// Our locally created statistics of the received RTP stream.
int32_t ModuleRtpRtcpImpl::StatisticsRTP(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* max_jitter) const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "StatisticsRTP()");
uint32_t jitter_transmission_time_offset = 0;
int32_t ret_val = rtp_receiver_->Statistics(
fraction_lost, cum_lost, ext_max, jitter, max_jitter,
&jitter_transmission_time_offset, (rtcp_sender_.Status() == kRtcpOff));
if (ret_val == -1) {
WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
"StatisticsRTP() no statistics available");
}
return ret_val;
}
int32_t ModuleRtpRtcpImpl::DataCountersRTP(
uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
uint32_t* packets_sent) const {
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "DataCountersRTP()");
if (bytes_sent) {
@ -1303,36 +971,7 @@ int32_t ModuleRtpRtcpImpl::DataCountersRTP(
if (packets_sent) {
*packets_sent = rtp_sender_.Packets();
}
return rtp_receiver_->DataCounters(bytes_received, packets_received);
}
int32_t ModuleRtpRtcpImpl::ReportBlockStatistics(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* jitter_transmission_time_offset) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ReportBlockStatistics()");
int32_t missing = 0;
int32_t ret = rtp_receiver_->Statistics(fraction_lost,
cum_lost,
ext_max,
jitter,
NULL,
jitter_transmission_time_offset,
&missing,
true);
#ifdef MATLAB
if (plot1_ == NULL) {
plot1_ = eng.NewPlot(new MatlabPlot());
plot1_->AddTimeLine(30, "b", "lost", clock_->TimeInMilliseconds());
}
plot1_->Append("lost", missing);
plot1_->Plot();
#endif
return ret;
return 0;
}
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) {
@ -1447,52 +1086,6 @@ int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
return rtcp_sender_.SetTMMBN(bounding_set, max_bitrate_kbit);
}
// (NACK) Negative acknowledgment.
// Are negative acknowledgment requests on or off?
NACKMethod ModuleRtpRtcpImpl::NACK() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "NACK()");
NACKMethod child_method = kNackOff;
const bool default_instance(child_modules_.empty() ? false : true);
if (default_instance) {
// For default we need to check all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::const_iterator it =
child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
NACKMethod nackMethod = module->NACK();
if (nackMethod != kNackOff) {
child_method = nackMethod;
break;
}
}
it++;
}
}
NACKMethod method = nack_method_;
if (child_method != kNackOff) {
method = child_method;
}
return method;
}
// Turn negative acknowledgment requests on/off.
int32_t ModuleRtpRtcpImpl::SetNACKStatus(
NACKMethod method, int max_reordering_threshold) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetNACKStatus(%u)", method);
nack_method_ = method;
rtp_receiver_->SetNACKStatus(method, max_reordering_threshold);
return 0;
}
// Returns the currently configured retransmission mode.
int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
WEBRTC_TRACE(kTraceModuleCall,
@ -1522,7 +1115,7 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
"SendNACK(size:%u)", size);
uint16_t avg_rtt = 0;
rtcp_receiver_.RTT(rtp_receiver_->SSRC(), NULL, &avg_rtt, NULL, NULL);
rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL);
int64_t wait_time = 5 + ((avg_rtt * 3) >> 1); // 5 + RTT * 1.5.
if (wait_time == 5) {
@ -1561,13 +1154,15 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
}
nack_last_seq_number_sent_ = nack_list[start_id + nackLength - 1];
switch (nack_method_) {
case kNackRtcp:
return rtcp_sender_.SendRTCP(kRtcpNack, nackLength, &nack_list[start_id]);
case kNackOff:
return -1;
};
return -1;
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound && receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpNack, &receive_stats, nackLength,
&nack_list[start_id]);
} else {
return rtcp_sender_.SendRTCP(kRtcpNack, NULL, nackLength,
&nack_list[start_id]);
}
}
// Store the sent packets, needed to answer a negative acknowledgment
@ -1587,27 +1182,8 @@ int32_t ModuleRtpRtcpImpl::SetStorePacketsStatus(
return 0; // TODO(pwestin): change to void.
}
// Forward DTMFs to decoder for playout.
int ModuleRtpRtcpImpl::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"SetTelephoneEventForwardToDecoder(forward_to_decoder:%d)",
forward_to_decoder);
assert(audio_);
assert(rtp_telephone_event_handler_);
return rtp_telephone_event_handler_->SetTelephoneEventForwardToDecoder(
forward_to_decoder);
}
// Is forwarding of out-of-band telephone events turned on/off?
bool ModuleRtpRtcpImpl::TelephoneEventForwardToDecoder() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
"TelephoneEventForwardToDecoder()");
assert(audio_);
assert(rtp_telephone_event_handler_);
return rtp_telephone_event_handler_->TelephoneEventForwardToDecoder();
bool ModuleRtpRtcpImpl::StorePackets() const {
return rtp_sender_.StorePackets();
}
// Send a TelephoneEvent tone using RFC 2833 (4733).
@ -1702,10 +1278,6 @@ int32_t ModuleRtpRtcpImpl::SendREDPayloadType(
return rtp_sender_.RED(&payload_type);
}
RtpVideoCodecTypes ModuleRtpRtcpImpl::ReceivedVideoCodec() const {
return rtp_receiver_->VideoCodecType();
}
RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
return rtp_sender_.VideoCodecType();
}
@ -1771,9 +1343,9 @@ int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
case kKeyFrameReqFirRtp:
return rtp_sender_.SendRTPIntraRequest();
case kKeyFrameReqPliRtcp:
return rtcp_sender_.SendRTCP(kRtcpPli);
return SendRTCP(kRtcpPli);
case kKeyFrameReqFirRtcp:
return rtcp_sender_.SendRTCP(kRtcpFir);
return SendRTCP(kRtcpFir);
}
return -1;
}
@ -1785,7 +1357,14 @@ int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
id_,
"SendRTCPSliceLossIndication (picture_id:%d)",
picture_id);
return rtcp_sender_.SendRTCP(kRtcpSli, 0, 0, false, picture_id);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound && receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpSli, &receive_stats, 0, 0, false,
picture_id);
} else {
return rtcp_sender_.SendRTCP(kRtcpSli, NULL, 0, 0, false, picture_id);
}
}
int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) {
@ -1909,7 +1488,7 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
}
if (kRtcpOff != rtcp_sender_.Status()) {
// Send RTCP bye on the current SSRC.
rtcp_sender_.SendRTCP(kRtcpBye);
SendRTCP(kRtcpBye);
}
// Change local SSRC and inform all objects about the new SSRC.
rtcp_sender_.SetSSRC(new_ssrc);
@ -1917,10 +1496,6 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
}
}
uint32_t ModuleRtpRtcpImpl::BitrateReceivedNow() const {
return rtp_receiver_->BitrateNow();
}
void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
@ -1982,12 +1557,19 @@ void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
}
void ModuleRtpRtcpImpl::OnRequestSendReport() {
rtcp_sender_.SendRTCP(kRtcpSr);
SendRTCP(kRtcpSr);
}
int32_t ModuleRtpRtcpImpl::SendRTCPReferencePictureSelection(
const uint64_t picture_id) {
return rtcp_sender_.SendRTCP(kRtcpRpsi, 0, 0, false, picture_id);
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (rtcp_sender_.Status() == kRtcpCompound && receive_statistics_ &&
receive_statistics_->Statistics(&receive_stats, true)) {
return rtcp_sender_.SendRTCP(kRtcpRpsi, &receive_stats, 0, 0, false,
picture_id);
} else {
return rtcp_sender_.SendRTCP(kRtcpRpsi, NULL, 0, 0, false, picture_id);
}
}
uint32_t ModuleRtpRtcpImpl::SendTimeOfSendReport(
@ -2002,7 +1584,7 @@ void ModuleRtpRtcpImpl::OnReceivedNACK(
return;
}
uint16_t avg_rtt = 0;
rtcp_receiver_.RTT(rtp_receiver_->SSRC(), NULL, &avg_rtt, NULL, NULL);
rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &avg_rtt, NULL, NULL);
rtp_sender_.OnReceivedNACK(nack_sequence_numbers, avg_rtt);
}

View File

@ -17,8 +17,6 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -43,72 +41,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Receiver part.
// Configure a timeout value.
virtual int32_t SetPacketTimeout(const uint32_t rtp_timeout_ms,
const uint32_t rtcp_timeout_ms) OVERRIDE;
// Set periodic dead or alive notification.
virtual int32_t SetPeriodicDeadOrAliveStatus(
const bool enable,
const uint8_t sample_time_seconds) OVERRIDE;
// Get periodic dead or alive notification status.
virtual int32_t PeriodicDeadOrAliveStatus(
bool& enable,
uint8_t& sample_time_seconds) OVERRIDE;
virtual int32_t RegisterReceivePayload(const CodecInst& voice_codec) OVERRIDE;
virtual int32_t RegisterReceivePayload(
const VideoCodec& video_codec) OVERRIDE;
virtual int32_t ReceivePayloadType(const CodecInst& voice_codec,
int8_t* pl_type) OVERRIDE;
virtual int32_t ReceivePayloadType(const VideoCodec& video_codec,
int8_t* pl_type) OVERRIDE;
virtual int32_t DeRegisterReceivePayload(const int8_t payload_type) OVERRIDE;
// Get the currently configured SSRC filter.
virtual int32_t SSRCFilter(uint32_t& allowed_ssrc) const OVERRIDE;
  // Set an SSRC to be used as a filter for incoming RTP streams.
virtual int32_t SetSSRCFilter(const bool enable,
const uint32_t allowed_ssrc) OVERRIDE;
// Get last received remote timestamp.
virtual uint32_t RemoteTimestamp() const OVERRIDE;
// Get the local time of the last received remote timestamp.
virtual int64_t LocalTimeOfRemoteTimeStamp() const OVERRIDE;
// Get the current estimated remote timestamp.
virtual int32_t EstimatedRemoteTimeStamp(uint32_t& timestamp) const OVERRIDE;
virtual uint32_t RemoteSSRC() const OVERRIDE;
virtual int32_t RemoteCSRCs(uint32_t arr_of_csrc[kRtpCsrcSize]) const
OVERRIDE;
virtual int32_t SetRTXReceiveStatus(const bool enable,
const uint32_t ssrc) OVERRIDE;
virtual int32_t RTXReceiveStatus(bool* enable, uint32_t* ssrc,
int* payloadType) const OVERRIDE;
virtual void SetRtxReceivePayloadType(int payload_type) OVERRIDE;
// Called when we receive an RTP packet.
virtual int32_t IncomingRtpPacket(
const uint8_t* incoming_packet,
const uint16_t packet_length,
const RTPHeader& parsed_rtp_header) OVERRIDE;
// Called when we receive an RTCP packet.
virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
uint16_t incoming_packet_length) OVERRIDE;
virtual void SetRemoteSSRC(const uint32_t ssrc);
// Sender part.
virtual int32_t RegisterSendPayload(const CodecInst& voice_codec) OVERRIDE;
@ -239,32 +177,11 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Normal SR and RR are triggered via the process function.
virtual int32_t SendRTCP(uint32_t rtcp_packet_type = kRtcpReport) OVERRIDE;
  // Our locally created statistics of the received RTP stream.
virtual int32_t StatisticsRTP(uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* max_jitter = NULL) const OVERRIDE;
// Reset RTP statistics.
virtual int32_t ResetStatisticsRTP() OVERRIDE;
virtual int32_t ResetReceiveDataCountersRTP() OVERRIDE;
virtual int32_t ResetSendDataCountersRTP() OVERRIDE;
// Statistics of the amount of data sent and received.
virtual int32_t DataCountersRTP(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const OVERRIDE;
virtual int32_t ReportBlockStatistics(
uint8_t* fraction_lost,
uint32_t* cum_lost,
uint32_t* ext_max,
uint32_t* jitter,
uint32_t* jitter_transmission_time_offset);
uint32_t* packets_sent) const OVERRIDE;
// Get received RTCP report, sender info.
virtual int32_t RemoteRTCPStat(RTCPSenderInfo* sender_info) OVERRIDE;
@ -313,13 +230,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// (NACK) Negative acknowledgment part.
  // Are negative acknowledgment requests on or off?
virtual NACKMethod NACK() const OVERRIDE;
// Turn negative acknowledgment requests on/off.
virtual int32_t SetNACKStatus(const NACKMethod method,
int max_reordering_threshold) OVERRIDE;
virtual int SelectiveRetransmissions() const OVERRIDE;
virtual int SetSelectiveRetransmissions(uint8_t settings) OVERRIDE;
@ -333,6 +243,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual int32_t SetStorePacketsStatus(
const bool enable, const uint16_t number_to_store) OVERRIDE;
virtual bool StorePackets() const OVERRIDE;
// (APP) Application specific data.
virtual int32_t SetRTCPApplicationSpecificData(
const uint8_t sub_type,
@ -350,13 +262,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual int32_t SetAudioPacketSize(
const uint16_t packet_size_samples) OVERRIDE;
// Forward DTMFs to decoder for playout.
virtual int SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) OVERRIDE;
  // Is forwarding of out-of-band telephone events turned on/off?
virtual bool TelephoneEventForwardToDecoder() const OVERRIDE;
virtual bool SendTelephoneEventActive(int8_t& telephone_event) const OVERRIDE;
// Send a TelephoneEvent tone using RFC 2833 (4733).
@ -384,8 +289,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Video part.
virtual RtpVideoCodecTypes ReceivedVideoCodec() const;
virtual RtpVideoCodecTypes SendVideoCodec() const;
virtual int32_t SendRTCPSliceLossIndication(
@ -427,8 +330,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
uint32_t* fec_rate,
uint32_t* nackRate) const OVERRIDE;
virtual void SetRemoteSSRC(const uint32_t ssrc);
virtual uint32_t SendTimeOfSendReport(const uint32_t send_report);
  // Good state of the RTP receiver informs the sender.
@ -458,8 +359,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
bool UpdateRTCPReceiveInformationTimers();
void ProcessDeadOrAliveTimer();
uint32_t BitrateReceivedNow() const;
// Get remote SequenceNumber.
@ -468,10 +367,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Only for internal testing.
uint32_t LastSendReport(uint32_t& last_rtcptime);
RTPPayloadRegistry rtp_payload_registry_;
RTPSender rtp_sender_;
scoped_ptr<RTPReceiver> rtp_receiver_;
RTCPSender rtcp_sender_;
RTCPReceiver rtcp_receiver_;
@ -481,14 +377,13 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
private:
int64_t RtcpReportInterval();
RTPReceiverAudio* rtp_telephone_event_handler_;
ReceiveStatistics* receive_statistics_;
int32_t id_;
const bool audio_;
bool collision_detected_;
int64_t last_process_time_;
int64_t last_bitrate_process_time_;
int64_t last_packet_timeout_process_time_;
int64_t last_rtt_process_time_;
uint16_t packet_overhead_;
@ -497,10 +392,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
ModuleRtpRtcpImpl* default_module_;
std::list<ModuleRtpRtcpImpl*> child_modules_;
// Dead or alive.
bool dead_or_alive_active_;
uint32_t dead_or_alive_timeout_ms_;
int64_t dead_or_alive_last_timer_;
// Send side
NACKMethod nack_method_;
uint32_t nack_last_time_sent_full_;
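The removals above take the receive path (the RTPReceiverAudio telephone-event handler, dead-or-alive timers, receive bitrate and receive-side NACK state) out of ModuleRtpRtcpImpl. A rough sketch of the wiring that replaces it, pieced together from the test fixtures later in this CL; clock, transport, id and the three callback pointers are placeholders, not names from the patch:

#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"

// Receive statistics, payload registry and receiver are created next to the
// RtpRtcp module instead of living inside it.
scoped_ptr<ReceiveStatistics> receive_statistics(
    ReceiveStatistics::Create(&clock));
scoped_ptr<RTPPayloadRegistry> payload_registry(new RTPPayloadRegistry(
    id, RTPPayloadStrategy::CreateStrategy(true /* audio */)));
scoped_ptr<RtpReceiver> rtp_receiver(RtpReceiver::CreateAudioReceiver(
    id, &clock, audio_feedback, data_callback, rtp_feedback,
    payload_registry.get()));

RtpRtcp::Configuration configuration;
configuration.id = id;
configuration.audio = true;
configuration.clock = &clock;
configuration.receive_statistics = receive_statistics.get();
configuration.outgoing_transport = transport;
RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);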

View File

@ -350,7 +350,7 @@ int32_t RTPSender::SendOutgoingData(
return 0;
}
}
RtpVideoCodecTypes video_type = kRtpGenericVideo;
RtpVideoCodecTypes video_type = kRtpVideoGeneric;
if (CheckPayloadType(payload_type, &video_type) != 0) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
"%s invalid argument failed to find payload_type:%d",
@ -1178,7 +1178,7 @@ void RTPSender::SetSendingStatus(const bool enabled) {
}
frequency_hz = frequency;
} else {
frequency_hz = kDefaultVideoFrequency;
frequency_hz = kVideoPayloadTypeFrequency;
}
uint32_t RTPtime = ModuleRTPUtility::GetCurrentRTP(clock_, frequency_hz);

View File

@ -37,7 +37,7 @@ RTPSenderVideo::RTPSenderVideo(const int32_t id,
_rtpSender(*rtpSender),
_sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpGenericVideo),
_videoType(kRtpVideoGeneric),
_videoCodecInformation(NULL),
_maxBitrate(0),
_retransmissionSettings(kRetransmitBaseLayer),
@ -89,13 +89,13 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
ModuleRTPUtility::Payload*& payload) {
CriticalSectionScoped cs(_sendVideoCritsect);
RtpVideoCodecTypes videoType = kRtpGenericVideo;
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3)) {
videoType = kRtpVp8Video;
videoType = kRtpVideoVp8;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpGenericVideo;
videoType = kRtpVideoGeneric;
} else {
videoType = kRtpGenericVideo;
videoType = kRtpVideoGeneric;
}
payload = new ModuleRTPUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
@ -302,11 +302,11 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
int32_t retVal = -1;
switch(videoType)
{
case kRtpGenericVideo:
case kRtpVideoGeneric:
retVal = SendGeneric(frameType, payloadType, captureTimeStamp,
capture_time_ms, payloadData, payloadSize);
break;
case kRtpVp8Video:
case kRtpVideoVp8:
retVal = SendVP8(frameType,
payloadType,
captureTimeStamp,

View File

@ -46,6 +46,21 @@
namespace webrtc {
RtpData* NullObjectRtpData() {
static NullRtpData null_rtp_data;
return &null_rtp_data;
}
RtpFeedback* NullObjectRtpFeedback() {
static NullRtpFeedback null_rtp_feedback;
return &null_rtp_feedback;
}
RtpAudioFeedback* NullObjectRtpAudioFeedback() {
static NullRtpAudioFeedback null_rtp_audio_feedback;
return &null_rtp_audio_feedback;
}
namespace ModuleRTPUtility {
enum {
@ -188,9 +203,9 @@ void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
type = videoType;
switch (type) {
case kRtpGenericVideo:
case kRtpVideoGeneric:
break;
case kRtpVp8Video: {
case kRtpVideoVp8: {
info.VP8.nonReferenceFrame = false;
info.VP8.beginningOfPartition = false;
info.VP8.partitionID = 0;
@ -567,9 +582,9 @@ bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
parsedPacket.SetType(_videoType);
switch (_videoType) {
case kRtpGenericVideo:
case kRtpVideoGeneric:
return ParseGeneric(parsedPacket);
case kRtpVp8Video:
case kRtpVideoVp8:
return ParseVP8(parsedPacket);
default:
return false;
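The NullObjectRtpData / NullObjectRtpFeedback / NullObjectRtpAudioFeedback factories above hand out singletons of the new Null* callback bases, which (judging by how the tests below use them) implement every callback as a no-op. Test code then derives from a Null* base and overrides only the method it cares about; a minimal sketch of the pattern, mirroring the TestRtpFeedback class further down in this CL:

class SsrcForwarder : public NullRtpFeedback {
 public:
  explicit SsrcForwarder(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
  // Only the callback of interest is overridden; the Null base keeps the rest
  // as no-ops.
  virtual void OnIncomingSSRCChanged(const int32_t id, const uint32_t ssrc) {
    rtp_rtcp_->SetRemoteSSRC(ssrc);
  }
 private:
  RtpRtcp* rtp_rtcp_;  // Not owned.
};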

View File

@ -19,15 +19,13 @@
#include "webrtc/typedefs.h"
namespace webrtc {
enum RtpVideoCodecTypes
{
kRtpGenericVideo = 0,
kRtpFecVideo = 10,
kRtpVp8Video = 11
};
const uint8_t kRtpMarkerBitMask = 0x80;
RtpData* NullObjectRtpData();
RtpFeedback* NullObjectRtpFeedback();
RtpAudioFeedback* NullObjectRtpAudioFeedback();
namespace ModuleRTPUtility
{
// January 1970, in NTP seconds.
@ -36,22 +34,6 @@ namespace ModuleRTPUtility
// Magic NTP fractional unit.
const double NTP_FRAC = 4.294967296E+9;
struct AudioPayload
{
uint32_t frequency;
uint8_t channels;
uint32_t rate;
};
struct VideoPayload
{
RtpVideoCodecTypes videoCodecType;
uint32_t maxRate;
};
union PayloadUnion
{
AudioPayload Audio;
VideoPayload Video;
};
struct Payload
{
char name[RTP_PAYLOAD_NAME_SIZE];

View File

@ -76,13 +76,13 @@ TEST(ParseVP8Test, BasicHeader) {
payload[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
payload[1] = 0x01; // P frame.
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
@ -97,13 +97,13 @@ TEST(ParseVP8Test, PictureID) {
payload[1] = 0x80;
payload[2] = 17;
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
@ -117,7 +117,7 @@ TEST(ParseVP8Test, PictureID) {
// Re-use payload, but change to long PictureID.
payload[2] = 0x80 | 17;
payload[3] = 17;
RTPPayloadParser rtpPayloadParser2(kRtpVp8Video, payload, 10, 0);
RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10, 0);
ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
@ -136,13 +136,13 @@ TEST(ParseVP8Test, Tl0PicIdx) {
payload[1] = 0x40;
payload[2] = 17;
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 13, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 1 /*L*/, 0 /*T*/, 0 /*K*/);
@ -159,13 +159,13 @@ TEST(ParseVP8Test, TIDAndLayerSync) {
payload[1] = 0x20;
payload[2] = 0x80; // TID(2) + LayerSync(false)
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 1 /*T*/, 0 /*K*/);
@ -183,13 +183,13 @@ TEST(ParseVP8Test, KeyIdx) {
payload[1] = 0x10; // K = 1.
payload[2] = 0x11; // KEYIDX = 17 decimal.
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 1 /*K*/);
@ -209,13 +209,13 @@ TEST(ParseVP8Test, MultipleExtensions) {
payload[4] = 42; // Tl0PicIdx.
payload[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 1 /*L*/, 1 /*T*/, 1 /*K*/);
@ -236,7 +236,7 @@ TEST(ParseVP8Test, TooShortHeader) {
payload[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
payload[3] = 17; // PictureID, low 8 bits.
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
RTPPayload parsedPacket;
EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
@ -258,13 +258,13 @@ TEST(ParseVP8Test, TestWithPacketizer) {
ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
ASSERT_TRUE(last);
RTPPayloadParser rtpPayloadParser(kRtpVp8Video, packet, send_bytes, 0);
RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes, 0);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8,
inputHeader.nonReference /*N*/,

View File

@ -33,6 +33,10 @@ class RtpRtcpAPITest : public ::testing::Test {
configuration.audio = true;
configuration.clock = &fake_clock;
module = RtpRtcp::CreateRtpRtcp(configuration);
rtp_payload_registry_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_receiver_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, NULL, NULL, NULL, rtp_payload_registry_.get()));
}
virtual void TearDown() {
@ -40,6 +44,8 @@ class RtpRtcpAPITest : public ::testing::Test {
}
int test_id;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* module;
uint32_t test_ssrc;
uint32_t test_timestamp;
@ -103,9 +109,9 @@ TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_EQ(0, module->SetTMMBRStatus(false));
EXPECT_FALSE(module->TMMBR());
EXPECT_EQ(kNackOff, module->NACK());
EXPECT_EQ(0, module->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(kNackRtcp, module->NACK());
EXPECT_EQ(kNackOff, rtp_receiver_->NACK());
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(kNackRtcp, rtp_receiver_->NACK());
}
TEST_F(RtpRtcpAPITest, RTXSender) {
@ -129,7 +135,7 @@ TEST_F(RtpRtcpAPITest, RTXSender) {
EXPECT_EQ(0, module->SetRTXSendStatus(kRtxRetransmitted, false, 1));
EXPECT_EQ(0, module->RTXSendStatus(&rtx_mode, &ssrc, &payload_type));
EXPECT_EQ(kRtxRetransmitted, rtx_mode);
EXPECT_EQ(kRtxPayloadType ,payload_type);
EXPECT_EQ(kRtxPayloadType, payload_type);
}
TEST_F(RtpRtcpAPITest, RTXReceiver) {
@ -137,14 +143,14 @@ TEST_F(RtpRtcpAPITest, RTXReceiver) {
unsigned int ssrc = 0;
const int kRtxPayloadType = 119;
int payload_type = -1;
EXPECT_EQ(0, module->SetRTXReceiveStatus(true, 1));
module->SetRtxReceivePayloadType(kRtxPayloadType);
EXPECT_EQ(0, module->RTXReceiveStatus(&enable, &ssrc, &payload_type));
rtp_receiver_->SetRTXStatus(true, 1);
rtp_receiver_->SetRtxPayloadType(kRtxPayloadType);
rtp_receiver_->RTXStatus(&enable, &ssrc, &payload_type);
EXPECT_TRUE(enable);
EXPECT_EQ(1u, ssrc);
EXPECT_EQ(kRtxPayloadType, payload_type);
EXPECT_EQ(0, module->SetRTXReceiveStatus(false, 0));
EXPECT_EQ(0, module->RTXReceiveStatus(&enable, &ssrc, &payload_type));
rtp_receiver_->SetRTXStatus(false, 0);
rtp_receiver_->RTXStatus(&enable, &ssrc, &payload_type);
EXPECT_FALSE(enable);
EXPECT_EQ(kRtxPayloadType, payload_type);
}
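As the updated tests show, receive-side NACK and RTX configuration now lives on RtpReceiver instead of the RtpRtcp module. A short sketch, assuming an rtp_receiver created as in the fixture above; kRtxSsrc and kRtxPayloadType are placeholder constants:

rtp_receiver->SetNACKStatus(kNackRtcp, 450);  // method + max reordering threshold
rtp_receiver->SetRTXStatus(true, kRtxSsrc);   // enable RTX for the given SSRC
rtp_receiver->SetRtxPayloadType(kRtxPayloadType);

bool rtx_enabled = false;
unsigned int rtx_ssrc = 0;
int rtx_payload_type = -1;
rtp_receiver->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);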

View File

@ -10,7 +10,10 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -24,10 +27,18 @@ class LoopBackTransport : public webrtc::Transport {
LoopBackTransport()
: _count(0),
_packetLoss(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
_rtpRtcpModule(NULL) {
}
void SetSendModule(RtpRtcp* rtpRtcpModule) {
void SetSendModule(RtpRtcp* rtpRtcpModule,
RTPPayloadRegistry* payload_registry,
RtpReceiver* receiver,
ReceiveStatistics* receive_statistics) {
_rtpRtcpModule = rtpRtcpModule;
rtp_payload_registry_ = payload_registry;
rtp_receiver_ = receiver;
receive_statistics_ = receive_statistics;
}
void DropEveryNthPacket(int n) {
_packetLoss = n;
@ -44,8 +55,15 @@ class LoopBackTransport : public webrtc::Transport {
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return -1;
}
if (_rtpRtcpModule->IncomingRtpPacket(static_cast<const uint8_t*>(data),
len, header) < 0) {
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
receive_statistics_->IncomingPacket(header, len, false, true);
if (!rtp_receiver_->IncomingRtpPacket(&header,
static_cast<const uint8_t*>(data),
len, payload_specific, true)) {
return -1;
}
return len;
@ -59,10 +77,13 @@ class LoopBackTransport : public webrtc::Transport {
private:
int _count;
int _packetLoss;
ReceiveStatistics* receive_statistics_;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* _rtpRtcpModule;
};
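SendPacket above spells out the new incoming-RTP path: parse the header, look the payload type up in the registry, update the receive statistics, then hand the packet to the receiver. Condensed into one sketch (webrtc namespace assumed; the boolean flags are passed exactly as the test transport passes them):

bool DeliverRtp(const uint8_t* data, int len,
                RTPPayloadRegistry* registry,
                ReceiveStatistics* receive_statistics,
                RtpReceiver* rtp_receiver) {
  RTPHeader header;
  scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
  if (!parser->Parse(data, len, &header))
    return false;
  PayloadUnion payload_specific;
  if (!registry->GetPayloadSpecifics(header.payloadType, &payload_specific))
    return false;
  receive_statistics->IncomingPacket(header, len, false, true);
  return rtp_receiver->IncomingRtpPacket(&header, data, len,
                                         payload_specific, true);
}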
class RtpReceiver : public RtpData {
class TestRtpReceiver : public NullRtpData {
public:
virtual int32_t OnReceivedPayloadData(

View File

@ -17,12 +17,13 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
using namespace webrtc;
#define test_rate 64000u
class VerifyingAudioReceiver : public RtpData {
class VerifyingAudioReceiver : public NullRtpData {
public:
virtual int32_t OnReceivedPayloadData(
const uint8_t* payloadData,
@ -58,7 +59,7 @@ class VerifyingAudioReceiver : public RtpData {
}
};
class RTPCallback : public RtpFeedback {
class RTPCallback : public NullRtpFeedback {
public:
virtual int32_t OnInitializeDecoder(
const int32_t id,
@ -73,24 +74,9 @@ class RTPCallback : public RtpFeedback {
}
return 0;
}
virtual void OnPacketTimeout(const int32_t id) {
}
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packetType) {
}
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {
}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
}
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) {
}
};
class AudioFeedback : public RtpAudioFeedback {
class AudioFeedback : public NullRtpAudioFeedback {
virtual void OnReceivedTelephoneEvent(const int32_t id,
const uint8_t event,
const bool end) {
@ -110,11 +96,6 @@ class AudioFeedback : public RtpAudioFeedback {
expectedEvent = 32;
}
}
virtual void OnPlayTelephoneEvent(const int32_t id,
const uint8_t event,
const uint16_t lengthMs,
const uint8_t volume) {
};
};
class RtpRtcpAudioTest : public ::testing::Test {
@ -137,26 +118,41 @@ class RtpRtcpAudioTest : public ::testing::Test {
transport1 = new LoopBackTransport();
transport2 = new LoopBackTransport();
receive_statistics1_.reset(ReceiveStatistics::Create(&fake_clock));
receive_statistics2_.reset(ReceiveStatistics::Create(&fake_clock));
rtp_payload_registry1_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_payload_registry2_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
RtpRtcp::Configuration configuration;
configuration.id = test_id;
configuration.audio = true;
configuration.clock = &fake_clock;
configuration.incoming_data = data_receiver1;
configuration.receive_statistics = receive_statistics1_.get();
configuration.outgoing_transport = transport1;
configuration.audio_messages = audioFeedback;
module1 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver1_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, audioFeedback, data_receiver1, NULL,
rtp_payload_registry1_.get()));
configuration.id = test_id + 1;
configuration.incoming_data = data_receiver2;
configuration.incoming_messages = rtp_callback;
configuration.receive_statistics = receive_statistics2_.get();
configuration.outgoing_transport = transport2;
configuration.audio_messages = audioFeedback;
module2 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver2_.reset(RtpReceiver::CreateAudioReceiver(
test_id + 1, &fake_clock, audioFeedback, data_receiver2, NULL,
rtp_payload_registry2_.get()));
transport1->SetSendModule(module2);
transport2->SetSendModule(module1);
transport1->SetSendModule(module2, rtp_payload_registry2_.get(),
rtp_receiver2_.get(), receive_statistics2_.get());
transport2->SetSendModule(module1, rtp_payload_registry1_.get(),
rtp_receiver1_.get(), receive_statistics1_.get());
}
virtual void TearDown() {
@ -173,6 +169,12 @@ class RtpRtcpAudioTest : public ::testing::Test {
int test_id;
RtpRtcp* module1;
RtpRtcp* module2;
scoped_ptr<ReceiveStatistics> receive_statistics1_;
scoped_ptr<ReceiveStatistics> receive_statistics2_;
scoped_ptr<RtpReceiver> rtp_receiver1_;
scoped_ptr<RtpReceiver> rtp_receiver2_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
VerifyingAudioReceiver* data_receiver1;
VerifyingAudioReceiver* data_receiver2;
LoopBackTransport* transport1;
@ -191,63 +193,93 @@ TEST_F(RtpRtcpAudioTest, Basic) {
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
// Test detection at the end of a DTMF tone.
EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
//EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
EXPECT_EQ(0, module1->SetSendingStatus(true));
// Start basic RTP test.
// Send an empty RTP packet.
// Should fail since we have not registerd the payload type.
// Should fail since we have not registered the payload type.
EXPECT_EQ(-1, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
96, 0, -1, NULL, 0));
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
printf("4\n");
const uint8_t test[5] = "test";
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
0, -1, test, 4));
EXPECT_EQ(test_ssrc, module2->RemoteSSRC());
EXPECT_EQ(test_timestamp, module2->RemoteTimestamp());
EXPECT_EQ(test_ssrc, rtp_receiver2_->SSRC());
EXPECT_EQ(test_timestamp, rtp_receiver2_->Timestamp());
}
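Registering a receive codec no longer takes a CodecInst; the individual fields go straight to RtpReceiver::RegisterReceivePayload, and the tests repeat that call verbatim. A hypothetical helper (not part of this CL) that captures the pattern:

int32_t RegisterReceiveCodec(RtpReceiver* receiver, const CodecInst& codec) {
  // The tests map a negative CodecInst rate to 0 before registering.
  return receiver->RegisterReceivePayload(codec.plname,
                                          codec.pltype,
                                          codec.plfreq,
                                          codec.channels,
                                          (codec.rate < 0) ? 0 : codec.rate);
}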
TEST_F(RtpRtcpAudioTest, RED) {
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
EXPECT_EQ(0, module1->SetSendingStatus(true));
voiceCodec.pltype = 127;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "RED", 4);
voice_codec.pltype = 127;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "RED", 4);
EXPECT_EQ(0, module1->SetSendREDPayloadType(voiceCodec.pltype));
EXPECT_EQ(0, module1->SetSendREDPayloadType(voice_codec.pltype));
int8_t red = 0;
EXPECT_EQ(0, module1->SendREDPayloadType(red));
EXPECT_EQ(voiceCodec.pltype, red);
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(voice_codec.pltype, red);
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
RTPFragmentationHeader fragmentation;
fragmentation.fragmentationVectorSize = 2;
@ -275,28 +307,43 @@ TEST_F(RtpRtcpAudioTest, RED) {
}
TEST_F(RtpRtcpAudioTest, DTMF) {
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "PCMU", 5);
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
voiceCodec.rate = test_rate;
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
voice_codec.rate = test_rate;
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
EXPECT_EQ(0, module1->SetSendingStatus(true));
// Prepare for DTMF.
voiceCodec.pltype = 97;
voiceCodec.plfreq = 8000;
memcpy(voiceCodec.plname, "telephone-event", 16);
voice_codec.pltype = 97;
voice_codec.plfreq = 8000;
memcpy(voice_codec.plname, "telephone-event", 16);
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
// Start DTMF test.
uint32_t timeStamp = 160;

View File

@ -14,8 +14,10 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
using namespace webrtc;
@ -68,6 +70,20 @@ class RtcpCallback : public RtcpFeedback, public RtcpIntraFrameObserver {
RtpRtcp* _rtpRtcpModule;
};
class TestRtpFeedback : public NullRtpFeedback {
public:
TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
rtp_rtcp_->SetRemoteSSRC(SSRC);
}
private:
RtpRtcp* rtp_rtcp_;
};
class RtpRtcpRtcpTest : public ::testing::Test {
protected:
RtpRtcpRtcpTest() : fake_clock(123456) {
@ -81,31 +97,55 @@ class RtpRtcpRtcpTest : public ::testing::Test {
~RtpRtcpRtcpTest() {}
virtual void SetUp() {
receiver = new RtpReceiver();
receiver = new TestRtpReceiver();
transport1 = new LoopBackTransport();
transport2 = new LoopBackTransport();
myRTCPFeedback1 = new RtcpCallback();
myRTCPFeedback2 = new RtcpCallback();
receive_statistics1_.reset(ReceiveStatistics::Create(&fake_clock));
receive_statistics2_.reset(ReceiveStatistics::Create(&fake_clock));
RtpRtcp::Configuration configuration;
configuration.id = test_id;
configuration.audio = true;
configuration.clock = &fake_clock;
configuration.receive_statistics = receive_statistics1_.get();
configuration.outgoing_transport = transport1;
configuration.rtcp_feedback = myRTCPFeedback1;
configuration.intra_frame_callback = myRTCPFeedback1;
configuration.incoming_data = receiver;
rtp_payload_registry1_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
rtp_payload_registry2_.reset(new RTPPayloadRegistry(
test_id, RTPPayloadStrategy::CreateStrategy(true)));
module1 = RtpRtcp::CreateRtpRtcp(configuration);
rtp_feedback1_.reset(new TestRtpFeedback(module1));
rtp_receiver1_.reset(RtpReceiver::CreateAudioReceiver(
test_id, &fake_clock, NULL, receiver, rtp_feedback1_.get(),
rtp_payload_registry1_.get()));
configuration.receive_statistics = receive_statistics2_.get();
configuration.id = test_id + 1;
configuration.outgoing_transport = transport2;
configuration.rtcp_feedback = myRTCPFeedback2;
configuration.intra_frame_callback = myRTCPFeedback2;
module2 = RtpRtcp::CreateRtpRtcp(configuration);
transport1->SetSendModule(module2);
transport2->SetSendModule(module1);
rtp_feedback2_.reset(new TestRtpFeedback(module2));
rtp_receiver2_.reset(RtpReceiver::CreateAudioReceiver(
test_id + 1, &fake_clock, NULL, receiver, rtp_feedback2_.get(),
rtp_payload_registry2_.get()));
transport1->SetSendModule(module2, rtp_payload_registry2_.get(),
rtp_receiver2_.get(), receive_statistics2_.get());
transport2->SetSendModule(module1, rtp_payload_registry1_.get(),
rtp_receiver1_.get(), receive_statistics1_.get());
myRTCPFeedback1->SetModule(module1);
myRTCPFeedback2->SetModule(module2);
@ -121,16 +161,26 @@ class RtpRtcpRtcpTest : public ::testing::Test {
EXPECT_EQ(0, module1->SetSendingStatus(true));
CodecInst voiceCodec;
voiceCodec.pltype = 96;
voiceCodec.plfreq = 8000;
voiceCodec.rate = 64000;
memcpy(voiceCodec.plname, "PCMU", 5);
CodecInst voice_codec;
voice_codec.pltype = 96;
voice_codec.plfreq = 8000;
voice_codec.rate = 64000;
memcpy(voice_codec.plname, "PCMU", 5);
EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
EXPECT_EQ(0, module1->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
EXPECT_EQ(0, module2->RegisterSendPayload(voice_codec));
EXPECT_EQ(0, rtp_receiver2_->RegisterReceivePayload(
voice_codec.plname,
voice_codec.pltype,
voice_codec.plfreq,
voice_codec.channels,
(voice_codec.rate < 0) ? 0 : voice_codec.rate));
// We need to send one RTP packet to get the RTCP packet to be accepted by
// the receiving module.
@ -151,9 +201,17 @@ class RtpRtcpRtcpTest : public ::testing::Test {
}
int test_id;
scoped_ptr<TestRtpFeedback> rtp_feedback1_;
scoped_ptr<TestRtpFeedback> rtp_feedback2_;
scoped_ptr<ReceiveStatistics> receive_statistics1_;
scoped_ptr<ReceiveStatistics> receive_statistics2_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
scoped_ptr<RtpReceiver> rtp_receiver1_;
scoped_ptr<RtpReceiver> rtp_receiver2_;
RtpRtcp* module1;
RtpRtcp* module2;
RtpReceiver* receiver;
TestRtpReceiver* receiver;
LoopBackTransport* transport1;
LoopBackTransport* transport2;
RtcpCallback* myRTCPFeedback1;
@ -173,7 +231,7 @@ TEST_F(RtpRtcpRtcpTest, RTCP_PLI_RPSI) {
TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
uint32_t testOfCSRC[webrtc::kRtpCsrcSize];
EXPECT_EQ(2, module2->RemoteCSRCs(testOfCSRC));
EXPECT_EQ(2, rtp_receiver2_->CSRCs(testOfCSRC));
EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
@ -192,10 +250,10 @@ TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
module2->Process();
char cName[RTCP_CNAME_SIZE];
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
EXPECT_EQ(-1, module2->RemoteCNAME(rtp_receiver2_->SSRC() + 1, cName));
// Check multiple CNAME.
EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
EXPECT_EQ(0, module2->RemoteCNAME(rtp_receiver2_->SSRC(), cName));
EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
@ -207,7 +265,7 @@ TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
EXPECT_EQ(0, module1->SetSendingStatus(false));
// Test that BYE clears the CNAME
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
EXPECT_EQ(-1, module2->RemoteCNAME(rtp_receiver2_->SSRC(), cName));
}
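Remote-side queries follow the same split: the CSRC list and remote SSRC are read from the receiver, while RTCP data such as CNAMEs stays on the module. A sketch mirroring the assertions above (webrtc namespace assumed; rtp_receiver and rtp_rtcp as in the fixture):

uint32_t csrcs[kRtpCsrcSize];
int num_csrcs = rtp_receiver->CSRCs(csrcs);   // was module->RemoteCSRCs()
uint32_t remote_ssrc = rtp_receiver->SSRC();  // was module->RemoteSSRC()
char cname[RTCP_CNAME_SIZE];
rtp_rtcp->RemoteCNAME(remote_ssrc, cname);    // RTCP CNAME lookup stays on the module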
TEST_F(RtpRtcpRtcpTest, RTCP) {
@ -276,20 +334,12 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
EXPECT_EQ(static_cast<uint32_t>(0),
reportBlockReceived.cumulativeLost);
uint8_t fraction_lost = 0; // scale 0 to 255
uint32_t cum_lost = 0; // number of lost packets
uint32_t ext_max = 0; // highest sequence number received
uint32_t jitter = 0;
uint32_t max_jitter = 0;
EXPECT_EQ(0, module2->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter));
EXPECT_EQ(0, fraction_lost);
EXPECT_EQ((uint32_t)0, cum_lost);
EXPECT_EQ(test_sequence_number, ext_max);
EXPECT_EQ(reportBlockReceived.jitter, jitter);
ReceiveStatistics::RtpReceiveStatistics stats;
EXPECT_TRUE(receive_statistics2_->Statistics(&stats, true));
EXPECT_EQ(0, stats.fraction_lost);
EXPECT_EQ((uint32_t)0, stats.cumulative_lost);
EXPECT_EQ(test_sequence_number, stats.extended_max_sequence_number);
EXPECT_EQ(reportBlockReceived.jitter, stats.jitter);
uint16_t RTT;
uint16_t avgRTT;

View File

@ -15,6 +15,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
@ -27,6 +28,7 @@ class RtpRtcpVideoTest : public ::testing::Test {
protected:
RtpRtcpVideoTest()
: test_id_(123),
rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
test_ssrc_(3456),
test_timestamp_(4567),
test_sequence_number_(2345),
@ -36,23 +38,26 @@ class RtpRtcpVideoTest : public ::testing::Test {
virtual void SetUp() {
transport_ = new LoopBackTransport();
receiver_ = new RtpReceiver();
receiver_ = new TestRtpReceiver();
receive_statistics_.reset(ReceiveStatistics::Create(&fake_clock));
RtpRtcp::Configuration configuration;
configuration.id = test_id_;
configuration.audio = false;
configuration.clock = &fake_clock;
configuration.incoming_data = receiver_;
configuration.outgoing_transport = transport_;
video_module_ = RtpRtcp::CreateRtpRtcp(configuration);
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
test_id_, &fake_clock, receiver_, NULL, &rtp_payload_registry_));
EXPECT_EQ(0, video_module_->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, video_module_->SetSSRC(test_ssrc_));
EXPECT_EQ(0, video_module_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, rtp_receiver_->SetNACKStatus(kNackRtcp, 450));
EXPECT_EQ(0, video_module_->SetStorePacketsStatus(true, 600));
EXPECT_EQ(0, video_module_->SetSendingStatus(true));
transport_->SetSendModule(video_module_);
transport_->SetSendModule(video_module_, &rtp_payload_registry_,
rtp_receiver_.get(), receive_statistics_.get());
VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
@ -60,7 +65,11 @@ class RtpRtcpVideoTest : public ::testing::Test {
memcpy(video_codec.plName, "I420", 5);
EXPECT_EQ(0, video_module_->RegisterSendPayload(video_codec));
EXPECT_EQ(0, video_module_->RegisterReceivePayload(video_codec));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate));
payload_data_length_ = sizeof(video_frame_);
@ -118,9 +127,12 @@ class RtpRtcpVideoTest : public ::testing::Test {
}
int test_id_;
scoped_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* video_module_;
LoopBackTransport* transport_;
RtpReceiver* receiver_;
TestRtpReceiver* receiver_;
uint32_t test_ssrc_;
uint32_t test_timestamp_;
uint16_t test_sequence_number_;
@ -148,7 +160,11 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
codec.codecType = kVideoCodecVP8;
codec.plType = kPayloadType;
strncpy(codec.plName, "VP8", 4);
EXPECT_EQ(0, video_module_->RegisterReceivePayload(codec));
EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(codec.plName,
codec.plType,
90000,
0,
codec.maxBitrate));
for (int frame_idx = 0; frame_idx < 10; ++frame_idx) {
for (int packet_idx = 0; packet_idx < 5; ++packet_idx) {
int packet_size = PaddingPacket(padding_packet, timestamp, seq_num,
@ -157,8 +173,12 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
RTPHeader header;
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(padding_packet, packet_size, &header));
EXPECT_EQ(0, video_module_->IncomingRtpPacket(padding_packet,
packet_size, header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
&payload_specific));
EXPECT_TRUE(rtp_receiver_->IncomingRtpPacket(&header, padding_packet,
packet_size,
payload_specific, true));
EXPECT_EQ(0, receiver_->payload_size());
EXPECT_EQ(packet_size - 12, receiver_->rtp_header().header.paddingLength);
}

View File

@ -121,8 +121,8 @@ class VCMSendStatisticsCallback {
// Callback class used for informing the user of the incoming bit rate and frame rate.
class VCMReceiveStatisticsCallback {
public:
virtual int32_t ReceiveStatistics(const uint32_t bitRate,
const uint32_t frameRate) = 0;
virtual int32_t OnReceiveStatisticsUpdate(const uint32_t bitRate,
const uint32_t frameRate) = 0;
protected:
virtual ~VCMReceiveStatisticsCallback() {

View File

@ -36,7 +36,7 @@ TEST(TestDecodingState, FrameContinuity) {
packet->timestamp = 1;
packet->seqNum = 0xffff;
packet->frameType = kVideoFrameDelta;
packet->codecSpecificHeader.codec = kRTPVideoVP8;
packet->codecSpecificHeader.codec = kRtpVideoVp8;
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
FrameData frame_data;
frame_data.rtt_ms = 0;
@ -213,7 +213,7 @@ TEST(TestDecodingState, MultiLayerBehavior) {
VCMFrameBuffer frame;
VCMPacket* packet = new VCMPacket();
packet->frameType = kVideoFrameDelta;
packet->codecSpecificHeader.codec = kRTPVideoVP8;
packet->codecSpecificHeader.codec = kRtpVideoVp8;
packet->timestamp = 0;
packet->seqNum = 0;
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
@ -369,7 +369,7 @@ TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) {
VCMPacket packet;
frame.Reset();
packet.frameType = kVideoFrameKey;
packet.codecSpecificHeader.codec = kRTPVideoVP8;
packet.codecSpecificHeader.codec = kRtpVideoVp8;
packet.timestamp = 0;
packet.seqNum = 0;
packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;

View File

@ -104,7 +104,7 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
{
switch (header->codec)
{
case kRTPVideoVP8:
case kRtpVideoVp8:
{
if (_codecSpecificInfo.codecType != kVideoCodecVP8)
{

View File

@ -94,7 +94,7 @@ void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader)
{
switch(videoHeader.codec)
{
case kRTPVideoVP8:
case kRtpVideoVp8:
{
// Handle all packets within a frame as depending on the previous packet
// TODO(holmer): This should be changed to make fragments independent
@ -111,7 +111,7 @@ void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader)
codec = kVideoCodecVP8;
break;
}
case kRTPVideoI420:
case kRtpVideoI420:
{
codec = kVideoCodecI420;
break;

View File

@ -58,35 +58,35 @@ int VCMSessionInfo::HighSequenceNumber() const {
int VCMSessionInfo::PictureId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
return kNoPictureId;
return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
}
int VCMSessionInfo::TemporalId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
return kNoTemporalIdx;
return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
}
bool VCMSessionInfo::LayerSync() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
return false;
return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
}
int VCMSessionInfo::Tl0PicId() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
return kNoTl0PicIdx;
return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
}
bool VCMSessionInfo::NonReference() const {
if (packets_.empty() ||
packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
return false;
return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
}

View File

@ -69,7 +69,7 @@ class TestVP8Partitions : public TestSessionInfo {
TestSessionInfo::SetUp();
vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
packet_header_.frameType = kVideoFrameDelta;
packet_header_.type.Video.codec = kRTPVideoVP8;
packet_header_.type.Video.codec = kRtpVideoVp8;
vp8_header_->InitRTPVideoHeaderVP8();
fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
}

View File

@ -143,7 +143,8 @@ VideoCodingModuleImpl::Process() {
uint32_t bitRate;
uint32_t frameRate;
_receiver.ReceiveStatistics(&bitRate, &frameRate);
_receiveStatsCallback->ReceiveStatistics(bitRate, frameRate);
_receiveStatsCallback->OnReceiveStatisticsUpdate(bitRate,
frameRate);
}
// Size of render buffer.

View File

@ -192,7 +192,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyFrames) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRTPVideoVP8;
header.type.Video.codec = kRtpVideoVp8;
for (int i = 0; i < 10; ++i) {
EXPECT_CALL(packet_request_callback_, ResendPackets(_, _))
.Times(0);
@ -216,7 +216,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyFramesWithLosses) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRTPVideoVP8;
header.type.Video.codec = kRtpVideoVp8;
// Insert one video frame to get one frame decoded.
header.frameType = kVideoFrameKey;
header.type.Video.isFirstPacket = true;
@ -270,7 +270,7 @@ TEST_F(TestVideoCodingModule, PaddingOnlyAndVideo) {
header.header.payloadType = kUnusedPayloadType;
header.header.ssrc = 1;
header.header.headerLength = 12;
header.type.Video.codec = kRTPVideoVP8;
header.type.Video.codec = kRtpVideoVp8;
header.type.Video.codecHeader.VP8.pictureId = -1;
header.type.Video.codecHeader.VP8.tl0PicIdx = -1;
for (int i = 0; i < 3; ++i) {

View File

@ -67,7 +67,7 @@ class VCMRobustnessTest : public ::testing::Test {
rtp_info.header.sequenceNumber = seq_no;
rtp_info.header.markerBit = marker_bit;
rtp_info.header.payloadType = video_codec_.plType;
rtp_info.type.Video.codec = kRTPVideoVP8;
rtp_info.type.Video.codec = kRtpVideoVp8;
rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtp_info.type.Video.isFirstPacket = first;

View File

@ -151,7 +151,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
// Testing with VP8.
VideoCodingModule::Codec(kVideoCodecVP8, &sendCodec);
_vcm->RegisterSendCodec(&sendCodec, 1, 1440);
_encodeCompleteCallback->SetCodecType(kRTPVideoVP8);
_encodeCompleteCallback->SetCodecType(kRtpVideoVp8);
_vcm->InitializeReceiver();
TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK );
_vcm->InitializeSender();
@ -196,7 +196,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
VideoCodingModule::Codec(kVideoCodecVP8, &vp8EncSettings);
_vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
_encodeCallback->RegisterReceiverVCM(_vcm);
_encodeCallback->SetCodecType(kRTPVideoVP8);
_encodeCallback->SetCodecType(kRtpVideoVp8);
TEST(_vcm->RegisterExternalEncoder(encoder, vp8EncSettings.plType) == VCM_OK);
TEST(_vcm->RegisterSendCodec(&vp8EncSettings, 4, 1440) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
@ -232,7 +232,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
TEST(_vcm->RegisterReceiveCodec(&receiveCodec, 1, true) == VCM_OK); // Require key frame
_vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
_encodeCallback->RegisterReceiverVCM(_vcm);
_encodeCallback->SetCodecType(kRTPVideoVP8);
_encodeCallback->SetCodecType(kRtpVideoVp8);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
TEST(_vcm->ResetDecoder() == VCM_OK);

View File

@ -549,7 +549,7 @@ VCMEncComplete_KeyReqTest::SendData(
WebRtcRTPHeader rtpInfo;
rtpInfo.header.markerBit = true; // end of frame
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.type.Video.codec = kRtpVideoVp8;
rtpInfo.header.payloadType = payloadType;
rtpInfo.header.sequenceNumber = _seqNo;
_seqNo += 2;

View File

@ -18,6 +18,7 @@
#include <time.h>
#include <vector>
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/modules/video_coding/main/test/test_util.h"
@ -202,7 +203,6 @@ MediaOptTest::GeneralSetup()
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
configuration.incoming_data = _dataCallback;
configuration.outgoing_transport = _outgoingTransport;
_rtp = RtpRtcp::CreateRtpRtcp(configuration);
@ -211,21 +211,33 @@ MediaOptTest::GeneralSetup()
// Registering codecs for the RTP module
// Register receive and send payload
VideoCodec videoCodec;
strncpy(videoCodec.plName, "VP8", 32);
videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
VideoCodec video_codec;
strncpy(video_codec.plName, "VP8", 32);
video_codec.plType = VCM_VP8_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
strncpy(videoCodec.plName, "ULPFEC", 32);
videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
strncpy(video_codec.plName, "ULPFEC", 32);
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
strncpy(video_codec.plName, "RED", 32);
video_codec.plType = VCM_RED_PAYLOAD_TYPE;
rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate);
_rtp->RegisterSendPayload(video_codec);
if (_nackFecEnabled == 1)
_rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE,

View File

@ -53,6 +53,7 @@ public:
private:
webrtc::VideoCodingModule* _vcm;
webrtc::RtpReceiver* rtp_receiver_;
webrtc::RtpRtcp* _rtp;
webrtc::RTPSendCompleteCallback* _outgoingTransport;
RtpDataCallback* _dataCallback;

View File

@ -16,6 +16,8 @@
#include <string.h>
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/test/media_opt_test.h"
@ -152,29 +154,46 @@ int MTRxTxTest(CmdArgs& args)
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
configuration.incoming_data = &dataCallback;
configuration.outgoing_transport = outgoingTransport;
RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(configuration);
scoped_ptr<RTPPayloadRegistry> registry(new RTPPayloadRegistry(
-1, RTPPayloadStrategy::CreateStrategy(false)));
scoped_ptr<RtpReceiver> rtp_receiver(
RtpReceiver::CreateVideoReceiver(-1, Clock::GetRealTimeClock(),
&dataCallback, NULL, registry.get()));
// registering codecs for the RTP module
VideoCodec videoCodec;
strncpy(videoCodec.plName, "ULPFEC", 32);
videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
VideoCodec video_codec;
strncpy(video_codec.plName, "ULPFEC", 32);
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
strncpy(video_codec.plName, "RED", 32);
video_codec.plType = VCM_RED_PAYLOAD_TYPE;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
strncpy(videoCodec.plName, args.codecName.c_str(), 32);
videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
videoCodec.maxBitrate = 10000;
videoCodec.codecType = args.codecType;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
TEST(rtp->RegisterSendPayload(videoCodec) == 0);
strncpy(video_codec.plName, args.codecName.c_str(), 32);
video_codec.plType = VCM_VP8_PAYLOAD_TYPE;
video_codec.maxBitrate = 10000;
video_codec.codecType = args.codecType;
TEST(rtp_receiver->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
90000,
0,
video_codec.maxBitrate) == 0);
TEST(rtp->RegisterSendPayload(video_codec) == 0);
// inform RTP Module of error resilience features
TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0);
TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE,
VCM_ULPFEC_PAYLOAD_TYPE) == 0);
//VCM
if (vcm->InitializeReceiver() < 0)
@ -238,7 +257,8 @@ int MTRxTxTest(CmdArgs& args)
FecProtectionParams delta_params = protectionCallback.DeltaFecParameters();
FecProtectionParams key_params = protectionCallback.KeyFecParameters();
rtp->SetFecParameters(&delta_params, &key_params);
rtp->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff, kMaxPacketAgeToNack);
rtp_receiver->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff,
kMaxPacketAgeToNack);
vcm->SetChannelParameters(static_cast<uint32_t>(1000 * bitRate),
(uint8_t) lossRate, rttMS);

View File

@ -13,6 +13,8 @@
#include <math.h>
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/clock.h"
@ -95,7 +97,14 @@ TransportCallback::TransportPackets()
delete packet;
return -1;
}
if (_rtp->IncomingRtpPacket(packet->data, packet->length, header) < 0)
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header, packet->data,
packet->length, payload_specific,
true))
{
delete packet;
return -1;

View File

@ -95,7 +95,7 @@ VCMNTEncodeCompleteCallback::SendData(
switch (_test.VideoType())
{
case kVideoCodecVP8:
rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.type.Video.codec = kRtpVideoVp8;
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoHdr->codecHeader.VP8.nonReference;
@ -103,7 +103,7 @@ VCMNTEncodeCompleteCallback::SendData(
videoHdr->codecHeader.VP8.pictureId;
break;
case kVideoCodecI420:
rtpInfo.type.Video.codec = kRTPVideoI420;
rtpInfo.type.Video.codec = kRtpVideoI420;
break;
default:
assert(false);

View File

@ -22,7 +22,7 @@
#include <stdio.h>
#include <string>
class RtpDataCallback : public webrtc::RtpData {
class RtpDataCallback : public webrtc::NullRtpData {
public:
RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
virtual ~RtpDataCallback() {}

View File

@ -15,6 +15,8 @@
#include <map>
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/test/pcap_file_reader.h"
@ -217,8 +219,9 @@ class SsrcHandlers {
RtpRtcp::Configuration configuration;
configuration.id = 1;
configuration.audio = false;
configuration.incoming_data = handler->payload_sink_.get();
handler->rtp_module_.reset(RtpRtcp::CreateRtpRtcp(configuration));
handler->rtp_module_.reset(RtpReceiver::CreateVideoReceiver(
configuration.id, configuration.clock, handler->payload_sink_.get(),
NULL, handler->rtp_payload_registry_.get()));
if (handler->rtp_module_.get() == NULL) {
return -1;
}
@ -227,9 +230,6 @@ class SsrcHandlers {
kMaxPacketAgeToNack) < 0) {
return -1;
}
handler->rtp_module_->SetRTCPStatus(kRtcpNonCompound);
handler->rtp_module_->SetREMBStatus(true);
handler->rtp_module_->SetSSRCFilter(true, ssrc);
handler->rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset,
kDefaultTransmissionTimeOffsetExtensionId);
@ -241,7 +241,11 @@ class SsrcHandlers {
strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName)-1);
codec.plType = it->payload_type();
codec.codecType = it->codec_type();
if (handler->rtp_module_->RegisterReceivePayload(codec) < 0) {
if (handler->rtp_module_->RegisterReceivePayload(codec.plName,
codec.plType,
90000,
0,
codec.maxBitrate) < 0) {
return -1;
}
}
@ -250,20 +254,18 @@ class SsrcHandlers {
return 0;
}
void Process() {
for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
it->second->rtp_module_->Process();
}
}
void IncomingPacket(const uint8_t* data, uint32_t length) {
for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
if (it->second->rtp_header_parser_->IsRtcp(data, length)) {
it->second->rtp_module_->IncomingRtcpPacket(data, length);
} else {
if (!it->second->rtp_header_parser_->IsRtcp(data, length)) {
RTPHeader header;
it->second->rtp_header_parser_->Parse(data, length, &header);
it->second->rtp_module_->IncomingRtpPacket(data, length, header);
PayloadUnion payload_specific;
it->second->rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific);
bool in_order =
it->second->rtp_module_->InOrderPacket(header.sequenceNumber);
it->second->rtp_module_->IncomingRtpPacket(&header, data, length,
payload_specific, in_order);
}
}
}
@ -274,6 +276,8 @@ class SsrcHandlers {
Handler(uint32_t ssrc, const PayloadTypes& payload_types,
LostPackets* lost_packets)
: rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(new RTPPayloadRegistry(
0, RTPPayloadStrategy::CreateStrategy(false))),
rtp_module_(),
payload_sink_(),
ssrc_(ssrc),
@ -297,7 +301,8 @@ class SsrcHandlers {
}
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RtpRtcp> rtp_module_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_module_;
scoped_ptr<PayloadSinkInterface> payload_sink_;
private:
@ -367,8 +372,6 @@ class RtpPlayerImpl : public RtpPlayerInterface {
// Send any packets from packet source.
if (!end_of_file_ && (TimeUntilNextPacket() == 0 || first_packet_)) {
ssrc_handlers_.Process();
if (first_packet_) {
next_packet_length_ = sizeof(next_packet_);
if (packet_source_->NextPacket(next_packet_, &next_packet_length_,

View File

@ -14,6 +14,8 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/system_wrappers/interface/clock.h"
@ -34,7 +36,7 @@ VCMEncodeCompleteCallback::VCMEncodeCompleteCallback(FILE* encodedFile):
_encodeComplete(false),
_width(0),
_height(0),
_codecType(kRTPVideoNoVideo)
_codecType(kRtpVideoNone)
{
//
}
@ -73,14 +75,14 @@ VCMEncodeCompleteCallback::SendData(
rtpInfo.type.Video.width = (uint16_t)_width;
switch (_codecType)
{
case webrtc::kRTPVideoVP8:
case webrtc::kRtpVideoVp8:
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoHdr->codecHeader.VP8.nonReference;
rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoHdr->codecHeader.VP8.pictureId;
break;
case webrtc::kRTPVideoI420:
case webrtc::kRtpVideoI420:
break;
default:
assert(false);
@ -209,6 +211,8 @@ RTPSendCompleteCallback::RTPSendCompleteCallback(Clock* clock,
const char* filename):
_clock(clock),
_sendCount(0),
rtp_payload_registry_(NULL),
rtp_receiver_(NULL),
_rtp(NULL),
_lossPct(0),
_burstLength(0),
@ -299,7 +303,14 @@ RTPSendCompleteCallback::SendPacket(int channel, const void *data, int len)
delete packet;
return -1;
}
if (_rtp->IncomingRtpPacket(packet->data, packet->length, header) < 0)
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(
header.payloadType, &payload_specific)) {
return -1;
}
if (!rtp_receiver_->IncomingRtpPacket(&header, packet->data,
packet->length, payload_specific,
true))
{
delete packet;
return -1;

View File

@ -30,6 +30,7 @@
namespace webrtc
{
class RTPPayloadRegistry;
class RtpDump;
// Send Side - Packetization callback - send an encoded frame to the VCMReceiver
@ -60,7 +61,7 @@ public:
// Return encode complete (true/false)
bool EncodeComplete();
// Inform callback of codec used
void SetCodecType(RTPVideoCodecTypes codecType)
void SetCodecType(RtpVideoCodecTypes codecType)
{_codecType = codecType;}
// Inform callback of frame dimensions
void SetFrameDimensions(int32_t width, int32_t height)
@ -83,7 +84,7 @@ private:
bool _encodeComplete;
int32_t _width;
int32_t _height;
RTPVideoCodecTypes _codecType;
RtpVideoCodecTypes _codecType;
}; // end of VCMEncodeCompleteCallback
@ -114,7 +115,7 @@ public:
// Return encode complete (true/false)
bool EncodeComplete();
// Inform callback of codec used
void SetCodecType(RTPVideoCodecTypes codecType)
void SetCodecType(RtpVideoCodecTypes codecType)
{_codecType = codecType;}
// Inform callback of frame dimensions
@ -131,7 +132,7 @@ private:
RtpRtcp* _RTPModule;
int16_t _width;
int16_t _height;
RTPVideoCodecTypes _codecType;
RtpVideoCodecTypes _codecType;
}; // end of VCMEncodeCompleteCallback
// Decode Complete callback
@ -189,6 +190,8 @@ protected:
Clock* _clock;
uint32_t _sendCount;
RTPPayloadRegistry* rtp_payload_registry_;
RtpReceiver* rtp_receiver_;
RtpRtcp* _rtp;
double _lossPct;
double _burstLength;

View File

@ -147,12 +147,12 @@ int32_t FileOutputFrameReceiver::FrameToRender(
return 0;
}
webrtc::RTPVideoCodecTypes ConvertCodecType(const char* plname) {
webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
if (strncmp(plname,"VP8" , 3) == 0) {
return webrtc::kRTPVideoVP8;
return webrtc::kRtpVideoVp8;
} else if (strncmp(plname,"I420" , 5) == 0) {
return webrtc::kRTPVideoI420;
return webrtc::kRtpVideoI420;
} else {
return webrtc::kRTPVideoNoVideo; // Default value
return webrtc::kRtpVideoNone; // Default value
}
}

View File

@ -102,6 +102,6 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
};
// Codec type conversion
webrtc::RTPVideoCodecTypes ConvertCodecType(const char* plname);
webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname);
#endif

View File

@ -68,6 +68,12 @@ class VcmPayloadSinkFactory::VcmPayloadSink
return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
}
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) {
// We currently don't handle FEC.
return true;
}
// VCMPacketRequestCallback
virtual int32_t ResendPackets(const uint16_t* sequence_numbers,
uint16_t length) {

View File

@ -87,8 +87,6 @@ enum ViEErrors {
kViENetworkSendCodecNotSet, // SetSendGQoS- Need to set the send codec first.
kViENetworkServiceTypeNotSupported, // SetSendGQoS
kViENetworkNotSupported, // SetSendGQoS Not supported on this OS.
kViENetworkObserverAlreadyRegistered, // RegisterObserver
kViENetworkObserverNotRegistered, // SetPeriodicDeadOrAliveStatus - Need to call RegisterObserver first, DeregisterObserver if no observer is registered.
kViENetworkUnknownError, // An unknown error has occurred. Check the log file.
// ViERTP_RTCP.

View File

@ -32,24 +32,6 @@ enum ViEPacketTimeout {
PacketReceived = 1
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViENetworkObserver {
public:
// This method will be called periodically delivering a deadoralive
// decision for a specified channel.
virtual void OnPeriodicDeadOrAlive(const int video_channel,
const bool alive) = 0;
// This method is called once if a packet timeout occurred.
virtual void PacketTimeout(const int video_channel,
const ViEPacketTimeout timeout) = 0;
protected:
virtual ~ViENetworkObserver() {}
};
class WEBRTC_DLLEXPORT ViENetwork {
public:
// Default values.
@ -96,27 +78,6 @@ class WEBRTC_DLLEXPORT ViENetwork {
// over the network.
virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
// This function enables or disables warning reports if packets have not
// been received for a specified time interval.
virtual int SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds) = 0;
// Registers an instance of a user implementation of the ViENetwork
// observer.
virtual int RegisterObserver(const int video_channel,
ViENetworkObserver& observer) = 0;
// Removes a registered instance of ViENetworkObserver.
virtual int DeregisterObserver(const int video_channel) = 0;
// This function enables or disables the periodic deadoralive callback
// functionality for a specified channel.
virtual int SetPeriodicDeadOrAliveStatus(
const int video_channel,
const bool enable,
const unsigned int sample_time_seconds = KDefaultSampleTimeSeconds) = 0;
protected:
ViENetwork() {}
virtual ~ViENetwork() {}

View File

@ -25,24 +25,6 @@
#include <qos.h>
#endif
class ViEAutoTestNetworkObserver: public webrtc::ViENetworkObserver
{
public:
ViEAutoTestNetworkObserver()
{
}
virtual ~ViEAutoTestNetworkObserver()
{
}
virtual void OnPeriodicDeadOrAlive(const int videoChannel, const bool alive)
{
}
virtual void PacketTimeout(const int videoChannel,
const webrtc::ViEPacketTimeout timeout)
{
}
};
void ViEAutoTest::ViENetworkStandardTest()
{
TbInterfaces ViE("ViENetworkStandardTest"); // Create VIE
@ -545,26 +527,6 @@ void ViEAutoTest::ViENetworkAPITest()
EXPECT_NE(0, ViE.network->SetMTU(tbChannel.videoChannel, 1600));
// Valid input
EXPECT_EQ(0, ViE.network->SetMTU(tbChannel.videoChannel, 800));
//
// Observer and timeout
//
ViEAutoTestNetworkObserver vieTestObserver;
EXPECT_EQ(0, ViE.network->RegisterObserver(
tbChannel.videoChannel, vieTestObserver));
EXPECT_NE(0, ViE.network->RegisterObserver(
tbChannel.videoChannel, vieTestObserver));
EXPECT_EQ(0, ViE.network->SetPeriodicDeadOrAliveStatus(
tbChannel.videoChannel, true)); // No observer
EXPECT_EQ(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->SetPeriodicDeadOrAliveStatus(
tbChannel.videoChannel, true)); // No observer
// Packet timeout notification
EXPECT_EQ(0, ViE.network->SetPacketTimeoutNotification(
tbChannel.videoChannel, true, 10));
}
//***************************************************************

View File

@ -15,6 +15,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
@ -74,7 +75,7 @@ ViEChannel::ViEChannel(int32_t channel_id,
default_rtp_rtcp_(default_rtp_rtcp),
rtp_rtcp_(NULL),
vcm_(*VideoCodingModule::Create(ViEModuleId(engine_id, channel_id))),
vie_receiver_(channel_id, &vcm_, remote_bitrate_estimator),
vie_receiver_(channel_id, &vcm_, remote_bitrate_estimator, this),
vie_sender_(channel_id),
vie_sync_(&vcm_, this),
stats_observer_(new ChannelStatsObserver(this)),
@ -83,16 +84,13 @@ ViEChannel::ViEChannel(int32_t channel_id,
do_key_frame_callbackRequest_(false),
rtp_observer_(NULL),
rtcp_observer_(NULL),
networkObserver_(NULL),
intra_frame_observer_(intra_frame_observer),
rtt_observer_(rtt_observer),
paced_sender_(paced_sender),
bandwidth_observer_(bandwidth_observer),
rtp_packet_timeout_(false),
send_timestamp_extension_id_(kInvalidRtpExtensionId),
absolute_send_time_extension_id_(kInvalidRtpExtensionId),
receive_absolute_send_time_enabled_(false),
using_packet_spread_(false),
external_transport_(NULL),
decoder_reset_(true),
wait_for_key_frame_(false),
@ -112,8 +110,6 @@ ViEChannel::ViEChannel(int32_t channel_id,
configuration.id = ViEModuleId(engine_id, channel_id);
configuration.audio = false;
configuration.default_module = default_rtp_rtcp;
configuration.incoming_data = &vie_receiver_;
configuration.incoming_messages = this;
configuration.outgoing_transport = &vie_sender_;
configuration.rtcp_feedback = this;
configuration.intra_frame_callback = intra_frame_observer;
@ -121,6 +117,7 @@ ViEChannel::ViEChannel(int32_t channel_id,
configuration.rtt_observer = rtt_observer;
configuration.remote_bitrate_estimator = remote_bitrate_estimator;
configuration.paced_sender = paced_sender;
configuration.receive_statistics = vie_receiver_.GetReceiveStatistics();
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
@ -132,6 +129,13 @@ int32_t ViEChannel::Init() {
"%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
engine_id_);
if (module_process_thread_.RegisterModule(
vie_receiver_.GetReceiveStatistics()) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Failed to register receive-statistics to process thread",
__FUNCTION__);
return -1;
}
// RTP/RTCP initialization.
if (rtp_rtcp_->SetSendingMediaStatus(false) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -197,7 +201,10 @@ int32_t ViEChannel::Init() {
VideoCodec video_codec;
if (vcm_.Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
rtp_rtcp_->RegisterSendPayload(video_codec);
rtp_rtcp_->RegisterReceivePayload(video_codec);
// TODO(holmer): Can we call SetReceiveCodec() here instead?
if (!vie_receiver_.RegisterPayload(video_codec)) {
return -1;
}
vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_);
vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
rtp_rtcp_->MaxDataPayloadLength());
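
The receive-statistics object returned by vie_receiver_.GetReceiveStatistics() has to be driven by the process thread: Init() registers it above and the destructor hunk that follows deregisters it. A hedged sketch of that pairing; the RAII wrapper name is an assumption, RegisterModule/DeRegisterModule are the calls in the hunks.
// Sketch only: the register/deregister pairing from ViEChannel::Init() and
// ~ViEChannel(); ScopedStatisticsRegistration is a hypothetical helper.
class ScopedStatisticsRegistration {
 public:
  ScopedStatisticsRegistration(webrtc::ProcessThread* process_thread,
                               webrtc::ReceiveStatistics* statistics)
      : process_thread_(process_thread),
        statistics_(statistics),
        registered_(process_thread_->RegisterModule(statistics_) == 0) {}
  ~ScopedStatisticsRegistration() {
    if (registered_)
      process_thread_->DeRegisterModule(statistics_);
  }
  bool registered() const { return registered_; }

 private:
  webrtc::ProcessThread* process_thread_;
  webrtc::ReceiveStatistics* statistics_;
  bool registered_;
};
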
@ -215,6 +222,7 @@ ViEChannel::~ViEChannel() {
channel_id_, engine_id_);
// Make sure we don't get more callbacks from the RTP module.
module_process_thread_.DeRegisterModule(vie_receiver_.GetReceiveStatistics());
module_process_thread_.DeRegisterModule(rtp_rtcp_.get());
module_process_thread_.DeRegisterModule(&vcm_);
module_process_thread_.DeRegisterModule(&vie_sync_);
@ -270,7 +278,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
(*it)->SetSendingMediaStatus(false);
}
}
NACKMethod nack_method = rtp_rtcp_->NACK();
bool fec_enabled = false;
uint8_t payload_type_red;
@ -317,12 +324,12 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::SetRTCPStatus failure", __FUNCTION__);
}
if (nack_method != kNackOff) {
if (rtp_rtcp_->StorePackets()) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
rtp_rtcp->SetNACKStatus(nack_method, max_nack_reordering_threshold_);
} else if (paced_sender_) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
}
if (fec_enabled) {
rtp_rtcp->SetGenericFECStatus(fec_enabled, payload_type_red,
payload_type_fec);
@ -444,12 +451,7 @@ int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
int8_t old_pltype = -1;
if (rtp_rtcp_->ReceivePayloadType(video_codec, &old_pltype) != -1) {
rtp_rtcp_->DeRegisterReceivePayload(old_pltype);
}
if (rtp_rtcp_->RegisterReceivePayload(video_codec) != 0) {
if (!vie_receiver_.SetReceiveCodec(video_codec)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not register receive payload type", __FUNCTION__);
return -1;
@ -659,8 +661,8 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
"%s: Could not enable NACK, RTPC not on ", __FUNCTION__);
return -1;
}
if (rtp_rtcp_->SetNACKStatus(nackMethod,
max_nack_reordering_threshold_) != 0) {
if (!vie_receiver_.SetNackStatus(true,
max_nack_reordering_threshold_)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not set NACK method %d", __FUNCTION__,
nackMethod);
@ -678,7 +680,6 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
it != simulcast_rtp_rtcp_.end();
it++) {
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetNACKStatus(nackMethod, max_nack_reordering_threshold_);
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
}
// Don't introduce errors when NACK is enabled.
@ -692,14 +693,13 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
if (paced_sender_ == NULL) {
rtp_rtcp->SetStorePacketsStatus(false, 0);
}
rtp_rtcp->SetNACKStatus(kNackOff, max_nack_reordering_threshold_);
}
vcm_.RegisterPacketRequestCallback(NULL);
if (paced_sender_ == NULL) {
rtp_rtcp_->SetStorePacketsStatus(false, 0);
}
if (rtp_rtcp_->SetNACKStatus(kNackOff,
max_nack_reordering_threshold_) != 0) {
if (!vie_receiver_.SetNackStatus(false,
max_nack_reordering_threshold_)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not turn off NACK", __FUNCTION__);
return -1;
@ -982,14 +982,15 @@ int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
}
int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
const uint32_t SSRC) const {
const uint32_t SSRC) {
WEBRTC_TRACE(webrtc::kTraceInfo,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(usage:%d, SSRC: 0x%x)",
__FUNCTION__, usage, SSRC);
return rtp_rtcp_->SetRTXReceiveStatus(true, SSRC);
vie_receiver_.SetRtxStatus(true, SSRC);
return 0;
}
// TODO(mflodman) Add kViEStreamTypeRtx.
@ -1019,7 +1020,7 @@ int32_t ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
*ssrc = rtp_rtcp_->RemoteSSRC();
*ssrc = vie_receiver_.GetRemoteSsrc();
return 0;
}
@ -1030,7 +1031,7 @@ int32_t ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
uint32_t arrayCSRC[kRtpCsrcSize];
memset(arrayCSRC, 0, sizeof(arrayCSRC));
int num_csrcs = rtp_rtcp_->RemoteCSRCs(arrayCSRC);
int num_csrcs = vie_receiver_.GetCsrcs(arrayCSRC);
if (num_csrcs > 0) {
memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(uint32_t));
for (int idx = 0; idx < num_csrcs; idx++) {
@ -1060,12 +1061,7 @@ int ViEChannel::SetRtxSendPayloadType(int payload_type) {
}
void ViEChannel::SetRtxReceivePayloadType(int payload_type) {
rtp_rtcp_->SetRtxReceivePayloadType(payload_type);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end(); it++) {
(*it)->SetRtxReceivePayloadType(payload_type);
}
vie_receiver_.SetRtxPayloadType(payload_type);
}
int32_t ViEChannel::SetStartSequenceNumber(uint16_t sequence_number) {
@ -1101,7 +1097,7 @@ int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
uint32_t remoteSSRC = rtp_rtcp_->RemoteSSRC();
uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
return rtp_rtcp_->RemoteCNAME(remoteSSRC, rtcp_cname);
}
@ -1208,7 +1204,7 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
// it++) {
// RtpRtcp* rtp_rtcp = *it;
// }
uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
// Get all RTCP receiver report blocks that have been received on this
// channel. If we receive RTP packets from a remote source we know the
@ -1251,24 +1247,33 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
return 0;
}
// TODO(holmer): This is a bad function name as it implies that it returns the
// received RTCP, while it actually returns the statistics which will be sent
// in the RTCP.
int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms) {
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
uint8_t frac_lost = 0;
if (rtp_rtcp_->StatisticsRTP(&frac_lost, cumulative_lost, extended_max,
jitter_samples) != 0) {
ReceiveStatistics* receive_statistics = vie_receiver_.GetReceiveStatistics();
ReceiveStatistics::RtpReceiveStatistics receive_stats;
if (!receive_statistics || !receive_statistics->Statistics(
&receive_stats, rtp_rtcp_->RTCP() == kRtcpOff)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not get received RTP statistics", __FUNCTION__);
return -1;
}
*fraction_lost = receive_stats.fraction_lost;
*cumulative_lost = receive_stats.cumulative_lost;
*extended_max = receive_stats.extended_max_sequence_number;
*jitter_samples = receive_stats.jitter;
*fraction_lost = frac_lost;
uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
uint16_t dummy = 0;
uint16_t rtt = 0;
if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {

@ -1280,16 +1285,15 @@ int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
}
int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (rtp_rtcp_->DataCountersRTP(bytes_sent,
packets_sent,
bytes_received,
packets_received) != 0) {
ReceiveStatistics* receive_statistics = vie_receiver_.GetReceiveStatistics();
receive_statistics->GetDataCounters(bytes_received, packets_received);
if (rtp_rtcp_->DataCountersRTP(bytes_sent, packets_sent) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not get counters", __FUNCTION__);
return -1;
@ -1301,7 +1305,7 @@ int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t bytes_sent_temp = 0;
uint32_t packets_sent_temp = 0;
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp, NULL, NULL);
rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp);
bytes_sent += bytes_sent_temp;
packets_sent += packets_sent_temp;
}
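
GetReceivedRtcpStatistics() and GetRtpStatistics() above now read the incoming-stream numbers from the ReceiveStatistics object owned by ViEReceiver; the RTP/RTCP module only keeps the send-side counters. A hedged sketch of that read-out; the helper name is an assumption, the Statistics()/GetDataCounters() calls and field names are the ones visible in the hunks.
// Sketch only: copying receive-side statistics out of ReceiveStatistics,
// as the two getters above do. FillReceiveStats is a hypothetical helper.
bool FillReceiveStats(webrtc::ReceiveStatistics* receive_statistics,
                      bool rtcp_off,
                      uint16_t* fraction_lost,
                      uint32_t* cumulative_lost,
                      uint32_t* extended_max,
                      uint32_t* jitter_samples,
                      uint32_t* bytes_received,
                      uint32_t* packets_received) {
  webrtc::ReceiveStatistics::RtpReceiveStatistics stats;
  // The flag mirrors the call above: rtp_rtcp_->RTCP() == kRtcpOff.
  if (!receive_statistics ||
      !receive_statistics->Statistics(&stats, rtcp_off)) {
    return false;
  }
  *fraction_lost = stats.fraction_lost;
  *cumulative_lost = stats.cumulative_lost;
  *extended_max = stats.extended_max_sequence_number;
  *jitter_samples = stats.jitter;
  // Byte/packet counters for the received stream also live here now; the
  // RTP/RTCP module's DataCountersRTP() only returns the sent counters.
  receive_statistics->GetDataCounters(bytes_received, packets_received);
  return true;
}
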
@ -1562,92 +1566,6 @@ uint16_t ViEChannel::MaxDataPayloadLength() const {
return rtp_rtcp_->MaxDataPayloadLength();
}
int32_t ViEChannel::SetPacketTimeoutNotification(
bool enable, uint32_t timeout_seconds) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (enable) {
uint32_t timeout_ms = 1000 * timeout_seconds;
if (rtp_rtcp_->SetPacketTimeout(timeout_ms, 0) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return -1;
}
} else {
if (rtp_rtcp_->SetPacketTimeout(0, 0) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return -1;
}
}
return 0;
}
int32_t ViEChannel::RegisterNetworkObserver(
ViENetworkObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer alread added", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer added", __FUNCTION__);
networkObserver_ = observer;
} else {
if (!networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: no observer added", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: observer removed", __FUNCTION__);
networkObserver_ = NULL;
}
return 0;
}
bool ViEChannel::NetworkObserverRegistered() {
CriticalSectionScoped cs(callback_cs_.get());
return networkObserver_ != NULL;
}
int32_t ViEChannel::SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
CriticalSectionScoped cs(callback_cs_.get());
if (!networkObserver_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: no observer added", __FUNCTION__);
return -1;
}
bool enabled = false;
uint8_t current_sampletime_seconds = 0;
// Get old settings.
rtp_rtcp_->PeriodicDeadOrAliveStatus(enabled, current_sampletime_seconds);
// Set new settings.
if (rtp_rtcp_->SetPeriodicDeadOrAliveStatus(
enable, static_cast<uint8_t>(sample_time_seconds)) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not set periodic dead-or-alive status",
__FUNCTION__);
return -1;
}
if (!enable) {
// Restore last utilized sample time.
// Without this trick, the sample time would always be reset to default
// (2 sec), each time dead-or-alive was disabled without sample-time
// parameter.
rtp_rtcp_->SetPeriodicDeadOrAliveStatus(enable, current_sampletime_seconds);
}
return 0;
}
int32_t ViEChannel::EnableColorEnhancement(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -1702,9 +1620,9 @@ int32_t ViEChannel::FrameToRender(
}
uint32_t arr_ofCSRC[kRtpCsrcSize];
int32_t no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
int32_t no_of_csrcs = vie_receiver_.GetCsrcs(arr_ofCSRC);
if (no_of_csrcs <= 0) {
arr_ofCSRC[0] = rtp_rtcp_->RemoteSSRC();
arr_ofCSRC[0] = vie_receiver_.GetRemoteSsrc();
no_of_csrcs = 1;
}
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1728,8 +1646,8 @@ int32_t ViEChannel::StoreReceivedFrame(
return 0;
}
int32_t ViEChannel::ReceiveStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
int32_t ViEChannel::OnReceiveStatisticsUpdate(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1882,8 +1800,10 @@ int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
} else {
module_process_thread_.DeRegisterModule(&vie_sync_);
}
return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
rtp_rtcp_.get());
return vie_sync_.ConfigureSync(ve_channel_id,
ve_sync_interface,
rtp_rtcp_.get(),
vie_receiver_.GetRtpReceiver());
}
int32_t ViEChannel::VoiceChannel() {
@ -1954,52 +1874,6 @@ int32_t ViEChannel::OnInitializeDecoder(
return 0;
}
void ViEChannel::OnPacketTimeout(const int32_t id) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
CriticalSectionScoped cs(callback_cs_.get());
if (networkObserver_) {
networkObserver_->PacketTimeout(channel_id_, NoPacket);
rtp_packet_timeout_ = true;
}
}
void ViEChannel::OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (rtp_packet_timeout_ && packet_type == kPacketRtp) {
CriticalSectionScoped cs(callback_cs_.get());
if (networkObserver_) {
networkObserver_->PacketTimeout(channel_id_, PacketReceived);
}
// Reset even if no observer set, might have been removed during timeout.
rtp_packet_timeout_ = false;
}
}
void ViEChannel::OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(id=%d, alive=%d)", __FUNCTION__, id, alive);
CriticalSectionScoped cs(callback_cs_.get());
if (!networkObserver_) {
return;
}
bool is_alive = true;
if (alive == kRtpDead) {
is_alive = false;
}
networkObserver_->OnPeriodicDeadOrAlive(channel_id_, is_alive);
return;
}
void ViEChannel::OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
if (channel_id_ != ChannelId(id)) {
@ -2012,6 +1886,8 @@ void ViEChannel::OnIncomingSSRCChanged(const int32_t id,
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %u", __FUNCTION__, SSRC);
rtp_rtcp_->SetRemoteSSRC(SSRC);
CriticalSectionScoped cs(callback_cs_.get());
{
if (rtp_observer_) {
@ -2044,4 +1920,8 @@ void ViEChannel::OnIncomingCSRCChanged(const int32_t id,
}
}
void ViEChannel::ResetStatistics() {
vie_receiver_.GetReceiveStatistics()->ResetStatistics();
}
} // namespace webrtc

View File

@ -41,7 +41,6 @@ class RtpRtcp;
class ThreadWrapper;
class ViEDecoderObserver;
class ViEEffectFilter;
class ViENetworkObserver;
class ViERTCPObserver;
class ViERTPObserver;
class VideoCodingModule;
@ -209,16 +208,12 @@ class ViEChannel
const int frequency,
const uint8_t channels,
const uint32_t rate);
virtual void OnPacketTimeout(const int32_t id);
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type);
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive);
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC);
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added);
virtual void ResetStatistics();
int32_t SetLocalReceiver(const uint16_t rtp_port,
const uint16_t rtcp_port,
@ -241,7 +236,7 @@ class ViEChannel
char* ip_address,
uint32_t ip_address_length);
int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC) const;
int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);
int32_t StartSend();
int32_t StopSend();
@ -270,12 +265,6 @@ class ViEChannel
int32_t SetMaxPacketBurstSize(uint16_t max_number_of_packets);
int32_t SetPacketBurstSpreadState(bool enable, const uint16_t frame_periodMS);
int32_t SetPacketTimeoutNotification(bool enable, uint32_t timeout_seconds);
int32_t RegisterNetworkObserver(ViENetworkObserver* observer);
bool NetworkObserverRegistered();
int32_t SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds);
int32_t EnableColorEnhancement(bool enable);
// Gets the modules used by the channel.
@ -298,7 +287,7 @@ class ViEChannel
const EncodedVideoData& frame_to_store);
// Implements VideoReceiveStatisticsCallback.
virtual int32_t ReceiveStatistics(const uint32_t bit_rate,
virtual int32_t OnReceiveStatisticsUpdate(const uint32_t bit_rate,
const uint32_t frame_rate);
// Implements VideoFrameTypeCallback.
@ -371,13 +360,11 @@ class ViEChannel
bool do_key_frame_callbackRequest_;
ViERTPObserver* rtp_observer_;
ViERTCPObserver* rtcp_observer_;
ViENetworkObserver* networkObserver_;
RtcpIntraFrameObserver* intra_frame_observer_;
RtcpRttObserver* rtt_observer_;
PacedSender* paced_sender_;
scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
bool rtp_packet_timeout_;
int send_timestamp_extension_id_;
int absolute_send_time_extension_id_;
bool receive_absolute_send_time_enabled_;

View File

@ -246,7 +246,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
shared_data_->channel_manager()->UpdateSsrcs(video_channel, ssrcs);
// Update the protection mode, we might be switching NACK/FEC.
vie_encoder->UpdateProtectionMethod();
vie_encoder->UpdateProtectionMethod(vie_encoder->nack_enabled());
// Get new best format for frame provider.
ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);

View File

@ -726,7 +726,7 @@ int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
return 0;
}
int32_t ViEEncoder::UpdateProtectionMethod() {
int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
bool fec_enabled = false;
uint8_t dummy_ptype_red = 0;
uint8_t dummy_ptypeFEC = 0;
@ -739,25 +739,23 @@ int32_t ViEEncoder::UpdateProtectionMethod() {
if (error) {
return -1;
}
bool nack_enabled = (default_rtp_rtcp_->NACK() == kNackOff) ? false : true;
if (fec_enabled_ == fec_enabled && nack_enabled_ == nack_enabled) {
if (fec_enabled_ == fec_enabled && nack_enabled_ == enable_nack) {
// No change needed, we're already in correct state.
return 0;
}
fec_enabled_ = fec_enabled;
nack_enabled_ = nack_enabled;
nack_enabled_ = enable_nack;
// Set Video Protection for VCM.
if (fec_enabled && nack_enabled) {
if (fec_enabled && nack_enabled_) {
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, true);
} else {
vcm_.SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNack, nack_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackSender, nack_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, false);
}
if (fec_enabled || nack_enabled) {
if (fec_enabled_ || nack_enabled_) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: FEC status ",
__FUNCTION__, fec_enabled);
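
UpdateProtectionMethod() now takes the NACK state from its caller instead of reading it back from the RTP module, and the VCM protection mode follows directly from the (FEC, NACK) pair. A condensed, hedged sketch of that mapping; ApplyVcmProtection is a hypothetical helper, the SetVideoProtection calls are the ones in the hunk above.
// Sketch only: the protection-mode selection performed by
// ViEEncoder::UpdateProtectionMethod above.
void ApplyVcmProtection(webrtc::VideoCodingModule* vcm,
                        bool fec_enabled,
                        bool nack_enabled) {
  if (fec_enabled && nack_enabled) {
    // Both requested: use the combined hybrid NACK/FEC method.
    vcm->SetVideoProtection(webrtc::kProtectionNackFEC, true);
  } else {
    // Otherwise enable each method according to its own flag and make sure
    // the hybrid method is switched off.
    vcm->SetVideoProtection(webrtc::kProtectionFEC, fec_enabled);
    vcm->SetVideoProtection(webrtc::kProtectionNack, nack_enabled);
    vcm->SetVideoProtection(webrtc::kProtectionNackSender, nack_enabled);
    vcm->SetVideoProtection(webrtc::kProtectionNackFEC, false);
  }
}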

View File

@ -114,7 +114,8 @@ class ViEEncoder
int CodecTargetBitrate(uint32_t* bitrate) const;
// Loss protection.
int32_t UpdateProtectionMethod();
int32_t UpdateProtectionMethod(bool enable_nack);
bool nack_enabled() const { return nack_enabled_; }
// Buffering mode.
void SetSenderBufferingMode(int target_delay_ms);

View File

@ -196,99 +196,4 @@ int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
}
return 0;
}
int ViENetworkImpl::SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d, enable: %d, timeout_seconds: %u)",
__FUNCTION__, video_channel, enable, timeout_seconds);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->SetPacketTimeoutNotification(enable,
timeout_seconds) != 0) {
shared_data_->SetLastError(kViENetworkUnknownError);
return -1;
}
return 0;
}
int ViENetworkImpl::RegisterObserver(const int video_channel,
ViENetworkObserver& observer) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->RegisterNetworkObserver(&observer) != 0) {
shared_data_->SetLastError(kViENetworkObserverAlreadyRegistered);
return -1;
}
return 0;
}
int ViENetworkImpl::DeregisterObserver(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (!vie_channel->NetworkObserverRegistered()) {
shared_data_->SetLastError(kViENetworkObserverNotRegistered);
return -1;
}
return vie_channel->RegisterNetworkObserver(NULL);
}
int ViENetworkImpl::SetPeriodicDeadOrAliveStatus(
const int video_channel,
bool enable,
unsigned int sample_time_seconds) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d, enable: %d, sample_time_seconds: %ul)",
__FUNCTION__, video_channel, enable, sample_time_seconds);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (!vie_channel->NetworkObserverRegistered()) {
shared_data_->SetLastError(kViENetworkObserverNotRegistered);
return -1;
}
if (vie_channel->SetPeriodicDeadOrAliveStatus(enable, sample_time_seconds)
!= 0) {
shared_data_->SetLastError(kViENetworkUnknownError);
return -1;
}
return 0;
}
} // namespace webrtc

View File

@ -37,16 +37,6 @@ class ViENetworkImpl
const void* data,
const int length);
virtual int SetMTU(int video_channel, unsigned int mtu);
virtual int SetPacketTimeoutNotification(const int video_channel,
bool enable,
int timeout_seconds);
virtual int RegisterObserver(const int video_channel,
ViENetworkObserver& observer);
virtual int DeregisterObserver(const int video_channel);
virtual int SetPeriodicDeadOrAliveStatus(
const int video_channel,
const bool enable,
const unsigned int sample_time_seconds);
protected:
explicit ViENetworkImpl(ViESharedData* shared_data);

View File

@ -13,7 +13,10 @@
#include <vector>
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
@ -25,10 +28,18 @@ namespace webrtc {
ViEReceiver::ViEReceiver(const int32_t channel_id,
VideoCodingModule* module_vcm,
RemoteBitrateEstimator* remote_bitrate_estimator)
RemoteBitrateEstimator* remote_bitrate_estimator,
RtpFeedback* rtp_feedback)
: receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
channel_id_(channel_id),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(new RTPPayloadRegistry(
channel_id, RTPPayloadStrategy::CreateStrategy(false))),
rtp_receiver_(RtpReceiver::CreateVideoReceiver(
channel_id, Clock::GetRealTimeClock(), this, rtp_feedback,
rtp_payload_registry_.get())),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
rtp_rtcp_(NULL),
vcm_(module_vcm),
remote_bitrate_estimator_(remote_bitrate_estimator),
@ -51,6 +62,49 @@ ViEReceiver::~ViEReceiver() {
}
}
bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
int8_t old_pltype = -1;
if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
kVideoPayloadTypeFrequency,
0,
video_codec.maxBitrate,
&old_pltype) != -1) {
rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
}
return RegisterPayload(video_codec);
}
bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
video_codec.plType,
kVideoPayloadTypeFrequency,
0,
video_codec.maxBitrate) == 0;
}
bool ViEReceiver::SetNackStatus(bool enable,
int max_nack_reordering_threshold) {
return rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
max_nack_reordering_threshold) == 0;
}
void ViEReceiver::SetRtxStatus(bool enable, uint32_t ssrc) {
rtp_receiver_->SetRTXStatus(true, ssrc);
}
void ViEReceiver::SetRtxPayloadType(uint32_t payload_type) {
rtp_receiver_->SetRtxPayloadType(payload_type);
}
uint32_t ViEReceiver::GetRemoteSsrc() const {
return rtp_receiver_->SSRC();
}
int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
return rtp_receiver_->CSRCs(csrcs);
}
int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
CriticalSectionScoped cs(receive_cs_.get());
if (external_decryption_) {
@ -77,6 +131,10 @@ void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
rtp_rtcp_ = module;
}
RtpReceiver* ViEReceiver::GetRtpReceiver() const {
return rtp_receiver_.get();
}
void ViEReceiver::RegisterSimulcastRtpRtcpModules(
const std::list<RtpRtcp*>& rtp_modules) {
CriticalSectionScoped cs(receive_cs_.get());
@ -134,6 +192,25 @@ int32_t ViEReceiver::OnReceivedPayloadData(
return 0;
}
bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
int rtp_packet_length) {
RTPHeader header;
if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVideo, channel_id_,
"IncomingPacket invalid RTP header");
return false;
}
header.payload_type_frequency = kVideoPayloadTypeFrequency;
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return false;
}
return rtp_receiver_->IncomingRtpPacket(&header, rtp_packet,
rtp_packet_length,
payload_specific, false);
}
int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
int rtp_packet_length) {
// TODO(mflodman) Change decrypt to get rid of this cast.
@ -182,9 +259,19 @@ int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
const int payload_size = received_packet_length - header.headerLength;
remote_bitrate_estimator_->IncomingPacket(TickTime::MillisecondTimestamp(),
payload_size, header);
assert(rtp_rtcp_); // Should be set by owner at construction time.
return rtp_rtcp_->IncomingRtpPacket(received_packet, received_packet_length,
header);
header.payload_type_frequency = kVideoPayloadTypeFrequency;
bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
bool retransmitted = !in_order && IsPacketRetransmitted(header);
rtp_receive_statistics_->IncomingPacket(header, received_packet_length,
retransmitted, in_order);
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return -1;
}
return rtp_receiver_->IncomingRtpPacket(&header, received_packet,
received_packet_length,
payload_specific, in_order) ? 0 : -1;
}
int ViEReceiver::InsertRTCPPacket(const int8_t* rtcp_packet,
@ -298,7 +385,7 @@ void ViEReceiver::EstimatedReceiveBandwidth(
// LatestEstimate returns an error if there is no valid bitrate estimate, but
// ViEReceiver instead returns a zero estimate.
remote_bitrate_estimator_->LatestEstimate(&ssrcs, available_bandwidth);
if (std::find(ssrcs.begin(), ssrcs.end(), rtp_rtcp_->RemoteSSRC()) !=
if (std::find(ssrcs.begin(), ssrcs.end(), rtp_receiver_->SSRC()) !=
ssrcs.end()) {
*available_bandwidth /= ssrcs.size();
} else {
@ -306,4 +393,25 @@ void ViEReceiver::EstimatedReceiveBandwidth(
}
}
ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
return rtp_receive_statistics_.get();
}
bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header) const {
bool rtx_enabled = false;
uint32_t rtx_ssrc = 0;
int rtx_payload_type = 0;
rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
if (!rtx_enabled) {
// Check if this is a retransmission.
ReceiveStatistics::RtpReceiveStatistics stats;
if (rtp_receive_statistics_->Statistics(&stats, false)) {
uint16_t min_rtt = 0;
rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
min_rtt);
}
}
return false;
}
} // namespace webrtc
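
ViEReceiver::InsertRTPPacket and the new IsPacketRetransmitted above classify each incoming packet as in-order or retransmitted before updating ReceiveStatistics. A hedged sketch of that per-packet bookkeeping; the helper name is an assumption, the member calls are those shown in the hunks.
// Sketch only: the per-packet classification feeding ReceiveStatistics,
// condensed from InsertRTPPacket/IsPacketRetransmitted above.
// UpdateIncomingPacketStats is a hypothetical free function.
void UpdateIncomingPacketStats(webrtc::RtpReceiver* rtp_receiver,
                               webrtc::RtpRtcp* rtp_rtcp,
                               webrtc::ReceiveStatistics* statistics,
                               const webrtc::RTPHeader& header,
                               int packet_length) {
  bool in_order = rtp_receiver->InOrderPacket(header.sequenceNumber);
  bool retransmitted = false;
  if (!in_order) {
    // Out-of-order packets are only classified as retransmissions when RTX is
    // not in use; in that case the receiver's RetransmitOfOldPacket()
    // heuristic decides, given the current jitter estimate and minimum RTT.
    bool rtx_enabled = false;
    uint32_t rtx_ssrc = 0;
    int rtx_payload_type = 0;
    rtp_receiver->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
    webrtc::ReceiveStatistics::RtpReceiveStatistics stats;
    if (!rtx_enabled && statistics->Statistics(&stats, false)) {
      uint16_t min_rtt = 0;
      rtp_rtcp->RTT(rtp_receiver->SSRC(), NULL, NULL, &min_rtt, NULL);
      retransmitted = rtp_receiver->RetransmitOfOldPacket(header, stats.jitter,
                                                          min_rtt);
    }
  }
  statistics->IncomingPacket(header, packet_length, retransmitted, in_order);
}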

View File

@ -14,6 +14,7 @@
#include <list>
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@ -23,23 +24,39 @@ namespace webrtc {
class CriticalSectionWrapper;
class Encryption;
class ReceiveStatistics;
class RemoteBitrateEstimator;
class RtpDump;
class RtpHeaderParser;
class RTPPayloadRegistry;
class RtpReceiver;
class RtpRtcp;
class VideoCodingModule;
class ViEReceiver : public RtpData {
public:
ViEReceiver(const int32_t channel_id, VideoCodingModule* module_vcm,
RemoteBitrateEstimator* remote_bitrate_estimator);
RemoteBitrateEstimator* remote_bitrate_estimator,
RtpFeedback* rtp_feedback);
~ViEReceiver();
bool SetReceiveCodec(const VideoCodec& video_codec);
bool RegisterPayload(const VideoCodec& video_codec);
bool SetNackStatus(bool enable, int max_nack_reordering_threshold);
void SetRtxStatus(bool enable, uint32_t ssrc);
void SetRtxPayloadType(uint32_t payload_type);
uint32_t GetRemoteSsrc() const;
int GetCsrcs(uint32_t* csrcs) const;
int RegisterExternalDecryption(Encryption* decryption);
int DeregisterExternalDecryption();
void SetRtpRtcpModule(RtpRtcp* module);
RtpReceiver* GetRtpReceiver() const;
void RegisterSimulcastRtpRtcpModules(const std::list<RtpRtcp*>& rtp_modules);
bool SetReceiveTimestampOffsetStatus(bool enable, int id);
@ -54,6 +71,8 @@ class ViEReceiver : public RtpData {
// Receives packets from external transport.
int ReceivedRTPPacket(const void* rtp_packet, int rtp_packet_length);
int ReceivedRTCPPacket(const void* rtcp_packet, int rtcp_packet_length);
virtual bool OnRecoveredPacket(const uint8_t* packet,
int packet_length) OVERRIDE;
// Implements RtpData.
virtual int32_t OnReceivedPayloadData(
@ -63,13 +82,19 @@ class ViEReceiver : public RtpData {
void EstimatedReceiveBandwidth(unsigned int* available_bandwidth) const;
ReceiveStatistics* GetReceiveStatistics() const;
private:
int InsertRTPPacket(const int8_t* rtp_packet, int rtp_packet_length);
int InsertRTCPPacket(const int8_t* rtcp_packet, int rtcp_packet_length);
bool IsPacketRetransmitted(const RTPHeader& header) const;
scoped_ptr<CriticalSectionWrapper> receive_cs_;
const int32_t channel_id_;
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
RtpRtcp* rtp_rtcp_;
std::list<RtpRtcp*> rtp_rtcp_simulcast_;
VideoCodingModule* vcm_;

View File

@ -501,7 +501,7 @@ int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod();
vie_encoder->UpdateProtectionMethod(enable);
return 0;
}
@ -542,7 +542,7 @@ int ViERTP_RTCPImpl::SetFECStatus(const int video_channel, const bool enable,
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod();
vie_encoder->UpdateProtectionMethod(false);
return 0;
}
@ -587,7 +587,7 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
vie_encoder->UpdateProtectionMethod();
vie_encoder->UpdateProtectionMethod(enable);
return 0;
}

View File

@ -10,6 +10,7 @@
#include "webrtc/video_engine/vie_sync_module.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@ -24,15 +25,15 @@ namespace webrtc {
enum { kSyncInterval = 1000};
int UpdateMeasurements(StreamSynchronization::Measurements* stream,
const RtpRtcp* rtp_rtcp) {
stream->latest_timestamp = rtp_rtcp->RemoteTimestamp();
stream->latest_receive_time_ms = rtp_rtcp->LocalTimeOfRemoteTimeStamp();
const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
stream->latest_timestamp = receiver.Timestamp();
stream->latest_receive_time_ms = receiver.LastReceivedTimeMs();
synchronization::RtcpMeasurement measurement;
if (0 != rtp_rtcp->RemoteNTP(&measurement.ntp_secs,
&measurement.ntp_frac,
NULL,
NULL,
&measurement.rtp_timestamp)) {
if (0 != rtp_rtcp.RemoteNTP(&measurement.ntp_secs,
&measurement.ntp_frac,
NULL,
NULL,
&measurement.rtp_timestamp)) {
return -1;
}
if (measurement.ntp_secs == 0 && measurement.ntp_frac == 0) {
@ -60,6 +61,7 @@ ViESyncModule::ViESyncModule(VideoCodingModule* vcm,
: data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
vcm_(vcm),
vie_channel_(vie_channel),
video_receiver_(NULL),
video_rtp_rtcp_(NULL),
voe_channel_id_(-1),
voe_sync_interface_(NULL),
@ -72,10 +74,12 @@ ViESyncModule::~ViESyncModule() {
int ViESyncModule::ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module) {
RtpRtcp* video_rtcp_module,
RtpReceiver* video_receiver) {
CriticalSectionScoped cs(data_cs_.get());
voe_channel_id_ = voe_channel_id;
voe_sync_interface_ = voe_sync_interface;
video_receiver_ = video_receiver;
video_rtp_rtcp_ = video_rtcp_module;
sync_.reset(new StreamSynchronization(voe_channel_id, vie_channel_->Id()));
@ -129,16 +133,21 @@ int32_t ViESyncModule::Process() {
playout_buffer_delay_ms;
RtpRtcp* voice_rtp_rtcp = NULL;
if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, voice_rtp_rtcp)) {
RtpReceiver* voice_receiver = NULL;
if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
&voice_receiver)) {
return 0;
}
assert(voice_rtp_rtcp);
assert(voice_receiver);
if (UpdateMeasurements(&video_measurement_, video_rtp_rtcp_) != 0) {
if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
*video_receiver_) != 0) {
return 0;
}
if (UpdateMeasurements(&audio_measurement_, voice_rtp_rtcp) != 0) {
if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
*voice_receiver) != 0) {
return 0;
}
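
After this change the sync path needs two objects per stream: the RtpRtcp module still provides the RTCP NTP mapping while the RtpReceiver provides the latest RTP timestamp and its local receive time (hence the extra out-parameter in VoEVideoSync::GetRtpRtcp). A hedged sketch of refreshing one audio measurement with the calls shown above; the helper name is an assumption and storing the finished RtcpMeasurement is left to UpdateMeasurements.
// Sketch only: one audio sync-measurement refresh using the split interfaces.
// RefreshAudioMeasurement is a hypothetical helper.
int RefreshAudioMeasurement(
    webrtc::VoEVideoSync* voe_sync,
    int voe_channel_id,
    webrtc::StreamSynchronization::Measurements* audio) {
  webrtc::RtpRtcp* voice_rtp_rtcp = NULL;
  webrtc::RtpReceiver* voice_receiver = NULL;
  if (voe_sync->GetRtpRtcp(voe_channel_id, &voice_rtp_rtcp,
                           &voice_receiver) != 0) {
    return -1;
  }
  // Latest timestamp and local receive time come from the receiver...
  audio->latest_timestamp = voice_receiver->Timestamp();
  audio->latest_receive_time_ms = voice_receiver->LastReceivedTimeMs();
  // ...while the NTP <-> RTP timestamp mapping still comes from RTCP.
  webrtc::synchronization::RtcpMeasurement measurement;
  if (voice_rtp_rtcp->RemoteNTP(&measurement.ntp_secs,
                                &measurement.ntp_frac,
                                NULL,
                                NULL,
                                &measurement.rtp_timestamp) != 0) {
    return -1;
  }
  return 0;
}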

View File

@ -36,7 +36,8 @@ class ViESyncModule : public Module {
int ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module);
RtpRtcp* video_rtcp_module,
RtpReceiver* video_receiver);
int VoiceChannel();
@ -51,6 +52,7 @@ class ViESyncModule : public Module {
scoped_ptr<CriticalSectionWrapper> data_cs_;
VideoCodingModule* vcm_;
ViEChannel* vie_channel_;
RtpReceiver* video_receiver_;
RtpRtcp* video_rtp_rtcp_;
int voe_channel_id_;
VoEVideoSync* voe_sync_interface_;

View File

@ -12,6 +12,10 @@
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
@ -367,8 +371,8 @@ Channel::OnIncomingSSRCChanged(int32_t id,
assert(channel == _channelId);
// Reset RTP-module counters since a new incoming RTP stream is detected
_rtpRtcpModule->ResetReceiveDataCountersRTP();
_rtpRtcpModule->ResetStatisticsRTP();
rtp_receive_statistics_->ResetDataCounters();
rtp_receive_statistics_->ResetStatistics();
if (_rtpObserver)
{
@ -404,6 +408,10 @@ void Channel::OnIncomingCSRCChanged(int32_t id,
}
}
void Channel::ResetStatistics() {
rtp_receive_statistics_->ResetStatistics();
}
void
Channel::OnApplicationDataReceived(int32_t id,
uint8_t subType,
@ -629,18 +637,16 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
UpdatePacketDelay(rtpHeader->header.timestamp,
rtpHeader->header.sequenceNumber);
if (kNackOff != _rtpRtcpModule->NACK()) { // Is NACK on?
uint16_t round_trip_time = 0;
_rtpRtcpModule->RTT(_rtpRtcpModule->RemoteSSRC(), &round_trip_time,
NULL, NULL, NULL);
uint16_t round_trip_time = 0;
_rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
NULL, NULL, NULL);
std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
round_trip_time);
if (!nack_list.empty()) {
// Can't use nack_list.data() since it's not supported by all
// compilers.
ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
}
std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
round_trip_time);
if (!nack_list.empty()) {
// Can't use nack_list.data() since it's not supported by all
// compilers.
ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
}
return 0;
}
@ -883,6 +889,15 @@ Channel::Channel(int32_t channelId,
_instanceId(instanceId),
_channelId(channelId),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(
new RTPPayloadRegistry(channelId,
RTPPayloadStrategy::CreateStrategy(true))),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
rtp_receiver_(RtpReceiver::CreateAudioReceiver(
VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
this, this, rtp_payload_registry_.get())),
telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
_audioCodingModule(*AudioCodingModule::Create(
VoEModuleId(instanceId, channelId))),
_rtpDumpIn(*RtpDump::CreateRtpDump()),
@ -983,11 +998,10 @@ Channel::Channel(int32_t channelId,
RtpRtcp::Configuration configuration;
configuration.id = VoEModuleId(instanceId, channelId);
configuration.audio = true;
configuration.incoming_data = this;
configuration.incoming_messages = this;
configuration.outgoing_transport = this;
configuration.rtcp_feedback = this;
configuration.audio_messages = this;
configuration.receive_statistics = rtp_receive_statistics_.get();
_rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
@ -1135,12 +1149,9 @@ Channel::Init()
// disabled by the user.
// After StopListen (when no sockets exist), RTCP packets will no longer
// be transmitted since the Transport object will then be invalid.
const bool rtpRtcpFail =
((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
// RTCP is enabled by default
(_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
if (rtpRtcpFail)
telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
// RTCP is enabled by default.
if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@ -1171,7 +1182,12 @@ Channel::Init()
{
// Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
(_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
(rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) == -1))
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId),
@ -1494,12 +1510,7 @@ Channel::StopReceiving()
}
// Recover DTMF detection status.
int32_t ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
if (ret != 0) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"StopReceiving() failed to restore telephone-event status.");
}
telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
RegisterReceiveCodecsToRTPModule();
_receiving = false;
return 0;
@ -1751,10 +1762,15 @@ Channel::SetRecPayloadType(const CodecInst& codec)
CodecInst rxCodec = codec;
// Get payload type for the given codec
_rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
rtp_payload_registry_->ReceivePayloadType(
rxCodec.plname,
rxCodec.plfreq,
rxCodec.channels,
(rxCodec.rate < 0) ? 0 : rxCodec.rate,
&pltype);
rxCodec.pltype = pltype;
if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR,
@ -1773,11 +1789,21 @@ Channel::SetRecPayloadType(const CodecInst& codec)
return 0;
}
if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
if (rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) != 0)
{
// First attempt to register failed => de-register and try again
_rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
if (rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@ -1805,7 +1831,12 @@ Channel::GetRecPayloadType(CodecInst& codec)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetRecPayloadType()");
int8_t payloadType(-1);
if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
if (rtp_payload_registry_->ReceivePayloadType(
codec.plname,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate,
&payloadType) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
@ -2165,12 +2196,27 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
"IncomingPacket invalid RTP header");
return -1;
}
header.payload_type_frequency =
rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
if (header.payload_type_frequency < 0) {
return -1;
}
bool retransmitted = IsPacketRetransmitted(header);
bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
rtp_receive_statistics_->IncomingPacket(header, static_cast<uint16_t>(length),
retransmitted, in_order);
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return -1;
}
// Deliver RTP packet to RTP/RTCP module for parsing
// The packet will be pushed back to the channel thru the
// OnReceivedPayloadData callback so we don't push it to the ACM here
if (_rtpRtcpModule->IncomingRtpPacket(reinterpret_cast<const uint8_t*>(data),
if (!rtp_receiver_->IncomingRtpPacket(&header,
reinterpret_cast<const uint8_t*>(data),
static_cast<uint16_t>(length),
header) == -1) {
payload_specific, in_order)) {
_engineStatisticsPtr->SetLastError(
VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
"Channel::IncomingRTPPacket() RTP packet is invalid");
@ -2178,6 +2224,24 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
return 0;
}
bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
bool rtx_enabled = false;
uint32_t rtx_ssrc = 0;
int rtx_payload_type = 0;
rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
if (!rtx_enabled) {
// Check if this is a retransmission.
ReceiveStatistics::RtpReceiveStatistics stats;
if (rtp_receive_statistics_->Statistics(&stats, false)) {
uint16_t min_rtt = 0;
_rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
min_rtt);
}
}
return false;
}
int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ReceivedRTCPPacket()");
@ -2202,141 +2266,6 @@ int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
return 0;
}
int32_t
Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetPacketTimeoutNotification()");
if (enable)
{
const uint32_t RTPtimeoutMS = 1000*timeoutSeconds;
const uint32_t RTCPtimeoutMS = 0;
_rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
_rtpPacketTimeOutIsEnabled = true;
_rtpTimeOutSeconds = timeoutSeconds;
}
else
{
_rtpRtcpModule->SetPacketTimeout(0, 0);
_rtpPacketTimeOutIsEnabled = false;
_rtpTimeOutSeconds = 0;
}
return 0;
}
int32_t
Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetPacketTimeoutNotification()");
enabled = _rtpPacketTimeOutIsEnabled;
if (enabled)
{
timeoutSeconds = _rtpTimeOutSeconds;
}
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
"GetPacketTimeoutNotification() => enabled=%d,"
" timeoutSeconds=%d",
enabled, timeoutSeconds);
return 0;
}
int32_t
Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::RegisterDeadOrAliveObserver()");
CriticalSectionScoped cs(&_callbackCritSect);
if (_connectionObserverPtr)
{
_engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
"RegisterDeadOrAliveObserver() observer already enabled");
return -1;
}
_connectionObserverPtr = &observer;
_connectionObserver = true;
return 0;
}
int32_t
Channel::DeRegisterDeadOrAliveObserver()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterDeadOrAliveObserver()");
CriticalSectionScoped cs(&_callbackCritSect);
if (!_connectionObserverPtr)
{
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"DeRegisterDeadOrAliveObserver() observer already disabled");
return 0;
}
_connectionObserver = false;
_connectionObserverPtr = NULL;
return 0;
}
int32_t
Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetPeriodicDeadOrAliveStatus()");
if (!_connectionObserverPtr)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
"SetPeriodicDeadOrAliveStatus() connection observer has"
" not been registered");
}
if (enable)
{
ResetDeadOrAliveCounters();
}
bool enabled(false);
uint8_t currentSampleTimeSec(0);
// Store last state (will be used later if dead-or-alive is disabled).
_rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
// Update the dead-or-alive state.
if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
enable, (uint8_t)sampleTimeSeconds) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR,
kTraceError,
"SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
"status");
return -1;
}
if (!enable)
{
// Restore last utilized sample time.
// Without this, the sample time would always be reset to default
// (2 sec), each time dead-or-alive was disabled without sample-time
// parameter.
_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
currentSampleTimeSec);
}
return 0;
}
int32_t
Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
{
_rtpRtcpModule->PeriodicDeadOrAliveStatus(
enabled,
(uint8_t&)sampleTimeSeconds);
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
"GetPeriodicDeadOrAliveStatus() => enabled=%d,"
" sampleTimeSeconds=%d",
enabled, sampleTimeSeconds);
return 0;
}
int Channel::StartPlayingFileLocally(const char* fileName,
bool loop,
FileFormats format,
@ -3145,8 +3074,8 @@ Channel::DeRegisterExternalEncryption()
}
int Channel::SendTelephoneEventOutband(unsigned char eventCode,
int lengthMs, int attenuationDb,
bool playDtmfEvent)
int lengthMs, int attenuationDb,
bool playDtmfEvent)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
@ -3675,7 +3604,7 @@ Channel::GetLocalSSRC(unsigned int& ssrc)
int
Channel::GetRemoteSSRC(unsigned int& ssrc)
{
ssrc = _rtpRtcpModule->RemoteSSRC();
ssrc = rtp_receiver_->SSRC();
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"GetRemoteSSRC() => ssrc=%lu", ssrc);
@ -3823,7 +3752,7 @@ Channel::GetRemoteRTCP_CNAME(char cName[256])
return -1;
}
char cname[RTCP_CNAME_SIZE];
const uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
const uint32_t remoteSSRC = rtp_receiver_->SSRC();
if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
{
_engineStatisticsPtr->SetLastError(
@ -3898,7 +3827,7 @@ Channel::GetRemoteRTCPData(
return -1;
}
uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
uint32_t remoteSSRC = rtp_receiver_->SSRC();
std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
for (; it != remote_stats.end(); ++it) {
if (it->remoteSSRC == remoteSSRC)
@ -3990,24 +3919,15 @@ Channel::GetRTPStatistics(
unsigned int& maxJitterMs,
unsigned int& discardedPackets)
{
uint8_t fraction_lost(0);
uint32_t cum_lost(0);
uint32_t ext_max(0);
uint32_t jitter(0);
uint32_t max_jitter(0);
// The jitter statistics is updated for each received RTP packet and is
// based on received packets.
if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
"RTP/RTCP module");
ReceiveStatistics::RtpReceiveStatistics statistics;
if (!rtp_receive_statistics_->Statistics(
&statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
"RTP/RTCP module");
}
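// Assumption (inferred from this call site, not from the ReceiveStatistics
// documentation): the second argument to Statistics() acts as a reset flag,
// so the counters are only reset here when RTCP is off and no RTCP report
// generation will consume them.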
const int32_t playoutFrequency =
@ -4015,8 +3935,8 @@ Channel::GetRTPStatistics(
if (playoutFrequency > 0)
{
// Scale RTP statistics given the current playout frequency
maxJitterMs = max_jitter / (playoutFrequency / 1000);
averageJitterMs = jitter / (playoutFrequency / 1000);
maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
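// The jitter values from the receive statistics are in RTP timestamp units,
// so dividing by (playoutFrequency / 1000) converts them to milliseconds.
// Hypothetical example (not from this change): with a 16000 Hz playout
// frequency, a jitter of 480 timestamp units becomes
// 480 / (16000 / 1000) = 30 ms.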
}
discardedPackets = _numberOfDiscardedPackets;
@ -4092,32 +4012,23 @@ int Channel::GetRemoteRTCPReportBlocks(
int
Channel::GetRTPStatistics(CallStatistics& stats)
{
uint8_t fraction_lost(0);
uint32_t cum_lost(0);
uint32_t ext_max(0);
uint32_t jitter(0);
uint32_t max_jitter(0);
// --- Part one of the final structure (four values)
// The jitter statistics are updated for each received RTP packet and are
// based on received packets.
if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
&cum_lost,
&ext_max,
&jitter,
&max_jitter) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
"RTP/RTCP module");
ReceiveStatistics::RtpReceiveStatistics statistics;
if (!rtp_receive_statistics_->Statistics(
&statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
"GetRTPStatistics() failed to read RTP statistics from the "
"RTP/RTCP module");
}
stats.fractionLost = fraction_lost;
stats.cumulativeLost = cum_lost;
stats.extendedMax = ext_max;
stats.jitterSamples = jitter;
stats.fractionLost = statistics.fraction_lost;
stats.cumulativeLost = statistics.cumulative_lost;
stats.extendedMax = statistics.extended_max_sequence_number;
stats.jitterSamples = statistics.jitter;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId, _channelId),
@ -4139,7 +4050,7 @@ Channel::GetRTPStatistics(CallStatistics& stats)
} else
{
// The remote SSRC will be zero if no RTP packet has been received.
uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
uint32_t remoteSSRC = rtp_receiver_->SSRC();
if (remoteSSRC > 0)
{
uint16_t avgRTT(0);
@ -4176,10 +4087,10 @@ Channel::GetRTPStatistics(CallStatistics& stats)
uint32_t bytesReceived(0);
uint32_t packetsReceived(0);
rtp_receive_statistics_->GetDataCounters(&bytesReceived, &packetsReceived);
if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
&packetsSent,
&bytesReceived,
&packetsReceived) != 0)
&packetsSent) != 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId, _channelId),
@ -4261,8 +4172,8 @@ Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
// None of these functions can fail.
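// With this change NACK is configured in three places: packet storage on the
// send-side RTP/RTCP module (so packets can be retransmitted on request),
// the NACK method on the RTP receiver, and NACK support in the audio coding
// module.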
_rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
_rtpRtcpModule->SetNACKStatus(enable ? kNackRtcp : kNackOff,
maxNumberOfPackets);
rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
maxNumberOfPackets);
if (enable)
_audioCodingModule.EnableNack(maxNumberOfPackets);
else
@ -4702,7 +4613,7 @@ Channel::ResetRTCPStatistics()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ResetRTCPStatistics()");
uint32_t remoteSSRC(0);
remoteSSRC = _rtpRtcpModule->RemoteSSRC();
remoteSSRC = rtp_receiver_->SSRC();
return _rtpRtcpModule->ResetRTT(remoteSSRC);
}
@ -4731,7 +4642,7 @@ Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
uint16_t maxRTT;
uint16_t minRTT;
// The remote SSRC will be zero if no RTP packet has been received.
remoteSSRC = _rtpRtcpModule->RemoteSSRC();
remoteSSRC = rtp_receiver_->SSRC();
if (remoteSSRC == 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
@ -4941,11 +4852,12 @@ Channel::SetInitSequenceNumber(short sequenceNumber)
}
int
Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetRtpRtcp()");
rtpRtcpModule = _rtpRtcpModule.get();
*rtpRtcpModule = _rtpRtcpModule.get();
*rtp_receiver = rtp_receiver_.get();
return 0;
}
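// Hypothetical caller-side sketch (not part of this change), illustrating the
// new two-pointer signature:
//   RtpRtcp* rtp_rtcp = NULL;
//   RtpReceiver* rtp_receiver = NULL;
//   if (channel->GetRtpRtcp(&rtp_rtcp, &rtp_receiver) != 0) {
//     // handle error
//   }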
@ -5165,15 +5077,6 @@ Channel::UpdateDeadOrAliveCounters(bool alive)
int
Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
{
bool enabled;
uint8_t timeSec;
_rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
if (!enabled)
return (-1);
countDead = static_cast<int> (_countDeadDetections);
countAlive = static_cast<int> (_countAliveDetections);
return 0;
}
@ -5273,7 +5176,12 @@ Channel::RegisterReceiveCodecsToRTPModule()
{
// Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
(_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
(rtp_receiver_->RegisterReceivePayload(
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0) ? 0 : codec.rate) == -1))
{
WEBRTC_TRACE(
kTraceWarning,

View File

@ -35,16 +35,21 @@
namespace webrtc
{
class CriticalSectionWrapper;
class ProcessThread;
class AudioDeviceModule;
class RtpRtcp;
class CriticalSectionWrapper;
class FileWrapper;
class ProcessThread;
class ReceiveStatistics;
class RtpDump;
class VoiceEngineObserver;
class RTPPayloadRegistry;
class RtpReceiver;
class RTPReceiverAudio;
class RtpRtcp;
class TelephoneEventHandler;
class VoEMediaProcess;
class VoERTPObserver;
class VoERTCPObserver;
class VoERTPObserver;
class VoiceEngineObserver;
struct CallStatistics;
struct ReportBlock;
@ -133,12 +138,6 @@ public:
int32_t DeRegisterExternalTransport();
int32_t ReceivedRTPPacket(const int8_t* data, int32_t length);
int32_t ReceivedRTCPPacket(const int8_t* data, int32_t length);
int32_t SetPacketTimeoutNotification(bool enable, int timeoutSeconds);
int32_t GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds);
int32_t RegisterDeadOrAliveObserver(VoEConnectionObserver& observer);
int32_t DeRegisterDeadOrAliveObserver();
int32_t SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds);
int32_t GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds);
// VoEFile
int StartPlayingFileLocally(const char* fileName, bool loop,
@ -215,7 +214,7 @@ public:
int SetInitSequenceNumber(short sequenceNumber);
// VoEVideoSyncExtended
int GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const;
int GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const;
// VoEEncryption
int RegisterExternalEncryption(Encryption& encryption);
@ -307,6 +306,11 @@ public:
uint16_t payloadSize,
const WebRtcRTPHeader* rtpHeader);
bool OnRecoveredPacket(const uint8_t* packet, int packet_length) {
// Generic FEC not supported for audio.
return true;
}
public:
// From RtpFeedback in the RTP/RTCP module
int32_t OnInitializeDecoder(
@ -330,6 +334,8 @@ public:
void OnIncomingCSRCChanged(int32_t id,
uint32_t CSRC, bool added);
void ResetStatistics();
public:
// From RtcpFeedback in the RTP/RTCP module
void OnApplicationDataReceived(int32_t id,
@ -433,6 +439,7 @@ public:
uint32_t EncodeAndSend();
private:
bool IsPacketRetransmitted(const RTPHeader& header) const;
int ResendPackets(const uint16_t* sequence_numbers, int length);
int InsertInbandDtmfTone();
int32_t MixOrReplaceAudioWithFile(int mixingFrequency);
@ -453,6 +460,10 @@ private:
private:
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
scoped_ptr<RtpReceiver> rtp_receiver_;
TelephoneEventHandler* telephone_event_handler_;
scoped_ptr<RtpRtcp> _rtpRtcpModule;
AudioCodingModule& _audioCodingModule;
RtpDump& _rtpDumpIn;

View File

@ -89,31 +89,6 @@ public:
virtual int ReceivedRTCPPacket(
int channel, const void* data, unsigned int length) = 0;
// Enables or disables warnings that report if packets have not been
// received in |timeoutSeconds| seconds for a specific |channel|.
virtual int SetPacketTimeoutNotification(
int channel, bool enable, int timeoutSeconds = 2) = 0;
// Gets the current time-out notification status.
virtual int GetPacketTimeoutNotification(
int channel, bool& enabled, int& timeoutSeconds) = 0;
// Installs the observer class implementation for a specified |channel|.
virtual int RegisterDeadOrAliveObserver(
int channel, VoEConnectionObserver& observer) = 0;
// Removes the observer class implementation for a specified |channel|.
virtual int DeRegisterDeadOrAliveObserver(int channel) = 0;
// Enables or disables the periodic dead-or-alive callback functionality
// for a specified |channel|.
virtual int SetPeriodicDeadOrAliveStatus(
int channel, bool enable, int sampleTimeSeconds = 2) = 0;
// Gets the current dead-or-alive notification status.
virtual int GetPeriodicDeadOrAliveStatus(
int channel, bool& enabled, int& sampleTimeSeconds) = 0;
protected:
VoENetwork() {}
virtual ~VoENetwork() {}

Some files were not shown because too many files have changed in this diff.