VoE changes to allow forwarding of received audio RTP packets from VoE to the ViE bandwidth estimator (BWE).

BUG=
R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/10419004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5757 4adac7df-926f-26a2-2b94-8c16560cd09d
author solenberg@webrtc.org 2014-03-24 10:38:25 +00:00
parent 37ca765650
commit b1f5010075
21 changed files with 340 additions and 58 deletions
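The intended hookup from the application side looks roughly like this. This is a minimal sketch, not part of the CL: it assumes an existing VoiceEngine/VideoEngine pair plus one audio and one video channel, the helper name EnableAggregatedBwe is illustrative, and only interfaces touched by this CL are used.

// Sketch: hook an audio channel up to a video channel's bandwidth estimator
// via the new VoERTP_RTCP::SetVideoEngineBWETarget() API.
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"

void EnableAggregatedBwe(webrtc::VoiceEngine* voice_engine,
                         webrtc::VideoEngine* video_engine,
                         int audio_channel, int video_channel) {
  webrtc::VoERTP_RTCP* voe_rtp_rtcp =
      webrtc::VoERTP_RTCP::GetInterface(voice_engine);
  webrtc::ViENetwork* vie_network =
      webrtc::ViENetwork::GetInterface(video_engine);
  // VoE takes ownership of the ViENetwork interface and will Release() it.
  // Incoming audio RTP headers on audio_channel are then forwarded to the
  // BWE of video_channel through ViENetwork::ReceivedBWEPacket().
  voe_rtp_rtcp->SetVideoEngineBWETarget(audio_channel, vie_network,
                                        video_channel);
  voe_rtp_rtcp->Release();  // Drop our own reference to the VoE sub-API.
}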

@@ -734,6 +734,54 @@ struct PacketTime {
// If unknown, this value will be set to zero.
};
struct RTPHeaderExtension {
RTPHeaderExtension()
: hasTransmissionTimeOffset(false),
transmissionTimeOffset(0),
hasAbsoluteSendTime(false),
absoluteSendTime(0),
hasAudioLevel(false),
audioLevel(0) {}
bool hasTransmissionTimeOffset;
int32_t transmissionTimeOffset;
bool hasAbsoluteSendTime;
uint32_t absoluteSendTime;
// Audio Level includes both level in dBov and voiced/unvoiced bit. See:
// https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
bool hasAudioLevel;
uint8_t audioLevel;
};
struct RTPHeader {
RTPHeader()
: markerBit(false),
payloadType(0),
sequenceNumber(0),
timestamp(0),
ssrc(0),
numCSRCs(0),
paddingLength(0),
headerLength(0),
payload_type_frequency(0),
extension() {
memset(&arrOfCSRCs, 0, sizeof(arrOfCSRCs));
}
bool markerBit;
uint8_t payloadType;
uint16_t sequenceNumber;
uint32_t timestamp;
uint32_t ssrc;
uint8_t numCSRCs;
uint32_t arrOfCSRCs[kRtpCsrcSize];
uint8_t paddingLength;
uint16_t headerLength;
int payload_type_frequency;
RTPHeaderExtension extension;
};
struct VideoStream {
VideoStream()
: width(0),

@@ -27,32 +27,6 @@
namespace webrtc {
struct RTPHeaderExtension {
bool hasTransmissionTimeOffset;
int32_t transmissionTimeOffset;
bool hasAbsoluteSendTime;
uint32_t absoluteSendTime;
// Audio Level includes both level in dBov and voiced/unvoiced bit. See:
// https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
bool hasAudioLevel;
uint8_t audioLevel;
};
struct RTPHeader {
bool markerBit;
uint8_t payloadType;
uint16_t sequenceNumber;
uint32_t timestamp;
uint32_t ssrc;
uint8_t numCSRCs;
uint32_t arrOfCSRCs[kRtpCsrcSize];
uint8_t paddingLength;
uint16_t headerLength;
int payload_type_frequency;
RTPHeaderExtension extension;
};
struct RTPAudioHeader {
uint8_t numEnergy; // number of valid entries in arrOfEnergy
uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel

@@ -181,7 +181,7 @@ class BweTestFramework_RateCounterFilterTest : public ::testing::Test {
void TestRateCounter(int64_t run_for_ms, uint32_t payload_bits,
uint32_t expected_pps, uint32_t expected_bps) {
Packets packets;
RTPHeader header = {0};
RTPHeader header;
// "Send" a packet every 10 ms.
for (int64_t i = 0; i < run_for_ms; i += 10, now_ms_ += 10) {
packets.push_back(Packet(now_ms_ * 1000, payload_bits / 8, header));
@@ -582,7 +582,7 @@ class BweTestFramework_ChokeFilterTest : public ::testing::Test {
uint32_t expected_kbit_transmitted) {
// Generate a bunch of packets, apply choke, verify output is ordered.
Packets packets;
RTPHeader header = {0};
RTPHeader header;
for (uint32_t i = 0; i < packets_to_generate; ++i) {
int64_t send_time_ms = now_ms_ + (i * run_for_ms) / packets_to_generate;
header.sequenceNumber = sequence_number_++;

@@ -52,7 +52,7 @@ int main(int argc, char** argv) {
int non_zero_abs_send_time = 0;
int non_zero_ts_offsets = 0;
while (rtp_reader->NextPacket(packet, &packet_length, &time_ms) == 0) {
webrtc::RTPHeader header = {};
webrtc::RTPHeader header;
parser->Parse(packet, packet_length, &header);
if (header.extension.absoluteSendTime != 0)
++non_zero_abs_send_time;

@@ -123,7 +123,7 @@ class RtpRtcpImplTest : public ::testing::Test {
};
TEST_F(RtpRtcpImplTest, Rtt) {
RTPHeader header = {};
RTPHeader header;
header.timestamp = 1;
header.sequenceNumber = 123;
header.ssrc = kSenderSsrc;

@@ -53,7 +53,8 @@ void VoiceChannelTransport::IncomingRTPPacket(
const int32_t packet_length,
const char* /*from_ip*/,
const uint16_t /*from_port*/) {
voe_network_->ReceivedRTPPacket(channel_, incoming_rtp_packet, packet_length);
voe_network_->ReceivedRTPPacket(
channel_, incoming_rtp_packet, packet_length, PacketTime());
}
void VoiceChannelTransport::IncomingRTCPPacket(

@@ -285,7 +285,7 @@ TEST_P(ParamCallPerfTest, PlaysOutAudioAndVideoInSync) {
channel_, packet, static_cast<unsigned int>(length));
} else {
ret = voe_network_->ReceivedRTPPacket(
channel_, packet, static_cast<unsigned int>(length));
channel_, packet, static_cast<unsigned int>(length), PacketTime());
}
return ret == 0;
}

@@ -79,6 +79,13 @@ class WEBRTC_DLLEXPORT ViENetwork {
// over the network.
virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
// Forward (audio) packet to bandwidth estimator for the given video channel,
// for aggregated audio+video BWE.
virtual int ReceivedBWEPacket(const int video_channel,
int64_t arrival_time_ms, int payload_size, const RTPHeader& header) {
return 0;
}
protected:
ViENetwork() {}
virtual ~ViENetwork() {}
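From the audio side, the expected call pattern is to hand over the parsed header together with the packet's arrival time and payload size, roughly as follows. Sketch only; it mirrors the Channel::ReceivedRTPPacket() change later in this CL, and the helper name is illustrative.

// Forward one received audio packet to the video channel's BWE (sketch).
void ForwardAudioPacketToBwe(webrtc::ViENetwork* vie_network,
                             int video_channel, int64_t arrival_time_ms,
                             int packet_length,
                             const webrtc::RTPHeader& header) {
  // Only the payload size, excluding the RTP header, is reported.
  const int payload_size = packet_length - header.headerLength;
  vie_network->ReceivedBWEPacket(video_channel, arrival_time_ms, payload_size,
                                 header);
}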

@@ -197,4 +197,25 @@ int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
}
return 0;
}
int ViENetworkImpl::ReceivedBWEPacket(const int video_channel,
int64_t arrival_time_ms, int payload_size, const RTPHeader& header) {
WEBRTC_TRACE(kTraceStream, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d, time: %d, size: %d, ssrc: %u)", __FUNCTION__,
video_channel, arrival_time_ms, payload_size, header.ssrc);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
// TODO(solenberg): Implement!
// vie_channel->OnReceivedAudioPacket(arrival_time_ms, payload_size, header);
return 0;
}
} // namespace webrtc

@@ -38,7 +38,8 @@ class ViENetworkImpl
const void* data,
const int length);
virtual int SetMTU(int video_channel, unsigned int mtu);
virtual int ReceivedBWEPacket(const int video_channel,
int64_t arrival_time_ms, int payload_size, const RTPHeader& header);
protected:
explicit ViENetworkImpl(ViESharedData* shared_data);
virtual ~ViENetworkImpl();

@@ -24,6 +24,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
@@ -911,6 +912,8 @@ Channel::Channel(int32_t channelId,
_countAliveDetections(0),
_countDeadDetections(0),
_outputSpeechType(AudioFrame::kNormalSpeech),
vie_network_(NULL),
video_channel_(-1),
_average_jitter_buffer_delay_us(0),
least_required_delay_ms_(0),
_previousTimestamp(0),
@@ -1011,6 +1014,10 @@ Channel::~Channel()
// End of modules shutdown
// Delete other objects
if (vie_network_) {
vie_network_->Release();
vie_network_ = NULL;
}
RtpDump::DestroyRtpDump(&_rtpDumpIn);
RtpDump::DestroyRtpDump(&_rtpDumpOut);
delete &_callbackCritSect;
@@ -2025,7 +2032,8 @@ Channel::DeRegisterExternalTransport()
return 0;
}
int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
const PacketTime& packet_time) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ReceivedRTPPacket()");
@@ -2054,6 +2062,23 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
rtp_receive_statistics_->IncomingPacket(header, length,
IsPacketRetransmitted(header, in_order));
rtp_payload_registry_->SetIncomingPayloadType(header);
// Forward any packets to ViE bandwidth estimator, if enabled.
{
CriticalSectionScoped cs(&_callbackCritSect);
if (vie_network_) {
int64_t arrival_time_ms;
if (packet_time.timestamp != -1) {
arrival_time_ms = (packet_time.timestamp + 500) / 1000;
} else {
arrival_time_ms = TickTime::MillisecondTimestamp();
}
int payload_length = length - header.headerLength;
vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
payload_length, header);
}
}
return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
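The conversion above rounds the microsecond receive timestamp to the nearest millisecond; a couple of worked values, assuming PacketTime::timestamp carries the receive time in microseconds with -1 meaning unknown:

// packet_time.timestamp = 1234567  ->  (1234567 + 500) / 1000 = 1235 ms
// packet_time.timestamp = 1234400  ->  (1234400 + 500) / 1000 = 1234 ms
// packet_time.timestamp = -1       ->  fall back to TickTime::MillisecondTimestamp()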
@@ -3511,11 +3536,9 @@ int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
rtp_header_parser_->DeregisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime);
if (enable) {
if (!rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, id)) {
return -1;
}
if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, id)) {
return -1;
}
return 0;
}
@@ -4105,6 +4128,21 @@ Channel::RTPDumpIsActive(RTPDirections direction)
return rtpDumpPtr->IsActive();
}
void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
int video_channel) {
CriticalSectionScoped cs(&_callbackCritSect);
if (vie_network_) {
vie_network_->Release();
vie_network_ = NULL;
}
video_channel_ = -1;
if (vie_network != NULL && video_channel != -1) {
vie_network_ = vie_network;
video_channel_ = video_channel;
}
}
uint32_t
Channel::Demultiplex(const AudioFrame& audioFrame)
{

@@ -47,6 +47,7 @@ class RtpReceiver;
class RTPReceiverAudio;
class RtpRtcp;
class TelephoneEventHandler;
class ViENetwork;
class VoEMediaProcess;
class VoERTCPObserver;
class VoERTPObserver;
@@ -222,7 +223,8 @@ public:
// VoENetwork
int32_t RegisterExternalTransport(Transport& transport);
int32_t DeRegisterExternalTransport();
int32_t ReceivedRTPPacket(const int8_t* data, int32_t length);
int32_t ReceivedRTPPacket(const int8_t* data, int32_t length,
const PacketTime& packet_time);
int32_t ReceivedRTCPPacket(const int8_t* data, int32_t length);
// VoEFile
@@ -366,6 +368,8 @@ public:
int StopRTPDump(RTPDirections direction);
bool RTPDumpIsActive(RTPDirections direction);
uint32_t LastRemoteTimeStamp() { return _lastRemoteTimeStamp; }
// Takes ownership of the ViENetwork.
void SetVideoEngineBWETarget(ViENetwork* vie_network, int video_channel);
// From AudioPacketizationCallback in the ACM
int32_t SendData(FrameType frameType,
@@ -614,6 +618,8 @@ private:
uint32_t _countAliveDetections;
uint32_t _countDeadDetections;
AudioFrame::SpeechType _outputSpeechType;
ViENetwork* vie_network_;
int video_channel_;
// VoEVideoSync
uint32_t _average_jitter_buffer_delay_us;
int least_required_delay_ms_;

@@ -80,8 +80,15 @@ public:
// The packets received from the network should be passed to this
// function when external transport is enabled. Note that the data
// including the RTP-header must also be given to the VoiceEngine.
virtual int ReceivedRTPPacket(
int channel, const void* data, unsigned int length) = 0;
virtual int ReceivedRTPPacket(int channel,
const void* data,
unsigned int length) = 0;
virtual int ReceivedRTPPacket(int channel,
const void* data,
unsigned int length,
const PacketTime& packet_time) {
return 0;
}
// The packets received from the network should be passed to this
// function when external transport is enabled. Note that the data
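The call sites updated in this CL pass a default-constructed PacketTime(). An external transport that records the actual receive time can pass it along instead, roughly like this; the callback name and the assumption that PacketTime::timestamp carries the receive time in microseconds (-1 if unknown) are illustrative.

// Forward an incoming RTP packet to VoE together with its arrival time (sketch).
void OnRtpPacketFromNetwork(webrtc::VoENetwork* voe_network, int channel,
                            const void* data, unsigned int length,
                            int64_t receive_time_us) {
  webrtc::PacketTime packet_time;
  packet_time.timestamp = receive_time_us;  // Receive time in microseconds.
  voe_network->ReceivedRTPPacket(channel, data, length, packet_time);
}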

@@ -44,6 +44,7 @@
namespace webrtc {
class ViENetwork;
class VoiceEngine;
// VoERTPObserver
@@ -261,6 +262,13 @@ public:
int channel, unsigned char payloadType, bool markerBit,
const char* payloadData, unsigned short payloadSize) { return -1; };
// Sets video engine channel to receive incoming audio packets for
// aggregated bandwidth estimation. Takes ownership of the ViENetwork
// interface.
virtual int SetVideoEngineBWETarget(int channel, ViENetwork* vie_network,
int video_channel) {
return 0;
}
protected:
VoERTP_RTCP() {}
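Since VoE takes ownership and will Release() the ViENetwork interface, detaching is done by passing NULL and an invalid video channel, as the new auto-tests do (reusing the names from the sketch near the top):

// Stop forwarding audio packets to the video BWE; VoE releases its
// ViENetwork reference.
voe_rtp_rtcp->SetVideoEngineBWETarget(audio_channel, NULL, -1);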

@@ -59,7 +59,8 @@ bool FakeExternalTransport::Process() {
switch (event_->Wait(500)) {
case webrtc::kEventSignaled:
lock_->Enter();
my_network_->ReceivedRTPPacket(channel_, packet_buffer_, length_);
my_network_->ReceivedRTPPacket(channel_, packet_buffer_, length_,
webrtc::PacketTime());
lock_->Leave();
return true;
case webrtc::kEventTimeout:

@@ -95,7 +95,8 @@ class LoopBackTransport : public webrtc::Transport {
switch (p.type) {
case Packet::Rtp:
voe_network_->ReceivedRTPPacket(p.channel, p.data, p.len);
voe_network_->ReceivedRTPPacket(p.channel, p.data, p.len,
webrtc::PacketTime());
break;
case Packet::Rtcp:
voe_network_->ReceivedRTCPPacket(p.channel, p.data, p.len);

@@ -12,8 +12,14 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/system_wrappers/interface/atomic32.h"
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
using ::testing::_;
using ::testing::AtLeast;
using ::testing::Eq;
using ::testing::Field;
class ExtensionVerifyTransport : public webrtc::Transport {
public:
ExtensionVerifyTransport()
@@ -26,10 +32,11 @@ class ExtensionVerifyTransport : public webrtc::Transport {
virtual int SendPacket(int channel, const void* data, int len) {
++received_packets_;
webrtc::RTPHeader header = {0};
webrtc::RTPHeader header;
if (parser_->Parse(static_cast<const uint8_t*>(data), len, &header)) {
bool ok = true;
if (audio_level_id_ >= 0 && !header.extension.hasAudioLevel) {
if (audio_level_id_ >= 0 &&
!header.extension.hasAudioLevel) {
ok = false;
}
if (absolute_sender_time_id_ >= 0 &&
@@ -126,22 +133,148 @@ TEST_F(SendRtpRtcpHeaderExtensionsTest, SentPacketsIncludeAllExtensions2) {
EXPECT_TRUE(verifying_transport_.WaitForNPackets(10));
}
class MockViENetwork : public webrtc::ViENetwork {
public:
MockViENetwork() {}
virtual ~MockViENetwork() {}
MOCK_METHOD0(Release, int());
MOCK_METHOD2(SetNetworkTransmissionState, void(const int, const bool));
MOCK_METHOD2(RegisterSendTransport, int(const int, webrtc::Transport&));
MOCK_METHOD1(DeregisterSendTransport, int(const int));
MOCK_METHOD4(ReceivedRTPPacket, int(const int, const void*, const int,
const webrtc::PacketTime&));
MOCK_METHOD3(ReceivedRTCPPacket, int(const int, const void*, const int));
MOCK_METHOD2(SetMTU, int(int, unsigned int));
MOCK_METHOD4(ReceivedBWEPacket, int(const int, int64_t, int,
const webrtc::RTPHeader&));
};
class ReceiveRtpRtcpHeaderExtensionsTest : public AfterStreamingFixture {
protected:
virtual void SetUp() {
PausePlaying();
EXPECT_EQ(0,
voe_rtp_rtcp_->SetSendAbsoluteSenderTimeStatus(channel_, true, 11));
EXPECT_EQ(0,
voe_rtp_rtcp_->SetReceiveAbsoluteSenderTimeStatus(channel_, true, 11));
}
enum {
kVideoChannelId1 = 667,
kVideoChannelId2 = 668
};
MockViENetwork mock_network_;
};
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceivedAbsoluteSenderTimeWorks) {
EXPECT_EQ(0, voe_rtp_rtcp_->SetSendAbsoluteSenderTimeStatus(channel_, true,
11));
EXPECT_EQ(0, voe_rtp_rtcp_->SetReceiveAbsoluteSenderTimeStatus(channel_, true,
11));
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTDisabled) {
ResumePlaying();
// Ensure the RTP-RTCP process gets scheduled.
Sleep(1000);
// TODO(solenberg): Verify received packets are forwarded to RBE.
Sleep(500);
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTFailSetTarget) {
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_EQ(-1, voe_rtp_rtcp_->SetVideoEngineBWETarget(-1, &mock_network_,
kVideoChannelId1));
ResumePlaying();
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTEnabled) {
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(true)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTEnabledBadExtensionId) {
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(false)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetReceiveAbsoluteSenderTimeStatus(channel_, true,
1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTEnabledNotSending) {
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(false)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetSendAbsoluteSenderTimeStatus(channel_, false,
11));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTEnabledNotReceiving) {
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(false)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetReceiveAbsoluteSenderTimeStatus(channel_,
false, 11));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTSwitchViENetwork) {
MockViENetwork mock_network_2;
EXPECT_CALL(mock_network_2, Release()).Times(1);
EXPECT_CALL(mock_network_2, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(true)))))
.Times(AtLeast(1));
EXPECT_CALL(mock_network_, Release()).Times(1);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(true)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_2,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}
TEST_F(ReceiveRtpRtcpHeaderExtensionsTest, ReceiveASTSwitchVideoChannel) {
EXPECT_CALL(mock_network_, Release()).Times(2);
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId1, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(true)))))
.Times(AtLeast(1));
EXPECT_CALL(mock_network_, ReceivedBWEPacket(kVideoChannelId2, _, _,
Field(&webrtc::RTPHeader::extension,
Field(&webrtc::RTPHeaderExtension::hasAbsoluteSendTime, Eq(true)))))
.Times(AtLeast(1));
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId1));
ResumePlaying();
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, &mock_network_,
kVideoChannelId2));
Sleep(500);
EXPECT_EQ(0, voe_rtp_rtcp_->SetVideoEngineBWETarget(channel_, NULL, -1));
}

@@ -88,7 +88,14 @@ int VoENetworkImpl::DeRegisterExternalTransport(int channel)
int VoENetworkImpl::ReceivedRTPPacket(int channel,
const void* data,
unsigned int length)
unsigned int length) {
return ReceivedRTPPacket(channel, data, length, webrtc::PacketTime());
}
int VoENetworkImpl::ReceivedRTPPacket(int channel,
const void* data,
unsigned int length,
const PacketTime& packet_time)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
"ReceivedRTPPacket(channel=%d, length=%u)", channel, length);
@@ -125,7 +132,8 @@ int VoENetworkImpl::ReceivedRTPPacket(int channel,
"ReceivedRTPPacket() external transport is not enabled");
return -1;
}
return channelPtr->ReceivedRTPPacket((const int8_t*) data, length);
return channelPtr->ReceivedRTPPacket((const int8_t*) data, length,
packet_time);
}
int VoENetworkImpl::ReceivedRTCPPacket(int channel, const void* data,

@@ -29,6 +29,10 @@ public:
virtual int ReceivedRTPPacket(int channel,
const void* data,
unsigned int length);
virtual int ReceivedRTPPacket(int channel,
const void* data,
unsigned int length,
const PacketTime& packet_time);
virtual int ReceivedRTCPPacket(int channel,
const void* data,

@@ -11,6 +11,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/voe_rtp_rtcp_impl.h"
#include "webrtc/voice_engine/voice_engine_impl.h"
@@ -710,6 +711,27 @@ int VoERTP_RTCPImpl::GetLastRemoteTimeStamp(int channel,
return 0;
}
int VoERTP_RTCPImpl::SetVideoEngineBWETarget(int channel,
ViENetwork* vie_network,
int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetVideoEngineBWETarget(channel=%d, vie_network=?, video_channel=%d)",
channel, vie_network, video_channel);
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetVideoEngineBWETarget() failed to locate channel");
if (vie_network) {
vie_network->Release();
}
return -1;
}
channelPtr->SetVideoEngineBWETarget(vie_network, video_channel);
return 0;
}
#endif // #ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
} // namespace webrtc

@@ -116,6 +116,8 @@ public:
virtual int GetLastRemoteTimeStamp(int channel,
uint32_t* lastRemoteTimeStamp);
virtual int SetVideoEngineBWETarget(int channel, ViENetwork* vie_network,
int video_channel);
protected:
VoERTP_RTCPImpl(voe::SharedData* shared);
virtual ~VoERTP_RTCPImpl();