Add AudioReceiveStream to Call API.

BUG=4574
R=kwiberg@webrtc.org, mflodman@webrtc.org, pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/51749004

Cr-Commit-Position: refs/heads/master@{#9114}
Author: Fredrik Solenberg
Date:   2015-04-29 15:24:01 +02:00
Parent: 10ba3eec5a
Commit: 23fba1ffa0

22 changed files with 491 additions and 163 deletions
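The new surface lets a client create and destroy audio receive streams through webrtc::Call and tag packets handed to the PacketReceiver with a MediaType. A minimal usage sketch, assuming an already-configured Call instance; the function name, SSRC value and packet arguments are illustrative, not part of this CL:

#include <stddef.h>
#include <stdint.h>

#include "webrtc/audio_receive_stream.h"
#include "webrtc/call.h"

// Sketch only: |call|, |packet| and |length| come from the surrounding
// application; the SSRC value is illustrative.
void ReceiveAudioExample(webrtc::Call* call,
                         const uint8_t* packet, size_t length) {
  webrtc::AudioReceiveStream::Config config;
  config.rtp.remote_ssrc = 11111;
  webrtc::AudioReceiveStream* stream = call->CreateAudioReceiveStream(config);

  // Incoming packets are now tagged with the media type they carry.
  call->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, packet, length);

  call->DestroyAudioReceiveStream(stream);
}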

View File

@@ -199,6 +199,14 @@ webrtc::Call::NetworkState FakeCall::GetNetworkState() const {
return network_state_;
}
webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) {
return nullptr;
}
void FakeCall::DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) {
}
webrtc::VideoSendStream* FakeCall::CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
const webrtc::VideoEncoderConfig& encoder_config) {
@@ -247,8 +255,11 @@ webrtc::PacketReceiver* FakeCall::Receiver() {
return this;
}
FakeCall::DeliveryStatus FakeCall::DeliverPacket(const uint8_t* packet,
FakeCall::DeliveryStatus FakeCall::DeliverPacket(webrtc::MediaType media_type,
const uint8_t* packet,
size_t length) {
EXPECT_TRUE(media_type == webrtc::MediaType::ANY ||
media_type == webrtc::MediaType::VIDEO);
EXPECT_GE(length, 12u);
uint32_t ssrc;
if (!GetRtpSsrc(packet, length, &ssrc))

View File

@@ -118,6 +118,11 @@ class FakeCall : public webrtc::Call, public webrtc::PacketReceiver {
void SetStats(const webrtc::Call::Stats& stats);
private:
webrtc::AudioReceiveStream* CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) override;
void DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) override;
webrtc::VideoSendStream* CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
const webrtc::VideoEncoderConfig& encoder_config) override;
@@ -129,7 +134,8 @@ class FakeCall : public webrtc::Call, public webrtc::PacketReceiver {
webrtc::VideoReceiveStream* receive_stream) override;
webrtc::PacketReceiver* Receiver() override;
DeliveryStatus DeliverPacket(const uint8_t* packet, size_t length) override;
DeliveryStatus DeliverPacket(webrtc::MediaType media_type,
const uint8_t* packet, size_t length) override;
webrtc::Call::Stats GetStats() const override;

View File

@@ -230,7 +230,7 @@ static std::vector<webrtc::RtpExtension> FilterRtpExtensions(
std::vector<webrtc::RtpExtension> webrtc_extensions;
for (size_t i = 0; i < extensions.size(); ++i) {
// Unsupported extensions will be ignored.
if (webrtc::RtpExtension::IsSupported(extensions[i].uri)) {
if (webrtc::RtpExtension::IsSupportedForVideo(extensions[i].uri)) {
webrtc_extensions.push_back(webrtc::RtpExtension(
extensions[i].uri, extensions[i].id));
} else {
@@ -1211,7 +1211,7 @@ void WebRtcVideoChannel2::OnPacketReceived(
rtc::Buffer* packet,
const rtc::PacketTime& packet_time) {
const webrtc::PacketReceiver::DeliveryStatus delivery_result =
call_->Receiver()->DeliverPacket(
call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
reinterpret_cast<const uint8_t*>(packet->data()), packet->size());
switch (delivery_result) {
case webrtc::PacketReceiver::DELIVERY_OK:
@@ -1237,7 +1237,7 @@ void WebRtcVideoChannel2::OnPacketReceived(
break;
}
if (call_->Receiver()->DeliverPacket(
if (call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
reinterpret_cast<const uint8_t*>(packet->data()), packet->size()) !=
webrtc::PacketReceiver::DELIVERY_OK) {
LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery.";
@@ -1248,7 +1248,7 @@ void WebRtcVideoChannel2::OnPacketReceived(
void WebRtcVideoChannel2::OnRtcpReceived(
rtc::Buffer* packet,
const rtc::PacketTime& packet_time) {
if (call_->Receiver()->DeliverPacket(
if (call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
reinterpret_cast<const uint8_t*>(packet->data()), packet->size()) !=
webrtc::PacketReceiver::DELIVERY_OK) {
LOG(LS_WARNING) << "Failed to deliver RTCP packet.";

View File

@@ -0,0 +1,47 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_AUDIO_RECEIVE_STREAM_H_
#define WEBRTC_AUDIO_RECEIVE_STREAM_H_
#include <string>
#include <vector>
#include "webrtc/common_types.h"
#include "webrtc/config.h"
namespace webrtc {
class AudioReceiveStream {
public:
struct Config {
Config() {}
std::string ToString() const;
// Receive-stream specific RTP settings.
struct Rtp {
Rtp() : remote_ssrc(0) {}
std::string ToString() const;
// Synchronization source (stream identifier) to be received.
uint32_t remote_ssrc;
// RTP header extensions used for the received stream.
std::vector<RtpExtension> extensions;
} rtp;
};
protected:
virtual ~AudioReceiveStream() {}
};
} // namespace webrtc
#endif // WEBRTC_AUDIO_RECEIVE_STREAM_H_
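For the RTP extensions in this config, the implementation added below (video/audio_receive_stream.cc) accepts only the absolute-send-time and audio-level extensions, and one-byte extension ids must be in the range 1-14. A hedged configuration sketch; the SSRC and id values are illustrative:

#include "webrtc/audio_receive_stream.h"
#include "webrtc/config.h"

// Sketch: build a receive config with the two extensions the new internal
// stream understands; SSRC and extension ids are illustrative.
webrtc::AudioReceiveStream::Config MakeExampleConfig() {
  webrtc::AudioReceiveStream::Config config;
  config.rtp.remote_ssrc = 22222;
  // One-byte extension ids must be in 1-14.
  config.rtp.extensions.push_back(
      webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTime, 3));
  config.rtp.extensions.push_back(
      webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevel, 2));
  return config;
}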

View File

@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/common_types.h"
#include "webrtc/audio_receive_stream.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
@@ -23,6 +24,13 @@ class VoiceEngine;
const char* Version();
enum class MediaType {
ANY,
AUDIO,
VIDEO,
DATA
};
class PacketReceiver {
public:
enum DeliveryStatus {
@@ -31,9 +39,9 @@ class PacketReceiver {
DELIVERY_PACKET_ERROR,
};
virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
virtual DeliveryStatus DeliverPacket(MediaType media_type,
const uint8_t* packet,
size_t length) = 0;
protected:
virtual ~PacketReceiver() {}
};
@@ -105,10 +113,14 @@ class Call {
static Call* Create(const Call::Config& config);
virtual AudioReceiveStream* CreateAudioReceiveStream(
const AudioReceiveStream::Config& config) = 0;
virtual void DestroyAudioReceiveStream(
AudioReceiveStream* receive_stream) = 0;
virtual VideoSendStream* CreateVideoSendStream(
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config) = 0;
virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0;
virtual VideoReceiveStream* CreateVideoReceiveStream(
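Several of the fakes and observers in this CL override PacketReceiver with the widened signature; a minimal sketch of such an override (the class name and the forwarding target are illustrative, not part of the CL):

#include "webrtc/call.h"

// Sketch: forward packets unchanged, preserving the media_type tag.
class ForwardingReceiver : public webrtc::PacketReceiver {
 public:
  explicit ForwardingReceiver(webrtc::PacketReceiver* next) : next_(next) {}

  DeliveryStatus DeliverPacket(webrtc::MediaType media_type,
                               const uint8_t* packet,
                               size_t length) override {
    // MediaType::ANY means the caller did not (or could not) demux by type.
    return next_->DeliverPacket(media_type, packet, length);
  }

 private:
  webrtc::PacketReceiver* const next_;
};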

View File

@@ -29,6 +29,24 @@ std::string RtpExtension::ToString() const {
return ss.str();
}
const char* RtpExtension::kTOffset = "urn:ietf:params:rtp-hdrext:toffset";
const char* RtpExtension::kAbsSendTime =
"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
const char* RtpExtension::kVideoRotation = "urn:3gpp:video-orientation";
const char* RtpExtension::kAudioLevel =
"urn:ietf:params:rtp-hdrext:ssrc-audio-level";
bool RtpExtension::IsSupportedForAudio(const std::string& name) {
return name == webrtc::RtpExtension::kAbsSendTime ||
name == webrtc::RtpExtension::kAudioLevel;
}
bool RtpExtension::IsSupportedForVideo(const std::string& name) {
return name == webrtc::RtpExtension::kTOffset ||
name == webrtc::RtpExtension::kAbsSendTime ||
name == webrtc::RtpExtension::kVideoRotation;
}
VideoStream::VideoStream()
: width(0),
height(0),

View File

@@ -53,11 +53,13 @@ struct FecConfig {
struct RtpExtension {
RtpExtension(const std::string& name, int id) : name(name), id(id) {}
std::string ToString() const;
static bool IsSupported(const std::string& name);
static bool IsSupportedForAudio(const std::string& name);
static bool IsSupportedForVideo(const std::string& name);
static const char* kTOffset;
static const char* kAbsSendTime;
static const char* kVideoRotation;
static const char* kAudioLevel;
std::string name;
int id;
};
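With the IsSupported() check split by media type, callers filter negotiated extensions per stream kind (WebRtcVideoChannel2 above now uses IsSupportedForVideo). A hedged sketch of an audio-side counterpart; the function name is illustrative:

#include <vector>

#include "webrtc/config.h"

// Sketch: keep only the extensions the audio receive path understands.
std::vector<webrtc::RtpExtension> FilterAudioExtensions(
    const std::vector<webrtc::RtpExtension>& extensions) {
  std::vector<webrtc::RtpExtension> filtered;
  for (const webrtc::RtpExtension& extension : extensions) {
    if (webrtc::RtpExtension::IsSupportedForAudio(extension.name))
      filtered.push_back(extension);
    // Unsupported extensions are silently dropped in this sketch.
  }
  return filtered;
}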

View File

@@ -202,7 +202,8 @@ void FakeNetworkPipe::Process() {
while (!packets_to_deliver.empty()) {
NetworkPacket* packet = packets_to_deliver.front();
packets_to_deliver.pop();
packet_receiver_->DeliverPacket(packet->data(), packet->data_length());
packet_receiver_->DeliverPacket(MediaType::ANY, packet->data(),
packet->data_length());
delete packet;
}
}

View File

@@ -29,11 +29,12 @@ class MockReceiver : public PacketReceiver {
virtual ~MockReceiver() {}
void IncomingPacket(const uint8_t* data, size_t length) {
DeliverPacket(data, length);
DeliverPacket(MediaType::ANY, data, length);
delete [] data;
}
MOCK_METHOD2(DeliverPacket, DeliveryStatus(const uint8_t*, size_t));
MOCK_METHOD3(DeliverPacket,
DeliveryStatus(MediaType, const uint8_t*, size_t));
};
class FakeNetworkPipeTest : public ::testing::Test {
@@ -41,7 +42,7 @@ class FakeNetworkPipeTest : public ::testing::Test {
virtual void SetUp() {
TickTime::UseFakeClock(12345);
receiver_.reset(new MockReceiver());
ON_CALL(*receiver_, DeliverPacket(_, _))
ON_CALL(*receiver_, DeliverPacket(_, _, _))
.WillByDefault(Return(PacketReceiver::DELIVERY_OK));
}
@@ -83,25 +84,25 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) {
kPacketSize);
// Time hasn't increased yet, so we shouldn't get any packets.
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(0);
pipe->Process();
// Advance enough time to release one packet.
TickTime::AdvanceFakeClock(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(1);
pipe->Process();
// Release all but one packet
TickTime::AdvanceFakeClock(9 * kPacketTimeMs - 1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(8);
pipe->Process();
// And the last one.
TickTime::AdvanceFakeClock(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(1);
pipe->Process();
}
@@ -125,19 +126,19 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {
// Increase more than kPacketTimeMs, but not more than the extra delay.
TickTime::AdvanceFakeClock(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(0);
pipe->Process();
// Advance the network delay to get the first packet.
TickTime::AdvanceFakeClock(config.queue_delay_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(1);
pipe->Process();
// Advance one more kPacketTimeMs to get the last packet.
TickTime::AdvanceFakeClock(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(1);
pipe->Process();
}
@@ -161,7 +162,7 @@ TEST_F(FakeNetworkPipeTest, QueueLengthTest) {
// Increase time enough to deliver all three packets, verify only two are
// delivered.
TickTime::AdvanceFakeClock(3 * kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(2);
pipe->Process();
}
@@ -183,7 +184,7 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) {
SendPackets(pipe.get(), 3, kPacketSize);
TickTime::AdvanceFakeClock(3 * kPacketTimeMs + config.queue_delay_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _))
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _))
.Times(2);
pipe->Process();
@@ -214,13 +215,13 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
int packet_time_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize);
// Time hasn't increased yet, so we shouldn't get any packets.
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(0);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(0);
pipe->Process();
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
TickTime::AdvanceFakeClock(packet_time_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(1);
pipe->Process();
}
@@ -236,20 +237,20 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
packet_time_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize);
// Time hasn't increased yet, so we shouldn't get any packets.
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(0);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(0);
pipe->Process();
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
TickTime::AdvanceFakeClock(packet_time_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(1);
pipe->Process();
}
// Check that all the packets were sent.
EXPECT_EQ(static_cast<size_t>(2 * kNumPackets), pipe->sent_packets());
TickTime::AdvanceFakeClock(pipe->TimeUntilNextProcess());
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(0);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(0);
pipe->Process();
}
@@ -282,27 +283,27 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) {
int packet_time_2_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize);
// Time hasn't increased yet, so we shouldn't get any packets.
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(0);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(0);
pipe->Process();
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
TickTime::AdvanceFakeClock(packet_time_1_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(1);
pipe->Process();
}
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
TickTime::AdvanceFakeClock(packet_time_2_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(1);
pipe->Process();
}
// Check that all the packets were sent.
EXPECT_EQ(static_cast<size_t>(2 * kNumPackets), pipe->sent_packets());
TickTime::AdvanceFakeClock(pipe->TimeUntilNextProcess());
EXPECT_CALL(*receiver_, DeliverPacket(_, _)).Times(0);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _)).Times(0);
pipe->Process();
}
} // namespace webrtc
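Since DeliverPacket now takes three arguments, expectations in these tests can also pin the media type when that matters. A hedged gmock sketch against the same MockReceiver, written as it would appear inside this test fixture (within namespace webrtc):

// Sketch: expect exactly one delivery explicitly tagged MediaType::ANY.
EXPECT_CALL(*receiver_, DeliverPacket(MediaType::ANY, _, _)).Times(1);
pipe->Process();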

View File

@@ -10,6 +10,8 @@ import("../build/webrtc.gni")
source_set("video") {
sources = [
"audio_receive_stream.cc",
"audio_receive_stream.h",
"call.cc",
"encoded_frame_callback_adapter.cc",
"encoded_frame_callback_adapter.h",

View File

@@ -0,0 +1,87 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video/audio_receive_stream.h"
#include <string>
#include "webrtc/base/checks.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
namespace webrtc {
std::string AudioReceiveStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{remote_ssrc: " << remote_ssrc;
ss << ", extensions: [";
for (size_t i = 0; i < extensions.size(); ++i) {
ss << extensions[i].ToString();
if (i != extensions.size() - 1)
ss << ", ";
}
ss << ']';
ss << '}';
return ss.str();
}
std::string AudioReceiveStream::Config::ToString() const {
std::stringstream ss;
ss << "{rtp: " << rtp.ToString();
ss << '}';
return ss.str();
}
namespace internal {
AudioReceiveStream::AudioReceiveStream(
RemoteBitrateEstimator* remote_bitrate_estimator,
const webrtc::AudioReceiveStream::Config& config)
: remote_bitrate_estimator_(remote_bitrate_estimator),
config_(config),
rtp_header_parser_(RtpHeaderParser::Create()) {
DCHECK(remote_bitrate_estimator_ != nullptr);
DCHECK(rtp_header_parser_ != nullptr);
for (const auto& ext : config.rtp.extensions) {
// One-byte-extension local identifiers are in the range 1-14 inclusive.
DCHECK_GE(ext.id, 1);
DCHECK_LE(ext.id, 14);
if (ext.name == RtpExtension::kAudioLevel) {
CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, ext.id));
} else if (ext.name == RtpExtension::kAbsSendTime) {
CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, ext.id));
} else {
RTC_NOTREACHED() << "Unsupported RTP extension.";
}
}
}
bool AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
return false;
}
bool AudioReceiveStream::DeliverRtp(const uint8_t* packet, size_t length) {
RTPHeader header;
if (!rtp_header_parser_->Parse(packet, length, &header)) {
return false;
}
// Only forward if the parsed header has absolute send time. RTP timestamps
// may have different rates for audio and video and shouldn't be mixed.
if (header.extension.hasAbsoluteSendTime) {
int64_t arrival_time_ms = TickTime::MillisecondTimestamp();
size_t payload_size = length - header.headerLength;
remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_size,
header);
}
return true;
}
} // namespace internal
} // namespace webrtc
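DeliverRtp above deliberately forwards only packets whose header carries an absolute send time, since plain RTP timestamps tick at media-specific rates and cannot be mixed across audio and video. The gate, factored out as a standalone sketch (the function name is illustrative, not part of the CL):

#include <stddef.h>
#include <stdint.h>

#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"

// Sketch: true if |packet| parses and carries the absolute-send-time
// extension, i.e. it is safe to feed to the shared bitrate estimator.
bool ShouldFeedBitrateEstimator(webrtc::RtpHeaderParser* parser,
                                const uint8_t* packet, size_t length) {
  webrtc::RTPHeader header;
  if (!parser->Parse(packet, length, &header))
    return false;
  return header.extension.hasAbsoluteSendTime;
}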

View File

@@ -0,0 +1,44 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_AUDIO_RECEIVE_STREAM_H_
#define WEBRTC_VIDEO_AUDIO_RECEIVE_STREAM_H_
#include "webrtc/audio_receive_stream.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
namespace webrtc {
class RemoteBitrateEstimator;
namespace internal {
class AudioReceiveStream : public webrtc::AudioReceiveStream {
public:
AudioReceiveStream(RemoteBitrateEstimator* remote_bitrate_estimator,
const webrtc::AudioReceiveStream::Config& config);
~AudioReceiveStream() override {}
bool DeliverRtcp(const uint8_t* packet, size_t length);
bool DeliverRtp(const uint8_t* packet, size_t length);
const webrtc::AudioReceiveStream::Config& config() const {
return config_;
}
private:
RemoteBitrateEstimator* const remote_bitrate_estimator_;
const webrtc::AudioReceiveStream::Config config_;
rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
};
} // namespace internal
} // namespace webrtc
#endif // WEBRTC_VIDEO_AUDIO_RECEIVE_STREAM_H_

View File

@@ -29,8 +29,8 @@
namespace webrtc {
namespace {
// Note: consider to write tests that don't depend on the trace system instead
// of re-using this class.
// Note: If you consider re-using this class, think twice and instead consider
// writing tests that don't depend on the trace system.
class TraceObserver {
public:
TraceObserver() {
@@ -179,11 +179,12 @@ class BitrateEstimatorTest : public test::CallTest {
class Stream {
public:
explicit Stream(BitrateEstimatorTest* test)
Stream(BitrateEstimatorTest* test, bool receive_audio)
: test_(test),
is_sending_receiving_(false),
send_stream_(nullptr),
receive_stream_(nullptr),
audio_receive_stream_(nullptr),
video_receive_stream_(nullptr),
frame_generator_capturer_(),
fake_encoder_(Clock::GetRealTimeClock()),
fake_decoder_() {
@@ -201,33 +202,53 @@ class BitrateEstimatorTest : public test::CallTest {
send_stream_->Start();
frame_generator_capturer_->Start();
VideoReceiveStream::Decoder decoder;
decoder.decoder = &fake_decoder_;
decoder.payload_type = test_->send_config_.encoder_settings.payload_type;
decoder.payload_name = test_->send_config_.encoder_settings.payload_name;
test_->receive_config_.decoders.push_back(decoder);
test_->receive_config_.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
test_->receive_config_.rtp.local_ssrc++;
receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
test_->receive_config_);
receive_stream_->Start();
if (receive_audio) {
AudioReceiveStream::Config receive_config;
receive_config.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
receive_config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
audio_receive_stream_ = test_->receiver_call_->CreateAudioReceiveStream(
receive_config);
} else {
VideoReceiveStream::Decoder decoder;
decoder.decoder = &fake_decoder_;
decoder.payload_type =
test_->send_config_.encoder_settings.payload_type;
decoder.payload_name =
test_->send_config_.encoder_settings.payload_name;
test_->receive_config_.decoders.push_back(decoder);
test_->receive_config_.rtp.remote_ssrc =
test_->send_config_.rtp.ssrcs[0];
test_->receive_config_.rtp.local_ssrc++;
video_receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
test_->receive_config_);
video_receive_stream_->Start();
}
is_sending_receiving_ = true;
}
~Stream() {
EXPECT_FALSE(is_sending_receiving_);
frame_generator_capturer_.reset(nullptr);
test_->sender_call_->DestroyVideoSendStream(send_stream_);
send_stream_ = nullptr;
test_->receiver_call_->DestroyVideoReceiveStream(receive_stream_);
receive_stream_ = nullptr;
if (audio_receive_stream_) {
test_->receiver_call_->DestroyAudioReceiveStream(audio_receive_stream_);
audio_receive_stream_ = nullptr;
}
if (video_receive_stream_) {
test_->receiver_call_->DestroyVideoReceiveStream(video_receive_stream_);
video_receive_stream_ = nullptr;
}
}
void StopSending() {
if (is_sending_receiving_) {
frame_generator_capturer_->Stop();
send_stream_->Stop();
receive_stream_->Stop();
if (video_receive_stream_) {
video_receive_stream_->Stop();
}
is_sending_receiving_ = false;
}
}
@@ -236,7 +257,8 @@ class BitrateEstimatorTest : public test::CallTest {
BitrateEstimatorTest* test_;
bool is_sending_receiving_;
VideoSendStream* send_stream_;
VideoReceiveStream* receive_stream_;
AudioReceiveStream* audio_receive_stream_;
VideoReceiveStream* video_receive_stream_;
rtc::scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
test::FakeEncoder fake_encoder_;
test::FakeDecoder fake_decoder_;
@@ -251,18 +273,18 @@ class BitrateEstimatorTest : public test::CallTest {
std::vector<Stream*> streams_;
};
TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefault) {
TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefaultForVideo) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
TEST_F(BitrateEstimatorTest, ImmediatelySwitchToAST) {
TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForAudio) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
receiver_trace_.PushExpectedLogLine(
@@ -272,18 +294,49 @@ TEST_F(BitrateEstimatorTest, ImmediatelySwitchToAST) {
receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
receiver_trace_.PushExpectedLogLine(
"AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, true));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToAST) {
TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForVideo) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
receiver_trace_.PushExpectedLogLine(
"AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTForAudio) {
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this, true));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
receiver_trace_.PushExpectedLogLine(
"AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this, true));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
send_config_.rtp.extensions[0] =
@@ -291,18 +344,18 @@ TEST_F(BitrateEstimatorTest, SwitchesToAST) {
receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
receiver_trace_.PushExpectedLogLine(
"AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOF) {
TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOFForVideo) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
send_config_.rtp.extensions[0] =
@@ -310,7 +363,7 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOF) {
receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
receiver_trace_.PushExpectedLogLine(
"AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
send_config_.rtp.extensions[0] =
@@ -319,7 +372,7 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOF) {
"WrappingBitrateEstimator: Switching to transmission time offset RBE.");
receiver_trace_.PushExpectedLogLine(
"RemoteBitrateEstimatorFactory: Instantiating.");
streams_.push_back(new Stream(this));
streams_.push_back(new Stream(this, false));
streams_[0]->StopSending();
streams_[1]->StopSending();
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());

View File

@@ -29,6 +29,7 @@
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video/audio_receive_stream.h"
#include "webrtc/video/video_receive_stream.h"
#include "webrtc/video/video_send_stream.h"
#include "webrtc/video_engine/include/vie_base.h"
@@ -38,17 +39,6 @@
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
namespace webrtc {
const char* RtpExtension::kTOffset = "urn:ietf:params:rtp-hdrext:toffset";
const char* RtpExtension::kAbsSendTime =
"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
const char* RtpExtension::kVideoRotation = "urn:3gpp:video-orientation";
bool RtpExtension::IsSupported(const std::string& name) {
return name == webrtc::RtpExtension::kTOffset ||
name == webrtc::RtpExtension::kAbsSendTime ||
name == webrtc::RtpExtension::kVideoRotation;
}
VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
switch (codec_type) {
case kVp8:
@@ -107,29 +97,35 @@ class Call : public webrtc::Call, public PacketReceiver {
PacketReceiver* Receiver() override;
VideoSendStream* CreateVideoSendStream(
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config) override;
webrtc::AudioReceiveStream* CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) override;
void DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) override;
webrtc::VideoSendStream* CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config) override;
void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override;
VideoReceiveStream* CreateVideoReceiveStream(
const VideoReceiveStream::Config& config) override;
webrtc::VideoReceiveStream* CreateVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config) override;
void DestroyVideoReceiveStream(
webrtc::VideoReceiveStream* receive_stream) override;
Stats GetStats() const override;
DeliveryStatus DeliverPacket(const uint8_t* packet, size_t length) override;
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override;
void SetBitrateConfig(
const webrtc::Call::Config::BitrateConfig& bitrate_config) override;
void SignalNetworkState(NetworkState state) override;
private:
DeliveryStatus DeliverRtcp(const uint8_t* packet, size_t length);
DeliveryStatus DeliverRtp(const uint8_t* packet, size_t length);
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet,
size_t length);
DeliveryStatus DeliverRtp(MediaType media_type, const uint8_t* packet,
size_t length);
Call::Config config_;
@@ -140,17 +136,20 @@ class Call : public webrtc::Call, public PacketReceiver {
bool network_enabled_ GUARDED_BY(network_enabled_crit_);
rtc::scoped_ptr<RWLockWrapper> receive_crit_;
std::map<uint32_t, VideoReceiveStream*> receive_ssrcs_
std::map<uint32_t, AudioReceiveStream*> audio_receive_ssrcs_
GUARDED_BY(receive_crit_);
std::map<uint32_t, VideoReceiveStream*> video_receive_ssrcs_
GUARDED_BY(receive_crit_);
std::set<VideoReceiveStream*> video_receive_streams_
GUARDED_BY(receive_crit_);
std::set<VideoReceiveStream*> receive_streams_ GUARDED_BY(receive_crit_);
rtc::scoped_ptr<RWLockWrapper> send_crit_;
std::map<uint32_t, VideoSendStream*> send_ssrcs_ GUARDED_BY(send_crit_);
std::set<VideoSendStream*> send_streams_ GUARDED_BY(send_crit_);
std::map<uint32_t, VideoSendStream*> video_send_ssrcs_ GUARDED_BY(send_crit_);
std::set<VideoSendStream*> video_send_streams_ GUARDED_BY(send_crit_);
rtc::scoped_ptr<CpuOveruseObserverProxy> overuse_observer_proxy_;
VideoSendStream::RtpStateMap suspended_send_ssrcs_;
VideoSendStream::RtpStateMap suspended_video_send_ssrcs_;
VideoEngine* video_engine_;
ViERTP_RTCP* rtp_rtcp_;
@@ -231,10 +230,11 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
}
Call::~Call() {
CHECK_EQ(0u, send_ssrcs_.size());
CHECK_EQ(0u, send_streams_.size());
CHECK_EQ(0u, receive_ssrcs_.size());
CHECK_EQ(0u, receive_streams_.size());
CHECK_EQ(0u, video_send_ssrcs_.size());
CHECK_EQ(0u, video_send_streams_.size());
CHECK_EQ(0u, audio_receive_ssrcs_.size());
CHECK_EQ(0u, video_receive_ssrcs_.size());
CHECK_EQ(0u, video_receive_streams_.size());
base_->DeleteChannel(base_channel_id_);
render_->DeRegisterVideoRenderModule(*external_render_.get());
@@ -249,8 +249,38 @@ Call::~Call() {
PacketReceiver* Call::Receiver() { return this; }
VideoSendStream* Call::CreateVideoSendStream(
const VideoSendStream::Config& config,
webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream");
LOG(LS_INFO) << "CreateAudioReceiveStream: " << config.ToString();
AudioReceiveStream* receive_stream = new AudioReceiveStream(
channel_group_->GetRemoteBitrateEstimator(), config);
{
WriteLockScoped write_lock(*receive_crit_);
DCHECK(audio_receive_ssrcs_.find(config.rtp.remote_ssrc) ==
audio_receive_ssrcs_.end());
audio_receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
}
return receive_stream;
}
void Call::DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream");
DCHECK(receive_stream != nullptr);
AudioReceiveStream* audio_receive_stream =
static_cast<AudioReceiveStream*>(receive_stream);
{
WriteLockScoped write_lock(*receive_crit_);
size_t num_deleted = audio_receive_ssrcs_.erase(
audio_receive_stream->config().rtp.remote_ssrc);
DCHECK(num_deleted == 1);
}
delete audio_receive_stream;
}
webrtc::VideoSendStream* Call::CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config) {
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
LOG(LS_INFO) << "CreateVideoSendStream: " << config.ToString();
@@ -261,17 +291,18 @@ VideoSendStream* Call::CreateVideoSendStream(
VideoSendStream* send_stream =
new VideoSendStream(config_.send_transport, overuse_observer_proxy_.get(),
video_engine_, channel_group_, config, encoder_config,
suspended_send_ssrcs_, base_channel_id_);
suspended_video_send_ssrcs_, base_channel_id_);
// This needs to be taken before send_crit_ as both locks need to be held
// while changing network state.
CriticalSectionScoped lock(network_enabled_crit_.get());
WriteLockScoped write_lock(*send_crit_);
send_streams_.insert(send_stream);
for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
DCHECK(send_ssrcs_.find(config.rtp.ssrcs[i]) == send_ssrcs_.end());
send_ssrcs_[config.rtp.ssrcs[i]] = send_stream;
for (uint32_t ssrc : config.rtp.ssrcs) {
DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
video_send_ssrcs_[ssrc] = send_stream;
}
video_send_streams_.insert(send_stream);
if (!network_enabled_)
send_stream->SignalNetworkState(kNetworkDown);
return send_stream;
@@ -286,16 +317,16 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
VideoSendStream* send_stream_impl = nullptr;
{
WriteLockScoped write_lock(*send_crit_);
std::map<uint32_t, VideoSendStream*>::iterator it = send_ssrcs_.begin();
while (it != send_ssrcs_.end()) {
auto it = video_send_ssrcs_.begin();
while (it != video_send_ssrcs_.end()) {
if (it->second == static_cast<VideoSendStream*>(send_stream)) {
send_stream_impl = it->second;
send_ssrcs_.erase(it++);
video_send_ssrcs_.erase(it++);
} else {
++it;
}
}
send_streams_.erase(send_stream_impl);
video_send_streams_.erase(send_stream_impl);
}
CHECK(send_stream_impl != nullptr);
@@ -304,14 +335,14 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
for (VideoSendStream::RtpStateMap::iterator it = rtp_state.begin();
it != rtp_state.end();
++it) {
suspended_send_ssrcs_[it->first] = it->second;
suspended_video_send_ssrcs_[it->first] = it->second;
}
delete send_stream_impl;
}
VideoReceiveStream* Call::CreateVideoReceiveStream(
const VideoReceiveStream::Config& config) {
webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream");
LOG(LS_INFO) << "CreateVideoReceiveStream: " << config.ToString();
VideoReceiveStream* receive_stream = new VideoReceiveStream(
@@ -322,14 +353,15 @@ VideoReceiveStream* Call::CreateVideoReceiveStream(
// while changing network state.
CriticalSectionScoped lock(network_enabled_crit_.get());
WriteLockScoped write_lock(*receive_crit_);
DCHECK(receive_ssrcs_.find(config.rtp.remote_ssrc) == receive_ssrcs_.end());
receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
DCHECK(video_receive_ssrcs_.find(config.rtp.remote_ssrc) ==
video_receive_ssrcs_.end());
video_receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
// TODO(pbos): Configure different RTX payloads per receive payload.
VideoReceiveStream::Config::Rtp::RtxMap::const_iterator it =
config.rtp.rtx.begin();
if (it != config.rtp.rtx.end())
receive_ssrcs_[it->second.ssrc] = receive_stream;
receive_streams_.insert(receive_stream);
video_receive_ssrcs_[it->second.ssrc] = receive_stream;
video_receive_streams_.insert(receive_stream);
if (!network_enabled_)
receive_stream->SignalNetworkState(kNetworkDown);
@@ -346,19 +378,18 @@ void Call::DestroyVideoReceiveStream(
WriteLockScoped write_lock(*receive_crit_);
// Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
// separate SSRC there can be either one or two.
std::map<uint32_t, VideoReceiveStream*>::iterator it =
receive_ssrcs_.begin();
while (it != receive_ssrcs_.end()) {
auto it = video_receive_ssrcs_.begin();
while (it != video_receive_ssrcs_.end()) {
if (it->second == static_cast<VideoReceiveStream*>(receive_stream)) {
if (receive_stream_impl != nullptr)
DCHECK(receive_stream_impl == it->second);
receive_stream_impl = it->second;
receive_ssrcs_.erase(it++);
video_receive_ssrcs_.erase(it++);
} else {
++it;
}
}
receive_streams_.erase(receive_stream_impl);
video_receive_streams_.erase(receive_stream_impl);
}
CHECK(receive_stream_impl != nullptr);
delete receive_stream_impl;
@@ -376,11 +407,8 @@ Call::Stats Call::GetStats() const {
stats.pacer_delay_ms = channel_group_->GetPacerQueuingDelayMs();
{
ReadLockScoped read_lock(*send_crit_);
for (std::map<uint32_t, VideoSendStream*>::const_iterator it =
send_ssrcs_.begin();
it != send_ssrcs_.end();
++it) {
int rtt_ms = it->second->GetRtt();
for (const auto& kv : video_send_ssrcs_) {
int rtt_ms = kv.second->GetRtt();
if (rtt_ms > 0)
stats.rtt_ms = rtt_ms;
}
@@ -417,41 +445,36 @@ void Call::SignalNetworkState(NetworkState state) {
network_enabled_ = state == kNetworkUp;
{
ReadLockScoped write_lock(*send_crit_);
for (std::map<uint32_t, VideoSendStream*>::iterator it =
send_ssrcs_.begin();
it != send_ssrcs_.end();
++it) {
it->second->SignalNetworkState(state);
for (auto& kv : video_send_ssrcs_) {
kv.second->SignalNetworkState(state);
}
}
{
ReadLockScoped write_lock(*receive_crit_);
for (std::map<uint32_t, VideoReceiveStream*>::iterator it =
receive_ssrcs_.begin();
it != receive_ssrcs_.end();
++it) {
it->second->SignalNetworkState(state);
for (auto& kv : video_receive_ssrcs_) {
kv.second->SignalNetworkState(state);
}
}
}
PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
size_t length) {
PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
const uint8_t* packet,
size_t length) {
// TODO(pbos): Figure out which channel actually needs it.
// Do NOT broadcast! Also make sure it's a valid packet.
// Return DELIVERY_UNKNOWN_SSRC if it can be determined that
// there's no receiver of the packet.
bool rtcp_delivered = false;
{
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
ReadLockScoped read_lock(*receive_crit_);
for (VideoReceiveStream* stream : receive_streams_) {
for (VideoReceiveStream* stream : video_receive_streams_) {
if (stream->DeliverRtcp(packet, length))
rtcp_delivered = true;
}
}
{
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
ReadLockScoped read_lock(*send_crit_);
for (VideoSendStream* stream : send_streams_) {
for (VideoSendStream* stream : video_send_streams_) {
if (stream->DeliverRtcp(packet, length))
rtcp_delivered = true;
}
@@ -459,7 +482,8 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
return rtcp_delivered ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
}
PacketReceiver::DeliveryStatus Call::DeliverRtp(const uint8_t* packet,
PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
const uint8_t* packet,
size_t length) {
// Minimum RTP header size.
if (length < 12)
@@ -468,22 +492,30 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(const uint8_t* packet,
uint32_t ssrc = ByteReader<uint32_t>::ReadBigEndian(&packet[8]);
ReadLockScoped read_lock(*receive_crit_);
std::map<uint32_t, VideoReceiveStream*>::iterator it =
receive_ssrcs_.find(ssrc);
if (it == receive_ssrcs_.end())
return DELIVERY_UNKNOWN_SSRC;
return it->second->DeliverRtp(packet, length) ? DELIVERY_OK
: DELIVERY_PACKET_ERROR;
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
auto it = audio_receive_ssrcs_.find(ssrc);
if (it != audio_receive_ssrcs_.end()) {
return it->second->DeliverRtp(packet, length) ? DELIVERY_OK
: DELIVERY_PACKET_ERROR;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
auto it = video_receive_ssrcs_.find(ssrc);
if (it != video_receive_ssrcs_.end()) {
return it->second->DeliverRtp(packet, length) ? DELIVERY_OK
: DELIVERY_PACKET_ERROR;
}
}
return DELIVERY_UNKNOWN_SSRC;
}
PacketReceiver::DeliveryStatus Call::DeliverPacket(const uint8_t* packet,
PacketReceiver::DeliveryStatus Call::DeliverPacket(MediaType media_type,
const uint8_t* packet,
size_t length) {
if (RtpHeaderParser::IsRtcp(packet, length))
return DeliverRtcp(packet, length);
return DeliverRtcp(media_type, packet, length);
return DeliverRtp(packet, length);
return DeliverRtp(media_type, packet, length);
}
} // namespace internal
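With separate audio and video SSRC maps, RTP dispatch now depends on both the packet's SSRC and the MediaType hint. A short behavioral sketch; the stream setup, function name and packet arguments are assumptions for illustration only:

#include <stddef.h>
#include <stdint.h>

#include "webrtc/call.h"

// Sketch: |call| is assumed to already own an audio receive stream whose
// remote SSRC matches |audio_packet|.
void DeliverExamples(webrtc::Call* call,
                     const uint8_t* audio_packet, size_t length) {
  webrtc::PacketReceiver* receiver = call->Receiver();

  // MediaType::ANY: both SSRC maps are consulted, so demuxing is by SSRC only.
  receiver->DeliverPacket(webrtc::MediaType::ANY, audio_packet, length);

  // MediaType::AUDIO: only the audio map is consulted; delivering the same
  // packet as MediaType::VIDEO would return DELIVERY_UNKNOWN_SSRC.
  receiver->DeliverPacket(webrtc::MediaType::AUDIO, audio_packet, length);
}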

View File

@@ -197,8 +197,10 @@ void CallPerfTest::TestAudioVideoSync(bool fec) {
: channel_(channel),
voe_network_(voe_network),
parser_(RtpHeaderParser::Create()) {}
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
EXPECT_TRUE(media_type == MediaType::ANY ||
media_type == MediaType::AUDIO);
int ret;
if (parser_->IsRtcp(packet, length)) {
ret = voe_network_->ReceivedRTCPPacket(channel_, packet, length);
@@ -522,7 +524,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
test::RtpRtcpObserver::SetReceivers(this, receive_transport_receiver);
}
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
VideoSendStream::Stats stats = send_stream_->GetStats();
if (stats.substreams.size() > 0) {
@@ -555,7 +557,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
observation_complete_->Set();
}
}
return send_transport_receiver_->DeliverPacket(packet, length);
return send_transport_receiver_->DeliverPacket(media_type, packet,
length);
}
void OnStreamsCreated(

View File

@@ -990,13 +990,13 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
}
private:
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
if (RtpHeaderParser::IsRtcp(packet, length)) {
return receiver_->DeliverPacket(packet, length);
return receiver_->DeliverPacket(media_type, packet, length);
} else {
DeliveryStatus delivery_status =
receiver_->DeliverPacket(packet, length);
receiver_->DeliverPacket(media_type, packet, length);
EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status);
delivered_packet_->Set();
return delivery_status;
@@ -1364,7 +1364,7 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
receiver_call_(nullptr),
has_seen_pacer_delay_(false) {}
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
Call::Stats sender_stats = sender_call_->GetStats();
Call::Stats receiver_stats = receiver_call_->GetStats();
@@ -1374,7 +1374,8 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
receiver_stats.recv_bandwidth_bps > 0 && has_seen_pacer_delay_) {
observation_complete_->Set();
}
return receiver_call_->Receiver()->DeliverPacket(packet, length);
return receiver_call_->Receiver()->DeliverPacket(media_type, packet,
length);
}
void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
@@ -1530,14 +1531,15 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
return SEND_PACKET;
}
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
// GetStats calls GetSendChannelRtcpStatistics
// (via VideoSendStream::GetRtt) which updates ReportBlockStats used by
// WebRTC.Video.SentPacketsLostInPercent.
// TODO(asapersson): Remove dependency on calling GetStats.
sender_call_->GetStats();
return receiver_call_->Receiver()->DeliverPacket(packet, length);
return receiver_call_->Receiver()->DeliverPacket(media_type, packet,
length);
}
bool MinMetricRunTimePassed() {

View File

@@ -123,7 +123,8 @@ class VideoAnalyzer : public PacketReceiver,
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
DeliveryStatus DeliverPacket(const uint8_t* packet, size_t length) override {
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
parser->Parse(packet, length, &header);
@@ -133,7 +134,7 @@ class VideoAnalyzer : public PacketReceiver,
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
}
return receiver_->DeliverPacket(packet, length);
return receiver_->DeliverPacket(media_type, packet, length);
}
void IncomingCapturedFrame(const I420VideoFrame& video_frame) override {

View File

@@ -264,7 +264,7 @@ bool LowRateStreamObserver::SendRtp(const uint8_t* data, size_t length) {
}
PacketReceiver::DeliveryStatus LowRateStreamObserver::DeliverPacket(
const uint8_t* packet, size_t length) {
MediaType media_type, const uint8_t* packet, size_t length) {
CriticalSectionScoped lock(crit_.get());
RTPHeader header;
EXPECT_TRUE(rtp_parser_->Parse(packet, length, &header));

View File

@@ -103,7 +103,8 @@ class LowRateStreamObserver : public test::DirectTransport,
bool SendRtp(const uint8_t* data, size_t length) override;
DeliveryStatus DeliverPacket(const uint8_t* packet, size_t length) override;
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override;
bool SendRtcp(const uint8_t* packet, size_t length) override;

View File

@@ -287,7 +287,8 @@ void RtpReplay() {
if (!rtp_reader->NextPacket(&packet))
break;
++num_packets;
switch (call->Receiver()->DeliverPacket(packet.data, packet.length)) {
switch (call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet.data,
packet.length)) {
case PacketReceiver::DELIVERY_OK:
break;
case PacketReceiver::DELIVERY_UNKNOWN_SSRC: {

View File

@@ -916,8 +916,10 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
}
private:
DeliveryStatus DeliverPacket(const uint8_t* packet,
DeliveryStatus DeliverPacket(MediaType media_type, const uint8_t* packet,
size_t length) override {
EXPECT_TRUE(media_type == MediaType::ANY ||
media_type == MediaType::VIDEO);
if (RtpHeaderParser::IsRtcp(packet, length))
return DELIVERY_OK;

View File

@@ -11,6 +11,8 @@
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
],
'webrtc_video_sources': [
'video/audio_receive_stream.cc',
'video/audio_receive_stream.h',
'video/call.cc',
'video/encoded_frame_callback_adapter.cc',
'video/encoded_frame_callback_adapter.h',