Update libjingle to 57692857

R=wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/4999004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5217 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: sergeyu@chromium.org
Date: 2013-12-05 00:24:06 +0000
Parent: 3d9981d58a
Commit: 5bc25c41fc
33 changed files with 848 additions and 146 deletions

View File

@@ -235,16 +235,17 @@ bool ParseIceServers(const PeerConnectionInterface::IceServers& configuration,
       }
       int port = kDefaultStunPort;
+      if (service_type == TURNS) {
+        port = kDefaultStunTlsPort;
+        turn_transport_type = kTcpTransportType;
+      }
       std::string address;
       if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
         LOG(WARNING) << "Invalid Hostname format: " << uri_without_transport;
         continue;
       }
-      if (service_type == TURNS) {
-        port = kDefaultStunTlsPort;
-        turn_transport_type = kTcpTransportType;
-      }
       if (port <= 0 || port > 0xffff) {
         LOG(WARNING) << "Invalid port: " << port;
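With this ordering, an explicit port in a TURNS URI wins and the TLS default (kDefaultStunTlsPort, assumed here to be the standard 5349) applies only when the URI gives none, which is what the new unit tests below expect. A minimal standalone sketch of that "parse host[:port] with a caller-supplied default" pattern; SplitHostAndPort() is a hypothetical stand-in, not the libjingle ParseHostnameAndPortFromString() implementation, and it ignores IPv6 literals for brevity:

#include <cstdlib>
#include <iostream>
#include <string>

// Splits "host[:port]" and leaves |*port| at its caller-supplied default when
// the string carries no explicit port.
static bool SplitHostAndPort(const std::string& in, std::string* host,
                             int* port) {
  std::string::size_type colon = in.rfind(':');
  if (colon == std::string::npos) {
    *host = in;  // No port given; keep the default already in *port.
    return !host->empty();
  }
  *host = in.substr(0, colon);
  *port = std::atoi(in.substr(colon + 1).c_str());
  return !host->empty() && *port > 0 && *port <= 0xffff;
}

int main() {
  std::string host;
  int port = 5349;  // Assumed TLS default, as if set for a TURNS URI.
  SplitHostAndPort("hello.com:443", &host, &port);
  std::cout << host << " " << port << std::endl;  // hello.com 443
  port = 5349;
  SplitHostAndPort("hello.com", &host, &port);
  std::cout << host << " " << port << std::endl;  // hello.com 5349
  return 0;
}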

View File

@@ -62,6 +62,8 @@ static const char kTurnIceServerWithTransport[] =
 static const char kSecureTurnIceServer[] =
     "turns:test@hello.com?transport=tcp";
 static const char kSecureTurnIceServerWithoutTransportParam[] =
+    "turns:test_no_transport@hello.com:443";
+static const char kSecureTurnIceServerWithoutTransportAndPortParam[] =
     "turns:test_no_transport@hello.com";
 static const char kTurnIceServerWithNoUsernameInUri[] =
     "turn:test.com:1234";
@@ -256,6 +258,9 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
   ice_server.uri = kSecureTurnIceServerWithoutTransportParam;
   ice_server.password = kTurnPassword;
   ice_servers.push_back(ice_server);
+  ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
+  ice_server.password = kTurnPassword;
+  ice_servers.push_back(ice_server);
   talk_base::scoped_refptr<PeerConnectionInterface> pc(
       factory_->CreatePeerConnection(ice_servers, NULL,
                                      allocator_factory_.get(),
@@ -268,9 +273,12 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
   turn_configs.push_back(turn1);
   // TURNS with transport param should be default to tcp.
   webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+      "hello.com", 443, "test_no_transport", kTurnPassword, "tcp", true);
+  turn_configs.push_back(turn2);
+  webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn3(
       "hello.com", kDefaultStunTlsPort, "test_no_transport",
       kTurnPassword, "tcp", true);
-  turn_configs.push_back(turn2);
+  turn_configs.push_back(turn3);
   VerifyTurnConfigurations(turn_configs);
 }

View File

@@ -47,9 +47,12 @@ const char StatsReport::kStatsValueNameAvailableReceiveBandwidth[] =
     "googAvailableReceiveBandwidth";
 const char StatsReport::kStatsValueNameAvailableSendBandwidth[] =
     "googAvailableSendBandwidth";
+const char StatsReport::kStatsValueNameAvgEncodeMs[] = "googAvgEncodeMs";
 const char StatsReport::kStatsValueNameBucketDelay[] = "googBucketDelay";
 const char StatsReport::kStatsValueNameBytesReceived[] = "bytesReceived";
 const char StatsReport::kStatsValueNameBytesSent[] = "bytesSent";
+const char StatsReport::kStatsValueNameCaptureJitterMs[] =
+    "googCaptureJitterMs";
 const char StatsReport::kStatsValueNameChannelId[] = "googChannelId";
 const char StatsReport::kStatsValueNameCodecName[] = "googCodecName";
 const char StatsReport::kStatsValueNameComponent[] = "googComponent";
@@ -292,6 +295,9 @@ void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {
                    info.framerate_sent);
   report->AddValue(StatsReport::kStatsValueNameRtt, info.rtt_ms);
   report->AddValue(StatsReport::kStatsValueNameCodecName, info.codec_name);
+  report->AddValue(StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms);
+  report->AddValue(StatsReport::kStatsValueNameCaptureJitterMs,
+                   info.capture_jitter_ms);
 }

 void ExtractStats(const cricket::BandwidthEstimationInfo& info,
@@ -334,24 +340,10 @@ void ExtractRemoteStats(const cricket::MediaReceiverInfo& info,
   // TODO(hta): Extract some stats here.
 }

-uint32 ExtractSsrc(const cricket::VoiceReceiverInfo& info) {
-  return info.ssrc;
-}
-
-uint32 ExtractSsrc(const cricket::VoiceSenderInfo& info) {
-  return info.ssrc;
-}
-
-uint32 ExtractSsrc(const cricket::VideoReceiverInfo& info) {
-  return info.ssrcs[0];
-}
-
-uint32 ExtractSsrc(const cricket::VideoSenderInfo& info) {
-  return info.ssrcs[0];
-}
-
 // Template to extract stats from a data vector.
-// ExtractSsrc and ExtractStats must be defined and overloaded for each type.
+// In order to use the template, the functions that are called from it,
+// ExtractStats and ExtractRemoteStats, must be defined and overloaded
+// for each type.
 template<typename T>
 void ExtractStatsFromList(const std::vector<T>& data,
                           const std::string& transport_id,
@@ -359,7 +351,7 @@ void ExtractStatsFromList(const std::vector<T>& data,
   typename std::vector<T>::const_iterator it = data.begin();
   for (; it != data.end(); ++it) {
     std::string id;
-    uint32 ssrc = ExtractSsrc(*it);
+    uint32 ssrc = it->ssrc();
     // Each object can result in 2 objects, a local and a remote object.
     // TODO(hta): Handle the case of multiple SSRCs per object.
     StatsReport* report = collector->PrepareLocalReport(ssrc, transport_id);
@@ -772,7 +764,7 @@ StatsReport* StatsCollector::GetOrCreateReport(const std::string& type,
     report->id = statsid;
     report->type = type;
   } else {
-    report = &reports_[statsid];
+    report = &(it->second);
   }
   return report;
 }
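The rewritten comment above describes the pattern ExtractStatsFromList relies on: one template drives the iteration, while per-type behavior comes from overloads chosen at compile time (and the SSRC now comes from the common ssrc() accessor instead of a per-type ExtractSsrc helper). A self-contained sketch of that overload-dispatch idea, using simplified stand-in types rather than the real cricket/webrtc ones:

#include <iostream>
#include <string>
#include <vector>

// Simplified stand-ins; the real code uses cricket::Voice/VideoSenderInfo etc.
struct VoiceInfo { unsigned ssrc() const { return 111; } };
struct VideoInfo { unsigned ssrc() const { return 222; } };

// Per-type overloads, analogous to ExtractStats()/ExtractRemoteStats().
void ExtractStats(const VoiceInfo&) { std::cout << "voice stats\n"; }
void ExtractStats(const VideoInfo&) { std::cout << "video stats\n"; }

// One template iterates; the right overload for T is resolved per instantiation.
template <typename T>
void ExtractStatsFromList(const std::vector<T>& data) {
  for (typename std::vector<T>::const_iterator it = data.begin();
       it != data.end(); ++it) {
    std::cout << "ssrc " << it->ssrc() << ": ";
    ExtractStats(*it);
  }
}

int main() {
  std::vector<VoiceInfo> voice(1);
  std::vector<VideoInfo> video(1);
  ExtractStatsFromList(voice);  // ssrc 111: voice stats
  ExtractStatsFromList(video);  // ssrc 222: video stats
  return 0;
}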

View File

@@ -341,7 +341,7 @@ TEST_F(StatsCollectorTest, BytesCounterHandles64Bits) {
   stats.AddStream(stream_);

   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
@@ -375,7 +375,7 @@ TEST_F(StatsCollectorTest, BandwidthEstimationInfoIsReported) {
   stats.AddStream(stream_);

   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
   cricket::BandwidthEstimationInfo bwe;
@@ -479,7 +479,7 @@ TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
   const int64 kBytesSent = 12345678901234LL;

   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
@@ -537,7 +537,7 @@ TEST_F(StatsCollectorTest, TransportObjectLinkedFromSsrcObject) {
   const int64 kBytesSent = 12345678901234LL;

   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
@@ -618,7 +618,7 @@ TEST_F(StatsCollectorTest, RemoteSsrcInfoIsPresent) {
   remote_ssrc_stats.timestamp = 12345.678;
   remote_ssrc_stats.ssrc = kSsrcOfTrack;
   cricket::VideoSenderInfo video_sender_info;
-  video_sender_info.ssrcs.push_back(kSsrcOfTrack);
+  video_sender_info.add_ssrc(kSsrcOfTrack);
   video_sender_info.remote_stats.push_back(remote_ssrc_stats);
   stats_read.senders.push_back(video_sender_info);

View File

@@ -128,6 +128,8 @@ class StatsReport {
   // Internal StatsValue names
+  static const char kStatsValueNameAvgEncodeMs[];
+  static const char kStatsValueNameCaptureJitterMs[];
   static const char kStatsValueNameCodecName[];
   static const char kStatsValueNameEchoCancellationQualityMin[];
   static const char kStatsValueNameEchoDelayMedian[];

View File

@@ -119,6 +119,8 @@ void PeerConnectionTestWrapper::OnIceCandidate(
 }

 void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+  // This callback should take the ownership of |desc|.
+  talk_base::scoped_ptr<SessionDescriptionInterface> owned_desc(desc);
   std::string sdp;
   EXPECT_TRUE(desc->ToString(&sdp));
@@ -183,9 +185,9 @@ void PeerConnectionTestWrapper::SetRemoteDescription(const std::string& type,
 void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
                                                 int sdp_mline_index,
                                                 const std::string& candidate) {
-  EXPECT_TRUE(peer_connection_->AddIceCandidate(
-      webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index,
-                                 candidate, NULL)));
+  talk_base::scoped_ptr<webrtc::IceCandidateInterface> owned_candidate(
+      webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+  EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
 }

 void PeerConnectionTestWrapper::WaitForCallEstablished() {
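Both fixes above follow the same ownership rule: the CreateOffer/CreateAnswer success callback receives a SessionDescriptionInterface* it must delete, while AddIceCandidate() only borrows the candidate passed to it, so the test wrapper keeps the candidate in a scoped_ptr. A reduced sketch of that sink-versus-borrow distinction with standard smart pointers (illustration only, not the WebRTC API):

#include <iostream>
#include <memory>
#include <string>

struct Description { std::string sdp; };

// Sink-style callback: the callee takes ownership of |desc| (like OnSuccess).
void OnSuccess(Description* desc) {
  std::unique_ptr<Description> owned_desc(desc);  // Freed when we return.
  std::cout << "offer: " << owned_desc->sdp << "\n";
}

// Borrowing API: the callee only reads |candidate| (like AddIceCandidate).
bool AddCandidate(const Description* candidate) {
  return candidate != nullptr;
}

int main() {
  OnSuccess(new Description{"v=0"});  // Ownership transferred to the callback.
  std::unique_ptr<Description> cand(new Description{"candidate:..."});
  bool ok = AddCandidate(cand.get());  // Caller keeps ownership of |cand|.
  std::cout << (ok ? "added" : "failed") << "\n";
  return 0;
}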

View File

@@ -341,13 +341,15 @@ static bool ParseFailed(const std::string& message,
                         const std::string& description,
                         SdpParseError* error) {
   // Get the first line of |message| from |line_start|.
-  std::string first_line = message;
+  std::string first_line;
   size_t line_end = message.find(kNewLine, line_start);
   if (line_end != std::string::npos) {
     if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
       --line_end;
     }
     first_line = message.substr(line_start, (line_end - line_start));
+  } else {
+    first_line = message.substr(line_start);
   }

   if (error) {
@@ -2387,7 +2389,7 @@ bool ParseContent(const std::string& message,
     if (*pos >= message.size()) {
       break;  // Done parsing
     } else {
-      return ParseFailed(message, *pos, "Can't find valid SDP line.", error);
+      return ParseFailed(message, *pos, "Invalid SDP line.", error);
     }
   }
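The first hunk makes ParseFailed() handle a message whose final line has no trailing "\r\n": when find() returns npos, the remainder of the string becomes the reported error line. A small standalone illustration of that extraction, with literal '\n'/'\r' standing in for kNewLine/kReturn:

#include <iostream>
#include <string>

// Returns the line of |message| starting at |line_start|, whether or not the
// line is terminated by "\r\n" (mirrors the fixed ParseFailed() logic).
static std::string FirstLineFrom(const std::string& message,
                                 size_t line_start) {
  std::string first_line;
  size_t line_end = message.find('\n', line_start);
  if (line_end != std::string::npos) {
    if (line_end > 0 && message.at(line_end - 1) == '\r') {
      --line_end;
    }
    first_line = message.substr(line_start, line_end - line_start);
  } else {
    first_line = message.substr(line_start);  // Last line without a newline.
  }
  return first_line;
}

int main() {
  std::cout << FirstLineFrom("v=0\r\no=- 0 0", 0) << "\n";  // "v=0"
  std::cout << FirstLineFrom("a=ssrc:6 label:video_track_id_3", 0) << "\n";
  return 0;
}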

View File

@@ -1852,6 +1852,18 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInvalidExtmap) {
   TestDeserializeExtmap(true, true);
 }

+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) {
+  JsepSessionDescription jdesc(kDummyString);
+  std::string sdp = kSdpFullString;
+  sdp = sdp.substr(0, sdp.size() - 2);  // Remove \r\n at the end.
+  // Deserialize
+  SdpParseError error;
+  EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error));
+  const std::string lastline = "a=ssrc:6 label:video_track_id_3";
+  EXPECT_EQ(lastline, error.line);
+  EXPECT_EQ("Invalid SDP line.", error.description);
+}
+
 TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
   JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
   std::string new_sdp = kSdpOneCandidate;

View File

@@ -38,27 +38,29 @@ namespace talk_base {
 ///////////////////////////////////////////////////////////////////////////////

 bool ToUtf8(const CFStringRef str16, std::string* str8) {
-  if ((NULL == str16) || (NULL == str8))
+  if ((NULL == str16) || (NULL == str8)) {
     return false;
+  }
   size_t maxlen = CFStringGetMaximumSizeForEncoding(CFStringGetLength(str16),
-                                                    kCFStringEncodingUTF8)
-                  + 1;
+                                                    kCFStringEncodingUTF8) + 1;
   scoped_ptr<char[]> buffer(new char[maxlen]);
   if (!buffer || !CFStringGetCString(str16, buffer.get(), maxlen,
-                                     kCFStringEncodingUTF8))
+                                     kCFStringEncodingUTF8)) {
     return false;
+  }
   str8->assign(buffer.get());
   return true;
 }

 bool ToUtf16(const std::string& str8, CFStringRef* str16) {
-  if (NULL == str16)
+  if (NULL == str16) {
     return false;
+  }
   *str16 = CFStringCreateWithBytes(kCFAllocatorDefault,
                                    reinterpret_cast<const UInt8*>(str8.data()),
                                    str8.length(), kCFStringEncodingUTF8,
                                    false);
-  return (NULL != *str16);
+  return NULL != *str16;
 }
@@ -100,23 +102,25 @@ static bool GetGestalt(OSType ostype, int* value) {
 bool GetOSVersion(int* major, int* minor, int* bugfix) {
   ASSERT(major && minor && bugfix);
-  if (!GetGestalt(gestaltSystemVersion, major))
+  if (!GetGestalt(gestaltSystemVersion, major)) {
     return false;
+  }
   if (*major < 0x1040) {
     *bugfix = *major & 0xF;
     *minor = (*major >> 4) & 0xF;
     *major = (*major >> 8);
     return true;
   }
-  return GetGestalt(gestaltSystemVersionMajor, major)
-      && GetGestalt(gestaltSystemVersionMinor, minor)
-      && GetGestalt(gestaltSystemVersionBugFix, bugfix);
+  return GetGestalt(gestaltSystemVersionMajor, major) &&
+         GetGestalt(gestaltSystemVersionMinor, minor) &&
+         GetGestalt(gestaltSystemVersionBugFix, bugfix);
 }

 MacOSVersionName GetOSVersionName() {
   int major = 0, minor = 0, bugfix = 0;
-  if (!GetOSVersion(&major, &minor, &bugfix))
+  if (!GetOSVersion(&major, &minor, &bugfix)) {
     return kMacOSUnknown;
+  }
   if (major > 10) {
     return kMacOSNewer;
   }
@@ -136,14 +140,17 @@ MacOSVersionName GetOSVersionName() {
       return kMacOSLion;
     case 8:
       return kMacOSMountainLion;
+    case 9:
+      return kMacOSMavericks;
   }
   return kMacOSNewer;
 }

 bool GetQuickTimeVersion(std::string* out) {
   int ver;
-  if (!GetGestalt(gestaltQuickTimeVersion, &ver))
+  if (!GetGestalt(gestaltQuickTimeVersion, &ver)) {
     return false;
+  }
   std::stringstream ss;
   ss << std::hex << ver;
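For systems before 10.4, gestaltSystemVersion historically packed the version in BCD, which is what the bit-twiddling in GetOSVersion() above decodes. A worked example (not libjingle code) for a Gestalt value of 0x1039, i.e. Mac OS X 10.3.9:

#include <cassert>

int main() {
  int v = 0x1039;               // BCD-packed 10.3.9 from gestaltSystemVersion.
  int bugfix = v & 0xF;         // 0x9 -> 9
  int minor = (v >> 4) & 0xF;   // 0x3 -> 3
  int major = v >> 8;           // 0x10, the BCD encoding of "10"
  assert(bugfix == 9 && minor == 3 && major == 0x10);
  return 0;
}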

View File

@@ -56,7 +56,8 @@ enum MacOSVersionName {
   kMacOSSnowLeopard,   // 10.6
   kMacOSLion,          // 10.7
   kMacOSMountainLion,  // 10.8
-  kMacOSNewer,         // 10.9+
+  kMacOSMavericks,     // 10.9
+  kMacOSNewer,         // 10.10+
 };

 bool GetOSVersion(int* major, int* minor, int* bugfix);

View File

@@ -30,12 +30,14 @@
 TEST(MacUtilsTest, GetOsVersionName) {
   talk_base::MacOSVersionName ver = talk_base::GetOSVersionName();
+  LOG(LS_INFO) << "GetOsVersionName " << ver;
   EXPECT_NE(talk_base::kMacOSUnknown, ver);
 }

 TEST(MacUtilsTest, GetQuickTimeVersion) {
   std::string version;
   EXPECT_TRUE(talk_base::GetQuickTimeVersion(&version));
+  LOG(LS_INFO) << "GetQuickTimeVersion " << version;
 }

 TEST(MacUtilsTest, RunAppleScriptCompileError) {

View File

@@ -58,6 +58,7 @@ SSLAdapter::Create(AsyncSocket* socket) {
 #elif SSL_USE_OPENSSL  // && !SSL_USE_SCHANNEL
   return new OpenSSLAdapter(socket);
 #else  // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
+  delete socket;
   return NULL;
 #endif  // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
 }

View File

@@ -47,7 +47,9 @@ class SSLAdapter : public AsyncSocketAdapter {
   // negotiation will begin as soon as the socket connects.
   virtual int StartSSL(const char* hostname, bool restartable) = 0;

-  // Create the default SSL adapter for this platform
+  // Create the default SSL adapter for this platform. On failure, returns NULL
+  // and deletes |socket|. Otherwise, the returned SSLAdapter takes ownership
+  // of |socket|.
   static SSLAdapter* Create(AsyncSocket* socket);

  private:
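A sketch of how a caller would respect the ownership contract documented above: the socket must not be deleted by the caller once it has been handed to Create(), whether the call succeeds or fails. The include path and the MakeSocket() helper below are assumptions for illustration, not part of this change:

#include "talk/base/ssladapter.h"  // Assumed header location in the talk/ tree.

// Hypothetical helper that produces a connected talk_base::AsyncSocket*.
talk_base::AsyncSocket* MakeSocket();

talk_base::SSLAdapter* WrapWithSsl() {
  talk_base::AsyncSocket* socket = MakeSocket();
  talk_base::SSLAdapter* ssl = talk_base::SSLAdapter::Create(socket);
  if (ssl == NULL) {
    // |socket| has already been deleted by Create(); do NOT delete it here.
    return NULL;
  }
  // On success the adapter owns |socket|; deleting |ssl| later releases both.
  return ssl;
}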

View File

@@ -1601,7 +1601,7 @@ void CallClient::PrintStats() const {
        vmi.senders.begin(); it != vmi.senders.end(); ++it) {
     console_->PrintLine("Sender: ssrc=%u codec='%s' bytes=%d packets=%d "
                         "rtt=%d jitter=%d",
-                        it->ssrc, it->codec_name.c_str(), it->bytes_sent,
+                        it->ssrc(), it->codec_name.c_str(), it->bytes_sent,
                         it->packets_sent, it->rtt_ms, it->jitter_ms);
   }
@@ -1609,7 +1609,7 @@ void CallClient::PrintStats() const {
        vmi.receivers.begin(); it != vmi.receivers.end(); ++it) {
     console_->PrintLine("Receiver: ssrc=%u bytes=%d packets=%d "
                         "jitter=%d loss=%.2f",
-                        it->ssrc, it->bytes_rcvd, it->packets_rcvd,
+                        it->ssrc(), it->bytes_rcvd, it->packets_rcvd,
                         it->jitter_ms, it->fraction_lost);
   }
 }

View File

@@ -173,6 +173,7 @@ struct AudioOptions {
     experimental_agc.SetFrom(change.experimental_agc);
     experimental_aec.SetFrom(change.experimental_aec);
     aec_dump.SetFrom(change.aec_dump);
+    experimental_acm.SetFrom(change.experimental_acm);
     tx_agc_target_dbov.SetFrom(change.tx_agc_target_dbov);
     tx_agc_digital_compression_gain.SetFrom(
         change.tx_agc_digital_compression_gain);
@@ -200,6 +201,7 @@ struct AudioOptions {
         experimental_aec == o.experimental_aec &&
         adjust_agc_delta == o.adjust_agc_delta &&
         aec_dump == o.aec_dump &&
+        experimental_acm == o.experimental_acm &&
         tx_agc_target_dbov == o.tx_agc_target_dbov &&
         tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain &&
         tx_agc_limiter == o.tx_agc_limiter &&
@@ -227,6 +229,7 @@ struct AudioOptions {
     ost << ToStringIfSet("experimental_agc", experimental_agc);
     ost << ToStringIfSet("experimental_aec", experimental_aec);
     ost << ToStringIfSet("aec_dump", aec_dump);
+    ost << ToStringIfSet("experimental_acm", experimental_acm);
     ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
     ost << ToStringIfSet("tx_agc_digital_compression_gain",
                          tx_agc_digital_compression_gain);
@@ -263,6 +266,7 @@ struct AudioOptions {
   Settable<bool> experimental_agc;
   Settable<bool> experimental_aec;
   Settable<bool> aec_dump;
+  Settable<bool> experimental_acm;
   // Note that tx_agc_* only applies to non-experimental AGC.
   Settable<uint16> tx_agc_target_dbov;
   Settable<uint16> tx_agc_digital_compression_gain;
@@ -313,6 +317,7 @@ struct VideoOptions {
     buffered_mode_latency.SetFrom(change.buffered_mode_latency);
     lower_min_bitrate.SetFrom(change.lower_min_bitrate);
     dscp.SetFrom(change.dscp);
+    suspend_below_min_bitrate.SetFrom(change.suspend_below_min_bitrate);
   }

   bool operator==(const VideoOptions& o) const {
@@ -338,7 +343,8 @@ struct VideoOptions {
             o.system_high_adaptation_threshhold &&
         buffered_mode_latency == o.buffered_mode_latency &&
         lower_min_bitrate == o.lower_min_bitrate &&
-        dscp == o.dscp;
+        dscp == o.dscp &&
+        suspend_below_min_bitrate == o.suspend_below_min_bitrate;
   }

   std::string ToString() const {
@@ -367,6 +373,8 @@ struct VideoOptions {
     ost << ToStringIfSet("buffered mode latency", buffered_mode_latency);
     ost << ToStringIfSet("lower min bitrate", lower_min_bitrate);
     ost << ToStringIfSet("dscp", dscp);
+    ost << ToStringIfSet("suspend below min bitrate",
+                         suspend_below_min_bitrate);
     ost << "}";
     return ost.str();
   }
@@ -415,6 +423,9 @@ struct VideoOptions {
   Settable<bool> lower_min_bitrate;
   // Set DSCP value for packet sent from video channel.
   Settable<bool> dscp;
+  // Enable WebRTC suspension of video. No video frames will be sent when the
+  // bitrate is below the configured minimum bitrate.
+  Settable<bool> suspend_below_min_bitrate;
 };

 // A class for playing out soundclips.
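As the VideoOptions hunks above illustrate, adding a new option touches four places: SetAll/SetFrom, operator==, ToString(), and the member declaration. A reduced standalone sketch of that pattern; Settable here is a tiny stand-in written for this example, not the real talk_base template:

#include <iostream>
#include <sstream>
#include <string>

// Minimal stand-in for Settable<T>: a value plus a "has it been set" flag.
template <typename T>
struct Settable {
  Settable() : set_(false), val_(T()) {}
  void Set(T v) { val_ = v; set_ = true; }
  void SetFrom(const Settable<T>& o) { if (o.set_) Set(o.val_); }
  bool operator==(const Settable<T>& o) const {
    return set_ == o.set_ && val_ == o.val_;
  }
  bool set_;
  T val_;
};

struct VideoOptionsSketch {
  void SetAll(const VideoOptionsSketch& change) {
    suspend_below_min_bitrate.SetFrom(change.suspend_below_min_bitrate);  // 1.
  }
  bool operator==(const VideoOptionsSketch& o) const {
    return suspend_below_min_bitrate == o.suspend_below_min_bitrate;      // 2.
  }
  std::string ToString() const {                                          // 3.
    std::ostringstream ost;
    ost << "suspend below min bitrate: "
        << (suspend_below_min_bitrate.set_ ? "set" : "unset");
    return ost.str();
  }
  Settable<bool> suspend_below_min_bitrate;                               // 4.
};

int main() {
  VideoOptionsSketch base, change;
  change.suspend_below_min_bitrate.Set(true);
  base.SetAll(change);
  std::cout << base.ToString() << std::endl;  // suspend below min bitrate: set
  return 0;
}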
@@ -624,6 +635,35 @@ struct MediaSenderInfo {
         fraction_lost(0.0),
         rtt_ms(0) {
   }
+  void add_ssrc(const SsrcSenderInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32 ssrc) {
+    SsrcSenderInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32> ssrcs() const {
+    std::vector<uint32> retval;
+    for (std::vector<SsrcSenderInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // This will eventually go away.
+  uint32 ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
   int64 bytes_sent;
   int packets_sent;
   int packets_lost;
@@ -641,6 +681,35 @@ struct MediaReceiverInfo {
         packets_lost(0),
         fraction_lost(0.0) {
   }
+  void add_ssrc(const SsrcReceiverInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32 ssrc) {
+    SsrcReceiverInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  std::vector<uint32> ssrcs() const {
+    std::vector<uint32> retval;
+    for (std::vector<SsrcReceiverInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // This will eventually go away.
+  uint32 ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
   int64 bytes_rcvd;
   int packets_rcvd;
   int packets_lost;
@@ -651,8 +720,7 @@ struct MediaReceiverInfo {
 struct VoiceSenderInfo : public MediaSenderInfo {
   VoiceSenderInfo()
-      : ssrc(0),
-        ext_seqnum(0),
+      : ext_seqnum(0),
         jitter_ms(0),
         audio_level(0),
         aec_quality_min(0.0),
@@ -663,7 +731,6 @@ struct VoiceSenderInfo : public MediaSenderInfo {
         typing_noise_detected(false) {
   }

-  uint32 ssrc;
   int ext_seqnum;
   int jitter_ms;
   int audio_level;
@@ -677,8 +744,7 @@ struct VoiceSenderInfo : public MediaSenderInfo {
 struct VoiceReceiverInfo : public MediaReceiverInfo {
   VoiceReceiverInfo()
-      : ssrc(0),
-        ext_seqnum(0),
+      : ext_seqnum(0),
         jitter_ms(0),
         jitter_buffer_ms(0),
         jitter_buffer_preferred_ms(0),
@@ -687,7 +753,6 @@ struct VoiceReceiverInfo : public MediaReceiverInfo {
         expand_rate(0) {
   }

-  uint32 ssrc;
   int ext_seqnum;
   int jitter_ms;
   int jitter_buffer_ms;
@@ -709,10 +774,11 @@ struct VideoSenderInfo : public MediaSenderInfo {
         framerate_sent(0),
         nominal_bitrate(0),
         preferred_bitrate(0),
-        adapt_reason(0) {
+        adapt_reason(0),
+        capture_jitter_ms(0),
+        avg_encode_ms(0) {
   }

-  std::vector<uint32> ssrcs;
   std::vector<SsrcGroup> ssrc_groups;
   int packets_cached;
   int firs_rcvd;
@@ -724,6 +790,8 @@ struct VideoSenderInfo : public MediaSenderInfo {
   int nominal_bitrate;
   int preferred_bitrate;
   int adapt_reason;
+  int capture_jitter_ms;
+  int avg_encode_ms;
 };

 struct VideoReceiverInfo : public MediaReceiverInfo {
@@ -747,7 +815,6 @@ struct VideoReceiverInfo : public MediaReceiverInfo {
         current_delay_ms(0) {
   }

-  std::vector<uint32> ssrcs;
   std::vector<SsrcGroup> ssrc_groups;
   int packets_concealed;
   int firs_sent;
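The new local_stats-based accessors replace the per-struct ssrc/ssrcs members removed above, so call sites now populate and read SSRCs through add_ssrc()/ssrc()/ssrcs(). A short usage sketch against this header; the include path is assumed to be talk/media/base/mediachannel.h and the snippet is meant to be built inside the talk/ tree:

#include <cassert>
#include "talk/media/base/mediachannel.h"  // Assumed location of this header.

int main() {
  cricket::VideoSenderInfo info;
  info.add_ssrc(1234);               // Wraps the SSRC in an SsrcSenderInfo.
  assert(info.ssrc() == 1234);       // First (and only) local stat.
  assert(info.ssrcs().size() == 1);  // Plain list of SSRC numbers.

  cricket::VideoSenderInfo empty;
  assert(empty.ssrc() == 0);         // Documented fallback when no stats exist.
  return 0;
}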

View File

@@ -259,10 +259,12 @@ void RtpDataMediaChannel::OnPacketReceived(talk_base::Buffer* packet) {
   DataCodec codec;
   if (!FindCodecById(recv_codecs_, header.payload_type, &codec)) {
-    LOG(LS_WARNING) << "Not receiving packet "
-                    << header.ssrc << ":" << header.seq_num
-                    << " (" << data_len << ")"
-                    << " because unknown payload id: " << header.payload_type;
+    // For bundling, this will be logged for every message.
+    // So disable this logging.
+    // LOG(LS_WARNING) << "Not receiving packet "
+    //                 << header.ssrc << ":" << header.seq_num
+    //                 << " (" << data_len << ")"
+    //                 << " because unknown payload id: " << header.payload_type;
     return;
   }

View File

@@ -27,6 +27,7 @@
 #include "talk/media/base/streamparams.h"

+#include <list>
 #include <sstream>

 namespace cricket {
@@ -180,4 +181,49 @@ bool RemoveStreamByIds(StreamParamsVec* streams,
   return RemoveStream(streams, StreamSelector(groupid, id));
 }

+bool IsOneSsrcStream(const StreamParams& sp) {
+  if (sp.ssrcs.size() == 1 && sp.ssrc_groups.empty()) {
+    return true;
+  }
+  if (sp.ssrcs.size() == 2) {
+    const SsrcGroup* fid_group = sp.get_ssrc_group(kFidSsrcGroupSemantics);
+    if (fid_group != NULL) {
+      return (sp.ssrcs == fid_group->ssrcs);
+    }
+  }
+  return false;
+}
+
+static void RemoveFirst(std::list<uint32>* ssrcs, uint32 value) {
+  std::list<uint32>::iterator it =
+      std::find(ssrcs->begin(), ssrcs->end(), value);
+  if (it != ssrcs->end()) {
+    ssrcs->erase(it);
+  }
+}
+
+bool IsSimulcastStream(const StreamParams& sp) {
+  const SsrcGroup* const sg = sp.get_ssrc_group(kSimSsrcGroupSemantics);
+  if (sg == NULL || sg->ssrcs.size() < 2) {
+    return false;
+  }
+  // Start with all StreamParams SSRCs. Remove simulcast SSRCs (from sg) and
+  // RTX SSRCs. If we still have SSRCs left, we don't know what they're for.
+  // Also we remove first-found SSRCs only. So duplicates should lead to errors.
+  std::list<uint32> sp_ssrcs(sp.ssrcs.begin(), sp.ssrcs.end());
+  for (size_t i = 0; i < sg->ssrcs.size(); ++i) {
+    RemoveFirst(&sp_ssrcs, sg->ssrcs[i]);
+  }
+  for (size_t i = 0; i < sp.ssrc_groups.size(); ++i) {
+    const SsrcGroup& group = sp.ssrc_groups[i];
+    if (group.semantics.compare(kFidSsrcGroupSemantics) != 0 ||
+        group.ssrcs.size() != 2) {
+      continue;
+    }
+    RemoveFirst(&sp_ssrcs, group.ssrcs[1]);
+  }
+  // If there's SSRCs left that we don't know how to handle, we bail out.
+  return sp_ssrcs.size() == 0;
+}
+
 }  // namespace cricket
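A worked example of the elimination logic in IsSimulcastStream(): for a stream built from primary SSRCs {1,2,3} and RTX SSRCs {4,5,6} (sp.ssrcs = {1,2,3,4,5,6}, one SIM group {1,2,3} and FID groups {1,4}, {2,5}, {3,6}), removing the SIM members leaves {4,5,6}, and removing each FID group's second member empties the list, so the function returns true. The sketch below walks those two removal passes with plain std containers; it is an illustration, not the cricket code:

#include <algorithm>
#include <cassert>
#include <list>

static void RemoveFirst(std::list<unsigned>* ssrcs, unsigned value) {
  std::list<unsigned>::iterator it =
      std::find(ssrcs->begin(), ssrcs->end(), value);
  if (it != ssrcs->end())
    ssrcs->erase(it);
}

int main() {
  unsigned all[] = {1, 2, 3, 4, 5, 6};  // sp.ssrcs
  unsigned sim[] = {1, 2, 3};           // SIM group members
  unsigned fid_second[] = {4, 5, 6};    // second member of each FID pair
  std::list<unsigned> left(all, all + 6);
  for (size_t i = 0; i < 3; ++i)
    RemoveFirst(&left, sim[i]);         // -> {4, 5, 6}
  for (size_t i = 0; i < 3; ++i)
    RemoveFirst(&left, fid_second[i]);  // -> {}
  assert(left.empty());                 // IsSimulcastStream() would return true.
  return 0;
}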

View File

@@ -213,6 +213,16 @@ bool RemoveStreamByIds(StreamParamsVec* streams,
                        const std::string& groupid,
                        const std::string& id);

+// Checks if |sp| defines parameters for a single primary stream. There may
+// be an RTX stream associated with the primary stream. Leaving as non-static so
+// we can test this function.
+bool IsOneSsrcStream(const StreamParams& sp);
+
+// Checks if |sp| defines parameters for one Simulcast stream. There may be RTX
+// streams associated with the simulcast streams. Leaving as non-static so we
+// can test this function.
+bool IsSimulcastStream(const StreamParams& sp);
+
 }  // namespace cricket

 #endif  // TALK_MEDIA_BASE_STREAMPARAMS_H_

View File

@@ -29,8 +29,10 @@
 #include "talk/media/base/streamparams.h"
 #include "talk/media/base/testutils.h"

-static const uint32 kSscrs1[] = {1};
-static const uint32 kSscrs2[] = {1, 2};
+static const uint32 kSsrcs1[] = {1};
+static const uint32 kSsrcs2[] = {1, 2};
+static const uint32 kSsrcs3[] = {1, 2, 3};
+static const uint32 kRtxSsrcs3[] = {4, 5, 6};

 static cricket::StreamParams CreateStreamParamsWithSsrcGroup(
     const std::string& semantics, const uint32 ssrcs_in[], size_t len) {
@@ -44,10 +46,10 @@ static cricket::StreamParams CreateStreamParamsWithSsrcGroup(
 TEST(SsrcGroup, EqualNotEqual) {
   cricket::SsrcGroup ssrc_groups[] = {
-    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSscrs1)),
-    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSscrs2)),
-    cricket::SsrcGroup("Abc", MAKE_VECTOR(kSscrs2)),
-    cricket::SsrcGroup("abc", MAKE_VECTOR(kSscrs2)),
+    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)),
+    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)),
+    cricket::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)),
+    cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)),
   };

   for (size_t i = 0; i < ARRAY_SIZE(ssrc_groups); ++i) {
@@ -59,18 +61,18 @@ TEST(SsrcGroup, EqualNotEqual) {
 }

 TEST(SsrcGroup, HasSemantics) {
-  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
   EXPECT_TRUE(sg1.has_semantics("ABC"));

-  cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1));
   EXPECT_FALSE(sg2.has_semantics("ABC"));

-  cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1));
   EXPECT_FALSE(sg3.has_semantics("ABC"));
 }

 TEST(SsrcGroup, ToString) {
-  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
   EXPECT_STREQ("{semantics:ABC;ssrcs:[1]}", sg1.ToString().c_str());
 }
@@ -88,22 +90,22 @@ TEST(StreamParams, CreateLegacy) {
 TEST(StreamParams, HasSsrcGroup) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_EQ(2U, sp.ssrcs.size());
-  EXPECT_EQ(kSscrs2[0], sp.first_ssrc());
+  EXPECT_EQ(kSsrcs2[0], sp.first_ssrc());
   EXPECT_TRUE(sp.has_ssrcs());
-  EXPECT_TRUE(sp.has_ssrc(kSscrs2[0]));
-  EXPECT_TRUE(sp.has_ssrc(kSscrs2[1]));
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[0]));
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[1]));
   EXPECT_TRUE(sp.has_ssrc_group("XYZ"));
   EXPECT_EQ(1U, sp.ssrc_groups.size());
   EXPECT_EQ(2U, sp.ssrc_groups[0].ssrcs.size());
-  EXPECT_EQ(kSscrs2[0], sp.ssrc_groups[0].ssrcs[0]);
-  EXPECT_EQ(kSscrs2[1], sp.ssrc_groups[0].ssrcs[1]);
+  EXPECT_EQ(kSsrcs2[0], sp.ssrc_groups[0].ssrcs[0]);
+  EXPECT_EQ(kSsrcs2[1], sp.ssrc_groups[0].ssrcs[1]);
 }

 TEST(StreamParams, GetSsrcGroup) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_EQ(NULL, sp.get_ssrc_group("xyz"));
   EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ"));
 }
@@ -112,13 +114,13 @@ TEST(StreamParams, EqualNotEqual) {
   cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1);
   cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2);
   cricket::StreamParams sg1 =
-      CreateStreamParamsWithSsrcGroup("ABC", kSscrs1, ARRAY_SIZE(kSscrs1));
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, ARRAY_SIZE(kSsrcs1));
   cricket::StreamParams sg2 =
-      CreateStreamParamsWithSsrcGroup("ABC", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sg3 =
-      CreateStreamParamsWithSsrcGroup("Abc", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sg4 =
-      CreateStreamParamsWithSsrcGroup("abc", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4};

   for (size_t i = 0; i < ARRAY_SIZE(sps); ++i) {
@@ -159,7 +161,90 @@ TEST(StreamParams, FidFunctions) {
 TEST(StreamParams, ToString) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_STREQ("{ssrcs:[1,2];ssrc_groups:{semantics:XYZ;ssrcs:[1,2]};}",
                sp.ToString().c_str());
 }
+
+TEST(StreamParams, TestIsOneSsrcStream_LegacyStream) {
+  EXPECT_TRUE(
+      cricket::IsOneSsrcStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimulcastStream) {
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_LegacyStream) {
+  EXPECT_FALSE(
+      cricket::IsSimulcastStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimulcastStream) {
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_TRUE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_InvalidStreams) {
+  // stream1 has extra non-sim, non-fid ssrc.
+  cricket::StreamParams stream1 =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  stream1.add_ssrc(25);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream1));
+
+  // stream2 has invalid fid-group (no primary).
+  cricket::StreamParams stream2;
+  stream2.add_ssrc(13);
+  EXPECT_TRUE(stream2.AddFidSsrc(13, 14));
+  std::remove(stream2.ssrcs.begin(), stream2.ssrcs.end(), 13);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream2));
+
+  // stream3 has two SIM groups.
+  cricket::StreamParams stream3 =
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2));
+  std::vector<uint32> sim_ssrcs = MAKE_VECTOR(kRtxSsrcs3);
+  cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, sim_ssrcs);
+  for (size_t i = 0; i < sim_ssrcs.size(); i++) {
+    stream3.add_ssrc(sim_ssrcs[i]);
+  }
+  stream3.ssrc_groups.push_back(sg);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream3));
+}

View File

@@ -336,4 +336,30 @@ bool VideoFrameEqual(const VideoFrame* frame0, const VideoFrame* frame1) {
   return true;
 }

+cricket::StreamParams CreateSimStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs) {
+  cricket::StreamParams sp;
+  cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, ssrcs);
+  sp.ssrcs = ssrcs;
+  sp.ssrc_groups.push_back(sg);
+  sp.cname = cname;
+  return sp;
+}
+
+// There should be an rtx_ssrc per ssrc.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs,
+    const std::vector<uint32>& rtx_ssrcs) {
+  cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs);
+  for (size_t i = 0; i < ssrcs.size(); ++i) {
+    sp.ssrcs.push_back(rtx_ssrcs[i]);
+    std::vector<uint32> fid_ssrcs;
+    fid_ssrcs.push_back(ssrcs[i]);
+    fid_ssrcs.push_back(rtx_ssrcs[i]);
+    cricket::SsrcGroup fid_group(cricket::kFidSsrcGroupSemantics, fid_ssrcs);
+    sp.ssrc_groups.push_back(fid_group);
+  }
+  return sp;
+}
+
 }  // namespace cricket
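For concreteness, CreateSimWithRtxStreamParams("cname", {1,2,3}, {4,5,6}) produces sp.ssrcs = {1,2,3,4,5,6} and four ssrc_groups: SIM{1,2,3}, FID{1,4}, FID{2,5} and FID{3,6}. A usage sketch against these helpers, intended to be built inside the talk/ tree; it assumes MAKE_VECTOR and the uint32 typedef are available through the included test utilities, as they are in the unit test above:

#include <cassert>
#include "talk/media/base/streamparams.h"
#include "talk/media/base/testutils.h"

static const uint32 kPrimary[] = {1, 2, 3};
static const uint32 kRtx[] = {4, 5, 6};

int main() {
  cricket::StreamParams sp = cricket::CreateSimWithRtxStreamParams(
      "cname", MAKE_VECTOR(kPrimary), MAKE_VECTOR(kRtx));
  assert(sp.ssrcs.size() == 6);        // {1,2,3,4,5,6}
  assert(sp.ssrc_groups.size() == 4);  // SIM{1,2,3}, FID{1,4}, FID{2,5}, FID{3,6}
  assert(cricket::IsSimulcastStream(sp));
  assert(!cricket::IsOneSsrcStream(sp));
  return 0;
}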

View File

@@ -237,6 +237,16 @@ bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
   }
   return false;
 }

+// Create Simulcast StreamParams with given |ssrcs| and |cname|.
+cricket::StreamParams CreateSimStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs);
+// Create Simulcast stream with given |ssrcs| and |rtx_ssrcs|.
+// The number of |rtx_ssrcs| must match number of |ssrcs|.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs,
+    const std::vector<uint32>& rtx_ssrcs);
+
 }  // namespace cricket

 #endif  // TALK_MEDIA_BASE_TESTUTILS_H_

View File

@@ -69,11 +69,13 @@ inline bool IsEqualCodec(const cricket::VideoCodec& a,
          IsEqualRes(a, b.width, b.height, b.framerate);
 }

+namespace std {
 inline std::ostream& operator<<(std::ostream& s, const cricket::VideoCodec& c) {
   s << "{" << c.name << "(" << c.id << "), "
     << c.width << "x" << c.height << "x" << c.framerate << "}";
   return s;
 }
+}  // namespace std

 inline int TimeBetweenSend(const cricket::VideoCodec& codec) {
   return static_cast<int>(
@@ -788,9 +790,9 @@ class VideoMediaChannelTest : public testing::Test,
     EXPECT_GT(info.senders[0].framerate_sent, 0);

     ASSERT_EQ(1U, info.receivers.size());
-    EXPECT_EQ(1U, info.senders[0].ssrcs.size());
-    EXPECT_EQ(1U, info.receivers[0].ssrcs.size());
-    EXPECT_EQ(info.senders[0].ssrcs[0], info.receivers[0].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+    EXPECT_EQ(1U, info.receivers[0].ssrcs().size());
+    EXPECT_EQ(info.senders[0].ssrcs()[0], info.receivers[0].ssrcs()[0]);
     EXPECT_EQ(NumRtpBytes(), info.receivers[0].bytes_rcvd);
     EXPECT_EQ(NumRtpPackets(), info.receivers[0].packets_rcvd);
     EXPECT_EQ(0.0, info.receivers[0].fraction_lost);
@@ -847,8 +849,8 @@ class VideoMediaChannelTest : public testing::Test,
     ASSERT_EQ(2U, info.receivers.size());
     for (size_t i = 0; i < info.receivers.size(); ++i) {
-      EXPECT_EQ(1U, info.receivers[i].ssrcs.size());
-      EXPECT_EQ(i + 1, info.receivers[i].ssrcs[0]);
+      EXPECT_EQ(1U, info.receivers[i].ssrcs().size());
+      EXPECT_EQ(i + 1, info.receivers[i].ssrcs()[0]);
       EXPECT_EQ(NumRtpBytes(), info.receivers[i].bytes_rcvd);
       EXPECT_EQ(NumRtpPackets(), info.receivers[i].packets_rcvd);
       EXPECT_EQ(0.0, info.receivers[i].fraction_lost);
@@ -903,12 +905,12 @@ class VideoMediaChannelTest : public testing::Test,
     ASSERT_EQ(2U, info.senders.size());
     EXPECT_EQ(NumRtpPackets(),
         info.senders[0].packets_sent + info.senders[1].packets_sent);
-    EXPECT_EQ(1U, info.senders[0].ssrcs.size());
-    EXPECT_EQ(1234U, info.senders[0].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+    EXPECT_EQ(1234U, info.senders[0].ssrcs()[0]);
     EXPECT_EQ(DefaultCodec().width, info.senders[0].frame_width);
     EXPECT_EQ(DefaultCodec().height, info.senders[0].frame_height);
-    EXPECT_EQ(1U, info.senders[1].ssrcs.size());
-    EXPECT_EQ(5678U, info.senders[1].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[1].ssrcs().size());
+    EXPECT_EQ(5678U, info.senders[1].ssrcs()[0]);
     EXPECT_EQ(1024, info.senders[1].frame_width);
     EXPECT_EQ(768, info.senders[1].frame_height);
     // The capturer must be unregistered here as it runs out of it's scope next.

View File

@@ -276,6 +276,8 @@ class FakeWebRtcVideoEngine
         send(false),
         receive_(false),
         can_transmit_(true),
+        remote_rtx_ssrc_(-1),
+        rtx_send_payload_type(-1),
         rtcp_status_(webrtc::kRtcpNone),
         key_frame_request_method_(webrtc::kViEKeyFrameRequestNone),
         tmmbr_(false),
@@ -306,6 +308,9 @@ class FakeWebRtcVideoEngine
     bool receive_;
     bool can_transmit_;
     std::map<int, int> ssrcs_;
+    std::map<int, int> rtx_ssrcs_;
+    int remote_rtx_ssrc_;
+    int rtx_send_payload_type;
     std::string cname_;
     webrtc::ViERTCPMode rtcp_status_;
     webrtc::ViEKeyFrameRequestMethod key_frame_request_method_;
@@ -500,10 +505,23 @@ class FakeWebRtcVideoEngine
     return static_cast<int>(
         channels_.find(channel)->second->ssrcs_.size());
   }
+  int GetNumRtxSsrcs(int channel) const {
+    WEBRTC_ASSERT_CHANNEL(channel);
+    return static_cast<int>(
+        channels_.find(channel)->second->rtx_ssrcs_.size());
+  }
   bool GetIsTransmitting(int channel) const {
     WEBRTC_ASSERT_CHANNEL(channel);
     return channels_.find(channel)->second->can_transmit_;
   }
+  int GetRtxSsrc(int channel, int simulcast_idx) const {
+    WEBRTC_ASSERT_CHANNEL(channel);
+    if (channels_.find(channel)->second->rtx_ssrcs_.find(simulcast_idx) ==
+        channels_.find(channel)->second->rtx_ssrcs_.end()) {
+      return -1;
+    }
+    return channels_.find(channel)->second->rtx_ssrcs_[simulcast_idx];
+  }
   bool ReceiveCodecRegistered(int channel,
                               const webrtc::VideoCodec& codec) const {
     WEBRTC_ASSERT_CHANNEL(channel);
@@ -557,6 +575,14 @@ class FakeWebRtcVideoEngine
     WEBRTC_ASSERT_CHANNEL(channel);
     channels_[channel]->receive_bandwidth_ = receive_bandwidth;
   };
+  int GetRtxSendPayloadType(int channel) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    return channels_[channel]->rtx_send_payload_type;
+  }
+  int GetRemoteRtxSsrc(int channel) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    return channels_.find(channel)->second->remote_rtx_ssrc_;
+  }

   WEBRTC_STUB(Release, ());
@@ -599,6 +625,9 @@ class FakeWebRtcVideoEngine
   }
   WEBRTC_STUB(RegisterCpuOveruseObserver,
               (int channel, webrtc::CpuOveruseObserver* observer));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB(CpuOveruseMeasures, (int, int*, int*));
+#endif
   WEBRTC_STUB(ConnectAudioChannel, (const int, const int));
   WEBRTC_STUB(DisconnectAudioChannel, (const int));
   WEBRTC_FUNC(StartSend, (const int channel)) {
@@ -716,6 +745,9 @@ class FakeWebRtcVideoEngine
   WEBRTC_STUB(WaitForFirstKeyFrame, (const int, const bool));
   WEBRTC_STUB(StartDebugRecording, (int, const char*));
   WEBRTC_STUB(StopDebugRecording, (int));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_VOID_STUB(SuspendBelowMinBitrate, (int));
+#endif

   // webrtc::ViECapture
   WEBRTC_STUB(NumberOfCaptureDevices, ());
@@ -851,11 +883,28 @@ class FakeWebRtcVideoEngine
                             const webrtc::StreamType usage,
                             const unsigned char idx)) {
     WEBRTC_CHECK_CHANNEL(channel);
-    channels_[channel]->ssrcs_[idx] = ssrc;
+    switch (usage) {
+      case webrtc::kViEStreamTypeNormal:
+        channels_[channel]->ssrcs_[idx] = ssrc;
+        break;
+      case webrtc::kViEStreamTypeRtx:
+        channels_[channel]->rtx_ssrcs_[idx] = ssrc;
+        break;
+      default:
+        return -1;
+    }
     return 0;
   }
-  WEBRTC_STUB_CONST(SetRemoteSSRCType, (const int,
-      const webrtc::StreamType, const unsigned int));
+
+  WEBRTC_FUNC_CONST(SetRemoteSSRCType, (const int channel,
+        const webrtc::StreamType usage, const unsigned int ssrc)) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    if (usage == webrtc::kViEStreamTypeRtx) {
+      channels_.find(channel)->second->remote_rtx_ssrc_ = ssrc;
+      return 0;
+    }
+    return -1;
+  }

   WEBRTC_FUNC_CONST(GetLocalSSRC, (const int channel,
                                    unsigned int& ssrc)) {
@@ -867,7 +916,12 @@ class FakeWebRtcVideoEngine
   WEBRTC_STUB_CONST(GetRemoteSSRC, (const int, unsigned int&));
   WEBRTC_STUB_CONST(GetRemoteCSRCs, (const int, unsigned int*));

-  WEBRTC_STUB(SetRtxSendPayloadType, (const int, const uint8));
+  WEBRTC_FUNC(SetRtxSendPayloadType, (const int channel,
+                                      const uint8 payload_type)) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    channels_[channel]->rtx_send_payload_type = payload_type;
+    return 0;
+  }
   WEBRTC_STUB(SetRtxReceivePayloadType, (const int, const uint8));

   WEBRTC_STUB(SetStartSequenceNumber, (const int, unsigned short));
@@ -979,6 +1033,14 @@ class FakeWebRtcVideoEngine
       unsigned int&, unsigned int&, unsigned int&, int&));
   WEBRTC_STUB_CONST(GetRTPStatistics, (const int, unsigned int&, unsigned int&,
       unsigned int&, unsigned int&));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB_CONST(GetReceiveChannelRtcpStatistics, (const int,
+      webrtc::RtcpStatistics&, int&));
+  WEBRTC_STUB_CONST(GetSendChannelRtcpStatistics, (const int,
+      webrtc::RtcpStatistics&, int&));
+  WEBRTC_STUB_CONST(GetRtpStatistics, (const int, webrtc::StreamDataCounters&,
+      webrtc::StreamDataCounters&));
+#endif
   WEBRTC_FUNC_CONST(GetBandwidthUsage, (const int channel,
       unsigned int& total_bitrate, unsigned int& video_bitrate,
       unsigned int& fec_bitrate, unsigned int& nack_bitrate)) {
@@ -1021,6 +1083,32 @@ class FakeWebRtcVideoEngine
     }
     return 0;
   }
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB(RegisterSendChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(DeregisterSendChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(RegisterReceiveChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(DeregisterReceiveChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(RegisterSendChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(DeregisterSendChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(RegisterReceiveChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(DeregisterReceiveChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(RegisterSendBitrateObserver,
+                    (int, webrtc::BitrateStatisticsObserver*));
+  WEBRTC_STUB(DeregisterSendBitrateObserver,
+                    (int, webrtc::BitrateStatisticsObserver*));
+  WEBRTC_STUB(RegisterSendFrameCountObserver,
+                    (int, webrtc::FrameCountObserver*));
+  WEBRTC_STUB(DeregisterSendFrameCountObserver,
+                    (int, webrtc::FrameCountObserver*));
+#endif
   WEBRTC_STUB(StartRTPDump, (const int, const char*, webrtc::RTPDirections));
   WEBRTC_STUB(StopRTPDump, (const int, webrtc::RTPDirections));

View File

@@ -39,6 +39,8 @@
#include "talk/media/base/voiceprocessor.h" #include "talk/media/base/voiceprocessor.h"
#include "talk/media/webrtc/fakewebrtccommon.h" #include "talk/media/webrtc/fakewebrtccommon.h"
#include "talk/media/webrtc/webrtcvoe.h" #include "talk/media/webrtc/webrtcvoe.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/common.h"
namespace cricket { namespace cricket {
@@ -75,7 +77,7 @@ class FakeWebRtcVoiceEngine
int dtmf_length_ms; int dtmf_length_ms;
}; };
struct Channel { struct Channel {
Channel() explicit Channel(bool use_experimental_acm)
: external_transport(false), : external_transport(false),
send(false), send(false),
playout(false), playout(false),
@@ -95,7 +97,8 @@ class FakeWebRtcVoiceEngine
fec_type(117), fec_type(117),
nack_max_packets(0), nack_max_packets(0),
send_ssrc(0), send_ssrc(0),
level_header_ext_(-1) { level_header_ext_(-1),
using_experimental_acm(use_experimental_acm) {
memset(&send_codec, 0, sizeof(send_codec)); memset(&send_codec, 0, sizeof(send_codec));
memset(&rx_agc_config, 0, sizeof(rx_agc_config)); memset(&rx_agc_config, 0, sizeof(rx_agc_config));
} }
@@ -124,6 +127,7 @@ class FakeWebRtcVoiceEngine
std::vector<webrtc::CodecInst> recv_codecs; std::vector<webrtc::CodecInst> recv_codecs;
webrtc::CodecInst send_codec; webrtc::CodecInst send_codec;
std::list<std::string> packets; std::list<std::string> packets;
bool using_experimental_acm;
}; };
FakeWebRtcVoiceEngine(const cricket::AudioCodec* const* codecs, FakeWebRtcVoiceEngine(const cricket::AudioCodec* const* codecs,
@@ -199,6 +203,10 @@ class FakeWebRtcVoiceEngine
int GetNACKMaxPackets(int channel) { int GetNACKMaxPackets(int channel) {
return channels_[channel]->nack_max_packets; return channels_[channel]->nack_max_packets;
} }
bool IsUsingExperimentalAcm(int channel) {
WEBRTC_ASSERT_CHANNEL(channel);
return channels_[channel]->using_experimental_acm;
}
int GetSendCNPayloadType(int channel, bool wideband) { int GetSendCNPayloadType(int channel, bool wideband) {
return (wideband) ? return (wideband) ?
channels_[channel]->cn16_type : channels_[channel]->cn16_type :
@@ -252,11 +260,11 @@ class FakeWebRtcVoiceEngine
true); true);
} }
} }
int AddChannel() {
int AddChannel(bool use_experimental_acm) {
if (fail_create_channel_) { if (fail_create_channel_) {
return -1; return -1;
} }
Channel* ch = new Channel();
Channel* ch = new Channel(use_experimental_acm);
for (int i = 0; i < NumOfCodecs(); ++i) { for (int i = 0; i < NumOfCodecs(); ++i) {
webrtc::CodecInst codec; webrtc::CodecInst codec;
GetCodec(i, codec); GetCodec(i, codec);
@@ -288,11 +296,14 @@ class FakeWebRtcVoiceEngine
return NULL; return NULL;
} }
WEBRTC_FUNC(CreateChannel, ()) { WEBRTC_FUNC(CreateChannel, ()) {
return AddChannel();
return AddChannel(false);
} }
#ifdef USE_WEBRTC_DEV_BRANCH #ifdef USE_WEBRTC_DEV_BRANCH
WEBRTC_FUNC(CreateChannel, (const webrtc::Config& /*config*/)) {
return AddChannel();
WEBRTC_FUNC(CreateChannel, (const webrtc::Config& config)) {
talk_base::scoped_ptr<webrtc::AudioCodingModule> acm(
config.Get<webrtc::AudioCodingModuleFactory>().Create(0));
return AddChannel(strcmp(acm->Version(), webrtc::kExperimentalAcmVersion)
== 0);
} }
#endif #endif
WEBRTC_FUNC(DeleteChannel, (int channel)) { WEBRTC_FUNC(DeleteChannel, (int channel)) {

@@ -393,7 +393,8 @@ class WebRtcEncoderObserver : public webrtc::ViEEncoderObserver {
explicit WebRtcEncoderObserver(int video_channel) explicit WebRtcEncoderObserver(int video_channel)
: video_channel_(video_channel), : video_channel_(video_channel),
framerate_(0), framerate_(0),
bitrate_(0) {
bitrate_(0),
suspended_(false) {
} }
// virtual functions from VieEncoderObserver. // virtual functions from VieEncoderObserver.
@@ -406,6 +407,12 @@ class WebRtcEncoderObserver : public webrtc::ViEEncoderObserver {
bitrate_ = bitrate; bitrate_ = bitrate;
} }
virtual void SuspendChange(int video_channel, bool is_suspended) {
talk_base::CritScope cs(&crit_);
ASSERT(video_channel_ == video_channel);
suspended_ = is_suspended;
}
int framerate() const { int framerate() const {
talk_base::CritScope cs(&crit_); talk_base::CritScope cs(&crit_);
return framerate_; return framerate_;
@@ -414,12 +421,17 @@ class WebRtcEncoderObserver : public webrtc::ViEEncoderObserver {
talk_base::CritScope cs(&crit_); talk_base::CritScope cs(&crit_);
return bitrate_; return bitrate_;
} }
bool suspended() const {
talk_base::CritScope cs(&crit_);
return suspended_;
}
private: private:
mutable talk_base::CriticalSection crit_; mutable talk_base::CriticalSection crit_;
int video_channel_; int video_channel_;
int framerate_; int framerate_;
int bitrate_; int bitrate_;
bool suspended_;
}; };
class WebRtcLocalStreamInfo { class WebRtcLocalStreamInfo {
@@ -757,9 +769,10 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> {
const WebRtcVideoEngine::VideoCodecPref const WebRtcVideoEngine::VideoCodecPref
WebRtcVideoEngine::kVideoCodecPrefs[] = { WebRtcVideoEngine::kVideoCodecPrefs[] = {
{kVp8PayloadName, 100, 0},
{kRedPayloadName, 116, 1},
{kFecPayloadName, 117, 2},
{kVp8PayloadName, 100, -1, 0},
{kRedPayloadName, 116, -1, 1},
{kFecPayloadName, 117, -1, 2},
{kRtxCodecName, 96, 100, 3},
}; };
// The formats are sorted by the descending order of width. We use the order to // The formats are sorted by the descending order of width. We use the order to
@@ -1319,6 +1332,10 @@ bool WebRtcVideoEngine::RebuildCodecList(const VideoCodec& in_codec) {
if (_stricmp(kVp8PayloadName, codec.name.c_str()) == 0) { if (_stricmp(kVp8PayloadName, codec.name.c_str()) == 0) {
AddDefaultFeedbackParams(&codec); AddDefaultFeedbackParams(&codec);
} }
if (pref.associated_payload_type != -1) {
codec.SetParam(kCodecParamAssociatedPayloadType,
pref.associated_payload_type);
}
video_codecs_.push_back(codec); video_codecs_.push_back(codec);
internal_codec_names.insert(codec.name); internal_codec_names.insert(codec.name);
} }
@@ -1488,6 +1505,7 @@ WebRtcVideoMediaChannel::WebRtcVideoMediaChannel(
remb_enabled_(false), remb_enabled_(false),
render_started_(false), render_started_(false),
first_receive_ssrc_(0), first_receive_ssrc_(0),
send_rtx_type_(-1),
send_red_type_(-1), send_red_type_(-1),
send_fec_type_(-1), send_fec_type_(-1),
send_min_bitrate_(kMinVideoBitrate), send_min_bitrate_(kMinVideoBitrate),
@@ -1564,12 +1582,19 @@ bool WebRtcVideoMediaChannel::SetSendCodecs(
if (sending_) { if (sending_) {
ConvertToCricketVideoCodec(*send_codec_, &current); ConvertToCricketVideoCodec(*send_codec_, &current);
} }
std::map<int, int> primary_rtx_pt_mapping;
for (std::vector<VideoCodec>::const_iterator iter = codecs.begin(); for (std::vector<VideoCodec>::const_iterator iter = codecs.begin();
iter != codecs.end(); ++iter) { iter != codecs.end(); ++iter) {
if (_stricmp(iter->name.c_str(), kRedPayloadName) == 0) { if (_stricmp(iter->name.c_str(), kRedPayloadName) == 0) {
send_red_type_ = iter->id; send_red_type_ = iter->id;
} else if (_stricmp(iter->name.c_str(), kFecPayloadName) == 0) { } else if (_stricmp(iter->name.c_str(), kFecPayloadName) == 0) {
send_fec_type_ = iter->id; send_fec_type_ = iter->id;
} else if (_stricmp(iter->name.c_str(), kRtxCodecName) == 0) {
int rtx_type = iter->id;
int rtx_primary_type = -1;
if (iter->GetParam(kCodecParamAssociatedPayloadType, &rtx_primary_type)) {
primary_rtx_pt_mapping[rtx_primary_type] = rtx_type;
}
} else if (engine()->CanSendCodec(*iter, current, &checked_codec)) { } else if (engine()->CanSendCodec(*iter, current, &checked_codec)) {
webrtc::VideoCodec wcodec; webrtc::VideoCodec wcodec;
if (engine()->ConvertFromCricketVideoCodec(checked_codec, &wcodec)) { if (engine()->ConvertFromCricketVideoCodec(checked_codec, &wcodec)) {
@@ -1625,6 +1650,14 @@ bool WebRtcVideoMediaChannel::SetSendCodecs(
// Select the first matched codec. // Select the first matched codec.
webrtc::VideoCodec& codec(send_codecs[0]); webrtc::VideoCodec& codec(send_codecs[0]);
// Set the RTX payload type if the primary codec is now active. This value
// will be used in SetSendCodec.
std::map<int, int>::const_iterator rtx_it =
primary_rtx_pt_mapping.find(static_cast<int>(codec.plType));
if (rtx_it != primary_rtx_pt_mapping.end()) {
send_rtx_type_ = rtx_it->second;
}
if (!SetSendCodec( if (!SetSendCodec(
codec, codec.minBitrate, codec.startBitrate, codec.maxBitrate)) { codec, codec.minBitrate, codec.startBitrate, codec.maxBitrate)) {
return false; return false;
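The two hunks above wire RTX negotiation into SetSendCodecs: the codec loop records a primary-to-RTX payload-type mapping from each RTX codec's "apt" (associated payload type) parameter, and the payload type of the selected send codec is then looked up in that map to set send_rtx_type_. A minimal, self-contained sketch of that mapping step, using simplified stand-in types rather than the real cricket::VideoCodec API:

#include <map>
#include <string>
#include <vector>

// Hypothetical, simplified stand-in for the codec data used above.
struct SimpleCodec {
  int id;                       // payload type
  std::string name;             // e.g. "VP8", "rtx"
  int associated_payload_type;  // value of the "apt" param; -1 if not RTX
};

// Returns the RTX payload type associated with |primary_payload_type|,
// or -1 if no RTX codec in |codecs| references it.
int FindRtxPayloadType(const std::vector<SimpleCodec>& codecs,
                       int primary_payload_type) {
  std::map<int, int> primary_to_rtx;  // primary PT -> RTX PT
  for (size_t i = 0; i < codecs.size(); ++i) {
    if (codecs[i].name == "rtx" && codecs[i].associated_payload_type != -1) {
      primary_to_rtx[codecs[i].associated_payload_type] = codecs[i].id;
    }
  }
  std::map<int, int>::const_iterator it =
      primary_to_rtx.find(primary_payload_type);
  return it == primary_to_rtx.end() ? -1 : it->second;
}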
@@ -1726,9 +1759,9 @@ bool WebRtcVideoMediaChannel::SetSend(bool send) {
bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) { bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) {
LOG(LS_INFO) << "AddSendStream " << sp.ToString(); LOG(LS_INFO) << "AddSendStream " << sp.ToString();
if (!IsOneSsrcStream(sp)) {
if (!IsOneSsrcStream(sp) && !IsSimulcastStream(sp)) {
LOG(LS_ERROR) << "AddSendStream: bad local stream parameters"; LOG(LS_ERROR) << "AddSendStream: bad local stream parameters";
return false; return false;
} }
uint32 ssrc_key; uint32 ssrc_key;
@@ -1758,6 +1791,11 @@ bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) {
return false; return false;
} }
// Set the corresponding RTX SSRC.
if (!SetLocalRtxSsrc(channel_id, sp, sp.first_ssrc(), 0)) {
return false;
}
// Set RTCP CName. // Set RTCP CName.
if (engine()->vie()->rtp()->SetRTCPCName(channel_id, if (engine()->vie()->rtp()->SetRTCPCName(channel_id,
sp.cname.c_str()) != 0) { sp.cname.c_str()) != 0) {
@@ -1862,10 +1900,11 @@ bool WebRtcVideoMediaChannel::AddRecvStream(const StreamParams& sp) {
return false; return false;
} }
// TODO(perkj): Implement recv media from multiple SSRCs per stream.
if (sp.ssrcs.size() != 1) {
LOG(LS_ERROR) << "WebRtcVideoMediaChannel supports one receiving SSRC per"
<< " stream";
// TODO(perkj): Implement recv media from multiple media SSRCs per stream.
// NOTE: We have two SSRCs per stream when RTX is enabled.
if (!IsOneSsrcStream(sp)) {
LOG(LS_ERROR) << "WebRtcVideoMediaChannel supports one primary SSRC per"
<< " stream and one FID SSRC per primary SSRC.";
return false; return false;
} }
@@ -1878,6 +1917,16 @@ bool WebRtcVideoMediaChannel::AddRecvStream(const StreamParams& sp) {
return false; return false;
} }
// Set the corresponding RTX SSRC.
uint32 rtx_ssrc;
bool has_rtx = sp.GetFidSsrc(sp.first_ssrc(), &rtx_ssrc);
if (has_rtx && engine()->vie()->rtp()->SetRemoteSSRCType(
channel_id, webrtc::kViEStreamTypeRtx, rtx_ssrc) != 0) {
LOG_RTCERR3(SetRemoteSSRCType, channel_id, webrtc::kViEStreamTypeRtx,
rtx_ssrc);
return false;
}
// Get the default renderer. // Get the default renderer.
VideoRenderer* default_renderer = NULL; VideoRenderer* default_renderer = NULL;
if (InConferenceMode()) { if (InConferenceMode()) {
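AddSendStream and AddRecvStream above both pair a primary SSRC with its RTX (FID) SSRC: the receive side calls sp.GetFidSsrc() directly, and the send side goes through SetLocalRtxSsrc(). A rough, self-contained sketch of that pairing lookup, with a hypothetical map standing in for the grouping data StreamParams actually carries:

#include <stdint.h>
#include <map>

// Hypothetical stand-in for the FID (RTX) SSRC pairing in StreamParams.
struct RtxSsrcMapSketch {
  std::map<uint32_t, uint32_t> primary_to_rtx;  // primary SSRC -> RTX SSRC

  // Mirrors the GetFidSsrc() pattern used above: returns true and fills in
  // |rtx_ssrc| when a pairing exists for |primary_ssrc|.
  bool GetFidSsrc(uint32_t primary_ssrc, uint32_t* rtx_ssrc) const {
    std::map<uint32_t, uint32_t>::const_iterator it =
        primary_to_rtx.find(primary_ssrc);
    if (it == primary_to_rtx.end())
      return false;
    *rtx_ssrc = it->second;
    return true;
  }
};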
@@ -2030,10 +2079,6 @@ bool WebRtcVideoMediaChannel::SendIntraFrame() {
return success; return success;
} }
bool WebRtcVideoMediaChannel::IsOneSsrcStream(const StreamParams& sp) {
return (sp.ssrcs.size() == 1 && sp.ssrc_groups.size() == 0);
}
bool WebRtcVideoMediaChannel::HasReadySendChannels() { bool WebRtcVideoMediaChannel::HasReadySendChannels() {
return !send_channels_.empty() && return !send_channels_.empty() &&
((send_channels_.size() > 1) || ((send_channels_.size() > 1) ||
@@ -2235,7 +2280,9 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
WebRtcLocalStreamInfo* channel_stream_info = WebRtcLocalStreamInfo* channel_stream_info =
send_channel->local_stream_info(); send_channel->local_stream_info();
sinfo.ssrcs = send_params->ssrcs;
for (size_t i = 0; i < send_params->ssrcs.size(); ++i) {
sinfo.add_ssrc(send_params->ssrcs[i]);
}
sinfo.codec_name = send_codec_->plName; sinfo.codec_name = send_codec_->plName;
sinfo.bytes_sent = bytes_sent; sinfo.bytes_sent = bytes_sent;
sinfo.packets_sent = packets_sent; sinfo.packets_sent = packets_sent;
@@ -2252,7 +2299,38 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
sinfo.nominal_bitrate = send_channel->encoder_observer()->bitrate(); sinfo.nominal_bitrate = send_channel->encoder_observer()->bitrate();
sinfo.preferred_bitrate = send_max_bitrate_; sinfo.preferred_bitrate = send_max_bitrate_;
sinfo.adapt_reason = send_channel->CurrentAdaptReason(); sinfo.adapt_reason = send_channel->CurrentAdaptReason();
sinfo.capture_jitter_ms = -1;
sinfo.avg_encode_ms = -1;
#ifdef USE_WEBRTC_DEV_BRANCH
int capture_jitter_ms = 0;
int avg_encode_time_ms = 0;
if (engine()->vie()->base()->CpuOveruseMeasures(
channel_id, &capture_jitter_ms, &avg_encode_time_ms) == 0) {
sinfo.capture_jitter_ms = capture_jitter_ms;
sinfo.avg_encode_ms = avg_encode_time_ms;
}
#endif
#ifdef USE_WEBRTC_DEV_BRANCH
// Get received RTCP statistics for the sender (reported by the remote
// client in an RTCP packet), if available.
// It's not a fatal error if we can't, since RTCP may not have arrived
// yet.
webrtc::RtcpStatistics outgoing_stream_rtcp_stats;
int outgoing_stream_rtt_ms;
if (engine_->vie()->rtp()->GetSendChannelRtcpStatistics(
channel_id,
outgoing_stream_rtcp_stats,
outgoing_stream_rtt_ms) == 0) {
// Convert Q8 to float.
sinfo.packets_lost = outgoing_stream_rtcp_stats.cumulative_lost;
sinfo.fraction_lost = static_cast<float>(
outgoing_stream_rtcp_stats.fraction_lost) / (1 << 8);
sinfo.rtt_ms = outgoing_stream_rtt_ms;
}
#else
// Get received RTCP statistics for the sender, if available. // Get received RTCP statistics for the sender, if available.
// It's not a fatal error if we can't, since RTCP may not have arrived // It's not a fatal error if we can't, since RTCP may not have arrived
// yet. // yet.
@@ -2273,6 +2351,7 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
sinfo.fraction_lost = static_cast<float>(r_fraction_lost) / (1 << 8); sinfo.fraction_lost = static_cast<float>(r_fraction_lost) / (1 << 8);
sinfo.rtt_ms = r_rtt_ms; sinfo.rtt_ms = r_rtt_ms;
} }
#endif
info->senders.push_back(sinfo); info->senders.push_back(sinfo);
unsigned int channel_total_bitrate_sent = 0; unsigned int channel_total_bitrate_sent = 0;
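In both the USE_WEBRTC_DEV_BRANCH and legacy branches above, RTCP reports loss as an 8-bit Q8 fixed-point fraction, which the code converts to a float by dividing by (1 << 8). A tiny illustration of that conversion (the helper name is hypothetical, not part of libjingle):

#include <stdint.h>

// RTCP fraction_lost is a Q8 fixed-point value: lost * 256 / expected.
// Dividing by 256 recovers a float fraction in [0, 1); e.g. 64 -> 0.25.
float Q8FractionLostToFloat(uint8_t fraction_lost_q8) {
  return static_cast<float>(fraction_lost_q8) / (1 << 8);
}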
@@ -2327,6 +2406,19 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
ssrc == 0) ssrc == 0)
continue; continue;
#ifdef USE_WEBRTC_DEV_BRANCH
webrtc::StreamDataCounters sent;
webrtc::StreamDataCounters received;
if (engine_->vie()->rtp()->GetRtpStatistics(channel->channel_id(),
sent, received) != 0) {
LOG_RTCERR1(GetRtpStatistics, channel->channel_id());
return false;
}
VideoReceiverInfo rinfo;
rinfo.add_ssrc(ssrc);
rinfo.bytes_rcvd = received.bytes;
rinfo.packets_rcvd = received.packets;
#else
unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv; unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv;
if (engine_->vie()->rtp()->GetRTPStatistics( if (engine_->vie()->rtp()->GetRTPStatistics(
channel->channel_id(), bytes_sent, packets_sent, bytes_recv, channel->channel_id(), bytes_sent, packets_sent, bytes_recv,
@@ -2335,9 +2427,10 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
return false; return false;
} }
VideoReceiverInfo rinfo; VideoReceiverInfo rinfo;
rinfo.ssrcs.push_back(ssrc);
rinfo.add_ssrc(ssrc);
rinfo.bytes_rcvd = bytes_recv; rinfo.bytes_rcvd = bytes_recv;
rinfo.packets_rcvd = packets_recv; rinfo.packets_rcvd = packets_recv;
#endif
rinfo.packets_lost = -1; rinfo.packets_lost = -1;
rinfo.packets_concealed = -1; rinfo.packets_concealed = -1;
rinfo.fraction_lost = -1; // from SentRTCP rinfo.fraction_lost = -1; // from SentRTCP
@@ -2349,6 +2442,20 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
rinfo.framerate_output = fps; rinfo.framerate_output = fps;
channel->decoder_observer()->ExportTo(&rinfo); channel->decoder_observer()->ExportTo(&rinfo);
#ifdef USE_WEBRTC_DEV_BRANCH
// Get our locally created statistics of the received RTP stream.
webrtc::RtcpStatistics incoming_stream_rtcp_stats;
int incoming_stream_rtt_ms;
if (engine_->vie()->rtp()->GetReceiveChannelRtcpStatistics(
channel->channel_id(),
incoming_stream_rtcp_stats,
incoming_stream_rtt_ms) == 0) {
// Convert Q8 to float.
rinfo.packets_lost = incoming_stream_rtcp_stats.cumulative_lost;
rinfo.fraction_lost = static_cast<float>(
incoming_stream_rtcp_stats.fraction_lost) / (1 << 8);
}
#else
// Get sent RTCP statistics. // Get sent RTCP statistics.
uint16 s_fraction_lost; uint16 s_fraction_lost;
unsigned int s_cumulative_lost; unsigned int s_cumulative_lost;
@@ -2362,6 +2469,7 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) {
rinfo.packets_lost = s_cumulative_lost; rinfo.packets_lost = s_cumulative_lost;
rinfo.fraction_lost = static_cast<float>(s_fraction_lost) / (1 << 8); rinfo.fraction_lost = static_cast<float>(s_fraction_lost) / (1 << 8);
} }
#endif
info->receivers.push_back(rinfo); info->receivers.push_back(rinfo);
unsigned int estimated_recv_stream_bandwidth = 0; unsigned int estimated_recv_stream_bandwidth = 0;
@@ -2616,6 +2724,10 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) {
bool dscp_option_changed = (options_.dscp != options.dscp); bool dscp_option_changed = (options_.dscp != options.dscp);
bool suspend_below_min_bitrate_changed =
options.suspend_below_min_bitrate.IsSet() &&
(options_.suspend_below_min_bitrate != options.suspend_below_min_bitrate);
bool conference_mode_turned_off = false; bool conference_mode_turned_off = false;
if (options_.conference_mode.IsSet() && options.conference_mode.IsSet() && if (options_.conference_mode.IsSet() && options.conference_mode.IsSet() &&
options_.conference_mode.GetWithDefaultIfUnset(false) && options_.conference_mode.GetWithDefaultIfUnset(false) &&
@@ -2722,6 +2834,19 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) {
LOG(LS_WARNING) << "Failed to set DSCP settings for video channel"; LOG(LS_WARNING) << "Failed to set DSCP settings for video channel";
} }
} }
if (suspend_below_min_bitrate_changed) {
#ifdef USE_WEBRTC_DEV_BRANCH
if (options_.suspend_below_min_bitrate.GetWithDefaultIfUnset(false)) {
for (SendChannelMap::iterator it = send_channels_.begin();
it != send_channels_.end(); ++it) {
engine()->vie()->codec()->SuspendBelowMinBitrate(
it->second->channel_id());
}
} else {
LOG(LS_WARNING) << "Cannot disable video suspension once it is enabled";
}
#endif
}
return true; return true;
} }
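The suspend_below_min_bitrate handling above behaves as a one-way switch: the option can enable suspension on each send channel, but this path offers no way to turn it back off, so a disable request only logs a warning. A small sketch of that latch behavior (hypothetical type, not the real channel state):

// One-way latch mirroring the suspend-below-min-bitrate handling above.
struct SuspendBelowMinBitrateLatch {
  bool enabled;
  SuspendBelowMinBitrateLatch() : enabled(false) {}

  void Apply(bool requested) {
    if (requested) {
      enabled = true;  // real code calls SuspendBelowMinBitrate(channel_id)
    } else if (enabled) {
      // Real code: LOG(LS_WARNING) that suspension cannot be disabled.
    }
  }
};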
@@ -3317,6 +3442,15 @@ bool WebRtcVideoMediaChannel::SetSendCodec(
return false; return false;
} }
// NOTE: SetRtxSendPayloadType must be called after all simulcast SSRCs
// are configured. Otherwise, SSRCs configured after this point will use
// the primary payload type for RTX.
if (send_rtx_type_ != -1 &&
engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
send_rtx_type_) != 0) {
LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
return false;
}
} }
send_channel->set_interval( send_channel->set_interval(
cricket::VideoFormat::FpsToInterval(target_codec.maxFramerate)); cricket::VideoFormat::FpsToInterval(target_codec.maxFramerate));
@@ -3392,6 +3526,9 @@ void WebRtcVideoMediaChannel::LogSendCodecChange(const std::string& reason) {
<< vie_codec.codecSpecific.VP8.keyFrameInterval; << vie_codec.codecSpecific.VP8.keyFrameInterval;
} }
if (send_rtx_type_ != -1) {
LOG(LS_INFO) << "RTX payload type: " << send_rtx_type_;
}
} }
bool WebRtcVideoMediaChannel::SetReceiveCodecs( bool WebRtcVideoMediaChannel::SetReceiveCodecs(
@@ -3656,6 +3793,22 @@ bool WebRtcVideoMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
header_extension_uri); header_extension_uri);
return SetHeaderExtension(setter, channel_id, extension); return SetHeaderExtension(setter, channel_id, extension);
} }
bool WebRtcVideoMediaChannel::SetLocalRtxSsrc(int channel_id,
const StreamParams& send_params,
uint32 primary_ssrc,
int stream_idx) {
uint32 rtx_ssrc = 0;
bool has_rtx = send_params.GetFidSsrc(primary_ssrc, &rtx_ssrc);
if (has_rtx && engine()->vie()->rtp()->SetLocalSSRC(
channel_id, rtx_ssrc, webrtc::kViEStreamTypeRtx, stream_idx) != 0) {
LOG_RTCERR4(SetLocalSSRC, channel_id, rtx_ssrc,
webrtc::kViEStreamTypeRtx, stream_idx);
return false;
}
return true;
}
} // namespace cricket } // namespace cricket
#endif // HAVE_WEBRTC_VIDEO #endif // HAVE_WEBRTC_VIDEO

@@ -180,6 +180,9 @@ class WebRtcVideoEngine : public sigslot::has_slots<>,
struct VideoCodecPref { struct VideoCodecPref {
const char* name; const char* name;
int payload_type; int payload_type;
// For RTX, this field is the payload-type that RTX applies to.
// For other codecs, it should be set to -1.
int associated_payload_type;
int pref; int pref;
}; };
@@ -356,9 +359,6 @@ class WebRtcVideoMediaChannel : public talk_base::MessageHandler,
bool StopSend(WebRtcVideoChannelSendInfo* send_channel); bool StopSend(WebRtcVideoChannelSendInfo* send_channel);
bool SendIntraFrame(int channel_id); bool SendIntraFrame(int channel_id);
// Send with one local SSRC. Normal case.
bool IsOneSsrcStream(const StreamParams& sp);
bool HasReadySendChannels(); bool HasReadySendChannels();
// Send channel key returns the key corresponding to the provided local SSRC // Send channel key returns the key corresponding to the provided local SSRC
@@ -400,6 +400,10 @@ class WebRtcVideoMediaChannel : public talk_base::MessageHandler,
// Signal when cpu adaptation has no further scope to adapt. // Signal when cpu adaptation has no further scope to adapt.
void OnCpuAdaptationUnable(); void OnCpuAdaptationUnable();
// Set the local (send-side) RTX SSRC corresponding to primary_ssrc.
bool SetLocalRtxSsrc(int channel_id, const StreamParams& send_params,
uint32 primary_ssrc, int stream_idx);
// Global state. // Global state.
WebRtcVideoEngine* engine_; WebRtcVideoEngine* engine_;
VoiceMediaChannel* voice_channel_; VoiceMediaChannel* voice_channel_;
@@ -423,6 +427,7 @@ class WebRtcVideoMediaChannel : public talk_base::MessageHandler,
// Global send side state. // Global send side state.
SendChannelMap send_channels_; SendChannelMap send_channels_;
talk_base::scoped_ptr<webrtc::VideoCodec> send_codec_; talk_base::scoped_ptr<webrtc::VideoCodec> send_codec_;
int send_rtx_type_;
int send_red_type_; int send_red_type_;
int send_fec_type_; int send_fec_type_;
int send_min_bitrate_; int send_min_bitrate_;

@@ -656,6 +656,45 @@ TEST_F(WebRtcVideoEngineTestFake, RembEnabledOnReceiveChannels) {
EXPECT_TRUE(vie_.GetRembStatusContribute(new_channel_num)); EXPECT_TRUE(vie_.GetRembStatusContribute(new_channel_num));
} }
TEST_F(WebRtcVideoEngineTestFake, RecvStreamWithRtx) {
EXPECT_TRUE(SetupEngine());
int default_channel = vie_.GetLastChannel();
cricket::VideoOptions options;
options.conference_mode.Set(true);
EXPECT_TRUE(channel_->SetOptions(options));
EXPECT_TRUE(channel_->AddSendStream(
cricket::CreateSimWithRtxStreamParams("cname",
MAKE_VECTOR(kSsrcs3),
MAKE_VECTOR(kRtxSsrcs3))));
EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(channel_->AddRecvStream(
cricket::CreateSimWithRtxStreamParams("cname",
MAKE_VECTOR(kSsrcs1),
MAKE_VECTOR(kRtxSsrc1))));
int new_channel_num = vie_.GetLastChannel();
EXPECT_NE(default_channel, new_channel_num);
EXPECT_EQ(4, vie_.GetRemoteRtxSsrc(new_channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, RecvStreamNoRtx) {
EXPECT_TRUE(SetupEngine());
int default_channel = vie_.GetLastChannel();
cricket::VideoOptions options;
options.conference_mode.Set(true);
EXPECT_TRUE(channel_->SetOptions(options));
EXPECT_TRUE(channel_->AddSendStream(
cricket::CreateSimWithRtxStreamParams("cname",
MAKE_VECTOR(kSsrcs3),
MAKE_VECTOR(kRtxSsrcs3))));
EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
int new_channel_num = vie_.GetLastChannel();
EXPECT_NE(default_channel, new_channel_num);
EXPECT_EQ(-1, vie_.GetRemoteRtxSsrc(new_channel_num));
}
// Test support for RTP timestamp offset header extension. // Test support for RTP timestamp offset header extension.
TEST_F(WebRtcVideoEngineTestFake, RtpTimestampOffsetHeaderExtensions) { TEST_F(WebRtcVideoEngineTestFake, RtpTimestampOffsetHeaderExtensions) {
EXPECT_TRUE(SetupEngine()); EXPECT_TRUE(SetupEngine());
@@ -1319,7 +1358,7 @@ TEST_F(WebRtcVideoEngineTestFake, TestSetInvalidCpuThreshold) {
TEST_F(WebRtcVideoEngineTest, FindCodec) { TEST_F(WebRtcVideoEngineTest, FindCodec) {
// We should not need to init engine in order to get codecs. // We should not need to init engine in order to get codecs.
const std::vector<cricket::VideoCodec>& c = engine_.codecs(); const std::vector<cricket::VideoCodec>& c = engine_.codecs();
EXPECT_EQ(3U, c.size());
EXPECT_EQ(4U, c.size());
cricket::VideoCodec vp8(104, "VP8", 320, 200, 30, 0); cricket::VideoCodec vp8(104, "VP8", 320, 200, 30, 0);
EXPECT_TRUE(engine_.FindCodec(vp8)); EXPECT_TRUE(engine_.FindCodec(vp8));
@@ -1354,6 +1393,24 @@ TEST_F(WebRtcVideoEngineTest, FindCodec) {
cricket::VideoCodec fec_ci(102, "ulpfec", 0, 0, 30, 0); cricket::VideoCodec fec_ci(102, "ulpfec", 0, 0, 30, 0);
EXPECT_TRUE(engine_.FindCodec(fec)); EXPECT_TRUE(engine_.FindCodec(fec));
cricket::VideoCodec rtx(96, "rtx", 0, 0, 30, 0);
EXPECT_TRUE(engine_.FindCodec(rtx));
}
TEST_F(WebRtcVideoEngineTest, RtxCodecHasAptSet) {
std::vector<cricket::VideoCodec>::const_iterator it;
bool apt_checked = false;
for (it = engine_.codecs().begin(); it != engine_.codecs().end(); ++it) {
if (_stricmp(cricket::kRtxCodecName, it->name.c_str()) && it->id != 96) {
continue;
}
int apt;
EXPECT_TRUE(it->GetParam("apt", &apt));
EXPECT_EQ(100, apt);
apt_checked = true;
}
EXPECT_TRUE(apt_checked);
} }
TEST_F(WebRtcVideoEngineTest, StartupShutdown) { TEST_F(WebRtcVideoEngineTest, StartupShutdown) {

@@ -230,6 +230,7 @@ static AudioOptions GetDefaultEngineOptions() {
options.experimental_agc.Set(false); options.experimental_agc.Set(false);
options.experimental_aec.Set(false); options.experimental_aec.Set(false);
options.aec_dump.Set(false); options.aec_dump.Set(false);
options.experimental_acm.Set(false);
return options; return options;
} }
@@ -260,7 +261,7 @@ class WebRtcSoundclipMedia : public SoundclipMedia {
if (!engine_->voe_sc()) { if (!engine_->voe_sc()) {
return false; return false;
} }
webrtc_channel_ = engine_->voe_sc()->base()->CreateChannel();
webrtc_channel_ = engine_->CreateSoundclipVoiceChannel();
if (webrtc_channel_ == -1) { if (webrtc_channel_ == -1) {
LOG_RTCERR0(CreateChannel); LOG_RTCERR0(CreateChannel);
return false; return false;
@@ -333,6 +334,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine()
log_filter_(SeverityToFilter(kDefaultLogSeverity)), log_filter_(SeverityToFilter(kDefaultLogSeverity)),
is_dumping_aec_(false), is_dumping_aec_(false),
desired_local_monitor_enable_(false), desired_local_monitor_enable_(false),
use_experimental_acm_(false),
tx_processor_ssrc_(0), tx_processor_ssrc_(0),
rx_processor_ssrc_(0) { rx_processor_ssrc_(0) {
Construct(); Construct();
@@ -350,6 +352,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine(VoEWrapper* voe_wrapper,
log_filter_(SeverityToFilter(kDefaultLogSeverity)), log_filter_(SeverityToFilter(kDefaultLogSeverity)),
is_dumping_aec_(false), is_dumping_aec_(false),
desired_local_monitor_enable_(false), desired_local_monitor_enable_(false),
use_experimental_acm_(false),
tx_processor_ssrc_(0), tx_processor_ssrc_(0),
rx_processor_ssrc_(0) { rx_processor_ssrc_(0) {
Construct(); Construct();
@@ -377,6 +380,10 @@ void WebRtcVoiceEngine::Construct() {
RtpHeaderExtension(kRtpAudioLevelHeaderExtension, RtpHeaderExtension(kRtpAudioLevelHeaderExtension,
kRtpAudioLevelHeaderExtensionId)); kRtpAudioLevelHeaderExtensionId));
options_ = GetDefaultEngineOptions(); options_ = GetDefaultEngineOptions();
// Initialize the VoE Configuration to the default ACM.
voe_config_.Set<webrtc::AudioCodingModuleFactory>(
new webrtc::AudioCodingModuleFactory);
} }
static bool IsOpus(const AudioCodec& codec) { static bool IsOpus(const AudioCodec& codec) {
@@ -714,6 +721,12 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
LOG(LS_INFO) << "Applying audio options: " << options.ToString(); LOG(LS_INFO) << "Applying audio options: " << options.ToString();
// Configure whether ACM1 or ACM2 is used.
bool enable_acm2 = false;
if (options.experimental_acm.Get(&enable_acm2)) {
EnableExperimentalAcm(enable_acm2);
}
webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing(); webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing();
bool echo_cancellation; bool echo_cancellation;
@@ -940,7 +953,7 @@ bool WebRtcVoiceEngine::SetDevices(const Device* in_device,
} }
if (ret) { if (ret) {
if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) { if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) {
LOG_RTCERR2(SetRecordingDevice, in_device->name, in_id);
LOG_RTCERR2(SetRecordingDevice, in_name, in_id);
ret = false; ret = false;
} }
} }
@@ -952,7 +965,7 @@ bool WebRtcVoiceEngine::SetDevices(const Device* in_device,
} }
if (ret) { if (ret) {
if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) { if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) {
LOG_RTCERR2(SetPlayoutDevice, out_device->name, out_id);
LOG_RTCERR2(SetPlayoutDevice, out_name, out_id);
ret = false; ret = false;
} }
} }
@@ -1248,6 +1261,21 @@ bool WebRtcVoiceEngine::ShouldIgnoreTrace(const std::string& trace) {
return false; return false;
} }
void WebRtcVoiceEngine::EnableExperimentalAcm(bool enable) {
if (enable == use_experimental_acm_)
return;
if (enable) {
LOG(LS_INFO) << "VoiceEngine is set to use new ACM (ACM2 + NetEq4).";
voe_config_.Set<webrtc::AudioCodingModuleFactory>(
new webrtc::NewAudioCodingModuleFactory());
} else {
LOG(LS_INFO) << "VoiceEngine is set to use legacy ACM (ACM1 + Neteq3).";
voe_config_.Set<webrtc::AudioCodingModuleFactory>(
new webrtc::AudioCodingModuleFactory());
}
use_experimental_acm_ = enable;
}
void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace, void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace,
int length) { int length) {
talk_base::LoggingSeverity sev = talk_base::LS_VERBOSE; talk_base::LoggingSeverity sev = talk_base::LS_VERBOSE;
@@ -1580,6 +1608,22 @@ void WebRtcVoiceEngine::StopAecDump() {
} }
} }
int WebRtcVoiceEngine::CreateVoiceChannel(VoEWrapper* voice_engine_wrapper) {
#ifdef USE_WEBRTC_DEV_BRANCH
return voice_engine_wrapper->base()->CreateChannel(voe_config_);
#else
return voice_engine_wrapper->base()->CreateChannel();
#endif
}
int WebRtcVoiceEngine::CreateMediaVoiceChannel() {
return CreateVoiceChannel(voe_wrapper_.get());
}
int WebRtcVoiceEngine::CreateSoundclipVoiceChannel() {
return CreateVoiceChannel(voe_wrapper_sc_.get());
}
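CreateVoiceChannel() above forwards voe_config_ (holding either the default or the experimental AudioCodingModuleFactory chosen by EnableExperimentalAcm) to VoEBase::CreateChannel on the dev branch, and falls back to the no-argument overload otherwise. A rough sketch of that select-then-create idea with hypothetical stand-in types, not the real webrtc::Config API:

// Hypothetical stand-ins for webrtc::Config and the two ACM factories.
enum AcmVariant { kLegacyAcm, kExperimentalAcm };

struct VoeConfigSketch {
  AcmVariant acm;
  VoeConfigSketch() : acm(kLegacyAcm) {}  // default: ACM1 + NetEq3
};

// Mirrors EnableExperimentalAcm(): record which ACM factory later calls to
// CreateChannel(voe_config_) should hand to the voice engine.
void EnableExperimentalAcmSketch(VoeConfigSketch* config, bool enable) {
  config->acm = enable ? kExperimentalAcm : kLegacyAcm;
}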
// This struct relies on the generated copy constructor and assignment operator // This struct relies on the generated copy constructor and assignment operator
// since it is used in an stl::map. // since it is used in an stl::map.
struct WebRtcVoiceMediaChannel::WebRtcVoiceChannelInfo { struct WebRtcVoiceMediaChannel::WebRtcVoiceChannelInfo {
@@ -1597,7 +1641,7 @@ struct WebRtcVoiceMediaChannel::WebRtcVoiceChannelInfo {
WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine) WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
: WebRtcMediaChannel<VoiceMediaChannel, WebRtcVoiceEngine>( : WebRtcMediaChannel<VoiceMediaChannel, WebRtcVoiceEngine>(
engine, engine,
engine->voe()->base()->CreateChannel()),
engine->CreateMediaVoiceChannel()),
send_bw_setting_(false), send_bw_setting_(false),
send_autobw_(false), send_autobw_(false),
send_bw_bps_(0), send_bw_bps_(0),
@@ -2231,7 +2275,7 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
channel = voe_channel(); channel = voe_channel();
} else { } else {
// Create a new channel for sending audio data. // Create a new channel for sending audio data.
channel = engine()->voe()->base()->CreateChannel();
channel = engine()->CreateMediaVoiceChannel();
if (channel == -1) { if (channel == -1) {
LOG_RTCERR0(CreateChannel); LOG_RTCERR0(CreateChannel);
return false; return false;
@@ -2346,7 +2390,7 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
} }
// Create a new channel for receiving audio data. // Create a new channel for receiving audio data.
int channel = engine()->voe()->base()->CreateChannel();
int channel = engine()->CreateMediaVoiceChannel();
if (channel == -1) { if (channel == -1) {
LOG_RTCERR0(CreateChannel); LOG_RTCERR0(CreateChannel);
return false; return false;
@@ -2966,7 +3010,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
continue; continue;
} }
sinfo.ssrc = ssrc;
sinfo.add_ssrc(ssrc);
sinfo.codec_name = send_codec_.get() ? send_codec_->plname : ""; sinfo.codec_name = send_codec_.get() ? send_codec_->plname : "";
sinfo.bytes_sent = cs.bytesSent; sinfo.bytes_sent = cs.bytesSent;
sinfo.packets_sent = cs.packetsSent; sinfo.packets_sent = cs.packetsSent;
@@ -2988,7 +3032,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
for (iter = receive_blocks.begin(); iter != receive_blocks.end(); for (iter = receive_blocks.begin(); iter != receive_blocks.end();
++iter) { ++iter) {
// Lookup report for send ssrc only. // Lookup report for send ssrc only.
if (iter->source_SSRC == sinfo.ssrc) {
if (iter->source_SSRC == sinfo.ssrc()) {
// Convert Q8 to floating point. // Convert Q8 to floating point.
sinfo.fraction_lost = static_cast<float>(iter->fraction_lost) / 256; sinfo.fraction_lost = static_cast<float>(iter->fraction_lost) / 256;
// Convert samples to milliseconds. // Convert samples to milliseconds.
@@ -3041,7 +3085,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
engine()->voe()->rtp()->GetRTCPStatistics(*it, cs) != -1 && engine()->voe()->rtp()->GetRTCPStatistics(*it, cs) != -1 &&
engine()->voe()->codec()->GetRecCodec(*it, codec) != -1) { engine()->voe()->codec()->GetRecCodec(*it, codec) != -1) {
VoiceReceiverInfo rinfo; VoiceReceiverInfo rinfo;
rinfo.ssrc = ssrc;
rinfo.add_ssrc(ssrc);
rinfo.bytes_rcvd = cs.bytesReceived; rinfo.bytes_rcvd = cs.bytesReceived;
rinfo.packets_rcvd = cs.packetsReceived; rinfo.packets_rcvd = cs.packetsReceived;
// The next four fields are from the most recently sent RTCP report. // The next four fields are from the most recently sent RTCP report.

@@ -43,6 +43,8 @@
#include "talk/media/webrtc/webrtcexport.h" #include "talk/media/webrtc/webrtcexport.h"
#include "talk/media/webrtc/webrtcvoe.h" #include "talk/media/webrtc/webrtcvoe.h"
#include "talk/session/media/channel.h" #include "talk/session/media/channel.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#if !defined(LIBPEERCONNECTION_LIB) && \ #if !defined(LIBPEERCONNECTION_LIB) && \
!defined(LIBPEERCONNECTION_IMPLEMENTATION) !defined(LIBPEERCONNECTION_IMPLEMENTATION)
@@ -175,6 +177,10 @@ class WebRtcVoiceEngine
// Check whether the supplied trace should be ignored. // Check whether the supplied trace should be ignored.
bool ShouldIgnoreTrace(const std::string& trace); bool ShouldIgnoreTrace(const std::string& trace);
// Create a VoiceEngine Channel.
int CreateMediaVoiceChannel();
int CreateSoundclipVoiceChannel();
private: private:
typedef std::vector<WebRtcSoundclipMedia *> SoundclipList; typedef std::vector<WebRtcSoundclipMedia *> SoundclipList;
typedef std::vector<WebRtcVoiceMediaChannel *> ChannelList; typedef std::vector<WebRtcVoiceMediaChannel *> ChannelList;
@@ -192,6 +198,9 @@ class WebRtcVoiceEngine
// allows us to selectively turn on and off different options easily // allows us to selectively turn on and off different options easily
// at any time. // at any time.
bool ApplyOptions(const AudioOptions& options); bool ApplyOptions(const AudioOptions& options);
// Configure the engine to use ACM2 if |enable| is true; otherwise configure
// it to use ACM1.
void EnableExperimentalAcm(bool enable);
virtual void Print(webrtc::TraceLevel level, const char* trace, int length); virtual void Print(webrtc::TraceLevel level, const char* trace, int length);
virtual void CallbackOnError(int channel, int errCode); virtual void CallbackOnError(int channel, int errCode);
// Given the device type, name, and id, find device id. Return true and // Given the device type, name, and id, find device id. Return true and
@@ -215,6 +224,7 @@ class WebRtcVoiceEngine
void StartAecDump(const std::string& filename); void StartAecDump(const std::string& filename);
void StopAecDump(); void StopAecDump();
int CreateVoiceChannel(VoEWrapper* voe);
// When a voice processor registers with the engine, it is connected // When a voice processor registers with the engine, it is connected
// to either the Rx or Tx signals, based on the direction parameter. // to either the Rx or Tx signals, based on the direction parameter.
@@ -246,6 +256,10 @@ class WebRtcVoiceEngine
// callback as well as the RegisterChannel/UnregisterChannel. // callback as well as the RegisterChannel/UnregisterChannel.
talk_base::CriticalSection channels_cs_; talk_base::CriticalSection channels_cs_;
webrtc::AgcConfig default_agc_config_; webrtc::AgcConfig default_agc_config_;
webrtc::Config voe_config_;
bool use_experimental_acm_;
bool initialized_; bool initialized_;
// See SetOptions and SetOptionOverrides for a description of the // See SetOptions and SetOptionOverrides for a description of the
// difference between options and overrides. // difference between options and overrides.

@@ -1653,7 +1653,7 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
// Verify the statistic information is correct. // Verify the statistic information is correct.
for (unsigned int i = 0; i < ARRAY_SIZE(kSsrcs4); ++i) { for (unsigned int i = 0; i < ARRAY_SIZE(kSsrcs4); ++i) {
EXPECT_EQ(kSsrcs4[i], info.senders[i].ssrc);
EXPECT_EQ(kSsrcs4[i], info.senders[i].ssrc());
EXPECT_EQ(kPcmuCodec.name, info.senders[i].codec_name); EXPECT_EQ(kPcmuCodec.name, info.senders[i].codec_name);
EXPECT_EQ(cricket::kIntStatValue, info.senders[i].bytes_sent); EXPECT_EQ(cricket::kIntStatValue, info.senders[i].bytes_sent);
EXPECT_EQ(cricket::kIntStatValue, info.senders[i].packets_sent); EXPECT_EQ(cricket::kIntStatValue, info.senders[i].packets_sent);
@@ -1978,7 +1978,7 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
cricket::VoiceMediaInfo info; cricket::VoiceMediaInfo info;
EXPECT_EQ(true, channel_->GetStats(&info)); EXPECT_EQ(true, channel_->GetStats(&info));
EXPECT_EQ(1u, info.senders.size()); EXPECT_EQ(1u, info.senders.size());
EXPECT_EQ(kSsrc1, info.senders[0].ssrc);
EXPECT_EQ(kSsrc1, info.senders[0].ssrc());
EXPECT_EQ(kPcmuCodec.name, info.senders[0].codec_name); EXPECT_EQ(kPcmuCodec.name, info.senders[0].codec_name);
EXPECT_EQ(cricket::kIntStatValue, info.senders[0].bytes_sent); EXPECT_EQ(cricket::kIntStatValue, info.senders[0].bytes_sent);
EXPECT_EQ(cricket::kIntStatValue, info.senders[0].packets_sent); EXPECT_EQ(cricket::kIntStatValue, info.senders[0].packets_sent);
@@ -2982,3 +2982,40 @@ TEST(WebRtcVoiceEngineTest, CoInitialize) {
#endif #endif
TEST_F(WebRtcVoiceEngineTestFake, SetExperimentalAcm) {
EXPECT_TRUE(SetupEngine());
// By default experimental ACM should not be used.
int media_channel = engine_.CreateMediaVoiceChannel();
ASSERT_GE(media_channel, 0);
EXPECT_FALSE(voe_.IsUsingExperimentalAcm(media_channel));
int soundclip_channel = engine_.CreateSoundclipVoiceChannel();
ASSERT_GE(soundclip_channel, 0);
EXPECT_FALSE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
#ifdef USE_WEBRTC_DEV_BRANCH
// Set options to use experimental ACM.
cricket::AudioOptions options;
options.experimental_acm.Set(true);
ASSERT_TRUE(engine_.SetOptions(options));
media_channel = engine_.CreateMediaVoiceChannel();
ASSERT_GE(media_channel, 0);
EXPECT_TRUE(voe_.IsUsingExperimentalAcm(media_channel));
soundclip_channel = engine_.CreateSoundclipVoiceChannel();
ASSERT_GE(soundclip_channel, 0);
EXPECT_TRUE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
// Set option to use legacy ACM.
options.experimental_acm.Set(false);
ASSERT_TRUE(engine_.SetOptions(options));
media_channel = engine_.CreateMediaVoiceChannel();
ASSERT_GE(media_channel, 0);
EXPECT_FALSE(voe_.IsUsingExperimentalAcm(media_channel));
soundclip_channel = engine_.CreateSoundclipVoiceChannel();
ASSERT_GE(soundclip_channel, 0);
EXPECT_FALSE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
#endif
}

@@ -206,14 +206,6 @@ void TurnPort::PrepareAddress() {
return; return;
} }
// If protocol family of server address doesn't match with local, return.
if (!IsCompatibleAddress(server_address_.address)) {
LOG(LS_ERROR) << "Server IP address family does not match with "
<< "local host address family type";
OnAllocateError();
return;
}
if (!server_address_.address.port()) { if (!server_address_.address.port()) {
// We will set default TURN port, if no port is set in the address. // We will set default TURN port, if no port is set in the address.
server_address_.address.SetPort(TURN_DEFAULT_PORT); server_address_.address.SetPort(TURN_DEFAULT_PORT);
@@ -222,6 +214,14 @@ void TurnPort::PrepareAddress() {
if (server_address_.address.IsUnresolved()) { if (server_address_.address.IsUnresolved()) {
ResolveTurnAddress(server_address_.address); ResolveTurnAddress(server_address_.address);
} else { } else {
// If protocol family of server address doesn't match with local, return.
if (!IsCompatibleAddress(server_address_.address)) {
LOG(LS_ERROR) << "Server IP address family does not match with "
<< "local host address family type";
OnAllocateError();
return;
}
LOG_J(LS_INFO, this) << "Trying to connect to TURN server via " LOG_J(LS_INFO, this) << "Trying to connect to TURN server via "
<< ProtoToString(server_address_.proto) << " @ " << ProtoToString(server_address_.proto) << " @ "
<< server_address_.address.ToSensitiveString(); << server_address_.address.ToSensitiveString();
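The PrepareAddress() reordering above defers the address-family check until the server address is actually an IP: for a still-unresolved hostname the family is unknown, so checking it up front could fail the allocation spuriously. A minimal sketch of the new ordering (the enum and parameters are hypothetical, not the real TurnPort types):

enum PrepareResultSketch { kResolving, kConnecting, kAllocateError };

// Mirrors the reordered flow: resolve first, and only compare address
// families once a concrete IP is available.
PrepareResultSketch PrepareAddressSketch(bool address_is_unresolved,
                                         bool family_matches_local) {
  if (address_is_unresolved)
    return kResolving;      // family is checked later, after resolution
  if (!family_matches_local)
    return kAllocateError;  // the check that previously ran unconditionally
  return kConnecting;
}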

@@ -939,4 +939,12 @@ VideoFormat ChannelManager::GetStartCaptureFormat() {
Bind(&MediaEngineInterface::GetStartCaptureFormat, media_engine_.get())); Bind(&MediaEngineInterface::GetStartCaptureFormat, media_engine_.get()));
} }
bool ChannelManager::SetAudioOptions(const AudioOptions& options) {
if (!media_engine_->SetAudioOptions(options)) {
return false;
}
audio_options_ = options;
return true;
}
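The new ChannelManager::SetAudioOptions() pass-through gives callers a way to flip engine-level audio options such as experimental_acm. A hedged usage sketch; the header paths and the already-initialized |channel_manager| are assumptions, while the option field and the pass-through call are the ones added in this change:

#include "talk/media/base/mediachannel.h"       // cricket::AudioOptions (assumed path)
#include "talk/session/media/channelmanager.h"  // cricket::ChannelManager (assumed path)

// Switch the voice engine to the experimental ACM through the new
// pass-through; returns false if the media engine rejects the options,
// in which case audio_options_ is left unchanged.
bool UseExperimentalAcm(cricket::ChannelManager* channel_manager) {
  cricket::AudioOptions options;
  options.experimental_acm.Set(true);  // same option consumed by ApplyOptions()
  return channel_manager->SetAudioOptions(options);
}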
} // namespace cricket } // namespace cricket

@@ -225,6 +225,11 @@ class ChannelManager : public talk_base::MessageHandler,
// TODO(hellner): Remove this function once the engine capturer has been // TODO(hellner): Remove this function once the engine capturer has been
// removed. // removed.
VideoFormat GetStartCaptureFormat(); VideoFormat GetStartCaptureFormat();
// TODO(turajs): Remove this function when ACM2 is in use. Used mainly to
// choose between ACM1 and ACM2.
bool SetAudioOptions(const AudioOptions& options);
protected: protected:
// Adds non-transient parameters which can only be changed through the // Adds non-transient parameters which can only be changed through the
// options store. // options store.