Update talk to 59410372.

R=jiayl@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/6929004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5367 4adac7df-926f-26a2-2b94-8c16560cd09d
mallinath@webrtc.org 2014-01-11 01:26:23 +00:00
parent 023cc5abc7
commit 0f3356e20b
50 changed files with 974 additions and 746 deletions

View File

@ -29,9 +29,9 @@
#include <string>
#include "talk/app/webrtc/mediastreamprovider.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/base/logging.h"
#include "talk/base/refcount.h"
#include "talk/media/sctp/sctputils.h"
namespace webrtc {
@ -46,7 +46,7 @@ talk_base::scoped_refptr<DataChannel> DataChannel::Create(
DataChannelProviderInterface* provider,
cricket::DataChannelType dct,
const std::string& label,
const DataChannelInit* config) {
const InternalDataChannelInit& config) {
talk_base::scoped_refptr<DataChannel> channel(
new talk_base::RefCountedObject<DataChannel>(provider, dct, label));
if (!channel->Init(config)) {
@ -62,39 +62,40 @@ DataChannel::DataChannel(
: label_(label),
observer_(NULL),
state_(kConnecting),
was_ever_writable_(false),
connected_to_provider_(false),
data_channel_type_(dct),
provider_(provider),
waiting_for_open_ack_(false),
was_ever_writable_(false),
connected_to_provider_(false),
send_ssrc_set_(false),
send_ssrc_(0),
receive_ssrc_set_(false),
send_ssrc_(0),
receive_ssrc_(0) {
}
bool DataChannel::Init(const DataChannelInit* config) {
bool DataChannel::Init(const InternalDataChannelInit& config) {
if (data_channel_type_ == cricket::DCT_RTP &&
(config->reliable ||
config->id != -1 ||
config->maxRetransmits != -1 ||
config->maxRetransmitTime != -1)) {
(config.reliable ||
config.id != -1 ||
config.maxRetransmits != -1 ||
config.maxRetransmitTime != -1)) {
LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
<< "invalid DataChannelInit.";
return false;
} else if (data_channel_type_ == cricket::DCT_SCTP) {
if (config->id < -1 ||
config->maxRetransmits < -1 ||
config->maxRetransmitTime < -1) {
if (config.id < -1 ||
config.maxRetransmits < -1 ||
config.maxRetransmitTime < -1) {
LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
<< "invalid DataChannelInit.";
return false;
}
if (config->maxRetransmits != -1 && config->maxRetransmitTime != -1) {
if (config.maxRetransmits != -1 && config.maxRetransmitTime != -1) {
LOG(LS_ERROR) <<
"maxRetransmits and maxRetransmitTime should not be both set.";
return false;
}
config_ = *config;
config_ = config;
// Try to connect to the transport in case the transport channel already
// exists.
@ -197,9 +198,44 @@ bool DataChannel::SendOpenMessage(const talk_base::Buffer* raw_buffer) {
cricket::SendDataResult send_result;
bool retval = provider_->SendData(send_params, *buffer, &send_result);
if (!retval && send_result == cricket::SDR_BLOCK) {
if (retval) {
LOG(LS_INFO) << "Sent OPEN message on channel " << config_.id;
// Send data as ordered before we receive any message from the remote peer
// to make sure the remote peer will not receive any data before it receives
// the OPEN message.
waiting_for_open_ack_ = true;
} else if (send_result == cricket::SDR_BLOCK) {
// Link is congested. Queue for later.
QueueControl(buffer.release());
} else {
LOG(LS_ERROR) << "Failed to send OPEN message with result "
<< send_result << " on channel " << config_.id;
}
return retval;
}
bool DataChannel::SendOpenAckMessage(const talk_base::Buffer* raw_buffer) {
ASSERT(data_channel_type_ == cricket::DCT_SCTP &&
was_ever_writable_ &&
config_.id >= 0);
talk_base::scoped_ptr<const talk_base::Buffer> buffer(raw_buffer);
cricket::SendDataParams send_params;
send_params.ssrc = config_.id;
send_params.ordered = config_.ordered;
send_params.type = cricket::DMT_CONTROL;
cricket::SendDataResult send_result;
bool retval = provider_->SendData(send_params, *buffer, &send_result);
if (retval) {
LOG(LS_INFO) << "Sent OPEN_ACK message on channel " << config_.id;
} else if (send_result == cricket::SDR_BLOCK) {
// Link is congested. Queue for later.
QueueControl(buffer.release());
} else {
LOG(LS_ERROR) << "Failed to send OPEN_ACK message with result "
<< send_result << " on channel " << config_.id;
}
return retval;
}
@ -254,6 +290,35 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
return;
}
if (params.type == cricket::DMT_CONTROL) {
ASSERT(data_channel_type_ == cricket::DCT_SCTP);
if (!waiting_for_open_ack_) {
// Ignore it if we are not expecting an ACK message.
LOG(LS_WARNING) << "DataChannel received unexpected CONTROL message, "
<< "sid = " << params.ssrc;
return;
}
if (ParseDataChannelOpenAckMessage(payload)) {
// We can send unordered as soon as we receive the ACK message.
waiting_for_open_ack_ = false;
LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = "
<< params.ssrc;
} else {
LOG(LS_WARNING) << "DataChannel failed to parse OPEN_ACK message, sid = "
<< params.ssrc;
}
return;
}
ASSERT(params.type == cricket::DMT_BINARY ||
params.type == cricket::DMT_TEXT);
LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " << params.ssrc;
// We can send unordered as soon as we receive any DATA message since the
// remote side must have received the OPEN (and old clients do not send
// OPEN_ACK).
waiting_for_open_ack_ = false;
bool binary = (params.type == cricket::DMT_BINARY);
talk_base::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
if (was_ever_writable_ && observer_) {
@ -279,14 +344,17 @@ void DataChannel::OnChannelReady(bool writable) {
if (!was_ever_writable_) {
was_ever_writable_ = true;
if (data_channel_type_ == cricket::DCT_SCTP && !config_.negotiated) {
talk_base::Buffer* payload = new talk_base::Buffer;
if (!cricket::WriteDataChannelOpenMessage(label_, config_, payload)) {
// TODO(jiayl): close the data channel on this error.
LOG(LS_ERROR) << "Could not write data channel OPEN message";
return;
if (data_channel_type_ == cricket::DCT_SCTP) {
if (config_.open_handshake_role == InternalDataChannelInit::kOpener) {
talk_base::Buffer* payload = new talk_base::Buffer;
WriteDataChannelOpenMessage(label_, config_, payload);
SendOpenMessage(payload);
} else if (config_.open_handshake_role ==
InternalDataChannelInit::kAcker) {
talk_base::Buffer* payload = new talk_base::Buffer;
WriteDataChannelOpenAckMessage(payload);
SendOpenAckMessage(payload);
}
SendOpenMessage(payload);
}
UpdateState();
@ -412,7 +480,12 @@ void DataChannel::DeliverQueuedControlData() {
while (!queued_control_data_.empty()) {
const talk_base::Buffer* buf = queued_control_data_.front();
queued_control_data_.pop();
SendOpenMessage(buf);
if (config_.open_handshake_role == InternalDataChannelInit::kOpener) {
SendOpenMessage(buf);
} else {
ASSERT(config_.open_handshake_role == InternalDataChannelInit::kAcker);
SendOpenAckMessage(buf);
}
}
}
@ -430,6 +503,13 @@ bool DataChannel::InternalSendWithoutQueueing(
if (data_channel_type_ == cricket::DCT_SCTP) {
send_params.ordered = config_.ordered;
// Send as ordered if it is waiting for the OPEN_ACK message.
if (waiting_for_open_ack_ && !config_.ordered) {
send_params.ordered = true;
LOG(LS_VERBOSE) << "Sending data as ordered for unordered DataChannel "
<< "because the OPEN_ACK message has not been received.";
}
send_params.max_rtx_count = config_.maxRetransmits;
send_params.max_rtx_ms = config_.maxRetransmitTime;
send_params.ssrc = config_.id;

View File

@ -64,6 +64,25 @@ class DataChannelProviderInterface {
virtual ~DataChannelProviderInterface() {}
};
struct InternalDataChannelInit : public DataChannelInit {
enum OpenHandshakeRole {
kOpener,
kAcker,
kNone
};
// The default role is kOpener because the default |negotiated| is false.
InternalDataChannelInit() : open_handshake_role(kOpener) {}
explicit InternalDataChannelInit(const DataChannelInit& base)
: DataChannelInit(base), open_handshake_role(kOpener) {
// If the channel is externally negotiated, do not send the OPEN message.
if (base.negotiated) {
open_handshake_role = kNone;
}
}
OpenHandshakeRole open_handshake_role;
};
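
As a rough illustration of the intent behind the new struct (a sketch only; RoleForChannel is a hypothetical helper, not part of this change), the three creation paths are expected to end up with the following handshake roles:

// Sketch: expected OpenHandshakeRole for each way a channel can be created.
// RoleForChannel is hypothetical; it only spells out the mapping implied by the
// constructors above and by MediaStreamSignaling::AddDataChannelFromOpenMessage.
#include "talk/app/webrtc/datachannel.h"

webrtc::InternalDataChannelInit::OpenHandshakeRole RoleForChannel(
    const webrtc::DataChannelInit& base, bool created_from_open_message) {
  webrtc::InternalDataChannelInit init(base);
  if (created_from_open_message) {
    // The receiver of an OPEN message answers with OPEN_ACK.
    init.open_handshake_role = webrtc::InternalDataChannelInit::kAcker;
  }
  // Otherwise the role stays kOpener for locally created channels, or kNone if
  // |base.negotiated| is true (no OPEN message is sent at all).
  return init.open_handshake_role;
}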
// DataChannel is an implementation of the DataChannelInterface based on
// libjingle's data engine. It provides an implementation of unreliable or
// reliable data channels. Currently this class is specifically designed to use
@ -87,7 +106,7 @@ class DataChannel : public DataChannelInterface,
DataChannelProviderInterface* provider,
cricket::DataChannelType dct,
const std::string& label,
const DataChannelInit* config);
const InternalDataChannelInit& config);
virtual void RegisterObserver(DataChannelObserver* observer);
virtual void UnregisterObserver();
@ -156,7 +175,7 @@ class DataChannel : public DataChannelInterface,
virtual ~DataChannel();
private:
bool Init(const DataChannelInit* config);
bool Init(const InternalDataChannelInit& config);
void DoClose();
void UpdateState();
void SetState(DataState state);
@ -172,19 +191,20 @@ class DataChannel : public DataChannelInterface,
cricket::SendDataResult* send_result);
bool QueueSendData(const DataBuffer& buffer);
bool SendOpenMessage(const talk_base::Buffer* buffer);
bool SendOpenAckMessage(const talk_base::Buffer* buffer);
std::string label_;
DataChannelInit config_;
InternalDataChannelInit config_;
DataChannelObserver* observer_;
DataState state_;
bool was_ever_writable_;
bool connected_to_provider_;
cricket::DataChannelType data_channel_type_;
DataChannelProviderInterface* provider_;
bool waiting_for_open_ack_;
bool was_ever_writable_;
bool connected_to_provider_;
bool send_ssrc_set_;
uint32 send_ssrc_;
bool receive_ssrc_set_;
uint32 send_ssrc_;
uint32 receive_ssrc_;
// Control messages that always have to get sent out before any queued
// data.
@ -197,7 +217,7 @@ class DataChannelFactory {
public:
virtual talk_base::scoped_refptr<DataChannel> CreateDataChannel(
const std::string& label,
const DataChannelInit* config) = 0;
const InternalDataChannelInit* config) = 0;
protected:
virtual ~DataChannelFactory() {}

View File

@ -26,6 +26,7 @@
*/
#include "talk/app/webrtc/datachannel.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/test/fakedatachannelprovider.h"
#include "talk/base/gunit.h"
#include "testing/base/public/gmock.h"
@ -42,7 +43,8 @@ class SctpDataChannelTest : public testing::Test {
protected:
SctpDataChannelTest()
: webrtc_data_channel_(
DataChannel::Create(&provider_, cricket::DCT_SCTP, "test", &init_)) {
DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test", init_)) {
}
void SetChannelReady() {
@ -59,7 +61,7 @@ class SctpDataChannelTest : public testing::Test {
webrtc_data_channel_->RegisterObserver(observer_.get());
}
webrtc::DataChannelInit init_;
webrtc::InternalDataChannelInit init_;
FakeDataChannelProvider provider_;
talk_base::scoped_ptr<FakeDataChannelObserver> observer_;
talk_base::scoped_refptr<DataChannel> webrtc_data_channel_;
@ -69,7 +71,7 @@ class SctpDataChannelTest : public testing::Test {
TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
provider_.set_transport_available(true);
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", &init_);
&provider_, cricket::DCT_SCTP, "test1", init_);
EXPECT_TRUE(provider_.IsConnected(dc.get()));
// The sid is not set yet, so it should not have added the streams.
@ -153,15 +155,69 @@ TEST_F(SctpDataChannelTest, OpenMessageSent) {
// state.
TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
SetChannelReady();
webrtc::DataChannelInit init;
webrtc::InternalDataChannelInit init;
init.id = 1;
talk_base::scoped_refptr<DataChannel> dc =
DataChannel::Create(&provider_, cricket::DCT_SCTP, "test1", &init);
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", init);
EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state());
EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(),
1000);
}
// Tests that an unordered DataChannel sends data as ordered until the OPEN_ACK
// message is received.
TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
SetChannelReady();
webrtc::InternalDataChannelInit init;
init.id = 1;
init.ordered = false;
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", init);
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
// Sends a message and verifies it's ordered.
webrtc::DataBuffer buffer("some data");
ASSERT_TRUE(dc->Send(buffer));
EXPECT_TRUE(provider_.last_send_data_params().ordered);
// Emulates receiving an OPEN_ACK message.
cricket::ReceiveDataParams params;
params.ssrc = init.id;
params.type = cricket::DMT_CONTROL;
talk_base::Buffer payload;
webrtc::WriteDataChannelOpenAckMessage(&payload);
dc->OnDataReceived(NULL, params, payload);
// Sends another message and verifies it's unordered.
ASSERT_TRUE(dc->Send(buffer));
EXPECT_FALSE(provider_.last_send_data_params().ordered);
}
// Tests that an unordered DataChannel sends unordered data after any DATA
// message is received.
TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
SetChannelReady();
webrtc::InternalDataChannelInit init;
init.id = 1;
init.ordered = false;
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", init);
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
// Emulates receiving a DATA message.
cricket::ReceiveDataParams params;
params.ssrc = init.id;
params.type = cricket::DMT_TEXT;
webrtc::DataBuffer buffer("data");
dc->OnDataReceived(NULL, params, buffer.data);
// Sends a message and verifies it's unordered.
ASSERT_TRUE(dc->Send(buffer));
EXPECT_FALSE(provider_.last_send_data_params().ordered);
}
// Tests that messages are sent with the right ssrc.
TEST_F(SctpDataChannelTest, SendDataSsrc) {
webrtc_data_channel_->SetSctpSid(1);
@ -199,3 +255,50 @@ TEST_F(SctpDataChannelTest, ReceiveDataWithValidSsrc) {
webrtc_data_channel_->OnDataReceived(NULL, params, buffer.data);
}
// Tests that no CONTROL message is sent if the datachannel is negotiated and
// not created from an OPEN message.
TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
webrtc::InternalDataChannelInit config;
config.id = 1;
config.negotiated = true;
config.open_handshake_role = webrtc::InternalDataChannelInit::kNone;
SetChannelReady();
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", config);
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(0U, provider_.last_send_data_params().ssrc);
}
// Tests that an OPEN_ACK message is sent if the datachannel is created from an
// OPEN message.
TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
webrtc::InternalDataChannelInit config;
config.id = 1;
config.negotiated = true;
config.open_handshake_role = webrtc::InternalDataChannelInit::kAcker;
SetChannelReady();
talk_base::scoped_refptr<DataChannel> dc = DataChannel::Create(
&provider_, cricket::DCT_SCTP, "test1", config);
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(static_cast<unsigned int>(config.id),
provider_.last_send_data_params().ssrc);
EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
}
// Tests the OPEN_ACK role assigned by InternalDataChannelInit.
TEST_F(SctpDataChannelTest, OpenAckRoleInitialization) {
webrtc::InternalDataChannelInit init;
EXPECT_EQ(webrtc::InternalDataChannelInit::kOpener, init.open_handshake_role);
EXPECT_FALSE(init.negotiated);
webrtc::DataChannelInit base;
base.negotiated = true;
webrtc::InternalDataChannelInit init2(base);
EXPECT_EQ(webrtc::InternalDataChannelInit::kNone, init2.open_handshake_role);
}

View File

@ -37,24 +37,6 @@ using webrtc::MediaSourceInterface;
namespace webrtc {
// Constraint keys.
// They are declared as static members in mediaconstraintsinterface.h
const char MediaConstraintsInterface::kEchoCancellation[] =
"googEchoCancellation";
const char MediaConstraintsInterface::kExperimentalEchoCancellation[] =
"googEchoCancellation2";
const char MediaConstraintsInterface::kAutoGainControl[] =
"googAutoGainControl";
const char MediaConstraintsInterface::kExperimentalAutoGainControl[] =
"googAutoGainControl2";
const char MediaConstraintsInterface::kNoiseSuppression[] =
"googNoiseSuppression";
const char MediaConstraintsInterface::kHighpassFilter[] =
"googHighpassFilter";
const char MediaConstraintsInterface::kTypingNoiseDetection[] =
"googTypingNoiseDetection";
const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
namespace {
// Convert constraints to audio options. Return false if constraints are

View File

@ -34,6 +34,64 @@ namespace webrtc {
const char MediaConstraintsInterface::kValueTrue[] = "true";
const char MediaConstraintsInterface::kValueFalse[] = "false";
// Constraints declared as static members in mediastreaminterface.h
// Specified by draft-alvestrand-constraints-resolution-00b
const char MediaConstraintsInterface::kMinAspectRatio[] = "minAspectRatio";
const char MediaConstraintsInterface::kMaxAspectRatio[] = "maxAspectRatio";
const char MediaConstraintsInterface::kMaxWidth[] = "maxWidth";
const char MediaConstraintsInterface::kMinWidth[] = "minWidth";
const char MediaConstraintsInterface::kMaxHeight[] = "maxHeight";
const char MediaConstraintsInterface::kMinHeight[] = "minHeight";
const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
// Audio constraints.
const char MediaConstraintsInterface::kEchoCancellation[] =
"googEchoCancellation";
const char MediaConstraintsInterface::kExperimentalEchoCancellation[] =
"googEchoCancellation2";
const char MediaConstraintsInterface::kAutoGainControl[] =
"googAutoGainControl";
const char MediaConstraintsInterface::kExperimentalAutoGainControl[] =
"googAutoGainControl2";
const char MediaConstraintsInterface::kNoiseSuppression[] =
"googNoiseSuppression";
const char MediaConstraintsInterface::kHighpassFilter[] =
"googHighpassFilter";
const char MediaConstraintsInterface::kTypingNoiseDetection[] =
"googTypingNoiseDetection";
const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
// Google-specific constraint keys for a local video source (getUserMedia).
const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction";
const char MediaConstraintsInterface::kLeakyBucket[] = "googLeakyBucket";
const char MediaConstraintsInterface::kTemporalLayeredScreencast[] =
"googTemporalLayeredScreencast";
// TODO(ronghuawu): Remove once cpu overuse detection is stable.
const char MediaConstraintsInterface::kCpuOveruseDetection[] =
"googCpuOveruseDetection";
// Constraint keys for CreateOffer / CreateAnswer defined in W3C specification.
const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
"OfferToReceiveAudio";
const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
"OfferToReceiveVideo";
const char MediaConstraintsInterface::kVoiceActivityDetection[] =
"VoiceActivityDetection";
const char MediaConstraintsInterface::kIceRestart[] =
"IceRestart";
// Google specific constraint for BUNDLE enable/disable.
const char MediaConstraintsInterface::kUseRtpMux[] =
"googUseRtpMUX";
// Below constraints should be used during PeerConnection construction.
const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
"DtlsSrtpKeyAgreement";
const char MediaConstraintsInterface::kEnableRtpDataChannels[] =
"RtpDataChannels";
const char MediaConstraintsInterface::kEnableDscp[] = "googDscp";
const char MediaConstraintsInterface::kEnableIPv6[] = "googIPv6";
// Set |value| to the value associated with the first appearance of |key|, or
// return false if |key| is not found.
bool MediaConstraintsInterface::Constraints::FindFirst(

View File

@ -107,11 +107,10 @@ class MediaConstraintsInterface {
static const char kEnableDtlsSrtp[]; // Enable DTLS-SRTP
// Temporary pseudo-constraints used to enable DataChannels
static const char kEnableRtpDataChannels[]; // Enable RTP DataChannels
// TODO(perkj): Remove kEnableSctpDataChannels once Chrome use
// PeerConnectionFactory::SetOptions.
static const char kEnableSctpDataChannels[]; // Enable SCTP DataChannels
// Temporary pseudo-constraint for enabling DSCP through JS.
static const char kEnableDscp[];
// Constraint to enable IPv6 through JS.
static const char kEnableIPv6[];
// The prefix of internal-only constraints whose JS set values should be
// stripped by Chrome before passed down to Libjingle.

View File

@ -34,6 +34,7 @@
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/bytebuffer.h"
@ -49,18 +50,6 @@ namespace webrtc {
using talk_base::scoped_ptr;
using talk_base::scoped_refptr;
// Supported MediaConstraints.
const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
"OfferToReceiveAudio";
const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
"OfferToReceiveVideo";
const char MediaConstraintsInterface::kIceRestart[] =
"IceRestart";
const char MediaConstraintsInterface::kUseRtpMux[] =
"googUseRtpMUX";
const char MediaConstraintsInterface::kVoiceActivityDetection[] =
"VoiceActivityDetection";
static bool ParseConstraints(
const MediaConstraintsInterface* constraints,
cricket::MediaSessionOptions* options, bool is_answer) {
@ -261,13 +250,24 @@ bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
}
bool MediaStreamSignaling::AddDataChannelFromOpenMessage(
const std::string& label,
const DataChannelInit& config) {
const cricket::ReceiveDataParams& params,
const talk_base::Buffer& payload) {
if (!data_channel_factory_) {
LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
<< "are not supported.";
return false;
}
std::string label;
InternalDataChannelInit config;
config.id = params.ssrc;
if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
<< params.ssrc;
return false;
}
config.open_handshake_role = InternalDataChannelInit::kAcker;
scoped_refptr<DataChannel> channel(
data_channel_factory_->CreateDataChannel(label, &config));
if (!channel.get()) {

View File

@ -194,8 +194,8 @@ class MediaStreamSignaling {
// be offered in a SessionDescription.
bool AddDataChannel(DataChannel* data_channel);
// After we receive an OPEN message, create a data channel and add it.
bool AddDataChannelFromOpenMessage(
const std::string& label, const DataChannelInit& config);
bool AddDataChannelFromOpenMessage(const cricket::ReceiveDataParams& params,
const talk_base::Buffer& payload);
// Returns a MediaSessionOptions struct with options decided by |constraints|,
// the local MediaStreams and DataChannels.

View File

@ -30,6 +30,7 @@
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/test/fakedatachannelprovider.h"
@ -246,13 +247,19 @@ class FakeDataChannelFactory : public webrtc::DataChannelFactory {
virtual talk_base::scoped_refptr<webrtc::DataChannel> CreateDataChannel(
const std::string& label,
const webrtc::DataChannelInit* config) {
return webrtc::DataChannel::Create(provider_, type_, label, config);
const webrtc::InternalDataChannelInit* config) {
last_init_ = *config;
return webrtc::DataChannel::Create(provider_, type_, label, *config);
}
const webrtc::InternalDataChannelInit& last_init() const {
return last_init_;
}
private:
FakeDataChannelProvider* provider_;
cricket::DataChannelType type_;
webrtc::InternalDataChannelInit last_init_;
};
class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
@ -528,11 +535,11 @@ class MediaStreamSignalingTest: public testing::Test {
talk_base::scoped_refptr<webrtc::DataChannel> AddDataChannel(
cricket::DataChannelType type, const std::string& label, int id) {
webrtc::DataChannelInit config;
webrtc::InternalDataChannelInit config;
config.id = id;
talk_base::scoped_refptr<webrtc::DataChannel> data_channel(
webrtc::DataChannel::Create(
data_channel_provider_.get(), type, label, &config));
data_channel_provider_.get(), type, label, config));
EXPECT_TRUE(data_channel.get() != NULL);
EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get()));
return data_channel;
@ -1078,10 +1085,10 @@ TEST_F(MediaStreamSignalingTest, SctpIdAllocationNoReuse) {
TEST_F(MediaStreamSignalingTest, RtpDuplicatedLabelNotAllowed) {
AddDataChannel(cricket::DCT_RTP, "a", -1);
webrtc::DataChannelInit config;
webrtc::InternalDataChannelInit config;
talk_base::scoped_refptr<webrtc::DataChannel> data_channel =
webrtc::DataChannel::Create(
data_channel_provider_.get(), cricket::DCT_RTP, "a", &config);
data_channel_provider_.get(), cricket::DCT_RTP, "a", config);
ASSERT_TRUE(data_channel.get() != NULL);
EXPECT_FALSE(signaling_->AddDataChannel(data_channel.get()));
}
@ -1092,6 +1099,25 @@ TEST_F(MediaStreamSignalingTest, SctpDuplicatedLabelAllowed) {
AddDataChannel(cricket::DCT_SCTP, "a", -1);
}
// Verifies the correct configuration is used to create DataChannel from an OPEN
// message.
TEST_F(MediaStreamSignalingTest, CreateDataChannelFromOpenMessage) {
FakeDataChannelFactory fake_factory(data_channel_provider_.get(),
cricket::DCT_SCTP);
signaling_->SetDataChannelFactory(&fake_factory);
webrtc::DataChannelInit config;
config.id = 1;
talk_base::Buffer payload;
webrtc::WriteDataChannelOpenMessage("a", config, &payload);
cricket::ReceiveDataParams params;
params.ssrc = config.id;
EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload));
EXPECT_EQ(config.id, fake_factory.last_init().id);
EXPECT_FALSE(fake_factory.last_init().negotiated);
EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker,
fake_factory.last_init().open_handshake_role);
}
// Verifies that duplicated label from OPEN message is allowed.
TEST_F(MediaStreamSignalingTest, DuplicatedLabelFromOpenMessageAllowed) {
AddDataChannel(cricket::DCT_SCTP, "a", -1);
@ -1101,5 +1127,9 @@ TEST_F(MediaStreamSignalingTest, DuplicatedLabelFromOpenMessageAllowed) {
signaling_->SetDataChannelFactory(&fake_factory);
webrtc::DataChannelInit config;
config.id = 0;
EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage("a", config));
talk_base::Buffer payload;
webrtc::WriteDataChannelOpenMessage("a", config, &payload);
cricket::ReceiveDataParams params;
params.ssrc = config.id;
EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload));
}

View File

@ -32,6 +32,7 @@
#include "talk/app/webrtc/dtmfsender.h"
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamhandler.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/base/logging.h"
@ -359,11 +360,21 @@ bool PeerConnection::DoInitialize(
observer_ = observer;
port_allocator_.reset(
allocator_factory->CreatePortAllocator(stun_config, turn_config));
// To handle both internal and externally created port allocator, we will
// enable BUNDLE here. Also enabling TURN and disable legacy relay service.
port_allocator_->set_flags(cricket::PORTALLOCATOR_ENABLE_BUNDLE |
cricket::PORTALLOCATOR_ENABLE_SHARED_UFRAG |
cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
// enable BUNDLE here.
int portallocator_flags = cricket::PORTALLOCATOR_ENABLE_BUNDLE |
cricket::PORTALLOCATOR_ENABLE_SHARED_UFRAG |
cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET;
bool value;
if (FindConstraint(
constraints,
MediaConstraintsInterface::kEnableIPv6,
&value, NULL) && value) {
portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_IPV6;
}
port_allocator_->set_flags(portallocator_flags);
// No step delay is used while allocating ports.
port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
@ -485,8 +496,12 @@ talk_base::scoped_refptr<DataChannelInterface>
PeerConnection::CreateDataChannel(
const std::string& label,
const DataChannelInit* config) {
talk_base::scoped_ptr<InternalDataChannelInit> internal_config;
if (config) {
internal_config.reset(new InternalDataChannelInit(*config));
}
talk_base::scoped_refptr<DataChannelInterface> channel(
session_->CreateDataChannel(label, config));
session_->CreateDataChannel(label, internal_config.get()));
if (!channel.get())
return NULL;

View File

@ -25,19 +25,19 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/media/sctp/sctputils.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/base/buffer.h"
#include "talk/base/bytebuffer.h"
#include "talk/base/logging.h"
namespace cricket {
namespace webrtc {
// Format defined at
// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-01#section
static const uint8 DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
static const uint8 DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
enum DataChannelOpenMessageChannelType {
DCOMCT_ORDERED_RELIABLE = 0x00,
@ -48,10 +48,9 @@ enum DataChannelOpenMessageChannelType {
DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
};
bool ParseDataChannelOpenMessage(
const talk_base::Buffer& payload,
std::string* label,
webrtc::DataChannelInit* config) {
bool ParseDataChannelOpenMessage(const talk_base::Buffer& payload,
std::string* label,
DataChannelInit* config) {
// Format defined at
// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
@ -123,14 +122,28 @@ bool ParseDataChannelOpenMessage(
config->maxRetransmitTime = reliability_param;
break;
}
return true;
}
bool WriteDataChannelOpenMessage(
const std::string& label,
const webrtc::DataChannelInit& config,
talk_base::Buffer* payload) {
bool ParseDataChannelOpenAckMessage(const talk_base::Buffer& payload) {
talk_base::ByteBuffer buffer(payload.data(), payload.length());
uint8 message_type;
if (!buffer.ReadUInt8(&message_type)) {
LOG(LS_WARNING) << "Could not read OPEN_ACK message type.";
return false;
}
if (message_type != DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE) {
LOG(LS_WARNING) << "Data Channel OPEN_ACK message of unexpected type: "
<< message_type;
return false;
}
return true;
}
bool WriteDataChannelOpenMessage(const std::string& label,
const DataChannelInit& config,
talk_base::Buffer* payload) {
// Format defined at
// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
uint8 channel_type = 0;
@ -173,4 +186,9 @@ bool WriteDataChannelOpenMessage(
return true;
}
} // namespace cricket
void WriteDataChannelOpenAckMessage(talk_base::Buffer* payload) {
talk_base::ByteBuffer buffer(talk_base::ByteBuffer::ORDER_NETWORK);
buffer.WriteUInt8(DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE);
payload->SetData(buffer.Data(), buffer.Length());
}
} // namespace webrtc
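
For reference, the OPEN_ACK payload produced and consumed by the helpers added above is a single message-type byte; a minimal round-trip sketch using those functions (the wrapper name is made up for illustration):

// Sketch: write an OPEN_ACK with the new helper and parse it back.
#include "talk/app/webrtc/sctputils.h"
#include "talk/base/buffer.h"

bool OpenAckRoundTrips() {
  talk_base::Buffer payload;
  webrtc::WriteDataChannelOpenAckMessage(&payload);
  // |payload| now holds one byte: DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE (0x02).
  return webrtc::ParseDataChannelOpenAckMessage(payload);
}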

View File

@ -25,29 +25,31 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_MEDIA_BASE_SCTPUTILS_H_
#define TALK_MEDIA_BASE_SCTPUTILS_H_
#ifndef TALK_APP_WEBRTC_SCTPUTILS_H_
#define TALK_APP_WEBRTC_SCTPUTILS_H_
#include <string>
#include "talk/app/webrtc/datachannelinterface.h"
namespace talk_base {
class Buffer;
} // namespace talk_base
namespace webrtc {
struct DataChannelInit;
} // namespace webrtc
namespace cricket {
bool ParseDataChannelOpenMessage(const talk_base::Buffer& payload,
std::string* label,
webrtc::DataChannelInit* config);
DataChannelInit* config);
bool ParseDataChannelOpenAckMessage(const talk_base::Buffer& payload);
bool WriteDataChannelOpenMessage(const std::string& label,
const webrtc::DataChannelInit& config,
const DataChannelInit& config,
talk_base::Buffer* payload);
} // namespace cricket
void WriteDataChannelOpenAckMessage(talk_base::Buffer* payload);
} // namespace webrtc
#endif // TALK_MEDIA_BASE_SCTPUTILS_H_
#endif // TALK_APP_WEBRTC_SCTPUTILS_H_

View File

@ -25,10 +25,9 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/base/bytebuffer.h"
#include "talk/base/gunit.h"
#include "talk/media/sctp/sctputils.h"
#include "talk/app/webrtc/sctputils.h"
class SctpUtilsTest : public testing::Test {
public:
@ -80,23 +79,22 @@ class SctpUtilsTest : public testing::Test {
}
};
TEST_F(SctpUtilsTest, WriteParseMessageWithOrderedReliable) {
std::string input_label = "abc";
TEST_F(SctpUtilsTest, WriteParseOpenMessageWithOrderedReliable) {
webrtc::DataChannelInit config;
std::string label = "abc";
config.protocol = "y";
talk_base::Buffer packet;
ASSERT_TRUE(
cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
VerifyOpenMessageFormat(packet, label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
EXPECT_EQ(label, output_label);
EXPECT_EQ(config.protocol, output_config.protocol);
EXPECT_EQ(config.ordered, output_config.ordered);
EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
@ -104,24 +102,23 @@ TEST_F(SctpUtilsTest, WriteParseMessageWithOrderedReliable) {
}
TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) {
std::string input_label = "abc";
webrtc::DataChannelInit config;
std::string label = "abc";
config.ordered = false;
config.maxRetransmitTime = 10;
config.protocol = "y";
talk_base::Buffer packet;
ASSERT_TRUE(
cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
VerifyOpenMessageFormat(packet, label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
EXPECT_EQ(label, output_label);
EXPECT_EQ(config.protocol, output_config.protocol);
EXPECT_EQ(config.ordered, output_config.ordered);
EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
@ -129,25 +126,36 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) {
}
TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
std::string input_label = "abc";
webrtc::DataChannelInit config;
std::string label = "abc";
config.maxRetransmits = 10;
config.protocol = "y";
talk_base::Buffer packet;
ASSERT_TRUE(
cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
VerifyOpenMessageFormat(packet, label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
EXPECT_EQ(label, output_label);
EXPECT_EQ(config.protocol, output_config.protocol);
EXPECT_EQ(config.ordered, output_config.ordered);
EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
EXPECT_EQ(-1, output_config.maxRetransmitTime);
}
TEST_F(SctpUtilsTest, WriteParseAckMessage) {
talk_base::Buffer packet;
webrtc::WriteDataChannelOpenAckMessage(&packet);
uint8 message_type;
talk_base::ByteBuffer buffer(packet.data(), packet.length());
ASSERT_TRUE(buffer.ReadUInt8(&message_type));
EXPECT_EQ(0x02, message_type);
EXPECT_TRUE(webrtc::ParseDataChannelOpenAckMessage(packet));
}

View File

@ -36,30 +36,6 @@ using cricket::CaptureState;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
namespace webrtc {
// Constraint keys. Specified by draft-alvestrand-constraints-resolution-00b
// They are declared as static members in mediastreaminterface.h
const char MediaConstraintsInterface::kMinAspectRatio[] = "minAspectRatio";
const char MediaConstraintsInterface::kMaxAspectRatio[] = "maxAspectRatio";
const char MediaConstraintsInterface::kMaxWidth[] = "maxWidth";
const char MediaConstraintsInterface::kMinWidth[] = "minWidth";
const char MediaConstraintsInterface::kMaxHeight[] = "maxHeight";
const char MediaConstraintsInterface::kMinHeight[] = "minHeight";
const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
// Google-specific keys
const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction";
const char MediaConstraintsInterface::kLeakyBucket[] = "googLeakyBucket";
const char MediaConstraintsInterface::kTemporalLayeredScreencast[] =
"googTemporalLayeredScreencast";
// TODO(ronghuawu): Remove once cpu overuse detection is stable.
const char MediaConstraintsInterface::kCpuOveruseDetection[] =
"googCpuOveruseDetection";
} // namespace webrtc
namespace {
const double kRoundingTruncation = 0.0005;

View File

@ -1172,6 +1172,7 @@ class WebRtcSdpTest : public testing::Test {
"m=video 3457 RTP/SAVPF 101\r\n"
"a=rtpmap:101 VP8/90000\r\n"
"a=rtcp-fb:101 nack\r\n"
"a=rtcp-fb:101 nack pli\r\n"
"a=rtcp-fb:101 goog-remb\r\n"
"a=rtcp-fb:101 ccm fir\r\n";
std::ostringstream os;
@ -1203,6 +1204,9 @@ class WebRtcSdpTest : public testing::Test {
EXPECT_TRUE(vp8.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamNack,
cricket::kParamValueEmpty)));
EXPECT_TRUE(vp8.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamNack,
cricket::kRtcpFbNackParamPli)));
EXPECT_TRUE(vp8.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamRemb,
cricket::kParamValueEmpty)));
@ -1902,6 +1906,9 @@ TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
// Missing space.
const char kSdpInvalidLine6[] = "a=fingerprint:sha-1"
"4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
// MD5 is not allowed in fingerprints.
const char kSdpInvalidLine7[] = "a=fingerprint:md5 "
"4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B";
// Broken session description
ReplaceAndTryToParse("v=", kSdpDestroyer);
@ -1925,6 +1932,7 @@ TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
ReplaceAndTryToParse("a=sendrecv", kSdpInvalidLine4);
ReplaceAndTryToParse("a=sendrecv", kSdpInvalidLine5);
ReplaceAndTryToParse("a=sendrecv", kSdpInvalidLine6);
ReplaceAndTryToParse("a=sendrecv", kSdpInvalidLine7);
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithReorderedPltypes) {

View File

@ -54,23 +54,6 @@ using cricket::TransportInfo;
namespace webrtc {
const char MediaConstraintsInterface::kInternalConstraintPrefix[] = "internal";
// Supported MediaConstraints.
// DSCP constraints.
const char MediaConstraintsInterface::kEnableDscp[] = "googDscp";
// DTLS-SRTP pseudo-constraints.
const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
"DtlsSrtpKeyAgreement";
// DataChannel pseudo constraints.
const char MediaConstraintsInterface::kEnableRtpDataChannels[] =
"RtpDataChannels";
// This constraint is for internal use only, representing the Chrome command
// line flag. So it is prefixed with kInternalConstraintPrefix so JS values
// will be removed.
const char MediaConstraintsInterface::kEnableSctpDataChannels[] =
"deprecatedSctpDataChannels";
// Error messages
const char kSetLocalSdpFailed[] = "SetLocalDescription failed: ";
const char kSetRemoteSdpFailed[] = "SetRemoteDescription failed: ";
@ -1022,7 +1005,7 @@ bool WebRtcSession::ReadyToSendData() const {
talk_base::scoped_refptr<DataChannel> WebRtcSession::CreateDataChannel(
const std::string& label,
const DataChannelInit* config) {
const InternalDataChannelInit* config) {
if (state() == STATE_RECEIVEDTERMINATE) {
return NULL;
}
@ -1030,8 +1013,8 @@ talk_base::scoped_refptr<DataChannel> WebRtcSession::CreateDataChannel(
LOG(LS_ERROR) << "CreateDataChannel: Data is not supported in this call.";
return NULL;
}
DataChannelInit new_config = config ? (*config) : DataChannelInit();
InternalDataChannelInit new_config =
config ? (*config) : InternalDataChannelInit();
if (data_channel_type_ == cricket::DCT_SCTP) {
if (new_config.id < 0) {
talk_base::SSLRole role;
@ -1047,8 +1030,8 @@ talk_base::scoped_refptr<DataChannel> WebRtcSession::CreateDataChannel(
}
}
talk_base::scoped_refptr<DataChannel> channel(
DataChannel::Create(this, data_channel_type_, label, &new_config));
talk_base::scoped_refptr<DataChannel> channel(DataChannel::Create(
this, data_channel_type_, label, new_config));
if (channel && !mediastream_signaling_->AddDataChannel(channel))
return NULL;
@ -1398,8 +1381,8 @@ bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content) {
}
if (sctp) {
mediastream_signaling_->OnDataTransportCreatedForSctp();
data_channel_->SignalNewStreamReceived.connect(
this, &WebRtcSession::OnNewDataChannelReceived);
data_channel_->SignalDataReceived.connect(
this, &WebRtcSession::OnDataChannelMessageReceived);
}
return true;
}
@ -1417,14 +1400,17 @@ void WebRtcSession::CopySavedCandidates(
saved_candidates_.clear();
}
void WebRtcSession::OnNewDataChannelReceived(
const std::string& label, const DataChannelInit& init) {
void WebRtcSession::OnDataChannelMessageReceived(
cricket::DataChannel* channel,
const cricket::ReceiveDataParams& params,
const talk_base::Buffer& payload) {
ASSERT(data_channel_type_ == cricket::DCT_SCTP);
if (!mediastream_signaling_->AddDataChannelFromOpenMessage(
label, init)) {
LOG(LS_WARNING) << "Failed to create data channel from OPEN message.";
return;
if (params.type == cricket::DMT_CONTROL &&
mediastream_signaling_->IsSctpSidAvailable(params.ssrc)) {
// Received CONTROL on unused sid, process as an OPEN message.
mediastream_signaling_->AddDataChannelFromOpenMessage(params, payload);
}
// otherwise ignore the message.
}
// Returns false if bundle is enabled and rtcp_mux is disabled.

View File

@ -195,9 +195,10 @@ class WebRtcSession : public cricket::BaseSession,
virtual void RemoveSctpDataStream(uint32 sid) OVERRIDE;
virtual bool ReadyToSendData() const OVERRIDE;
// Implements DataChannelFactory.
talk_base::scoped_refptr<DataChannel> CreateDataChannel(
const std::string& label,
const DataChannelInit* config);
const InternalDataChannelInit* config) OVERRIDE;
cricket::DataChannelType data_channel_type() const;
@ -275,8 +276,11 @@ class WebRtcSession : public cricket::BaseSession,
// The |saved_candidates_| will be cleared after this function call.
void CopySavedCandidates(SessionDescriptionInterface* dest_desc);
void OnNewDataChannelReceived(const std::string& label,
const DataChannelInit& init);
// Listens to SCTP CONTROL messages on unused SIDs and processes them as OPEN
// messages.
void OnDataChannelMessageReceived(cricket::DataChannel* channel,
const cricket::ReceiveDataParams& params,
const talk_base::Buffer& payload);
bool GetLocalTrackId(uint32 ssrc, std::string* track_id);
bool GetRemoteTrackId(uint32 ssrc, std::string* track_id);

View File

@ -874,7 +874,7 @@ class WebRtcSessionTest : public testing::Test {
}
void SetLocalDescriptionWithDataChannel() {
webrtc::DataChannelInit dci;
webrtc::InternalDataChannelInit dci;
dci.reliable = false;
session_->CreateDataChannel("datachannel", &dci);
SessionDescriptionInterface* offer = CreateOffer(NULL);
@ -2675,7 +2675,7 @@ TEST_F(WebRtcSessionTest, TestSctpDataChannelSendPortParsing) {
// TEST PLAN: Set the port number to something new, set it in the SDP,
// and pass it all the way down.
webrtc::DataChannelInit dci;
webrtc::InternalDataChannelInit dci;
dci.reliable = true;
EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
talk_base::scoped_refptr<webrtc::DataChannel> dc =

View File

@ -27,6 +27,7 @@
#ifndef TALK_BASE_ASYNCSOCKET_H_
#define TALK_BASE_ASYNCSOCKET_H_
#ifndef __native_client__
#include "talk/base/common.h"
#include "talk/base/sigslot.h"
@ -138,4 +139,5 @@ class AsyncSocketAdapter : public AsyncSocket, public sigslot::has_slots<> {
} // namespace talk_base
#endif // __native_client__
#endif // TALK_BASE_ASYNCSOCKET_H_

View File

@ -28,7 +28,7 @@
#ifndef TALK_BASE_BYTEORDER_H_
#define TALK_BASE_BYTEORDER_H_
#ifdef POSIX
#if defined(POSIX) && !defined(__native_client__)
#include <arpa/inet.h>
#endif

View File

@ -349,6 +349,9 @@ void LogMessage::ConfigureLogging(const char* params, const char* filename) {
}
#endif // WIN32
LogToDebug(debug_level);
#if !defined(__native_client__) // No logging to file in NaCl.
scoped_ptr<FileStream> stream;
if (NO_LOGGING != file_level) {
stream.reset(new FileStream);
@ -357,8 +360,8 @@ void LogMessage::ConfigureLogging(const char* params, const char* filename) {
}
}
LogToDebug(debug_level);
LogToStream(stream.release(), file_level);
#endif
}
int LogMessage::ParseLogSeverity(const std::string& value) {

View File

@ -376,6 +376,13 @@ inline bool LogCheckLevel(LoggingSeverity sev) {
LOG_GLE(sev)
#define LAST_SYSTEM_ERROR \
(::GetLastError())
#elif __native_client__
#define LOG_ERR_EX(sev, err) \
LOG(sev)
#define LOG_ERR(sev) \
LOG(sev)
#define LAST_SYSTEM_ERROR \
(0)
#elif POSIX
#define LOG_ERR_EX(sev, err) \
LOG_ERRNO_EX(sev, err)

View File

@ -70,6 +70,19 @@ MessageDigest* MessageDigestFactory::Create(const std::string& alg) {
#endif
}
bool IsFips180DigestAlgorithm(const std::string& alg) {
// These are the FIPS 180 algorithms. According to RFC 4572 Section 5,
// "Self-signed certificates (for which legacy certificates are not a
// consideration) MUST use one of the FIPS 180 algorithms (SHA-1,
// SHA-224, SHA-256, SHA-384, or SHA-512) as their signature algorithm,
// and thus also MUST use it to calculate certificate fingerprints."
return alg == DIGEST_SHA_1 ||
alg == DIGEST_SHA_224 ||
alg == DIGEST_SHA_256 ||
alg == DIGEST_SHA_384 ||
alg == DIGEST_SHA_512;
}
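
A hedged usage sketch of the new whitelist (the wrapper below is illustrative only, not part of this change; the real caller is SSLFingerprint::CreateFromRfc4572 further down):

// Sketch: reject fingerprint algorithms that are not in the FIPS 180 set.
#include <string>
#include "talk/base/messagedigest.h"

bool IsAcceptableFingerprintAlgorithm(const std::string& alg) {
  // "md5", for example, is rejected; "sha-256" is accepted.
  return !alg.empty() && talk_base::IsFips180DigestAlgorithm(alg);
}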
size_t ComputeDigest(MessageDigest* digest, const void* input, size_t in_len,
void* output, size_t out_len) {
digest->Update(input, in_len);

View File

@ -60,6 +60,9 @@ class MessageDigestFactory {
static MessageDigest* Create(const std::string& alg);
};
// A whitelist of approved digest algorithms from RFC 4572 (FIPS 180).
bool IsFips180DigestAlgorithm(const std::string& alg);
// Functions to create hashes.
// Computes the hash of |in_len| bytes of |input|, using the |digest| hash

View File

@ -32,8 +32,13 @@
#include "talk/base/common.h"
#include "talk/base/logging.h"
#include "talk/base/messagequeue.h"
#if defined(__native_client__)
#include "talk/base/nullsocketserver.h"
typedef talk_base::NullSocketServer DefaultSocketServer;
#else
#include "talk/base/physicalsocketserver.h"
typedef talk_base::PhysicalSocketServer DefaultSocketServer;
#endif
namespace talk_base {
@ -129,7 +134,7 @@ MessageQueue::MessageQueue(SocketServer* ss)
// server, and provide it to the MessageQueue, since the Thread controls
// the I/O model, and MQ is agnostic to those details. Anyway, this causes
// messagequeue_unittest to depend on network libraries... yuck.
default_ss_.reset(new PhysicalSocketServer());
default_ss_.reset(new DefaultSocketServer());
ss_ = default_ss_.get();
}
ss_->SetMessageQueue(this);

View File

@ -28,6 +28,14 @@
#ifndef TALK_BASE_SOCKET_H__
#define TALK_BASE_SOCKET_H__
#if defined(__native_client__)
namespace talk_base {
// These should never be defined or instantiated.
class Socket;
class AsyncSocket;
} // namespace talk_base
#else
#include <errno.h>
#ifdef POSIX
@ -199,4 +207,5 @@ class Socket {
} // namespace talk_base
#endif // !__native_client__
#endif // TALK_BASE_SOCKET_H__

View File

@ -65,7 +65,7 @@ struct SSLFingerprint {
static SSLFingerprint* CreateFromRfc4572(const std::string& algorithm,
const std::string& fingerprint) {
if (algorithm.empty())
if (algorithm.empty() || !talk_base::IsFips180DigestAlgorithm(algorithm))
return NULL;
if (fingerprint.empty())

View File

@ -711,7 +711,7 @@ void AsyncWriteStream::ClearBufferAndWrite() {
}
}
#ifdef POSIX
#if defined(POSIX) && !defined(__native_client__)
// Have to identically rewrite the FileStream destructor or else it would call
// the base class's Close() instead of the sub-class's.

View File

@ -28,6 +28,8 @@
#ifndef TALK_BASE_STREAM_H_
#define TALK_BASE_STREAM_H_
#include <stdio.h>
#include "talk/base/basictypes.h"
#include "talk/base/buffer.h"
#include "talk/base/criticalsection.h"
@ -497,7 +499,6 @@ class CircularFileStream : public FileStream {
size_t read_segment_available_;
};
// A stream which pushes writes onto a separate thread and
// returns from the write call immediately.
class AsyncWriteStream : public StreamInterface {
@ -539,7 +540,7 @@ class AsyncWriteStream : public StreamInterface {
};
#ifdef POSIX
#if defined(POSIX) && !defined(__native_client__)
// A FileStream that is actually not a file, but the output or input of a
// sub-command. See "man 3 popen" for documentation of the underlying OS popen()
// function.

View File

@ -50,7 +50,6 @@
#include <limits.h>
#include <pwd.h>
#include <stdio.h>
#include <unistd.h>
#endif // POSIX && !OSX
#if defined(LINUX)
@ -368,6 +367,8 @@ bool UnixFilesystem::GetAppPathname(Pathname* path) {
if (success)
path->SetPathname(path8);
return success;
#elif defined(__native_client__)
return false;
#else // OSX
char buffer[NAME_MAX+1];
size_t len = readlink("/proc/self/exe", buffer, ARRAY_SIZE(buffer) - 1);
@ -453,6 +454,7 @@ bool UnixFilesystem::GetAppDataFolder(Pathname* path, bool per_user) {
if (!CreateFolder(*path, 0700)) {
return false;
}
#if !defined(__native_client__)
// If the folder already exists, it may have the wrong mode or be owned by
// someone else, both of which are security problems. Setting the mode
// avoids both issues since it will fail if the path is not owned by us.
@ -460,6 +462,7 @@ bool UnixFilesystem::GetAppDataFolder(Pathname* path, bool per_user) {
LOG_ERR(LS_ERROR) << "Can't set mode on " << path;
return false;
}
#endif
return true;
}
@ -553,3 +556,11 @@ char* UnixFilesystem::CopyString(const std::string& str) {
}
} // namespace talk_base
#if defined(__native_client__)
extern "C" int __attribute__((weak))
link(const char* oldpath, const char* newpath) {
errno = EACCES;
return -1;
}
#endif

View File

@ -847,8 +847,6 @@
# TODO(ronghuawu): Enable when SCTP is ready.
# 'media/sctp/sctpdataengine.cc',
# 'media/sctp/sctpdataengine.h',
'media/sctp/sctputils.cc',
'media/sctp/sctputils.h',
'media/webrtc/webrtccommon.h',
'media/webrtc/webrtcexport.h',
'media/webrtc/webrtcmediaengine.h',
@ -1169,6 +1167,8 @@
'app/webrtc/proxy.h',
'app/webrtc/remotevideocapturer.cc',
'app/webrtc/remotevideocapturer.h',
'app/webrtc/sctputils.cc',
'app/webrtc/sctputils.h',
'app/webrtc/statscollector.cc',
'app/webrtc/statscollector.h',
'app/webrtc/statstypes.h',

View File

@ -290,7 +290,6 @@ talk.Library(env, name = "jingle",
"media/base/videoframe.cc",
"media/devices/devicemanager.cc",
"media/devices/filevideocapturer.cc",
"media/sctp/sctputils.cc",
"session/media/audiomonitor.cc",
"session/media/call.cc",
"session/media/channel.cc",

View File

@ -389,6 +389,7 @@
'app/webrtc/peerconnectioninterface_unittest.cc',
# 'app/webrtc/peerconnectionproxy_unittest.cc',
'app/webrtc/remotevideocapturer_unittest.cc',
'app/webrtc/sctputils.cc',
'app/webrtc/test/fakeaudiocapturemodule.cc',
'app/webrtc/test/fakeaudiocapturemodule.h',
'app/webrtc/test/fakeaudiocapturemodule_unittest.cc',

View File

@ -78,6 +78,7 @@ const int kPreferredStereo = 0;
const int kPreferredUseInbandFec = 0;
const char kRtcpFbParamNack[] = "nack";
const char kRtcpFbNackParamPli[] = "pli";
const char kRtcpFbParamRemb[] = "goog-remb";
const char kRtcpFbParamCcm[] = "ccm";

View File

@ -89,6 +89,7 @@ extern const int kPreferredUseInbandFec;
// rtcp-fb messages according to RFC 4585
extern const char kRtcpFbParamNack[];
extern const char kRtcpFbNackParamPli[];
// rtcp-fb messages according to
// http://tools.ietf.org/html/draft-alvestrand-rmcat-remb-00
extern const char kRtcpFbParamRemb[];

View File

@ -48,6 +48,7 @@ class FakeVideoRenderer : public VideoRenderer {
}
virtual bool SetSize(int width, int height, int reserved) {
talk_base::CritScope cs(&crit_);
width_ = width;
height_ = height;
++num_set_sizes_;
@ -56,6 +57,7 @@ class FakeVideoRenderer : public VideoRenderer {
}
virtual bool RenderFrame(const VideoFrame* frame) {
talk_base::CritScope cs(&crit_);
// TODO(zhurunz) Check with VP8 team to see if we can remove this
// tolerance on Y values.
black_frame_ = CheckFrameColorYuv(6, 48, 128, 128, 128, 128, frame);
@ -79,11 +81,26 @@ class FakeVideoRenderer : public VideoRenderer {
}
int errors() const { return errors_; }
int width() const { return width_; }
int height() const { return height_; }
int num_set_sizes() const { return num_set_sizes_; }
int num_rendered_frames() const { return num_rendered_frames_; }
bool black_frame() const { return black_frame_; }
int width() const {
talk_base::CritScope cs(&crit_);
return width_;
}
int height() const {
talk_base::CritScope cs(&crit_);
return height_;
}
int num_set_sizes() const {
talk_base::CritScope cs(&crit_);
return num_set_sizes_;
}
int num_rendered_frames() const {
talk_base::CritScope cs(&crit_);
return num_rendered_frames_;
}
bool black_frame() const {
talk_base::CritScope cs(&crit_);
return black_frame_;
}
sigslot::signal3<int, int, int> SignalSetSize;
sigslot::signal1<const VideoFrame*> SignalRenderFrame;
@ -143,6 +160,7 @@ class FakeVideoRenderer : public VideoRenderer {
int num_set_sizes_;
int num_rendered_frames_;
bool black_frame_;
mutable talk_base::CriticalSection crit_;
};
} // namespace cricket

View File

@ -50,10 +50,6 @@ class RateLimiter;
class Timing;
}
namespace webrtc {
struct DataChannelInit;
}
namespace cricket {
class AudioRenderer;
@ -1157,11 +1153,6 @@ class DataMediaChannel : public MediaChannel {
// Signal when the media channel is ready to send the stream. Arguments are:
// writable(bool)
sigslot::signal1<bool> SignalReadyToSend;
// Signal for notifying when a new stream is added from the remote side. Used
// for the in-band negotiation through the OPEN message for SCTP data
// channel.
sigslot::signal2<const std::string&, const webrtc::DataChannelInit&>
SignalNewStreamReceived;
};
} // namespace cricket

View File

@ -893,8 +893,11 @@ class VideoMediaChannelTest : public testing::Test,
talk_base::scoped_ptr<cricket::FakeVideoCapturer> capturer(
new cricket::FakeVideoCapturer);
capturer->SetScreencast(true);
cricket::VideoFormat format(1024, 768,
cricket::VideoFormat::FpsToInterval(5), 0);
const int kTestWidth = 160;
const int kTestHeight = 120;
cricket::VideoFormat format(kTestWidth, kTestHeight,
cricket::VideoFormat::FpsToInterval(5),
cricket::FOURCC_I420);
EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(5678)));
@ -902,8 +905,10 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(5678)));
EXPECT_TRUE(channel_->SetRenderer(5678, &renderer1));
EXPECT_TRUE(capturer->CaptureCustomFrame(1024, 768, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, 1024, 768, kTimeout);
EXPECT_TRUE(capturer->CaptureCustomFrame(
kTestWidth, kTestHeight, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer1, 1, kTestWidth, kTestHeight, kTimeout);
// Get stats, and make sure they are correct for two senders.
cricket::VideoMediaInfo info;
@ -917,8 +922,8 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(DefaultCodec().height, info.senders[0].frame_height);
EXPECT_EQ(1U, info.senders[1].ssrcs().size());
EXPECT_EQ(5678U, info.senders[1].ssrcs()[0]);
EXPECT_EQ(1024, info.senders[1].frame_width);
EXPECT_EQ(768, info.senders[1].frame_height);
EXPECT_EQ(kTestWidth, info.senders[1].frame_width);
EXPECT_EQ(kTestHeight, info.senders[1].frame_height);
// The capturer must be unregistered here as it runs out of its scope next.
EXPECT_TRUE(channel_->SetCapturer(5678, NULL));
}
@ -1221,9 +1226,11 @@ class VideoMediaChannelTest : public testing::Test,
// Tests that we can add and remove capturers and frames are sent out properly
void AddRemoveCapturer() {
const cricket::VideoCodec codec(DefaultCodec());
cricket::VideoCodec codec = DefaultCodec();
codec.width = 320;
codec.height = 240;
const int time_between_send = TimeBetweenSend(codec);
EXPECT_TRUE(SetDefaultCodec());
EXPECT_TRUE(SetOneCodec(codec));
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(channel_->SetRender(true));
EXPECT_EQ(0, renderer_.num_rendered_frames());
@ -1232,8 +1239,9 @@ class VideoMediaChannelTest : public testing::Test,
talk_base::scoped_ptr<cricket::FakeVideoCapturer> capturer(
new cricket::FakeVideoCapturer);
capturer->SetScreencast(true);
cricket::VideoFormat format(1024, 768,
cricket::VideoFormat::FpsToInterval(30), 0);
cricket::VideoFormat format(480, 360,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420);
EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
// All capturers start generating frames with the same timestamp. ViE does
// not allow the same timestamp to be used. Capture one frame before
@ -1305,11 +1313,6 @@ class VideoMediaChannelTest : public testing::Test,
void AddRemoveCapturerMultipleSources() {
// WebRTC implementation will drop frames if pushed too quickly. Wait the
// interval time to avoid that.
const cricket::VideoFormat send_format(
1024,
768,
cricket::VideoFormat::FpsToInterval(30),
0);
// WebRTC implementation will drop frames if pushed too quickly. Wait the
// interval time to avoid that.
// Set up the stream associated with the engine.
@ -1352,11 +1355,17 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(channel_->SetRender(true));
// Test capturer associated with engine.
EXPECT_TRUE(capturer1->CaptureCustomFrame(1024, 768, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, 1024, 768, kTimeout);
const int kTestWidth = 160;
const int kTestHeight = 120;
EXPECT_TRUE(capturer1->CaptureCustomFrame(
kTestWidth, kTestHeight, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer1, 1, kTestWidth, kTestHeight, kTimeout);
// Capture a frame with the additional capturer2; frames should be received.
EXPECT_TRUE(capturer2->CaptureCustomFrame(1024, 768, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, 1024, 768, kTimeout);
EXPECT_TRUE(capturer2->CaptureCustomFrame(
kTestWidth, kTestHeight, cricket::FOURCC_I420));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer2, 1, kTestWidth, kTestHeight, kTimeout);
// Successfully remove the capturer.
EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
// Fail to re-remove the capturer.

View File

@ -32,14 +32,12 @@
#include <sstream>
#include <vector>
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/base/buffer.h"
#include "talk/base/helpers.h"
#include "talk/base/logging.h"
#include "talk/media/base/codec.h"
#include "talk/media/base/constants.h"
#include "talk/media/base/streamparams.h"
#include "talk/media/sctp/sctputils.h"
#include "usrsctplib/usrsctp.h"
namespace {
@ -497,11 +495,17 @@ bool SctpDataMediaChannel::RemoveSendStream(uint32 ssrc) {
}
bool SctpDataMediaChannel::AddRecvStream(const StreamParams& stream) {
return AddStream(stream);
// SCTP DataChannels are always bi-directional and calling AddSendStream will
// enable both sending and receiving on the stream. So AddRecvStream is a
// no-op.
return true;
}
bool SctpDataMediaChannel::RemoveRecvStream(uint32 ssrc) {
return ResetStream(ssrc);
// SCTP DataChannels are always bi-directional and calling RemoveSendStream
// will disable both sending and receiving on the stream. So RemoveRecvStream
// is a no-op.
return true;
}
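// Illustrative usage sketch (not part of this change; |channel| and |kSid| are
// hypothetical names): because the stream is bi-directional, registering the
// send side alone is enough to both send and receive on that sid.
//
//   cricket::StreamParams sparams = cricket::StreamParams::CreateLegacy(kSid);
//   channel->AddSendStream(sparams);  // opens the stream for send and recv
//   channel->AddRecvStream(sparams);  // no-op, still returns true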
bool SctpDataMediaChannel::SendData(
@ -619,34 +623,12 @@ void SctpDataMediaChannel::OnInboundPacketFromSctpToChannel(
void SctpDataMediaChannel::OnDataFromSctpToChannel(
const ReceiveDataParams& params, talk_base::Buffer* buffer) {
if (open_streams_.find(params.ssrc) == open_streams_.end()) {
if (params.type == DMT_CONTROL) {
std::string label;
webrtc::DataChannelInit config;
if (ParseDataChannelOpenMessage(*buffer, &label, &config)) {
config.id = params.ssrc;
// Do not send the OPEN message for this data channel.
config.negotiated = true;
SignalNewStreamReceived(label, config);
// Add the stream immediately.
StreamParams sparams = StreamParams::CreateLegacy(params.ssrc);
AddSendStream(sparams);
AddRecvStream(sparams);
} else {
LOG(LS_ERROR) << debug_name_ << "->OnDataFromSctpToChannel(...): "
<< "Received malformed control message";
}
} else {
LOG(LS_WARNING) << debug_name_ << "->OnDataFromSctpToChannel(...): "
<< "Received packet for unknown ssrc: " << params.ssrc;
}
return;
}
if (receiving_) {
LOG(LS_VERBOSE) << debug_name_ << "->OnDataFromSctpToChannel(...): "
<< "Posting with length: " << buffer->length();
<< "Posting with length: " << buffer->length()
<< " on stream " << params.ssrc;
// Reports all received messages to upper layers, no matter whether the sid
// is known.
SignalDataReceived(params, buffer->data(), buffer->length());
} else {
LOG(LS_WARNING) << debug_name_ << "->OnDataFromSctpToChannel(...): "
@ -663,9 +645,7 @@ bool SctpDataMediaChannel::AddStream(const StreamParams& stream) {
const uint32 ssrc = stream.first_ssrc();
if (open_streams_.find(ssrc) != open_streams_.end()) {
// We usually get an AddSendStream and an AddRecvStream for each stream, so
// this is really unlikely to be a useful warning message.
LOG(LS_VERBOSE) << debug_name_ << "->Add(Send|Recv)Stream(...): "
LOG(LS_WARNING) << debug_name_ << "->Add(Send|Recv)Stream(...): "
<< "Not adding data stream '" << stream.id
<< "' with ssrc=" << ssrc
<< " because stream is already open.";
@ -976,5 +956,4 @@ void SctpDataMediaChannel::OnMessage(talk_base::Message* msg) {
}
}
}
} // namespace cricket
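With this change the SCTP media channel no longer interprets DMT_CONTROL
packets arriving on unknown sids; every received message is surfaced through
SignalDataReceived and interpretation happens in a higher layer. A minimal
consumer-side sketch (hypothetical handler; only the existing
cricket::ReceiveDataParams fields are assumed):

    void OnDataReceived(const cricket::ReceiveDataParams& params,
                        const char* data, size_t len) {
      if (params.type == cricket::DMT_CONTROL) {
        // Parse OPEN / OPEN_ACK control messages here.
      } else {
        // Deliver user payload for params.ssrc to the matching data channel.
      }
    }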

View File

@ -30,7 +30,7 @@
#include <errno.h>
#include <string>
#include <vector>
#include <set>
namespace cricket {
// Some ERRNO values get re-#defined to WSA* equivalents in some talk/
@ -216,7 +216,6 @@ class SctpDataMediaChannel : public DataMediaChannel,
talk_base::Buffer* buffer);
void OnNotificationFromSctp(talk_base::Buffer* buffer);
void OnNotificationAssocChange(const sctp_assoc_change& change);
void OnStreamResetEvent(const struct sctp_stream_reset_event* evt);
// Responsible for marshalling incoming data to the channels listeners, and

View File

@ -31,7 +31,6 @@
#include <string>
#include <vector>
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/base/bind.h"
#include "talk/base/buffer.h"
#include "talk/base/criticalsection.h"
@ -44,7 +43,6 @@
#include "talk/media/base/constants.h"
#include "talk/media/base/mediachannel.h"
#include "talk/media/sctp/sctpdataengine.h"
#include "talk/media/sctp/sctputils.h"
enum {
MSG_PACKET = 1,
@ -276,8 +274,6 @@ class SctpDataMediaChannelTest : public testing::Test,
// When data is received, pass it to the SctpFakeDataReceiver.
channel->SignalDataReceived.connect(
recv, &SctpFakeDataReceiver::OnDataReceived);
channel->SignalNewStreamReceived.connect(
this, &SctpDataMediaChannelTest::OnNewStreamReceived);
return channel;
}
@ -286,6 +282,7 @@ class SctpDataMediaChannelTest : public testing::Test,
cricket::SendDataResult* result) {
cricket::SendDataParams params;
params.ssrc = ssrc;
return chan->SendData(params, talk_base::Buffer(
msg.data(), msg.length()), result);
}
@ -313,15 +310,6 @@ class SctpDataMediaChannelTest : public testing::Test,
SctpFakeDataReceiver* receiver1() { return recv1_.get(); }
SctpFakeDataReceiver* receiver2() { return recv2_.get(); }
void OnNewStreamReceived(const std::string& label,
const webrtc::DataChannelInit& init) {
last_label_ = label;
last_dc_init_ = init;
}
std::string last_label() { return last_label_; }
webrtc::DataChannelInit last_dc_init() { return last_dc_init_; }
private:
talk_base::scoped_ptr<cricket::SctpDataEngine> engine_;
talk_base::scoped_ptr<SctpFakeNetworkInterface> net1_;
@ -330,8 +318,6 @@ class SctpDataMediaChannelTest : public testing::Test,
talk_base::scoped_ptr<SctpFakeDataReceiver> recv2_;
talk_base::scoped_ptr<cricket::SctpDataMediaChannel> chan1_;
talk_base::scoped_ptr<cricket::SctpDataMediaChannel> chan2_;
std::string last_label_;
webrtc::DataChannelInit last_dc_init_;
};
// Verifies that SignalReadyToSend is fired.
@ -389,33 +375,6 @@ TEST_F(SctpDataMediaChannelTest, SendData) {
<< "recv1.last_data=" << receiver1()->last_data();
}
TEST_F(SctpDataMediaChannelTest, SendReceiveOpenMessage) {
SetupConnectedChannels();
std::string label("x");
webrtc::DataChannelInit config;
config.id = 10;
// Send the OPEN message on an unknown ssrc.
channel1()->AddSendStream(cricket::StreamParams::CreateLegacy(config.id));
cricket::SendDataParams params;
params.ssrc = config.id;
params.type = cricket::DMT_CONTROL;
cricket::SendDataResult result;
talk_base::Buffer buffer;
ASSERT_TRUE(cricket::WriteDataChannelOpenMessage(label, config, &buffer));
ASSERT_TRUE(channel1()->SendData(params, buffer, &result));
// Send data on the new ssrc immediately after sending the OPEN message.
ASSERT_TRUE(SendData(channel1(), config.id, "hi chan2", &result));
// Verifies the received OPEN message.
EXPECT_TRUE_WAIT(last_label() == label, 1000);
EXPECT_EQ(config.id, last_dc_init().id);
EXPECT_EQ(true, last_dc_init().negotiated);
// Verifies the received data.
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), config.id, "hi chan2"), 1000);
}
TEST_F(SctpDataMediaChannelTest, ClosesRemoteStream) {
SetupConnectedChannels();
SignalChannelClosedObserver chan_1_sig_receiver, chan_2_sig_receiver;

View File

@ -1310,6 +1310,8 @@ static void AddDefaultFeedbackParams(VideoCodec* codec) {
codec->AddFeedbackParam(kFir);
const FeedbackParam kNack(kRtcpFbParamNack, kParamValueEmpty);
codec->AddFeedbackParam(kNack);
const FeedbackParam kPli(kRtcpFbParamNack, kRtcpFbNackParamPli);
codec->AddFeedbackParam(kPli);
const FeedbackParam kRemb(kRtcpFbParamRemb, kParamValueEmpty);
codec->AddFeedbackParam(kRemb);
}
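With PLI added above, a codec passing through AddDefaultFeedbackParams
advertises the full feedback set. As an illustration (the payload type 100 is
hypothetical), the resulting SDP entry for VP8 would carry attributes such as:

    a=rtcp-fb:100 ccm fir
    a=rtcp-fb:100 nack
    a=rtcp-fb:100 nack pli
    a=rtcp-fb:100 goog-remb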
@ -2787,7 +2789,7 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) {
}
if (dscp_option_changed) {
talk_base::DiffServCodePoint dscp = talk_base::DSCP_DEFAULT;
if (options.dscp.GetWithDefaultIfUnset(false))
if (options_.dscp.GetWithDefaultIfUnset(false))
dscp = kVideoDscpValue;
if (MediaChannel::SetDscp(dscp) != 0) {
LOG(LS_WARNING) << "Failed to set DSCP settings for video channel";

View File

@ -147,6 +147,20 @@ class WebRtcVideoEngineTestFake : public testing::Test,
channel_->SendFrame(&capturer, &frame);
return true;
}
void VerifyCodecFeedbackParams(const cricket::VideoCodec& codec) {
EXPECT_TRUE(codec.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamNack,
cricket::kParamValueEmpty)));
EXPECT_TRUE(codec.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamNack,
cricket::kRtcpFbNackParamPli)));
EXPECT_TRUE(codec.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamRemb,
cricket::kParamValueEmpty)));
EXPECT_TRUE(codec.HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamCcm,
cricket::kRtcpFbCcmParamFir)));
}
void VerifyVP8SendCodec(int channel_num,
unsigned int width,
unsigned int height,
@ -237,8 +251,10 @@ TEST_F(WebRtcVideoEngineTest, WebRtcShouldLog) {
EXPECT_EQ(talk_base::LS_INFO, talk_base::LogMessage::GetLogToStream(&stream));
webrtc::Trace::Add(webrtc::kTraceStateInfo, webrtc::kTraceUndefined, 0,
webrtc_log);
EXPECT_TRUE_WAIT(std::string::npos != str.find(webrtc_log), 10);
talk_base::Thread::Current()->ProcessMessages(100);
talk_base::LogMessage::RemoveLogToStream(&stream);
// Access |str| after LogMessage is done with it to avoid a data race.
EXPECT_NE(std::string::npos, str.find(webrtc_log));
}
// Tests that webrtc logs are not logged when they shouldn't be.
@ -889,6 +905,17 @@ TEST_F(WebRtcVideoEngineTestFake, AddRecvStream1On1) {
EXPECT_EQ(channel_num, vie_.GetLastChannel());
}
// Test that NACK, PLI and REMB are enabled for internal codec.
TEST_F(WebRtcVideoEngineTestFake, InternalCodecFeedbackParams) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
// VP8 will appear at the beginning.
size_t pos = 0;
EXPECT_EQ("VP8", codecs[pos].name);
VerifyCodecFeedbackParams(codecs[pos]);
}
// Test that AddRecvStream doesn't change remb for 1:1 call.
TEST_F(WebRtcVideoEngineTestFake, NoRembChangeAfterAddRecvStream) {
EXPECT_TRUE(SetupEngine());
@ -1395,6 +1422,322 @@ TEST_F(WebRtcVideoEngineTestFake, TestSetInvalidCpuThreshold) {
}
TEST_F(WebRtcVideoEngineTestFake, ResetCodecOnScreencast) {
EXPECT_TRUE(SetupEngine());
cricket::VideoOptions options;
options.video_noise_reduction.Set(true);
EXPECT_TRUE(channel_->SetOptions(options));
// Set send codec.
cricket::VideoCodec codec(kVP8Codec);
std::vector<cricket::VideoCodec> codec_list;
codec_list.push_back(codec);
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(123)));
EXPECT_TRUE(channel_->SetSendCodecs(codec_list));
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_EQ(1, vie_.num_set_send_codecs());
webrtc::VideoCodec gcodec;
memset(&gcodec, 0, sizeof(gcodec));
int channel_num = vie_.GetLastChannel();
EXPECT_EQ(0, vie_.GetSendCodec(channel_num, gcodec));
EXPECT_TRUE(gcodec.codecSpecific.VP8.denoisingOn);
// Send a screencast frame with the same size.
// Verify that denoising is turned off.
SendI420ScreencastFrame(kVP8Codec.width, kVP8Codec.height);
EXPECT_EQ(2, vie_.num_set_send_codecs());
EXPECT_EQ(0, vie_.GetSendCodec(channel_num, gcodec));
EXPECT_FALSE(gcodec.codecSpecific.VP8.denoisingOn);
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderIfFactoryIsNotGiven) {
engine_.SetExternalDecoderFactory(NULL);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterDecoderIfFactoryIsGiven) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_TRUE(vie_.ExternalDecoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderMultipleTimes) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_TRUE(vie_.ExternalDecoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
EXPECT_EQ(1, decoder_factory_.GetNumCreatedDecoders());
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
EXPECT_EQ(1, decoder_factory_.GetNumCreatedDecoders());
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderForNonVP8) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kRedCodec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderIfFactoryIsNotGiven) {
engine_.SetExternalEncoderFactory(NULL);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalEncoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterEncoderIfFactoryIsGiven) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderMultipleTimes) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterEncoderWithMultipleSendStreams) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we add the first stream (1234), it reuses the default send channel,
// so it doesn't increase the registration count of external encoders.
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(1234)));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we add the second stream (2345), it creates a new channel and
// increments the registration count.
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(2345)));
EXPECT_EQ(2, vie_.GetTotalNumExternalEncoderRegistered());
// At this moment the total registration count is two, but only one encoder
// is registered per channel.
int channel_num = vie_.GetLastChannel();
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
// Removing send streams decrements the registration count.
EXPECT_TRUE(channel_->RemoveSendStream(1234));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we remove the last send stream, it also destroys the last send
// channel and causes the registration count to drop to zero. It is a little
// weird, but not a bug.
EXPECT_TRUE(channel_->RemoveSendStream(2345));
EXPECT_EQ(0, vie_.GetTotalNumExternalEncoderRegistered());
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderForNonVP8) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
// Note: unlike SetRecvCodecs, we must set a valid video codec for
// channel_->SetSendCodecs() to succeed.
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalEncoderRegistered(channel_num));
}
// Test that NACK, PLI and REMB are enabled for external codec.
TEST_F(WebRtcVideoEngineTestFake, ExternalCodecFeedbackParams) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
// The external codec will appear last.
size_t pos = codecs.size() - 1;
EXPECT_EQ("GENERIC", codecs[pos].name);
VerifyCodecFeedbackParams(codecs[pos]);
}
// Test that an external codec will be added to the end of the supported codec list.
TEST_F(WebRtcVideoEngineTestFake, ExternalCodecAddedToTheEnd) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
EXPECT_EQ("VP8", codecs[0].name);
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
codecs = engine_.codecs();
cricket::VideoCodec internal_codec = codecs[0];
cricket::VideoCodec external_codec = codecs[codecs.size() - 1];
// The external codec will appear last.
EXPECT_EQ("GENERIC", external_codec.name);
// The internal codec is preferred.
EXPECT_GE(internal_codec.preference, external_codec.preference);
}
// Test that an external codec will be ignored if it has the same name as one
// of the internal codecs.
TEST_F(WebRtcVideoEngineTestFake, ExternalCodecIgnored) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> internal_codecs(engine_.codecs());
EXPECT_EQ("VP8", internal_codecs[0].name);
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
std::vector<cricket::VideoCodec> codecs = engine_.codecs();
EXPECT_EQ("VP8", codecs[0].name);
EXPECT_EQ(internal_codecs[0].height, codecs[0].height);
EXPECT_EQ(internal_codecs[0].width, codecs[0].width);
// Verify the last codec is not the external codec.
EXPECT_NE("VP8", codecs[codecs.size() - 1].name);
}
TEST_F(WebRtcVideoEngineTestFake, UpdateEncoderCodecsAfterSetFactory) {
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
encoder_factory_.NotifyCodecsAvailable();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
// Tests that OnReadyToSend will be propagated into ViE.
TEST_F(WebRtcVideoEngineTestFake, OnReadyToSend) {
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
EXPECT_TRUE(vie_.GetIsTransmitting(channel_num));
channel_->OnReadyToSend(false);
EXPECT_FALSE(vie_.GetIsTransmitting(channel_num));
channel_->OnReadyToSend(true);
EXPECT_TRUE(vie_.GetIsTransmitting(channel_num));
}
#if 0
TEST_F(WebRtcVideoEngineTestFake, CaptureFrameTimestampToNtpTimestamp) {
EXPECT_TRUE(SetupEngine());
int capture_id = vie_.GetCaptureId(vie_.GetLastChannel());
// Set send codec.
cricket::VideoCodec codec(kVP8Codec);
std::vector<cricket::VideoCodec> codec_list;
codec_list.push_back(codec);
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(123)));
EXPECT_TRUE(channel_->SetSendCodecs(codec_list));
EXPECT_TRUE(channel_->SetSend(true));
int64 timestamp = time(NULL) * talk_base::kNumNanosecsPerSec;
SendI420ScreencastFrameWithTimestamp(
kVP8Codec.width, kVP8Codec.height, timestamp);
EXPECT_EQ(talk_base::UnixTimestampNanosecsToNtpMillisecs(timestamp),
vie_.GetCaptureLastTimestamp(capture_id));
SendI420ScreencastFrameWithTimestamp(kVP8Codec.width, kVP8Codec.height, 0);
EXPECT_EQ(0, vie_.GetCaptureLastTimestamp(capture_id));
}
#endif
/////////////////////////
// Tests with real ViE //
/////////////////////////
@ -1619,6 +1962,10 @@ TEST_F(WebRtcVideoMediaChannelTest, TestSetDscpOptions) {
options.dscp.Set(true);
EXPECT_TRUE(channel_->SetOptions(options));
EXPECT_EQ(talk_base::DSCP_AF41, network_interface->dscp());
// Verify previous value is not modified if dscp option is not set.
cricket::VideoOptions options1;
EXPECT_TRUE(channel_->SetOptions(options1));
EXPECT_EQ(talk_base::DSCP_AF41, network_interface->dscp());
options.dscp.Set(false);
EXPECT_TRUE(channel_->SetOptions(options));
EXPECT_EQ(talk_base::DSCP_DEFAULT, network_interface->dscp());
@ -1697,327 +2044,3 @@ TEST_F(WebRtcVideoMediaChannelTest, TwoStreamsReUseFirstStream) {
Base::TwoStreamsReUseFirstStream(cricket::VideoCodec(100, "VP8", 640, 400, 30,
0));
}
TEST_F(WebRtcVideoEngineTestFake, ResetCodecOnScreencast) {
EXPECT_TRUE(SetupEngine());
cricket::VideoOptions options;
options.video_noise_reduction.Set(true);
EXPECT_TRUE(channel_->SetOptions(options));
// Set send codec.
cricket::VideoCodec codec(kVP8Codec);
std::vector<cricket::VideoCodec> codec_list;
codec_list.push_back(codec);
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(123)));
EXPECT_TRUE(channel_->SetSendCodecs(codec_list));
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_EQ(1, vie_.num_set_send_codecs());
webrtc::VideoCodec gcodec;
memset(&gcodec, 0, sizeof(gcodec));
int channel_num = vie_.GetLastChannel();
EXPECT_EQ(0, vie_.GetSendCodec(channel_num, gcodec));
EXPECT_TRUE(gcodec.codecSpecific.VP8.denoisingOn);
// Send a screencast frame with the same size.
// Verify that denoising is turned off.
SendI420ScreencastFrame(kVP8Codec.width, kVP8Codec.height);
EXPECT_EQ(2, vie_.num_set_send_codecs());
EXPECT_EQ(0, vie_.GetSendCodec(channel_num, gcodec));
EXPECT_FALSE(gcodec.codecSpecific.VP8.denoisingOn);
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderIfFactoryIsNotGiven) {
engine_.SetExternalDecoderFactory(NULL);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterDecoderIfFactoryIsGiven) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_TRUE(vie_.ExternalDecoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderMultipleTimes) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_TRUE(vie_.ExternalDecoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
EXPECT_EQ(1, decoder_factory_.GetNumCreatedDecoders());
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(1, vie_.GetNumExternalDecoderRegistered(channel_num));
EXPECT_EQ(1, decoder_factory_.GetNumCreatedDecoders());
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterDecoderForNonVP8) {
decoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
engine_.SetExternalDecoderFactory(&decoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kRedCodec);
EXPECT_TRUE(channel_->SetRecvCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalDecoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderIfFactoryIsNotGiven) {
engine_.SetExternalEncoderFactory(NULL);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalEncoderRegistered(channel_num));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterEncoderIfFactoryIsGiven) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderMultipleTimes) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
TEST_F(WebRtcVideoEngineTestFake, RegisterEncoderWithMultipleSendStreams) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we add the first stream (1234), it reuses the default send channel,
// so it doesn't increase the registration count of external encoders.
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(1234)));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we add the second stream (2345), it creates a new channel and
// increments the registration count.
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(2345)));
EXPECT_EQ(2, vie_.GetTotalNumExternalEncoderRegistered());
// At this moment the total registration count is two, but only one encoder
// is registered per channel.
int channel_num = vie_.GetLastChannel();
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
// Removing send streams decrements the registration count.
EXPECT_TRUE(channel_->RemoveSendStream(1234));
EXPECT_EQ(1, vie_.GetTotalNumExternalEncoderRegistered());
// When we remove the last send stream, it also destroys the last send
// channel and causes the registration count to drop to zero. It is a little
// weird, but not a bug.
EXPECT_TRUE(channel_->RemoveSendStream(2345));
EXPECT_EQ(0, vie_.GetTotalNumExternalEncoderRegistered());
}
TEST_F(WebRtcVideoEngineTestFake, DontRegisterEncoderForNonVP8) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
// Note: unlike SetRecvCodecs, we must set a valid video codec for
// channel_->SetSendCodecs() to succeed.
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_EQ(0, vie_.GetNumExternalEncoderRegistered(channel_num));
}
// Test that NACK and REMB are enabled for external codec.
TEST_F(WebRtcVideoEngineTestFake, FeedbackParamsForNonVP8) {
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
// The external codec will appear last.
size_t pos = codecs.size() - 1;
EXPECT_EQ("GENERIC", codecs[pos].name);
EXPECT_TRUE(codecs[pos].HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamNack,
cricket::kParamValueEmpty)));
EXPECT_TRUE(codecs[pos].HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamRemb,
cricket::kParamValueEmpty)));
EXPECT_TRUE(codecs[pos].HasFeedbackParam(
cricket::FeedbackParam(cricket::kRtcpFbParamCcm,
cricket::kRtcpFbCcmParamFir)));
}
// Test that an external codec will be added to the end of the supported codec list.
TEST_F(WebRtcVideoEngineTestFake, ExternalCodecAddedToTheEnd) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
EXPECT_EQ("VP8", codecs[0].name);
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric,
"GENERIC");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
codecs = engine_.codecs();
cricket::VideoCodec internal_codec = codecs[0];
cricket::VideoCodec external_codec = codecs[codecs.size() - 1];
// The external codec will appear last.
EXPECT_EQ("GENERIC", external_codec.name);
// The internal codec is preferred.
EXPECT_GE(internal_codec.preference, external_codec.preference);
}
// Test that an external codec will be ignored if it has the same name as one
// of the internal codecs.
TEST_F(WebRtcVideoEngineTestFake, ExternalCodecIgnored) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::VideoCodec> internal_codecs(engine_.codecs());
EXPECT_EQ("VP8", internal_codecs[0].name);
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
engine_.SetExternalEncoderFactory(&encoder_factory_);
encoder_factory_.NotifyCodecsAvailable();
std::vector<cricket::VideoCodec> codecs = engine_.codecs();
EXPECT_EQ("VP8", codecs[0].name);
EXPECT_EQ(internal_codecs[0].height, codecs[0].height);
EXPECT_EQ(internal_codecs[0].width, codecs[0].width);
// Verify the last codec is not the external codec.
EXPECT_NE("VP8", codecs[codecs.size() - 1].name);
}
TEST_F(WebRtcVideoEngineTestFake, UpdateEncoderCodecsAfterSetFactory) {
engine_.SetExternalEncoderFactory(&encoder_factory_);
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
encoder_factory_.NotifyCodecsAvailable();
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVP8Codec);
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(vie_.ExternalEncoderRegistered(channel_num, 100));
EXPECT_EQ(1, vie_.GetNumExternalEncoderRegistered(channel_num));
EXPECT_EQ(1, encoder_factory_.GetNumCreatedEncoders());
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
}
// Tests that OnReadyToSend will be propagated into ViE.
TEST_F(WebRtcVideoEngineTestFake, OnReadyToSend) {
EXPECT_TRUE(SetupEngine());
int channel_num = vie_.GetLastChannel();
EXPECT_TRUE(vie_.GetIsTransmitting(channel_num));
channel_->OnReadyToSend(false);
EXPECT_FALSE(vie_.GetIsTransmitting(channel_num));
channel_->OnReadyToSend(true);
EXPECT_TRUE(vie_.GetIsTransmitting(channel_num));
}
#if 0
TEST_F(WebRtcVideoEngineTestFake, CaptureFrameTimestampToNtpTimestamp) {
EXPECT_TRUE(SetupEngine());
int capture_id = vie_.GetCaptureId(vie_.GetLastChannel());
// Set send codec.
cricket::VideoCodec codec(kVP8Codec);
std::vector<cricket::VideoCodec> codec_list;
codec_list.push_back(codec);
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(123)));
EXPECT_TRUE(channel_->SetSendCodecs(codec_list));
EXPECT_TRUE(channel_->SetSend(true));
int64 timestamp = time(NULL) * talk_base::kNumNanosecsPerSec;
SendI420ScreencastFrameWithTimestamp(
kVP8Codec.width, kVP8Codec.height, timestamp);
EXPECT_EQ(talk_base::UnixTimestampNanosecsToNtpMillisecs(timestamp),
vie_.GetCaptureLastTimestamp(capture_id));
SendI420ScreencastFrameWithTimestamp(kVP8Codec.width, kVP8Codec.height, 0);
EXPECT_EQ(0, vie_.GetCaptureLastTimestamp(capture_id));
}
#endif

View File

@ -1769,7 +1769,7 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
}
if (dscp_option_changed) {
talk_base::DiffServCodePoint dscp = talk_base::DSCP_DEFAULT;
if (options.dscp.GetWithDefaultIfUnset(false))
if (options_.dscp.GetWithDefaultIfUnset(false))
dscp = kAudioDscpValue;
if (MediaChannel::SetDscp(dscp) != 0) {
LOG(LS_WARNING) << "Failed to set DSCP settings for audio channel";
@ -1879,7 +1879,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// this, but double-check to be sure.
webrtc::CodecInst voe_codec;
if (!engine()->FindWebRtcCodec(*it, &voe_codec)) {
LOG(LS_WARNING) << "Unknown codec " << ToString(voe_codec);
LOG(LS_WARNING) << "Unknown codec " << ToString(*it);
continue;
}
@ -2431,14 +2431,14 @@ bool WebRtcVoiceMediaChannel::ConfigureRecvChannel(int channel) {
}
// Use the same SSRC as our default channel (so the RTCP reports are correct).
unsigned int send_ssrc;
unsigned int send_ssrc = 0;
webrtc::VoERTP_RTCP* rtp = engine()->voe()->rtp();
if (rtp->GetLocalSSRC(voe_channel(), send_ssrc) == -1) {
LOG_RTCERR2(GetSendSSRC, channel, send_ssrc);
LOG_RTCERR1(GetSendSSRC, channel);
return false;
}
if (rtp->SetLocalSSRC(channel, send_ssrc) == -1) {
LOG_RTCERR2(SetSendSSRC, channel, send_ssrc);
LOG_RTCERR1(SetSendSSRC, channel);
return false;
}
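A note on the two adjustments above (reasoning inferred from the diff):
zero-initializing send_ssrc gives the variable a defined value even when
GetLocalSSRC() fails, and the error macros drop the ssrc argument because on
failure it would log an unset or stale value; only the channel id is
meaningful at that point.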

View File

@ -2718,6 +2718,10 @@ TEST_F(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
options.dscp.Set(true);
EXPECT_TRUE(channel->SetOptions(options));
EXPECT_EQ(talk_base::DSCP_EF, network_interface->dscp());
// Verify previous value is not modified if dscp option is not set.
cricket::AudioOptions options1;
EXPECT_TRUE(channel->SetOptions(options1));
EXPECT_EQ(talk_base::DSCP_EF, network_interface->dscp());
options.dscp.Set(false);
EXPECT_TRUE(channel->SetOptions(options));
EXPECT_EQ(talk_base::DSCP_DEFAULT, network_interface->dscp());

View File

@ -54,7 +54,6 @@ const uint32 PORTALLOCATOR_ENABLE_IPV6 = 0x40;
const uint32 PORTALLOCATOR_ENABLE_SHARED_UFRAG = 0x80;
const uint32 PORTALLOCATOR_ENABLE_SHARED_SOCKET = 0x100;
const uint32 PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE = 0x200;
const uint32 PORTALLOCATOR_USE_LARGE_SOCKET_SEND_BUFFERS = 0x400;
const uint32 kDefaultPortAllocatorFlags = 0;

View File

@ -65,10 +65,6 @@ const int PHASE_SSLTCP = 3;
const int kNumPhases = 4;
// Both these values are in bytes.
const int kLargeSocketSendBufferSize = 128 * 1024;
const int kNormalSocketSendBufferSize = 64 * 1024;
const int SHAKE_MIN_DELAY = 45 * 1000; // 45 seconds
const int SHAKE_MAX_DELAY = 90 * 1000; // 90 seconds
@ -490,16 +486,6 @@ void BasicPortAllocatorSession::AddAllocatedPort(Port* port,
port->set_send_retransmit_count_attribute((allocator_->flags() &
PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE) != 0);
if (content_name().compare(CN_VIDEO) == 0 &&
component_ == cricket::ICE_CANDIDATE_COMPONENT_RTP) {
// For video RTP alone, we set send-buffer sizes. This used to be set in the
// engines/channels.
int sendBufSize = (flags() & PORTALLOCATOR_USE_LARGE_SOCKET_SEND_BUFFERS)
? kLargeSocketSendBufferSize
: kNormalSocketSendBufferSize;
port->SetOption(talk_base::Socket::OPT_SNDBUF, sendBufSize);
}
PortData data(port, seq);
ports_.push_back(data);

View File

@ -331,56 +331,7 @@ TEST_F(PortAllocatorTest, TestSetupVideoRtpPortsWithNormalSendBuffers) {
// If we Stop gathering now, we shouldn't get a second "done" callback.
session_->StopGettingPorts();
// All ports should have normal send-buffer sizes (64KB).
CheckSendBufferSizesOfAllPorts(64 * 1024);
}
TEST_F(PortAllocatorTest, TestSetupVideoRtpPortsWithLargeSendBuffers) {
AddInterface(kClientAddr);
allocator_->set_flags(allocator_->flags() |
cricket::PORTALLOCATOR_USE_LARGE_SOCKET_SEND_BUFFERS);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP,
cricket::CN_VIDEO));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_TRUE(candidate_allocation_done_);
// If we Stop gathering now, we shouldn't get a second "done" callback.
session_->StopGettingPorts();
// All ports should have large send-buffer sizes (128KB).
CheckSendBufferSizesOfAllPorts(128 * 1024);
}
TEST_F(PortAllocatorTest, TestSetupVideoRtcpPortsAndCheckSendBuffers) {
AddInterface(kClientAddr);
allocator_->set_flags(allocator_->flags() |
cricket::PORTALLOCATOR_USE_LARGE_SOCKET_SEND_BUFFERS);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTCP,
cricket::CN_DATA));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_TRUE(candidate_allocation_done_);
// If we Stop gathering now, we shouldn't get a second "done" callback.
session_->StopGettingPorts();
// No ports should have send-buffer size set.
CheckSendBufferSizesOfAllPorts(-1);
}
TEST_F(PortAllocatorTest, TestSetupNonVideoPortsAndCheckSendBuffers) {
AddInterface(kClientAddr);
allocator_->set_flags(allocator_->flags() |
cricket::PORTALLOCATOR_USE_LARGE_SOCKET_SEND_BUFFERS);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP,
cricket::CN_DATA));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_TRUE(candidate_allocation_done_);
// If we Stop gathering now, we shouldn't get a second "done" callback.
session_->StopGettingPorts();
// No ports should have send-buffer size set.
// All ports should have unset send-buffer sizes.
CheckSendBufferSizesOfAllPorts(-1);
}

View File

@ -81,7 +81,6 @@ enum {
MSG_SETSCREENCASTFACTORY,
MSG_FIRSTPACKETRECEIVED,
MSG_SESSION_ERROR,
MSG_NEWSTREAMRECEIVED,
};
// Value specified in RFC 5764.
@ -2515,8 +2514,6 @@ bool DataChannel::Init() {
this, &DataChannel::OnDataChannelError);
media_channel()->SignalReadyToSend.connect(
this, &DataChannel::OnDataChannelReadyToSend);
media_channel()->SignalNewStreamReceived.connect(
this, &DataChannel::OnDataChannelNewStreamReceived);
srtp_filter()->SignalSrtpError.connect(
this, &DataChannel::OnSrtpError);
return true;
@ -2740,13 +2737,6 @@ void DataChannel::OnMessage(talk_base::Message *pmsg) {
delete data;
break;
}
case MSG_NEWSTREAMRECEIVED: {
DataChannelNewStreamReceivedMessageData* data =
static_cast<DataChannelNewStreamReceivedMessageData*>(pmsg->pdata);
SignalNewStreamReceived(data->label, data->init);
delete data;
break;
}
default:
BaseChannel::OnMessage(pmsg);
break;
@ -2802,14 +2792,6 @@ void DataChannel::OnDataChannelReadyToSend(bool writable) {
new DataChannelReadyToSendMessageData(writable));
}
void DataChannel::OnDataChannelNewStreamReceived(
const std::string& label, const webrtc::DataChannelInit& init) {
signaling_thread()->Post(
this,
MSG_NEWSTREAMRECEIVED,
new DataChannelNewStreamReceivedMessageData(label, init));
}
void DataChannel::OnSrtpError(uint32 ssrc, SrtpFilter::Mode mode,
SrtpFilter::Error error) {
switch (error) {

View File

@ -31,7 +31,6 @@
#include <string>
#include <vector>
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/base/asyncudpsocket.h"
#include "talk/base/criticalsection.h"
#include "talk/base/network.h"
@ -634,11 +633,6 @@ class DataChannel : public BaseChannel {
// That occurs when the channel is enabled, the transport is writable,
// both local and remote descriptions are set, and the channel is unblocked.
sigslot::signal1<bool> SignalReadyToSendData;
// Signal for notifying when a new stream is added from the remote side. Used
// for the in-band negotiation through the OPEN message for SCTP data
// channel.
sigslot::signal2<const std::string&, const webrtc::DataChannelInit&>
SignalNewStreamReceived;
protected:
// downcasts a MediaChannel.
@ -678,17 +672,6 @@ class DataChannel : public BaseChannel {
typedef talk_base::TypedMessageData<bool> DataChannelReadyToSendMessageData;
struct DataChannelNewStreamReceivedMessageData
: public talk_base::MessageData {
DataChannelNewStreamReceivedMessageData(
const std::string& label, const webrtc::DataChannelInit& init)
: label(label),
init(init) {
}
const std::string label;
const webrtc::DataChannelInit init;
};
// overrides from BaseChannel
virtual const ContentInfo* GetFirstContent(const SessionDescription* sdesc);
// If data_channel_type_ is DCT_NONE, set it. Otherwise, check that
@ -717,8 +700,6 @@ class DataChannel : public BaseChannel {
const ReceiveDataParams& params, const char* data, size_t len);
void OnDataChannelError(uint32 ssrc, DataMediaChannel::Error error);
void OnDataChannelReadyToSend(bool writable);
void OnDataChannelNewStreamReceived(const std::string& label,
const webrtc::DataChannelInit& init);
void OnSrtpError(uint32 ssrc, SrtpFilter::Mode mode, SrtpFilter::Error error);
talk_base::scoped_ptr<DataMediaMonitor> media_monitor_;