Fixes two issues in how we handle OfferToReceiveX for CreateOffer:

1. The options set in the first CreateOffer call should not affect the result of a second CreateOffer call if SetLocalDescription is not called in between. The member variable options_ of MediaStreamSignaling is therefore removed, so that each CreateOffer call is independent.
Instead, MediaSession is responsible for making sure that an m-line in the current local description is never removed from a newly created offer.

2. OfferToReceiveAudio used to default to true, i.e. an m=audio line was always added even when there was no audio track. This is changed so that, by default, m=audio is only added if there is at least one audio track (see the sketch below).
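
As a rough illustration of the new behavior (a sketch against the PeerConnection C++ API; the |pc| and |observer| objects are assumed to be set up elsewhere and are not part of this change):

  // No local tracks added; both offer_to_receive_* flags left at kUndefined.
  PeerConnectionInterface::RTCOfferAnswerOptions options;
  pc->CreateOffer(observer, options);  // The offer now has no m=audio or m=video line.

  // Ask explicitly to receive audio to still get an m=audio line without a local track.
  options.offer_to_receive_audio =
      PeerConnectionInterface::RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
  pc->CreateOffer(observer, options);

An m-line that is already present in the current local description is kept in subsequent offers regardless of these flags (point 1 above).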

BUG=2108
R=pthatcher@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/16309004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7068 4adac7df-926f-26a2-2b94-8c16560cd09d
jiayl@webrtc.org 2014-09-04 17:12:25 +00:00
parent afa77cd803
commit 52055a276d
6 changed files with 99 additions and 103 deletions


@@ -51,9 +51,9 @@ namespace webrtc {
 using rtc::scoped_ptr;
 using rtc::scoped_refptr;
 
-static bool ParseConstraints(
+static bool ParseConstraintsForAnswer(
     const MediaConstraintsInterface* constraints,
-    cricket::MediaSessionOptions* options, bool is_answer) {
+    cricket::MediaSessionOptions* options) {
   bool value;
   size_t mandatory_constraints_satisfied = 0;
@@ -82,7 +82,7 @@ static bool ParseConstraints(
     // kOfferToReceiveVideo defaults to false according to spec. But
     // if it is an answer and video is offered, we should still accept video
     // per default.
-    options->has_video |= is_answer;
+    options->has_video = true;
   }
   if (FindConstraint(constraints,
@@ -133,6 +133,55 @@ static bool IsValidOfferToReceiveMedia(int value) {
          (value <= Options::kMaxOfferToReceiveMedia);
 }
 
+// Add the stream and RTP data channel info to |session_options|.
+static void SetStreams(
+    cricket::MediaSessionOptions* session_options,
+    rtc::scoped_refptr<StreamCollection> streams,
+    const MediaStreamSignaling::RtpDataChannels& rtp_data_channels) {
+  session_options->streams.clear();
+  if (streams != NULL) {
+    for (size_t i = 0; i < streams->count(); ++i) {
+      MediaStreamInterface* stream = streams->at(i);
+
+      AudioTrackVector audio_tracks(stream->GetAudioTracks());
+
+      // For each audio track in the stream, add it to the MediaSessionOptions.
+      for (size_t j = 0; j < audio_tracks.size(); ++j) {
+        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
+        session_options->AddStream(
+            cricket::MEDIA_TYPE_AUDIO, track->id(), stream->label());
+      }
+
+      VideoTrackVector video_tracks(stream->GetVideoTracks());
+
+      // For each video track in the stream, add it to the MediaSessionOptions.
+      for (size_t j = 0; j < video_tracks.size(); ++j) {
+        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
+        session_options->AddStream(
+            cricket::MEDIA_TYPE_VIDEO, track->id(), stream->label());
+      }
+    }
+  }
+
+  // Check for data channels.
+  MediaStreamSignaling::RtpDataChannels::const_iterator data_channel_it =
+      rtp_data_channels.begin();
+  for (; data_channel_it != rtp_data_channels.end(); ++data_channel_it) {
+    const DataChannel* channel = data_channel_it->second;
+    if (channel->state() == DataChannel::kConnecting ||
+        channel->state() == DataChannel::kOpen) {
+      // |streamid| and |sync_label| are both set to the DataChannel label
+      // here so they can be signaled the same way as MediaStreams and Tracks.
+      // For MediaStreams, the sync_label is the MediaStream label and the
+      // track label is the same as |streamid|.
+      const std::string& streamid = channel->label();
+      const std::string& sync_label = channel->label();
+      session_options->AddStream(
+          cricket::MEDIA_TYPE_DATA, streamid, sync_label);
+    }
+  }
+}
+
 // Factory class for creating remote MediaStreams and MediaStreamTracks.
 class RemoteMediaStreamFactory {
  public:
@@ -192,8 +241,6 @@ MediaStreamSignaling::MediaStreamSignaling(
                                                           channel_manager)),
       last_allocated_sctp_even_sid_(-2),
       last_allocated_sctp_odd_sid_(-1) {
-  options_.has_video = false;
-  options_.has_audio = false;
 }
 
 MediaStreamSignaling::~MediaStreamSignaling() {
@@ -377,40 +424,38 @@ bool MediaStreamSignaling::GetOptionsForOffer(
     return false;
   }
 
-  UpdateSessionOptions();
+  session_options->has_audio = false;
+  session_options->has_video = false;
+  SetStreams(session_options, local_streams_, rtp_data_channels_);
 
-  // |options.has_audio| and |options.has_video| can only change from false to
-  // true, but never change from true to false. This is to make sure
-  // CreateOffer / CreateAnswer doesn't remove a media content
-  // description that has been created.
-  if (rtc_options.offer_to_receive_audio > 0) {
-    options_.has_audio = true;
+  // If |offer_to_receive_[audio/video]| is undefined, respect the flags set
+  // from SetStreams. Otherwise, overwrite it based on |rtc_options|.
+  if (rtc_options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) {
+    session_options->has_audio = rtc_options.offer_to_receive_audio > 0;
   }
-  if (rtc_options.offer_to_receive_video > 0) {
-    options_.has_video = true;
+  if (rtc_options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) {
+    session_options->has_video = rtc_options.offer_to_receive_video > 0;
   }
-  options_.vad_enabled = rtc_options.voice_activity_detection;
-  options_.transport_options.ice_restart = rtc_options.ice_restart;
-  options_.bundle_enabled = rtc_options.use_rtp_mux;
 
-  options_.bundle_enabled = EvaluateNeedForBundle(options_);
-  *session_options = options_;
+  session_options->vad_enabled = rtc_options.voice_activity_detection;
+  session_options->transport_options.ice_restart = rtc_options.ice_restart;
+  session_options->bundle_enabled = rtc_options.use_rtp_mux;
+
+  session_options->bundle_enabled = EvaluateNeedForBundle(*session_options);
   return true;
 }
 
 bool MediaStreamSignaling::GetOptionsForAnswer(
     const MediaConstraintsInterface* constraints,
     cricket::MediaSessionOptions* options) {
-  UpdateSessionOptions();
+  options->has_audio = false;
+  options->has_video = false;
+  SetStreams(options, local_streams_, rtp_data_channels_);
 
-  // Copy the |options_| to not let the flag MediaSessionOptions::has_audio and
-  // MediaSessionOptions::has_video affect subsequent offers.
-  cricket::MediaSessionOptions current_options = options_;
-  if (!ParseConstraints(constraints, &current_options, true)) {
+  if (!ParseConstraintsForAnswer(constraints, options)) {
     return false;
   }
 
-  current_options.bundle_enabled = EvaluateNeedForBundle(current_options);
-  *options = current_options;
+  options->bundle_enabled = EvaluateNeedForBundle(*options);
   return true;
 }
@@ -545,54 +590,6 @@ void MediaStreamSignaling::OnDataChannelClose() {
   }
 }
 
-void MediaStreamSignaling::UpdateSessionOptions() {
-  options_.streams.clear();
-  if (local_streams_ != NULL) {
-    for (size_t i = 0; i < local_streams_->count(); ++i) {
-      MediaStreamInterface* stream = local_streams_->at(i);
-
-      AudioTrackVector audio_tracks(stream->GetAudioTracks());
-      if (!audio_tracks.empty()) {
-        options_.has_audio = true;
-      }
-
-      // For each audio track in the stream, add it to the MediaSessionOptions.
-      for (size_t j = 0; j < audio_tracks.size(); ++j) {
-        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
-        options_.AddStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
-                           stream->label());
-      }
-
-      VideoTrackVector video_tracks(stream->GetVideoTracks());
-      if (!video_tracks.empty()) {
-        options_.has_video = true;
-      }
-
-      // For each video track in the stream, add it to the MediaSessionOptions.
-      for (size_t j = 0; j < video_tracks.size(); ++j) {
-        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
-        options_.AddStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
-                           stream->label());
-      }
-    }
-  }
-
-  // Check for data channels.
-  RtpDataChannels::const_iterator data_channel_it = rtp_data_channels_.begin();
-  for (; data_channel_it != rtp_data_channels_.end(); ++data_channel_it) {
-    const DataChannel* channel = data_channel_it->second;
-    if (channel->state() == DataChannel::kConnecting ||
-        channel->state() == DataChannel::kOpen) {
-      // |streamid| and |sync_label| are both set to the DataChannel label
-      // here so they can be signaled the same way as MediaStreams and Tracks.
-      // For MediaStreams, the sync_label is the MediaStream label and the
-      // track label is the same as |streamid|.
-      const std::string& streamid = channel->label();
-      const std::string& sync_label = channel->label();
-      options_.AddStream(cricket::MEDIA_TYPE_DATA, streamid, sync_label);
-    }
-  }
-}
-
 void MediaStreamSignaling::UpdateRemoteStreamsList(
     const cricket::StreamParamsVec& streams,
     cricket::MediaType media_type,


@@ -160,6 +160,9 @@ class MediaStreamSignalingObserver {
 class MediaStreamSignaling : public sigslot::has_slots<> {
  public:
+  typedef std::map<std::string, rtc::scoped_refptr<DataChannel> >
+      RtpDataChannels;
+
   MediaStreamSignaling(rtc::Thread* signaling_thread,
                        MediaStreamSignalingObserver* stream_observer,
                        cricket::ChannelManager* channel_manager);
@@ -289,8 +292,6 @@ class MediaStreamSignaling : public sigslot::has_slots<> {
   };
   typedef std::vector<TrackInfo> TrackInfos;
 
-  void UpdateSessionOptions();
-
   // Makes sure a MediaStream Track is created for each StreamParam in
   // |streams|. |media_type| is the type of the |streams| and can be either
   // audio or video.
@@ -378,7 +379,6 @@ class MediaStreamSignaling : public sigslot::has_slots<> {
   RemotePeerInfo remote_info_;
   rtc::Thread* signaling_thread_;
   DataChannelFactory* data_channel_factory_;
-  cricket::MediaSessionOptions options_;
   MediaStreamSignalingObserver* stream_observer_;
   rtc::scoped_refptr<StreamCollection> local_streams_;
   rtc::scoped_refptr<StreamCollection> remote_streams_;
@@ -392,8 +392,6 @@ class MediaStreamSignaling : public sigslot::has_slots<> {
   int last_allocated_sctp_even_sid_;
   int last_allocated_sctp_odd_sid_;
 
-  typedef std::map<std::string, rtc::scoped_refptr<DataChannel> >
-      RtpDataChannels;
   typedef std::vector<rtc::scoped_refptr<DataChannel> > SctpDataChannels;
   RtpDataChannels rtp_data_channels_;


@@ -775,8 +775,10 @@ TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) {
   RTCOfferAnswerOptions default_rtc_options;
   EXPECT_TRUE(signaling_->GetOptionsForOffer(default_rtc_options,
                                              &updated_offer_options));
-  EXPECT_TRUE(updated_offer_options.has_audio);
-  EXPECT_TRUE(updated_offer_options.has_video);
+  // By default, |has_audio| or |has_video| are false if there is no media
+  // track.
+  EXPECT_FALSE(updated_offer_options.has_audio);
+  EXPECT_FALSE(updated_offer_options.has_video);
 }
 
 // This test verifies that the remote MediaStreams corresponding to a received


@@ -508,10 +508,6 @@ void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
     return;
   }
   RTCOfferAnswerOptions options;
-  // Defaults to receiving audio and not receiving video.
-  options.offer_to_receive_audio =
-      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
-  options.offer_to_receive_video = 0;
 
   bool value;
   size_t mandatory_constraints = 0;


@@ -510,7 +510,7 @@ class WebRtcSessionTest : public testing::Test {
   }
 
   void VerifyAnswerFromNonCryptoOffer() {
-    // Create a SDP without Crypto.
+    // Create an SDP without Crypto.
     cricket::MediaSessionOptions options;
     options.has_video = true;
     JsepSessionDescription* offer(
@@ -1211,14 +1211,13 @@ TEST_F(WebRtcSessionTest, TestCreateSdesOfferReceiveSdesAnswer) {
             rtc::FromString<uint64>(offer->session_version()));
 
   SetLocalDescriptionWithoutError(offer);
+  EXPECT_EQ(0u, video_channel_->send_streams().size());
+  EXPECT_EQ(0u, voice_channel_->send_streams().size());
 
   mediastream_signaling_.SendAudioVideoStream2();
   answer = CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
 
-  EXPECT_EQ(0u, video_channel_->send_streams().size());
-  EXPECT_EQ(0u, voice_channel_->send_streams().size());
-
   // Make sure the receive streams have not changed.
   ASSERT_EQ(1u, video_channel_->recv_streams().size());
   EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
@@ -1992,13 +1991,21 @@ TEST_F(WebRtcSessionTest, CreateOfferWithConstraints) {
   const cricket::ContentInfo* content =
       cricket::GetFirstAudioContent(offer->description());
   EXPECT_TRUE(content != NULL);
   content = cricket::GetFirstVideoContent(offer->description());
   EXPECT_TRUE(content != NULL);
 
-  // TODO(perkj): Should the direction be set to SEND_ONLY if
-  // The constraints is set to not receive audio or video but a track is added?
+  // Sets constraints to false and verifies that audio/video contents are
+  // removed.
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+  offer.reset(CreateOffer(options));
+
+  content = cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
 }
// Test that an answer can not be created if the last remote description is not // Test that an answer can not be created if the last remote description is not
@@ -2037,8 +2044,7 @@ TEST_F(WebRtcSessionTest, CreateAudioAnswerWithoutConstraintsOrStreams) {
   Init(NULL);
   // Create a remote offer with audio only.
   cricket::MediaSessionOptions options;
-  options.has_audio = true;
-  options.has_video = false;
   rtc::scoped_ptr<JsepSessionDescription> offer(
       CreateRemoteOffer(options));
   ASSERT_TRUE(cricket::GetFirstVideoContent(offer->description()) == NULL);
@@ -2174,7 +2180,6 @@ TEST_F(WebRtcSessionTest, TestAVOfferWithAudioOnlyAnswer) {
   SessionDescriptionInterface* offer = CreateOffer();
 
   cricket::MediaSessionOptions options;
-  options.has_video = false;
   SessionDescriptionInterface* answer = CreateRemoteAnswer(offer, options);
 
   // SetLocalDescription and SetRemoteDescriptions takes ownership of offer
@@ -2887,7 +2892,6 @@ TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescriptionWithDisabled) {
 TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
   Init(NULL);
   cricket::MediaSessionOptions options;
-  options.has_audio = true;
   options.has_video = true;
   rtc::scoped_ptr<JsepSessionDescription> offer(
       CreateRemoteOffer(options));
@@ -2919,7 +2923,6 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
 TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
   Init(NULL);
   cricket::MediaSessionOptions options;
-  options.has_audio = true;
   options.has_video = true;
   rtc::scoped_ptr<JsepSessionDescription> offer(
       CreateRemoteOffer(options));
@@ -2985,7 +2988,6 @@ TEST_F(WebRtcSessionTest, TestIceStatesBundle) {
 TEST_F(WebRtcSessionTest, SetSdpFailedOnSessionError) {
   Init(NULL);
   cricket::MediaSessionOptions options;
-  options.has_audio = true;
   options.has_video = true;
 
   cricket::BaseSession::Error error_code = cricket::BaseSession::ERROR_CONTENT;


@@ -1170,28 +1170,28 @@ SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
   if (current_description) {
     ContentInfos::const_iterator it = current_description->contents().begin();
     for (; it != current_description->contents().end(); ++it) {
-      if (IsMediaContentOfType(&*it, MEDIA_TYPE_AUDIO) && options.has_audio) {
+      if (IsMediaContentOfType(&*it, MEDIA_TYPE_AUDIO)) {
        if (!AddAudioContentForOffer(options, current_description,
                                      audio_rtp_extensions, audio_codecs,
                                      &current_streams, offer.get())) {
           return NULL;
         }
         audio_added = true;
-      } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_VIDEO) &&
-                 options.has_video) {
+      } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_VIDEO)) {
        if (!AddVideoContentForOffer(options, current_description,
                                      video_rtp_extensions, video_codecs,
                                      &current_streams, offer.get())) {
           return NULL;
         }
         video_added = true;
-      } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_DATA) &&
-                 options.has_data()) {
+      } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_DATA)) {
        if (!AddDataContentForOffer(options, current_description, &data_codecs,
                                     &current_streams, offer.get())) {
           return NULL;
         }
         data_added = true;
+      } else {
+        ASSERT(false);
       }
     }
   }
@@ -1459,6 +1459,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer(
   bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
   SetMediaProtocol(secure_transport, audio.get());
   desc->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio.release());
   if (!AddTransportOffer(CN_AUDIO, options.transport_options,
                          current_description, desc)) {