Implementation of PcSignaling, a class to handle signaling between PeerConnections.
Review URL: http://webrtc-codereview.appspot.com/149002
git-svn-id: http://webrtc.googlecode.com/svn/trunk@657 4adac7df-926f-26a2-2b94-8c16560cd09d
Parent: 18421f2063
Commit: 2f56ff48a4
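For context, here is a minimal sketch of how the new class is intended to be driven, mirroring the flow of peerconnectionsignaling_unittest.cc added in this commit. The ChannelManager and PeerConnectionSignaling calls are taken from the diff below; the observer object and the local stream collection are illustrative placeholders, not part of the commit.

// Sketch only; assumes it runs on the signaling thread, as the unit test does.
cricket::ChannelManager channel_manager(talk_base::Thread::Current());
channel_manager.Init();

webrtc::PeerConnectionSignaling signaling(&channel_manager);
// A listener connected to SignalNewPeerConnectionMessage is expected to hand
// each PeerConnectionMessage to the remote side, which feeds it into its own
// instance through ProcessSignalingMessage(message, its_local_streams).
signaling.SignalNewPeerConnectionMessage.connect(
    &observer, &MyObserver::OnSignalingMessage);  // MyObserver: placeholder.

signaling.CreateOffer(local_streams);  // local_streams: a StreamCollection
                                       // of the streams this side will send.
talk_base::Thread::Current()->ProcessMessages(1);  // run the posted offer.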
@ -625,6 +625,15 @@
'libjingle',
],
} ], # inside_chromium_build
['peer_connection_dev==1', {
'sources-': [
'<(libjingle_orig)/source/talk/session/phone/mediasession.cc',
],
'sources+': [
'<(libjingle_mods)/source/talk/session/phone/mediasession.cc',
]

}], # peer_connection_dev
], # conditions
},
# separate project for app
@ -685,6 +694,8 @@
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmanagerimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmanagerimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectiontransport.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectiontransport.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/ref_count.h',
@ -751,7 +762,8 @@
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamimpl_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittests.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmanager_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmanager_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling_unittest.cc',
],
}, { # peer_connection_dev != 1
'type': 'none',

@ -35,6 +35,7 @@ AudioTrackImpl::AudioTrackImpl(const std::string& label, uint32 ssrc)
kind_(kAudioTrackKind),
label_(label),
ssrc_(ssrc),
state_(kInitializing),
audio_device_(NULL) {
}

@ -44,6 +45,7 @@ AudioTrackImpl::AudioTrackImpl(const std::string& label,
kind_(kAudioTrackKind),
label_(label),
ssrc_(0),
state_(kInitializing),
audio_device_(audio_device) {
}

@ -86,6 +88,18 @@ bool AudioTrackImpl::set_ssrc(uint32 ssrc) {
return true;
}

MediaStreamTrack::TrackState AudioTrackImpl::state() {
return state_;
}

bool AudioTrackImpl::set_state(TrackState new_state) {
bool fire_on_change = state_ != new_state;
state_ = new_state;
if (fire_on_change)
NotifierImpl<LocalAudioTrack>::FireOnChanged();
return true;
}

scoped_refptr<AudioTrack> AudioTrackImpl::Create(
const std::string& label, uint32 ssrc) {
RefCountImpl<AudioTrackImpl>* track =

@ -55,9 +55,11 @@ class AudioTrackImpl : public NotifierImpl<LocalAudioTrack> {
virtual const std::string& kind();
virtual const std::string& label();
virtual uint32 ssrc();
virtual TrackState state();
virtual bool enabled();
virtual bool set_enabled(bool enable);
virtual bool set_ssrc(uint32 ssrc);
virtual bool set_state(TrackState new_state);

protected:
AudioTrackImpl(const std::string& label, uint32 ssrc);
@ -68,6 +70,7 @@ class AudioTrackImpl : public NotifierImpl<LocalAudioTrack> {
std::string kind_;
std::string label_;
uint32 ssrc_;
TrackState state_;
scoped_refptr<AudioDeviceModule> audio_device_;
};

@ -66,10 +66,18 @@ class Notifier {
class MediaStreamTrack : public RefCount,
public Notifier {
public:
enum TrackState {
kInitializing, // Track is being negotiated.
kLive = 1, // Track alive
kEnded = 2, // Track has ended
kFailed = 3, // Track negotiation failed.
};

virtual const std::string& kind() = 0;
virtual const std::string& label() = 0;
virtual uint32 ssrc() = 0;
virtual bool enabled() = 0;
virtual TrackState state() = 0;
// Enables or disables a track.
// For Remote streams - disable means that the video is not decoded,
// or audio not decoded.
@ -78,6 +86,7 @@ class MediaStreamTrack : public RefCount,
virtual bool set_enabled(bool enable) = 0;
// Return false (or assert) if the ssrc is already set.
virtual bool set_ssrc(uint32 ssrc) = 0;
virtual bool set_state(TrackState new_state) = 0;
};

// Reference counted wrapper for a VideoRenderer.

@ -44,6 +44,7 @@ class StreamCollection : public RefCount {
public:
virtual size_t count() = 0;
virtual MediaStream* at(size_t index) = 0;
virtual MediaStream* find(const std::string& label) = 0;
protected:
// Dtor protected as objects shouldn't be deleted via this interface.
~StreamCollection() {}

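A side note on the MediaStreamTrack interface above: state changes made through set_state() are reported through the Observer/Notifier mechanism. Below is a small sketch of a state observer following the same pattern as MockMediaTrackObserver in the new unit test further down; the class name is illustrative and the code assumes it lives in the webrtc namespace, like the test.

class TrackStateLogger : public webrtc::Observer {  // illustrative name
 public:
  explicit TrackStateLogger(MediaStreamTrack* track) : track_(track) {
    last_state = track_->state();
    track_->RegisterObserver(this);
  }
  // Called via FireOnChanged() whenever set_state() or set_enabled() actually
  // changes something; read back the new state here.
  virtual void OnChanged() {
    last_state = track_->state();
  }
  MediaStreamTrack::TrackState last_state;
 private:
  scoped_refptr<MediaStreamTrack> track_;
};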
@ -0,0 +1,481 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2011, Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "talk/app/webrtc_dev/audiotrackimpl.h"
|
||||
#include "talk/app/webrtc_dev/mediastreamimpl.h"
|
||||
#include "talk/app/webrtc_dev/videotrackimpl.h"
|
||||
#include "talk/base/helpers.h"
|
||||
#include "talk/base/messagequeue.h"
|
||||
#include "talk/session/phone/channelmanager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
enum {
|
||||
MSG_SEND_QUEUED_OFFER = 301,
|
||||
};
|
||||
static const int kGlareMinWaitTime = 2 * 1000; // 2 sec
|
||||
static const int kGlareWaitIntervall = 1 * 1000; // 1 sec
|
||||
|
||||
// Verifies that a SessionDescription contains at least one valid media content
|
||||
// and a valid codec.
|
||||
static bool VerifyAnswer(const cricket::SessionDescription* answer_desc) {
|
||||
// We need to verify that at least one media content with
|
||||
// a codec is available.
|
||||
const cricket::ContentInfo* audio_content =
|
||||
GetFirstAudioContent(answer_desc);
|
||||
if (audio_content) {
|
||||
const cricket::AudioContentDescription* audio_desc =
|
||||
static_cast<const cricket::AudioContentDescription*>(
|
||||
audio_content->description);
|
||||
if (audio_desc->codecs().size() > 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
const cricket::ContentInfo* video_content =
|
||||
GetFirstVideoContent(answer_desc);
|
||||
if (video_content) {
|
||||
const cricket::VideoContentDescription* video_desc =
|
||||
static_cast<const cricket::VideoContentDescription*>(
|
||||
video_content->description);
|
||||
if (video_desc->codecs().size() > 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
scoped_refptr<PeerConnectionMessage> PeerConnectionMessage::Create(
|
||||
PeerConnectionMessageType type,
|
||||
const cricket::SessionDescription* desc) {
|
||||
return new RefCountImpl<PeerConnectionMessage> (type, desc);
|
||||
}
|
||||
|
||||
scoped_refptr<PeerConnectionMessage> PeerConnectionMessage::CreateErrorMessage(
|
||||
ErrorCode error) {
|
||||
return new RefCountImpl<PeerConnectionMessage> (error);
|
||||
}
|
||||
|
||||
PeerConnectionMessage::PeerConnectionMessage(
|
||||
PeerConnectionMessageType type,
|
||||
const cricket::SessionDescription* desc)
|
||||
: type_(type),
|
||||
desc_(desc),
|
||||
error_code_(kNoError) {
|
||||
}
|
||||
|
||||
PeerConnectionMessage::PeerConnectionMessage(ErrorCode error)
|
||||
: type_(kError),
|
||||
desc_(NULL),
|
||||
error_code_(error) {
|
||||
}
|
||||
|
||||
PeerConnectionSignaling::PeerConnectionSignaling(
|
||||
cricket::ChannelManager* channel_manager)
|
||||
: signaling_thread_(talk_base::Thread::Current()),
|
||||
state_(kIdle),
|
||||
ssrc_counter_(0),
|
||||
session_description_factory_(channel_manager) {
|
||||
}
|
||||
|
||||
PeerConnectionSignaling::~PeerConnectionSignaling() {
|
||||
}
|
||||
|
||||
void PeerConnectionSignaling::ProcessSignalingMessage(
|
||||
PeerConnectionMessage* message,
|
||||
StreamCollection* local_streams) {
|
||||
switch (message->type()) {
|
||||
case PeerConnectionMessage::kOffer: {
|
||||
// Don't handle offers when we are waiting for an answer.
|
||||
if (state_ == kWaitingForAnswer) {
|
||||
state_ = kGlare;
|
||||
// Resends our last offer in 2 to 3s.
|
||||
const int timeout = kGlareMinWaitTime +
|
||||
talk_base::CreateRandomId() % kGlareWaitIntervall;
|
||||
signaling_thread_->PostDelayed(
|
||||
timeout, this, MSG_SEND_QUEUED_OFFER, NULL);
|
||||
scoped_refptr<PeerConnectionMessage> msg =
|
||||
PeerConnectionMessage::CreateErrorMessage(
|
||||
PeerConnectionMessage::kWrongState);
|
||||
SignalNewPeerConnectionMessage(msg);
|
||||
break;
|
||||
}
|
||||
if (state_ == kGlare) {
|
||||
state_ = kIdle;
|
||||
}
|
||||
// Reset all pending offers. Instead, send the new streams in the answer.
|
||||
signaling_thread_->Clear(this, MSG_SEND_QUEUED_OFFER, NULL);
|
||||
queued_offers_.clear();
|
||||
GenerateAnswer(message, local_streams);
|
||||
UpdateRemoteStreams(message->desc());
|
||||
break;
|
||||
}
|
||||
case PeerConnectionMessage::kAnswer: {
|
||||
ASSERT(state_ != PeerConnectionSignaling::kIdle);
|
||||
if (state_ == PeerConnectionSignaling::kIdle)
|
||||
return;
|
||||
UpdateRemoteStreams(message->desc());
|
||||
scoped_refptr<StreamCollection> streams(queued_offers_.front());
|
||||
queued_offers_.pop_front();
|
||||
UpdateSendingLocalStreams(message->desc(), streams);
|
||||
// Check if we have more offers waiting in the queue.
|
||||
if (queued_offers_.size() > 0)
|
||||
// Send the next offer.
|
||||
signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
|
||||
else
|
||||
state_ = PeerConnectionSignaling::kIdle;
|
||||
break;
|
||||
}
|
||||
case PeerConnectionMessage::kError: {
|
||||
if (message->error() != PeerConnectionMessage::kWrongState)
|
||||
SignalErrorMessageReceived(message->error());
|
||||
|
||||
// An error has occurred that we can't do anything about.
|
||||
// Reset the state and wait for user action.
|
||||
queued_offers_.clear();
|
||||
state_ = kIdle;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnectionSignaling::CreateOffer(StreamCollection* local_streams) {
|
||||
queued_offers_.push_back(local_streams);
|
||||
if (state_ == kIdle) {
|
||||
// Check if we can send a new offer.
// Only one offer is allowed at a time.
|
||||
state_ = PeerConnectionSignaling::kWaitingForAnswer;
|
||||
signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnectionSignaling::CreateOffer_s() {
|
||||
ASSERT(queued_offers_.size() > 0);
|
||||
scoped_refptr<StreamCollection> local_streams(queued_offers_.front());
|
||||
cricket::MediaSessionOptions options;
|
||||
options.is_video = true;
|
||||
InitMediaSessionOptions(&options, local_streams);
|
||||
|
||||
talk_base::scoped_ptr<cricket::SessionDescription> offer(
|
||||
session_description_factory_.CreateOffer(options));
|
||||
|
||||
scoped_refptr<PeerConnectionMessage> offer_message =
|
||||
PeerConnectionMessage::Create(PeerConnectionMessage::kOffer,
|
||||
offer.release());
|
||||
|
||||
// If we are updating with new streams we need to get an answer
|
||||
// before we can handle a remote offer.
|
||||
// We also need the response before we are allowed to start sending media.
|
||||
SignalNewPeerConnectionMessage(offer_message);
|
||||
}
|
||||
|
||||
PeerConnectionSignaling::State PeerConnectionSignaling::GetState() {
|
||||
return state_;
|
||||
}
|
||||
|
||||
// Implement talk_base::MessageHandler.
|
||||
void PeerConnectionSignaling::OnMessage(talk_base::Message* msg) {
|
||||
switch (msg->message_id) {
|
||||
case MSG_SEND_QUEUED_OFFER:
|
||||
CreateOffer_s();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnectionSignaling::GenerateAnswer(PeerConnectionMessage* message,
|
||||
StreamCollection* local_streams) {
|
||||
// Create a MediaSessionOptions object with the sources we want to send.
|
||||
cricket::MediaSessionOptions options;
|
||||
options.is_video = true;
|
||||
InitMediaSessionOptions(&options, local_streams);
|
||||
|
||||
// Use the MediaSessionFactory to create an SDP answer.
|
||||
talk_base::scoped_ptr<cricket::SessionDescription> answer(
|
||||
session_description_factory_.CreateAnswer(message->desc(), options));
|
||||
|
||||
scoped_refptr<PeerConnectionMessage> answer_message;
|
||||
if (VerifyAnswer(answer.get())) {
|
||||
answer_message = PeerConnectionMessage::Create(
|
||||
PeerConnectionMessage::kAnswer, answer.release());
|
||||
|
||||
} else {
|
||||
answer_message = PeerConnectionMessage::CreateErrorMessage(
|
||||
PeerConnectionMessage::kOfferNotAcceptable);
|
||||
}
|
||||
|
||||
// Signal that the new answer is ready to be sent.
|
||||
SignalNewPeerConnectionMessage(answer_message);
|
||||
|
||||
// Start sending the local streams.
|
||||
// TODO(perkj): Defer the start of sending local media so the remote peer
// has time to receive the signaling message before media arrives?
|
||||
// This is under debate.
|
||||
UpdateSendingLocalStreams(answer_message->desc(), local_streams);
|
||||
}
|
||||
|
||||
// Fills a MediaSessionOptions struct with the MediaTracks we want to send given
// the local MediaStreams.
|
||||
// MediaSessionOptions contains the ssrc of the media track, the cname
|
||||
// corresponding to the MediaStream and a label of the track.
|
||||
void PeerConnectionSignaling::InitMediaSessionOptions(
|
||||
cricket::MediaSessionOptions* options,
|
||||
StreamCollection* local_streams) {
|
||||
for (size_t i = 0; i < local_streams->count(); ++i) {
|
||||
MediaStream* stream = local_streams->at(i);
|
||||
scoped_refptr<MediaStreamTrackList> tracks = stream->tracks();
|
||||
|
||||
// For each track in the stream, add it to the MediaSessionOptions.
|
||||
for (size_t j = 0; j < tracks->count(); ++j) {
|
||||
scoped_refptr<MediaStreamTrack> track = tracks->at(j);
|
||||
if (track->kind().compare(kAudioTrackKind) == 0) {
|
||||
// TODO(perkj): Better ssrc?
|
||||
// Does talk_base::CreateRandomNonZeroId() generate unique id?
|
||||
if (track->ssrc() == 0)
|
||||
track->set_ssrc(++ssrc_counter_);
|
||||
options->audio_sources.push_back(cricket::SourceParam(track->ssrc(),
|
||||
track->label(),
|
||||
stream->label()));
|
||||
}
|
||||
if (track->kind().compare(kVideoTrackKind) == 0) {
|
||||
if (track->ssrc() == 0)
|
||||
track->set_ssrc(++ssrc_counter_); // TODO(perkj): Better ssrc?
|
||||
options->video_sources.push_back(cricket::SourceParam(track->ssrc(),
|
||||
track->label(),
|
||||
stream->label()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams,
// SignalRemoteStreamAdded is triggered. If a remote MediaStream is missing from
// the remote SessionDescription, SignalRemoteStreamRemoved is triggered.
|
||||
void PeerConnectionSignaling::UpdateRemoteStreams(
|
||||
const cricket::SessionDescription* remote_desc) {
|
||||
RemoteStreamMap current_streams;
|
||||
typedef std::pair<std::string, scoped_refptr<MediaStreamImpl> >
|
||||
MediaStreamPair;
|
||||
|
||||
const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
|
||||
if (audio_content) {
|
||||
const cricket::AudioContentDescription* audio_desc =
|
||||
static_cast<const cricket::AudioContentDescription*>(
|
||||
audio_content->description);
|
||||
|
||||
for (cricket::Sources::const_iterator it = audio_desc->sources().begin();
|
||||
it != audio_desc->sources().end();
|
||||
++it) {
|
||||
RemoteStreamMap::iterator old_streams_it =
|
||||
remote_streams_.find(it->cname);
|
||||
RemoteStreamMap::iterator new_streams_it =
|
||||
current_streams.find(it->cname);
|
||||
|
||||
if (old_streams_it == remote_streams_.end()) {
|
||||
if (new_streams_it == current_streams.end()) {
|
||||
// New stream
|
||||
scoped_refptr<MediaStreamImpl> stream(
|
||||
MediaStreamImpl::Create(it->cname));
|
||||
current_streams.insert(MediaStreamPair(stream->label(), stream));
|
||||
new_streams_it = current_streams.find(it->cname);
|
||||
}
|
||||
scoped_refptr<AudioTrack> track(AudioTrackImpl::Create(it->description,
|
||||
it->ssrc));
|
||||
track->set_state(MediaStreamTrack::kLive);
|
||||
new_streams_it->second->AddTrack(track);
|
||||
|
||||
} else {
|
||||
scoped_refptr<MediaStreamImpl> stream(old_streams_it->second);
|
||||
current_streams.insert(MediaStreamPair(stream->label(), stream));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
|
||||
if (video_content) {
|
||||
const cricket::VideoContentDescription* video_desc =
|
||||
static_cast<const cricket::VideoContentDescription*>(
|
||||
video_content->description);
|
||||
|
||||
for (cricket::Sources::const_iterator it = video_desc->sources().begin();
|
||||
it != video_desc->sources().end();
|
||||
++it) {
|
||||
RemoteStreamMap::iterator old_streams_it =
|
||||
remote_streams_.find(it->cname);
|
||||
RemoteStreamMap::iterator new_streams_it =
|
||||
current_streams.find(it->cname);
|
||||
|
||||
if (old_streams_it == remote_streams_.end()) {
|
||||
if (new_streams_it == current_streams.end()) {
|
||||
// New stream
|
||||
scoped_refptr<MediaStreamImpl> stream(
|
||||
MediaStreamImpl::Create(it->cname));
|
||||
current_streams.insert(MediaStreamPair(stream->label(), stream));
|
||||
new_streams_it = current_streams.find(it->cname);
|
||||
}
|
||||
scoped_refptr<VideoTrack> track(VideoTrackImpl::Create(it->description,
|
||||
it->ssrc));
|
||||
new_streams_it->second->AddTrack(track);
|
||||
track->set_state(MediaStreamTrack::kLive);
|
||||
|
||||
} else {
|
||||
scoped_refptr<MediaStreamImpl> stream(old_streams_it->second);
|
||||
current_streams.insert(MediaStreamPair(stream->label(), stream));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate current_streams to find all new streams.
|
||||
// Change the state of the new stream and SignalRemoteStreamAdded.
|
||||
for (RemoteStreamMap::iterator it = current_streams.begin();
|
||||
it != current_streams.end();
|
||||
++it) {
|
||||
scoped_refptr<MediaStreamImpl> new_stream(it->second);
|
||||
RemoteStreamMap::iterator old_streams_it =
|
||||
remote_streams_.find(new_stream->label());
|
||||
if (old_streams_it == remote_streams_.end()) {
|
||||
new_stream->set_ready_state(MediaStream::kLive);
|
||||
SignalRemoteStreamAdded(new_stream);
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate the old list of remote streams.
// If a stream is not found in the new list it has been removed.
// Change the state of the removed stream and SignalRemoteStreamRemoved.
|
||||
for (RemoteStreamMap::iterator it = remote_streams_.begin();
|
||||
it != remote_streams_.end();
|
||||
++it) {
|
||||
scoped_refptr<MediaStreamImpl> old_stream(it->second);
|
||||
RemoteStreamMap::iterator new_streams_it =
|
||||
current_streams.find(old_stream->label());
|
||||
if (new_streams_it == current_streams.end()) {
|
||||
old_stream->set_ready_state(MediaStream::kEnded);
|
||||
scoped_refptr<MediaStreamTrackList> tracklist(old_stream->tracks());
|
||||
for (size_t j = 0; j < tracklist->count(); ++j) {
|
||||
tracklist->at(j)->set_state(MediaStreamTrack::kEnded);
|
||||
}
|
||||
SignalRemoteStreamRemoved(old_stream);
|
||||
}
|
||||
}
|
||||
// Set the remote_streams_ map to the map of MediaStreams we just created to
|
||||
// be prepared for the next offer.
|
||||
remote_streams_ = current_streams;
|
||||
}
|
||||
|
||||
// Update the state of all local streams we have just negotiated. If the
|
||||
// negotiation succeeded the state is changed to kLive; if the negotiation
|
||||
// failed the state is changed to kEnded.
|
||||
void PeerConnectionSignaling::UpdateSendingLocalStreams(
|
||||
const cricket::SessionDescription* answer_desc,
|
||||
StreamCollection* negotiated_streams) {
|
||||
typedef std::pair<std::string, scoped_refptr<MediaStream> > MediaStreamPair;
|
||||
LocalStreamMap current_local_streams;
|
||||
|
||||
for (size_t i = 0; i < negotiated_streams->count(); ++i) {
|
||||
scoped_refptr<MediaStream> stream = negotiated_streams->at(i);
|
||||
scoped_refptr<MediaStreamTrackList> tracklist(stream->tracks());
|
||||
|
||||
bool stream_ok = false; // A stream is ok if at least one track succeeds.
|
||||
|
||||
for (size_t j = 0; j < tracklist->count(); ++j) {
|
||||
scoped_refptr<MediaStreamTrack> track = tracklist->at(j);
|
||||
if (track->kind().compare(kAudioTrackKind) == 0) {
|
||||
const cricket::ContentInfo* audio_content =
|
||||
GetFirstAudioContent(answer_desc);
|
||||
|
||||
if (!audio_content) { // The remote does not accept audio.
|
||||
track->set_state(MediaStreamTrack::kFailed);
|
||||
continue;
|
||||
}
|
||||
const cricket::AudioContentDescription* audio_desc =
|
||||
static_cast<const cricket::AudioContentDescription*>(
|
||||
audio_content->description);
|
||||
// TODO(perkj): Do we need to store the codec in the track?
|
||||
if (audio_desc->codecs().size() <= 0) {
|
||||
// No common codec.
|
||||
track->set_state(MediaStreamTrack::kFailed);
|
||||
}
|
||||
track->set_state(MediaStreamTrack::kLive);
|
||||
stream_ok = true;
|
||||
}
|
||||
if (track->kind().compare(kVideoTrackKind) == 0) {
|
||||
const cricket::ContentInfo* video_content =
|
||||
GetFirstVideoContent(answer_desc);
|
||||
|
||||
if (!video_content) { // The remote does not accept video.
|
||||
track->set_state(MediaStreamTrack::kFailed);
|
||||
continue;
|
||||
}
|
||||
const cricket::VideoContentDescription* video_desc =
|
||||
static_cast<const cricket::VideoContentDescription*>(
|
||||
video_content->description);
|
||||
// TODO(perkj): Do we need to store the codec in the track?
|
||||
if (video_desc->codecs().size() <= 0) {
|
||||
// No common codec.
|
||||
track->set_state(MediaStreamTrack::kFailed);
|
||||
}
|
||||
track->set_state(MediaStreamTrack::kLive);
|
||||
stream_ok = true;
|
||||
}
|
||||
}
|
||||
if (stream_ok) {
|
||||
// We have successfully negotiated to send this stream.
|
||||
// Change the stream and store it as successfully negotiated.
|
||||
stream->set_ready_state(MediaStream::kLive);
|
||||
current_local_streams.insert(MediaStreamPair(stream->label(), stream));
|
||||
} else {
|
||||
stream->set_ready_state(MediaStream::kEnded);
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate the old list of local streams.
// If a stream is not found in the new list it has been removed.
// Change the state of the removed stream and all its tracks to kEnded.
|
||||
for (LocalStreamMap::iterator it = local_streams_.begin();
|
||||
it != local_streams_.end();
|
||||
++it) {
|
||||
scoped_refptr<MediaStream> old_stream(it->second);
|
||||
MediaStream* new_streams = negotiated_streams->find(old_stream->label());
|
||||
if (new_streams == NULL) {
|
||||
old_stream->set_ready_state(MediaStream::kEnded);
|
||||
scoped_refptr<MediaStreamTrackList> tracklist(old_stream->tracks());
|
||||
for (size_t j = 0; j < tracklist->count(); ++j) {
|
||||
tracklist->at(j)->set_state(MediaStreamTrack::kEnded);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update the local_streams_ for next update.
|
||||
local_streams_ = current_local_streams;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
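A brief note on the glare handling implemented above: when an offer arrives in state kWaitingForAnswer, the code answers with a kWrongState error and re-posts its own queued offer after a randomized delay. Restating the constants from the top of this file (a sketch of the arithmetic, not new code in the commit):

// const int timeout = kGlareMinWaitTime +
//     talk_base::CreateRandomId() % kGlareWaitIntervall;
// With kGlareMinWaitTime = 2000 and kGlareWaitIntervall = 1000 the delay is
// uniformly distributed over [2000, 2999] ms, i.e. the offer is resent after
// 2 to 3 seconds. The Glare unit test below therefore processes messages for
// kWaitTime = 5000 ms before expecting both sides to be back in kIdle.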
@ -0,0 +1,184 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2011, Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
// This file contains classes used for handling signaling between
|
||||
// two PeerConnections.
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
|
||||
#define TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
|
||||
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <string>
|
||||
|
||||
#include "talk/app/webrtc_dev/mediastreamimpl.h"
|
||||
#include "talk/app/webrtc_dev/peerconnection.h"
|
||||
#include "talk/app/webrtc_dev/ref_count.h"
|
||||
#include "talk/app/webrtc_dev/scoped_refptr.h"
|
||||
#include "talk/base/basictypes.h"
|
||||
#include "talk/base/messagehandler.h"
|
||||
#include "talk/base/scoped_ptr.h"
|
||||
#include "talk/base/thread.h"
|
||||
#include "talk/session/phone/mediasession.h"
|
||||
#include "talk/p2p/base/sessiondescription.h"
|
||||
|
||||
namespace cricket {
|
||||
class ChannelManager;
|
||||
}
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// PeerConnectionMessage represents an SDP offer or an answer.
|
||||
// Instances of this class can be serialized / deserialized and are used for
|
||||
// signaling between PeerConnection objects.
|
||||
// Each instance has a type, a sequence number and a session description.
|
||||
class PeerConnectionMessage : public RefCount {
|
||||
public:
|
||||
enum PeerConnectionMessageType {
|
||||
kOffer,
|
||||
kAnswer,
|
||||
kError
|
||||
};
|
||||
|
||||
enum ErrorCode {
|
||||
kNoError = 0,
|
||||
kWrongState = 10, // Offer received when Answer was expected.
|
||||
kParseError = 20, // Can't parse / process offer.
|
||||
kOfferNotAcceptable = 30, // The offer has been rejected.
|
||||
kMessageNotDeliverable = 40 // The signaling channel is broken.
|
||||
};
|
||||
|
||||
static scoped_refptr<PeerConnectionMessage> Create(
|
||||
PeerConnectionMessageType type,
|
||||
const cricket::SessionDescription* desc);
|
||||
|
||||
static scoped_refptr<PeerConnectionMessage> CreateErrorMessage(
|
||||
ErrorCode error);
|
||||
|
||||
PeerConnectionMessageType type() {return type_;}
|
||||
ErrorCode error() {return error_code_;}
|
||||
const cricket::SessionDescription* desc() {return desc_.get();}
|
||||
|
||||
// TODO(perkj): Add functions for serializing and deserializing this class.
|
||||
|
||||
protected:
|
||||
PeerConnectionMessage(PeerConnectionMessageType type,
|
||||
const cricket::SessionDescription* desc);
|
||||
explicit PeerConnectionMessage(ErrorCode error);
|
||||
|
||||
private:
|
||||
PeerConnectionMessageType type_;
|
||||
ErrorCode error_code_;
|
||||
talk_base::scoped_ptr<const cricket::SessionDescription> desc_;
|
||||
};
|
||||
|
||||
// PeerConnectionSignaling is a class responsible for handling signaling
|
||||
// between PeerConnection objects.
|
||||
// It creates remote MediaStream objects when the remote peer signals it wants
|
||||
// to send a new MediaStream.
|
||||
// It changes the state of local MediaStreams and tracks
|
||||
// when a remote peer is ready to receive media.
|
||||
// Call CreateOffer to negotiate new local streams to send.
|
||||
// Call ProcessSignalingMessage when a new PeerConnectionMessage has been
|
||||
// received from the remote peer.
|
||||
class PeerConnectionSignaling : public talk_base::MessageHandler {
|
||||
public:
|
||||
enum State {
|
||||
// Ready to send a new offer or receive a new offer.
|
||||
kIdle,
|
||||
// We have sent an offer and expect an answer, or we want to update
|
||||
// our own offer.
|
||||
kWaitingForAnswer,
|
||||
// While waiting for an answer to our offer we received an offer from
|
||||
// the remote peer.
|
||||
kGlare
|
||||
};
|
||||
|
||||
explicit PeerConnectionSignaling(cricket::ChannelManager* channel_manager);
|
||||
~PeerConnectionSignaling();
|
||||
|
||||
// Process a received offer/answer from the remote peer.
|
||||
void ProcessSignalingMessage(PeerConnectionMessage* message,
|
||||
StreamCollection* local_streams);
|
||||
|
||||
// Creates an offer containing all tracks in local_streams.
|
||||
// When the offer is ready it is signaled by SignalNewPeerConnectionMessage.
|
||||
// When the remote peer is ready to receive media on a stream, the state of
// the local stream will change to kLive.
|
||||
void CreateOffer(StreamCollection* local_streams);
|
||||
|
||||
// Returns the current state.
|
||||
State GetState();
|
||||
|
||||
// New PeerConnectionMessage with an SDP offer/answer is ready to be sent.
|
||||
// The listener to this signal is expected to serialize and send the
|
||||
// PeerConnectionMessage to the remote peer.
|
||||
sigslot::signal1<PeerConnectionMessage*> SignalNewPeerConnectionMessage;
|
||||
|
||||
// A new remote stream has been discovered.
|
||||
sigslot::signal1<MediaStream*> SignalRemoteStreamAdded;
|
||||
|
||||
// Remote stream is no longer available.
|
||||
sigslot::signal1<MediaStream*> SignalRemoteStreamRemoved;
|
||||
|
||||
// Remote PeerConnection sent an error message.
|
||||
sigslot::signal1<PeerConnectionMessage::ErrorCode> SignalErrorMessageReceived;
|
||||
|
||||
private:
|
||||
// Implement talk_base::MessageHandler.
|
||||
virtual void OnMessage(talk_base::Message* msg);
|
||||
void CreateOffer_s();
|
||||
void GenerateAnswer(PeerConnectionMessage* message,
|
||||
StreamCollection* local_streams);
|
||||
|
||||
void InitMediaSessionOptions(cricket::MediaSessionOptions* options,
|
||||
StreamCollection* local_streams);
|
||||
|
||||
void UpdateRemoteStreams(const cricket::SessionDescription* remote_desc);
|
||||
void UpdateSendingLocalStreams(
|
||||
const cricket::SessionDescription* answer_desc,
|
||||
StreamCollection* negotiated_streams);
|
||||
|
||||
typedef std::list<scoped_refptr<StreamCollection> > StreamCollectionList;
|
||||
StreamCollectionList queued_offers_;
|
||||
|
||||
talk_base::Thread* signaling_thread_;
|
||||
State state_;
|
||||
uint32 ssrc_counter_;
|
||||
|
||||
typedef std::map<std::string, scoped_refptr<MediaStreamImpl> >
|
||||
RemoteStreamMap;
|
||||
RemoteStreamMap remote_streams_;
|
||||
typedef std::map<std::string, scoped_refptr<MediaStream> >
|
||||
LocalStreamMap;
|
||||
LocalStreamMap local_streams_;
|
||||
cricket::MediaSessionDescriptionFactory session_description_factory_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
|
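To make the contract of the four signals above concrete, here is a short sketch of how an application could wire them up with sigslot, in the same way the unit test that follows does. SignalingListener and its method names are illustrative, and `signaling` is assumed to be an already constructed PeerConnectionSignaling instance.

class SignalingListener : public sigslot::has_slots<> {  // illustrative name
 public:
  void OnMessage(webrtc::PeerConnectionMessage* msg) {
    // Serialize |msg| and deliver it to the remote peer. (Serialization of
    // PeerConnectionMessage is still a TODO in this commit.)
  }
  void OnStreamAdded(webrtc::MediaStream* stream) { /* start rendering */ }
  void OnStreamRemoved(webrtc::MediaStream* stream) { /* stop rendering */ }
  void OnError(webrtc::PeerConnectionMessage::ErrorCode code) { /* report */ }
};

SignalingListener listener;
signaling.SignalNewPeerConnectionMessage.connect(
    &listener, &SignalingListener::OnMessage);
signaling.SignalRemoteStreamAdded.connect(
    &listener, &SignalingListener::OnStreamAdded);
signaling.SignalRemoteStreamRemoved.connect(
    &listener, &SignalingListener::OnStreamRemoved);
signaling.SignalErrorMessageReceived.connect(
    &listener, &SignalingListener::OnError);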
@ -0,0 +1,344 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2011, Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "talk/app/webrtc_dev/mediastreamimpl.h"
|
||||
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
|
||||
#include "talk/app/webrtc_dev/streamcollectionimpl.h"
|
||||
#include "talk/base/scoped_ptr.h"
|
||||
#include "talk/base/thread.h"
|
||||
#include "talk/session/phone/channelmanager.h"
|
||||
|
||||
static const char kStreamLabel1[] = "local_stream_1";
|
||||
static const char kAudioTrackLabel1[] = "local_audio_1";
|
||||
static const char kVideoTrackLabel1[] = "local_video_1";
|
||||
static const int kWaitTime = 5000;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
typedef std::map<std::string, scoped_refptr<MediaStream> > MediaStreamMap;
|
||||
typedef std::pair<std::string, scoped_refptr<MediaStream> > RemotePair;
|
||||
|
||||
class MockMediaTrackObserver : public webrtc::Observer {
|
||||
public:
|
||||
explicit MockMediaTrackObserver(MediaStreamTrack* track) : track_(track) {
|
||||
track_state = track->state();
|
||||
track->RegisterObserver(this);
|
||||
}
|
||||
|
||||
virtual void OnChanged() {
|
||||
track_state = track_->state();
|
||||
}
|
||||
|
||||
webrtc::MediaStreamTrack::TrackState track_state;
|
||||
private:
|
||||
scoped_refptr<MediaStreamTrack> track_;
|
||||
};
|
||||
|
||||
class MockMediaStreamObserver : public webrtc::Observer {
|
||||
public:
|
||||
explicit MockMediaStreamObserver(MediaStream* stream) : stream_(stream) {
|
||||
ready_state = stream->ready_state();
|
||||
stream_->RegisterObserver(this);
|
||||
}
|
||||
|
||||
virtual void OnChanged() {
|
||||
ready_state = stream_->ready_state();
|
||||
}
|
||||
|
||||
webrtc::MediaStream::ReadyState ready_state;
|
||||
private:
|
||||
scoped_refptr<MediaStream> stream_;
|
||||
};
|
||||
|
||||
class MockSignalingObserver : public sigslot::has_slots<> {
|
||||
public:
|
||||
MockSignalingObserver()
|
||||
: remote_peer_(NULL) {
|
||||
}
|
||||
|
||||
// New remote stream has been discovered.
|
||||
virtual void OnRemoteStreamAdded(MediaStream* remote_stream) {
|
||||
EXPECT_EQ(MediaStream::kLive, remote_stream->ready_state());
|
||||
remote_media_streams_.insert(RemotePair(remote_stream->label(),
|
||||
remote_stream));
|
||||
}
|
||||
|
||||
// Remote stream is no longer available.
|
||||
virtual void OnRemoteStreamRemoved(MediaStream* remote_stream) {
|
||||
EXPECT_NE(remote_media_streams_.find(remote_stream->label()),
|
||||
remote_media_streams_.end());
|
||||
remote_media_streams_.erase(remote_stream->label());
|
||||
}
|
||||
|
||||
// New answer ready to be sent.
|
||||
void OnSignalingMessage(PeerConnectionMessage* message) {
|
||||
if (remote_peer_) {
|
||||
remote_peer_->ProcessSignalingMessage(message, remote_local_collection_);
|
||||
}
|
||||
if (message->type() != PeerConnectionMessage::kError) {
|
||||
last_message = message;
|
||||
}
|
||||
}
|
||||
|
||||
// Tell this object to answer the remote_peer.
|
||||
// remote_local_collection is the local collection the remote peer wants to
|
||||
// send in an answer.
|
||||
void AnswerPeer(PeerConnectionSignaling* remote_peer,
|
||||
StreamCollectionImpl* remote_local_collection) {
|
||||
remote_peer_ = remote_peer;
|
||||
remote_local_collection_ = remote_local_collection;
|
||||
}
|
||||
|
||||
void CancelAnswerPeer() {
|
||||
remote_peer_ = NULL;
|
||||
remote_local_collection_.release();
|
||||
}
|
||||
|
||||
MediaStream* RemoteStream(const std::string& label) {
|
||||
MediaStreamMap::iterator it = remote_media_streams_.find(label);
|
||||
if (it != remote_media_streams_.end())
|
||||
return it->second;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
virtual ~MockSignalingObserver() {}
|
||||
|
||||
scoped_refptr<PeerConnectionMessage> last_message;
|
||||
|
||||
private:
|
||||
MediaStreamMap remote_media_streams_;
|
||||
scoped_refptr<StreamCollectionImpl> remote_local_collection_;
|
||||
PeerConnectionSignaling* remote_peer_;
|
||||
};
|
||||
|
||||
class PeerConnectionSignalingTest: public testing::Test {
|
||||
protected:
|
||||
virtual void SetUp() {
|
||||
channel_manager_.reset(new cricket::ChannelManager(
|
||||
talk_base::Thread::Current()));
|
||||
EXPECT_TRUE(channel_manager_->Init());
|
||||
|
||||
signaling1_.reset(new PeerConnectionSignaling(channel_manager_.get()));
|
||||
observer1_.reset(new MockSignalingObserver());
|
||||
signaling1_->SignalNewPeerConnectionMessage.connect(
|
||||
observer1_.get(), &MockSignalingObserver::OnSignalingMessage);
|
||||
signaling1_->SignalRemoteStreamAdded.connect(
|
||||
observer1_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
|
||||
signaling1_->SignalRemoteStreamRemoved.connect(
|
||||
observer1_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
|
||||
|
||||
signaling2_.reset(new PeerConnectionSignaling(channel_manager_.get()));
|
||||
observer2_.reset(new MockSignalingObserver());
|
||||
signaling2_->SignalNewPeerConnectionMessage.connect(
|
||||
observer2_.get(), &MockSignalingObserver::OnSignalingMessage);
|
||||
signaling2_->SignalRemoteStreamAdded.connect(
|
||||
observer2_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
|
||||
signaling2_->SignalRemoteStreamRemoved.connect(
|
||||
observer2_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
|
||||
}
|
||||
|
||||
talk_base::scoped_ptr<MockSignalingObserver> observer1_;
|
||||
talk_base::scoped_ptr<MockSignalingObserver> observer2_;
|
||||
talk_base::scoped_ptr<PeerConnectionSignaling> signaling1_;
|
||||
talk_base::scoped_ptr<PeerConnectionSignaling> signaling2_;
|
||||
talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
|
||||
};
|
||||
|
||||
TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
|
||||
// Create a local stream.
|
||||
std::string label(kStreamLabel1);
|
||||
scoped_refptr<LocalMediaStream> stream(CreateLocalMediaStream(label));
|
||||
MockMediaStreamObserver stream_observer1(stream);
|
||||
|
||||
// Add a local audio track.
|
||||
scoped_refptr<LocalAudioTrack> audio_track(
|
||||
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
|
||||
stream->AddTrack(audio_track);
|
||||
MockMediaTrackObserver track_observer1(audio_track);
|
||||
|
||||
// Peer 1 creates an offer with only one audio track.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection1(
|
||||
StreamCollectionImpl::Create());
|
||||
local_collection1->AddStream(stream);
|
||||
// Verify that the local stream is now initializing.
|
||||
EXPECT_EQ(MediaStream::kInitializing, stream_observer1.ready_state);
|
||||
// Verify that the audio track is now initializing.
|
||||
EXPECT_EQ(MediaStreamTrack::kInitializing, track_observer1.track_state);
|
||||
|
||||
// Peer 2 only receives. Create an empty collection.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection2(
|
||||
StreamCollectionImpl::Create());
|
||||
|
||||
// Connect all messages sent from Peer1 to be received on Peer2
|
||||
observer1_->AnswerPeer(signaling2_.get(), local_collection2);
|
||||
// Connect all messages sent from Peer2 to be received on Peer1
|
||||
observer2_->AnswerPeer(signaling1_.get(), local_collection1);
|
||||
|
||||
// Peer 1 generates the offer and sends it to Peer2.
|
||||
signaling1_->CreateOffer(local_collection1);
|
||||
|
||||
// Process posted messages.
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
|
||||
EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
|
||||
|
||||
// Verify that the local stream is now sending.
|
||||
EXPECT_EQ(MediaStream::kLive, stream_observer1.ready_state);
|
||||
// Verify that the local audio track is now sending.
|
||||
EXPECT_EQ(MediaStreamTrack::kLive, track_observer1.track_state);
|
||||
|
||||
// Verify that PeerConnection2 is aware of the sending stream.
|
||||
EXPECT_TRUE(observer2_->RemoteStream(label) != NULL);
|
||||
}
|
||||
|
||||
TEST_F(PeerConnectionSignalingTest, Glare) {
|
||||
// Create a local stream.
|
||||
std::string label(kStreamLabel1);
|
||||
scoped_refptr<LocalMediaStream> stream(CreateLocalMediaStream(label));
|
||||
|
||||
// Add a local audio track.
|
||||
scoped_refptr<LocalAudioTrack> audio_track(
|
||||
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
|
||||
stream->AddTrack(audio_track);
|
||||
|
||||
// Peer 1 creates an offer with only one audio track.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection1(
|
||||
StreamCollectionImpl::Create());
|
||||
local_collection1->AddStream(stream);
|
||||
signaling1_->CreateOffer(local_collection1);
|
||||
EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
|
||||
signaling1_->GetState());
|
||||
// Process posted messages.
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
// Peer 2 only receives. Create an empty collection.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection2(
|
||||
StreamCollectionImpl::Create());
|
||||
// Peer 2 creates an empty offer.
|
||||
signaling2_->CreateOffer(local_collection2);
|
||||
|
||||
// Process posted messages.
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
// Peer 2 sends the offer to Peer1 and Peer1 sends its offer to Peer2.
|
||||
ASSERT_TRUE(observer1_->last_message != NULL);
|
||||
ASSERT_TRUE(observer2_->last_message != NULL);
|
||||
signaling2_->ProcessSignalingMessage(observer1_->last_message,
|
||||
local_collection2);
|
||||
|
||||
signaling1_->ProcessSignalingMessage(observer2_->last_message,
|
||||
local_collection1);
|
||||
|
||||
EXPECT_EQ(PeerConnectionSignaling::kGlare, signaling1_->GetState());
|
||||
EXPECT_EQ(PeerConnectionSignaling::kGlare, signaling2_->GetState());
|
||||
|
||||
// Make sure all messages are sent between
|
||||
// the two PeerConnectionSignaling objects.
|
||||
observer1_->AnswerPeer(signaling2_.get(), local_collection2);
|
||||
observer2_->AnswerPeer(signaling1_.get(), local_collection1);
|
||||
|
||||
// Process all delayed posted messages.
|
||||
talk_base::Thread::Current()->ProcessMessages(kWaitTime);
|
||||
|
||||
EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
|
||||
EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
|
||||
|
||||
// Verify that PeerConnection2 is aware of the sending stream.
|
||||
EXPECT_TRUE(observer2_->RemoteStream(label) != NULL);
|
||||
}
|
||||
|
||||
TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
|
||||
// Create a local stream.
|
||||
std::string label(kStreamLabel1);
|
||||
scoped_refptr<LocalMediaStream> stream(CreateLocalMediaStream(label));
|
||||
MockMediaStreamObserver stream_observer1(stream);
|
||||
|
||||
// Add a local audio track.
|
||||
scoped_refptr<LocalAudioTrack> audio_track(
|
||||
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
|
||||
stream->AddTrack(audio_track);
|
||||
MockMediaTrackObserver track_observer1(audio_track);
|
||||
audio_track->RegisterObserver(&track_observer1);
|
||||
|
||||
// Add a local video track.
|
||||
scoped_refptr<LocalVideoTrack> video_track(
|
||||
CreateLocalVideoTrack(kAudioTrackLabel1, NULL));
|
||||
stream->AddTrack(audio_track);
|
||||
|
||||
// Peer 1 creates an empty collection.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection1(
|
||||
StreamCollectionImpl::Create());
|
||||
|
||||
// Peer 2 creates an empty collection.
|
||||
scoped_refptr<StreamCollectionImpl> local_collection2(
|
||||
StreamCollectionImpl::Create());
|
||||
|
||||
// Connect all messages sent from Peer1 to be received on Peer2
|
||||
observer1_->AnswerPeer(signaling2_.get(), local_collection2);
|
||||
// Connect all messages sent from Peer2 to be received on Peer1
|
||||
observer2_->AnswerPeer(signaling1_.get(), local_collection1);
|
||||
|
||||
// Peer 1 creates an empty offer and sends it to Peer2.
|
||||
signaling1_->CreateOffer(local_collection1);
|
||||
|
||||
// Process posted messages.
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
// Peer2 adds a stream.
|
||||
local_collection2->AddStream(stream);
|
||||
|
||||
signaling2_->CreateOffer(local_collection2);
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
// Verify that the PeerConnection 2 local stream is now sending.
|
||||
EXPECT_EQ(MediaStream::kLive, stream_observer1.ready_state);
|
||||
EXPECT_EQ(MediaStreamTrack::kLive, track_observer1.track_state);
|
||||
|
||||
// Verify that PeerConnection1 is aware of the sending stream.
|
||||
EXPECT_TRUE(observer1_->RemoteStream(label) != NULL);
|
||||
|
||||
// Remove the stream
|
||||
local_collection2->RemoveStream(stream);
|
||||
|
||||
signaling2_->CreateOffer(local_collection2);
|
||||
talk_base::Thread::Current()->ProcessMessages(1);
|
||||
|
||||
// Verify that PeerConnection1 is not aware of the sending stream.
|
||||
EXPECT_TRUE(observer1_->RemoteStream(label) == NULL);
|
||||
|
||||
// Verify that the PeerConnection 2 local stream is now ended.
|
||||
EXPECT_EQ(MediaStream::kEnded, stream_observer1.ready_state);
|
||||
EXPECT_EQ(MediaStreamTrack::kEnded, track_observer1.track_state);
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@ -28,6 +28,7 @@
|
||||
#ifndef TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
|
||||
#define TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "talk/app/webrtc_dev/peerconnection.h"
|
||||
@ -58,6 +59,16 @@ class StreamCollectionImpl : public StreamCollection {
|
||||
return media_streams_.at(index);
|
||||
}
|
||||
|
||||
virtual MediaStream* find(const std::string& label) {
|
||||
for (StreamVector::iterator it = media_streams_.begin();
|
||||
it != media_streams_.end(); ++it) {
|
||||
if ((*it)->label().compare(label) == 0) {
|
||||
return (*it);
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void AddStream(MediaStream* stream) {
|
||||
for (StreamVector::iterator it = media_streams_.begin();
|
||||
it != media_streams_.end(); ++it) {
|
||||
@ -77,15 +88,6 @@ class StreamCollectionImpl : public StreamCollection {
|
||||
}
|
||||
}
|
||||
|
||||
MediaStream* FindFirstStream(const std::string& label) {
|
||||
for (StreamVector::iterator it = media_streams_.begin();
|
||||
it != media_streams_.end(); ++it) {
|
||||
if ((*it)->label().compare(label) == 0) {
|
||||
return (*it);
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
protected:
|
||||
StreamCollectionImpl() {}
|
||||
|
@ -35,6 +35,7 @@ VideoTrackImpl::VideoTrackImpl(const std::string& label, uint32 ssrc)
|
||||
kind_(kVideoTrackKind),
|
||||
label_(label),
|
||||
ssrc_(ssrc),
|
||||
state_(kInitializing),
|
||||
video_device_(NULL) {
|
||||
}
|
||||
|
||||
@ -44,6 +45,7 @@ VideoTrackImpl::VideoTrackImpl(const std::string& label,
|
||||
kind_(kVideoTrackKind),
|
||||
label_(label),
|
||||
ssrc_(0),
|
||||
state_(kInitializing),
|
||||
video_device_(video_device) {
|
||||
}
|
||||
|
||||
@ -94,6 +96,18 @@ bool VideoTrackImpl::set_ssrc(uint32 ssrc) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MediaStreamTrack::TrackState VideoTrackImpl::state() {
|
||||
return state_;
|
||||
}
|
||||
|
||||
bool VideoTrackImpl::set_state(TrackState new_state) {
|
||||
bool fire_on_change = state_ != new_state;
|
||||
state_ = new_state;
|
||||
if (fire_on_change)
|
||||
NotifierImpl<LocalVideoTrack>::FireOnChanged();
|
||||
return true;
|
||||
}
|
||||
|
||||
scoped_refptr<VideoTrack> VideoTrackImpl::Create(const std::string& label,
|
||||
uint32 ssrc) {
|
||||
RefCountImpl<VideoTrackImpl>* track =
|
||||
|
@ -54,8 +54,10 @@ class VideoTrackImpl : public NotifierImpl<LocalVideoTrack> {
|
||||
virtual const std::string& label();
|
||||
virtual uint32 ssrc();
|
||||
virtual bool enabled();
|
||||
virtual TrackState state();
|
||||
virtual bool set_enabled(bool enable);
|
||||
virtual bool set_ssrc(uint32 ssrc);
|
||||
virtual bool set_state(TrackState new_state);
|
||||
|
||||
protected:
|
||||
VideoTrackImpl(const std::string& label, uint32 ssrc);
|
||||
@ -66,6 +68,7 @@ class VideoTrackImpl : public NotifierImpl<LocalVideoTrack> {
|
||||
std::string kind_;
|
||||
std::string label_;
|
||||
uint32 ssrc_;
|
||||
TrackState state_;
|
||||
scoped_refptr<VideoCaptureModule> video_device_;
|
||||
scoped_refptr<VideoRenderer> video_renderer_;
|
||||
};
|
||||
|
@ -0,0 +1,312 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2004--2005, Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "talk/session/phone/mediasession.h"
|
||||
|
||||
#include "talk/base/helpers.h"
|
||||
#include "talk/base/logging.h"
|
||||
#include "talk/p2p/base/constants.h"
|
||||
#include "talk/session/phone/channelmanager.h"
|
||||
#include "talk/session/phone/cryptoparams.h"
|
||||
#include "talk/session/phone/srtpfilter.h"
|
||||
#include "talk/xmpp/constants.h"
|
||||
|
||||
namespace {
|
||||
const char kInline[] = "inline:";
|
||||
}
|
||||
|
||||
namespace cricket {
|
||||
|
||||
static bool CreateCryptoParams(int tag, const std::string& cipher,
|
||||
CryptoParams *out) {
|
||||
std::string key;
|
||||
key.reserve(SRTP_MASTER_KEY_BASE64_LEN);
|
||||
|
||||
if (!talk_base::CreateRandomString(SRTP_MASTER_KEY_BASE64_LEN, &key)) {
|
||||
return false;
|
||||
}
|
||||
out->tag = tag;
|
||||
out->cipher_suite = cipher;
|
||||
out->key_params = kInline;
|
||||
out->key_params += key;
|
||||
return true;
|
||||
}
|
||||
|
||||
#ifdef HAVE_SRTP
|
||||
static bool AddCryptoParams(const std::string& cipher_suite,
|
||||
CryptoParamsVec *out) {
|
||||
int size = out->size();
|
||||
|
||||
out->resize(size + 1);
|
||||
return CreateCryptoParams(size, cipher_suite, &out->at(size));
|
||||
}
|
||||
#endif
|
||||
|
||||
// For audio, HMAC 32 is preferred because of the low overhead.
|
||||
static bool GetSupportedAudioCryptos(CryptoParamsVec* cryptos) {
|
||||
#ifdef HAVE_SRTP
|
||||
return AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_32, cryptos) &&
|
||||
AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_80, cryptos);
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
static bool GetSupportedVideoCryptos(CryptoParamsVec* cryptos) {
|
||||
#ifdef HAVE_SRTP
|
||||
return AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_80, cryptos);
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
// For video, support only 80-bit SHA1 HMAC. For audio, 32-bit HMAC is
|
||||
// tolerated because it is low overhead. Pick the crypto in the list
|
||||
// that is supported.
|
||||
static bool SelectCrypto(const MediaContentDescription* offer,
|
||||
CryptoParams *crypto) {
|
||||
bool audio = offer->type() == MEDIA_TYPE_AUDIO;
|
||||
const CryptoParamsVec& cryptos = offer->cryptos();
|
||||
|
||||
for (CryptoParamsVec::const_iterator i = cryptos.begin();
|
||||
i != cryptos.end(); ++i) {
|
||||
if (CS_AES_CM_128_HMAC_SHA1_80 == i->cipher_suite ||
|
||||
(CS_AES_CM_128_HMAC_SHA1_32 == i->cipher_suite && audio)) {
|
||||
return CreateCryptoParams(i->tag, i->cipher_suite, crypto);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||

MediaSessionDescriptionFactory::MediaSessionDescriptionFactory()
    : secure_(SEC_DISABLED) {
}

MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
    ChannelManager* channel_manager)
    : secure_(SEC_DISABLED) {
  channel_manager->GetSupportedAudioCodecs(&audio_codecs_);
  channel_manager->GetSupportedVideoCodecs(&video_codecs_);
}

SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
    const MediaSessionOptions& options) {
  SessionDescription* offer = new SessionDescription();

  if (true) {  // TODO: Allow audio to be optional
    AudioContentDescription* audio = new AudioContentDescription();
    for (AudioCodecs::const_iterator codec = audio_codecs_.begin();
         codec != audio_codecs_.end(); ++codec) {
      audio->AddCodec(*codec);
    }
    audio->SortCodecs();
    audio->set_ssrc(talk_base::CreateRandomNonZeroId());
    audio->set_rtcp_mux(true);
    audio->set_lang(lang_);
    audio->set_sources(options.audio_sources);

    if (secure() != SEC_DISABLED) {
      CryptoParamsVec audio_cryptos;
      if (GetSupportedAudioCryptos(&audio_cryptos)) {
        for (CryptoParamsVec::const_iterator crypto = audio_cryptos.begin();
             crypto != audio_cryptos.end(); ++crypto) {
          audio->AddCrypto(*crypto);
        }
      }
      if (secure() == SEC_REQUIRED) {
        if (audio->cryptos().empty()) {
          return NULL;  // Abort, crypto required but none found.
        }
        audio->set_crypto_required(true);
      }
    }

    offer->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio);
  }

  // add video codecs, if this is a video call
  if (options.is_video) {
    VideoContentDescription* video = new VideoContentDescription();
    for (VideoCodecs::const_iterator codec = video_codecs_.begin();
         codec != video_codecs_.end(); ++codec) {
      video->AddCodec(*codec);
    }

    video->SortCodecs();
    video->set_ssrc(talk_base::CreateRandomNonZeroId());
    video->set_bandwidth(options.video_bandwidth);
    video->set_rtcp_mux(true);
    video->set_sources(options.video_sources);

    if (secure() != SEC_DISABLED) {
      CryptoParamsVec video_cryptos;
      if (GetSupportedVideoCryptos(&video_cryptos)) {
        for (CryptoParamsVec::const_iterator crypto = video_cryptos.begin();
             crypto != video_cryptos.end(); ++crypto) {
          video->AddCrypto(*crypto);
        }
      }
      if (secure() == SEC_REQUIRED) {
        if (video->cryptos().empty()) {
          return NULL;  // Abort, crypto required but none found.
        }
        video->set_crypto_required(true);
      }
    }

    offer->AddContent(CN_VIDEO, NS_JINGLE_RTP, video);
  }

  return offer;
}
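// Editorial sketch (not part of the patch): how a caller might build a secure
// audio/video offer with this factory. The channel_manager pointer is assumed
// to be supplied by the surrounding application.
static SessionDescription* ExampleCreateSecureOffer(
    ChannelManager* channel_manager) {
  MediaSessionDescriptionFactory factory(channel_manager);
  factory.set_secure(SEC_ENABLED);  // Offer crypto, but do not require it.

  MediaSessionOptions options;
  options.is_video = true;                   // Offer video as well as audio.
  options.video_bandwidth = kAutoBandwidth;  // Let the channel pick a rate.

  // With SEC_REQUIRED, CreateOffer() returns NULL when no crypto suite is
  // available; with SEC_ENABLED it simply omits the crypto parameters.
  return factory.CreateOffer(options);
}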

SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
    const SessionDescription* offer, const MediaSessionOptions& options) {
  // The answer contains the intersection of the codecs in the offer with the
  // codecs we support, ordered by our local preference. As indicated by
  // XEP-0167, we retain the same payload ids from the offer in the answer.
  SessionDescription* accept = new SessionDescription();

  const ContentInfo* audio_content = GetFirstAudioContent(offer);
  if (audio_content) {
    const AudioContentDescription* audio_offer =
        static_cast<const AudioContentDescription*>(audio_content->description);
    AudioContentDescription* audio_accept = new AudioContentDescription();
    for (AudioCodecs::const_iterator ours = audio_codecs_.begin();
         ours != audio_codecs_.end(); ++ours) {
      for (AudioCodecs::const_iterator theirs = audio_offer->codecs().begin();
           theirs != audio_offer->codecs().end(); ++theirs) {
        if (ours->Matches(*theirs)) {
          AudioCodec negotiated(*ours);
          negotiated.id = theirs->id;
          audio_accept->AddCodec(negotiated);
        }
      }
    }

    audio_accept->SortCodecs();
    audio_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
    audio_accept->set_rtcp_mux(audio_offer->rtcp_mux());
    audio_accept->set_sources(options.audio_sources);

    if (secure() != SEC_DISABLED) {
      CryptoParams crypto;

      if (SelectCrypto(audio_offer, &crypto)) {
        audio_accept->AddCrypto(crypto);
      }
    }

    if (audio_accept->cryptos().empty() &&
        (audio_offer->crypto_required() || secure() == SEC_REQUIRED)) {
      return NULL;  // Fails the session setup.
    }
    accept->AddContent(audio_content->name, audio_content->type, audio_accept);
  }

  const ContentInfo* video_content = GetFirstVideoContent(offer);
  if (video_content && options.is_video) {
    const VideoContentDescription* video_offer =
        static_cast<const VideoContentDescription*>(video_content->description);
    VideoContentDescription* video_accept = new VideoContentDescription();
    for (VideoCodecs::const_iterator ours = video_codecs_.begin();
         ours != video_codecs_.end(); ++ours) {
      for (VideoCodecs::const_iterator theirs = video_offer->codecs().begin();
           theirs != video_offer->codecs().end(); ++theirs) {
        if (ours->Matches(*theirs)) {
          VideoCodec negotiated(*ours);
          negotiated.id = theirs->id;
          video_accept->AddCodec(negotiated);
        }
      }
    }

    video_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
    video_accept->set_bandwidth(options.video_bandwidth);
    video_accept->set_rtcp_mux(video_offer->rtcp_mux());
    video_accept->SortCodecs();
    video_accept->set_sources(options.video_sources);

    if (secure() != SEC_DISABLED) {
      CryptoParams crypto;

      if (SelectCrypto(video_offer, &crypto)) {
        video_accept->AddCrypto(crypto);
      }
    }

    if (video_accept->cryptos().empty() &&
        (video_offer->crypto_required() || secure() == SEC_REQUIRED)) {
      return NULL;  // Fails the session setup.
    }
    accept->AddContent(video_content->name, video_content->type, video_accept);
  }
  return accept;
}
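// Editorial sketch (not part of the patch): the answering side. The factory
// and incoming_offer arguments are assumed inputs; CreateAnswer() returns
// NULL when the offer requires crypto that cannot be satisfied.
static SessionDescription* ExampleAnswerOffer(
    MediaSessionDescriptionFactory* factory,
    const SessionDescription* incoming_offer) {
  MediaSessionOptions options;
  // Only answer with video if the offer actually carries a video content.
  options.is_video = (GetFirstVideoContent(incoming_offer) != NULL);
  return factory->CreateAnswer(incoming_offer, options);
}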

static bool IsMediaContent(const ContentInfo* content, MediaType media_type) {
  if (content == NULL || content->type != NS_JINGLE_RTP) {
    return false;
  }

  const MediaContentDescription* media =
      static_cast<const MediaContentDescription*>(content->description);
  return media->type() == media_type;
}

bool IsAudioContent(const ContentInfo* content) {
  return IsMediaContent(content, MEDIA_TYPE_AUDIO);
}

bool IsVideoContent(const ContentInfo* content) {
  return IsMediaContent(content, MEDIA_TYPE_VIDEO);
}

static const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc,
                                               MediaType media_type) {
  if (sdesc == NULL)
    return NULL;

  const ContentInfos& contents = sdesc->contents();
  for (ContentInfos::const_iterator content = contents.begin();
       content != contents.end(); content++) {
    if (IsMediaContent(&*content, media_type)) {
      return &*content;
    }
  }
  return NULL;
}

const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc) {
  return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO);
}

const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc) {
  return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO);
}

} // namespace cricket
@ -0,0 +1,256 @@
/*
 * libjingle
 * Copyright 2004--2005, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// Types and classes used in media session descriptions.

#ifndef TALK_SESSION_PHONE_MEDIASESSION_H_
#define TALK_SESSION_PHONE_MEDIASESSION_H_

#include <string>
#include <vector>
#include <algorithm>

#include "talk/session/phone/codec.h"
#include "talk/session/phone/cryptoparams.h"
#include "talk/session/phone/mediachannel.h"
#include "talk/p2p/base/sessiondescription.h"

namespace cricket {

class ChannelManager;
typedef std::vector<AudioCodec> AudioCodecs;
typedef std::vector<VideoCodec> VideoCodecs;
typedef std::vector<CryptoParams> CryptoParamsVec;

// SEC_ENABLED and SEC_REQUIRED should only be used if the session
// was negotiated over TLS, to protect the inline crypto material
// exchange.
// SEC_DISABLED: No crypto in outgoing offer and answer. Fail any
//               offer with crypto required.
// SEC_ENABLED:  Crypto in outgoing offer and answer. Fail any offer
//               with unsupported required crypto. Crypto set but not
//               required in outgoing offer.
// SEC_REQUIRED: Crypto in outgoing offer and answer with
//               required='true'. Fail any offer with no or
//               unsupported crypto (implicit crypto required='true'
//               in the offer).
enum SecureMediaPolicy {
  SEC_DISABLED,
  SEC_ENABLED,
  SEC_REQUIRED
};

// Structure to describe a sending source.
struct SourceParam {
  SourceParam(uint32 ssrc,
              const std::string& description,
              const std::string& cname)
      : ssrc(ssrc), description(description), cname(cname) {}
  uint32 ssrc;
  std::string description;
  std::string cname;
};
typedef std::vector<SourceParam> Sources;

// Options to control how session descriptions are generated.
const int kAutoBandwidth = -1;
struct MediaSessionOptions {
  MediaSessionOptions() :
      is_video(false),
      is_muc(false),
      video_bandwidth(kAutoBandwidth) {
  }
  Sources audio_sources;
  Sources video_sources;
  bool is_video;
  bool is_muc;
  // bps. -1 == auto.
  int video_bandwidth;
};

enum MediaType {
  MEDIA_TYPE_AUDIO,
  MEDIA_TYPE_VIDEO
};

// "content" (as used in XEP-0166) descriptions for voice and video.
class MediaContentDescription : public ContentDescription {
 public:
  MediaContentDescription()
      : ssrc_(0),
        ssrc_set_(false),
        rtcp_mux_(false),
        bandwidth_(kAutoBandwidth),
        crypto_required_(false),
        rtp_header_extensions_set_(false) {
  }

  virtual MediaType type() const = 0;

  uint32 ssrc() const { return ssrc_; }
  bool ssrc_set() const { return ssrc_set_; }
  void set_ssrc(uint32 ssrc) {
    ssrc_ = ssrc;
    ssrc_set_ = true;
  }

  bool rtcp_mux() const { return rtcp_mux_; }
  void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; }

  int bandwidth() const { return bandwidth_; }
  void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; }

  const std::vector<CryptoParams>& cryptos() const { return cryptos_; }
  void AddCrypto(const CryptoParams& params) {
    cryptos_.push_back(params);
  }
  bool crypto_required() const { return crypto_required_; }
  void set_crypto_required(bool crypto) {
    crypto_required_ = crypto;
  }

  const std::vector<RtpHeaderExtension>& rtp_header_extensions() const {
    return rtp_header_extensions_;
  }
  void AddRtpHeaderExtension(const RtpHeaderExtension& ext) {
    rtp_header_extensions_.push_back(ext);
    rtp_header_extensions_set_ = true;
  }
  void ClearRtpHeaderExtensions() {
    rtp_header_extensions_.clear();
    rtp_header_extensions_set_ = true;
  }
  // We can't always tell whether an empty list of header extensions means
  // the other side doesn't support them or just isn't hooked up to signal
  // them. For now we assume an empty list means no signaling, but provide
  // the ClearRtpHeaderExtensions method to allow "no support" to be clearly
  // indicated (i.e. when derived from other information).
  bool rtp_header_extensions_set() const {
    return rtp_header_extensions_set_;
  }
  const Sources& sources() const {
    return sources_;
  }
  void set_sources(const Sources& sources) {
    sources_ = sources;
  }

 protected:
  uint32 ssrc_;
  bool ssrc_set_;
  bool rtcp_mux_;
  int bandwidth_;
  std::vector<CryptoParams> cryptos_;
  bool crypto_required_;
  std::vector<RtpHeaderExtension> rtp_header_extensions_;
  bool rtp_header_extensions_set_;
  std::vector<SourceParam> sources_;
};

template <class C>
class MediaContentDescriptionImpl : public MediaContentDescription {
 public:
  struct PreferenceSort {
    bool operator()(C a, C b) { return a.preference > b.preference; }
  };

  const std::vector<C>& codecs() const { return codecs_; }
  void AddCodec(const C& codec) {
    codecs_.push_back(codec);
  }
  void SortCodecs() {
    std::sort(codecs_.begin(), codecs_.end(), PreferenceSort());
  }

 private:
  std::vector<C> codecs_;
};

class AudioContentDescription : public MediaContentDescriptionImpl<AudioCodec> {
 public:
  AudioContentDescription() :
      conference_mode_(false) {}

  virtual MediaType type() const { return MEDIA_TYPE_AUDIO; }

  bool conference_mode() const { return conference_mode_; }
  void set_conference_mode(bool enable) {
    conference_mode_ = enable;
  }

  const std::string& lang() const { return lang_; }
  void set_lang(const std::string& lang) { lang_ = lang; }

 private:
  bool conference_mode_;
  std::string lang_;
};

class VideoContentDescription : public MediaContentDescriptionImpl<VideoCodec> {
 public:
  virtual MediaType type() const { return MEDIA_TYPE_VIDEO; }
};

// Creates media session descriptions according to the supplied codecs and
// other fields, as well as the supplied per-call options.
// When creating answers, performs the appropriate negotiation
// of the various fields to determine the proper result.
class MediaSessionDescriptionFactory {
 public:
  // Default ctor; use methods below to set configuration.
  MediaSessionDescriptionFactory();
  // Helper, to allow configuration to be loaded from a ChannelManager.
  explicit MediaSessionDescriptionFactory(ChannelManager* manager);

  const AudioCodecs& audio_codecs() const { return audio_codecs_; }
  void set_audio_codecs(const AudioCodecs& codecs) { audio_codecs_ = codecs; }
  const VideoCodecs& video_codecs() const { return video_codecs_; }
  void set_video_codecs(const VideoCodecs& codecs) { video_codecs_ = codecs; }
  SecureMediaPolicy secure() const { return secure_; }
  void set_secure(SecureMediaPolicy s) { secure_ = s; }

  SessionDescription* CreateOffer(const MediaSessionOptions& options);
  SessionDescription* CreateAnswer(const SessionDescription* offer,
                                   const MediaSessionOptions& options);

 private:
  AudioCodecs audio_codecs_;
  VideoCodecs video_codecs_;
  SecureMediaPolicy secure_;
  std::string lang_;
};

// Convenience functions.
bool IsAudioContent(const ContentInfo* content);
bool IsVideoContent(const ContentInfo* content);
const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc);
const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc);

} // namespace cricket

#endif // TALK_SESSION_PHONE_MEDIASESSION_H_
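// Editorial sketch (not part of the new header above): describing local
// sending sources via MediaSessionOptions and reading them back from the
// negotiated audio content. The factory pointer, SSRC values and cname are
// placeholders.
#include "talk/session/phone/mediasession.h"

void ExampleDescribeSources(cricket::MediaSessionDescriptionFactory* factory) {
  cricket::MediaSessionOptions options;
  options.audio_sources.push_back(
      cricket::SourceParam(1001, "microphone", "user@example.com"));
  options.video_sources.push_back(
      cricket::SourceParam(2002, "camera", "user@example.com"));
  options.is_video = true;

  cricket::SessionDescription* offer = factory->CreateOffer(options);
  const cricket::ContentInfo* audio_content =
      cricket::GetFirstAudioContent(offer);
  if (audio_content) {
    const cricket::MediaContentDescription* audio =
        static_cast<const cricket::MediaContentDescription*>(
            audio_content->description);
    // CreateOffer() copies the SourceParams supplied in the options.
    const cricket::Sources& sources = audio->sources();
    (void)sources;  // In real code these would drive RTP/RTCP setup.
  }
  delete offer;  // Assuming the caller owns the returned description.
}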