Merge streamparams and mediasession from libjingle and made necessary changes in peerconnection.

-Removed ssrc from tracks.
-Updated PeerConnectionMessage parsing and serialization.

BUG=
TEST=

Review URL: http://webrtc-codereview.appspot.com/239020

git-svn-id: http://webrtc.googlecode.com/svn/trunk@856 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
perkj@webrtc.org 2011-11-01 11:46:56 +00:00
parent d6837709cf
commit 36a992b030
28 changed files with 629 additions and 488 deletions

View File

@ -642,8 +642,10 @@
},{
'sources': [
'<(libjingle_orig)/source/talk/session/phone/channel.cc',
'<(libjingle_orig)/source/talk/session/phone/channel.h',
'<(libjingle_orig)/source/talk/session/phone/channel.h',
'<(libjingle_orig)/source/talk/session/phone/mediasession.cc',
'<(libjingle_orig)/source/talk/session/phone/mediasession.h',
'<(libjingle_orig)/source/talk/session/phone/sourceparams.h',
],
}], # peer_connection_dev
], # conditions
@ -717,14 +719,11 @@
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmessage.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/sessiondescriptionprovider.h'
'<(libjingle_mods)/source/talk/app/webrtc_dev/streamcollectionimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videorendererimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videotrackimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videotrackimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtc_devicemanager.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtc_devicemanager.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcjson.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcjson.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcsessionobserver',
@ -773,8 +772,9 @@
'sources': [
'<(libjingle_mods)/source/talk/app/webrtc_dev/test/filevideocapturemodule.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastream_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamhandler_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamhandler_unittest.cc',
# // TODO (henrike): Re add when there is no dependency to foreman.yuv
#'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittests.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionfactory_unittest.cc',

View File

@ -32,14 +32,14 @@ namespace webrtc {
static const char kAudioTrackKind[] = "audio";
AudioTrack::AudioTrack(const std::string& label, uint32 ssrc)
: MediaTrack<LocalAudioTrackInterface>(label, ssrc),
AudioTrack::AudioTrack(const std::string& label)
: MediaTrack<LocalAudioTrackInterface>(label),
audio_device_(NULL) {
}
AudioTrack::AudioTrack(const std::string& label,
AudioDeviceModule* audio_device)
: MediaTrack<LocalAudioTrackInterface>(label, 0),
: MediaTrack<LocalAudioTrackInterface>(label),
audio_device_(audio_device) {
}
@ -54,10 +54,9 @@ std::string AudioTrack::kind() const {
}
talk_base::scoped_refptr<AudioTrack> AudioTrack::CreateRemote(
const std::string& label,
uint32 ssrc) {
const std::string& label) {
talk_base::RefCountedObject<AudioTrack>* track =
new talk_base::RefCountedObject<AudioTrack>(label, ssrc);
new talk_base::RefCountedObject<AudioTrack>(label);
return track;
}

View File

@ -45,8 +45,7 @@ class AudioTrack : public MediaTrack<LocalAudioTrackInterface> {
public:
// Creates a remote audio track.
static talk_base::scoped_refptr<AudioTrack> CreateRemote(
const std::string& label,
uint32 ssrc);
const std::string& label);
// Creates a local audio track.
static talk_base::scoped_refptr<AudioTrack> CreateLocal(
const std::string& label,
@ -59,7 +58,7 @@ class AudioTrack : public MediaTrack<LocalAudioTrackInterface> {
virtual std::string kind() const;
protected:
AudioTrack(const std::string& label, uint32 ssrc);
explicit AudioTrack(const std::string& label);
AudioTrack(const std::string& label, AudioDeviceModule* audio_device);
private:

View File

@ -80,12 +80,10 @@ class MediaStreamTrackInterface : public talk_base::RefCountInterface,
virtual std::string kind() const = 0;
virtual std::string label() const = 0;
virtual uint32 ssrc() const = 0;
virtual bool enabled() const = 0;
virtual TrackState state() const = 0;
virtual bool set_enabled(bool enable) = 0;
// These methods should be called by implementation only.
virtual bool set_ssrc(uint32 ssrc) = 0;
virtual bool set_state(TrackState new_state) = 0;
};

View File

@ -57,19 +57,6 @@ class ReadyStateMessageData : public talk_base::MessageData {
webrtc::MediaStreamInterface::ReadyState ready_state_;
};
class SsrcMessageData : public talk_base::MessageData {
public:
SsrcMessageData(
webrtc::MediaStreamTrackInterface* track,
uint32 ssrc)
: track_(track),
ssrc_(ssrc) {
}
scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
uint32 ssrc_;
};
class TrackStateMessageData : public talk_base::MessageData {
public:
TrackStateMessageData(
@ -175,10 +162,6 @@ class MockMediaStreamTrack: public T {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->label();
}
virtual uint32 ssrc() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->ssrc();
}
virtual bool enabled() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->enabled();
@ -191,10 +174,6 @@ class MockMediaStreamTrack: public T {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->set_enabled(enabled);
}
virtual bool set_ssrc(uint32 ssrc) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->set_ssrc(ssrc);
}
virtual bool set_state(webrtc::MediaStreamTrackInterface::TrackState state) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->set_state(state);
@ -273,7 +252,6 @@ class MediaStreamTest: public testing::Test,
ASSERT_TRUE(video_track_.get());
EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
EXPECT_EQ(0u, video_track_->ssrc());
// Create an audio track proxy object that uses our mocked
// version of a LocalAudioTrack
@ -287,12 +265,10 @@ class MediaStreamTest: public testing::Test,
ASSERT_TRUE(audio_track_.get());
EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
EXPECT_EQ(0u, audio_track_->ssrc());
}
enum {
MSG_SET_READYSTATE,
MSG_SET_SSRC,
MSG_SET_TRACKSTATE,
};
@ -312,14 +288,6 @@ class MediaStreamTest: public testing::Test,
signaling_thread_->Send(this, MSG_SET_TRACKSTATE, &state);
}
// Set the ssrc on a track on the signaling thread.
// Ssrc can only be changed on the signaling thread.
void SetSsrc(MediaStreamTrackInterface* track,
uint32 new_ssrc) {
SsrcMessageData ssrc(track, new_ssrc);
signaling_thread_->Send(this, MSG_SET_SSRC, &ssrc);
}
talk_base::scoped_ptr<talk_base::Thread> signaling_thread_;
scoped_refptr<LocalMediaStreamInterface> stream_;
scoped_refptr<LocalVideoTrackInterface> video_track_;
@ -335,12 +303,6 @@ class MediaStreamTest: public testing::Test,
state->stream_->set_ready_state(state->ready_state_);
break;
}
case MSG_SET_SSRC: {
SsrcMessageData* ssrc =
static_cast<SsrcMessageData*>(msg->pdata);
ssrc->track_->set_ssrc(ssrc->ssrc_);
break;
}
case MSG_SET_TRACKSTATE: {
TrackStateMessageData* state =
static_cast<TrackStateMessageData*>(msg->pdata);
@ -393,15 +355,9 @@ TEST_F(MediaStreamTest, ChangeVideoTrack) {
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
video_track_->set_enabled(false);
EXPECT_FALSE(video_track_->state());
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetSsrc(video_track_, 255);
EXPECT_EQ(255u, video_track_->ssrc());
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetTrackState(video_track_, MediaStreamTrackInterface::kLive);
@ -421,15 +377,9 @@ TEST_F(MediaStreamTest, ChangeAudioTrack) {
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
audio_track_->set_enabled(false);
EXPECT_FALSE(audio_track_->enabled());
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetSsrc(audio_track_, 255);
EXPECT_EQ(255u, audio_track_->ssrc());
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetTrackState(audio_track_, MediaStreamTrackInterface::kLive);

View File

@ -74,26 +74,26 @@ LocalVideoTrackHandler::LocalVideoTrackHandler(
LocalVideoTrackHandler::~LocalVideoTrackHandler() {
// Since cricket::VideoRenderer is not reference counted
// we need to remove the renderer before we are deleted.
provider_->SetLocalRenderer(video_track_->ssrc(), NULL);
provider_->SetLocalRenderer(video_track_->label(), NULL);
}
void LocalVideoTrackHandler::OnRendererChanged() {
VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetLocalRenderer(video_track_->ssrc(), renderer->renderer());
provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
else
provider_->SetLocalRenderer(video_track_->ssrc(), NULL);
provider_->SetLocalRenderer(video_track_->label(), NULL);
}
void LocalVideoTrackHandler::OnStateChanged() {
if (local_video_track_->state() == VideoTrackInterface::kLive) {
provider_->SetCaptureDevice(local_video_track_->ssrc(),
provider_->SetCaptureDevice(local_video_track_->label(),
local_video_track_->GetVideoCapture());
VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetLocalRenderer(video_track_->ssrc(), renderer->renderer());
provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
else
provider_->SetLocalRenderer(video_track_->ssrc(), NULL);
provider_->SetLocalRenderer(video_track_->label(), NULL);
}
}
@ -111,16 +111,16 @@ RemoteVideoTrackHandler::RemoteVideoTrackHandler(
RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
// Since cricket::VideoRenderer is not reference counted
// we need to remove the renderer before we are deleted.
provider_->SetRemoteRenderer(video_track_->ssrc(), NULL);
provider_->SetRemoteRenderer(video_track_->label(), NULL);
}
void RemoteVideoTrackHandler::OnRendererChanged() {
VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetRemoteRenderer(video_track_->ssrc(), renderer->renderer());
provider_->SetRemoteRenderer(video_track_->label(), renderer->renderer());
else
provider_->SetRemoteRenderer(video_track_->ssrc(), NULL);
provider_->SetRemoteRenderer(video_track_->label(), NULL);
}
void RemoteVideoTrackHandler::OnStateChanged() {

View File

@ -39,28 +39,28 @@ using ::testing::Exactly;
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoDeviceName[] = "dummy_video_cam_1";
static const uint32 kVideoSsrc = 1;
namespace webrtc {
// Helper class to test MediaStreamHandler.
class MockMediaProvier : public MediaProviderInterface {
public:
MOCK_METHOD1(SetCaptureDevice, void(uint32 ssrc));
MOCK_METHOD1(SetLocalRenderer, void(uint32 ssrc));
MOCK_METHOD1(SetRemoteRenderer, void(uint32 ssrc));
MOCK_METHOD1(SetCaptureDevice, void(const std::string& name));
MOCK_METHOD1(SetLocalRenderer, void(const std::string& name));
MOCK_METHOD1(SetRemoteRenderer, void(const std::string& name));
virtual void SetCaptureDevice(uint32 ssrc, VideoCaptureModule* camera) {
SetCaptureDevice(ssrc);
virtual void SetCaptureDevice(const std::string& name,
VideoCaptureModule* camera) {
SetCaptureDevice(name);
}
virtual void SetLocalRenderer(uint32 ssrc,
virtual void SetLocalRenderer(const std::string& name,
cricket::VideoRenderer* renderer) {
SetLocalRenderer(ssrc);
SetLocalRenderer(name);
}
virtual void SetRemoteRenderer(uint32 ssrc,
virtual void SetRemoteRenderer(const std::string& name,
cricket::VideoRenderer* renderer) {
SetRemoteRenderer(ssrc);
SetRemoteRenderer(name);
}
~MockMediaProvier() {}
};
@ -72,7 +72,6 @@ TEST(MediaStreamHandlerTest, LocalStreams) {
MediaStream::Create(label));
talk_base::scoped_refptr<LocalVideoTrackInterface>
video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
video_track->set_ssrc(kVideoSsrc);
EXPECT_TRUE(stream->AddTrack(video_track));
talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(
CreateVideoRenderer(NULL));
@ -85,10 +84,10 @@ TEST(MediaStreamHandlerTest, LocalStreams) {
StreamCollectionImpl::Create());
collection->AddStream(stream);
EXPECT_CALL(provider, SetLocalRenderer(kVideoSsrc))
.Times(Exactly(2)); // SetLocalRender will also be called from dtor of
// LocalVideoTrackHandler
EXPECT_CALL(provider, SetCaptureDevice(kVideoSsrc))
EXPECT_CALL(provider, SetLocalRenderer(kVideoDeviceName))
.Times(Exactly(2)); // SetLocalRender will also be called from dtor of
// LocalVideoTrackHandler
EXPECT_CALL(provider, SetCaptureDevice(kVideoDeviceName))
.Times(Exactly(1));
handlers.CommitLocalStreams(collection);
@ -113,7 +112,6 @@ TEST(MediaStreamHandlerTest, RemoteStreams) {
MediaStream::Create(label));
talk_base::scoped_refptr<LocalVideoTrackInterface>
video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
video_track->set_ssrc(kVideoSsrc);
EXPECT_TRUE(stream->AddTrack(video_track));
MockMediaProvier provider;
@ -121,9 +119,9 @@ TEST(MediaStreamHandlerTest, RemoteStreams) {
handlers.AddRemoteStream(stream);
EXPECT_CALL(provider, SetRemoteRenderer(kVideoSsrc))
.Times(Exactly(3)); // SetRemoteRenderer is also called from dtor of
// RemoteVideoTrackHandler.
EXPECT_CALL(provider, SetRemoteRenderer(kVideoDeviceName))
.Times(Exactly(3)); // SetRemoteRenderer is also called from dtor of
// RemoteVideoTrackHandler.
// Set the renderer once.
talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(

View File

@ -37,10 +37,11 @@ namespace webrtc {
// set new devices.
class MediaProviderInterface {
public:
virtual void SetCaptureDevice(uint32 ssrc, VideoCaptureModule* camera) = 0;
virtual void SetLocalRenderer(uint32 ssrc,
virtual void SetCaptureDevice(const std::string& name,
VideoCaptureModule* camera) = 0;
virtual void SetLocalRenderer(const std::string& name,
cricket::VideoRenderer* renderer) = 0;
virtual void SetRemoteRenderer(uint32 ssrc,
virtual void SetRemoteRenderer(const std::string& name,
cricket::VideoRenderer* renderer) = 0;
protected:
virtual ~MediaProviderInterface() {}

View File

@ -80,7 +80,7 @@ class MediaStreamProxy : public LocalMediaStreamInterface,
class MediaStreamTrackListProxy : public MediaStreamTrackListInterface<T>,
public talk_base::MessageHandler {
public:
MediaStreamTrackListProxy(talk_base::Thread* signaling_thread);
explicit MediaStreamTrackListProxy(talk_base::Thread* signaling_thread);
void SetImplementation(MediaStreamTrackListInterface<T>* track_list);
virtual size_t count();

View File

@ -36,7 +36,6 @@ enum {
MSG_ENABLED,
MSG_SET_ENABLED,
MSG_STATE,
MSG_SSRC,
MSG_GET_AUDIODEVICE,
MSG_GET_VIDEODEVICE,
MSG_GET_VIDEORENDERER,
@ -48,7 +47,6 @@ typedef talk_base::TypedMessageData<webrtc::ObserverInterface*>
ObserverMessageData;
typedef talk_base::TypedMessageData
<webrtc::MediaStreamTrackInterface::TrackState> TrackStateMessageData;
typedef talk_base::TypedMessageData<uint32> SsrcMessageData;
typedef talk_base::TypedMessageData<bool> EnableMessageData;
@ -99,16 +97,6 @@ std::string MediaStreamTrackProxy<T>::label() const {
return track_->label();
}
template <class T>
uint32 MediaStreamTrackProxy<T>::ssrc() const {
if (!signaling_thread_->IsCurrent()) {
SsrcMessageData msg(0);
Send(MSG_SSRC, &msg);
return msg.data();
}
return track_->ssrc();
}
template <class T>
MediaStreamTrackInterface::TrackState MediaStreamTrackProxy<T>::state() const {
if (!signaling_thread_->IsCurrent()) {
@ -150,16 +138,6 @@ bool MediaStreamTrackProxy<T>::set_state(
return track_->set_state(new_state);
}
template <class T>
bool MediaStreamTrackProxy<T>::set_ssrc(uint32 ssrc) {
if (!signaling_thread_->IsCurrent()) {
// ssrc should only be allowed to be changed from the signaling thread.
ASSERT(!"Not Allowed!");
return false;
}
return track_->set_ssrc(ssrc);
}
template <class T>
void MediaStreamTrackProxy<T>::RegisterObserver(ObserverInterface* observer) {
if (!signaling_thread_->IsCurrent()) {
@ -208,12 +186,6 @@ bool MediaStreamTrackProxy<T>::HandleMessage(talk_base::Message* msg) {
*(label->data()) = track_->label();
return true;
}
case MSG_SSRC: {
SsrcMessageData* ssrc = static_cast<SsrcMessageData*>(data);
ssrc->data() = track_->ssrc();
return true;
break;
}
case MSG_SET_ENABLED: {
EnableMessageData* enabled = static_cast<EnableMessageData*>(data);
enabled->data() = track_->set_enabled(enabled->data());
@ -238,10 +210,9 @@ bool MediaStreamTrackProxy<T>::HandleMessage(talk_base::Message* msg) {
}
AudioTrackProxy::AudioTrackProxy(const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread)
: MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
audio_track_(AudioTrack::CreateRemote(label, ssrc)) {
audio_track_(AudioTrack::CreateRemote(label)) {
Init(audio_track_);
}
@ -262,12 +233,10 @@ AudioTrackProxy::AudioTrackProxy(LocalAudioTrackInterface* implementation,
talk_base::scoped_refptr<AudioTrackInterface> AudioTrackProxy::CreateRemote(
const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread) {
ASSERT(signaling_thread);
talk_base::RefCountedObject<AudioTrackProxy>* track =
new talk_base::RefCountedObject<AudioTrackProxy>(label, ssrc,
signaling_thread);
new talk_base::RefCountedObject<AudioTrackProxy>(label, signaling_thread);
return track;
}
@ -315,10 +284,9 @@ void AudioTrackProxy::OnMessage(talk_base::Message* msg) {
}
VideoTrackProxy::VideoTrackProxy(const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread)
: MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
video_track_(VideoTrack::CreateRemote(label, ssrc)) {
video_track_(VideoTrack::CreateRemote(label)) {
Init(video_track_);
}
@ -339,12 +307,10 @@ VideoTrackProxy::VideoTrackProxy(LocalVideoTrackInterface* implementation,
talk_base::scoped_refptr<VideoTrackInterface> VideoTrackProxy::CreateRemote(
const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread) {
ASSERT(signaling_thread);
talk_base::RefCountedObject<VideoTrackProxy>* track =
new talk_base::RefCountedObject<VideoTrackProxy>(label, ssrc,
signaling_thread);
new talk_base::RefCountedObject<VideoTrackProxy>(label, signaling_thread);
return track;
}
@ -355,7 +321,7 @@ talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
ASSERT(signaling_thread);
talk_base::RefCountedObject<VideoTrackProxy>* track =
new talk_base::RefCountedObject<VideoTrackProxy>(label, video_device,
signaling_thread);
signaling_thread);
return track;
}
@ -365,7 +331,7 @@ talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
ASSERT(signaling_thread);
talk_base::RefCountedObject<VideoTrackProxy>* track =
new talk_base::RefCountedObject<VideoTrackProxy>(implementation,
signaling_thread);
signaling_thread);
return track;
}

View File

@ -50,11 +50,9 @@ class MediaStreamTrackProxy : public T,
virtual std::string kind() const;
virtual std::string label() const;
virtual uint32 ssrc() const;
virtual bool enabled() const;
virtual MediaStreamTrackInterface::TrackState state() const;
virtual bool set_enabled(bool enable);
virtual bool set_ssrc(uint32 ssrc);
virtual bool set_state(MediaStreamTrackInterface::TrackState new_state);
// Implement Notifier
@ -79,7 +77,6 @@ class AudioTrackProxy : public MediaStreamTrackProxy<LocalAudioTrackInterface> {
public:
static talk_base::scoped_refptr<AudioTrackInterface> CreateRemote(
const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread);
static talk_base::scoped_refptr<LocalAudioTrackInterface> CreateLocal(
const std::string& label,
@ -93,7 +90,6 @@ class AudioTrackProxy : public MediaStreamTrackProxy<LocalAudioTrackInterface> {
protected:
AudioTrackProxy(const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread);
AudioTrackProxy(const std::string& label,
AudioDeviceModule* audio_device,
@ -114,7 +110,6 @@ class VideoTrackProxy : public MediaStreamTrackProxy<LocalVideoTrackInterface> {
public:
static talk_base::scoped_refptr<VideoTrackInterface> CreateRemote(
const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread);
static talk_base::scoped_refptr<LocalVideoTrackInterface> CreateLocal(
const std::string& label,
@ -130,7 +125,6 @@ class VideoTrackProxy : public MediaStreamTrackProxy<LocalVideoTrackInterface> {
protected:
VideoTrackProxy(const std::string& label,
uint32 ssrc,
talk_base::Thread* signaling_thread);
VideoTrackProxy(const std::string& label,
VideoCaptureModule* video_device,

View File

@ -43,7 +43,6 @@ class MediaTrack : public Notifier<T> {
typedef typename T::TrackState TypedTrackState;
virtual std::string label() const { return label_; }
virtual uint32 ssrc() const { return ssrc_; }
virtual TypedTrackState state() const { return state_; }
virtual bool enabled() const { return enabled_; }
virtual bool set_enabled(bool enable) {
@ -53,17 +52,6 @@ class MediaTrack : public Notifier<T> {
Notifier<T>::FireOnChanged();
}
}
virtual bool set_ssrc(uint32 ssrc) {
ASSERT(ssrc_ == 0);
ASSERT(ssrc != 0);
if (ssrc_ != 0)
return false;
ssrc_ = ssrc;
Notifier<T>::FireOnChanged();
return true;
}
virtual bool set_state(TypedTrackState new_state) {
bool fire_on_change = (state_ != new_state);
state_ = new_state;
@ -73,17 +61,15 @@ class MediaTrack : public Notifier<T> {
}
protected:
MediaTrack(const std::string& label, uint32 ssrc)
explicit MediaTrack(const std::string& label)
: enabled_(true),
label_(label),
ssrc_(ssrc),
state_(T::kInitializing) {
}
private:
bool enabled_;
std::string label_;
uint32 ssrc_;
TypedTrackState state_;
};

View File

@ -30,8 +30,8 @@
#include "talk/app/webrtc_dev/mediastreamproxy.h"
#include "talk/app/webrtc_dev/mediastreamtrackproxy.h"
#include "talk/app/webrtc_dev/peerconnectionimpl.h"
#include "talk/app/webrtc_dev/webrtc_devicemanager.h"
#include "talk/base/basicpacketsocketfactory.h"
#include "talk/session/phone/dummydevicemanager.h"
#include "talk/session/phone/webrtcmediaengine.h"
#ifdef WEBRTC_RELATIVE_PATH
@ -157,7 +157,8 @@ bool PeerConnectionFactoryImpl::Initialize_s() {
socket_factory_.reset(
new talk_base::BasicPacketSocketFactory(worker_thread_ptr_));
cricket::DeviceManager* device_manager(new WebRtcDeviceManager());
cricket::DummyDeviceManager* device_manager(
new cricket::DummyDeviceManager());
// TODO(perkj): Need to make sure only one VoE is created inside
// WebRtcMediaEngine.
cricket::WebRtcMediaEngine* webrtc_media_engine(

View File

@ -84,7 +84,7 @@ bool PeerConnectionMessage::Deserialize(std::string message) {
cricket::SessionDescription* desc(new cricket::SessionDescription());
bool result = JsonDeserialize(&type_, &error_code_, desc,
&candidates_, message);
if(!result) {
if (!result) {
delete desc;
desc = NULL;
}

View File

@ -46,27 +46,24 @@ static const char kVideoTrackLabel3[] = "local_video_3";
class PeerConnectionMessageTest: public testing::Test {
public:
PeerConnectionMessageTest()
: ssrc_counter_(0) {
PeerConnectionMessageTest() {
channel_manager_.reset(new cricket::ChannelManager(
talk_base::Thread::Current()));
EXPECT_TRUE(channel_manager_->Init());
session_description_factory_.reset(
new cricket::MediaSessionDescriptionFactory(channel_manager_.get()));
options_.audio_sources.push_back(cricket::SourceParam(++ssrc_counter_,
kAudioTrackLabel1, kStreamLabel1));
options_.video_sources.push_back(cricket::SourceParam(++ssrc_counter_,
kVideoTrackLabel1, kStreamLabel1));
options_.video_sources.push_back(cricket::SourceParam(++ssrc_counter_,
kVideoTrackLabel2, kStreamLabel1));
options_.AddStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrackLabel1,
kStreamLabel1);
options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel1,
kStreamLabel1);
options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel2,
kStreamLabel1);
// kStreamLabel2 with 1 audio track and 1 video track
options_.audio_sources.push_back(cricket::SourceParam(++ssrc_counter_,
kAudioTrackLabel2, kStreamLabel2));
options_.video_sources.push_back(cricket::SourceParam(++ssrc_counter_,
kVideoTrackLabel3, kStreamLabel2));
options_.is_video = true;
options_.AddStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrackLabel2,
kStreamLabel2);
options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel3,
kStreamLabel2);
int port = 1234;
talk_base::SocketAddress address("127.0.0.1", port++);
@ -94,9 +91,6 @@ class PeerConnectionMessageTest: public testing::Test {
session_description_factory_;
cricket::MediaSessionOptions options_;
cricket::Candidates candidates_;
private:
int ssrc_counter_;
};
TEST_F(PeerConnectionMessageTest, Serialize) {

View File

@ -35,6 +35,8 @@
#include "talk/base/messagequeue.h"
#include "talk/session/phone/channelmanager.h"
using talk_base::scoped_refptr;
namespace webrtc {
enum {
@ -77,8 +79,7 @@ PeerConnectionSignaling::PeerConnectionSignaling(
SessionDescriptionProvider* provider)
: signaling_thread_(signaling_thread),
provider_(provider),
state_(kInitializing),
ssrc_counter_(0) {
state_(kInitializing) {
}
PeerConnectionSignaling::~PeerConnectionSignaling() {
@ -169,8 +170,7 @@ void PeerConnectionSignaling::ProcessSignalingMessage(
signaling_message->candidates());
provider_->NegotiationDone();
UpdateRemoteStreams(remote_desc);
talk_base::scoped_refptr<StreamCollectionInterface> streams(
queued_offers_.front());
scoped_refptr<StreamCollectionInterface> streams(queued_offers_.front());
queued_offers_.pop_front();
UpdateSendingLocalStreams(remote_desc, streams);
// Check if we have more offers waiting in the queue.
@ -221,8 +221,8 @@ void PeerConnectionSignaling::OnMessage(talk_base::Message* msg) {
void PeerConnectionSignaling::CreateOffer_s() {
ASSERT(queued_offers_.size() > 0);
talk_base::scoped_refptr<StreamCollectionInterface>
local_streams(queued_offers_.front());
scoped_refptr<StreamCollectionInterface> local_streams(
queued_offers_.front());
cricket::MediaSessionOptions options;
InitMediaSessionOptions(&options, local_streams);
@ -244,7 +244,7 @@ void PeerConnectionSignaling::CreateAnswer_s() {
talk_base::scoped_ptr<PeerConnectionMessage> message(
queued_received_offer_.first);
queued_received_offer_.first = NULL;
talk_base::scoped_refptr<StreamCollectionInterface> local_streams(
scoped_refptr<StreamCollectionInterface> local_streams(
queued_received_offer_.second.release());
// Reset all pending offers. Instead, send the new streams in the answer.
@ -296,35 +296,25 @@ void PeerConnectionSignaling::InitMediaSessionOptions(
cricket::MediaSessionOptions* options,
StreamCollectionInterface* local_streams) {
// In order to be able to receive video,
// the is_video should always be true even if there are not video tracks.
options->is_video = true;
// the has_video should always be true even if there are not video tracks.
options->has_video = true;
for (size_t i = 0; i < local_streams->count(); ++i) {
MediaStreamInterface* stream = local_streams->at(i);
talk_base::scoped_refptr<AudioTracks>
audio_tracks = stream->audio_tracks();
scoped_refptr<AudioTracks> audio_tracks(stream->audio_tracks());
// For each audio track in the stream, add it to the MediaSessionOptions.
for (size_t j = 0; j < audio_tracks->count(); ++j) {
talk_base::scoped_refptr<MediaStreamTrackInterface> track =
audio_tracks->at(j);
if (track->ssrc() == 0)
track->set_ssrc(++ssrc_counter_);
options->audio_sources.push_back(cricket::SourceParam(track->ssrc(),
track->label(),
stream->label()));
scoped_refptr<MediaStreamTrackInterface> track(audio_tracks->at(j));
options->AddStream(cricket::MEDIA_TYPE_AUDIO, track->label(),
stream->label());
}
talk_base::scoped_refptr<VideoTracks>
video_tracks = stream->video_tracks();
scoped_refptr<VideoTracks> video_tracks(stream->video_tracks());
// For each video track in the stream, add it to the MediaSessionOptions.
for (size_t j = 0; j < video_tracks->count(); ++j) {
talk_base::scoped_refptr<MediaStreamTrackInterface> track =
video_tracks->at(j);
if (track->ssrc() == 0)
track->set_ssrc(++ssrc_counter_);
options->video_sources.push_back(cricket::SourceParam(track->ssrc(),
track->label(),
stream->label()));
scoped_refptr<MediaStreamTrackInterface> track(video_tracks->at(j));
options->AddStream(cricket::MEDIA_TYPE_VIDEO, track->label(),
stream->label());
}
}
}
@ -337,7 +327,7 @@ void PeerConnectionSignaling::InitMediaSessionOptions(
void PeerConnectionSignaling::UpdateRemoteStreams(
const cricket::SessionDescription* remote_desc) {
RemoteStreamMap current_streams;
typedef std::pair<std::string, talk_base::scoped_refptr<MediaStreamProxy> >
typedef std::pair<std::string, scoped_refptr<MediaStreamProxy> >
MediaStreamPair;
const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
@ -346,31 +336,28 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
for (cricket::Sources::const_iterator it = audio_desc->sources().begin();
it != audio_desc->sources().end();
++it) {
for (cricket::StreamParamsVec::const_iterator it =
audio_desc->streams().begin();
it != audio_desc->streams().end(); ++it) {
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(it->cname);
remote_streams_.find(it->sync_label);
RemoteStreamMap::iterator new_streams_it =
current_streams.find(it->cname);
current_streams.find(it->sync_label);
if (old_streams_it == remote_streams_.end()) {
if (new_streams_it == current_streams.end()) {
// New stream
talk_base::scoped_refptr<MediaStreamProxy> stream(
MediaStreamProxy::Create(it->cname, signaling_thread_));
scoped_refptr<MediaStreamProxy> stream(
MediaStreamProxy::Create(it->sync_label, signaling_thread_));
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->cname);
new_streams_it = current_streams.find(it->sync_label);
}
talk_base::scoped_refptr<AudioTrackInterface> track(
AudioTrackProxy::CreateRemote(it->description, it->ssrc,
signaling_thread_));
scoped_refptr<AudioTrackInterface> track(
AudioTrackProxy::CreateRemote(it->name, signaling_thread_));
track->set_state(MediaStreamTrackInterface::kLive);
new_streams_it->second->AddTrack(track);
} else {
talk_base::scoped_refptr<MediaStreamProxy> stream(
old_streams_it->second);
scoped_refptr<MediaStreamProxy> stream(old_streams_it->second);
current_streams.insert(MediaStreamPair(stream->label(), stream));
}
}
@ -382,9 +369,9 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
for (cricket::Sources::const_iterator it = video_desc->sources().begin();
it != video_desc->sources().end();
++it) {
for (cricket::StreamParamsVec::const_iterator it =
video_desc->streams().begin();
it != video_desc->streams().end(); ++it) {
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(it->cname);
RemoteStreamMap::iterator new_streams_it =
@ -393,20 +380,18 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
if (old_streams_it == remote_streams_.end()) {
if (new_streams_it == current_streams.end()) {
// New stream
talk_base::scoped_refptr<MediaStreamProxy> stream(
scoped_refptr<MediaStreamProxy> stream(
MediaStreamProxy::Create(it->cname, signaling_thread_));
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->cname);
}
talk_base::scoped_refptr<VideoTrackInterface> track(
VideoTrackProxy::CreateRemote(it->description, it->ssrc,
signaling_thread_));
scoped_refptr<VideoTrackInterface> track(
VideoTrackProxy::CreateRemote(it->name, signaling_thread_));
new_streams_it->second->AddTrack(track);
track->set_state(MediaStreamTrackInterface::kLive);
} else {
talk_base::scoped_refptr<MediaStreamProxy>
stream(old_streams_it->second);
scoped_refptr<MediaStreamProxy> stream(old_streams_it->second);
current_streams.insert(MediaStreamPair(stream->label(), stream));
}
}
@ -417,7 +402,7 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
for (RemoteStreamMap::iterator it = current_streams.begin();
it != current_streams.end();
++it) {
talk_base::scoped_refptr<MediaStreamProxy> new_stream(it->second);
scoped_refptr<MediaStreamProxy> new_stream(it->second);
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(new_stream->label());
if (old_streams_it == remote_streams_.end()) {
@ -432,18 +417,16 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
for (RemoteStreamMap::iterator it = remote_streams_.begin();
it != remote_streams_.end();
++it) {
talk_base::scoped_refptr<MediaStreamProxy> old_stream(it->second);
scoped_refptr<MediaStreamProxy> old_stream(it->second);
RemoteStreamMap::iterator new_streams_it =
current_streams.find(old_stream->label());
if (new_streams_it == current_streams.end()) {
old_stream->set_ready_state(MediaStreamInterface::kEnded);
talk_base::scoped_refptr<AudioTracks>
audio_tracklist(old_stream->audio_tracks());
scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
for (size_t j = 0; j < audio_tracklist->count(); ++j) {
audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
talk_base::scoped_refptr<VideoTracks>
video_tracklist(old_stream->video_tracks());
scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
for (size_t j = 0; j < video_tracklist->count(); ++j) {
video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
@ -461,24 +444,19 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
void PeerConnectionSignaling::UpdateSendingLocalStreams(
const cricket::SessionDescription* answer_desc,
StreamCollectionInterface* negotiated_streams) {
typedef std::pair<std::string,
talk_base::scoped_refptr<MediaStreamInterface> >
typedef std::pair<std::string, scoped_refptr<MediaStreamInterface> >
MediaStreamPair;
LocalStreamMap current_local_streams;
for (size_t i = 0; i < negotiated_streams->count(); ++i) {
talk_base::scoped_refptr<MediaStreamInterface> stream =
negotiated_streams->at(i);
talk_base::scoped_refptr<AudioTracks> audiotracklist(
stream->audio_tracks());
talk_base::scoped_refptr<VideoTracks> videotracklist(
stream->video_tracks());
scoped_refptr<MediaStreamInterface> stream(negotiated_streams->at(i));
scoped_refptr<AudioTracks> audiotracklist(stream->audio_tracks());
scoped_refptr<VideoTracks> videotracklist(stream->video_tracks());
bool stream_ok = false; // A stream is ok if at least one track succeed.
// Update tracks based on its type.
for (size_t j = 0; j < audiotracklist->count(); ++j) {
talk_base::scoped_refptr<MediaStreamTrackInterface> track =
audiotracklist->at(j);
scoped_refptr<MediaStreamTrackInterface> track(audiotracklist->at(j));
const cricket::ContentInfo* audio_content =
GetFirstAudioContent(answer_desc);
if (!audio_content) { // The remote does not accept audio.
@ -489,7 +467,6 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
const cricket::AudioContentDescription* audio_desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
// TODO(perkj): Do we need to store the codec in the track?
if (audio_desc->codecs().size() <= 0) {
// No common codec.
track->set_state(MediaStreamTrackInterface::kFailed);
@ -499,8 +476,7 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
}
for (size_t j = 0; j < videotracklist->count(); ++j) {
talk_base::scoped_refptr<MediaStreamTrackInterface> track =
videotracklist->at(j);
scoped_refptr<MediaStreamTrackInterface> track(videotracklist->at(j));
const cricket::ContentInfo* video_content =
GetFirstVideoContent(answer_desc);
if (!video_content) { // The remote does not accept video.
@ -536,18 +512,16 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
for (LocalStreamMap::iterator it = local_streams_.begin();
it != local_streams_.end();
++it) {
talk_base::scoped_refptr<MediaStreamInterface> old_stream(it->second);
scoped_refptr<MediaStreamInterface> old_stream(it->second);
MediaStreamInterface* new_streams =
negotiated_streams->find(old_stream->label());
if (new_streams == NULL) {
old_stream->set_ready_state(MediaStreamInterface::kEnded);
talk_base::scoped_refptr<AudioTracks> audio_tracklist(
old_stream->audio_tracks());
scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
for (size_t j = 0; j < audio_tracklist->count(); ++j) {
audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
talk_base::scoped_refptr<VideoTracks> video_tracklist(
old_stream->video_tracks());
scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
for (size_t j = 0; j < video_tracklist->count(); ++j) {
video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}

View File

@ -148,7 +148,6 @@ class PeerConnectionSignaling : public WebRtcSessionObserver,
talk_base::Thread* signaling_thread_;
SessionDescriptionProvider* provider_;
State state_;
uint32 ssrc_counter_;
typedef std::map<std::string, talk_base::scoped_refptr<MediaStreamProxy> >
RemoteStreamMap;

View File

@ -32,14 +32,14 @@ namespace webrtc {
static const char kVideoTrackKind[] = "video";
VideoTrack::VideoTrack(const std::string& label, uint32 ssrc)
: MediaTrack<LocalVideoTrackInterface>(label, ssrc),
VideoTrack::VideoTrack(const std::string& label)
: MediaTrack<LocalVideoTrackInterface>(label),
video_device_(NULL) {
}
VideoTrack::VideoTrack(const std::string& label,
VideoCaptureModule* video_device)
: MediaTrack<LocalVideoTrackInterface>(label, 0),
: MediaTrack<LocalVideoTrackInterface>(label),
video_device_(video_device) {
}
@ -62,10 +62,9 @@ std::string VideoTrack::kind() const {
}
talk_base::scoped_refptr<VideoTrack> VideoTrack::CreateRemote(
const std::string& label,
uint32 ssrc) {
const std::string& label) {
talk_base::RefCountedObject<VideoTrack>* track =
new talk_base::RefCountedObject<VideoTrack>(label, ssrc);
new talk_base::RefCountedObject<VideoTrack>(label);
return track;
}

View File

@ -47,8 +47,7 @@ class VideoTrack : public MediaTrack<LocalVideoTrackInterface> {
public:
// Create a video track used for remote video tracks.
static talk_base::scoped_refptr<VideoTrack> CreateRemote(
const std::string& label,
uint32 ssrc);
const std::string& label);
// Create a video track used for local video tracks.
static talk_base::scoped_refptr<VideoTrack> CreateLocal(
const std::string& label,
@ -61,7 +60,7 @@ class VideoTrack : public MediaTrack<LocalVideoTrackInterface> {
virtual std::string kind() const;
protected:
VideoTrack(const std::string& label, uint32 ssrc);
explicit VideoTrack(const std::string& label);
VideoTrack(const std::string& label, VideoCaptureModule* video_device);
private:

View File

@ -1,76 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/webrtc_devicemanager.h"
using cricket::Device;
using cricket::DeviceManager;
const int WebRtcDeviceManager::kDefaultDeviceId = -1;
WebRtcDeviceManager::WebRtcDeviceManager()
: DeviceManager(),
default_device_(DeviceManager::kDefaultDeviceName, kDefaultDeviceId) {
}
WebRtcDeviceManager::~WebRtcDeviceManager() {
Terminate();
}
bool WebRtcDeviceManager::Init() {
return true;
}
void WebRtcDeviceManager::Terminate() {
}
bool WebRtcDeviceManager::GetAudioInputDevices(std::vector<Device>* devs) {
return GetDefaultDevices(devs);
}
bool WebRtcDeviceManager::GetAudioOutputDevices(std::vector<Device>* devs) {
return GetDefaultDevices(devs);
}
bool WebRtcDeviceManager::GetVideoCaptureDevices(std::vector<Device>* devs) {
return GetDefaultDevices(devs);
}
bool WebRtcDeviceManager::GetDefaultVideoCaptureDevice(Device* device) {
*device = default_device_;
return true;
}
bool WebRtcDeviceManager::GetDefaultDevices(
std::vector<cricket::Device>* devs) {
if (!devs)
return false;
devs->clear();
devs->push_back(default_device_);
return true;
}

View File

@ -1,53 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_DEVICEMANAGER_
#define TALK_APP_WEBRTC_DEVICEMANAGER_
#include <vector>
#include "talk/session/phone/devicemanager.h"
class WebRtcDeviceManager : public cricket::DeviceManager {
public:
WebRtcDeviceManager();
~WebRtcDeviceManager();
virtual bool Init();
virtual void Terminate();
virtual bool GetAudioInputDevices(std::vector<cricket::Device>* devs);
virtual bool GetAudioOutputDevices(std::vector<cricket::Device>* devs);
virtual bool GetVideoCaptureDevices(std::vector<cricket::Device>* devs);
virtual bool GetDefaultVideoCaptureDevice(cricket::Device* device);
private:
static const int kDefaultDeviceId;
bool GetDefaultDevices(std::vector<cricket::Device>* devs);
cricket::Device default_device_;
};
#endif // TALK_APP_WEBRTC_DEVICEMANAGER_

View File

@ -298,13 +298,16 @@ void BuildTrack(const cricket::SessionDescription* sdp,
const cricket::MediaContentDescription* desc =
static_cast<const cricket::MediaContentDescription*>(
content->description);
for (cricket::Sources::const_iterator it = desc->sources().begin();
it != desc->sources().end();
for (cricket::StreamParamsVec::const_iterator it = desc->streams().begin();
it != desc->streams().end();
++it) {
// TODO(ronghuawu): Support ssrcsgroups.
Json::Value track;
Append(&track, "ssrc", it->ssrc);
ASSERT(it->ssrcs.size() == 1);
Append(&track, "ssrc", it->ssrcs[0]);
Append(&track, "cname", it->cname);
Append(&track, "label", it->description);
Append(&track, "stream_label", it->sync_label);
Append(&track, "label", it->name);
tracks->push_back(track);
}
}
@ -567,20 +570,25 @@ bool ParseTrack(const Json::Value& content,
tracks.begin();
std::vector<Json::Value>::const_iterator iter_end =
tracks.end();
cricket::Sources sources;
for (; iter != iter_end; ++iter) {
uint32 ssrc;
std::string label;
std::string cname;
std::string stream_label;
if (!GetUIntFromJsonObject(*iter, "ssrc", &ssrc))
return false;
// label is optional, it will be empty string if doesn't exist
GetStringFromJsonObject(*iter, "label", &label);
if (!GetStringFromJsonObject(*iter, "cname", &cname))
return false;
sources.push_back(cricket::SourceParam(ssrc, label, cname));
// stream_label is optional, it will be the same as cname if it
// doesn't exist.
GetStringFromJsonObject(*iter, "stream_label", &stream_label);
if (stream_label.empty())
stream_label = cname;
content_desc->AddStream(cricket::StreamParams(label, ssrc, cname,
stream_label));
}
content_desc->set_sources(sources);
return true;
}

View File

@ -234,7 +234,7 @@ bool WebRtcSession::CheckCandidate(const std::string& name) {
return ret;
}
void WebRtcSession::SetCaptureDevice(uint32 ssrc,
void WebRtcSession::SetCaptureDevice(const std::string& name,
VideoCaptureModule* camera) {
// should be called from a signaling thread
ASSERT(signaling_thread()->IsCurrent());
@ -242,23 +242,25 @@ void WebRtcSession::SetCaptureDevice(uint32 ssrc,
// TODO(mallinath): Refactor this when there is support for multiple cameras.
// Register the the VideoCapture Module.
video_channel_->SetCaptureDevice(ssrc, camera);
// TODO(mallinath): Fix SetCaptureDevice.
video_channel_->SetCaptureDevice(0, camera);
// Actually associate the video capture module with the ViE channel.
channel_manager_->SetVideoOptions("");
}
void WebRtcSession::SetLocalRenderer(uint32 ssrc,
void WebRtcSession::SetLocalRenderer(const std::string& name,
cricket::VideoRenderer* renderer) {
ASSERT(signaling_thread()->IsCurrent());
video_channel_->SetLocalRenderer(ssrc, renderer);
// TODO(mallinath): Fix SetLocalRenderer.
video_channel_->SetLocalRenderer(0, renderer);
}
void WebRtcSession::SetRemoteRenderer(uint32 ssrc,
void WebRtcSession::SetRemoteRenderer(const std::string& name,
cricket::VideoRenderer* renderer) {
ASSERT(signaling_thread()->IsCurrent());
//TODO(mallinath): Only the ssrc = 0 is supported at the moment.
// TODO(mallinath): Only the ssrc = 0 is supported at the moment.
// Only one channel.
video_channel_->SetRenderer(0, renderer);
}
@ -267,7 +269,7 @@ const cricket::SessionDescription* WebRtcSession::ProvideOffer(
const cricket::MediaSessionOptions& options) {
// TODO(mallinath) - Sanity check for options.
cricket::SessionDescription* offer(
session_desc_factory_.CreateOffer(options));
session_desc_factory_.CreateOffer(options, local_description()));
set_local_description(offer);
return offer;
}
@ -284,7 +286,8 @@ const cricket::SessionDescription* WebRtcSession::SetRemoteSessionDescription(
const cricket::SessionDescription* WebRtcSession::ProvideAnswer(
const cricket::MediaSessionOptions& options) {
cricket::SessionDescription* answer(
session_desc_factory_.CreateAnswer(remote_description(), options));
session_desc_factory_.CreateAnswer(remote_description(), options,
local_description()));
set_local_description(answer);
return answer;
}
@ -312,7 +315,7 @@ void WebRtcSession::NegotiationDone() {
// we can remove stream from a session by muting it.
// TODO(mallinath) - Change needed when multiple send streams support
// is available.
voice_channel_->Mute(audio_content->sources().size() == 0);
voice_channel_->Mute(audio_content->streams().size() == 0);
}
const cricket::ContentInfo* video_info =
@ -325,7 +328,7 @@ void WebRtcSession::NegotiationDone() {
// we can remove stream from a session by muting it.
// TODO(mallinath) - Change needed when multiple send streams support
// is available.
video_channel_->Mute(video_content->sources().size() == 0);
video_channel_->Mute(video_content->streams().size() == 0);
}
}

View File

@ -87,10 +87,11 @@ class WebRtcSession : public cricket::BaseSession,
virtual void NegotiationDone();
// Implements MediaProviderInterface.
virtual void SetCaptureDevice(uint32 ssrc, VideoCaptureModule* camera);
virtual void SetLocalRenderer(uint32 ssrc,
virtual void SetCaptureDevice(const std::string& name,
VideoCaptureModule* camera);
virtual void SetLocalRenderer(const std::string& name,
cricket::VideoRenderer* renderer);
virtual void SetRemoteRenderer(uint32 ssrc,
virtual void SetRemoteRenderer(const std::string& name,
cricket::VideoRenderer* renderer);
// Transport related callbacks, override from cricket::BaseSession.

View File

@ -84,27 +84,16 @@ class WebRtcSessionTest : public testing::Test {
EXPECT_TRUE(InitializeSession());
}
// Creates an offer with one source ssrc, if ssrc = 0 no source info
// video ssrc + 1
void CreateOffer(uint32 ssrc) {
cricket::MediaSessionOptions options;
options.is_video = true;
if (ssrc != 0) {
options.audio_sources.push_back(cricket::SourceParam(ssrc, "", ""));
++ssrc;
options.video_sources.push_back(cricket::SourceParam(ssrc, "", ""));
}
// TODO(mallinath) - Adding test cases for session.
local_desc_ = desc_provider_->ProvideOffer(options);
ASSERT_TRUE(local_desc_ != NULL);
}
void CreateAnswer(uint32 ssrc) {
cricket::MediaSessionOptions options;
options.is_video = true;
if (ssrc != 0) {
options.audio_sources.push_back(cricket::SourceParam(ssrc, "", ""));
++ssrc;
options.video_sources.push_back(cricket::SourceParam(ssrc, "", ""));
}
// TODO(mallinath) - Adding test cases for session.
remote_desc_ = desc_factory_->CreateAnswer(local_desc_, options);
ASSERT_TRUE(remote_desc_ != NULL);
}

View File

@ -29,6 +29,7 @@
#include "talk/base/helpers.h"
#include "talk/base/logging.h"
#include "talk/base/scoped_ptr.h"
#include "talk/p2p/base/constants.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/session/phone/cryptoparams.h"
@ -41,6 +42,8 @@ const char kInline[] = "inline:";
namespace cricket {
using talk_base::scoped_ptr;
static bool CreateCryptoParams(int tag, const std::string& cipher,
CryptoParams *out) {
std::string key;
@ -102,6 +105,171 @@ static bool SelectCrypto(const MediaContentDescription* offer,
return false;
}
static const StreamParams* FindStreamParamsByName(
const StreamParamsVec& params_vec,
const std::string& name) {
for (StreamParamsVec::const_iterator it = params_vec.begin();
it != params_vec.end(); ++it) {
if (it->name == name)
return &*it;
}
return NULL;
}
static const StreamParams* FindFirstStreamParamsByCname(
const StreamParamsVec& params_vec,
const std::string& cname) {
for (StreamParamsVec::const_iterator it = params_vec.begin();
it != params_vec.end(); ++it) {
if (cname == it->cname)
return &*it;
}
return NULL;
}
static const StreamParams* FindStreamParamsBySsrc(
const StreamParamsVec& params_vec,
uint32 ssrc) {
for (StreamParamsVec::const_iterator stream_it = params_vec.begin();
stream_it != params_vec.end(); ++stream_it) {
const std::vector<uint32>& ssrcs = stream_it->ssrcs;
for (std::vector<uint32>::const_iterator ssrc_it = ssrcs.begin();
ssrc_it != ssrcs.end(); ++ssrc_it) {
if (ssrc == *ssrc_it)
return &*stream_it;
}
}
return NULL;
}
// Generates a new CNAME or the CNAME of an already existing StreamParams
// if a StreamParams exist for another Stream in streams with sync_label
// sync_label.
static bool GenerateCname(const StreamParamsVec& params_vec,
const MediaSessionOptions::Streams& streams,
const std::string& synch_label,
std::string* cname) {
ASSERT(cname);
if (!cname)
return false;
// Check if a CNAME exist for any of the other synched streams.
for (MediaSessionOptions::Streams::const_iterator stream_it = streams.begin();
stream_it != streams.end() ; ++stream_it) {
if (synch_label != stream_it->sync_label)
continue;
const StreamParams* param = FindStreamParamsByName(params_vec,
stream_it->name);
if (param) {
*cname = param->cname;
return true;
}
}
// No other stream seems to exist that we should sync with.
// Generate a random string for the RTCP CNAME, as stated in RFC 6222.
// This string is only used for synchronization, and therefore is opaque.
do {
if (!talk_base::CreateRandomString(16, cname)) {
ASSERT(false);
return false;
}
} while (FindFirstStreamParamsByCname(params_vec, *cname));
return true;
}
// Generate a new SSRC and make sure it does not exist in params_vec.
static uint32 GenerateSsrc(const StreamParamsVec& params_vec) {
uint32 ssrc = 0;
do {
ssrc = talk_base::CreateRandomNonZeroId();
} while (FindStreamParamsBySsrc(params_vec, ssrc));
return ssrc;
}
// Finds all StreamParams of all media types and attach them to stream_params.
static void GetCurrentStreamParams(const SessionDescription* sdesc,
StreamParamsVec* stream_params) {
if (!sdesc)
return;
const ContentInfos& contents = sdesc->contents();
for (ContentInfos::const_iterator content = contents.begin();
content != contents.end(); content++) {
if (!IsAudioContent(&*content) && !IsVideoContent(&*content))
continue;
const MediaContentDescription* media =
static_cast<const MediaContentDescription*>(
content->description);
const StreamParamsVec& streams = media->streams();
for (StreamParamsVec::const_iterator it = streams.begin();
it != streams.end(); ++it) {
stream_params->push_back(*it);
}
}
}
// Adds a StreamParams for each Stream in Streams with media type
// media_type to content_description.
// current_parms - All currently known StreamParams of any media type.
static bool AddStreamParams(
MediaType media_type,
const MediaSessionOptions::Streams& streams,
StreamParamsVec* current_params,
MediaContentDescription* content_description) {
for (MediaSessionOptions::Streams::const_iterator stream_it = streams.begin();
stream_it != streams.end(); ++stream_it) {
if (stream_it->type != media_type)
continue; // Wrong media type.
const StreamParams* params = FindStreamParamsByName(*current_params,
stream_it->name);
if (!params) {
// This is a new stream.
// Get a CNAME. Either new or same as one of the other synched streams.
std::string cname;
if (!GenerateCname(*current_params, streams, stream_it->sync_label,
&cname)) {
return false;
}
uint32 ssrc = GenerateSsrc(*current_params);
// TODO(perkj): Generate the more complex types of stream_params.
StreamParams stream_param(stream_it->name, ssrc, cname,
stream_it->sync_label);
content_description->AddStream(stream_param);
// Store the new StreamParams in current_params.
// This is necessary so that we can use the CNAME for other media types.
current_params->push_back(stream_param);
} else {
content_description->AddStream(*params);
}
}
return true;
}
void MediaSessionOptions::AddStream(MediaType type,
const std::string& name,
const std::string& sync_label) {
streams.push_back(Stream(type, name, sync_label));
if (type == MEDIA_TYPE_VIDEO)
has_video = true;
else if (type == MEDIA_TYPE_AUDIO)
has_audio = true;
}
void MediaSessionOptions::RemoveStream(MediaType type,
const std::string& name) {
Streams::iterator stream_it = streams.begin();
for (; stream_it != streams.end(); ++stream_it) {
if (stream_it->type == type && stream_it->name == name) {
streams.erase(stream_it);
break;
}
}
ASSERT(stream_it != streams.end());
}
MediaSessionDescriptionFactory::MediaSessionDescriptionFactory()
: secure_(SEC_DISABLED) {
}
@ -115,28 +283,56 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
const MediaSessionOptions& options) {
SessionDescription* offer = new SessionDescription();
return CreateOffer(options, NULL);
}
if (true) { // TODO: Allow audio to be optional
AudioContentDescription* audio = new AudioContentDescription();
SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
const MediaSessionOptions& options,
const SessionDescription* current_description) {
scoped_ptr<SessionDescription> offer(new SessionDescription());
StreamParamsVec current_params;
GetCurrentStreamParams(current_description, &current_params);
if (options.has_audio) {
scoped_ptr<AudioContentDescription> audio(new AudioContentDescription());
for (AudioCodecs::const_iterator codec = audio_codecs_.begin();
codec != audio_codecs_.end(); ++codec) {
audio->AddCodec(*codec);
}
audio->SortCodecs();
audio->set_ssrc(talk_base::CreateRandomNonZeroId());
if (!AddStreamParams(MEDIA_TYPE_AUDIO, options.streams, &current_params,
audio.get())) {
return NULL; // Abort, something went seriously wrong.
}
if (options.streams.empty()) {
// TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
audio->set_ssrc(talk_base::CreateRandomNonZeroId());
}
audio->set_rtcp_mux(true);
audio->set_lang(lang_);
audio->set_sources(options.audio_sources);
if (secure() != SEC_DISABLED) {
CryptoParamsVec audio_cryptos;
if (GetSupportedAudioCryptos(&audio_cryptos)) {
for (CryptoParamsVec::const_iterator crypto = audio_cryptos.begin();
crypto != audio_cryptos.end(); ++crypto) {
audio->AddCrypto(*crypto);
if (current_description) {
// Copy crypto parameters from the previous offer.
const ContentInfo* info =
GetFirstAudioContent(current_description);
if (info) {
const AudioContentDescription* desc =
static_cast<const AudioContentDescription*>(info->description);
audio_cryptos = desc->cryptos();
}
}
if (audio_cryptos.empty())
GetSupportedAudioCryptos(&audio_cryptos); // Generate new cryptos.
for (CryptoParamsVec::const_iterator crypto = audio_cryptos.begin();
crypto != audio_cryptos.end(); ++crypto) {
audio->AddCrypto(*crypto);
}
if (secure() == SEC_REQUIRED) {
if (audio->cryptos().empty()) {
return NULL; // Abort, crypto required but none found.
@ -145,31 +341,48 @@ SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
}
}
offer->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio);
offer->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio.release());
}
// add video codecs, if this is a video call
if (options.is_video) {
VideoContentDescription* video = new VideoContentDescription();
if (options.has_video) {
scoped_ptr<VideoContentDescription> video(new VideoContentDescription());
for (VideoCodecs::const_iterator codec = video_codecs_.begin();
codec != video_codecs_.end(); ++codec) {
video->AddCodec(*codec);
}
video->SortCodecs();
video->set_ssrc(talk_base::CreateRandomNonZeroId());
if (!AddStreamParams(MEDIA_TYPE_VIDEO, options.streams, &current_params,
video.get())) {
return NULL; // Abort, something went seriously wrong.
}
if (options.streams.empty()) {
// TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
video->set_ssrc(talk_base::CreateRandomNonZeroId());
}
video->set_bandwidth(options.video_bandwidth);
video->set_rtcp_mux(true);
video->set_sources(options.video_sources);
if (secure() != SEC_DISABLED) {
CryptoParamsVec video_cryptos;
if (GetSupportedVideoCryptos(&video_cryptos)) {
for (CryptoParamsVec::const_iterator crypto = video_cryptos.begin();
crypto != video_cryptos.end(); ++crypto) {
video->AddCrypto(*crypto);
if (current_description) {
// Copy crypto parameters from the previous offer.
const ContentInfo* info =
GetFirstVideoContent(current_description);
if (info) {
const VideoContentDescription* desc =
static_cast<const VideoContentDescription*>(info->description);
video_cryptos = desc->cryptos();
}
}
if (video_cryptos.empty())
GetSupportedVideoCryptos(&video_cryptos); // Generate new crypto.
for (CryptoParamsVec::const_iterator crypto = video_cryptos.begin();
crypto != video_cryptos.end(); ++crypto) {
video->AddCrypto(*crypto);
}
if (secure() == SEC_REQUIRED) {
if (video->cryptos().empty()) {
return NULL; // Abort, crypto required but none found.
@ -178,24 +391,35 @@ SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
}
}
offer->AddContent(CN_VIDEO, NS_JINGLE_RTP, video);
offer->AddContent(CN_VIDEO, NS_JINGLE_RTP, video.release());
}
return offer;
return offer.release();
}
SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
const SessionDescription* offer, const MediaSessionOptions& options) {
const SessionDescription* offer,
const MediaSessionOptions& options) {
return CreateAnswer(offer, options, NULL);
}
SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
const SessionDescription* offer, const MediaSessionOptions& options,
const SessionDescription* current_description) {
// The answer contains the intersection of the codecs in the offer with the
// codecs we support, ordered by our local preference. As indicated by
// XEP-0167, we retain the same payload ids from the offer in the answer.
SessionDescription* accept = new SessionDescription();
scoped_ptr<SessionDescription> accept(new SessionDescription());
StreamParamsVec current_params;
GetCurrentStreamParams(current_description, &current_params);
const ContentInfo* audio_content = GetFirstAudioContent(offer);
if (audio_content) {
if (audio_content && options.has_audio) {
const AudioContentDescription* audio_offer =
static_cast<const AudioContentDescription*>(audio_content->description);
AudioContentDescription* audio_accept = new AudioContentDescription();
scoped_ptr<AudioContentDescription> audio_accept(
new AudioContentDescription());
for (AudioCodecs::const_iterator ours = audio_codecs_.begin();
ours != audio_codecs_.end(); ++ours) {
for (AudioCodecs::const_iterator theirs = audio_offer->codecs().begin();
@ -209,14 +433,39 @@ SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
}
audio_accept->SortCodecs();
audio_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
if (!AddStreamParams(MEDIA_TYPE_AUDIO, options.streams, &current_params,
audio_accept.get())) {
return NULL; // Abort, something went seriously wrong.
}
if (options.streams.empty()) {
// TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
audio_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
}
audio_accept->set_rtcp_mux(audio_offer->rtcp_mux());
audio_accept->set_sources(options.audio_sources);
if (secure() != SEC_DISABLED) {
CryptoParams crypto;
if (SelectCrypto(audio_offer, &crypto)) {
if (current_description) {
// Check if this crypto already exist in the previous
// session description. Use it in that case.
const ContentInfo* info =
GetFirstAudioContent(current_description);
if (info) {
const AudioContentDescription* desc =
static_cast<const AudioContentDescription*>(info->description);
const CryptoParamsVec& cryptos = desc->cryptos();
for (CryptoParamsVec::const_iterator it = cryptos.begin();
it != cryptos.end(); ++it) {
if (crypto.Matches(*it)) {
crypto = *it;
break;
}
}
}
}
audio_accept->AddCrypto(crypto);
}
}
@ -225,14 +474,18 @@ SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
(audio_offer->crypto_required() || secure() == SEC_REQUIRED)) {
return NULL; // Fails the session setup.
}
accept->AddContent(audio_content->name, audio_content->type, audio_accept);
accept->AddContent(audio_content->name, audio_content->type,
audio_accept.release());
} else {
LOG(LS_INFO) << "Audio is not supported in answer";
}
const ContentInfo* video_content = GetFirstVideoContent(offer);
if (video_content && options.is_video) {
if (video_content && options.has_video) {
const VideoContentDescription* video_offer =
static_cast<const VideoContentDescription*>(video_content->description);
VideoContentDescription* video_accept = new VideoContentDescription();
scoped_ptr<VideoContentDescription> video_accept(
new VideoContentDescription());
for (VideoCodecs::const_iterator ours = video_codecs_.begin();
ours != video_codecs_.end(); ++ours) {
for (VideoCodecs::const_iterator theirs = video_offer->codecs().begin();
@ -244,17 +497,40 @@ SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
}
}
}
if (!AddStreamParams(MEDIA_TYPE_VIDEO, options.streams, &current_params,
video_accept.get())) {
return NULL; // Abort, something went seriously wrong.
}
video_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
if (options.streams.empty()) {
// TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
video_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
}
video_accept->set_bandwidth(options.video_bandwidth);
video_accept->set_rtcp_mux(video_offer->rtcp_mux());
video_accept->SortCodecs();
video_accept->set_sources(options.video_sources);
if (secure() != SEC_DISABLED) {
CryptoParams crypto;
if (SelectCrypto(video_offer, &crypto)) {
if (current_description) {
// Check if this crypto already exist in the previous
// session description. Use it in that case.
const ContentInfo* info = GetFirstVideoContent(current_description);
if (info) {
const VideoContentDescription* desc =
static_cast<const VideoContentDescription*>(info->description);
const CryptoParamsVec& cryptos = desc->cryptos();
for (CryptoParamsVec::const_iterator it = cryptos.begin();
it != cryptos.end(); ++it) {
if (crypto.Matches(*it)) {
crypto = *it;
break;
}
}
}
}
video_accept->AddCrypto(crypto);
}
}
@ -263,9 +539,12 @@ SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
(video_offer->crypto_required() || secure() == SEC_REQUIRED)) {
return NULL; // Fails the session setup.
}
accept->AddContent(video_content->name, video_content->type, video_accept);
accept->AddContent(video_content->name, video_content->type,
video_accept.release());
} else {
LOG(LS_INFO) << "Video is not supported in answer";
}
return accept;
return accept.release();
}
static bool IsMediaContent(const ContentInfo* content, MediaType media_type) {

View File

@ -37,6 +37,7 @@
#include "talk/session/phone/codec.h"
#include "talk/session/phone/cryptoparams.h"
#include "talk/session/phone/mediachannel.h"
#include "talk/session/phone/streamparams.h"
#include "talk/p2p/base/sessiondescription.h"
namespace cricket {
@ -45,6 +46,7 @@ class ChannelManager;
typedef std::vector<AudioCodec> AudioCodecs;
typedef std::vector<VideoCodec> VideoCodecs;
typedef std::vector<CryptoParams> CryptoParamsVec;
typedef std::vector<StreamParams> StreamParamsVec;
// SEC_ENABLED and SEC_REQUIRED should only be used if the session
// was negotiated over TLS, to protect the inline crypto material
@ -64,37 +66,48 @@ enum SecureMediaPolicy {
SEC_REQUIRED
};
// Structure to describe a sending source.
struct SourceParam {
SourceParam(uint32 ssrc,
const std::string description,
const std::string& cname)
: ssrc(ssrc), description(description), cname(cname) {}
uint32 ssrc;
std::string description;
std::string cname;
enum MediaType {
MEDIA_TYPE_AUDIO,
MEDIA_TYPE_VIDEO
};
typedef std::vector<SourceParam> Sources;
// Options to control how session descriptions are generated.
const int kAutoBandwidth = -1;
struct MediaSessionOptions {
MediaSessionOptions() :
is_video(false),
has_audio(true), // Audio enabled by default.
has_video(false),
is_muc(false),
video_bandwidth(kAutoBandwidth) {
}
Sources audio_sources;
Sources video_sources;
bool is_video;
// Add a stream with MediaType type and id name.
// All streams with the same sync_label will get the same CNAME.
// All names must be unique.
void AddStream(MediaType type,
const std::string& name,
const std::string& sync_label);
void RemoveStream(MediaType type, const std::string& name);
bool has_audio;
bool has_video;
bool is_muc;
// bps. -1 == auto.
int video_bandwidth;
};
enum MediaType {
MEDIA_TYPE_AUDIO,
MEDIA_TYPE_VIDEO
struct Stream {
Stream(MediaType type,
const std::string& name,
const std::string& sync_label)
: type(type), name(name), sync_label(sync_label) {
}
MediaType type;
std::string name;
std::string sync_label;
};
typedef std::vector<Stream> Streams;
Streams streams;
};
// "content" (as used in XEP-0166) descriptions for voice and video.
@ -152,11 +165,11 @@ class MediaContentDescription : public ContentDescription {
bool rtp_header_extensions_set() const {
return rtp_header_extensions_set_;
}
const Sources& sources() const {
return sources_;
const StreamParamsVec& streams() const {
return streams_;
}
void set_sources(const Sources& sources) {
sources_ = sources;
void AddStream(const StreamParams& stream) {
streams_.push_back(stream);
}
protected:
@ -168,7 +181,7 @@ class MediaContentDescription : public ContentDescription {
bool crypto_required_;
std::vector<RtpHeaderExtension> rtp_header_extensions_;
bool rtp_header_extensions_set_;
std::vector<SourceParam> sources_;
StreamParamsVec streams_;
};
template <class C>
@ -205,7 +218,6 @@ class AudioContentDescription : public MediaContentDescriptionImpl<AudioCodec> {
const std::string &lang() const { return lang_; }
void set_lang(const std::string &lang) { lang_ = lang; }
private:
bool conference_mode_;
std::string lang_;
@ -234,9 +246,25 @@ class MediaSessionDescriptionFactory {
SecureMediaPolicy secure() const { return secure_; }
void set_secure(SecureMediaPolicy s) { secure_ = s; }
SessionDescription* CreateOffer(const MediaSessionOptions& options);
SessionDescription* CreateAnswer(const SessionDescription* offer,
const MediaSessionOptions& options);
// TODO(perkj) Deprecate this version of CreateOffer and
// force to use the second alternative.
SessionDescription* CreateOffer(
const MediaSessionOptions& options);
SessionDescription* CreateOffer(
const MediaSessionOptions& options,
const SessionDescription* current_description);
// TODO(perkj) Deprecate this version of CreateAnswer and
// force to use the second alternative.
SessionDescription* CreateAnswer(
const SessionDescription* offer,
const MediaSessionOptions& options);
SessionDescription* CreateAnswer(
const SessionDescription* offer,
const MediaSessionOptions& options,
const SessionDescription* current_description);
private:
AudioCodecs audio_codecs_;

View File

@ -0,0 +1,105 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains structures for describing SSRCs from a media source such
// as a MediaStreamTrack when it is sent across an RTP session. Multiple media
// sources may be sent across the same RTP session; each of them will be
// described by one StreamParams object.
// SsrcGroup is used to describe the relationship between the SSRCs that
// are used for this media source.
// E.g.: Consider a source that is sent as 3 simulcast streams.
// Let the simulcast elements have SSRCs 10, 20, and 30.
// Let each simulcast element use FEC, and let the protection packets have
// SSRCs 11, 21, and 31.
// To describe this, 4 SsrcGroups are needed:
// StreamParams would then contain ssrcs = {10, 11, 20, 21, 30, 31} and
// ssrc_groups = {{SIM, {10, 20, 30}}, {FEC, {10, 11}}, {FEC, {20, 21}},
// {FEC, {30, 31}}}.
// Please see RFC 5576.
#ifndef TALK_SESSION_PHONE_STREAMPARAMS_H_
#define TALK_SESSION_PHONE_STREAMPARAMS_H_
#include <string>
#include <vector>
namespace cricket {
// Associates a set of SSRCs with a grouping semantic (see RFC 5576),
// e.g. FID (flow identification), FEC (forward error correction), or
// SIM (simulcast).
struct SsrcGroup {
  SsrcGroup(const std::string& usage, const std::vector<uint32>& ssrcs)
      : semantics(usage), ssrcs(ssrcs) {
  }

  // Two groups are equal when both the semantic tag and the SSRC list match.
  bool operator==(const SsrcGroup& other) const {
    return semantics == other.semantics && ssrcs == other.ssrcs;
  }
  bool operator!=(const SsrcGroup& other) const {
    return !operator==(other);
  }

  std::string semantics;      // Grouping semantic, e.g. FID, FEC, SIM.
  std::vector<uint32> ssrcs;  // SSRCs belonging to this group.
};
// Describes one media source (e.g. a MediaStreamTrack) sent over an RTP
// session: all SSRCs used for it, how those SSRCs are grouped, and the
// RTCP CNAME / sync label used to associate it with other sources.
struct StreamParams {
  // Full constructor: explicitly lists every SSRC and SSRC grouping.
  StreamParams(const std::string& name,
               const std::vector<uint32>& ssrcs,
               const std::vector<SsrcGroup>& ssrc_groups,
               const std::string& cname,
               const std::string& sync_label)
      : name(name),
        ssrcs(ssrcs),
        ssrc_groups(ssrc_groups),
        cname(cname),
        sync_label(sync_label) {
  }
  // Convenience constructor for the common single-SSRC, no-groups case.
  StreamParams(const std::string& name,
               uint32 ssrc,
               const std::string& cname,
               const std::string& sync_label)
      : name(name),
        cname(cname),
        sync_label(sync_label) {
    ssrcs.push_back(ssrc);
  }
  bool operator==(const StreamParams& other) const {
    // Fixed: sync_label was previously compared against itself
    // (sync_label == sync_label), making that clause always true and
    // letting streams differing only in sync_label compare equal.
    return (name == other.name && ssrcs == other.ssrcs &&
            ssrc_groups == other.ssrc_groups && cname == other.cname &&
            sync_label == other.sync_label);
  }
  bool operator!=(const StreamParams &other) const {
    return !(*this == other);
  }

  std::string name;                    // Unique name of this source.
  std::vector<uint32> ssrcs;           // All SSRCs for this source.
  std::vector<SsrcGroup> ssrc_groups;  // e.g. FID, FEC, SIM.
  std::string cname;                   // RTCP CNAME.
  std::string sync_label;              // Friendly name of cname.
};
} // namespace cricket
#endif // TALK_SESSION_PHONE_STREAMPARAMS_H_