One of Justin's comments was to name the classes XXXXInterface and XXXX rather than XXXX and XXXXImpl. This change applies that convention. I don't like calling some of these classes interfaces (for example MediaStreamTrackListInterface), but they do fit the criteria for being called interfaces.
Review URL: http://webrtc-codereview.appspot.com/226001
git-svn-id: http://webrtc.googlecode.com/svn/trunk@743 4adac7df-926f-26a2-2b94-8c16560cd09d
parent 03a86998cd
commit ebc0a00197
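For reference, here is a minimal sketch of the naming convention being applied (illustrative only, not code from this change; the simplified AudioTrackInterface/AudioTrack bodies are stand-ins for the real webrtc declarations): the abstract API class carries the Interface suffix, and the concrete class keeps the plain name instead of an Impl suffix.

#include <string>

// Abstract API class: pure virtual methods and a protected destructor,
// which is what makes it "fit the criteria to be called an interface".
class AudioTrackInterface {
 public:
  virtual const std::string& label() const = 0;
  virtual bool set_enabled(bool enable) = 0;

 protected:
  // Protected: callers are not allowed to delete through the interface.
  virtual ~AudioTrackInterface() {}
};

// Concrete class: plain name instead of an "Impl" suffix.
class AudioTrack : public AudioTrackInterface {
 public:
  explicit AudioTrack(const std::string& label)
      : label_(label), enabled_(true) {}
  virtual const std::string& label() const { return label_; }
  virtual bool set_enabled(bool enable) {
    enabled_ = enable;
    return true;
  }

 private:
  std::string label_;
  bool enabled_;
};

int main() {
  // Callers hold the interface type; a factory would hand out the concrete class.
  AudioTrack track("audio1");
  AudioTrackInterface* api = &track;
  return api->set_enabled(false) ? 0 : 1;
}

The diff below applies the same rename mechanically: MediaStreamTrack becomes MediaStreamTrackInterface, VideoTrack becomes VideoTrackInterface, AudioTrackImpl becomes AudioTrack, and so on.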
@@ -32,7 +32,7 @@ namespace webrtc {
static const char kAudioTrackKind[] = "audio";
AudioTrackImpl::AudioTrackImpl(const std::string& label, uint32 ssrc)
AudioTrack::AudioTrack(const std::string& label, uint32 ssrc)
: enabled_(true),
label_(label),
ssrc_(ssrc),
@@ -40,8 +40,8 @@ AudioTrackImpl::AudioTrackImpl(const std::string& label, uint32 ssrc)
audio_device_(NULL) {
}
AudioTrackImpl::AudioTrackImpl(const std::string& label,
AudioDeviceModule* audio_device)
AudioTrack::AudioTrack(const std::string& label,
AudioDeviceModule* audio_device)
: enabled_(true),
label_(label),
ssrc_(0),
@@ -50,52 +50,52 @@ AudioTrackImpl::AudioTrackImpl(const std::string& label,
}
// Get the AudioDeviceModule associated with this track.
AudioDeviceModule* AudioTrackImpl::GetAudioDevice() {
AudioDeviceModule* AudioTrack::GetAudioDevice() {
return audio_device_.get();
}
// Implement MediaStreamTrack
const char* AudioTrackImpl::kind() const {
const char* AudioTrack::kind() const {
return kAudioTrackKind;
}
bool AudioTrackImpl::set_enabled(bool enable) {
bool AudioTrack::set_enabled(bool enable) {
bool fire_on_change = (enable != enabled_);
enabled_ = enable;
if (fire_on_change)
NotifierImpl<LocalAudioTrack>::FireOnChanged();
NotifierImpl<LocalAudioTrackInterface>::FireOnChanged();
}
bool AudioTrackImpl::set_ssrc(uint32 ssrc) {
bool AudioTrack::set_ssrc(uint32 ssrc) {
ASSERT(ssrc_ == 0);
ASSERT(ssrc != 0);
if (ssrc_ != 0)
return false;
ssrc_ = ssrc;
NotifierImpl<LocalAudioTrack>::FireOnChanged();
NotifierImpl<LocalAudioTrackInterface>::FireOnChanged();
return true;
}
bool AudioTrackImpl::set_state(TrackState new_state) {
bool AudioTrack::set_state(TrackState new_state) {
bool fire_on_change = (state_ != new_state);
state_ = new_state;
if (fire_on_change)
NotifierImpl<LocalAudioTrack>::FireOnChanged();
NotifierImpl<LocalAudioTrackInterface>::FireOnChanged();
return true;
}
scoped_refptr<AudioTrack> AudioTrackImpl::Create(
scoped_refptr<AudioTrackInterface> AudioTrack::Create(
const std::string& label, uint32 ssrc) {
talk_base::RefCountImpl<AudioTrackImpl>* track =
new talk_base::RefCountImpl<AudioTrackImpl>(label, ssrc);
talk_base::RefCountImpl<AudioTrack>* track =
new talk_base::RefCountImpl<AudioTrack>(label, ssrc);
return track;
}
scoped_refptr<LocalAudioTrack> CreateLocalAudioTrack(
scoped_refptr<LocalAudioTrackInterface> CreateLocalAudioTrack(
const std::string& label,
AudioDeviceModule* audio_device) {
talk_base::RefCountImpl<AudioTrackImpl>* track =
new talk_base::RefCountImpl<AudioTrackImpl>(label, audio_device);
talk_base::RefCountImpl<AudioTrack>* track =
new talk_base::RefCountImpl<AudioTrack>(label, audio_device);
return track;
}
@@ -41,11 +41,11 @@
namespace webrtc {
class AudioTrackImpl : public NotifierImpl<LocalAudioTrack> {
class AudioTrack : public NotifierImpl<LocalAudioTrackInterface> {
public:
// Creates an audio track. This can be used in remote media streams.
// For local audio tracks use CreateLocalAudioTrack.
static scoped_refptr<AudioTrack> Create(const std::string& label,
static scoped_refptr<AudioTrackInterface> Create(const std::string& label,
uint32 ssrc);
// Get the AudioDeviceModule associated with this track.
@@ -63,8 +63,8 @@ class AudioTrackImpl : public NotifierImpl<LocalAudioTrack> {
virtual bool set_state(TrackState new_state);
protected:
AudioTrackImpl(const std::string& label, uint32 ssrc);
AudioTrackImpl(const std::string& label, AudioDeviceModule* audio_device);
AudioTrack(const std::string& label, uint32 ssrc);
AudioTrack(const std::string& label, AudioDeviceModule* audio_device);
private:
bool enabled_;
@@ -60,8 +60,8 @@ class Notifier {
};
// Information about a track.
class MediaStreamTrack : public talk_base::RefCount,
public Notifier {
class MediaStreamTrackInterface : public talk_base::RefCount,
public Notifier {
public:
enum TrackState {
kInitializing, // Track is beeing negotiated.
@@ -88,79 +88,79 @@ class MediaStreamTrack : public talk_base::RefCount,
};
// Reference counted wrapper for a VideoRenderer.
class VideoRenderer : public talk_base::RefCount {
class VideoRendererInterface : public talk_base::RefCount {
public:
virtual cricket::VideoRenderer* renderer() = 0;
protected:
virtual ~VideoRenderer() {}
virtual ~VideoRendererInterface() {}
};
// Creates a reference counted object of type webrtc::VideoRenderer.
// webrtc::VideoRenderer take ownership of cricket::VideoRenderer.
scoped_refptr<VideoRenderer> CreateVideoRenderer(
scoped_refptr<VideoRendererInterface> CreateVideoRenderer(
cricket::VideoRenderer* renderer);
class VideoTrack : public MediaStreamTrack {
class VideoTrackInterface : public MediaStreamTrackInterface {
public:
// Set the video renderer for a local or remote stream.
// This call will start decoding the received video stream and render it.
virtual void SetRenderer(VideoRenderer* renderer) = 0;
virtual void SetRenderer(VideoRendererInterface* renderer) = 0;
// Get the VideoRenderer associated with this track.
virtual VideoRenderer* GetRenderer() = 0;
virtual VideoRendererInterface* GetRenderer() = 0;
protected:
virtual ~VideoTrack() {}
virtual ~VideoTrackInterface() {}
};
class LocalVideoTrack : public VideoTrack {
class LocalVideoTrackInterface : public VideoTrackInterface {
public:
// Get the VideoCapture device associated with this track.
virtual VideoCaptureModule* GetVideoCapture() = 0;
protected:
virtual ~LocalVideoTrack() {}
virtual ~LocalVideoTrackInterface() {}
};
scoped_refptr<LocalVideoTrack> CreateLocalVideoTrack(
scoped_refptr<LocalVideoTrackInterface> CreateLocalVideoTrack(
const std::string& label,
VideoCaptureModule* video_device);
class AudioTrack : public MediaStreamTrack {
class AudioTrackInterface : public MediaStreamTrackInterface {
public:
protected:
virtual ~AudioTrack() {}
virtual ~AudioTrackInterface() {}
};
class LocalAudioTrack : public AudioTrack {
class LocalAudioTrackInterface : public AudioTrackInterface {
public:
// Get the AudioDeviceModule associated with this track.
virtual AudioDeviceModule* GetAudioDevice() = 0;
protected:
virtual ~LocalAudioTrack() {}
virtual ~LocalAudioTrackInterface() {}
};
scoped_refptr<LocalAudioTrack> CreateLocalAudioTrack(
scoped_refptr<LocalAudioTrackInterface> CreateLocalAudioTrack(
const std::string& label,
AudioDeviceModule* audio_device);
// List of of tracks.
class MediaStreamTrackList : public talk_base::RefCount,
public Notifier {
class MediaStreamTrackListInterface : public talk_base::RefCount,
public Notifier {
public:
virtual size_t count() = 0;
virtual MediaStreamTrack* at(size_t index) = 0;
virtual MediaStreamTrackInterface* at(size_t index) = 0;
protected:
virtual ~MediaStreamTrackList() {}
virtual ~MediaStreamTrackListInterface() {}
};
class MediaStream : public talk_base::RefCount,
public Notifier {
class MediaStreamInterface : public talk_base::RefCount,
public Notifier {
public:
virtual const std::string& label() = 0;
virtual MediaStreamTrackList* tracks() = 0;
virtual MediaStreamTrackListInterface* tracks() = 0;
enum ReadyState {
kInitializing,
@@ -174,12 +174,12 @@ class MediaStream : public talk_base::RefCount,
virtual void set_ready_state(ReadyState state) = 0;
protected:
virtual ~MediaStream() {}
virtual ~MediaStreamInterface() {}
};
class LocalMediaStream : public MediaStream {
class LocalMediaStreamInterface : public MediaStreamInterface {
public:
virtual bool AddTrack(MediaStreamTrack* track) = 0;
virtual bool AddTrack(MediaStreamTrackInterface* track) = 0;
};
} // namespace webrtc
@@ -43,13 +43,14 @@ enum {
MSG_TRACK_ENABLEDCHANGED = 3,
};
typedef talk_base::TypedMessageData<MediaStreamTrack::TrackState>
typedef talk_base::TypedMessageData<MediaStreamTrackInterface::TrackState>
TrackStateMessageData;
typedef talk_base::TypedMessageData<bool> TrackEnabledMessageData;
VideoTrackHandler::VideoTrackHandler(VideoTrack* track,
VideoTrackHandler::VideoTrackHandler(VideoTrackInterface* track,
MediaProviderInterface* provider)
: provider_(provider),
video_track_(track),
state_(track->state()),
enabled_(track->enabled()),
renderer_(track->GetRenderer()),
@@ -102,14 +103,14 @@ void VideoTrackHandler::OnMessage(talk_base::Message* msg) {
}
LocalVideoTrackHandler::LocalVideoTrackHandler(
LocalVideoTrack* track,
LocalVideoTrackInterface* track,
MediaProviderInterface* provider)
: VideoTrackHandler(track, provider),
local_video_track_(track) {
}
void LocalVideoTrackHandler::OnRendererChanged() {
VideoRenderer* renderer(video_track_->GetRenderer());
VideoRendererInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetLocalRenderer(video_track_->ssrc(), renderer->renderer());
else
@@ -117,11 +118,11 @@ void LocalVideoTrackHandler::OnRendererChanged() {
}
void LocalVideoTrackHandler::OnStateChanged(
MediaStreamTrack::TrackState state) {
if (state == VideoTrack::kLive) {
MediaStreamTrackInterface::TrackState state) {
if (state == VideoTrackInterface::kLive) {
provider_->SetCaptureDevice(local_video_track_->ssrc(),
local_video_track_->GetVideoCapture());
VideoRenderer* renderer(video_track_->GetRenderer());
VideoRendererInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetLocalRenderer(video_track_->ssrc(), renderer->renderer());
else
@@ -134,14 +135,14 @@ void LocalVideoTrackHandler::OnEnabledChanged(bool enabled) {
}
RemoteVideoTrackHandler::RemoteVideoTrackHandler(
VideoTrack* track,
VideoTrackInterface* track,
MediaProviderInterface* provider)
: VideoTrackHandler(track, provider),
remote_video_track_(track) {
}
void RemoteVideoTrackHandler::OnRendererChanged() {
VideoRenderer* renderer(video_track_->GetRenderer());
VideoRendererInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetRemoteRenderer(video_track_->ssrc(), renderer->renderer());
else
@@ -149,14 +150,14 @@ void RemoteVideoTrackHandler::OnRendererChanged() {
}
void RemoteVideoTrackHandler::OnStateChanged(
MediaStreamTrack::TrackState state) {
MediaStreamTrackInterface::TrackState state) {
}
void RemoteVideoTrackHandler::OnEnabledChanged(bool enabled) {
// TODO(perkj): What should happen when enabled is changed?
}
MediaStreamHandler::MediaStreamHandler(MediaStream* stream,
MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,
MediaProviderInterface* provider)
: stream_(stream),
provider_(provider) {
@@ -169,7 +170,7 @@ MediaStreamHandler::~MediaStreamHandler() {
}
}
MediaStream* MediaStreamHandler::stream() {
MediaStreamInterface* MediaStreamHandler::stream() {
return stream_.get();
}
@@ -179,16 +180,16 @@ void MediaStreamHandler::OnChanged() {
LocalMediaStreamHandler::LocalMediaStreamHandler(
MediaStream* stream,
MediaStreamInterface* stream,
MediaProviderInterface* provider)
: MediaStreamHandler(stream, provider) {
MediaStreamTrackList* tracklist(stream->tracks());
MediaStreamTrackListInterface* tracklist(stream->tracks());
for (size_t j = 0; j < tracklist->count(); ++j) {
MediaStreamTrack* track = tracklist->at(j);
if (track->type() == MediaStreamTrack::kVideo) {
LocalVideoTrack* video_track =
static_cast<LocalVideoTrack*>(track);
MediaStreamTrackInterface* track = tracklist->at(j);
if (track->type() == MediaStreamTrackInterface::kVideo) {
LocalVideoTrackInterface* video_track =
static_cast<LocalVideoTrackInterface*>(track);
VideoTrackHandler* handler(new LocalVideoTrackHandler(video_track,
provider));
video_handlers_.push_back(handler);
@@ -197,15 +198,16 @@ LocalMediaStreamHandler::LocalMediaStreamHandler(
}
RemoteMediaStreamHandler::RemoteMediaStreamHandler(
MediaStream* stream,
MediaStreamInterface* stream,
MediaProviderInterface* provider)
: MediaStreamHandler(stream, provider) {
MediaStreamTrackList* tracklist(stream->tracks());
MediaStreamTrackListInterface* tracklist(stream->tracks());
for (size_t j = 0; j < tracklist->count(); ++j) {
MediaStreamTrack* track = tracklist->at(j);
if (track->type() == MediaStreamTrack::kVideo) {
VideoTrack* video_track = static_cast<VideoTrack*>(track);
MediaStreamTrackInterface* track = tracklist->at(j);
if (track->type() == MediaStreamTrackInterface::kVideo) {
VideoTrackInterface* video_track =
static_cast<VideoTrackInterface*>(track);
VideoTrackHandler* handler(new RemoteVideoTrackHandler(video_track,
provider));
video_handlers_.push_back(handler);
@@ -228,13 +230,13 @@ MediaStreamHandlers::~MediaStreamHandlers() {
}
}
void MediaStreamHandlers::AddRemoteStream(MediaStream* stream) {
void MediaStreamHandlers::AddRemoteStream(MediaStreamInterface* stream) {
RemoteMediaStreamHandler* handler = new RemoteMediaStreamHandler(stream,
provider_);
remote_streams_handlers_.push_back(handler);
}
void MediaStreamHandlers::RemoveRemoteStream(MediaStream* stream) {
void MediaStreamHandlers::RemoveRemoteStream(MediaStreamInterface* stream) {
StreamHandlerList::iterator it = remote_streams_handlers_.begin();
for (; it != remote_streams_handlers_.end(); ++it) {
if ((*it)->stream() == stream) {
@@ -269,7 +271,7 @@ void MediaStreamHandlers::CommitLocalStreams(StreamCollection* streams) {
// Iterate the new collection of local streams.
// If its not found in the old collection it have been added.
for (size_t j = 0; j < streams->count(); ++j) {
MediaStream* stream = streams->at(j);
MediaStreamInterface* stream = streams->at(j);
StreamHandlerList::iterator it = local_streams_handlers_.begin();
for (; it != local_streams_handlers_.end(); ++it) {
if (stream == (*it)->stream())
@@ -48,7 +48,7 @@ namespace webrtc {
class VideoTrackHandler : public Observer,
public talk_base::MessageHandler {
public:
VideoTrackHandler(VideoTrack* track,
VideoTrackHandler(VideoTrackInterface* track,
MediaProviderInterface* provider);
virtual ~VideoTrackHandler();
virtual void OnChanged();
@@ -57,70 +57,71 @@ class VideoTrackHandler : public Observer,
virtual void OnMessage(talk_base::Message* msg);
virtual void OnRendererChanged() = 0;
virtual void OnStateChanged(MediaStreamTrack::TrackState state) = 0;
virtual void OnStateChanged(MediaStreamTrackInterface::TrackState state) = 0;
virtual void OnEnabledChanged(bool enabled) = 0;
MediaProviderInterface* provider_;
VideoTrack* video_track_; // a weak reference of Local or Remote handler.
VideoTrackInterface* video_track_;
private:
MediaStreamTrack::TrackState state_;
MediaStreamTrackInterface::TrackState state_;
bool enabled_;
scoped_refptr<VideoRenderer> renderer_;
scoped_refptr<VideoRendererInterface> renderer_;
talk_base::Thread* signaling_thread_;
};
class LocalVideoTrackHandler : public VideoTrackHandler {
public:
LocalVideoTrackHandler(LocalVideoTrack* track,
LocalVideoTrackHandler(LocalVideoTrackInterface* track,
MediaProviderInterface* provider);
protected:
virtual void OnRendererChanged();
virtual void OnStateChanged(MediaStreamTrack::TrackState state);
virtual void OnStateChanged(MediaStreamTrackInterface::TrackState state);
virtual void OnEnabledChanged(bool enabled);
private:
scoped_refptr<LocalVideoTrack> local_video_track_;
scoped_refptr<LocalVideoTrackInterface> local_video_track_;
};
class RemoteVideoTrackHandler : public VideoTrackHandler {
public:
RemoteVideoTrackHandler(VideoTrack* track,
RemoteVideoTrackHandler(VideoTrackInterface* track,
MediaProviderInterface* provider);
protected:
virtual void OnRendererChanged();
virtual void OnStateChanged(MediaStreamTrack::TrackState state);
virtual void OnStateChanged(MediaStreamTrackInterface::TrackState state);
virtual void OnEnabledChanged(bool enabled);
private:
scoped_refptr<VideoTrack> remote_video_track_;
scoped_refptr<VideoTrackInterface> remote_video_track_;
};
class MediaStreamHandler : public Observer {
public:
MediaStreamHandler(MediaStream* stream, MediaProviderInterface* provider);
MediaStreamHandler(MediaStreamInterface* stream,
MediaProviderInterface* provider);
~MediaStreamHandler();
MediaStream* stream();
MediaStreamInterface* stream();
virtual void OnChanged();
protected:
MediaProviderInterface* provider_;
typedef std::vector<VideoTrackHandler*> VideoTrackHandlers;
VideoTrackHandlers video_handlers_;
scoped_refptr<MediaStream> stream_;
scoped_refptr<MediaStreamInterface> stream_;
};
class LocalMediaStreamHandler : public MediaStreamHandler {
public:
LocalMediaStreamHandler(MediaStream* stream,
LocalMediaStreamHandler(MediaStreamInterface* stream,
MediaProviderInterface* provider);
};
class RemoteMediaStreamHandler : public MediaStreamHandler {
public:
RemoteMediaStreamHandler(MediaStream* stream,
RemoteMediaStreamHandler(MediaStreamInterface* stream,
MediaProviderInterface* provider);
};
@@ -128,8 +129,8 @@ class MediaStreamHandlers {
public:
explicit MediaStreamHandlers(MediaProviderInterface* provider);
~MediaStreamHandlers();
void AddRemoteStream(MediaStream* stream);
void RemoveRemoteStream(MediaStream* stream);
void AddRemoteStream(MediaStreamInterface* stream);
void RemoveRemoteStream(MediaStreamInterface* stream);
void CommitLocalStreams(StreamCollection* streams);
private:
@@ -67,12 +67,13 @@ class MockMediaProvier : public MediaProviderInterface {
TEST(MediaStreamHandlerTest, LocalStreams) {
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalVideoTrack> video_track(CreateLocalVideoTrack(
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
scoped_refptr<LocalVideoTrackInterface> video_track(CreateLocalVideoTrack(
kVideoDeviceName, NULL));
video_track->set_ssrc(kVideoSsrc);
EXPECT_TRUE(stream->AddTrack(video_track));
scoped_refptr<VideoRenderer> renderer(CreateVideoRenderer(NULL));
scoped_refptr<VideoRendererInterface> renderer(CreateVideoRenderer(NULL));
video_track->SetRenderer(renderer);
MockMediaProvier provider;
@@ -88,14 +89,14 @@ TEST(MediaStreamHandlerTest, LocalStreams) {
.Times(Exactly(1));
handlers.CommitLocalStreams(collection);
video_track->set_state(MediaStreamTrack::kLive);
video_track->set_state(MediaStreamTrackInterface::kLive);
// Process posted messages.
talk_base::Thread::Current()->ProcessMessages(1);
collection->RemoveStream(stream);
handlers.CommitLocalStreams(collection);
video_track->set_state(MediaStreamTrack::kEnded);
video_track->set_state(MediaStreamTrackInterface::kEnded);
// Process posted messages.
talk_base::Thread::Current()->ProcessMessages(1);
}
@@ -105,8 +106,9 @@ TEST(MediaStreamHandlerTest, RemoteStreams) {
// they are easier to create.
// LocalMediaStreams inherit from MediaStreams.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalVideoTrack> video_track(CreateLocalVideoTrack(
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
scoped_refptr<LocalVideoTrackInterface> video_track(CreateLocalVideoTrack(
kVideoDeviceName, NULL));
video_track->set_ssrc(kVideoSsrc);
EXPECT_TRUE(stream->AddTrack(video_track));
@@ -120,7 +122,7 @@ TEST(MediaStreamHandlerTest, RemoteStreams) {
.Times(Exactly(2));
// Set the renderer once.
scoped_refptr<VideoRenderer> renderer(CreateVideoRenderer(NULL));
scoped_refptr<VideoRendererInterface> renderer(CreateVideoRenderer(NULL));
video_track->SetRenderer(renderer);
talk_base::Thread::Current()->ProcessMessages(1);
@@ -28,7 +28,7 @@
namespace webrtc {
scoped_refptr<LocalMediaStream> CreateLocalMediaStream(
scoped_refptr<LocalMediaStreamInterface> CreateLocalMediaStream(
const std::string& label) {
return MediaStreamImpl::Create(label);
}
@@ -42,18 +42,19 @@ scoped_refptr<MediaStreamImpl> MediaStreamImpl::Create(
MediaStreamImpl::MediaStreamImpl(const std::string& label)
: label_(label),
ready_state_(MediaStream::kInitializing),
ready_state_(MediaStreamInterface::kInitializing),
track_list_(new talk_base::RefCountImpl<MediaStreamTrackListImpl>()) {
}
void MediaStreamImpl::set_ready_state(MediaStream::ReadyState new_state) {
void MediaStreamImpl::set_ready_state(
MediaStreamInterface::ReadyState new_state) {
if (ready_state_ != new_state) {
ready_state_ = new_state;
NotifierImpl<LocalMediaStream>::FireOnChanged();
NotifierImpl<LocalMediaStreamInterface>::FireOnChanged();
}
}
bool MediaStreamImpl::AddTrack(MediaStreamTrack* track) {
bool MediaStreamImpl::AddTrack(MediaStreamTrackInterface* track) {
if (ready_state() != kInitializing)
return false;
@@ -62,9 +63,9 @@ bool MediaStreamImpl::AddTrack(MediaStreamTrack* track) {
}
void MediaStreamImpl::MediaStreamTrackListImpl::AddTrack(
MediaStreamTrack* track) {
MediaStreamTrackInterface* track) {
tracks_.push_back(track);
NotifierImpl<MediaStreamTrackList>::FireOnChanged();
NotifierImpl<MediaStreamTrackListInterface>::FireOnChanged();
}
} // namespace webrtc
@@ -37,28 +37,29 @@
namespace webrtc {
class MediaStreamImpl
: public NotifierImpl<LocalMediaStream> {
: public NotifierImpl<LocalMediaStreamInterface> {
public:
class MediaStreamTrackListImpl : public NotifierImpl<MediaStreamTrackList> {
class MediaStreamTrackListImpl :
public NotifierImpl<MediaStreamTrackListInterface> {
public:
void AddTrack(MediaStreamTrack* track);
void AddTrack(MediaStreamTrackInterface* track);
virtual size_t count() { return tracks_.size(); }
virtual MediaStreamTrack* at(size_t index) {
virtual MediaStreamTrackInterface* at(size_t index) {
return tracks_.at(index);
}
private:
std::vector<scoped_refptr<MediaStreamTrack> > tracks_;
std::vector<scoped_refptr<MediaStreamTrackInterface> > tracks_;
};
static scoped_refptr<MediaStreamImpl> Create(const std::string& label);
// Implement LocalStream.
virtual bool AddTrack(MediaStreamTrack* track);
virtual bool AddTrack(MediaStreamTrackInterface* track);
// Implement MediaStream.
virtual const std::string& label() { return label_; }
virtual MediaStreamTrackList* tracks() { return track_list_; }
virtual MediaStreamTrackListInterface* tracks() { return track_list_; }
virtual ReadyState ready_state() { return ready_state_; }
virtual void set_ready_state(ReadyState new_state);
void set_state(ReadyState new_state);
@@ -67,7 +68,7 @@ class MediaStreamImpl
explicit MediaStreamImpl(const std::string& label);
std::string label_;
MediaStream::ReadyState ready_state_;
MediaStreamInterface::ReadyState ready_state_;
scoped_refptr<MediaStreamTrackListImpl> track_list_;
};
@@ -56,18 +56,19 @@ class TestObserver : public Observer {
TEST(LocalStreamTest, Create) {
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
EXPECT_EQ(label, stream->label());
// Check state.
EXPECT_EQ(MediaStream::kInitializing, stream->ready_state());
EXPECT_EQ(MediaStreamInterface::kInitializing, stream->ready_state());
// Create a local Video track.
TestObserver tracklist_observer;
scoped_refptr<LocalVideoTrack> video_track(CreateLocalVideoTrack(
scoped_refptr<LocalVideoTrackInterface> video_track(CreateLocalVideoTrack(
kVideoDeviceName, NULL));
// Add an observer to the track list.
scoped_refptr<MediaStreamTrackList> track_list(stream->tracks());
scoped_refptr<MediaStreamTrackListInterface> track_list(stream->tracks());
stream->tracks()->RegisterObserver(&tracklist_observer);
// Add the track to the local stream.
EXPECT_TRUE(stream->AddTrack(video_track));
@@ -77,8 +78,9 @@ TEST(LocalStreamTest, Create) {
EXPECT_EQ(1u, stream->tracks()->count());
// Verify the track.
scoped_refptr<webrtc::MediaStreamTrack> track(stream->tracks()->at(0));
EXPECT_EQ(MediaStreamTrack::kVideo, track->type());
scoped_refptr<webrtc::MediaStreamTrackInterface> track(
stream->tracks()->at(0));
EXPECT_EQ(MediaStreamTrackInterface::kVideo, track->type());
EXPECT_EQ(0, track->label().compare(kVideoDeviceName));
EXPECT_TRUE(track->enabled());
@@ -43,18 +43,19 @@ enum {
typedef talk_base::TypedMessageData<size_t> SizeTMessageData;
typedef talk_base::TypedMessageData<webrtc::Observer*> ObserverMessageData;
typedef talk_base::TypedMessageData<webrtc::MediaStream::ReadyState>
typedef talk_base::TypedMessageData<webrtc::MediaStreamInterface::ReadyState>
ReadyStateMessageData;
class MediaStreamTrackMessageData : public talk_base::MessageData {
public:
explicit MediaStreamTrackMessageData(webrtc::MediaStreamTrack* track)
explicit MediaStreamTrackMessageData(
webrtc::MediaStreamTrackInterface* track)
: track_(track),
result_(false) {
}
scoped_refptr<webrtc::MediaStreamTrack> track_;
scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
bool result_;
};
@@ -65,7 +66,7 @@ class MediaStreamTrackAtMessageData : public talk_base::MessageData {
}
size_t index_;
scoped_refptr<webrtc::MediaStreamTrack> track_;
scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
};
} // namespace anonymous
@@ -94,29 +95,30 @@ const std::string& MediaStreamProxy::label() {
return media_stream_impl_->label();
}
MediaStreamTrackList* MediaStreamProxy::tracks() {
MediaStreamTrackListInterface* MediaStreamProxy::tracks() {
return track_list_;
}
MediaStream::ReadyState MediaStreamProxy::ready_state() {
MediaStreamInterface::ReadyState MediaStreamProxy::ready_state() {
if (!signaling_thread_->IsCurrent()) {
ReadyStateMessageData msg(MediaStream::kInitializing);
ReadyStateMessageData msg(MediaStreamInterface::kInitializing);
Send(MSG_READY_STATE, &msg);
return msg.data();
}
return media_stream_impl_->ready_state();
}
void MediaStreamProxy::set_ready_state(MediaStream::ReadyState new_state) {
void MediaStreamProxy::set_ready_state(
MediaStreamInterface::ReadyState new_state) {
if (!signaling_thread_->IsCurrent()) {
ReadyStateMessageData msg(MediaStream::kInitializing);
ReadyStateMessageData msg(MediaStreamInterface::kInitializing);
Send(MSG_SET_READY_STATE, &msg);
return;
}
media_stream_impl_->set_ready_state(new_state);
}
bool MediaStreamProxy::AddTrack(MediaStreamTrack* track) {
bool MediaStreamProxy::AddTrack(MediaStreamTrackInterface* track) {
if (!signaling_thread_->IsCurrent()) {
MediaStreamTrackMessageData msg(track);
Send(MSG_ADD_TRACK, &msg);
@@ -184,7 +186,7 @@ void MediaStreamProxy::OnMessage(talk_base::Message* msg) {
}
MediaStreamProxy::MediaStreamTrackListProxy::MediaStreamTrackListProxy(
MediaStreamTrackList* track_list,
MediaStreamTrackListInterface* track_list,
talk_base::Thread* signaling_thread)
: track_list_(track_list),
signaling_thread_(signaling_thread) {
@@ -199,7 +201,7 @@ size_t MediaStreamProxy::MediaStreamTrackListProxy::count() {
return track_list_->count();
}
MediaStreamTrack* MediaStreamProxy::MediaStreamTrackListProxy::at(
MediaStreamTrackInterface* MediaStreamProxy::MediaStreamTrackListProxy::at(
size_t index) {
if (!signaling_thread_->IsCurrent()) {
MediaStreamTrackAtMessageData msg(index);
@@ -39,16 +39,16 @@ namespace webrtc {
// MediaStreamProxy is a proxy for the MediaStream interface. The purpose is
// to make sure MediaStreamImpl is only accessed from the signaling thread.
// It can be used as a proxy for both local and remote MediaStreams.
class MediaStreamProxy : public LocalMediaStream,
class MediaStreamProxy : public LocalMediaStreamInterface,
public talk_base::MessageHandler {
public:
class MediaStreamTrackListProxy : public MediaStreamTrackList,
class MediaStreamTrackListProxy : public MediaStreamTrackListInterface,
public talk_base::MessageHandler {
public:
MediaStreamTrackListProxy(MediaStreamTrackList* track_list,
MediaStreamTrackListProxy(MediaStreamTrackListInterface* track_list,
talk_base::Thread* signaling_thread);
virtual size_t count();
virtual MediaStreamTrack* at(size_t index);
virtual MediaStreamTrackInterface* at(size_t index);
// Implement Notifier
virtual void RegisterObserver(Observer* observer);
@@ -57,7 +57,7 @@ class MediaStreamProxy : public LocalMediaStream,
void Send(uint32 id, talk_base::MessageData* data);
void OnMessage(talk_base::Message* msg);
scoped_refptr<MediaStreamTrackList> track_list_;
scoped_refptr<MediaStreamTrackListInterface> track_list_;
talk_base::Thread* signaling_thread_;
};
@@ -66,11 +66,11 @@ class MediaStreamProxy : public LocalMediaStream,
talk_base::Thread* signaling_thread);
// Implement LocalStream.
virtual bool AddTrack(MediaStreamTrack* track);
virtual bool AddTrack(MediaStreamTrackInterface* track);
// Implement MediaStream.
virtual const std::string& label();
virtual MediaStreamTrackList* tracks();
virtual MediaStreamTrackListInterface* tracks();
virtual ReadyState ready_state();
virtual void set_ready_state(ReadyState new_state);
@@ -43,8 +43,8 @@ namespace webrtc {
class StreamCollection : public talk_base::RefCount {
public:
virtual size_t count() = 0;
virtual MediaStream* at(size_t index) = 0;
virtual MediaStream* find(const std::string& label) = 0;
virtual MediaStreamInterface* at(size_t index) = 0;
virtual MediaStreamInterface* find(const std::string& label) = 0;
protected:
// Dtor protected as objects shouldn't be deleted via this interface.
~StreamCollection() {}
@@ -68,10 +68,10 @@ class PeerConnectionObserver {
virtual void OnStateChange(Readiness state) = 0;
// Triggered when media is received on a new stream from remote peer.
virtual void OnAddStream(MediaStream* stream) = 0;
virtual void OnAddStream(MediaStreamInterface* stream) = 0;
// Triggered when a remote peer close a stream.
virtual void OnRemoveStream(MediaStream* stream) = 0;
virtual void OnRemoveStream(MediaStreamInterface* stream) = 0;
protected:
// Dtor protected as objects shouldn't be deleted via this interface.
@@ -96,12 +96,12 @@ class PeerConnection : public talk_base::RefCount {
// Add a new local stream.
// This function does not trigger any changes to the stream until
// CommitStreamChanges is called.
virtual void AddStream(LocalMediaStream* stream) = 0;
virtual void AddStream(LocalMediaStreamInterface* stream) = 0;
// Remove a local stream and stop sending it.
// This function does not trigger any changes to the stream until
// CommitStreamChanges is called.
virtual void RemoveStream(LocalMediaStream* stream) = 0;
virtual void RemoveStream(LocalMediaStreamInterface* stream) = 0;
// Commit Stream changes. This will start sending media on new streams
// and stop sending media on removed stream.
@@ -160,7 +160,7 @@ class PeerConnectionManager : public talk_base::RefCount {
const std::string& config,
PeerConnectionObserver* observer) = 0;
virtual scoped_refptr<LocalMediaStream> CreateLocalMediaStream(
virtual scoped_refptr<LocalMediaStreamInterface> CreateLocalMediaStream(
const std::string& label) = 0;
@@ -216,11 +216,12 @@ bool PeerConnectionImpl::ProcessSignalingMessage(const std::string& msg) {
signaling_thread_->Post(this, MSG_PROCESSSIGNALINGMESSAGE, parameter);
}
void PeerConnectionImpl::AddStream(LocalMediaStream* local_stream) {
void PeerConnectionImpl::AddStream(LocalMediaStreamInterface* local_stream) {
local_media_streams_->AddStream(local_stream);
}
void PeerConnectionImpl::RemoveStream(LocalMediaStream* remove_stream) {
void PeerConnectionImpl::RemoveStream(
LocalMediaStreamInterface* remove_stream) {
local_media_streams_->RemoveStream(remove_stream);
}
@@ -266,7 +267,8 @@ void PeerConnectionImpl::OnNewPeerConnectionMessage(
observer_->OnSignalingMessage(message);
}
void PeerConnectionImpl::OnRemoteStreamAdded(MediaStream* remote_stream) {
void PeerConnectionImpl::OnRemoteStreamAdded(
MediaStreamInterface* remote_stream) {
// TODO(perkj): add function in pc signaling to return a collection of
// remote streams.
// This way we can avoid keeping a separate list of remote_media_streams_.
@@ -275,7 +277,8 @@ void PeerConnectionImpl::OnRemoteStreamAdded(MediaStream* remote_stream) {
observer_->OnAddStream(remote_stream);
}
void PeerConnectionImpl::OnRemoteStreamRemoved(MediaStream* remote_stream) {
void PeerConnectionImpl::OnRemoteStreamRemoved(
MediaStreamInterface* remote_stream) {
// TODO(perkj): add function in pc signaling to return a collection of
// remote streams.
// This way we can avoid keeping a separate list of remote_media_streams_.
@@ -71,8 +71,8 @@ class PeerConnectionImpl : public PeerConnection,
}
virtual scoped_refptr<StreamCollection> local_streams();
virtual scoped_refptr<StreamCollection> remote_streams();
virtual void AddStream(LocalMediaStream* stream);
virtual void RemoveStream(LocalMediaStream* stream);
virtual void AddStream(LocalMediaStreamInterface* stream);
virtual void RemoveStream(LocalMediaStreamInterface* stream);
virtual void CommitStreamChanges();
private:
@@ -81,8 +81,8 @@ class PeerConnectionImpl : public PeerConnection,
// Signals from PeerConnectionSignaling.
void OnNewPeerConnectionMessage(const std::string& message);
void OnRemoteStreamAdded(MediaStream* remote_stream);
void OnRemoteStreamRemoved(MediaStream* remote_stream);
void OnRemoteStreamAdded(MediaStreamInterface* remote_stream);
void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream);
void Terminate_s();
@@ -45,8 +45,8 @@ class MockPeerConnectionObserver : public PeerConnectionObserver {
virtual void OnMessage(const std::string& msg) {}
virtual void OnSignalingMessage(const std::string& msg) {}
virtual void OnStateChange(Readiness state) {}
virtual void OnAddStream(MediaStream* stream) {}
virtual void OnRemoveStream(MediaStream* stream) {}
virtual void OnAddStream(MediaStreamInterface* stream) {}
virtual void OnRemoveStream(MediaStreamInterface* stream) {}
};
class PeerConnectionImplTest : public testing::Test {
@@ -67,7 +67,7 @@ class PeerConnectionImplTest : public testing::Test {
TEST_F(PeerConnectionImplTest, AddRemoveStream) {
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(
scoped_refptr<LocalMediaStreamInterface> stream(
pc_factory_->CreateLocalMediaStream(label));
pc_->AddStream(stream);
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
@ -53,11 +51,12 @@ class MockPeerConnectionObserver : public PeerConnectionObserver {
|
||||
virtual void OnMessage(const std::string& msg) {}
|
||||
virtual void OnSignalingMessage(const std::string& msg) {}
|
||||
virtual void OnStateChange(Readiness state) {}
|
||||
virtual void OnAddStream(MediaStream* stream) {}
|
||||
virtual void OnRemoveStream(MediaStream* stream) {}
|
||||
virtual void OnAddStream(MediaStreamInterface* stream) {}
|
||||
virtual void OnRemoveStream(MediaStreamInterface* stream) {}
|
||||
};
|
||||
|
||||
TEST(PeerConnectionManager, CreatePCUsingInternalModules) {
|
||||
// TODO(mallinath) - Fix drash when components are created in factory.
|
||||
TEST(PeerConnectionManager, DISABLED_CreatePCUsingInternalModules) {
|
||||
MockPeerConnectionObserver observer;
|
||||
scoped_refptr<PeerConnectionManager> manager(PeerConnectionManager::Create());
|
||||
ASSERT_TRUE(manager.get() != NULL);
|
||||
|
@@ -235,7 +235,7 @@ scoped_refptr<PeerConnection> PeerConnectionManagerImpl::CreatePeerConnection_s(
return pc;
}
scoped_refptr<LocalMediaStream>
scoped_refptr<LocalMediaStreamInterface>
PeerConnectionManagerImpl::CreateLocalMediaStream(
const std::string& label) {
return MediaStreamProxy::Create(label, signaling_thread_ptr_);
@@ -45,7 +45,7 @@ class PeerConnectionManagerImpl : public PeerConnectionManager,
PeerConnectionObserver* observer);
bool Initialize();
scoped_refptr<LocalMediaStream> CreateLocalMediaStream(
scoped_refptr<LocalMediaStreamInterface> CreateLocalMediaStream(
const std::string& label);
protected:
@@ -297,13 +297,13 @@ void PeerConnectionSignaling::InitMediaSessionOptions(
// the is_video should always be true even if there are not video tracks.
options->is_video = true;
for (size_t i = 0; i < local_streams->count(); ++i) {
MediaStream* stream = local_streams->at(i);
scoped_refptr<MediaStreamTrackList> tracks = stream->tracks();
MediaStreamInterface* stream = local_streams->at(i);
scoped_refptr<MediaStreamTrackListInterface> tracks = stream->tracks();
// For each track in the stream, add it to the MediaSessionOptions.
for (size_t j = 0; j < tracks->count(); ++j) {
scoped_refptr<MediaStreamTrack> track = tracks->at(j);
if (MediaStreamTrack::kAudio == track->type()) {
scoped_refptr<MediaStreamTrackInterface> track = tracks->at(j);
if (MediaStreamTrackInterface::kAudio == track->type()) {
// TODO(perkj): Better ssrc?
// Does talk_base::CreateRandomNonZeroId() generate unique id?
if (track->ssrc() == 0)
@@ -312,7 +312,7 @@ void PeerConnectionSignaling::InitMediaSessionOptions(
track->label(),
stream->label()));
}
if (MediaStreamTrack::kVideo == track->type()) {
if (MediaStreamTrackInterface::kVideo == track->type()) {
if (track->ssrc() == 0)
track->set_ssrc(++ssrc_counter_); // TODO(perkj): Better ssrc?
options->video_sources.push_back(cricket::SourceParam(track->ssrc(),
@@ -356,9 +356,9 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->cname);
}
scoped_refptr<AudioTrack> track(AudioTrackImpl::Create(it->description,
it->ssrc));
track->set_state(MediaStreamTrack::kLive);
scoped_refptr<AudioTrackInterface> track(
AudioTrack::Create(it->description, it->ssrc));
track->set_state(MediaStreamTrackInterface::kLive);
new_streams_it->second->AddTrack(track);
} else {
@@ -390,10 +390,10 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->cname);
}
scoped_refptr<VideoTrack> track(VideoTrackImpl::Create(it->description,
it->ssrc));
scoped_refptr<VideoTrackInterface> track(
VideoTrack::Create(it->description, it->ssrc));
new_streams_it->second->AddTrack(track);
track->set_state(MediaStreamTrack::kLive);
track->set_state(MediaStreamTrackInterface::kLive);
} else {
scoped_refptr<MediaStreamProxy> stream(old_streams_it->second);
@@ -411,7 +411,7 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(new_stream->label());
if (old_streams_it == remote_streams_.end()) {
new_stream->set_ready_state(MediaStream::kLive);
new_stream->set_ready_state(MediaStreamInterface::kLive);
SignalRemoteStreamAdded(new_stream);
}
}
@@ -426,10 +426,11 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
RemoteStreamMap::iterator new_streams_it =
current_streams.find(old_stream->label());
if (new_streams_it == current_streams.end()) {
old_stream->set_ready_state(MediaStream::kEnded);
scoped_refptr<MediaStreamTrackList> tracklist(old_stream->tracks());
old_stream->set_ready_state(MediaStreamInterface::kEnded);
scoped_refptr<MediaStreamTrackListInterface> tracklist(
old_stream->tracks());
for (size_t j = 0; j < tracklist->count(); ++j) {
tracklist->at(j)->set_state(MediaStreamTrack::kEnded);
tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
SignalRemoteStreamRemoved(old_stream);
}
@@ -445,23 +446,24 @@ void PeerConnectionSignaling::UpdateRemoteStreams(
void PeerConnectionSignaling::UpdateSendingLocalStreams(
const cricket::SessionDescription* answer_desc,
StreamCollection* negotiated_streams) {
typedef std::pair<std::string, scoped_refptr<MediaStream> > MediaStreamPair;
typedef std::pair<std::string, scoped_refptr<MediaStreamInterface> >
MediaStreamPair;
LocalStreamMap current_local_streams;
for (size_t i = 0; i < negotiated_streams->count(); ++i) {
scoped_refptr<MediaStream> stream = negotiated_streams->at(i);
scoped_refptr<MediaStreamTrackList> tracklist(stream->tracks());
scoped_refptr<MediaStreamInterface> stream = negotiated_streams->at(i);
scoped_refptr<MediaStreamTrackListInterface> tracklist(stream->tracks());
bool stream_ok = false; // A stream is ok if at least one track succeed.
for (size_t j = 0; j < tracklist->count(); ++j) {
scoped_refptr<MediaStreamTrack> track = tracklist->at(j);
if (MediaStreamTrack::kAudio == track->type()) {
scoped_refptr<MediaStreamTrackInterface> track = tracklist->at(j);
if (MediaStreamTrackInterface::kAudio == track->type()) {
const cricket::ContentInfo* audio_content =
GetFirstAudioContent(answer_desc);
if (!audio_content) { // The remote does not accept audio.
track->set_state(MediaStreamTrack::kFailed);
track->set_state(MediaStreamTrackInterface::kFailed);
continue;
}
const cricket::AudioContentDescription* audio_desc =
@@ -470,17 +472,17 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
// TODO(perkj): Do we need to store the codec in the track?
if (audio_desc->codecs().size() <= 0) {
// No common codec.
track->set_state(MediaStreamTrack::kFailed);
track->set_state(MediaStreamTrackInterface::kFailed);
}
track->set_state(MediaStreamTrack::kLive);
track->set_state(MediaStreamTrackInterface::kLive);
stream_ok = true;
}
if (MediaStreamTrack::kVideo == track->type()) {
if (MediaStreamTrackInterface::kVideo == track->type()) {
const cricket::ContentInfo* video_content =
GetFirstVideoContent(answer_desc);
if (!video_content) { // The remote does not accept video.
track->set_state(MediaStreamTrack::kFailed);
track->set_state(MediaStreamTrackInterface::kFailed);
continue;
}
const cricket::VideoContentDescription* video_desc =
@@ -489,19 +491,19 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
// TODO(perkj): Do we need to store the codec in the track?
if (video_desc->codecs().size() <= 0) {
// No common codec.
track->set_state(MediaStreamTrack::kFailed);
track->set_state(MediaStreamTrackInterface::kFailed);
}
track->set_state(MediaStreamTrack::kLive);
track->set_state(MediaStreamTrackInterface::kLive);
stream_ok = true;
}
}
if (stream_ok) {
// We have successfully negotiated to send this stream.
// Change the stream and store it as successfully negotiated.
stream->set_ready_state(MediaStream::kLive);
stream->set_ready_state(MediaStreamInterface::kLive);
current_local_streams.insert(MediaStreamPair(stream->label(), stream));
} else {
stream->set_ready_state(MediaStream::kEnded);
stream->set_ready_state(MediaStreamInterface::kEnded);
}
}
@@ -511,13 +513,15 @@ void PeerConnectionSignaling::UpdateSendingLocalStreams(
for (LocalStreamMap::iterator it = local_streams_.begin();
it != local_streams_.end();
++it) {
scoped_refptr<MediaStream> old_stream(it->second);
MediaStream* new_streams = negotiated_streams->find(old_stream->label());
scoped_refptr<MediaStreamInterface> old_stream(it->second);
MediaStreamInterface* new_streams =
negotiated_streams->find(old_stream->label());
if (new_streams == NULL) {
old_stream->set_ready_state(MediaStream::kEnded);
scoped_refptr<MediaStreamTrackList> tracklist(old_stream->tracks());
old_stream->set_ready_state(MediaStreamInterface::kEnded);
scoped_refptr<MediaStreamTrackListInterface> tracklist(
old_stream->tracks());
for (size_t j = 0; j < tracklist->count(); ++j) {
tracklist->at(j)->set_state(MediaStreamTrack::kEnded);
tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
}
}
@@ -111,10 +111,10 @@ class PeerConnectionSignaling : public WebRtcSessionObserver,
sigslot::signal1<const std::string&> SignalNewPeerConnectionMessage;
// A new remote stream have been discovered.
sigslot::signal1<MediaStream*> SignalRemoteStreamAdded;
sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamAdded;
// Remote stream is no longer available.
sigslot::signal1<MediaStream*> SignalRemoteStreamRemoved;
sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamRemoved;
// Remote PeerConnection sent an error message.
sigslot::signal1<PeerConnectionMessage::ErrorCode> SignalErrorMessageReceived;
@@ -151,7 +151,7 @@ class PeerConnectionSignaling : public WebRtcSessionObserver,
typedef std::map<std::string, scoped_refptr<MediaStreamProxy> >
RemoteStreamMap;
RemoteStreamMap remote_streams_;
typedef std::map<std::string, scoped_refptr<MediaStream> >
typedef std::map<std::string, scoped_refptr<MediaStreamInterface> >
LocalStreamMap;
LocalStreamMap local_streams_;
cricket::Candidates candidates_;
@@ -44,12 +44,14 @@ static const int kWaitTime = 5000;
namespace webrtc {
typedef std::map<std::string, scoped_refptr<MediaStream> > MediaStreamMap;
typedef std::pair<std::string, scoped_refptr<MediaStream> > RemotePair;
typedef std::map<std::string,
scoped_refptr<MediaStreamInterface> > MediaStreamMap;
typedef std::pair<std::string, scoped_refptr<MediaStreamInterface> > RemotePair;
class MockMediaTrackObserver : public webrtc::Observer {
public:
explicit MockMediaTrackObserver(MediaStreamTrack* track) : track_(track) {
explicit MockMediaTrackObserver(MediaStreamTrackInterface* track)
: track_(track) {
track_state = track->state();
track->RegisterObserver(this);
}
@@ -58,14 +60,15 @@ class MockMediaTrackObserver : public webrtc::Observer {
track_state = track_->state();
}
webrtc::MediaStreamTrack::TrackState track_state;
webrtc::MediaStreamTrackInterface::TrackState track_state;
private:
scoped_refptr<MediaStreamTrack> track_;
scoped_refptr<MediaStreamTrackInterface> track_;
};
class MockMediaStreamObserver : public webrtc::Observer {
public:
explicit MockMediaStreamObserver(MediaStream* stream) : stream_(stream) {
explicit MockMediaStreamObserver(MediaStreamInterface* stream)
: stream_(stream) {
ready_state = stream->ready_state();
stream_->RegisterObserver(this);
}
@@ -74,9 +77,9 @@ class MockMediaStreamObserver : public webrtc::Observer {
ready_state = stream_->ready_state();
}
webrtc::MediaStream::ReadyState ready_state;
webrtc::MediaStreamInterface::ReadyState ready_state;
private:
scoped_refptr<MediaStream> stream_;
scoped_refptr<MediaStreamInterface> stream_;
};
class MockSignalingObserver : public sigslot::has_slots<> {
@@ -86,14 +89,14 @@ class MockSignalingObserver : public sigslot::has_slots<> {
}
// New remote stream have been discovered.
virtual void OnRemoteStreamAdded(MediaStream* remote_stream) {
EXPECT_EQ(MediaStream::kLive, remote_stream->ready_state());
virtual void OnRemoteStreamAdded(MediaStreamInterface* remote_stream) {
EXPECT_EQ(MediaStreamInterface::kLive, remote_stream->ready_state());
remote_media_streams_.insert(RemotePair(remote_stream->label(),
remote_stream));
}
// Remote stream is no longer available.
virtual void OnRemoteStreamRemoved(MediaStream* remote_stream) {
virtual void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream) {
EXPECT_NE(remote_media_streams_.find(remote_stream->label()),
remote_media_streams_.end());
remote_media_streams_.erase(remote_stream->label());
@@ -129,7 +132,7 @@ class MockSignalingObserver : public sigslot::has_slots<> {
remote_local_collection_.release();
}
MediaStream* RemoteStream(const std::string& label) {
MediaStreamInterface* RemoteStream(const std::string& label) {
MediaStreamMap::iterator it = remote_media_streams_.find(label);
if (it != remote_media_streams_.end())
return it->second;
@@ -234,11 +237,12 @@ class PeerConnectionSignalingTest: public testing::Test {
TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
MockMediaStreamObserver stream_observer1(stream);
// Add a local audio track.
scoped_refptr<LocalAudioTrack> audio_track(
scoped_refptr<LocalAudioTrackInterface> audio_track(
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
stream->AddTrack(audio_track);
MockMediaTrackObserver track_observer1(audio_track);
@@ -248,9 +252,10 @@ TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
StreamCollectionImpl::Create());
local_collection1->AddStream(stream);
// Verify that the local stream is now initializing.
EXPECT_EQ(MediaStream::kInitializing, stream_observer1.ready_state);
EXPECT_EQ(MediaStreamInterface::kInitializing, stream_observer1.ready_state);
// Verify that the audio track is now initializing.
EXPECT_EQ(MediaStreamTrack::kInitializing, track_observer1.track_state);
EXPECT_EQ(MediaStreamTrackInterface::kInitializing,
track_observer1.track_state);
// Peer 2 only receive. Create an empty collection
scoped_refptr<StreamCollectionImpl> local_collection2(
@@ -289,9 +294,9 @@ TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
// Verify that the local stream is now sending.
EXPECT_EQ(MediaStream::kLive, stream_observer1.ready_state);
EXPECT_EQ(MediaStreamInterface::kLive, stream_observer1.ready_state);
// Verify that the local audio track is now sending.
EXPECT_EQ(MediaStreamTrack::kLive, track_observer1.track_state);
EXPECT_EQ(MediaStreamTrackInterface::kLive, track_observer1.track_state);
// Verify that PeerConnection2 is aware of the sending stream.
EXPECT_TRUE(observer2_->RemoteStream(label) != NULL);
@@ -307,10 +312,11 @@ TEST_F(PeerConnectionSignalingTest, Glare) {
signaling2_->OnCandidatesReady(candidates_);
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
// Add a local audio track.
scoped_refptr<LocalAudioTrack> audio_track(
scoped_refptr<LocalAudioTrackInterface> audio_track(
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
stream->AddTrack(audio_track);
@@ -370,18 +376,19 @@ TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
signaling2_->OnCandidatesReady(candidates_);
// Create a local stream.
std::string label(kStreamLabel1);
scoped_refptr<LocalMediaStream> stream(MediaStreamImpl::Create(label));
scoped_refptr<LocalMediaStreamInterface> stream(
MediaStreamImpl::Create(label));
MockMediaStreamObserver stream_observer1(stream);
// Add a local audio track.
scoped_refptr<LocalAudioTrack> audio_track(
scoped_refptr<LocalAudioTrackInterface> audio_track(
CreateLocalAudioTrack(kAudioTrackLabel1, NULL));
stream->AddTrack(audio_track);
MockMediaTrackObserver track_observer1(audio_track);
audio_track->RegisterObserver(&track_observer1);
// Add a local video track.
scoped_refptr<LocalVideoTrack> video_track(
scoped_refptr<LocalVideoTrackInterface> video_track(
CreateLocalVideoTrack(kAudioTrackLabel1, NULL));
stream->AddTrack(audio_track);
@@ -414,8 +421,8 @@ TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
talk_base::Thread::Current()->ProcessMessages(1);
// Verify that the PeerConnection 2 local stream is now sending.
EXPECT_EQ(MediaStream::kLive, stream_observer1.ready_state);
EXPECT_EQ(MediaStreamTrack::kLive, track_observer1.track_state);
EXPECT_EQ(MediaStreamInterface::kLive, stream_observer1.ready_state);
EXPECT_EQ(MediaStreamTrackInterface::kLive, track_observer1.track_state);
// Verify that PeerConnection1 is aware of the sending stream.
EXPECT_TRUE(observer1_->RemoteStream(label) != NULL);
|
||||
@ -434,8 +441,8 @@ TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
|
||||
EXPECT_TRUE(observer1_->RemoteStream(label) == NULL);
|
||||
|
||||
// Verify that the PeerConnection 2 local stream is now ended.
|
||||
EXPECT_EQ(MediaStream::kEnded, stream_observer1.ready_state);
|
||||
EXPECT_EQ(MediaStreamTrack::kEnded, track_observer1.track_state);
|
||||
EXPECT_EQ(MediaStreamInterface::kEnded, stream_observer1.ready_state);
|
||||
EXPECT_EQ(MediaStreamTrackInterface::kEnded, track_observer1.track_state);
|
||||
|
||||
// Verify that both peers have updated the session descriptions.
|
||||
EXPECT_EQ(3u, provider1_->update_session_description_counter_);
|
||||
|
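Reviewer note (illustration only, not part of this change): the two mock observers above rely on the NotifierImpl/Observer wiring, where set_state(), set_ssrc() and friends call FireOnChanged(), which in turn invokes OnChanged() on every registered observer. Pieced together from the fragments in this diff (the exact webrtc::Observer signature is assumed), a complete track observer looks roughly like this:

// Sketch of the test observer with the renamed *Interface types.
class MockMediaTrackObserver : public webrtc::Observer {
 public:
  explicit MockMediaTrackObserver(webrtc::MediaStreamTrackInterface* track)
      : track_(track) {
    track_state = track_->state();   // remember the initial state
    track_->RegisterObserver(this);  // subscribe to FireOnChanged()
  }
  virtual void OnChanged() {
    track_state = track_->state();   // re-read the state on every notification
  }
  webrtc::MediaStreamTrackInterface::TrackState track_state;
 private:
  scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
};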
@ -55,11 +55,11 @@ class StreamCollectionImpl : public StreamCollection {
return media_streams_.size();
}

virtual MediaStream* at(size_t index) {
virtual MediaStreamInterface* at(size_t index) {
return media_streams_.at(index);
}

virtual MediaStream* find(const std::string& label) {
virtual MediaStreamInterface* find(const std::string& label) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(label) == 0) {
@ -69,7 +69,7 @@ class StreamCollectionImpl : public StreamCollection {
return NULL;
}

void AddStream(MediaStream* stream) {
void AddStream(MediaStreamInterface* stream) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(stream->label()) == 0)
@ -78,7 +78,7 @@ class StreamCollectionImpl : public StreamCollection {
media_streams_.push_back(stream);
}

void RemoveStream(MediaStream* remove_stream) {
void RemoveStream(MediaStreamInterface* remove_stream) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(remove_stream->label()) == 0) {
@ -93,7 +93,7 @@ class StreamCollectionImpl : public StreamCollection {
explicit StreamCollectionImpl(StreamCollectionImpl* original)
: media_streams_(original->media_streams_) {
}
typedef std::vector<scoped_refptr<MediaStream> > StreamVector;
typedef std::vector<scoped_refptr<MediaStreamInterface> > StreamVector;
StreamVector media_streams_;
};
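Reviewer note (illustration only): StreamCollectionImpl now stores and hands out MediaStreamInterface pointers and keys everything on label(). A typical use, in the spirit of the signaling test above (assuming the Create() factories shown in this diff; the label string is a placeholder), would be roughly:

scoped_refptr<StreamCollectionImpl> collection(StreamCollectionImpl::Create());
scoped_refptr<LocalMediaStreamInterface> stream(
    MediaStreamImpl::Create("stream_label"));
collection->AddStream(stream);   // streams with an already-known label appear to be skipped
MediaStreamInterface* found = collection->find(stream->label());
// found == stream.get()
collection->RemoveStream(stream);  // erases the entry with that label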
@ -32,7 +32,7 @@
namespace webrtc {

// VideoRendererImpl take ownership of cricket::VideoRenderer.
class VideoRendererImpl : public VideoRenderer {
class VideoRendererImpl : public VideoRendererInterface {
public:
explicit VideoRendererImpl(cricket::VideoRenderer* renderer)
: renderer_(renderer) {
@ -48,7 +48,7 @@ class VideoRendererImpl : public VideoRenderer {
cricket::VideoRenderer* renderer_;
};

scoped_refptr<VideoRenderer> CreateVideoRenderer(
scoped_refptr<VideoRendererInterface> CreateVideoRenderer(
cricket::VideoRenderer* renderer) {
talk_base::RefCountImpl<VideoRendererImpl>* r =
new talk_base::RefCountImpl<VideoRendererImpl>(renderer);
@ -32,7 +32,7 @@ namespace webrtc {

static const char kVideoTrackKind[] = "video";

VideoTrackImpl::VideoTrackImpl(const std::string& label, uint32 ssrc)
VideoTrack::VideoTrack(const std::string& label, uint32 ssrc)
: enabled_(true),
label_(label),
ssrc_(ssrc),
@ -40,8 +40,8 @@ VideoTrackImpl::VideoTrackImpl(const std::string& label, uint32 ssrc)
video_device_(NULL) {
}

VideoTrackImpl::VideoTrackImpl(const std::string& label,
VideoCaptureModule* video_device)
VideoTrack::VideoTrack(const std::string& label,
VideoCaptureModule* video_device)
: enabled_(true),
label_(label),
ssrc_(0),
@ -49,61 +49,62 @@ VideoTrackImpl::VideoTrackImpl(const std::string& label,
video_device_(video_device) {
}

void VideoTrackImpl::SetRenderer(VideoRenderer* renderer) {
void VideoTrack::SetRenderer(VideoRendererInterface* renderer) {
video_renderer_ = renderer;
NotifierImpl<LocalVideoTrack>::FireOnChanged();
NotifierImpl<LocalVideoTrackInterface>::FireOnChanged();
}

VideoRenderer* VideoTrackImpl::GetRenderer() {
VideoRendererInterface* VideoTrack::GetRenderer() {
return video_renderer_.get();
}

// Get the VideoCapture device associated with this track.
VideoCaptureModule* VideoTrackImpl::GetVideoCapture() {
VideoCaptureModule* VideoTrack::GetVideoCapture() {
return video_device_.get();
}

const char* VideoTrackImpl::kind() const {
const char* VideoTrack::kind() const {
return kVideoTrackKind;
}

bool VideoTrackImpl::set_enabled(bool enable) {
bool VideoTrack::set_enabled(bool enable) {
bool fire_on_change = enable != enabled_;
enabled_ = enable;
if (fire_on_change)
NotifierImpl<LocalVideoTrack>::FireOnChanged();
NotifierImpl<LocalVideoTrackInterface>::FireOnChanged();
}

bool VideoTrackImpl::set_ssrc(uint32 ssrc) {
bool VideoTrack::set_ssrc(uint32 ssrc) {
ASSERT(ssrc_ == 0);
ASSERT(ssrc != 0);
if (ssrc_ != 0)
return false;
ssrc_ = ssrc;
NotifierImpl<LocalVideoTrack>::FireOnChanged();
NotifierImpl<LocalVideoTrackInterface>::FireOnChanged();
return true;
}

bool VideoTrackImpl::set_state(TrackState new_state) {
bool VideoTrack::set_state(TrackState new_state) {
bool fire_on_change = state_ != new_state;
state_ = new_state;
if (fire_on_change)
NotifierImpl<LocalVideoTrack>::FireOnChanged();
NotifierImpl<LocalVideoTrackInterface>::FireOnChanged();
return true;
}

scoped_refptr<VideoTrack> VideoTrackImpl::Create(const std::string& label,
uint32 ssrc) {
talk_base::RefCountImpl<VideoTrackImpl>* track =
new talk_base::RefCountImpl<VideoTrackImpl>(label, ssrc);
scoped_refptr<VideoTrackInterface> VideoTrack::Create(
const std::string& label,
uint32 ssrc) {
talk_base::RefCountImpl<VideoTrack>* track =
new talk_base::RefCountImpl<VideoTrack>(label, ssrc);
return track;
}

scoped_refptr<LocalVideoTrack> CreateLocalVideoTrack(
scoped_refptr<LocalVideoTrackInterface> CreateLocalVideoTrack(
const std::string& label,
VideoCaptureModule* video_device) {
talk_base::RefCountImpl<VideoTrackImpl>* track =
new talk_base::RefCountImpl<VideoTrackImpl>(label, video_device);
talk_base::RefCountImpl<VideoTrack>* track =
new talk_base::RefCountImpl<VideoTrack>(label, video_device);
return track;
}
@ -42,15 +42,15 @@

namespace webrtc {

class VideoTrackImpl : public NotifierImpl<LocalVideoTrack> {
class VideoTrack : public NotifierImpl<LocalVideoTrackInterface> {
public:
static scoped_refptr<VideoTrack> Create(const std::string& label,
static scoped_refptr<VideoTrackInterface> Create(const std::string& label,
uint32 ssrc);
virtual VideoCaptureModule* GetVideoCapture();
virtual void SetRenderer(VideoRenderer* renderer);
VideoRenderer* GetRenderer();
virtual void SetRenderer(VideoRendererInterface* renderer);
VideoRendererInterface* GetRenderer();

virtual const char* kind() const ;
virtual const char* kind() const;
virtual const std::string& label() const { return label_; }
virtual TrackType type() const { return kVideo; }
virtual uint32 ssrc() const { return ssrc_; }
@ -61,8 +61,8 @@ class VideoTrackImpl : public NotifierImpl<LocalVideoTrack> {
virtual bool set_state(TrackState new_state);

protected:
VideoTrackImpl(const std::string& label, uint32 ssrc);
VideoTrackImpl(const std::string& label, VideoCaptureModule* video_device);
VideoTrack(const std::string& label, uint32 ssrc);
VideoTrack(const std::string& label, VideoCaptureModule* video_device);

private:
bool enabled_;
@ -70,7 +70,7 @@ class VideoTrackImpl : public NotifierImpl<LocalVideoTrack> {
uint32 ssrc_;
TrackState state_;
scoped_refptr<VideoCaptureModule> video_device_;
scoped_refptr<VideoRenderer> video_renderer_;
scoped_refptr<VideoRendererInterface> video_renderer_;
};

} // namespace webrtc
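Reviewer note (illustration only): after the rename, callers hold the *Interface types while the concrete VideoTrack and VideoRendererImpl classes stay behind the factory functions. Based on the factories shown in this diff, wiring a capture device to a renderer looks roughly like the sketch below (the cricket_renderer and video_capture_module names are placeholders for objects the application already owns):

scoped_refptr<LocalVideoTrackInterface> track(
    CreateLocalVideoTrack("video_label", video_capture_module));
scoped_refptr<VideoRendererInterface> renderer(
    CreateVideoRenderer(cricket_renderer));  // wraps a cricket::VideoRenderer
track->SetRenderer(renderer);  // stores the renderer and fires OnChanged()
// track->GetRenderer() and track->GetVideoCapture() return what was attached.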
@ -1,331 +0,0 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "talk/app/webrtc/webrtcsessionchannel.h"

#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/webrtc_json_dev.h"
#include "talk/base/logging.h"
#include "talk/base/thread.h"
#include "talk/p2p/base/transportchannel.h"
#include "talk/p2p/base/session.h"
#include "talk/p2p/base/sessiondescription.h"
#include "talk/session/phone/channel.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/session/phone/codec.h"
#include "talk/session/phone/mediasessionclient.h"

namespace webrtc {

enum {
MSG_WEBRTC_SENDSIGNAL = 1,
MSG_WEBRTC_STATECHANGE,
};

static const char* direction_str[] = {
"sendonly",
"recvonly",
"sendrecv",
"inactive"
};

typedef std::vector<cricket::AudioCodec> AudioCodecs;
typedef std::vector<cricket::VideoCodec> VideoCodecs;

struct SendSignalMsgParams : public talk_base::MessageData {
SendSignalMsgParams(const std::vector<cricket::Candidate> candidates)
: candidates_(candidates) {
}
std::vector<cricket::Candidate> candidates_;
};
// TODO(mallinath) - Handling of RTCP packets when remote end point doesn't
// support RTCP muxing.

WebRtcSessionChannel::WebRtcSessionChannel(MediaStreamTrack* track,
cricket::ChannelManager* cmgr,
talk_base::Thread* signal_thread)
: video_(false),
transport_channel_name_(),
enabled_(false),
media_channel_(NULL),
media_stream_track_(track),
channel_manager_(cmgr),
direction_(SD_SENDRECV),
signaling_thread_(signal_thread),
state_(STATE_INIT) {
if (track->kind().compare(kVideoTrackKind) == 0) {
video_ = true;
}
// TODO(mallinath) Register "this" object with track to get OnChanged event.
}

WebRtcSessionChannel::~WebRtcSessionChannel() {
}

void WebRtcSessionChannel::OnChanged() {
enabled_ = !enabled_;
media_channel_->Enable(enabled_);
}

bool WebRtcSessionChannel::Initialize(cricket::BaseSession* session) {
// By default RTCP muxing is enabled on, rtcp flag is set to false
// on cricket::BaseChannel.
if (video_) {
media_channel_.reset(channel_manager_->CreateVideoChannel(
session, media_stream_track_->label(), false, NULL));
transport_channel_name_ = "video_rtp";
} else {
media_channel_.reset(channel_manager_->CreateVoiceChannel(
session, media_stream_track_->label(), false));
transport_channel_name_ = "rtp";
}
ASSERT(!media_channel_.get());
return true;
}

bool WebRtcSessionChannel::EnableMediaChannel(bool enable) {
enabled_ = enable;
return media_channel_->Enable(enable);
}

cricket::SessionDescription* WebRtcSessionChannel::GetChannelMediaDesc() {
cricket::SessionDescription* sdp =
new cricket::SessionDescription();
if (video_) {
cricket::VideoContentDescription* video =
new cricket::VideoContentDescription();
std::vector<cricket::VideoCodec> video_codecs;
channel_manager_->GetSupportedVideoCodecs(&video_codecs);
for (VideoCodecs::const_iterator codec = video_codecs.begin();
codec != video_codecs.end(); ++codec) {
video->AddCodec(*codec);
}
video->SortCodecs();
// Enable RTCP muxing with RTP port
video->set_rtcp_mux(true);
sdp->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP, video);
} else {
cricket::AudioContentDescription* audio =
new cricket::AudioContentDescription();
std::vector<cricket::AudioCodec> audio_codecs;
channel_manager_->GetSupportedAudioCodecs(&audio_codecs);
for (AudioCodecs::const_iterator codec = audio_codecs.begin();
codec != audio_codecs.end(); ++codec) {
audio->AddCodec(*codec);
}
audio->SortCodecs();
// Enable RTCP muxing with RTP port
audio->set_rtcp_mux(true);
sdp->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP, audio);
}
return sdp;
}

void WebRtcSessionChannel::SendSignalingMessage(
const std::vector<cricket::Candidate>& candidates) {
SendSignalMsgParams* msg_param = new SendSignalMsgParams(candidates);
signaling_thread_->Post(this, MSG_WEBRTC_SENDSIGNAL, msg_param);
}

void WebRtcSessionChannel::SendSignalingMessage_s(
const std::vector<cricket::Candidate>& candidates) {
cricket::SessionDescription* sdp = GetChannelMediaDesc();
ASSERT(sdp);
std::string signaling_message;
if (GetSignalingMessage(sdp,
candidates,
video_,
media_stream_track_->label(),
direction_str[direction_],
&signaling_message)) {
set_local_description(sdp);
SignalJSONMessageReady(this, signaling_message);
if (state_ == STATE_INIT) {
SetState(STATE_SENTINITIATE);
} else {
SetState(STATE_SENDRECV);
}
}
// TODO(mallinath) - Handling on error
}

void WebRtcSessionChannel::SetState(State state) {
if (state != state) {
state_ = state;
signaling_thread_->Post(this, MSG_WEBRTC_STATECHANGE);
}
}

void WebRtcSessionChannel::OnStateChange() {
switch (state_) {
case STATE_SENTINITIATE:
case STATE_RECEIVING: {
// Don't do anything yet.
break;
}
case STATE_RECEIVEDINITIATE: {
SetState(STATE_SENTACCEPT);
break;
}
case STATE_SENTACCEPT: {
if (!SetLocalMediaContent(remote_description_, cricket::CA_OFFER)) {
LOG(LS_ERROR) << "Failure in SetLocalMediaContent with CA_OFFER";
SignalSessionChannelError(this, ERROR_CONTENT);
return;
}
SetState(STATE_RECEIVING);
break;
}
case STATE_RECEIVEDACCEPT: {
// Start sending
if (!SetRemoteMediaContent(remote_description_, cricket::CA_ANSWER)) {
LOG(LS_ERROR) << "Failure in SetRemoteMediaContent with CA_ANSWER";
SignalSessionChannelError(this, ERROR_CONTENT);
return;
}
SetState(STATE_SENDING);
break;
}
case STATE_SENDING: {
// Enable channel to start sending to peer
media_channel_->Enable(true);
break;
}
case STATE_SENDRECV: {
// Start sending
if (media_channel_->enabled() &&
!SetLocalMediaContent(remote_description_, cricket::CA_OFFER)) {
LOG(LS_ERROR) << "Failure in SetRemoteMediaContent with CA_ANSWER";
SignalSessionChannelError(this, ERROR_CONTENT);
return;
} else {
if (!SetRemoteMediaContent(local_description_, cricket::CA_ANSWER)) {
LOG(LS_ERROR) << "Failure in SetLocalmediaContent with CA_ANSWER";
SignalSessionChannelError(this, ERROR_CONTENT);
return;
}
media_channel_->Enable(true);
}
break;
}
default:
ASSERT(false);
break;
}
}

bool WebRtcSessionChannel::ProcessRemoteMessage(
cricket::SessionDescription* sdp) {
set_remote_description(sdp);
if (state_ == STATE_SENTINITIATE) {
SetState(STATE_RECEIVEDACCEPT);
} else if (state_ == STATE_INIT) {
SetState(STATE_RECEIVEDINITIATE);
} else if (state_ == STATE_SENDING) {
SetState(STATE_SENDRECV);
}
return true;
}

bool WebRtcSessionChannel::SetLocalMediaContent(
const cricket::SessionDescription* sdp,
cricket::ContentAction action) {
ASSERT(!media_channel_.get());
const cricket::MediaContentDescription* content = NULL;
content = GetFirstContent(sdp, video_);
if (content && !media_channel_->SetLocalContent(content, action)) {
LOG(LS_ERROR) << "Failure in SetLocaContent";
return false;
}
return true;
}

bool WebRtcSessionChannel::SetRemoteMediaContent(
const cricket::SessionDescription* sdp,
cricket::ContentAction action) {
ASSERT(!media_channel_.get());
const cricket::MediaContentDescription* content = NULL;
content = GetFirstContent(sdp, video_);
if (content && !media_channel_->SetRemoteContent(content, action)) {
LOG(LS_ERROR) << "Failure in SetRemoteContent";
return false;
}
return true;
}

const cricket::MediaContentDescription* WebRtcSessionChannel::GetFirstContent(
const cricket::SessionDescription* sdp,
bool video) {
const cricket::ContentInfo* cinfo = NULL;
if (video) {
cinfo = cricket::GetFirstVideoContent(sdp);
} else {
cinfo = cricket::GetFirstAudioContent(sdp);
}
if (cinfo == NULL) {
return NULL;
}
return static_cast<const cricket::MediaContentDescription*>(
cinfo->description);
}

void WebRtcSessionChannel::DestroyMediaChannel() {
ASSERT(media_channel_.get());
if (video_) {
cricket::VideoChannel* video_channel =
static_cast<cricket::VideoChannel*> (media_channel_.get());
channel_manager_->DestroyVideoChannel(video_channel);
} else {
cricket::VoiceChannel* voice_channel =
static_cast<cricket::VoiceChannel*> (media_channel_.get());
channel_manager_->DestroyVoiceChannel(voice_channel);
}
media_channel_.reset(NULL);
enabled_ = false;
}

void WebRtcSessionChannel::OnMessage(talk_base::Message* message) {
talk_base::MessageData* data = message->pdata;
switch (message->message_id) {
case MSG_WEBRTC_SENDSIGNAL: {
SendSignalMsgParams* p = static_cast<SendSignalMsgParams*>(data);
SendSignalingMessage_s(p->candidates_);
delete p;
break;
}
case MSG_WEBRTC_STATECHANGE: {
OnStateChange();
break;
}
default : {
ASSERT(false);
break;
}
}
}

} // namespace webrtc
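Reviewer note (illustration only; the file above is being deleted in this change): webrtcsessionchannel.cc drove a small per-channel state machine. Two details in the old code look off: SetState() compares the parameter with itself (if (state != state)), which presumably was meant to be state_ != state, and SetLocalMediaContent()/SetRemoteMediaContent() assert !media_channel_.get() right before dereferencing media_channel_. For readability, the ProcessRemoteMessage() transitions can be restated as a pure helper (hypothetical name, not part of the tree):

// Maps the current state to the next one when a remote description arrives,
// mirroring WebRtcSessionChannel::ProcessRemoteMessage() above.
WebRtcSessionChannel::State NextStateOnRemoteMessage(
    WebRtcSessionChannel::State current) {
  switch (current) {
    case WebRtcSessionChannel::STATE_SENTINITIATE:
      return WebRtcSessionChannel::STATE_RECEIVEDACCEPT;    // our offer was answered
    case WebRtcSessionChannel::STATE_INIT:
      return WebRtcSessionChannel::STATE_RECEIVEDINITIATE;  // unsolicited offer received
    case WebRtcSessionChannel::STATE_SENDING:
      return WebRtcSessionChannel::STATE_SENDRECV;          // remote side now sends too
    default:
      return current;  // other states are unchanged by a remote message
  }
}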
@ -1,188 +0,0 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef TALK_APP_WEBRTC_WEBRTCSESSIONCHANNEL_H_
#define TALK_APP_WEBRTC_WEBRTCSESSIONCHANNEL_H_

#include <string>
#include <vector>

#include "talk/app/webrtc/mediastream.h"
#include "talk/base/messagehandler.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/sigslot.h"
#include "talk/p2p/base/candidate.h"
#include "talk/p2p/base/sessiondescription.h"

namespace talk_base {
class Thread;
}

namespace cricket {
class BaseChannel;
class ChannelManager;
class BaseSession;
class SessionDescription;
class MediaContentDescription;
}

namespace webrtc {
// NOTE: Channels are responsible for creating the JSON message for media
// stream. This was done to accommodate additional signaling attributes which
// are currenly not available in part of cricket::SessionDescription.
// One example is StreamDirection which will be added as "sendonly"
// "recvonly" "sendrecv" and "inactive".
// Another reason to create session channels is to support uni-directional
// stream management and these channels apply content to cricket::BaseChannel
// not through cricket::BaseSession::SetState.
//
// State transition at local and remote peer
// (Local) (Remote)
// INIT INIT
// | |
// SENTINITIATE (AddStream) RECEIVEDINITIATE (OnAddStream)
// | |
// RECEIVEDACCEPT (StartSend) SENTACCEPT (StartReceive)
// | |
// SENDING RECEIVING
// | |
// SENDRECV (OnAddStream,StartRecv) SENDRECV (AddStream, StartSend)
//
//
class WebRtcSessionChannel : public talk_base::MessageHandler,
public Observer {
public:
enum State {
STATE_INIT, // Channel Initialization state
STATE_SENTINITIATE, // After local AddStream (sendrecv)
STATE_SENTACCEPT, // Accepted incoming stream (recvonly)
STATE_RECEIVEDACCEPT, // Receives acceptance from remote (sendonly)
STATE_RECEIVEDINITIATE, // Initial stream request (onAddStream)
STATE_SENDING, // Starts sending media to remote
STATE_RECEIVING, // starts receiving media
STATE_SENDRECV, // Send and Recv from/to remote
STATE_INVALID, // Invalid state
};

enum StreamDirection {
SD_SENDONLY, // media stream is sendonly
SD_RECVONLY, // media stream is recvonly
SD_SENDRECV, // media stream is both sendrecv
SD_INACTIVE, // media stream is inactive
};

// From cricket::BaseSession
enum Error {
ERROR_NONE = 0, // no error
ERROR_CONTENT = 1, // channel errors in SetLocalContent/SetRemoteContent
};

WebRtcSessionChannel(MediaStreamTrack* track,
cricket::ChannelManager* channel_manager,
talk_base::Thread* signaling_thread);
virtual ~WebRtcSessionChannel();

bool Initialize(cricket::BaseSession* session);
void DestroyMediaChannel();
void OnChanged();
void set_enabled(bool enabled) {
enabled_ = enabled;
}
bool enabled() {
return enabled_;
}

// This will be called from WebRtcSession not from MediaStreamTrack
bool EnableMediaChannel(bool enable);
std::string name() {
return transport_channel_name_;
}
void set_transport_channel_name(const std::string& name) {
transport_channel_name_ = name;
}

MediaStreamTrack* media_stream_track() {
return media_stream_track_;
}
void SendSignalingMessage(
const std::vector<cricket::Candidate>& candidates);

sigslot::signal2<WebRtcSessionChannel*,
const std::string&> SignalJSONMessageReady;
sigslot::signal2<WebRtcSessionChannel*, Error> SignalSessionChannelError;
void SetState(State state);
bool ProcessRemoteMessage(cricket::SessionDescription* sdp);

void set_local_description(cricket::SessionDescription* sdesc) {
if (sdesc != local_description_) {
delete local_description_;
local_description_ = sdesc;
}
}

void set_remote_description(cricket::SessionDescription* sdesc) {
if (sdesc != remote_description_) {
delete remote_description_;
remote_description_ = sdesc;
}
}

private:
void OnMessage(talk_base::Message* message);
void OnStateChange();
// These two methods are used to set directly the media content description
// On BaseChannel, rather than going through BaseSession::SetState
// This will give us the flexibility when to send and receive the data
// based on AddStream
bool SetLocalMediaContent(const cricket::SessionDescription* sdp,
cricket::ContentAction action);
bool SetRemoteMediaContent(const cricket::SessionDescription* sdp,
cricket::ContentAction action);
cricket::SessionDescription* GetChannelMediaDesc();
void SendSignalingMessage_s(
const std::vector<cricket::Candidate>& candidates);
// methods from BaseChannel
const cricket::MediaContentDescription* GetFirstContent(
const cricket::SessionDescription* sdesc,
bool video);

bool video_;
std::string transport_channel_name_;
bool enabled_;
talk_base::scoped_ptr<cricket::BaseChannel> media_channel_;
MediaStreamTrack* media_stream_track_;
cricket::ChannelManager* channel_manager_;
StreamDirection direction_;
talk_base::Thread* signaling_thread_;
State state_;
const cricket::SessionDescription* local_description_;
cricket::SessionDescription* remote_description_;
DISALLOW_COPY_AND_ASSIGN(WebRtcSessionChannel);
};
} // namspace webrtc

#endif // TALK_APP_WEBRTC_WEBRTCSESSIONCHANNEL_H_
@ -96,7 +96,7 @@ void Conductor::OnSignalingMessage(const std::string& msg) {
}

// Called when a remote stream is added
void Conductor::OnAddStream(webrtc::MediaStream* stream) {
void Conductor::OnAddStream(webrtc::MediaStreamInterface* stream) {
LOG(INFO) << __FUNCTION__ << " " << stream->label();

stream->AddRef();
@ -104,7 +104,7 @@ void Conductor::OnAddStream(webrtc::MediaStream* stream) {
stream);
}

void Conductor::OnRemoveStream(webrtc::MediaStream* stream) {
void Conductor::OnRemoveStream(webrtc::MediaStreamInterface* stream) {
LOG(INFO) << __FUNCTION__ << " " << stream->label();
stream->AddRef();
main_wnd_->QueueUIThreadCallback(STREAM_REMOVED,
@ -249,24 +249,25 @@ void Conductor::AddStreams() {
if (active_streams_.find(kStreamLabel) != active_streams_.end())
return; // Already added.

scoped_refptr<webrtc::LocalAudioTrack> audio_track(
scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
webrtc::CreateLocalAudioTrack(kAudioLabel, NULL));

scoped_refptr<webrtc::LocalVideoTrack> video_track(
scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
webrtc::CreateLocalVideoTrack(kVideoLabel, OpenVideoCaptureDevice()));

scoped_refptr<webrtc::VideoRenderer> renderer(webrtc::CreateVideoRenderer(
scoped_refptr<webrtc::VideoRendererInterface> renderer(
webrtc::CreateVideoRenderer(
main_wnd_->local_renderer()));
video_track->SetRenderer(renderer);

scoped_refptr<webrtc::LocalMediaStream> stream =
scoped_refptr<webrtc::LocalMediaStreamInterface> stream =
peer_connection_factory_->CreateLocalMediaStream(kStreamLabel);

stream->AddTrack(audio_track);
stream->AddTrack(video_track);
peer_connection_->AddStream(stream);
peer_connection_->CommitStreamChanges();
typedef std::pair<std::string, scoped_refptr<webrtc::MediaStream> >
typedef std::pair<std::string, scoped_refptr<webrtc::MediaStreamInterface> >
MediaStreamPair;
active_streams_.insert(MediaStreamPair(stream->label(), stream));
main_wnd_->SwitchToStreamingUI();
@ -338,16 +339,18 @@ void Conductor::UIThreadCallback(int msg_id, void* data) {
break;

case NEW_STREAM_ADDED: {
webrtc::MediaStream* stream = reinterpret_cast<webrtc::MediaStream*>(
webrtc::MediaStreamInterface* stream =
reinterpret_cast<webrtc::MediaStreamInterface*>(
data);
scoped_refptr<webrtc::MediaStreamTrackList> tracks =
scoped_refptr<webrtc::MediaStreamTrackListInterface> tracks =
stream->tracks();
for (size_t i = 0; i < tracks->count(); ++i) {
if (tracks->at(i)->type() == webrtc::MediaStreamTrack::kVideo) {
webrtc::VideoTrack* track =
reinterpret_cast<webrtc::VideoTrack*>(tracks->at(i));
if (tracks->at(i)->type() ==
webrtc::MediaStreamTrackInterface::kVideo) {
webrtc::VideoTrackInterface* track =
reinterpret_cast<webrtc::VideoTrackInterface*>(tracks->at(i));
LOG(INFO) << "Setting video renderer for track: " << track->label();
scoped_refptr<webrtc::VideoRenderer> renderer(
scoped_refptr<webrtc::VideoRendererInterface> renderer(
webrtc::CreateVideoRenderer(main_wnd_->remote_renderer()));
track->SetRenderer(renderer);
}
@ -361,7 +364,8 @@ void Conductor::UIThreadCallback(int msg_id, void* data) {
}

case STREAM_REMOVED: {
webrtc::MediaStream* stream = reinterpret_cast<webrtc::MediaStream*>(
webrtc::MediaStreamInterface* stream =
reinterpret_cast<webrtc::MediaStreamInterface*>(
data);
active_streams_.erase(stream->label());
stream->Release();
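Reviewer note (illustration only): Conductor hands raw MediaStreamInterface pointers across the UI-thread queue, so it bumps the refcount manually before posting and the UI-thread handler calls Release() when it is done, as the STREAM_REMOVED case above shows. The pattern, with a hypothetical helper name that is not part of the sample:

// Keep the stream alive while it travels through the UI-thread message queue.
void HandOffToUiThread(webrtc::MediaStreamInterface* stream,
                       MainWindow* wnd, int msg_id) {
  stream->AddRef();                            // balanced by Release() in the handler
  wnd->QueueUIThreadCallback(msg_id, stream);  // handler casts the void* back
}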
@ -67,8 +67,8 @@ class Conductor
virtual void OnMessage(const std::string& msg) {}
virtual void OnSignalingMessage(const std::string& msg);
virtual void OnStateChange(Readiness state) {}
virtual void OnAddStream(webrtc::MediaStream* stream);
virtual void OnRemoveStream(webrtc::MediaStream* stream);
virtual void OnAddStream(webrtc::MediaStreamInterface* stream);
virtual void OnRemoveStream(webrtc::MediaStreamInterface* stream);

//
@ -108,7 +108,8 @@ class Conductor
PeerConnectionClient* client_;
MainWindow* main_wnd_;
std::deque<std::string*> pending_messages_;
std::map<std::string, scoped_refptr<webrtc::MediaStream> > active_streams_;
std::map<std::string,
scoped_refptr<webrtc::MediaStreamInterface> > active_streams_;
};

#endif // PEERCONNECTION_SAMPLES_CLIENT_CONDUCTOR_H_