Remove Peerconnection Dev branch.

BUG= Issue 139:	libjingle gyp warning on Windows
TEST=

Review URL: http://webrtc-codereview.appspot.com/281001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@960 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
perkj@webrtc.org 2011-11-17 08:42:33 +00:00
parent 0403ef419f
commit 9f9af7df7b
68 changed files with 6 additions and 14023 deletions

View File

@ -5,7 +5,6 @@
{
'variables': {
'no_libjingle_logging%': 0,
'peer_connection_dev%': 0,
'libjingle_orig': '../../third_party/libjingle',
'libjingle_mods': '../../third_party_mods/libjingle',
'conditions': [
@ -109,11 +108,6 @@
},
},
'conditions': [
['peer_connection_dev==1', {
'include_dirs': [
'<(libjingle_mods)/source',
],
}],
['inside_chromium_build==1', {
'defines': [
'NO_SOUND_SYSTEM',
@ -479,6 +473,8 @@
'type': 'static_library',
'sources': [
'<(libjingle_orig)/source/talk/p2p/base/candidate.h',
'<(libjingle_orig)/source/talk/session/phone/channel.cc',
'<(libjingle_orig)/source/talk/session/phone/channel.h',
'<(libjingle_orig)/source/talk/p2p/base/common.h',
'<(libjingle_orig)/source/talk/p2p/base/constants.cc',
'<(libjingle_orig)/source/talk/p2p/base/constants.h',
@ -556,6 +552,8 @@
'<(libjingle_orig)/source/talk/session/phone/mediamessages.h',
'<(libjingle_orig)/source/talk/session/phone/mediamonitor.cc',
'<(libjingle_orig)/source/talk/session/phone/mediamonitor.h',
'<(libjingle_orig)/source/talk/session/phone/mediasession.cc',
'<(libjingle_orig)/source/talk/session/phone/mediasession.h',
'<(libjingle_orig)/source/talk/session/phone/mediasessionclient.cc',
'<(libjingle_orig)/source/talk/session/phone/mediasessionclient.h',
'<(libjingle_orig)/source/talk/session/phone/mediasink.h',
@ -630,24 +628,6 @@
'libjingle',
],
} ], # inside_chromium_build
['peer_connection_dev==1', {
'sources': [
'<(libjingle_mods)/source/talk/base/refcount.h',
'<(libjingle_mods)/source/talk/base/scoped_refptr.h',
'<(libjingle_mods)/source/talk/p2p/client/fakeportallocator.h',
'<(libjingle_mods)/source/talk/session/phone/channel.cc',
'<(libjingle_mods)/source/talk/session/phone/channel.h',
'<(libjingle_mods)/source/talk/session/phone/mediasession.cc',
],
},{
'sources': [
'<(libjingle_orig)/source/talk/session/phone/channel.cc',
'<(libjingle_orig)/source/talk/session/phone/channel.h',
'<(libjingle_orig)/source/talk/session/phone/mediasession.cc',
'<(libjingle_orig)/source/talk/session/phone/mediasession.h',
'<(libjingle_orig)/source/talk/session/phone/sourceparams.h',
],
}], # peer_connection_dev
], # conditions
},
# separate project for app
@ -689,104 +669,8 @@
'../../src/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'libjingle_p2p',
],
} ], # inside_chromium_build
['peer_connection_dev==1', {
'include_dirs': [
'<(libjingle_mods)/source',
],
# sources= empties the list of source files and starts a new list.
# peer_connection_dev is independent of the main branch.
'sources=': [
'<(libjingle_mods)/source/talk/app/webrtc_dev/audiotrackimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/audiotrackimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastream.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamhandler.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamhandler.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamprovider.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamtrackproxy.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamtrackproxy.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamproxy.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamproxy.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediatrackimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionfactoryimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionfactoryimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmessage.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmessage.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/sessiondescriptionprovider.h'
'<(libjingle_mods)/source/talk/app/webrtc_dev/streamcollectionimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videorendererimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videotrackimpl.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/videotrackimpl.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcjson.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcjson.h',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcsessionobserver',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcsession.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcsession.h',
],
}], # peer_connection_dev
} ], # inside_chromium_build
], # conditions
},
{
'target_name': 'peerconnection_unittests',
'conditions': [
['peer_connection_dev==1', {
'dependencies': [
'libjingle_app',
'../../testing/gmock.gyp:gmock',
'../../testing/gtest.gyp:gtest',
'../../testing/gtest.gyp:gtest_main',
# TODO(perkj): Temporarily build the client app here to make sure
# nothing is broken.
'source/talk/examples/peerconnection_client/'
'peerconnection_client.gyp:peerconnection_client_dev',
],
'type': 'executable',
'conditions': [
['inside_chromium_build==1', {
'dependencies': [
'../../third_party/webrtc/modules/modules.gyp:audio_device',
'../../third_party/webrtc/modules/modules.gyp:video_capture_module',
'../../third_party/webrtc/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
}, {
'dependencies': [
'../../src/modules/modules.gyp:audio_device',
'../../src/modules/modules.gyp:video_capture_module',
'../../src/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
}],
['OS=="linux"', {
'libraries': [
'-lXext',
'-lX11',
],
}],
], #conditions
'sources': [
'<(libjingle_mods)/source/talk/app/webrtc_dev/test/filevideocapturemodule.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastream_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/mediastreamhandler_unittest.cc',
# TODO(henrike): Re-add when there is no dependency on foreman.yuv
#'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnection_unittests.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionimpl_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionfactory_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionmessage_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/peerconnectionsignaling_unittest.cc',
'<(libjingle_mods)/source/talk/app/webrtc_dev/webrtcsession_unittest.cc',
],
} , {
'type': 'none',
}
], # peer_connection_dev
], # conditions
},
},
],
}

View File

@ -1,71 +0,0 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/audiotrackimpl.h"
#include <string>
namespace webrtc {

// Kind string reported by audio tracks via MediaStreamTrack::kind().
static const char kAudioTrackKind[] = "audio";

// Remote track: no local audio capture device is attached.
AudioTrack::AudioTrack(const std::string& label)
    : MediaTrack<LocalAudioTrackInterface>(label),
      audio_device_(NULL) {
}

// Local track: holds a reference to the capture device it records from.
AudioTrack::AudioTrack(const std::string& label,
                       AudioDeviceModule* audio_device)
    : MediaTrack<LocalAudioTrackInterface>(label),
      audio_device_(audio_device) {
}

// Returns the AudioDeviceModule associated with this track, or NULL for a
// track created without one (e.g. a remote track).
AudioDeviceModule* AudioTrack::GetAudioDevice() {
  return audio_device_.get();
}

// MediaStreamTrack implementation: audio tracks always report "audio".
std::string AudioTrack::kind() const {
  return std::string(kAudioTrackKind);
}

// Factory for a remote audio track; the returned object is reference
// counted via RefCountedObject and handed back as a scoped_refptr.
talk_base::scoped_refptr<AudioTrack> AudioTrack::CreateRemote(
    const std::string& label) {
  return new talk_base::RefCountedObject<AudioTrack>(label);
}

// Factory for a local audio track bound to |audio_device|.
talk_base::scoped_refptr<AudioTrack> AudioTrack::CreateLocal(
    const std::string& label,
    AudioDeviceModule* audio_device) {
  return new talk_base::RefCountedObject<AudioTrack>(label, audio_device);
}

} // namespace webrtc

View File

@ -1,70 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_
#define TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/mediatrackimpl.h"
#include "talk/app/webrtc_dev/notifierimpl.h"
#include "talk/base/scoped_refptr.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "modules/audio_device/main/interface/audio_device.h"
#else
#include "third_party/webrtc/files/include/audio_device.h"
#endif
namespace webrtc {
// Concrete audio track implementing LocalAudioTrackInterface. Instances are
// reference counted; construct them only through the static factories below.
class AudioTrack : public MediaTrack<LocalAudioTrackInterface> {
public:
// Creates a remote audio track (no local capture device attached).
static talk_base::scoped_refptr<AudioTrack> CreateRemote(
const std::string& label);
// Creates a local audio track bound to |audio_device|.
static talk_base::scoped_refptr<AudioTrack> CreateLocal(
const std::string& label,
AudioDeviceModule* audio_device);
// Get the AudioDeviceModule associated with this track; NULL if the track
// was created without one.
virtual AudioDeviceModule* GetAudioDevice();
// Implement MediaStreamTrack: returns the track kind string ("audio").
virtual std::string kind() const;
protected:
// Constructors are protected; use CreateRemote()/CreateLocal().
explicit AudioTrack(const std::string& label);
AudioTrack(const std::string& label, AudioDeviceModule* audio_device);
private:
// Capture device backing a local track (NULL for remote tracks).
talk_base::scoped_refptr<AudioDeviceModule> audio_device_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_

View File

@ -1,189 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains interfaces for MediaStream and MediaTrack. These
// interfaces are used for implementing MediaStream and MediaTrack as defined
// in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These
// interfaces must be used only with PeerConnection. PeerConnectionManager
// interface provides the factory methods to create MediaStream and MediaTracks.
#ifndef TALK_APP_WEBRTC_MEDIASTREAM_H_
#define TALK_APP_WEBRTC_MEDIASTREAM_H_
#include <string>
#include "talk/base/basictypes.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_refptr.h"
// Forward declarations of cricket (libjingle media engine) types used
// through pointers only.
namespace cricket {
class VideoRenderer;
class MediaEngine;
} // namespace cricket
namespace webrtc {
class AudioDeviceModule;
class VideoCaptureModule;
// Generic observer interface: implement to be notified when an observed
// object changes (see NotifierInterface).
class ObserverInterface {
public:
virtual void OnChanged() = 0;
protected:
virtual ~ObserverInterface() {}
};
// Implemented by objects that can be observed; observers receive OnChanged()
// callbacks after registration.
class NotifierInterface {
public:
virtual void RegisterObserver(ObserverInterface* observer) = 0;
virtual void UnregisterObserver(ObserverInterface* observer) = 0;
virtual ~NotifierInterface() {}
};
// Information about a track. Base interface shared by audio and video
// tracks; reference counted and observable.
class MediaStreamTrackInterface : public talk_base::RefCountInterface,
public NotifierInterface {
public:
enum TrackState {
kInitializing, // Track is being negotiated.
kLive = 1, // Track alive
kEnded = 2, // Track has ended
kFailed = 3, // Track negotiation failed.
};
virtual std::string kind() const = 0;
virtual std::string label() const = 0;
virtual bool enabled() const = 0;
virtual TrackState state() const = 0;
virtual bool set_enabled(bool enable) = 0;
// These methods should be called by implementation only.
virtual bool set_state(TrackState new_state) = 0;
};
// Reference counted wrapper for a VideoRenderer. The wrapper owns the
// wrapped (non-refcounted) cricket::VideoRenderer.
class VideoRendererWrapperInterface : public talk_base::RefCountInterface {
public:
virtual cricket::VideoRenderer* renderer() = 0;
protected:
virtual ~VideoRendererWrapperInterface() {}
};
// Creates a reference counted object of type cricket::VideoRenderer.
// webrtc::VideoRendererWrapperInterface take ownership of
// cricket::VideoRenderer.
talk_base::scoped_refptr<VideoRendererWrapperInterface> CreateVideoRenderer(
cricket::VideoRenderer* renderer);
class VideoTrackInterface : public MediaStreamTrackInterface {
public:
// Set the video renderer for a local or remote stream.
// This call will start decoding the received video stream and render it.
// The VideoRendererInterface is stored as a scoped_refptr. This means that
// it is not allowed to call delete renderer after this API has been called.
virtual void SetRenderer(VideoRendererWrapperInterface* renderer) = 0;
// Get the VideoRenderer associated with this track.
virtual VideoRendererWrapperInterface* GetRenderer() = 0;
protected:
virtual ~VideoTrackInterface() {}
};
// A video track backed by a local capture device.
class LocalVideoTrackInterface : public VideoTrackInterface {
public:
// Get the VideoCapture device associated with this track.
// TODO(mallinath) - Update with VideoCapturerWrapper to support both
// cricket and webrtc capture interface.
virtual VideoCaptureModule* GetVideoCapture() = 0;
protected:
virtual ~LocalVideoTrackInterface() {}
};
// Marker interface for audio tracks; adds nothing beyond the base track
// interface.
class AudioTrackInterface : public MediaStreamTrackInterface {
public:
protected:
virtual ~AudioTrackInterface() {}
};
// An audio track backed by a local audio capture device.
class LocalAudioTrackInterface : public AudioTrackInterface {
public:
// Get the AudioDeviceModule associated with this track.
virtual AudioDeviceModule* GetAudioDevice() = 0;
protected:
virtual ~LocalAudioTrackInterface() {}
};
// List of tracks. Read-only, indexed collection of tracks of one kind.
template <class TrackType>
class MediaStreamTrackListInterface : public talk_base::RefCountInterface {
public:
virtual size_t count() = 0;
virtual TrackType* at(size_t index) = 0;
protected:
virtual ~MediaStreamTrackListInterface() {}
};
typedef MediaStreamTrackListInterface<AudioTrackInterface> AudioTracks;
typedef MediaStreamTrackListInterface<VideoTrackInterface> VideoTracks;
// A stream of audio and video tracks, as defined by the W3C MediaStream
// API draft referenced at the top of this file.
class MediaStreamInterface : public talk_base::RefCountInterface,
public NotifierInterface {
public:
virtual std::string label() const = 0;
virtual AudioTracks* audio_tracks() = 0;
virtual VideoTracks* video_tracks() = 0;
enum ReadyState {
kInitializing,
kLive = 1, // Stream alive
kEnded = 2, // Stream has ended
};
virtual ReadyState ready_state() = 0;
// These methods should be called by implementation only.
virtual void set_ready_state(ReadyState state) = 0;
protected:
virtual ~MediaStreamInterface() {}
};
// A locally created stream; tracks can be added until the stream goes live.
class LocalMediaStreamInterface : public MediaStreamInterface {
public:
virtual bool AddTrack(AudioTrackInterface* track) = 0;
virtual bool AddTrack(VideoTrackInterface* track) = 0;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAM_H_

View File

@ -1,389 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamproxy.h"
#include "talk/app/webrtc_dev/mediastreamtrackproxy.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/thread.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackLabel[] = "dummy_video_cam_1";
static const char kAudioTrackLabel[] = "dummy_microphone_1";
using talk_base::scoped_refptr;
using ::testing::Exactly;
namespace {
// Message payload carrying a stream and the ready state to apply to it on
// the signaling thread. Holds the stream in a scoped_refptr so it stays
// alive while the message is in flight.
class ReadyStateMessageData : public talk_base::MessageData {
public:
ReadyStateMessageData(
webrtc::MediaStreamInterface* stream,
webrtc::MediaStreamInterface::ReadyState new_state)
: stream_(stream),
ready_state_(new_state) {
}
scoped_refptr<webrtc::MediaStreamInterface> stream_;
webrtc::MediaStreamInterface::ReadyState ready_state_;
};
// Message payload carrying a track and the track state to apply to it on
// the signaling thread.
class TrackStateMessageData : public talk_base::MessageData {
public:
TrackStateMessageData(
webrtc::MediaStreamTrackInterface* track,
webrtc::MediaStreamTrackInterface::TrackState state)
: track_(track),
state_(state) {
}
scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
webrtc::MediaStreamTrackInterface::TrackState state_;
};
} // namespace anonymous
namespace webrtc {
// Helper class to test Observer. OnChanged() asserts that notifications
// arrive on the signaling thread before forwarding to the gmock method.
class MockObserver : public ObserverInterface {
public:
explicit MockObserver(talk_base::Thread* signaling_thread)
: signaling_thread_(signaling_thread) {
}
MOCK_METHOD0(DoOnChanged, void());
virtual void OnChanged() {
// All observer callbacks must be delivered on the signaling thread.
ASSERT_TRUE(talk_base::Thread::Current() == signaling_thread_);
DoOnChanged();
}
private:
talk_base::Thread* signaling_thread_;
};
// Wraps a real MediaStream implementation and verifies, for every call,
// that it is invoked on the signaling thread before delegating.
class MockMediaStream: public LocalMediaStreamInterface {
public:
MockMediaStream(const std::string& label, talk_base::Thread* signaling_thread)
: stream_impl_(MediaStream::Create(label)),
signaling_thread_(signaling_thread) {
}
virtual void RegisterObserver(webrtc::ObserverInterface* observer) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
stream_impl_->RegisterObserver(observer);
}
virtual void UnregisterObserver(webrtc::ObserverInterface* observer) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
stream_impl_->UnregisterObserver(observer);
}
virtual std::string label() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->label();
}
virtual AudioTracks* audio_tracks() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->audio_tracks();
}
virtual VideoTracks* video_tracks() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->video_tracks();
}
virtual ReadyState ready_state() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->ready_state();
}
virtual void set_ready_state(ReadyState state) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->set_ready_state(state);
}
virtual bool AddTrack(AudioTrackInterface* audio_track) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->AddTrack(audio_track);
}
virtual bool AddTrack(VideoTrackInterface* video_track) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return stream_impl_->AddTrack(video_track);
}
private:
// The real stream all calls are forwarded to.
scoped_refptr<MediaStream> stream_impl_;
talk_base::Thread* signaling_thread_;
};
// Wraps a real track implementation of interface T and verifies, for every
// call, that it is invoked on the signaling thread before delegating.
template <class T>
class MockMediaStreamTrack: public T {
public:
MockMediaStreamTrack(T* implementation,
talk_base::Thread* signaling_thread)
: track_impl_(implementation),
signaling_thread_(signaling_thread) {
}
virtual void RegisterObserver(webrtc::ObserverInterface* observer) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
track_impl_->RegisterObserver(observer);
}
virtual void UnregisterObserver(webrtc::ObserverInterface* observer) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
track_impl_->UnregisterObserver(observer);
}
virtual std::string kind() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->kind();
}
virtual std::string label() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->label();
}
virtual bool enabled() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->enabled();
}
virtual MediaStreamTrackInterface::TrackState state() const {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->state();
}
virtual bool set_enabled(bool enabled) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->set_enabled(enabled);
}
virtual bool set_state(webrtc::MediaStreamTrackInterface::TrackState state) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->set_state(state);
}
protected:
// The real track all calls are forwarded to.
scoped_refptr<T> track_impl_;
talk_base::Thread* signaling_thread_;
};
// Thread-checking wrapper for a local video track; adds the video-specific
// renderer and capture-device accessors.
class MockLocalVideoTrack
: public MockMediaStreamTrack<LocalVideoTrackInterface> {
public:
MockLocalVideoTrack(LocalVideoTrackInterface* implementation,
talk_base::Thread* signaling_thread)
: MockMediaStreamTrack<LocalVideoTrackInterface>(implementation,
signaling_thread) {
}
virtual void SetRenderer(webrtc::VideoRendererWrapperInterface* renderer) {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
track_impl_->SetRenderer(renderer);
}
virtual VideoRendererWrapperInterface* GetRenderer() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->GetRenderer();
}
virtual VideoCaptureModule* GetVideoCapture() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->GetVideoCapture();
}
};
// Thread-checking wrapper for a local audio track; adds the audio-device
// accessor.
class MockLocalAudioTrack
: public MockMediaStreamTrack<LocalAudioTrackInterface> {
public:
MockLocalAudioTrack(LocalAudioTrackInterface* implementation,
talk_base::Thread* signaling_thread)
: MockMediaStreamTrack<LocalAudioTrackInterface>(implementation,
signaling_thread) {
}
virtual AudioDeviceModule* GetAudioDevice() {
EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
return track_impl_->GetAudioDevice();
}
};
// Fixture that builds stream/track proxies backed by the thread-checking
// mocks above, plus helpers to mutate state on the signaling thread by
// message passing (state may only change on that thread).
class MediaStreamTest: public testing::Test,
public talk_base::MessageHandler {
protected:
virtual void SetUp() {
// Dedicated signaling thread; proxies marshal all calls onto it.
signaling_thread_ .reset(new talk_base::Thread());
ASSERT_TRUE(signaling_thread_->Start());
std::string label(kStreamLabel1);
// Create a stream proxy object that uses our mocked
// version of a LocalMediaStream.
scoped_refptr<MockMediaStream> mock_stream(
new talk_base::RefCountedObject<MockMediaStream>(label,
signaling_thread_.get()));
stream_ = MediaStreamProxy::Create(label, signaling_thread_.get(),
mock_stream);
ASSERT_TRUE(stream_.get());
EXPECT_EQ(label, stream_->label());
EXPECT_EQ(MediaStreamInterface::kInitializing, stream_->ready_state());
// Create a video track proxy object that uses our mocked
// version of a LocalVideoTrack
scoped_refptr<VideoTrack> video_track_impl(
VideoTrack::CreateLocal(kVideoTrackLabel, NULL));
scoped_refptr<MockLocalVideoTrack> mock_videotrack(
new talk_base::RefCountedObject<MockLocalVideoTrack>(video_track_impl,
signaling_thread_.get()));
video_track_ = VideoTrackProxy::CreateLocal(mock_videotrack,
signaling_thread_.get());
ASSERT_TRUE(video_track_.get());
EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
// Create an audio track proxy object that uses our mocked
// version of a LocalAudioTrack
scoped_refptr<AudioTrack> audio_track_impl(
AudioTrack::CreateLocal(kAudioTrackLabel, NULL));
scoped_refptr<MockLocalAudioTrack> mock_audiotrack(
new talk_base::RefCountedObject<MockLocalAudioTrack>(audio_track_impl,
signaling_thread_.get()));
audio_track_ = AudioTrackProxy::CreateLocal(mock_audiotrack,
signaling_thread_.get());
ASSERT_TRUE(audio_track_.get());
EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
}
// Message ids dispatched in OnMessage() below.
enum {
MSG_SET_READYSTATE,
MSG_SET_TRACKSTATE,
};
// Set the ready state on the signaling thread.
// State can only be changed on the signaling thread.
// Send() blocks until the message has been processed.
void SetReadyState(MediaStreamInterface* stream,
MediaStreamInterface::ReadyState new_state) {
ReadyStateMessageData state(stream, new_state);
signaling_thread_->Send(this, MSG_SET_READYSTATE, &state);
}
// Set the track state on the signaling thread.
// State can only be changed on the signaling thread.
void SetTrackState(MediaStreamTrackInterface* track,
MediaStreamTrackInterface::TrackState new_state) {
TrackStateMessageData state(track, new_state);
signaling_thread_->Send(this, MSG_SET_TRACKSTATE, &state);
}
talk_base::scoped_ptr<talk_base::Thread> signaling_thread_;
scoped_refptr<LocalMediaStreamInterface> stream_;
scoped_refptr<LocalVideoTrackInterface> video_track_;
scoped_refptr<LocalAudioTrackInterface> audio_track_;
private:
// Implements talk_base::MessageHandler. Runs on the signaling thread and
// applies the state change carried in the message payload.
virtual void OnMessage(talk_base::Message* msg) {
switch (msg->message_id) {
case MSG_SET_READYSTATE: {
ReadyStateMessageData* state =
static_cast<ReadyStateMessageData*>(msg->pdata);
state->stream_->set_ready_state(state->ready_state_);
break;
}
case MSG_SET_TRACKSTATE: {
TrackStateMessageData* state =
static_cast<TrackStateMessageData*>(msg->pdata);
state->track_->set_state(state->state_);
break;
}
default:
break;
}
}
};
// Adds one video and one audio track to the local stream and verifies the
// tracks can be read back with the expected labels and enabled state.
TEST_F(MediaStreamTest, CreateLocalStream) {
EXPECT_TRUE(stream_->AddTrack(video_track_));
EXPECT_TRUE(stream_->AddTrack(audio_track_));
ASSERT_EQ(1u, stream_->video_tracks()->count());
ASSERT_EQ(1u, stream_->audio_tracks()->count());
// Verify the video track.
scoped_refptr<webrtc::MediaStreamTrackInterface> track(
stream_->video_tracks()->at(0));
EXPECT_EQ(0, track->label().compare(kVideoTrackLabel));
EXPECT_TRUE(track->enabled());
// Verify the audio track.
track = stream_->audio_tracks()->at(0);
EXPECT_EQ(0, track->label().compare(kAudioTrackLabel));
EXPECT_TRUE(track->enabled());
}
// Verifies that moving the stream to kLive notifies observers exactly once
// and that tracks can no longer be added afterwards.
TEST_F(MediaStreamTest, ChangeStreamState) {
MockObserver observer(signaling_thread_.get());
stream_->RegisterObserver(&observer);
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetReadyState(stream_, MediaStreamInterface::kLive);
EXPECT_EQ(MediaStreamInterface::kLive, stream_->ready_state());
// It should not be possible to add
// streams when the state has changed to live.
EXPECT_FALSE(stream_->AddTrack(audio_track_));
EXPECT_EQ(0u, stream_->audio_tracks()->count());
}
// Verifies that each mutation of a video track (enabled flag, track state,
// renderer) notifies a registered observer exactly once, and that the
// renderer set on the track is the one returned by GetRenderer().
TEST_F(MediaStreamTest, ChangeVideoTrack) {
  MockObserver observer(signaling_thread_.get());
  video_track_->RegisterObserver(&observer);
  // Disabling the track must trigger exactly one notification.
  EXPECT_CALL(observer, DoOnChanged())
      .Times(Exactly(1));
  video_track_->set_enabled(false);
  // Fix: this previously asserted EXPECT_FALSE(video_track_->state()),
  // which passed only because kInitializing == 0 and never verified the
  // enabled flag. Check enabled(), mirroring ChangeAudioTrack.
  EXPECT_FALSE(video_track_->enabled());
  // Changing the track state must trigger exactly one notification.
  EXPECT_CALL(observer, DoOnChanged())
      .Times(Exactly(1));
  SetTrackState(video_track_, MediaStreamTrackInterface::kLive);
  EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
  // Setting a renderer must trigger exactly one notification, and the same
  // wrapper instance must be returned by GetRenderer().
  EXPECT_CALL(observer, DoOnChanged())
      .Times(Exactly(1));
  scoped_refptr<VideoRendererWrapperInterface> renderer(
      CreateVideoRenderer(NULL));
  video_track_->SetRenderer(renderer.get());
  EXPECT_TRUE(renderer.get() == video_track_->GetRenderer());
}
// Verifies that disabling an audio track and changing its state each notify
// a registered observer exactly once.
TEST_F(MediaStreamTest, ChangeAudioTrack) {
MockObserver observer(signaling_thread_.get());
audio_track_->RegisterObserver(&observer);
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
audio_track_->set_enabled(false);
EXPECT_FALSE(audio_track_->enabled());
EXPECT_CALL(observer, DoOnChanged())
.Times(Exactly(1));
SetTrackState(audio_track_, MediaStreamTrackInterface::kLive);
EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());
}
} // namespace webrtc

View File

@ -1,255 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/mediastreamhandler.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "modules/video_capture/main/interface/video_capture.h"
#else
#include "third_party/webrtc/files/include/video_capture.h"
#endif
namespace webrtc {
// Constructs a handler that observes |track| and pushes requested changes
// to |provider|. Caches the track's current state, enabled flag and
// renderer so OnChanged() can detect what actually changed.
// Neither |track| nor |provider| is owned.
VideoTrackHandler::VideoTrackHandler(VideoTrackInterface* track,
                                     MediaProviderInterface* provider)
    : provider_(provider),
      video_track_(track),
      state_(track->state()),
      enabled_(track->enabled()),
      renderer_(track->GetRenderer()) {
  video_track_->RegisterObserver(this);
}

VideoTrackHandler::~VideoTrackHandler() {
  video_track_->UnregisterObserver(this);
}

// Called by the observed track. Compares the track's current state,
// renderer and enabled flag against the cached copies, updates the cache
// and dispatches to the matching On*Changed() hook for each difference.
void VideoTrackHandler::OnChanged() {
  if (state_ != video_track_->state()) {
    state_ = video_track_->state();
    OnStateChanged();
  }
  if (renderer_.get() != video_track_->GetRenderer()) {
    renderer_ = video_track_->GetRenderer();
    OnRendererChanged();
  }
  if (enabled_ != video_track_->enabled()) {
    enabled_ = video_track_->enabled();
    OnEnabledChanged();
  }
}
LocalVideoTrackHandler::LocalVideoTrackHandler(
    LocalVideoTrackInterface* track,
    MediaProviderInterface* provider)
    : VideoTrackHandler(track, provider),
      local_video_track_(track) {
}

LocalVideoTrackHandler::~LocalVideoTrackHandler() {
  // Since cricket::VideoRenderer is not reference counted
  // we need to remove the renderer before we are deleted.
  provider_->SetLocalRenderer(video_track_->label(), NULL);
}

// Pushes the track's new renderer (or NULL if it was cleared) to the
// provider's local rendering path.
void LocalVideoTrackHandler::OnRendererChanged() {
  VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
  if (renderer)
    provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
  else
    provider_->SetLocalRenderer(video_track_->label(), NULL);
}

// When the track goes live, connects its capture device and current
// renderer to the provider. Other state transitions are ignored here.
void LocalVideoTrackHandler::OnStateChanged() {
  if (local_video_track_->state() == VideoTrackInterface::kLive) {
    provider_->SetCaptureDevice(local_video_track_->label(),
                                local_video_track_->GetVideoCapture());
    VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
    if (renderer)
      provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
    else
      provider_->SetLocalRenderer(video_track_->label(), NULL);
  }
}

void LocalVideoTrackHandler::OnEnabledChanged() {
  // TODO(perkj) What should happen when enabled is changed?
}
RemoteVideoTrackHandler::RemoteVideoTrackHandler(
VideoTrackInterface* track,
MediaProviderInterface* provider)
: VideoTrackHandler(track, provider),
remote_video_track_(track) {
}
RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
// Since cricket::VideoRenderer is not reference counted
// we need to remove the renderer before we are deleted.
provider_->SetRemoteRenderer(video_track_->label(), NULL);
}
void RemoteVideoTrackHandler::OnRendererChanged() {
VideoRendererWrapperInterface* renderer(video_track_->GetRenderer());
if (renderer)
provider_->SetRemoteRenderer(video_track_->label(), renderer->renderer());
else
provider_->SetRemoteRenderer(video_track_->label(), NULL);
}
void RemoteVideoTrackHandler::OnStateChanged() {
}
void RemoteVideoTrackHandler::OnEnabledChanged() {
// TODO(perkj): What should happen when enabled is changed?
}
MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,
                                       MediaProviderInterface* provider)
    : stream_(stream),
      provider_(provider) {
}

MediaStreamHandler::~MediaStreamHandler() {
  // This handler owns the per-track helpers it created.
  for (size_t i = 0; i < video_handlers_.size(); ++i) {
    delete video_handlers_[i];
  }
}

MediaStreamInterface* MediaStreamHandler::stream() {
  return stream_.get();
}

void MediaStreamHandler::OnChanged() {
  // TODO(perkj): Implement state change and enabled changed.
}
// Creates one LocalVideoTrackHandler per video track currently in
// |stream|. Tracks added to the stream afterwards are not picked up here.
LocalMediaStreamHandler::LocalMediaStreamHandler(
    MediaStreamInterface* stream,
    MediaProviderInterface* provider)
    : MediaStreamHandler(stream, provider) {
  VideoTracks* tracklist(stream->video_tracks());
  for (size_t j = 0; j < tracklist->count(); ++j) {
    // NOTE(review): assumes every track in a local stream really is a
    // LocalVideoTrackInterface -- the static_cast is unchecked.
    LocalVideoTrackInterface* track =
        static_cast<LocalVideoTrackInterface*>(tracklist->at(j));
    VideoTrackHandler* handler(new LocalVideoTrackHandler(track, provider));
    video_handlers_.push_back(handler);
  }
}

// Creates one RemoteVideoTrackHandler per video track currently in
// |stream|.
RemoteMediaStreamHandler::RemoteMediaStreamHandler(
    MediaStreamInterface* stream,
    MediaProviderInterface* provider)
    : MediaStreamHandler(stream, provider) {
  VideoTracks* tracklist(stream->video_tracks());
  for (size_t j = 0; j < tracklist->count(); ++j) {
    VideoTrackInterface* track =
        static_cast<VideoTrackInterface*>(tracklist->at(j));
    VideoTrackHandler* handler(new RemoteVideoTrackHandler(track, provider));
    video_handlers_.push_back(handler);
  }
}
MediaStreamHandlers::MediaStreamHandlers(MediaProviderInterface* provider)
    : provider_(provider) {
}

// Deletes every remaining stream handler, remote and local.
MediaStreamHandlers::~MediaStreamHandlers() {
  for (StreamHandlerList::iterator it = remote_streams_handlers_.begin();
       it != remote_streams_handlers_.end(); ++it) {
    delete *it;
  }
  for (StreamHandlerList::iterator it = local_streams_handlers_.begin();
       it != local_streams_handlers_.end(); ++it) {
    delete *it;
  }
}
// Creates and stores a handler for a newly received remote stream.
void MediaStreamHandlers::AddRemoteStream(MediaStreamInterface* stream) {
  RemoteMediaStreamHandler* handler = new RemoteMediaStreamHandler(stream,
                                                                   provider_);
  remote_streams_handlers_.push_back(handler);
}

// Deletes and removes the handler associated with |stream|. Calling this
// for a stream that was never added is a programming error (ASSERT), but
// must not corrupt the list: the original code unconditionally erased the
// iterator, which is undefined behavior on end() in release builds where
// ASSERT compiles to nothing. Guard the erase instead.
void MediaStreamHandlers::RemoveRemoteStream(MediaStreamInterface* stream) {
  StreamHandlerList::iterator it = remote_streams_handlers_.begin();
  for (; it != remote_streams_handlers_.end(); ++it) {
    if ((*it)->stream() == stream) {
      delete *it;
      break;
    }
  }
  ASSERT(it != remote_streams_handlers_.end());
  if (it != remote_streams_handlers_.end()) {
    remote_streams_handlers_.erase(it);
  }
}
// Synchronizes the set of local stream handlers with |streams|:
// handlers whose stream no longer appears in the collection are deleted,
// and new handlers are created for streams seen for the first time.
void MediaStreamHandlers::CommitLocalStreams(
    StreamCollectionInterface* streams) {
  // Pass 1: drop handlers for removed streams.
  // We cannot erase from the list while iterating it, hence the outer
  // loop that restarts the scan after each erase.
  while (1) {
    StreamHandlerList::iterator it = local_streams_handlers_.begin();
    for (; it != local_streams_handlers_.end(); ++it) {
      if (streams->find((*it)->stream()->label()) == NULL) {
        delete *it;
        break;
      }
    }
    if (it != local_streams_handlers_.end()) {
      local_streams_handlers_.erase(it);
      continue;
    }
    break;
  }

  // Pass 2: create handlers for streams that are in the new collection
  // but have no handler yet.
  for (size_t j = 0; j < streams->count(); ++j) {
    MediaStreamInterface* stream = streams->at(j);
    StreamHandlerList::iterator it = local_streams_handlers_.begin();
    for (; it != local_streams_handlers_.end(); ++it) {
      if (stream == (*it)->stream())
        break;
    }
    if (it == local_streams_handlers_.end()) {
      LocalMediaStreamHandler* handler = new LocalMediaStreamHandler(
          stream, provider_);
      local_streams_handlers_.push_back(handler);
    }
  }
}
} // namespace webrtc

View File

@ -1,144 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains classes for listening on changes on MediaStreams and
// MediaTracks and making sure appropriate action is taken.
// Example: If a user sets a rendererer on a local video track the renderer is
// connected to the appropriate camera.
#ifndef TALK_APP_WEBRTC_DEV_MEDIASTREAMHANDLER_H_
#define TALK_APP_WEBRTC_DEV_MEDIASTREAMHANDLER_H_
#include <list>
#include <vector>
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/mediastreamprovider.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/base/thread.h"
namespace webrtc {
// VideoTrackHandler listen to events on a VideoTrack instance and
// executes the requested change.
// VideoTrackHandler listens to events on a VideoTrack instance and
// executes the requested change. Subclasses implement the On*Changed()
// hooks for local vs. remote tracks.
class VideoTrackHandler : public ObserverInterface {
 public:
  VideoTrackHandler(VideoTrackInterface* track,
                    MediaProviderInterface* provider);
  virtual ~VideoTrackHandler();
  // ObserverInterface: diffs the track against the cached values below
  // and calls the matching hook for each change.
  virtual void OnChanged();

 protected:
  virtual void OnRendererChanged() = 0;
  virtual void OnStateChanged() = 0;
  virtual void OnEnabledChanged() = 0;
  MediaProviderInterface* provider_;  // Not owned.
  VideoTrackInterface* video_track_;  // Not owned; observed.

 private:
  // Cached copies used by OnChanged() to detect what actually changed.
  MediaStreamTrackInterface::TrackState state_;
  bool enabled_;
  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer_;
};
// Handler for a locally captured video track: connects the capture
// device and the local preview renderer via MediaProviderInterface.
class LocalVideoTrackHandler : public VideoTrackHandler {
 public:
  LocalVideoTrackHandler(LocalVideoTrackInterface* track,
                         MediaProviderInterface* provider);
  virtual ~LocalVideoTrackHandler();

 protected:
  virtual void OnRendererChanged();
  virtual void OnStateChanged();
  virtual void OnEnabledChanged();

 private:
  // Keeps the track alive for the lifetime of this handler.
  talk_base::scoped_refptr<LocalVideoTrackInterface> local_video_track_;
};

// Handler for a remotely originated video track: forwards renderer
// changes to the provider's remote rendering path.
class RemoteVideoTrackHandler : public VideoTrackHandler {
 public:
  RemoteVideoTrackHandler(VideoTrackInterface* track,
                          MediaProviderInterface* provider);
  virtual ~RemoteVideoTrackHandler();

 protected:
  virtual void OnRendererChanged();
  virtual void OnStateChanged();
  virtual void OnEnabledChanged();

 private:
  // Keeps the track alive for the lifetime of this handler.
  talk_base::scoped_refptr<VideoTrackInterface> remote_video_track_;
};
// Base class for handlers that listen on changes on a MediaStream and
// own the per-track handlers created for its video tracks.
class MediaStreamHandler : public ObserverInterface {
 public:
  MediaStreamHandler(MediaStreamInterface* stream,
                     MediaProviderInterface* provider);
  // Must be virtual: Local/RemoteMediaStreamHandler instances are deleted
  // through MediaStreamHandler* (see MediaStreamHandlers), which is
  // undefined behavior with a non-virtual destructor.
  virtual ~MediaStreamHandler();
  MediaStreamInterface* stream();
  virtual void OnChanged();

 protected:
  MediaProviderInterface* provider_;  // Not owned.
  typedef std::vector<VideoTrackHandler*> VideoTrackHandlers;
  VideoTrackHandlers video_handlers_;  // Owned; deleted in the dtor.
  talk_base::scoped_refptr<MediaStreamInterface> stream_;
};
// MediaStreamHandler subclass for locally created streams.
class LocalMediaStreamHandler : public MediaStreamHandler {
 public:
  LocalMediaStreamHandler(MediaStreamInterface* stream,
                          MediaProviderInterface* provider);
};

// MediaStreamHandler subclass for streams received from the remote peer.
class RemoteMediaStreamHandler : public MediaStreamHandler {
 public:
  RemoteMediaStreamHandler(MediaStreamInterface* stream,
                           MediaProviderInterface* provider);
};

// Container that owns all stream handlers and keeps them in sync with
// the application's local and remote stream collections.
class MediaStreamHandlers {
 public:
  explicit MediaStreamHandlers(MediaProviderInterface* provider);
  ~MediaStreamHandlers();
  void AddRemoteStream(MediaStreamInterface* stream);
  void RemoveRemoteStream(MediaStreamInterface* stream);
  // Diffs |streams| against the current local handlers, deleting
  // handlers for removed streams and creating them for new ones.
  void CommitLocalStreams(StreamCollectionInterface* streams);

 private:
  typedef std::list<MediaStreamHandler*> StreamHandlerList;
  StreamHandlerList local_streams_handlers_;   // Owned.
  StreamHandlerList remote_streams_handlers_;  // Owned.
  MediaProviderInterface* provider_;           // Not owned.
};
} // namespace webrtc
#endif  // TALK_APP_WEBRTC_DEV_MEDIASTREAMHANDLER_H_

View File

@ -1,147 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/app/webrtc_dev/videotrackimpl.h"
#include "talk/app/webrtc_dev/mediastreamhandler.h"
#include "talk/app/webrtc_dev/streamcollectionimpl.h"
#include "talk/base/thread.h"
using ::testing::Exactly;
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoDeviceName[] = "dummy_video_cam_1";
namespace webrtc {
// Helper class to test MediaStreamHandler.
// Helper class to test MediaStreamHandler.
// Adapts the two-argument MediaProviderInterface methods to
// single-argument gmock methods so tests only need to match on the
// device/renderer name.
// NOTE(review): "Provier" is a typo for "Provider"; kept as-is because
// the tests below reference this exact name.
class MockMediaProvier : public MediaProviderInterface {
 public:
  MOCK_METHOD1(SetCaptureDevice, void(const std::string& name));
  MOCK_METHOD1(SetLocalRenderer, void(const std::string& name));
  MOCK_METHOD1(SetRemoteRenderer, void(const std::string& name));

  virtual void SetCaptureDevice(const std::string& name,
                                VideoCaptureModule* camera) {
    SetCaptureDevice(name);
  }
  virtual void SetLocalRenderer(const std::string& name,
                                cricket::VideoRenderer* renderer) {
    SetLocalRenderer(name);
  }
  virtual void SetRemoteRenderer(const std::string& name,
                                 cricket::VideoRenderer* renderer) {
    SetRemoteRenderer(name);
  }
  ~MockMediaProvier() {}
};
// Verifies that committing a collection containing a local stream
// connects its capture device and renderer through the provider, and
// that committing the collection again after removing the stream tears
// the handler down (which triggers the extra SetLocalRenderer call).
TEST(MediaStreamHandlerTest, LocalStreams) {
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  talk_base::scoped_refptr<LocalVideoTrackInterface>
      video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
  EXPECT_TRUE(stream->AddTrack(video_track));
  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(
      CreateVideoRenderer(NULL));
  video_track->SetRenderer(renderer);

  MockMediaProvier provider;
  MediaStreamHandlers handlers(&provider);

  talk_base::scoped_refptr<StreamCollectionImpl> collection(
      StreamCollectionImpl::Create());
  collection->AddStream(stream);

  EXPECT_CALL(provider, SetLocalRenderer(kVideoDeviceName))
      .Times(Exactly(2));  // SetLocalRender will also be called from dtor of
                           // LocalVideoTrackHandler
  EXPECT_CALL(provider, SetCaptureDevice(kVideoDeviceName))
      .Times(Exactly(1));
  handlers.CommitLocalStreams(collection);
  video_track->set_state(MediaStreamTrackInterface::kLive);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);

  collection->RemoveStream(stream);
  handlers.CommitLocalStreams(collection);
  video_track->set_state(MediaStreamTrackInterface::kEnded);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
}
// Verifies that renderer changes on a remote stream's track are forwarded
// to the provider while the stream is registered, and no longer forwarded
// after the stream has been removed from the handlers.
TEST(MediaStreamHandlerTest, RemoteStreams) {
  // Create a local stream. We use local stream in this test as well because
  // they are easier to create.
  // LocalMediaStreams inherit from MediaStreams.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  talk_base::scoped_refptr<LocalVideoTrackInterface>
      video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
  EXPECT_TRUE(stream->AddTrack(video_track));

  MockMediaProvier provider;
  MediaStreamHandlers handlers(&provider);

  handlers.AddRemoteStream(stream);
  EXPECT_CALL(provider, SetRemoteRenderer(kVideoDeviceName))
      .Times(Exactly(3));  // SetRemoteRenderer is also called from dtor of
                           // RemoteVideoTrackHandler.

  // Set the renderer once.
  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(
      CreateVideoRenderer(NULL));
  video_track->SetRenderer(renderer);
  talk_base::Thread::Current()->ProcessMessages(1);

  // Change the already set renderer.
  renderer = CreateVideoRenderer(NULL);
  video_track->SetRenderer(renderer);
  talk_base::Thread::Current()->ProcessMessages(1);

  handlers.RemoveRemoteStream(stream);

  // Change the renderer after the stream have been removed from handler.
  // This should not trigger a call to SetRemoteRenderer.
  renderer = CreateVideoRenderer(NULL);
  video_track->SetRenderer(renderer);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
}
} // namespace webrtc

View File

@ -1,73 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/base/logging.h"
namespace webrtc {
// Factory: constructs a ref-counted MediaStream with the given label.
talk_base::scoped_refptr<MediaStream> MediaStream::Create(
    const std::string& label) {
  return talk_base::scoped_refptr<MediaStream>(
      new talk_base::RefCountedObject<MediaStream>(label));
}
// Starts in kInitializing so tracks can be added, and creates the empty
// ref-counted audio and video track lists.
MediaStream::MediaStream(const std::string& label)
    : label_(label),
      ready_state_(MediaStreamInterface::kInitializing),
      audio_track_list_(
          new talk_base::RefCountedObject<
              MediaStreamTrackList<AudioTrackInterface> >()),
      video_track_list_(
          new talk_base::RefCountedObject<
              MediaStreamTrackList<VideoTrackInterface> >()) {
}
// Updates the ready state and notifies observers only on a real change.
void MediaStream::set_ready_state(
    MediaStreamInterface::ReadyState new_state) {
  if (ready_state_ == new_state)
    return;
  ready_state_ = new_state;
  Notifier<LocalMediaStreamInterface>::FireOnChanged();
}
// Adds an audio track. Only allowed while the stream is initializing;
// returns false otherwise.
bool MediaStream::AddTrack(AudioTrackInterface* track) {
  const bool accept = (ready_state() == kInitializing);
  if (accept)
    audio_track_list_->AddTrack(track);
  return accept;
}

// Adds a video track. Only allowed while the stream is initializing;
// returns false otherwise.
bool MediaStream::AddTrack(VideoTrackInterface* track) {
  const bool accept = (ready_state() == kInitializing);
  if (accept)
    video_track_list_->AddTrack(track);
  return accept;
}
} // namespace webrtc

View File

@ -1,90 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains the implementation of MediaStreamInterface interface.
#ifndef TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_
#define TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/notifierimpl.h"
namespace webrtc {
class AudioTrack;
class VideoTrack;
// Implementation of a local media stream: a labeled collection of audio
// and video tracks. Tracks can only be added while the stream is in the
// kInitializing state.
class MediaStream : public Notifier<LocalMediaStreamInterface> {
 public:
  // Simple vector-backed track list holding ref-counted tracks.
  template<class T>
  class MediaStreamTrackList : public MediaStreamTrackListInterface<T> {
   public:
    void AddTrack(T* track) {
      tracks_.push_back(track);
    }
    virtual size_t count() { return tracks_.size(); }
    virtual T* at(size_t index) {
      return tracks_.at(index);
    }

   private:
    std::vector<talk_base::scoped_refptr<T> > tracks_;
  };

  // Factory; returns a ref-counted instance in state kInitializing.
  static talk_base::scoped_refptr<MediaStream> Create(const std::string& label);

  // Implement LocalMediaStreamInterface.
  // Both return false once the stream has left kInitializing.
  virtual bool AddTrack(AudioTrackInterface* track);
  virtual bool AddTrack(VideoTrackInterface* track);

  // Implement MediaStreamInterface.
  virtual std::string label() const { return label_; }
  virtual MediaStreamTrackListInterface<AudioTrackInterface>* audio_tracks() {
    return audio_track_list_;
  }
  virtual MediaStreamTrackListInterface<VideoTrackInterface>* video_tracks() {
    return video_track_list_;
  }
  virtual ReadyState ready_state() { return ready_state_; }
  // Fires OnChanged() on observers when the state actually changes.
  virtual void set_ready_state(ReadyState new_state);
  // NOTE(review): declared here but no definition is visible in the
  // accompanying .cc -- confirm it is implemented or remove it.
  void set_state(ReadyState new_state);

 protected:
  explicit MediaStream(const std::string& label);
  std::string label_;
  MediaStreamInterface::ReadyState ready_state_;
  talk_base::scoped_refptr<MediaStreamTrackList<AudioTrackInterface> >
      audio_track_list_;
  talk_base::scoped_refptr<MediaStreamTrackList<VideoTrackInterface> >
      video_track_list_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_

View File

@ -1,95 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/app/webrtc_dev/videotrackimpl.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoDeviceName[] = "dummy_video_cam_1";
namespace webrtc {
// Helper class to test the Observer.
// Observer stub that counts OnChanged() callbacks for verification.
class TestObserver : public ObserverInterface {
 public:
  TestObserver() : changed_(0) {}

  void OnChanged() { ++changed_; }

  // Number of OnChanged() calls since construction or the last Reset().
  int NumChanges() { return changed_; }

  void Reset() { changed_ = 0; }

 protected:
  int changed_;
};
// End-to-end check of local stream creation: label, initial ready state,
// track addition, track lookup, and observer notification on set_enabled.
TEST(LocalStreamTest, Create) {
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  EXPECT_EQ(label, stream->label());
  // Check state.
  EXPECT_EQ(MediaStreamInterface::kInitializing, stream->ready_state());

  // Create a local Video track.
  talk_base::scoped_refptr<LocalVideoTrackInterface>
      video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
  talk_base::scoped_refptr<MediaStreamTrackListInterface<VideoTrackInterface> >
      track_list(stream->video_tracks());

  // Add the track to the local stream.
  EXPECT_TRUE(stream->AddTrack(video_track));
  EXPECT_EQ(1u, stream->video_tracks()->count());

  // Verify the track.
  talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface> track(
      stream->video_tracks()->at(0));
  EXPECT_EQ(0, track->label().compare(kVideoDeviceName));
  EXPECT_TRUE(track->enabled());

  // Verify that both registered observers are notified exactly once.
  TestObserver observer1;
  TestObserver observer2;
  track->RegisterObserver(&observer1);
  track->RegisterObserver(&observer2);
  track->set_enabled(false);
  // NumChanges() returns int; compare against a signed literal to avoid
  // a signed/unsigned comparison warning (the original used 1u).
  EXPECT_EQ(1, observer1.NumChanges());
  EXPECT_EQ(1, observer2.NumChanges());
  EXPECT_FALSE(track->enabled());
}
} // namespace webrtc

View File

@ -1,52 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_DEV_MEDIASTREAMPROVIDER_H_
#define TALK_APP_WEBRTC_DEV_MEDIASTREAMPROVIDER_H_
#include "talk/app/webrtc_dev/mediastream.h"
namespace webrtc {
// Interface for setting media devices on a certain MediaTrack.
// This interface is called by classes in mediastreamhandler.h to
// set new devices.
class MediaProviderInterface {
 public:
  // Connects |camera| as the capture source for the track named |name|.
  virtual void SetCaptureDevice(const std::string& name,
                                VideoCaptureModule* camera) = 0;
  // Sets (or clears, with NULL) the local preview renderer for |name|.
  virtual void SetLocalRenderer(const std::string& name,
                                cricket::VideoRenderer* renderer) = 0;
  // Sets (or clears, with NULL) the renderer for the remote track |name|.
  virtual void SetRemoteRenderer(const std::string& name,
                                 cricket::VideoRenderer* renderer) = 0;

 protected:
  // Protected: implementations are not meant to be deleted through a
  // MediaProviderInterface pointer.
  virtual ~MediaProviderInterface() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DEV_MEDIASTREAMPROVIDER_H_

View File

@ -1,318 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/mediastreamproxy.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_refptr.h"
namespace {

// Message ids used to marshal MediaStreamProxy calls onto the
// signaling thread.
enum {
  MSG_SET_TRACKLIST_IMPLEMENTATION = 1,
  MSG_REGISTER_OBSERVER,
  MSG_UNREGISTER_OBSERVER,
  MSG_LABEL,
  MSG_ADD_AUDIO_TRACK,
  MSG_ADD_VIDEO_TRACK,
  MSG_READY_STATE,
  MSG_COUNT,
  MSG_AT
};

typedef talk_base::TypedMessageData<std::string*> LabelMessageData;
typedef talk_base::TypedMessageData<size_t> SizeTMessageData;
typedef talk_base::TypedMessageData<webrtc::ObserverInterface*>
    ObserverMessageData;
typedef talk_base::TypedMessageData<webrtc::MediaStreamInterface::ReadyState>
    ReadyStateMessageData;

// Carries a ref-counted track in and a bool result back out
// (used for the AddTrack messages).
template<typename T>
class MediaStreamTrackMessageData : public talk_base::MessageData {
 public:
  explicit MediaStreamTrackMessageData(T* track)
      : track_(track),
        result_(false) {
  }
  talk_base::scoped_refptr<T> track_;
  bool result_;
};

typedef MediaStreamTrackMessageData<webrtc::AudioTrackInterface>
    AudioTrackMsgData;
typedef MediaStreamTrackMessageData<webrtc::VideoTrackInterface>
    VideoTrackMsgData;

// Carries an index in and the track found at that index back out
// (used for the MSG_AT message).
template <class TrackType>
class MediaStreamTrackAtMessageData : public talk_base::MessageData {
 public:
  explicit MediaStreamTrackAtMessageData(size_t index)
      : index_(index) {
  }
  size_t index_;
  talk_base::scoped_refptr<TrackType> track_;
};

// Carries the underlying track-list implementations back to the proxy
// during construction (MSG_SET_TRACKLIST_IMPLEMENTATION).
class MediaStreamTrackListsMessageData : public talk_base::MessageData {
 public:
  talk_base::scoped_refptr<webrtc::AudioTracks> audio_tracks_;
  talk_base::scoped_refptr<webrtc::VideoTracks> video_tracks_;
};

}  // namespace
namespace webrtc {
// Creates a proxy that owns its own MediaStream implementation (built
// lazily in the MediaStreamProxy constructor when the impl pointer is
// NULL). All calls are marshaled to |signaling_thread|.
talk_base::scoped_refptr<MediaStreamProxy> MediaStreamProxy::Create(
    const std::string& label,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<MediaStreamProxy>* stream =
      new talk_base::RefCountedObject<MediaStreamProxy>(
          label, signaling_thread,
          // static_cast is the correct named cast for a typed null
          // pointer; the original reinterpret_cast was unnecessary.
          static_cast<LocalMediaStreamInterface*>(NULL));
  return stream;
}
// Creates a proxy wrapping an existing local media stream
// implementation; all calls are marshaled to |signaling_thread|.
talk_base::scoped_refptr<MediaStreamProxy> MediaStreamProxy::Create(
    const std::string& label,
    talk_base::Thread* signaling_thread,
    LocalMediaStreamInterface* media_stream_impl) {
  ASSERT(signaling_thread);
  ASSERT(media_stream_impl);
  talk_base::RefCountedObject<MediaStreamProxy>* stream =
      new talk_base::RefCountedObject<MediaStreamProxy>(label, signaling_thread,
                                                        media_stream_impl);
  return stream;
}
// Constructs the proxy. If |media_stream_impl| is NULL, a default
// MediaStream named |label| is created and owned by this proxy.
// After the implementation is in place, the underlying stream's audio and
// video track lists are fetched via a blocking Send to the signaling
// thread and installed into the two track-list proxies.
// NOTE(review): Send() dispatches to OnMessage on this partially
// constructed object; this is safe only because OnMessage touches just
// media_stream_impl_, which is already initialized — confirm if modifying.
MediaStreamProxy::MediaStreamProxy(const std::string& label,
                                   talk_base::Thread* signaling_thread,
                                   LocalMediaStreamInterface* media_stream_impl)
    : signaling_thread_(signaling_thread),
      media_stream_impl_(media_stream_impl),
      audio_tracks_(new talk_base::RefCountedObject<
                    MediaStreamTrackListProxy<AudioTrackInterface> >(
                        signaling_thread_)),
      video_tracks_(new talk_base::RefCountedObject<
                    MediaStreamTrackListProxy<VideoTrackInterface> >(
                        signaling_thread_)) {
  if (media_stream_impl_ == NULL) {
    media_stream_impl_ = MediaStream::Create(label);
  }
  MediaStreamTrackListsMessageData tracklists;
  Send(MSG_SET_TRACKLIST_IMPLEMENTATION, &tracklists);
  audio_tracks_->SetImplementation(tracklists.audio_tracks_);
  video_tracks_->SetImplementation(tracklists.video_tracks_);
}
std::string MediaStreamProxy::label() const {
if (!signaling_thread_->IsCurrent()) {
std::string label;
LabelMessageData msg(&label);
Send(MSG_LABEL, &msg);
return label;
}
return media_stream_impl_->label();
}
// Returns the stream's ready state, marshaling to the signaling thread if
// needed. kInitializing is only a placeholder; Send overwrites it.
MediaStreamInterface::ReadyState MediaStreamProxy::ready_state() {
  if (signaling_thread_->IsCurrent())
    return media_stream_impl_->ready_state();
  ReadyStateMessageData request(MediaStreamInterface::kInitializing);
  Send(MSG_READY_STATE, &request);
  return request.data();
}
// Updates the stream's ready state. Unlike the getters, this is NOT
// marshaled: state changes must originate on the signaling thread, and
// any other caller trips an assert and is ignored.
void MediaStreamProxy::set_ready_state(
    MediaStreamInterface::ReadyState new_state) {
  if (signaling_thread_->IsCurrent()) {
    media_stream_impl_->set_ready_state(new_state);
    return;
  }
  ASSERT(!"Not Allowed!");
}
// Adds an audio track to the underlying stream, marshaling to the
// signaling thread when invoked from any other thread. Returns the
// implementation's success flag.
bool MediaStreamProxy::AddTrack(AudioTrackInterface* track) {
  if (signaling_thread_->IsCurrent())
    return media_stream_impl_->AddTrack(track);
  AudioTrackMsgData request(track);
  Send(MSG_ADD_AUDIO_TRACK, &request);
  return request.result_;
}
// Adds a video track to the underlying stream; same marshaling contract
// as the audio overload above.
bool MediaStreamProxy::AddTrack(VideoTrackInterface* track) {
  if (signaling_thread_->IsCurrent())
    return media_stream_impl_->AddTrack(track);
  VideoTrackMsgData request(track);
  Send(MSG_ADD_VIDEO_TRACK, &request);
  return request.result_;
}
// Registers |observer| for change notifications on the underlying stream.
// Marshaled to the signaling thread when called from another thread.
void MediaStreamProxy::RegisterObserver(ObserverInterface* observer) {
  if (!signaling_thread_->IsCurrent()) {
    ObserverMessageData msg(observer);
    Send(MSG_REGISTER_OBSERVER, &msg);
    return;
  }
  media_stream_impl_->RegisterObserver(observer);
}

// Removes a previously registered observer; same marshaling contract as
// RegisterObserver.
void MediaStreamProxy::UnregisterObserver(ObserverInterface* observer) {
  if (!signaling_thread_->IsCurrent()) {
    ObserverMessageData msg(observer);
    Send(MSG_UNREGISTER_OBSERVER, &msg);
    return;
  }
  media_stream_impl_->UnregisterObserver(observer);
}

// Blocking dispatch of message |id| with payload |data| to OnMessage on
// the signaling thread. const_cast is required because Thread::Send takes
// a non-const MessageHandler* while const accessors (e.g. label()) also
// need to marshal.
void MediaStreamProxy::Send(uint32 id, talk_base::MessageData* data) const {
  signaling_thread_->Send(const_cast<MediaStreamProxy*>(this), id,
                          data);
}
// Implement MessageHandler. Runs on the signaling thread; each case
// unwraps the typed payload posted by the corresponding public method and
// forwards the call to the real stream implementation, writing any result
// back into the payload for the blocked caller.
void MediaStreamProxy::OnMessage(talk_base::Message* msg) {
  talk_base::MessageData* data = msg->pdata;
  switch (msg->message_id) {
    case MSG_SET_TRACKLIST_IMPLEMENTATION: {
      // Constructor handshake: hand back the real track lists.
      MediaStreamTrackListsMessageData* lists =
          static_cast<MediaStreamTrackListsMessageData*>(data);
      lists->audio_tracks_ = media_stream_impl_->audio_tracks();
      lists->video_tracks_ = media_stream_impl_->video_tracks();
      break;
    }
    case MSG_REGISTER_OBSERVER: {
      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
      media_stream_impl_->RegisterObserver(observer->data());
      break;
    }
    case MSG_UNREGISTER_OBSERVER: {
      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
      media_stream_impl_->UnregisterObserver(observer->data());
      break;
    }
    case MSG_LABEL: {
      LabelMessageData * label = static_cast<LabelMessageData*>(data);
      *(label->data()) = media_stream_impl_->label();
      break;
    }
    case MSG_ADD_AUDIO_TRACK: {
      AudioTrackMsgData * track =
          static_cast<AudioTrackMsgData *>(data);
      track->result_ = media_stream_impl_->AddTrack(track->track_.get());
      break;
    }
    case MSG_ADD_VIDEO_TRACK: {
      VideoTrackMsgData * track =
          static_cast<VideoTrackMsgData *>(data);
      track->result_ = media_stream_impl_->AddTrack(track->track_.get());
      break;
    }
    case MSG_READY_STATE: {
      ReadyStateMessageData* state = static_cast<ReadyStateMessageData*>(data);
      state->data() = media_stream_impl_->ready_state();
      break;
    }
    default:
      ASSERT(!"Not Implemented!");
      break;
  }
}
// Track-list proxy: marshals count()/at() onto the signaling thread.
template <class T>
MediaStreamProxy::MediaStreamTrackListProxy<T>::MediaStreamTrackListProxy(
    talk_base::Thread* signaling_thread)
    : signaling_thread_(signaling_thread) {
}

// Installs the real track list this proxy forwards to. Called once from
// the MediaStreamProxy constructor.
template <class T>
void MediaStreamProxy::MediaStreamTrackListProxy<T>::SetImplementation(
    MediaStreamTrackListInterface<T>* track_list) {
  track_list_ = track_list;
}

// Returns the number of tracks, marshaling to the signaling thread if
// called from another thread.
template <class T>
size_t MediaStreamProxy::MediaStreamTrackListProxy<T>::count() {
  if (!signaling_thread_->IsCurrent()) {
    SizeTMessageData msg(0u);
    Send(MSG_COUNT, &msg);
    return msg.data();
  }
  return track_list_->count();
}

// Returns the track at |index|, marshaling if needed.
// NOTE(review): in the marshaled path the raw pointer is returned while
// the reference held by |msg| is released on return — the caller must be
// keeping the track alive through another reference; confirm.
template <class T>
T* MediaStreamProxy::MediaStreamTrackListProxy<T>::at(
    size_t index) {
  if (!signaling_thread_->IsCurrent()) {
    MediaStreamTrackAtMessageData<T> msg(index);
    Send(MSG_AT, &msg);
    return msg.track_;
  }
  return track_list_->at(index);
}

// Blocking dispatch to this handler on the signaling thread; see
// MediaStreamProxy::Send for the const_cast rationale.
template <class T>
void MediaStreamProxy::MediaStreamTrackListProxy<T>::Send(
    uint32 id, talk_base::MessageData* data) const {
  signaling_thread_->Send(
      const_cast<MediaStreamProxy::MediaStreamTrackListProxy<T>*>(
          this), id, data);
}
// Implement MessageHandler. Runs on the signaling thread and services the
// marshaled count()/at() requests against the real track list.
template <class T>
void MediaStreamProxy::MediaStreamTrackListProxy<T>::OnMessage(
    talk_base::Message* msg) {
  talk_base::MessageData* data = msg->pdata;
  switch (msg->message_id) {
    case MSG_COUNT: {
      SizeTMessageData* count = static_cast<SizeTMessageData*>(data);
      count->data() = track_list_->count();
      break;
    }
    case MSG_AT: {
      MediaStreamTrackAtMessageData<T>* track =
          static_cast<MediaStreamTrackAtMessageData<T>*>(data);
      track->track_ = track_list_->at(track->index_);
      break;
    }
    default:
      ASSERT(!"Not Implemented!");
      break;
  }
}
} // namespace webrtc

View File

@ -1,111 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
#define TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/base/thread.h"
namespace webrtc {
using talk_base::scoped_refptr;
// MediaStreamProxy is a proxy for the MediaStream interface. The purpose is
// to make sure MediaStreamImpl is only accessed from the signaling thread.
// It can be used as a proxy for both local and remote MediaStreams.
// All public methods may be called from any thread; calls made off the
// signaling thread are marshaled there with a blocking Thread::Send.
class MediaStreamProxy : public LocalMediaStreamInterface,
                         public talk_base::MessageHandler {
 public:
  // Creates a proxy that owns a default MediaStream named |label|.
  static scoped_refptr<MediaStreamProxy> Create(
      const std::string& label,
      talk_base::Thread* signaling_thread);
  // Creates a proxy wrapping an existing stream implementation
  // (must be non-NULL).
  static scoped_refptr<MediaStreamProxy> Create(
      const std::string& label,
      talk_base::Thread* signaling_thread,
      LocalMediaStreamInterface* media_stream_impl);

  // Implement LocalStream.
  virtual bool AddTrack(AudioTrackInterface* track);
  virtual bool AddTrack(VideoTrackInterface* track);

  // Implement MediaStream.
  virtual std::string label() const;
  virtual AudioTracks* audio_tracks() {
    return audio_tracks_;
  }
  virtual VideoTracks* video_tracks() {
    return video_tracks_;
  }
  virtual ReadyState ready_state();
  // Must be called on the signaling thread; asserts otherwise.
  virtual void set_ready_state(ReadyState new_state);

  // Implement Notifier
  virtual void RegisterObserver(ObserverInterface* observer);
  virtual void UnregisterObserver(ObserverInterface* observer);

 protected:
  MediaStreamProxy(const std::string& label,
                   talk_base::Thread* signaling_thread,
                   LocalMediaStreamInterface* media_stream_impl);

  // Proxy for one track list (audio or video); marshals count()/at()
  // onto the signaling thread.
  template <class T>
  class MediaStreamTrackListProxy : public MediaStreamTrackListInterface<T>,
                                    public talk_base::MessageHandler {
   public:
    explicit MediaStreamTrackListProxy(talk_base::Thread* signaling_thread);
    // Installs the real track list to forward to.
    void SetImplementation(MediaStreamTrackListInterface<T>* track_list);
    virtual size_t count();
    virtual T* at(size_t index);

   private:
    void Send(uint32 id, talk_base::MessageData* data) const;
    void OnMessage(talk_base::Message* msg);

    talk_base::scoped_refptr<MediaStreamTrackListInterface<T> > track_list_;
    mutable talk_base::Thread* signaling_thread_;
  };
  typedef MediaStreamTrackListProxy<AudioTrackInterface> AudioTrackListProxy;
  typedef MediaStreamTrackListProxy<VideoTrackInterface> VideoTrackListProxy;

  // Blocking dispatch to OnMessage on the signaling thread.
  void Send(uint32 id, talk_base::MessageData* data) const;
  // Implement MessageHandler.
  virtual void OnMessage(talk_base::Message* msg);

  mutable talk_base::Thread* signaling_thread_;
  scoped_refptr<LocalMediaStreamInterface> media_stream_impl_;
  scoped_refptr<AudioTrackListProxy> audio_tracks_;
  scoped_refptr<VideoTrackListProxy> video_tracks_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_

View File

@ -1,394 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/mediastreamtrackproxy.h"
namespace {

// Message IDs used to marshal MediaStreamTrackProxy / AudioTrackProxy /
// VideoTrackProxy calls onto the signaling thread.
// The trailing comma after the last enumerator was removed: it is
// ill-formed in C++03 (-pedantic warning on gcc/MSVC C4482-era builds)
// and inconsistent with the equivalent enum in mediastreamproxy.cc.
enum {
  MSG_REGISTER_OBSERVER = 1,
  MSG_UNREGISTER_OBSERVER,
  MSG_LABEL,
  MSG_ENABLED,
  MSG_SET_ENABLED,
  MSG_STATE,
  MSG_GET_AUDIODEVICE,
  MSG_GET_VIDEODEVICE,
  MSG_GET_VIDEORENDERER,
  MSG_SET_VIDEORENDERER
};

// Payload wrappers carried across threads by talk_base::Thread::Send().
typedef talk_base::TypedMessageData<std::string*> LabelMessageData;
typedef talk_base::TypedMessageData<webrtc::ObserverInterface*>
    ObserverMessageData;
typedef talk_base::TypedMessageData
    <webrtc::MediaStreamTrackInterface::TrackState> TrackStateMessageData;
typedef talk_base::TypedMessageData<bool> EnableMessageData;

// Out-parameter for MSG_GET_AUDIODEVICE.
class AudioDeviceMessageData : public talk_base::MessageData {
 public:
  talk_base::scoped_refptr<webrtc::AudioDeviceModule> audio_device_;
};

// Out-parameter for MSG_GET_VIDEODEVICE.
class VideoDeviceMessageData : public talk_base::MessageData {
 public:
  talk_base::scoped_refptr<webrtc::VideoCaptureModule> video_device_;
};

// In/out parameter for MSG_GET_VIDEORENDERER / MSG_SET_VIDEORENDERER.
class VideoRendererMessageData : public talk_base::MessageData {
 public:
  talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
      video_renderer_;
};

}  // namespace anonymous
namespace webrtc {
// Base proxy for media stream tracks: marshals the MediaStreamTrack
// interface onto the signaling thread. |T| is the (local) track interface
// being proxied.
template <class T>
MediaStreamTrackProxy<T>::MediaStreamTrackProxy(
    talk_base::Thread* signaling_thread)
    : signaling_thread_(signaling_thread) {
}

// Installs the real track this proxy forwards to. Must be called by the
// derived-class constructor before any other method.
template <class T>
void MediaStreamTrackProxy<T>::Init(MediaStreamTrackInterface* track) {
  track_ = track;
}

// Forwards directly without marshaling.
// NOTE(review): presumably kind() is immutable after Init so this is
// thread-safe — confirm against the track implementations.
template <class T>
std::string MediaStreamTrackProxy<T>::kind() const {
  return track_->kind();
}

// Returns the track label, marshaling to the signaling thread if needed.
template <class T>
std::string MediaStreamTrackProxy<T>::label() const {
  if (!signaling_thread_->IsCurrent()) {
    std::string label;
    LabelMessageData msg(&label);
    Send(MSG_LABEL, &msg);
    return label;
  }
  return track_->label();
}

// Returns the track state, marshaling if needed; kInitializing is only a
// placeholder that Send overwrites.
template <class T>
MediaStreamTrackInterface::TrackState MediaStreamTrackProxy<T>::state() const {
  if (!signaling_thread_->IsCurrent()) {
    TrackStateMessageData msg(MediaStreamTrackInterface::kInitializing);
    Send(MSG_STATE, &msg);
    return msg.data();
  }
  return track_->state();
}

// Returns the enabled flag, marshaling if needed.
template <class T>
bool MediaStreamTrackProxy<T>::enabled() const {
  if (!signaling_thread_->IsCurrent()) {
    EnableMessageData msg(false);
    Send(MSG_ENABLED, &msg);
    return msg.data();
  }
  return track_->enabled();
}

// Sets the enabled flag, marshaling if needed; returns the
// implementation's result.
template <class T>
bool MediaStreamTrackProxy<T>::set_enabled(bool enable) {
  if (!signaling_thread_->IsCurrent()) {
    EnableMessageData msg(enable);
    Send(MSG_SET_ENABLED, &msg);
    return msg.data();
  }
  return track_->set_enabled(enable);
}

// State changes must originate on the signaling thread; calls from other
// threads assert and return false.
template <class T>
bool MediaStreamTrackProxy<T>::set_state(
    MediaStreamTrackInterface::TrackState new_state) {
  if (!signaling_thread_->IsCurrent()) {
    // State should only be allowed to be changed from the signaling thread.
    ASSERT(!"Not Allowed!");
    return false;
  }
  return track_->set_state(new_state);
}

// Registers |observer| for change notifications, marshaling if needed.
template <class T>
void MediaStreamTrackProxy<T>::RegisterObserver(ObserverInterface* observer) {
  if (!signaling_thread_->IsCurrent()) {
    ObserverMessageData msg(observer);
    Send(MSG_REGISTER_OBSERVER, &msg);
    return;
  }
  track_->RegisterObserver(observer);
}

// Removes a previously registered observer, marshaling if needed.
template <class T>
void MediaStreamTrackProxy<T>::UnregisterObserver(ObserverInterface* observer) {
  if (!signaling_thread_->IsCurrent()) {
    ObserverMessageData msg(observer);
    Send(MSG_UNREGISTER_OBSERVER, &msg);
    return;
  }
  track_->UnregisterObserver(observer);
}

// Blocking dispatch to this handler on the signaling thread. const_cast
// is needed because Thread::Send takes a non-const MessageHandler*.
template <class T>
void MediaStreamTrackProxy<T>::Send(uint32 id,
                                    talk_base::MessageData* data) const {
  signaling_thread_->Send(const_cast<MediaStreamTrackProxy<T>*>(this), id,
                          data);
}

// Services the marshaled base-class messages on the signaling thread.
// Returns true if the message was handled here; derived classes handle
// their own message IDs when this returns false.
// (Dead `break;` statements after each `return true;` were removed.)
template <class T>
bool MediaStreamTrackProxy<T>::HandleMessage(talk_base::Message* msg) {
  talk_base::MessageData* data = msg->pdata;
  switch (msg->message_id) {
    case MSG_REGISTER_OBSERVER: {
      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
      track_->RegisterObserver(observer->data());
      return true;
    }
    case MSG_UNREGISTER_OBSERVER: {
      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
      track_->UnregisterObserver(observer->data());
      return true;
    }
    case MSG_LABEL: {
      LabelMessageData* label = static_cast<LabelMessageData*>(data);
      *(label->data()) = track_->label();
      return true;
    }
    case MSG_SET_ENABLED: {
      EnableMessageData* enabled = static_cast<EnableMessageData*>(data);
      enabled->data() = track_->set_enabled(enabled->data());
      return true;
    }
    case MSG_ENABLED: {
      EnableMessageData* enabled = static_cast<EnableMessageData*>(data);
      enabled->data() = track_->enabled();
      return true;
    }
    case MSG_STATE: {
      TrackStateMessageData* state = static_cast<TrackStateMessageData*>(data);
      state->data() = track_->state();
      return true;
    }
    default:
      return false;
  }
}
// Remote-track constructor: wraps a remote AudioTrack named |label|.
AudioTrackProxy::AudioTrackProxy(const std::string& label,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
      audio_track_(AudioTrack::CreateRemote(label)) {
  Init(audio_track_);
}

// Local-track constructor: wraps a local AudioTrack backed by
// |audio_device|.
AudioTrackProxy::AudioTrackProxy(const std::string& label,
                                 AudioDeviceModule* audio_device,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
      audio_track_(AudioTrack::CreateLocal(label, audio_device)) {
  Init(audio_track_);
}

// Wraps an externally supplied local audio track implementation.
AudioTrackProxy::AudioTrackProxy(LocalAudioTrackInterface* implementation,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
      audio_track_(implementation) {
  Init(audio_track_);
}

// Factory for a proxy around a remote audio track.
talk_base::scoped_refptr<AudioTrackInterface> AudioTrackProxy::CreateRemote(
    const std::string& label,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<AudioTrackProxy>* track =
      new talk_base::RefCountedObject<AudioTrackProxy>(label, signaling_thread);
  return track;
}

// Factory for a proxy around a local audio track using |audio_device|.
talk_base::scoped_refptr<LocalAudioTrackInterface> AudioTrackProxy::CreateLocal(
    const std::string& label,
    AudioDeviceModule* audio_device,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<AudioTrackProxy>* track =
      new talk_base::RefCountedObject<AudioTrackProxy>(label,
                                                       audio_device,
                                                       signaling_thread);
  return track;
}

// Factory for a proxy around an existing local audio track implementation.
talk_base::scoped_refptr<LocalAudioTrackInterface> AudioTrackProxy::CreateLocal(
    LocalAudioTrackInterface* implementation,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<AudioTrackProxy>* track =
      new talk_base::RefCountedObject<AudioTrackProxy>(implementation,
                                                       signaling_thread);
  return track;
}

// Returns the track's audio device, marshaling to the signaling thread if
// needed.
// NOTE(review): in the marshaled path the raw pointer is returned while
// the reference held by |msg| is released on return — assumes another
// reference keeps the module alive; confirm.
AudioDeviceModule* AudioTrackProxy::GetAudioDevice() {
  if (!signaling_thread_->IsCurrent()) {
    AudioDeviceMessageData msg;
    Send(MSG_GET_AUDIODEVICE, &msg);
    return msg.audio_device_;
  }
  return audio_track_->GetAudioDevice();
}

// Implement MessageHandler: base class handles common messages; the only
// audio-specific message is MSG_GET_AUDIODEVICE.
void AudioTrackProxy::OnMessage(talk_base::Message* msg) {
  if (!MediaStreamTrackProxy<LocalAudioTrackInterface>::HandleMessage(msg)) {
    if (msg->message_id == MSG_GET_AUDIODEVICE) {
      AudioDeviceMessageData* audio_device =
          static_cast<AudioDeviceMessageData*>(msg->pdata);
      audio_device->audio_device_ = audio_track_->GetAudioDevice();
    } else {
      ASSERT(!"Not Implemented!");
    }
  }
}
// Remote-track constructor: wraps a remote VideoTrack named |label|.
VideoTrackProxy::VideoTrackProxy(const std::string& label,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
      video_track_(VideoTrack::CreateRemote(label)) {
  Init(video_track_);
}

// Local-track constructor: wraps a local VideoTrack backed by
// |video_device|.
VideoTrackProxy::VideoTrackProxy(const std::string& label,
                                 VideoCaptureModule* video_device,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
      video_track_(VideoTrack::CreateLocal(label, video_device)) {
  Init(video_track_);
}

// Wraps an externally supplied local video track implementation.
VideoTrackProxy::VideoTrackProxy(LocalVideoTrackInterface* implementation,
                                 talk_base::Thread* signaling_thread)
    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
      video_track_(implementation) {
  Init(video_track_);
}

// Factory for a proxy around a remote video track.
talk_base::scoped_refptr<VideoTrackInterface> VideoTrackProxy::CreateRemote(
    const std::string& label,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<VideoTrackProxy>* track =
      new talk_base::RefCountedObject<VideoTrackProxy>(label, signaling_thread);
  return track;
}

// Factory for a proxy around a local video track using |video_device|.
talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
    const std::string& label,
    VideoCaptureModule* video_device,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<VideoTrackProxy>* track =
      new talk_base::RefCountedObject<VideoTrackProxy>(label, video_device,
                                                       signaling_thread);
  return track;
}

// Factory for a proxy around an existing local video track implementation.
talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
    LocalVideoTrackInterface* implementation,
    talk_base::Thread* signaling_thread) {
  ASSERT(signaling_thread);
  talk_base::RefCountedObject<VideoTrackProxy>* track =
      new talk_base::RefCountedObject<VideoTrackProxy>(implementation,
                                                       signaling_thread);
  return track;
}

// Returns the track's capture module, marshaling to the signaling thread
// if needed.
// NOTE(review): in the marshaled path the raw pointer is returned while
// the reference held by |msg| is released on return — assumes another
// reference keeps the module alive; confirm.
VideoCaptureModule* VideoTrackProxy::GetVideoCapture() {
  if (!signaling_thread_->IsCurrent()) {
    VideoDeviceMessageData msg;
    Send(MSG_GET_VIDEODEVICE, &msg);
    return msg.video_device_;
  }
  return video_track_->GetVideoCapture();
}

// Installs |renderer| on the track, marshaling if needed.
void VideoTrackProxy::SetRenderer(VideoRendererWrapperInterface* renderer) {
  if (!signaling_thread_->IsCurrent()) {
    VideoRendererMessageData msg;
    msg.video_renderer_ = renderer;
    Send(MSG_SET_VIDEORENDERER, &msg);
    return;
  }
  return video_track_->SetRenderer(renderer);
}

// Returns the current renderer, marshaling if needed; same lifetime
// caveat as GetVideoCapture above.
VideoRendererWrapperInterface* VideoTrackProxy::GetRenderer() {
  if (!signaling_thread_->IsCurrent()) {
    VideoRendererMessageData msg;
    Send(MSG_GET_VIDEORENDERER, &msg);
    return msg.video_renderer_;
  }
  return video_track_->GetRenderer();
}

// Implement MessageHandler: base class handles common messages; the
// video-specific messages are the device/renderer accessors.
void VideoTrackProxy::OnMessage(talk_base::Message* msg) {
  if (!MediaStreamTrackProxy<LocalVideoTrackInterface>::HandleMessage(msg)) {
    switch (msg->message_id) {
      case MSG_GET_VIDEODEVICE: {
        VideoDeviceMessageData* video_device =
            static_cast<VideoDeviceMessageData*>(msg->pdata);
        video_device->video_device_ = video_track_->GetVideoCapture();
        break;
      }
      case MSG_GET_VIDEORENDERER: {
        VideoRendererMessageData* video_renderer =
            static_cast<VideoRendererMessageData*>(msg->pdata);
        video_renderer->video_renderer_ = video_track_->GetRenderer();
        break;
      }
      case MSG_SET_VIDEORENDERER: {
        VideoRendererMessageData* video_renderer =
            static_cast<VideoRendererMessageData*>(msg->pdata);
        video_track_->SetRenderer(video_renderer->video_renderer_.get());
        break;
      }
      default:
        ASSERT(!"Not Implemented!");
        break;
    }
  }
}
} // namespace webrtc

View File

@ -1,143 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file includes proxy classes for tracks. The purpose is
// to make sure tracks are only accessed from the signaling thread.
#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
#define TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/audiotrackimpl.h"
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/videotrackimpl.h"
#include "talk/base/thread.h"
namespace webrtc {
// Base proxy for media stream tracks. Marshals the common
// MediaStreamTrack interface onto the signaling thread; derived proxies
// add the audio-/video-specific methods and their message handling.
template <class T>
class MediaStreamTrackProxy : public T,
                              talk_base::MessageHandler {
 public:
  // Installs the real track to forward to; must be called by the derived
  // constructor before any other method.
  void Init(MediaStreamTrackInterface* track);
  // Implement MediaStreamTrack.
  virtual std::string kind() const;
  virtual std::string label() const;
  virtual bool enabled() const;
  virtual MediaStreamTrackInterface::TrackState state() const;
  virtual bool set_enabled(bool enable);
  // Must be called on the signaling thread; asserts and returns false
  // otherwise.
  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state);

  // Implement Notifier
  virtual void RegisterObserver(ObserverInterface* observer);
  virtual void UnregisterObserver(ObserverInterface* observer);

 protected:
  explicit MediaStreamTrackProxy(talk_base::Thread* signaling_thread);
  // Blocking dispatch to OnMessage on the signaling thread.
  void Send(uint32 id, talk_base::MessageData* data) const;
  // Returns true if the message is handled.
  bool HandleMessage(talk_base::Message* msg);

  mutable talk_base::Thread* signaling_thread_;
  MediaStreamTrackInterface* track_;  // Not owned; derived class holds a ref.
};
// AudioTrackProxy is a proxy for the AudioTrackInterface. The purpose is
// to make sure AudioTrack is only accessed from the signaling thread.
// It can be used as a proxy for both local and remote audio tracks.
class AudioTrackProxy : public MediaStreamTrackProxy<LocalAudioTrackInterface> {
 public:
  // Creates a proxy around a remote audio track named |label|.
  static talk_base::scoped_refptr<AudioTrackInterface> CreateRemote(
      const std::string& label,
      talk_base::Thread* signaling_thread);
  // Creates a proxy around a local audio track backed by |audio_device|.
  static talk_base::scoped_refptr<LocalAudioTrackInterface> CreateLocal(
      const std::string& label,
      AudioDeviceModule* audio_device,
      talk_base::Thread* signaling_thread);
  // Creates a proxy around an existing local track implementation.
  static talk_base::scoped_refptr<LocalAudioTrackInterface> CreateLocal(
      LocalAudioTrackInterface* implementation,
      talk_base::Thread* signaling_thread);

  virtual AudioDeviceModule* GetAudioDevice();

 protected:
  AudioTrackProxy(const std::string& label,
                  talk_base::Thread* signaling_thread);
  AudioTrackProxy(const std::string& label,
                  AudioDeviceModule* audio_device,
                  talk_base::Thread* signaling_thread);
  AudioTrackProxy(LocalAudioTrackInterface* implementation,
                  talk_base::Thread* signaling_thread);

  // Implement MessageHandler
  virtual void OnMessage(talk_base::Message* msg);

  talk_base::scoped_refptr<LocalAudioTrackInterface> audio_track_;
};
// VideoTrackProxy is a proxy for the VideoTrackInterface and
// LocalVideoTrackInterface. The purpose is
// to make sure VideoTrack is only accessed from the signaling thread.
// It can be used as a proxy for both local and remote video tracks.
class VideoTrackProxy : public MediaStreamTrackProxy<LocalVideoTrackInterface> {
 public:
  // Creates a proxy around a remote video track named |label|.
  static talk_base::scoped_refptr<VideoTrackInterface> CreateRemote(
      const std::string& label,
      talk_base::Thread* signaling_thread);
  // Creates a proxy around a local video track backed by |video_device|.
  static talk_base::scoped_refptr<LocalVideoTrackInterface> CreateLocal(
      const std::string& label,
      VideoCaptureModule* video_device,
      talk_base::Thread* signaling_thread);
  // Creates a proxy around an existing local track implementation.
  static talk_base::scoped_refptr<LocalVideoTrackInterface> CreateLocal(
      LocalVideoTrackInterface* implementation,
      talk_base::Thread* signaling_thread);

  virtual VideoCaptureModule* GetVideoCapture();
  virtual void SetRenderer(VideoRendererWrapperInterface* renderer);
  // |virtual| added for consistency with the sibling overrides above;
  // the interface method this forwards for is overridden here like the
  // others.
  virtual VideoRendererWrapperInterface* GetRenderer();

 protected:
  VideoTrackProxy(const std::string& label,
                  talk_base::Thread* signaling_thread);
  VideoTrackProxy(const std::string& label,
                  VideoCaptureModule* video_device,
                  talk_base::Thread* signaling_thread);
  VideoTrackProxy(LocalVideoTrackInterface* implementation,
                  talk_base::Thread* signaling_thread);

  // Implement MessageHandler
  virtual void OnMessage(talk_base::Message* msg);

  talk_base::scoped_refptr<LocalVideoTrackInterface> video_track_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_

View File

@ -1,78 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIATRACKIMPL_H_
#define TALK_APP_WEBRTC_MEDIATRACKIMPL_H_
#include <string>
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/notifierimpl.h"
namespace webrtc {
// MediaTrack implements the interface common to AudioTrackInterface and
// VideoTrackInterface.
template <typename T>
class MediaTrack : public Notifier<T> {
 public:
  typedef typename T::TrackState TypedTrackState;

  virtual std::string label() const { return label_; }
  virtual TypedTrackState state() const { return state_; }
  virtual bool enabled() const { return enabled_; }
  // Updates the enabled flag and fires OnChanged on registered observers
  // when the value actually changes. Returns true.
  // BUG FIX: this function was declared bool but had no return statement
  // (undefined behavior when the caller reads the result, as
  // MediaStreamTrackProxy::set_enabled does). It now returns true
  // unconditionally, consistent with set_state below.
  virtual bool set_enabled(bool enable) {
    bool fire_on_change = (enable != enabled_);
    enabled_ = enable;
    if (fire_on_change) {
      Notifier<T>::FireOnChanged();
    }
    return true;
  }
  // Updates the state and fires OnChanged when the value actually
  // changes. Always returns true.
  virtual bool set_state(TypedTrackState new_state) {
    bool fire_on_change = (state_ != new_state);
    state_ = new_state;
    if (fire_on_change)
      Notifier<T>::FireOnChanged();
    return true;
  }

 protected:
  // Tracks start enabled and in T::kInitializing.
  explicit MediaTrack(const std::string& label)
      : enabled_(true),
        label_(label),
        state_(T::kInitializing) {
  }

 private:
  bool enabled_;
  std::string label_;
  TypedTrackState state_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIATRACKIMPL_H_

View File

@ -1,73 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_NOTIFIERIMPL_H_
#define TALK_APP_WEBRTC_NOTIFIERIMPL_H_
#include <list>
#include "talk/base/common.h"
#include "talk/app/webrtc_dev/mediastream.h"
namespace webrtc {
// Implement a template version of a notifier.
template <class T>
class Notifier : public T {
public:
Notifier() {
}
virtual void RegisterObserver(ObserverInterface* observer) {
ASSERT(observer != NULL);
observers_.push_back(observer);
}
virtual void UnregisterObserver(ObserverInterface* observer) {
for (std::list<ObserverInterface*>::iterator it = observers_.begin();
it != observers_.end(); it++) {
if (*it == observer) {
observers_.erase(it);
break;
}
}
}
void FireOnChanged() {
for (std::list<ObserverInterface*>::iterator it = observers_.begin();
it != observers_.end(); ++it) {
(*it)-> OnChanged();
}
}
protected:
std::list<ObserverInterface*> observers_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_NOTIFIERIMPL_H_

View File

@ -1,208 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains the PeerConnection interface as defined in
// http://dev.w3.org/2011/webrtc/editor/webrtc.html#peer-to-peer-connections.
// Applications must use this interface to implement peerconnection.
// PeerConnectionFactory class provides factory methods to create
// peerconnection, mediastream and media tracks objects.
//
// The Following steps are needed to setup a typical call.
// 1. Create a PeerConnectionFactoryInterface. Check constructors for more
// information about input parameters.
// 2. Create a PeerConnection object. Provide a configuration string which
// points either to stun or turn server to generate ICE candidates and provide
// an object that implements the PeerConnectionObserver interface.
// Now PeerConnection will startcollecting ICE candidates.
// 3. Create local MediaStream and MediaTracks using the PeerConnectionFactory
// and add it to PeerConnection by calling AddStream.
// 4. Once all mediastreams are added to peerconnection, call
// CommitStreamChanges. Now PeerConnection starts generating an offer based on
// the local mediastreams.
// 5. When PeerConnection have generated the ICE candidates it will call the
// observer OnSignalingMessage callback with the initial offer.
// 6. When an Answer from peer received it must be supplied to the
// PeerConnection by calling ProcessSignalingMessage.
// At this point PeerConnection knows remote capabilities and ICE candidates.
// Media will start flowing to the remote peer.
// The Receiver of a call can decide to accept or reject the call.
// This decision will be taken by the application not peerconnection.
// If application decides to accept the call
// 1. Create PeerConnectionFactoryInterface if it doesn't exist.
// 2. Create new PeerConnection
// 3. Provide the remote offer to the new PeerConnection object by calling
// ProcessSignalingMessage.
// 4. PeerConnection will call the observer function OnAddStream with remote
// MediaStream and tracks information.
// 5. PeerConnection will call the observer function OnSignalingMessage with
// local ICE candidates in a answer message.
// 6. Application can add it's own MediaStreams by calling AddStream.
// When all streams have been added the application must call
// CommitStreamChanges. Streams can be added at any time after the
// PeerConnection object have been created.
#ifndef TALK_APP_WEBRTC_PEERCONNECTION_H_
#define TALK_APP_WEBRTC_PEERCONNECTION_H_
#include <string>
#include "talk/app/webrtc_dev/mediastream.h"
namespace talk_base {
class Thread;
class NetworkManager;
class PacketSocketFactory;
}
namespace webrtc {
// MediaStream container interface.
// MediaStream container interface.
// Read-only, indexed view over a set of media streams. Reference counted;
// obtain via PeerConnectionInterface::local_streams()/remote_streams().
class StreamCollectionInterface : public talk_base::RefCountInterface {
 public:
  // Number of streams in the collection.
  virtual size_t count() = 0;
  // Stream at |index|. NOTE(review): behavior for out-of-range index is not
  // specified here — confirm with the implementation.
  virtual MediaStreamInterface* at(size_t index) = 0;
  // Lookup by stream label; presumably returns NULL when absent — confirm.
  virtual MediaStreamInterface* find(const std::string& label) = 0;
 protected:
  // Dtor protected as objects shouldn't be deleted via this interface.
  ~StreamCollectionInterface() {}
};
// PeerConnection callback interface. Application should implement these
// methods.
// PeerConnection callback interface. Application should implement these
// methods. See the usage walk-through at the top of this file for when each
// callback fires.
class PeerConnectionObserver {
 public:
  // Connection readiness as reported through OnStateChange.
  enum Readiness {
    kNegotiating,
    kActive,
  };
  virtual void OnError() = 0;
  // Message received from the remote peer.
  // NOTE(review): presumably the counterpart of PeerConnectionInterface::Send
  // — confirm.
  virtual void OnMessage(const std::string& msg) = 0;
  // Serialized signaling message
  virtual void OnSignalingMessage(const std::string& msg) = 0;
  virtual void OnStateChange(Readiness state) = 0;
  // Triggered when media is received on a new stream from remote peer.
  virtual void OnAddStream(MediaStreamInterface* stream) = 0;
  // Triggered when a remote peer close a stream.
  virtual void OnRemoveStream(MediaStreamInterface* stream) = 0;
 protected:
  // Dtor protected as objects shouldn't be deleted via this interface.
  ~PeerConnectionObserver() {}
};
// Main application-facing interface for a single peer-to-peer connection.
// Reference counted; create instances through
// PeerConnectionFactoryInterface::CreatePeerConnection.
class PeerConnectionInterface : public talk_base::RefCountInterface {
 public:
  // SignalingMessage in json format.
  // Feed offers/answers/candidates received from the remote peer here.
  virtual bool ProcessSignalingMessage(const std::string& msg) = 0;
  // Sends the msg over a data stream.
  virtual bool Send(const std::string& msg) = 0;
  // Accessor methods to active local streams.
  virtual talk_base::scoped_refptr<StreamCollectionInterface>
      local_streams() = 0;
  // Accessor methods to remote streams.
  virtual talk_base::scoped_refptr<StreamCollectionInterface>
      remote_streams() = 0;
  // Add a new local stream.
  // This function does not trigger any changes to the stream until
  // CommitStreamChanges is called.
  virtual void AddStream(LocalMediaStreamInterface* stream) = 0;
  // Remove a local stream and stop sending it.
  // This function does not trigger any changes to the stream until
  // CommitStreamChanges is called.
  virtual void RemoveStream(LocalMediaStreamInterface* stream) = 0;
  // Commit Stream changes. This will start sending media on new streams
  // and stop sending media on removed stream.
  virtual void CommitStreamChanges() = 0;
 protected:
  // Dtor protected as objects shouldn't be deleted via this interface.
  ~PeerConnectionInterface() {}
};
// PeerConnectionFactoryInterface is the factory interface use for creating
// PeerConnection, MediaStream and media tracks.
// PeerConnectionFactoryInterface will create required libjingle threads,
// socket and network manager factory classes for networking.
// If application decides to provide its own implementation of these classes
// it should use alternate create method which accepts these parameters
// as input.
// PeerConnectionFactoryInterface is the factory interface use for creating
// PeerConnection, MediaStream and media tracks.
// PeerConnectionFactoryInterface will create required libjingle threads,
// socket and network manager factory classes for networking.
// If application decides to provide its own implementation of these classes
// it should use alternate create method which accepts these parameters
// as input.
class PeerConnectionFactoryInterface : public talk_base::RefCountInterface {
 public:
  // |config| points at a STUN or TURN server used for ICE candidate
  // gathering; |observer| receives all connection callbacks.
  virtual talk_base::scoped_refptr<PeerConnectionInterface>
      CreatePeerConnection(const std::string& config,
                           PeerConnectionObserver* observer) = 0;
  // Creates an empty local stream with the given label.
  virtual talk_base::scoped_refptr<LocalMediaStreamInterface>
      CreateLocalMediaStream(const std::string& label) = 0;
  // Creates a video track fed by |video_device|.
  virtual talk_base::scoped_refptr<LocalVideoTrackInterface>
      CreateLocalVideoTrack(const std::string& label,
                            VideoCaptureModule* video_device) = 0;
  // Creates an audio track fed by |audio_device|.
  virtual talk_base::scoped_refptr<LocalAudioTrackInterface>
      CreateLocalAudioTrack(const std::string& label,
                            AudioDeviceModule* audio_device) = 0;
 protected:
  // Dtor and ctor protected as objects shouldn't be created or deleted via
  // this interface.
  PeerConnectionFactoryInterface() {}
  ~PeerConnectionFactoryInterface() {} // NOLINT
};
// Create a new instance of PeerConnectionFactoryInterface. This variant
// creates its own threads and networking components (see class comment
// above).
talk_base::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory();
// Create a new instance of PeerConnectionFactoryInterface.
// Ownership of the arguments is not transferred to this object and they must
// remain in scope for the lifetime of the PeerConnectionFactoryInterface.
talk_base::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
    talk_base::Thread* worker_thread,
    talk_base::Thread* signaling_thread,
    talk_base::NetworkManager* network_manager,
    talk_base::PacketSocketFactory* packet_socket_factory,
    AudioDeviceModule* default_adm);
} // namespace webrtc
#endif // TALK_APP_WEBRTC_PEERCONNECTION_H_

View File

@ -1,348 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdio.h>
#include <list>
#include "gtest/gtest.h"
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/test/filevideocapturemodule.h"
#include "talk/base/thread.h"
#include "talk/session/phone/videoframe.h"
#include "talk/session/phone/videorenderer.h"
// Appends every video track of |media_stream| to |video_tracks|.
// Does not take ownership of the tracks.
void GetAllVideoTracks(webrtc::MediaStreamInterface* media_stream,
                       std::list<webrtc::VideoTrackInterface*>* video_tracks) {
  webrtc::VideoTracks* track_list = media_stream->video_tracks();
  for (size_t i = 0; i < track_list->count(); ++i) {
    // at() already yields a VideoTrackInterface*; the previous
    // static_cast to the same type was redundant and has been removed.
    video_tracks->push_back(track_list->at(i));
  }
}
// Opens the first usable video capture device.
// Returns NULL when no device exists or none can be opened.
webrtc::VideoCaptureModule* OpenVideoCaptureDevice() {
  webrtc::VideoCaptureModule::DeviceInfo* device_info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0));
  // Bug fix: initialize to NULL. Previously, when NumberOfDevices() was 0
  // the loop never ran and an uninitialized pointer was returned.
  webrtc::VideoCaptureModule* video_device = NULL;
  const size_t kMaxDeviceNameLength = 128;
  const size_t kMaxUniqueIdLength = 256;
  uint8 device_name[kMaxDeviceNameLength];
  uint8 unique_id[kMaxUniqueIdLength];
  const size_t device_count = device_info->NumberOfDevices();
  for (size_t i = 0; i < device_count; ++i) {
    // Get the name of the video capture device.
    device_info->GetDeviceName(i, device_name, kMaxDeviceNameLength, unique_id,
                               kMaxUniqueIdLength);
    // Try to open this device.
    video_device =
        webrtc::VideoCaptureFactory::Create(0, unique_id);
    if (video_device != NULL)
      break;
  }
  delete device_info;
  return video_device;
}
// Writes every rendered I420 frame to a raw .yuv file.
class VideoRecorder : public cricket::VideoRenderer {
 public:
  // Factory; returns NULL if the output file cannot be opened.
  static VideoRecorder* CreateVideoRecorder(
      const char* file_name) {
    VideoRecorder* renderer = new VideoRecorder();
    if (!renderer->Init(file_name)) {
      delete renderer;
      return NULL;
    }
    return renderer;
  }
  virtual ~VideoRecorder() {
    if (output_file_ != NULL) {
      fclose(output_file_);
    }
  }
  // Set up files so that recording can start immediately.
  bool Init(const char* file_name) {
    output_file_ = fopen(file_name, "wb");
    if (output_file_ == NULL) {
      return false;
    }
    return true;
  }
  // cricket::VideoRenderer implementation.
  virtual bool SetSize(int width, int height, int /*reserved*/) {
    width_ = width;
    height_ = height;
    image_.reset(new uint8[buffersize()]);
    // Bug fix: this function is declared bool but previously flowed off the
    // end without a return statement (undefined behavior).
    return true;
  }
  // |frame| is in I420
  virtual bool RenderFrame(const cricket::VideoFrame* frame) {
    const int actual_size = frame->CopyToBuffer(image_.get(),
                                                buffersize());
    if (actual_size > buffersize()) {
      ASSERT(false);
      // Skip frame.
      return true;
    }
    // Write to file.
    fwrite(image_.get(), sizeof(uint8), actual_size, output_file_);
    return true;
  }
  const uint8* image() const {
    return image_.get();
  }
  int buffersize() const {
    // I420 buffer size
    return (width_ * height_ * 3) >> 1;
  }
  int width() const {
    return width_;
  }
  int height() const {
    return height_;
  }

 protected:
  VideoRecorder()
      : width_(0),
        height_(0),
        output_file_(NULL) {}
  talk_base::scoped_array<uint8> image_;
  int width_;
  int height_;
  // File to record to.
  FILE* output_file_;
};
// Interface through which one test client hands a serialized signaling
// message directly to its peer, replacing a real signaling channel.
class SignalingMessageReceiver {
 public:
  virtual void ReceiveMessage(const std::string& msg) = 0;
 protected:
  SignalingMessageReceiver() {}
  virtual ~SignalingMessageReceiver() {}
};
// One endpoint of the in-process loopback call. Owns a PeerConnection plus
// its factory, implements PeerConnectionObserver to react to connection
// events, and implements SignalingMessageReceiver so the remote test client
// can deliver signaling messages straight to it.
class PeerConnectionP2PTestClient
    : public webrtc::PeerConnectionObserver,
      public SignalingMessageReceiver {
 public:
  // Factory; returns NULL if the PeerConnection stack fails to initialize.
  static PeerConnectionP2PTestClient* CreateClient(int id) {
    PeerConnectionP2PTestClient* client = new PeerConnectionP2PTestClient(id);
    if (!client->Init()) {
      delete client;
      return NULL;
    }
    return client;
  }

  ~PeerConnectionP2PTestClient() {
  }

  // Creates a local stream with one audio and one file-backed video track
  // and commits it, which starts offer generation.
  void StartSession() {
    // Audio track doesn't seem to be implemented yet. No need to pass a device
    // to it.
    talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
        peer_connection_factory_->CreateLocalAudioTrack("audio_track", NULL));
    talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
        peer_connection_factory_->CreateLocalVideoTrack(
            "video_track", OpenVideoFileCaptureDevice()));
    talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> stream =
        peer_connection_factory_->CreateLocalMediaStream("stream_label");
    stream->AddTrack(audio_track);
    stream->AddTrack(video_track);
    peer_connection_->AddStream(stream);
    peer_connection_->CommitStreamChanges();
  }

  // Sets (or clears, with NULL) the sink for outgoing signaling messages.
  void set_signaling_message_receiver(
      SignalingMessageReceiver* signaling_message_receiver) {
    signaling_message_receiver_ = signaling_message_receiver;
  }

  // SignalingMessageReceiver callback: feed the remote peer's message into
  // our own PeerConnection.
  virtual void ReceiveMessage(const std::string& msg) {
    peer_connection_->ProcessSignalingMessage(msg);
  }

  // PeerConnectionObserver callbacks.
  virtual void OnError() {}
  virtual void OnMessage(const std::string&) {}
  virtual void OnSignalingMessage(const std::string& msg) {
    if (signaling_message_receiver_ == NULL) {
      // Remote party may be deleted.
      return;
    }
    signaling_message_receiver_->ReceiveMessage(msg);
  }
  virtual void OnStateChange(Readiness) {}
  // Attaches a file-writing renderer to every video track of a newly
  // received remote stream.
  virtual void OnAddStream(webrtc::MediaStreamInterface* media_stream) {
    std::list<webrtc::VideoTrackInterface*> video_tracks;
    GetAllVideoTracks(media_stream, &video_tracks);
    int track_id = 0;
    for (std::list<webrtc::VideoTrackInterface*>::iterator iter =
             video_tracks.begin();
         iter != video_tracks.end();
         ++iter) {
      char file_name[256];
      GenerateRecordingFileName(track_id, file_name);
      talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
          video_renderer = webrtc::CreateVideoRenderer(
              VideoRecorder::CreateVideoRecorder(file_name));
      if (video_renderer == NULL) {
        ADD_FAILURE();
        continue;
      }
      (*iter)->SetRenderer(video_renderer);
      track_id++;
    }
  }
  virtual void OnRemoveStream(webrtc::MediaStreamInterface*) {
  }

 private:
  static const int kFileNameSize = 256;

  explicit PeerConnectionP2PTestClient(int id)
      : id_(id),
        peer_connection_(),
        peer_connection_factory_(),
        signaling_message_receiver_(NULL) {
  }

  // Creates the factory and a PeerConnection pointed at a public STUN
  // server. Returns false on any failure.
  bool Init() {
    EXPECT_TRUE(peer_connection_.get() == NULL);
    EXPECT_TRUE(peer_connection_factory_.get() == NULL);
    peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();
    if (peer_connection_factory_.get() == NULL) {
      ADD_FAILURE();
      return false;
    }
    const char server_configuration[] = "STUN stun.l.google.com:19302";
    peer_connection_ = peer_connection_factory_->CreatePeerConnection(
        server_configuration, this);
    return peer_connection_.get() != NULL;
  }

  // Writes "p2p_test_client_<id>_videotrack_<track>.yuv" into |file_name|.
  void GenerateRecordingFileName(int track, char file_name[kFileNameSize]) {
    if (file_name == NULL) {
      return;
    }
    snprintf(file_name, kFileNameSize,
             "p2p_test_client_%d_videotrack_%d.yuv", id_, track);
  }

  // Returns a capture module that reads frames from a prerecorded file.
  webrtc::VideoCaptureModule* OpenVideoFileCaptureDevice() {
    const char filename[] = "foreman_cif.yuv";
    webrtc::VideoCaptureModule* video_device =
        FileVideoCaptureModule::CreateFileVideoCaptureModule(filename);
    return video_device;
  }

  int id_;
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
  talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
      peer_connection_factory_;
  // Remote peer communication.
  SignalingMessageReceiver* signaling_message_receiver_;
};
// Drives two PeerConnectionP2PTestClients that deliver signaling messages
// directly to each other (no signaling server involved).
class P2PTestConductor {
 public:
  // Factory; returns NULL if either client fails to initialize.
  static P2PTestConductor* CreateConductor() {
    P2PTestConductor* conductor = new P2PTestConductor();
    if (!conductor->Init()) {
      delete conductor;
      return NULL;
    }
    return conductor;
  }
  ~P2PTestConductor() {
    // Bug fix: Init() can fail part way, leaving one or both entries NULL,
    // and CreateConductor then deletes the conductor — so every dereference
    // here must be guarded against NULL.
    if (clients[0] != NULL) {
      clients[0]->set_signaling_message_receiver(NULL);
    }
    if (clients[1] != NULL) {
      clients[1]->set_signaling_message_receiver(NULL);
    }
    for (int i = 0; i < kClients; ++i) {
      if (clients[i] != NULL) {
        // TODO(hellner): currently deleting the clients will trigger an assert
        // in cricket::BaseChannel::DisableMedia_w (not due to the unit test).
        // Fix that problem and remove the below comment.
        delete clients[i];
      }
    }
  }
  // Starts the call: client 0 sends the initial offer to client 1.
  void StartSession() {
    PeerConnectionP2PTestClient* initiating_client = clients[0];
    initiating_client->StartSession();
  }

 private:
  static const int kClients = 2;
  P2PTestConductor() {
    clients[0] = NULL;
    clients[1] = NULL;
  }
  // Creates both clients and wires them to exchange signaling messages with
  // each other. Returns false on any failure.
  bool Init() {
    for (int i = 0; i < kClients; ++i) {
      clients[i] = PeerConnectionP2PTestClient::CreateClient(i);
      if (clients[i] == NULL) {
        return false;
      }
    }
    clients[0]->set_signaling_message_receiver(clients[1]);
    clients[1]->set_signaling_message_receiver(clients[0]);
    return true;
  }
  PeerConnectionP2PTestClient* clients[kClients];
};
// End-to-end loopback test: two in-process PeerConnection clients call each
// other; received video is written to .yuv files by VideoRecorder.
TEST(PeerConnection2, LocalP2PTest) {
  P2PTestConductor* test = P2PTestConductor::CreateConductor();
  ASSERT_TRUE(test != NULL);
  test->StartSession();
  // Run the call for ten seconds; signaling and media callbacks are
  // serviced by this message loop.
  talk_base::Thread::Current()->ProcessMessages(10000);
  delete test;
}

View File

@ -1,36 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "gtest/gtest.h"
// Test runner entry point.
int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // Keep the result in a named local so a breakpoint can be planted here
  // before the process exits; note the value from RUN_ALL_TESTS() must still
  // be what main returns.
  const int test_status = RUN_ALL_TESTS();
  return test_status;
}

View File

@ -1,111 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/app/webrtc_dev/peerconnectionfactoryimpl.h"
#include "talk/base/basicpacketsocketfactory.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/thread.h"
#include "talk/session/phone/webrtccommon.h"
#include "talk/session/phone/webrtcvoe.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "modules/audio_device/main/source/audio_device_impl.h"
#else
#include "third_party/webrtc/files/include/audio_device_impl.h"
#endif
static const char kAudioDeviceLabel[] = "dummy_audio_device";
static const char kStunConfiguration[] = "STUN stun.l.google.com:19302";
namespace webrtc {
// PeerConnectionObserver stub: every callback is a no-op. The factory tests
// below only check object creation, not connection events.
class MockPeerConnectionObserver : public PeerConnectionObserver {
 public:
  virtual void OnError() {}
  virtual void OnMessage(const std::string& msg) {}
  virtual void OnSignalingMessage(const std::string& msg) {}
  virtual void OnStateChange(Readiness state) {}
  virtual void OnAddStream(MediaStreamInterface* stream) {}
  virtual void OnRemoveStream(MediaStreamInterface* stream) {}
};
// TODO(mallinath) - Fix crash when components are created in factory.
TEST(PeerConnectionFactory, DISABLED_CreatePCUsingInternalModules) {
  MockPeerConnectionObserver observer;
  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
      CreatePeerConnectionFactory());
  ASSERT_TRUE(factory.get() != NULL);
  // An empty configuration string must be rejected...
  talk_base::scoped_refptr<PeerConnectionInterface> pc1(
      factory->CreatePeerConnection("", &observer));
  EXPECT_TRUE(pc1.get() == NULL);
  // ...while a valid STUN configuration must yield a connection.
  talk_base::scoped_refptr<PeerConnectionInterface> pc2(
      factory->CreatePeerConnection(kStunConfiguration, &observer));
  EXPECT_TRUE(pc2.get() != NULL);
}
// Exercises the factory variant where the application supplies its own
// threads, networking objects and audio device.
TEST(PeerConnectionFactory, CreatePCUsingExternalModules) {
  // Create an audio device. Use the default sound card.
  talk_base::scoped_refptr<AudioDeviceModule> audio_device(
      AudioDeviceModuleImpl::Create(0));
  // Create a libjingle thread used as internal worker thread.
  // NOTE(review): w_thread is started but never handed to the factory
  // (Thread::Current() is passed instead) — looks like a leftover; confirm.
  talk_base::scoped_ptr<talk_base::Thread> w_thread(new talk_base::Thread);
  EXPECT_TRUE(w_thread->Start());
  // Ownership of these pointers is handed over to the PeerConnectionFactory.
  // TODO(henrike): add a check that ensures that the destructor is called for
  // these classes. E.g. by writing a wrapper and set a flag in the wrappers
  // destructor, or e.g. add a callback.
  talk_base::NetworkManager* network_manager =
      new talk_base::BasicNetworkManager();
  talk_base::PacketSocketFactory* socket_factory =
      new talk_base::BasicPacketSocketFactory();
  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory =
      CreatePeerConnectionFactory(talk_base::Thread::Current(),
                                  talk_base::Thread::Current(),
                                  network_manager,
                                  socket_factory,
                                  audio_device);
  ASSERT_TRUE(factory.get() != NULL);
  MockPeerConnectionObserver observer;
  // An empty configuration string must be rejected...
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> pc1(
      factory->CreatePeerConnection("", &observer));
  EXPECT_TRUE(pc1.get() == NULL);
  // ...while a valid STUN configuration must yield a connection.
  talk_base::scoped_refptr<PeerConnectionInterface> pc2(
      factory->CreatePeerConnection(kStunConfiguration, &observer));
  EXPECT_TRUE(pc2.get() != NULL);
}
} // namespace webrtc

View File

@ -1,241 +0,0 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/peerconnectionfactoryimpl.h"
#include "talk/app/webrtc_dev/mediastreamproxy.h"
#include "talk/app/webrtc_dev/mediastreamtrackproxy.h"
#include "talk/app/webrtc_dev/peerconnectionimpl.h"
#include "talk/base/basicpacketsocketfactory.h"
#include "talk/session/phone/dummydevicemanager.h"
#include "talk/session/phone/webrtcmediaengine.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "modules/audio_device/main/interface/audio_device.h"
#else
#include "third_party/webrtc/files/include/audio_device.h"
#endif
using talk_base::scoped_refptr;
namespace {
// Result slot for the synchronous MSG_INIT_FACTORY round trip.
typedef talk_base::TypedMessageData<bool> InitMessageData;

// Message payload used to marshal a CreatePeerConnection call onto the
// signaling thread. |configuration| is held by reference, which is only
// safe because the message is sent synchronously (Thread::Send) while the
// caller's string is alive.
struct CreatePeerConnectionParams : public talk_base::MessageData {
  CreatePeerConnectionParams(const std::string& configuration,
                             webrtc::PeerConnectionObserver* observer)
      : configuration(configuration), observer(observer) {
  }
  // Out parameter, filled in on the signaling thread.
  scoped_refptr<webrtc::PeerConnectionInterface> peerconnection;
  const std::string& configuration;
  webrtc::PeerConnectionObserver* observer;
};

// Message ids handled by PeerConnectionFactoryImpl::OnMessage.
enum {
  MSG_INIT_FACTORY = 1,
  MSG_CREATE_PEERCONNECTION = 2,
};
} // namespace anonymous
namespace webrtc {
// Creates a factory that builds and owns its own worker/signaling threads
// and default networking/audio components. Returns NULL on failure.
scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory() {
  talk_base::RefCountedObject<PeerConnectionFactoryImpl>* factory =
      new talk_base::RefCountedObject<PeerConnectionFactoryImpl>();
  if (factory->Initialize())
    return factory;
  // Initialization failed; the caller receives a NULL scoped_refptr.
  delete factory;
  return NULL;
}
// Creates a factory around caller-supplied threads and networking objects.
// Returns NULL on failure.
scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
    talk_base::Thread* worker_thread,
    talk_base::Thread* signaling_thread,
    talk_base::NetworkManager* network_manager,
    talk_base::PacketSocketFactory* socket_factory,
    AudioDeviceModule* default_adm) {
  talk_base::RefCountedObject<PeerConnectionFactoryImpl>* factory =
      new talk_base::RefCountedObject<PeerConnectionFactoryImpl>(
          worker_thread, signaling_thread, network_manager, socket_factory,
          default_adm);
  if (factory->Initialize())
    return factory;
  // Initialization failed; the caller receives a NULL scoped_refptr.
  delete factory;
  return NULL;
}
// Default variant: the factory creates, owns and starts both of its
// threads.
PeerConnectionFactoryImpl::PeerConnectionFactoryImpl()
    : worker_thread_(new talk_base::Thread),
      signaling_thread_(new talk_base::Thread) {
  worker_thread_ptr_ = worker_thread_.get();
  signaling_thread_ptr_ = signaling_thread_.get();
  // NOTE(review): ASSERT is compiled out in release builds, so a failed
  // Start() goes unnoticed there.
  bool result = worker_thread_->Start();
  ASSERT(result);
  result = signaling_thread_->Start();
  ASSERT(result);
}
// External-components variant. Only the raw thread pointers are stored
// (the owning thread members stay unset), so the caller retains ownership
// of both threads; network_manager/socket_factory/default_adm go into
// smart-pointer members — presumably transferring ownership (the unit test
// comments say ownership is handed over) — confirm against the header.
PeerConnectionFactoryImpl::PeerConnectionFactoryImpl(
    talk_base::Thread* worker_thread,
    talk_base::Thread* signaling_thread,
    talk_base::NetworkManager* network_manager,
    talk_base::PacketSocketFactory* socket_factory,
    AudioDeviceModule* default_adm)
    : worker_thread_ptr_(worker_thread),
      signaling_thread_ptr_(signaling_thread),
      network_manager_(network_manager),
      socket_factory_(socket_factory),
      default_adm_(default_adm) {
  // All five dependencies are required in this constructor variant.
  ASSERT(worker_thread);
  ASSERT(signaling_thread);
  ASSERT(network_manager);
  ASSERT(socket_factory);
  ASSERT(default_adm);
}
// No explicit teardown; owned members release themselves via their own
// destructors.
PeerConnectionFactoryImpl::~PeerConnectionFactoryImpl() {
}
bool PeerConnectionFactoryImpl::Initialize() {
InitMessageData result(false);
signaling_thread_ptr_->Send(this, MSG_INIT_FACTORY, &result);
return result.data();
}
// Dispatches messages posted to this handler on the signaling thread.
void PeerConnectionFactoryImpl::OnMessage(talk_base::Message* msg) {
  if (msg->message_id == MSG_INIT_FACTORY) {
    InitMessageData* data = static_cast<InitMessageData*>(msg->pdata);
    data->data() = Initialize_s();
  } else if (msg->message_id == MSG_CREATE_PEERCONNECTION) {
    CreatePeerConnectionParams* request =
        static_cast<CreatePeerConnectionParams*>(msg->pdata);
    request->peerconnection = CreatePeerConnection_s(request->configuration,
                                                     request->observer);
  }
}
// Runs on the signaling thread. Builds default networking objects when the
// application did not supply its own, then creates the media engine and
// channel manager.
bool PeerConnectionFactoryImpl::Initialize_s() {
  if (!network_manager_.get())
    network_manager_.reset(new talk_base::BasicNetworkManager());
  if (!socket_factory_.get())
    socket_factory_.reset(
        new talk_base::BasicPacketSocketFactory(worker_thread_ptr_));
  cricket::DummyDeviceManager* device_manager(
      new cricket::DummyDeviceManager());
  // TODO(perkj): Need to make sure only one VoE is created inside
  // WebRtcMediaEngine.
  cricket::WebRtcMediaEngine* webrtc_media_engine(
      new cricket::WebRtcMediaEngine(default_adm_.get(),
                                     NULL,    // No secondary adm.
                                     NULL));  // No vcm available.
  // ChannelManager presumably takes ownership of the engine and the device
  // manager — confirm against its constructor documentation.
  channel_manager_.reset(new cricket::ChannelManager(
      webrtc_media_engine, device_manager, worker_thread_ptr_));
  if (!channel_manager_->Init()) {
    return false;
  }
  return true;
}
// Public entry point: marshals the creation request onto the signaling
// thread and blocks until the connection (or NULL) is available.
scoped_refptr<PeerConnectionInterface>
PeerConnectionFactoryImpl::CreatePeerConnection(
    const std::string& configuration,
    PeerConnectionObserver* observer) {
  CreatePeerConnectionParams request(configuration, observer);
  signaling_thread_ptr_->Send(this, MSG_CREATE_PEERCONNECTION, &request);
  return request.peerconnection;
}
// Runs on the signaling thread. Returns NULL when the connection cannot be
// initialized from |configuration|.
scoped_refptr<PeerConnectionInterface>
PeerConnectionFactoryImpl::CreatePeerConnection_s(
    const std::string& configuration,
    PeerConnectionObserver* observer) {
  talk_base::RefCountedObject<PeerConnectionImpl>* connection(
      new talk_base::RefCountedObject<PeerConnectionImpl>(this));
  if (connection->Initialize(configuration, observer))
    return connection;
  delete connection;
  return NULL;
}
// Creates a local media stream proxy bound to the signaling thread.
scoped_refptr<LocalMediaStreamInterface>
PeerConnectionFactoryImpl::CreateLocalMediaStream(
    const std::string& label) {
  return MediaStreamProxy::Create(label, signaling_thread_ptr_);
}
// Creates a local video track proxy, fed by |video_device| and bound to the
// signaling thread.
scoped_refptr<LocalVideoTrackInterface>
PeerConnectionFactoryImpl::CreateLocalVideoTrack(
    const std::string& label,
    VideoCaptureModule* video_device) {
  return VideoTrackProxy::CreateLocal(label, video_device,
                                      signaling_thread_ptr_);
}
// Creates a local audio track proxy, fed by |audio_device| and bound to the
// signaling thread.
scoped_refptr<LocalAudioTrackInterface>
PeerConnectionFactoryImpl::CreateLocalAudioTrack(
    const std::string& label,
    AudioDeviceModule* audio_device) {
  return AudioTrackProxy::CreateLocal(label, audio_device,
                                      signaling_thread_ptr_);
}
// Plain accessors exposing the factory's internals; PeerConnectionImpl
// receives this factory (see CreatePeerConnection_s) and presumably reads
// these — confirm. None of them transfer ownership.
cricket::ChannelManager* PeerConnectionFactoryImpl::channel_manager() {
  return channel_manager_.get();
}
talk_base::Thread* PeerConnectionFactoryImpl::signaling_thread() {
  return signaling_thread_ptr_;
}
talk_base::Thread* PeerConnectionFactoryImpl::worker_thread() {
  return worker_thread_ptr_;
}
talk_base::NetworkManager* PeerConnectionFactoryImpl::network_manager() {
  return network_manager_.get();
}
talk_base::PacketSocketFactory* PeerConnectionFactoryImpl::socket_factory() {
  return socket_factory_.get();
}
} // namespace webrtc

View File

@ -1,96 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_
#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_
#include <string>
#include "talk/base/scoped_ptr.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/base/thread.h"
#include "talk/session/phone/channelmanager.h"
namespace webrtc {
// Factory for PeerConnection objects and local media streams/tracks.
// Owns (or borrows) the signaling and worker threads and the cricket
// ChannelManager shared by every PeerConnection it creates.  Public
// creation methods are callable from any thread; work is marshaled to the
// signaling thread via talk_base::MessageHandler.
class PeerConnectionFactoryImpl : public PeerConnectionFactoryInterface,
                                  public talk_base::MessageHandler {
 public:
  // Creates a PeerConnection from a service |config| string.  Returns NULL
  // on failure.
  talk_base::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
      const std::string& config,
      PeerConnectionObserver* observer);
  // Must succeed before any of the Create* methods are used.
  bool Initialize();
  virtual talk_base::scoped_refptr<LocalMediaStreamInterface>
      CreateLocalMediaStream(const std::string& label);
  virtual talk_base::scoped_refptr<LocalVideoTrackInterface>
      CreateLocalVideoTrack(const std::string& label,
                            VideoCaptureModule* video_device);
  virtual talk_base::scoped_refptr<LocalAudioTrackInterface>
      CreateLocalAudioTrack(const std::string& label,
                            AudioDeviceModule* audio_device);
  // Accessors for the shared infrastructure used by PeerConnectionImpl.
  virtual cricket::ChannelManager* channel_manager();
  virtual talk_base::Thread* signaling_thread();
  virtual talk_base::Thread* worker_thread();
  virtual talk_base::NetworkManager* network_manager();
  virtual talk_base::PacketSocketFactory* socket_factory();

 protected:
  PeerConnectionFactoryImpl();
  // Constructor taking externally owned threads/managers; |default_adm| is
  // an external audio device used for playback.
  PeerConnectionFactoryImpl(talk_base::Thread* worker_thread,
                            talk_base::Thread* signaling_thread,
                            talk_base::NetworkManager* network_manager,
                            talk_base::PacketSocketFactory* socket_factory,
                            AudioDeviceModule* default_adm);
  virtual ~PeerConnectionFactoryImpl();

 private:
  // Signaling-thread counterparts of Initialize/CreatePeerConnection.
  bool Initialize_s();
  talk_base::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_s(
      const std::string& configuration,
      PeerConnectionObserver* observer);
  // Implements talk_base::MessageHandler.
  void OnMessage(talk_base::Message* msg);

  // When the factory owns its threads they live in the scoped_ptrs; the
  // *_ptr_ raw pointers are what the rest of the code uses either way.
  talk_base::scoped_ptr<talk_base::Thread> signaling_thread_;
  talk_base::Thread* signaling_thread_ptr_;
  talk_base::scoped_ptr<talk_base::Thread> worker_thread_;
  talk_base::Thread* worker_thread_ptr_;
  talk_base::scoped_ptr<talk_base::NetworkManager> network_manager_;
  talk_base::scoped_ptr<talk_base::PacketSocketFactory> socket_factory_;
  // External Audio device used for audio playback.
  talk_base::scoped_refptr<AudioDeviceModule> default_adm_;
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_

View File

@ -1,290 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/peerconnectionimpl.h"
#include <vector>
#include "talk/app/webrtc_dev/mediastreamhandler.h"
#include "talk/app/webrtc_dev/streamcollectionimpl.h"
#include "talk/base/logging.h"
#include "talk/session/phone/channelmanager.h"
namespace {
// The number of the tokens in the config string
// ("<service type> <address>"), also reused for "<host>:<port>".
static const size_t kConfigTokens = 2;
static const int kServiceCount = 5;
// The default stun port.
static const int kDefaultPort = 3478;
// NOTE: Must be in the same order as the ServiceType enum.
static const char* kValidServiceTypes[kServiceCount] = {
    "STUN", "STUNS", "TURN", "TURNS", "INVALID" };
// User agent reported by the HttpPortAllocator.
static const char kUserAgent[] = "PeerConnection User Agent";
enum ServiceType {
  STUN,     // Indicates a STUN server.
  STUNS,    // Indicates a STUN server used with a TLS session.
  TURN,     // Indicates a TURN server
  TURNS,    // Indicates a TURN server used with a TLS session.
  INVALID,  // Unknown.
};
// Message ids dispatched to PeerConnectionImpl::OnMessage on the signaling
// thread.
enum {
  MSG_COMMITSTREAMCHANGES = 1,
  MSG_PROCESSSIGNALINGMESSAGE = 2,
  MSG_RETURNREMOTEMEDIASTREAMS = 3,
  MSG_TERMINATE = 4
};
// Parses a config string of the form "<service type> <host>[:<port>]",
// e.g. "STUN stun.l.google.com:19302".  On success fills in |addr| and
// |service_type| and returns true; logs and returns false on malformed
// input, unknown service type, or out-of-range port.
static bool ParseConfigString(const std::string& config,
                              talk_base::SocketAddress* addr,
                              ServiceType* service_type) {
  std::vector<std::string> tokens;
  talk_base::tokenize(config, ' ', &tokens);
  if (tokens.size() != kConfigTokens) {
    LOG(WARNING) << "Invalid config string";
    return false;
  }

  // Map the first token onto the ServiceType enum (same order as
  // kValidServiceTypes).
  *service_type = INVALID;
  const std::string& type = tokens[0];
  for (int i = 0; i < kServiceCount; ++i) {  // int: avoids signed/unsigned mix
    if (type.compare(kValidServiceTypes[i]) == 0) {
      *service_type = static_cast<ServiceType>(i);
      break;
    }
  }
  if (*service_type == INVALID) {
    LOG(WARNING) << "Invalid service type: " << type;
    return false;
  }

  // Split "<host>[:<port>]".  Without an explicit port the default STUN
  // port is used.
  std::string service_address = tokens[1];
  int port;
  tokens.clear();
  talk_base::tokenize(service_address, ':', &tokens);
  if (tokens.size() != kConfigTokens) {
    port = kDefaultPort;
  } else {
    // Bug fix: use only the host part for SetIP.  Previously the full
    // "host:port" string was handed to SetIP whenever a port was given,
    // which cannot resolve as a hostname.
    service_address = tokens[0];
    port = atoi(tokens[1].c_str());
    if (port <= 0 || port > 0xffff) {
      LOG(WARNING) << "Invalid port: " << tokens[1];
      return false;
    }
  }
  addr->SetIP(service_address);
  addr->SetPort(port);
  return true;
}
// Message payload for MSG_PROCESSSIGNALINGMESSAGE: the raw signaling
// message plus a snapshot of the local streams taken on the calling thread.
struct SignalingParams : public talk_base::MessageData {
  SignalingParams(const std::string& msg,
                  webrtc::StreamCollectionInterface* local_streams)
      : msg(msg),
        local_streams(local_streams) {}
  const std::string msg;
  talk_base::scoped_refptr<webrtc::StreamCollectionInterface> local_streams;
};

// Message payload carrying a stream collection either into the signaling
// thread (MSG_COMMITSTREAMCHANGES) or out of it
// (MSG_RETURNREMOTEMEDIASTREAMS, where |streams| acts as the out-param).
struct StreamCollectionParams : public talk_base::MessageData {
  explicit StreamCollectionParams(webrtc::StreamCollectionInterface* streams)
      : streams(streams) {}
  talk_base::scoped_refptr<webrtc::StreamCollectionInterface> streams;
};
} // namespace
namespace webrtc {
// Wires up the full PeerConnection stack: a port allocator, a
// WebRtcSession on the factory's threads, the signaling state machine, and
// the media stream handlers.  Initializer order matters: session_ uses
// port_allocator_, signaling_ and stream_handler_ use session_.
PeerConnectionImpl::PeerConnectionImpl(
    PeerConnectionFactoryImpl* factory)
    : factory_(factory),
      observer_(NULL),
      local_media_streams_(StreamCollectionImpl::Create()),
      remote_media_streams_(StreamCollectionImpl::Create()),
      port_allocator_(new cricket::HttpPortAllocator(
          factory->network_manager(),
          factory->socket_factory(),
          std::string(kUserAgent))),
      session_(new WebRtcSession(factory->channel_manager(),
                                 factory->signaling_thread(),
                                 factory->worker_thread(),
                                 port_allocator_.get())),
      signaling_(new PeerConnectionSignaling(factory->signaling_thread(),
                                             session_.get())),
      stream_handler_(new MediaStreamHandlers(session_.get())) {
  // Forward signaling-layer events to this object's handlers.
  signaling_->SignalNewPeerConnectionMessage.connect(
      this, &PeerConnectionImpl::OnNewPeerConnectionMessage);
  signaling_->SignalRemoteStreamAdded.connect(
      this, &PeerConnectionImpl::OnRemoteStreamAdded);
  signaling_->SignalRemoteStreamRemoved.connect(
      this, &PeerConnectionImpl::OnRemoteStreamRemoved);

  // Register with WebRtcSession
  session_->RegisterObserver(signaling_.get());
}
// Drops any queued messages for this handler, then synchronously runs
// Terminate_s on the signaling thread so all members are torn down there
// before the destructor finishes.
PeerConnectionImpl::~PeerConnectionImpl() {
  signaling_thread()->Clear(this);
  signaling_thread()->Send(this, MSG_TERMINATE);
}

// Clean up what needs to be cleaned up on the signaling thread.
// Reset order is the reverse of construction: handlers and signaling
// before the session, the session before its port allocator.
void PeerConnectionImpl::Terminate_s() {
  stream_handler_.reset();
  signaling_.reset();
  session_.reset();
  port_allocator_.reset();
}
// Parses |configuration| and points the port allocator at the configured
// STUN or TURN server, then initializes the WebRtcSession.  Returns false
// on a bad config string or unsupported service type.  |observer| must
// outlive this object.
bool PeerConnectionImpl::Initialize(const std::string& configuration,
                                    PeerConnectionObserver* observer) {
  ASSERT(observer);
  if (!observer)
    return false;
  observer_ = observer;
  talk_base::SocketAddress address;
  ServiceType service;
  if (!ParseConfigString(configuration, &address, &service))
    return false;
  switch (service) {
    case STUN: {
      std::vector<talk_base::SocketAddress> address_vector;
      address_vector.push_back(address);
      port_allocator_->SetStunHosts(address_vector);
      break;
    }
    case TURN: {
      std::vector<std::string> address_vector;
      address_vector.push_back(address.ToString());
      port_allocator_->SetRelayHosts(address_vector);
      break;
    }
    default:
      // NOTE: STUNS/TURNS parse successfully in ParseConfigString but are
      // rejected here — TLS variants are not yet supported.
      ASSERT(!"NOT SUPPORTED");
      return false;
  }
  // Initialize the WebRtcSession. It creates transport channels etc.
  return session_->Initialize();
}
// Returns the collection of streams the application has added locally.
talk_base::scoped_refptr<StreamCollectionInterface>
PeerConnectionImpl::local_streams() {
  return local_media_streams_;
}

// Returns a snapshot of the remote streams.  remote_media_streams_ is only
// touched on the signaling thread, so the copy is taken there via a
// blocking Send; |msg.streams| is filled in by OnMessage.
talk_base::scoped_refptr<StreamCollectionInterface>
PeerConnectionImpl::remote_streams() {
  StreamCollectionParams msg(NULL);
  signaling_thread()->Send(this, MSG_RETURNREMOTEMEDIASTREAMS, &msg);
  return msg.streams;
}
// Queues |msg| for asynchronous processing on the signaling thread,
// together with a snapshot of the current local streams.  Always returns
// true: the message is accepted for processing; errors surface later via
// the observer.  (Bug fix: this bool function previously fell off the end
// without returning — undefined behavior.)
bool PeerConnectionImpl::ProcessSignalingMessage(const std::string& msg) {
  SignalingParams* parameter(new SignalingParams(
      msg, StreamCollectionImpl::Create(local_media_streams_)));
  // Ownership of |parameter| transfers to OnMessage, which deletes it.
  signaling_thread()->Post(this, MSG_PROCESSSIGNALINGMESSAGE, parameter);
  return true;
}
// Adds a local stream to the pending set.  Takes effect on the wire only
// after CommitStreamChanges().
void PeerConnectionImpl::AddStream(LocalMediaStreamInterface* local_stream) {
  local_media_streams_->AddStream(local_stream);
}

// Removes a local stream from the pending set; also deferred until
// CommitStreamChanges().
void PeerConnectionImpl::RemoveStream(
    LocalMediaStreamInterface* remove_stream) {
  local_media_streams_->RemoveStream(remove_stream);
}

// Snapshots the current local streams and posts them to the signaling
// thread, where an offer is created.  |msg| is deleted in OnMessage.
void PeerConnectionImpl::CommitStreamChanges() {
  StreamCollectionParams* msg(new StreamCollectionParams(
      StreamCollectionImpl::Create(local_media_streams_)));
  signaling_thread()->Post(this, MSG_COMMITSTREAMCHANGES, msg);
}
// Runs on the signaling thread.  Posted messages own their payload and
// delete it here; Sent messages (MSG_RETURNREMOTEMEDIASTREAMS,
// MSG_TERMINATE) leave the payload owned by the blocked caller.
void PeerConnectionImpl::OnMessage(talk_base::Message* msg) {
  talk_base::MessageData* data = msg->pdata;
  switch (msg->message_id) {
    case MSG_COMMITSTREAMCHANGES: {
      // From CommitStreamChanges(): offer the snapshotted local streams.
      StreamCollectionParams* param(
          static_cast<StreamCollectionParams*> (data));
      signaling_->CreateOffer(param->streams);
      stream_handler_->CommitLocalStreams(param->streams);
      delete data;  // Because it is Posted.
      break;
    }
    case MSG_PROCESSSIGNALINGMESSAGE: {
      // From ProcessSignalingMessage(): feed the remote message to the
      // signaling state machine.
      SignalingParams* params(static_cast<SignalingParams*> (data));
      signaling_->ProcessSignalingMessage(params->msg, params->local_streams);
      delete data;  // Because it is Posted.
      break;
    }
    case MSG_RETURNREMOTEMEDIASTREAMS: {
      // From remote_streams(): fill the out-param with a copy; the caller
      // is blocked in Send() and owns |data|.
      StreamCollectionParams* param(
          static_cast<StreamCollectionParams*> (data));
      param->streams = StreamCollectionImpl::Create(remote_media_streams_);
      break;
    }
    case MSG_TERMINATE: {
      // From the destructor (Sent): tear down members on this thread.
      Terminate_s();
      break;
    }
  }
}
// Called by PeerConnectionSignaling when an outgoing signaling message is
// ready; forwards it to the application observer.
void PeerConnectionImpl::OnNewPeerConnectionMessage(
    const std::string& message) {
  observer_->OnSignalingMessage(message);
}

// Called (on the signaling thread) when the remote side adds a stream:
// track it, hook up media handling, then notify the application.
void PeerConnectionImpl::OnRemoteStreamAdded(
    MediaStreamInterface* remote_stream) {
  // TODO(perkj): add function in pc signaling to return a collection of
  // remote streams.
  // This way we can avoid keeping a separate list of remote_media_streams_.
  remote_media_streams_->AddStream(remote_stream);
  stream_handler_->AddRemoteStream(remote_stream);
  observer_->OnAddStream(remote_stream);
}

// Mirror of OnRemoteStreamAdded for stream removal.
void PeerConnectionImpl::OnRemoteStreamRemoved(
    MediaStreamInterface* remote_stream) {
  // TODO(perkj): add function in pc signaling to return a collection of
  // remote streams.
  // This way we can avoid keeping a separate list of remote_media_streams_.
  remote_media_streams_->RemoveStream(remote_stream);
  stream_handler_->RemoveRemoteStream(remote_stream);
  observer_->OnRemoveStream(remote_stream);
}
} // namespace webrtc

View File

@ -1,109 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_PEERCONNECTIONIMPL_H_
#define TALK_APP_WEBRTC_PEERCONNECTIONIMPL_H_
#include <map>
#include <string>
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/peerconnectionfactoryimpl.h"
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
#include "talk/app/webrtc_dev/webrtcsession.h"
#include "talk/base/scoped_ptr.h"
#include "talk/p2p/client/httpportallocator.h"
namespace cricket {
class ChannelManager;
}
namespace webrtc {
class MediaStreamHandlers;
class StreamCollectionImpl;
// PeerConnectionImpl implements the PeerConnection interface.
// It uses PeerConnectionSignaling and WebRtcSession to implement
// the PeerConnection functionality.
class PeerConnectionImpl : public PeerConnectionInterface,
                           public talk_base::MessageHandler,
                           public sigslot::has_slots<> {
 public:
  explicit PeerConnectionImpl(PeerConnectionFactoryImpl* factory);
  // Parses |configuration| ("<service> <host>[:<port>]") and initializes
  // the underlying WebRtcSession.  |observer| must outlive this object.
  bool Initialize(const std::string& configuration,
                  PeerConnectionObserver* observer);
  virtual ~PeerConnectionImpl();

  // Queues a remote signaling message for processing on the signaling
  // thread.
  virtual bool ProcessSignalingMessage(const std::string& msg);
  virtual bool Send(const std::string& msg) {
    // TODO(perkj): implement
    ASSERT(false);
    // Bug fix: previously this bool function fell off the end after the
    // (release-mode no-op) ASSERT — undefined behavior.
    return false;
  }
  virtual talk_base::scoped_refptr<StreamCollectionInterface> local_streams();
  virtual talk_base::scoped_refptr<StreamCollectionInterface> remote_streams();
  // Stream changes are staged locally and sent on CommitStreamChanges().
  virtual void AddStream(LocalMediaStreamInterface* stream);
  virtual void RemoveStream(LocalMediaStreamInterface* stream);
  virtual void CommitStreamChanges();

 private:
  // Implement talk_base::MessageHandler.
  void OnMessage(talk_base::Message* msg);
  // Signals from PeerConnectionSignaling.
  void OnNewPeerConnectionMessage(const std::string& message);
  void OnRemoteStreamAdded(MediaStreamInterface* remote_stream);
  void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream);
  // Signaling-thread half of the destructor.
  void Terminate_s();

  talk_base::Thread* signaling_thread() {
    return factory_->signaling_thread();
  }

  // Storing the factory as a scoped reference pointer ensures that the memory
  // in the PeerConnectionFactoryImpl remains available as long as the
  // PeerConnection is running. It is passed to PeerConnection as a raw pointer.
  // However, since the reference counting is done in the
  // PeerConnectionFactoryInteface all instances created using the raw pointer
  // will refer to the same reference count.
  talk_base::scoped_refptr<PeerConnectionFactoryImpl> factory_;
  PeerConnectionObserver* observer_;
  talk_base::scoped_refptr<StreamCollectionImpl> local_media_streams_;
  talk_base::scoped_refptr<StreamCollectionImpl> remote_media_streams_;
  talk_base::scoped_ptr<cricket::HttpPortAllocator> port_allocator_;
  talk_base::scoped_ptr<WebRtcSession> session_;
  talk_base::scoped_ptr<PeerConnectionSignaling> signaling_;
  talk_base::scoped_ptr<MediaStreamHandlers> stream_handler_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_PEERCONNECTIONIMPL_H_

View File

@ -1,81 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/peerconnectionimpl.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/thread.h"
// Label of the local media stream created in the test.
static const char kStreamLabel1[] = "local_stream_1";
// Config string pointing at Google's public STUN server.
static const char kStunConfiguration[] = "STUN stun.l.google.com:19302";
namespace webrtc {
// No-op observer: satisfies the PeerConnectionObserver interface so a
// PeerConnection can be created; the test does not inspect callbacks.
class MockPeerConnectionObserver : public PeerConnectionObserver {
 public:
  virtual void OnError() {}
  virtual void OnMessage(const std::string& msg) {}
  virtual void OnSignalingMessage(const std::string& msg) {}
  virtual void OnStateChange(Readiness state) {}
  virtual void OnAddStream(MediaStreamInterface* stream) {}
  virtual void OnRemoveStream(MediaStreamInterface* stream) {}
};
// Fixture creating a factory and a STUN-configured PeerConnection for each
// test.  SetUp aborts the test if either creation fails.
class PeerConnectionImplTest : public testing::Test {
 public:
 protected:
  virtual void SetUp() {
    pc_factory_ = webrtc::CreatePeerConnectionFactory();
    ASSERT_TRUE(pc_factory_.get() != NULL);
    pc_ = pc_factory_->CreatePeerConnection(kStunConfiguration, &observer_);
    ASSERT_TRUE(pc_.get() != NULL);
  }

  talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
  talk_base::scoped_refptr<PeerConnectionInterface> pc_;
  MockPeerConnectionObserver observer_;
};
// Adds one local stream, commits, and checks it shows up with the right
// label.  DISABLED_ prefix keeps gtest from running it by default.
TEST_F(PeerConnectionImplTest, DISABLED_AddRemoveStream) {
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      pc_factory_->CreateLocalMediaStream(label));

  pc_->AddStream(stream);
  pc_->CommitStreamChanges();
  EXPECT_EQ(1l, pc_->local_streams()->count());
  // Guard the at(0) access below against an empty collection.
  if (pc_->local_streams()->count() <= 0)
    return;
  EXPECT_EQ(0, pc_->local_streams()->at(0)->label().compare(kStreamLabel1));
}
} // namespace webrtc

View File

@ -1,95 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/peerconnectionmessage.h"
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/webrtcjson.h"
namespace webrtc {
// Creates a message of |type| wrapping |desc| (not owned) and |candidates|.
// Caller owns the returned object.
PeerConnectionMessage* PeerConnectionMessage::Create(
    PeerConnectionMessageType type,
    const cricket::SessionDescription* desc,
    const std::vector<cricket::Candidate>& candidates) {
  return new PeerConnectionMessage(type, desc, candidates);
}

// Parses a serialized |message|.  Returns NULL if deserialization fails;
// caller owns the returned object on success.
PeerConnectionMessage* PeerConnectionMessage::Create(
    const std::string& message) {
  PeerConnectionMessage* pc_message(new PeerConnectionMessage());
  if (!pc_message->Deserialize(message)) {
    // Bug fix: the partially constructed message was previously leaked on
    // a parse failure.
    delete pc_message;
    return NULL;
  }
  return pc_message;
}

// Creates a kError message carrying |error|.  Caller owns the result.
PeerConnectionMessage* PeerConnectionMessage::CreateErrorMessage(
    ErrorCode error) {
  return new PeerConnectionMessage(error);
}
// Full constructor: |desc| is stored as a weak reference (not owned);
// |candidates| are copied.
PeerConnectionMessage::PeerConnectionMessage(
    PeerConnectionMessageType type,
    const cricket::SessionDescription* desc,
    const std::vector<cricket::Candidate>& candidates)
    : type_(type),
      error_code_(kNoError),
      desc_(desc),
      candidates_(candidates) {
}

// Default constructor used by Create(const std::string&) before
// Deserialize() fills in the real contents.
PeerConnectionMessage::PeerConnectionMessage()
    : type_(kOffer),
      error_code_(kNoError),
      desc_(NULL) {
}
// Error-message constructor.  Initializers are listed in member
// declaration order (type_, error_code_, desc_); the original listed
// desc_ before error_code_, triggering -Wreorder (members are always
// initialized in declaration order regardless of list order).
PeerConnectionMessage::PeerConnectionMessage(ErrorCode error)
    : type_(kError),
      error_code_(error),
      desc_(NULL) {
}
// Serializes type, error code, description and candidates to JSON.
std::string PeerConnectionMessage::Serialize() {
  return JsonSerialize(type_, error_code_, desc_, candidates_);
}

// Parses |message| into this object.  On failure the freshly allocated
// description is destroyed and desc_ is set to NULL.
// NOTE(review): on success the heap-allocated |desc| is stored in desc_,
// which the header documents as a weak (non-owning) ref — it looks like
// nothing ever frees it; confirm ownership with callers.
bool PeerConnectionMessage::Deserialize(std::string message) {
  cricket::SessionDescription* desc(new cricket::SessionDescription());
  bool result = JsonDeserialize(&type_, &error_code_, desc,
                                &candidates_, message);
  if (!result) {
    delete desc;
    desc = NULL;
  }
  desc_ = desc;
  return result;
}
} // namespace webrtc

View File

@ -1,102 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains classes used for handling signaling between
// two PeerConnections.
#ifndef TALK_APP_WEBRTC_DEV_PEERCONNECTIONMESSAGE_H_
#define TALK_APP_WEBRTC_DEV_PEERCONNECTIONMESSAGE_H_
#include <string>
#include <vector>
#include "talk/base/refcount.h"
#include "talk/base/basictypes.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/scoped_refptr.h"
#include "talk/session/phone/mediasession.h"
#include "talk/p2p/base/sessiondescription.h"
namespace webrtc {
// PeerConnectionMessage represent an SDP offer or an answer.
// Instances of this class can be serialized / deserialized and are used for
// signaling between PeerConnection objects.
// Each instance has a type and a session description.
// PeerConnectionMessage represent an SDP offer or an answer.
// Instances of this class can be serialized / deserialized and are used for
// signaling between PeerConnection objects.
// Each instance has a type and a session description.
class PeerConnectionMessage {
 public:
  enum PeerConnectionMessageType {
    kOffer,
    kAnswer,
    kError
  };

  enum ErrorCode {
    kNoError = 0,
    kWrongState = 10,   // Offer received when Answer was expected.
    kParseError = 20,   // Can't parse / process offer.
    kOfferNotAcceptable = 30,   // The offer has been rejected.
    kMessageNotDeliverable = 40  // The signaling channel is broken.
  };

  // Factory methods; the caller owns the returned object.  |desc| is kept
  // as a weak reference.
  static PeerConnectionMessage* Create(
      PeerConnectionMessageType type,
      const cricket::SessionDescription* desc,
      const std::vector<cricket::Candidate>& candidates);
  // Returns NULL if |message| cannot be deserialized.
  static PeerConnectionMessage* Create(
      const std::string& message);
  static PeerConnectionMessage* CreateErrorMessage(
      ErrorCode error);

  // Accessors (const-qualified — they don't mutate the message; the
  // originals were non-const, preventing use through const references).
  PeerConnectionMessageType type() const { return type_; }
  ErrorCode error() const { return error_code_; }
  const cricket::SessionDescription* desc() const { return desc_; }
  std::string Serialize();
  std::vector<cricket::Candidate>& candidates() { return candidates_; }

 protected:
  PeerConnectionMessage(PeerConnectionMessageType type,
                        const cricket::SessionDescription* desc,
                        const std::vector<cricket::Candidate>& candidates);
  explicit PeerConnectionMessage(ErrorCode error);
  PeerConnectionMessage();
  bool Deserialize(std::string message);

 private:
  PeerConnectionMessageType type_;
  ErrorCode error_code_;
  const cricket::SessionDescription* desc_;  // Weak ref.
  std::vector<cricket::Candidate> candidates_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DEV_PEERCONNECTIONMESSAGE_H_

View File

@ -1,169 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/peerconnectionmessage.h"
#include "talk/base/logging.h"
#include "talk/base/scoped_ptr.h"
#include "talk/session/phone/channelmanager.h"
using webrtc::PeerConnectionMessage;
// kStreamLabel1 carries one audio and two video tracks.
static const char kStreamLabel1[] = "local_stream_1";
static const char kAudioTrackLabel1[] = "local_audio_1";
static const char kVideoTrackLabel1[] = "local_video_1";
static const char kVideoTrackLabel2[] = "local_video_2";
// kStreamLabel2 carries one audio and one video track.
static const char kStreamLabel2[] = "local_stream_2";
static const char kAudioTrackLabel2[] = "local_audio_2";
static const char kVideoTrackLabel3[] = "local_video_3";
// Fixture providing a MediaSessionDescriptionFactory (backed by a real
// ChannelManager), two media streams' worth of session options, and four
// fake local UDP candidates for serialization round-trip tests.
class PeerConnectionMessageTest: public testing::Test {
 public:
  PeerConnectionMessageTest() {
    channel_manager_.reset(new cricket::ChannelManager(
        talk_base::Thread::Current()));
    EXPECT_TRUE(channel_manager_->Init());
    session_description_factory_.reset(
        new cricket::MediaSessionDescriptionFactory(channel_manager_.get()));

    // kStreamLabel1 with 1 audio track and 2 video tracks.
    options_.AddStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrackLabel1,
                       kStreamLabel1);
    options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel1,
                       kStreamLabel1);
    options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel2,
                       kStreamLabel1);
    // kStreamLabel2 with 1 audio track and 1 video track
    options_.AddStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrackLabel2,
                       kStreamLabel2);
    options_.AddStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrackLabel3,
                       kStreamLabel2);

    // Four loopback candidates on consecutive ports.
    int port = 1234;
    talk_base::SocketAddress address("127.0.0.1", port++);
    cricket::Candidate candidate1("video_rtcp", "udp", address, 1,
        "user_video_rtcp", "password_video_rtcp", "local", "eth0", 0);
    address.SetPort(port++);
    cricket::Candidate candidate2("video_rtp", "udp", address, 1,
        "user_video_rtp", "password_video_rtp", "local", "eth0", 0);
    address.SetPort(port++);
    cricket::Candidate candidate3("rtp", "udp", address, 1,
        "user_rtp", "password_rtp", "local", "eth0", 0);
    address.SetPort(port++);
    cricket::Candidate candidate4("rtcp", "udp", address, 1,
        "user_rtcp", "password_rtcp", "local", "eth0", 0);
    candidates_.push_back(candidate1);
    candidates_.push_back(candidate2);
    candidates_.push_back(candidate3);
    candidates_.push_back(candidate4);
  }

 protected:
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
  talk_base::scoped_ptr<cricket::MediaSessionDescriptionFactory>
      session_description_factory_;
  cricket::MediaSessionOptions options_;
  cricket::Candidates candidates_;
};
// Serializes an offer, an answer and an error message; currently only
// checks that serialization runs (output is logged, not verified).
TEST_F(PeerConnectionMessageTest, Serialize) {
  std::string message;
  talk_base::scoped_ptr<PeerConnectionMessage> pc_message;

  // Offer
  talk_base::scoped_ptr<const cricket::SessionDescription> offer(
      session_description_factory_->CreateOffer(options_));
  pc_message.reset(PeerConnectionMessage::Create(PeerConnectionMessage::kOffer,
      offer.get(), candidates_));
  message = pc_message->Serialize();
  LOG(LS_INFO) << message;

  // Answer
  talk_base::scoped_ptr<const cricket::SessionDescription> answer(
      session_description_factory_->CreateAnswer(offer.get(), options_));
  pc_message.reset(PeerConnectionMessage::Create(PeerConnectionMessage::kAnswer,
      answer.get(), candidates_));
  message = pc_message->Serialize();
  LOG(LS_INFO) << message;

  // Error
  pc_message.reset(PeerConnectionMessage::CreateErrorMessage(
      PeerConnectionMessage::kParseError));
  message = pc_message->Serialize();
  LOG(LS_INFO) << message;
  // TODO(ronghuawu): Verify the serialized message.
}
// Round-trip check: serialize a message, deserialize it, serialize again,
// and expect byte-identical output for offer, answer and error messages.
TEST_F(PeerConnectionMessageTest, Deserialize) {
  std::string message_ref;
  std::string message_result;
  talk_base::scoped_ptr<PeerConnectionMessage> pc_message;
  talk_base::scoped_ptr<cricket::SessionDescription> offer(
      session_description_factory_->CreateOffer(options_));
  talk_base::scoped_ptr<cricket::SessionDescription> answer(
      session_description_factory_->CreateAnswer(offer.get(), options_));

  // Offer
  pc_message.reset(PeerConnectionMessage::Create(PeerConnectionMessage::kOffer,
      offer.get(), candidates_));
  message_ref = pc_message->Serialize();
  LOG(LS_INFO) << "The reference message: " << message_ref;

  // Deserialize Offer
  pc_message.reset(PeerConnectionMessage::Create(message_ref));
  message_result = pc_message->Serialize();
  LOG(LS_INFO) << "The result message: " << message_result;
  EXPECT_EQ(message_ref, message_result);

  // Answer
  pc_message.reset(PeerConnectionMessage::Create(PeerConnectionMessage::kAnswer,
      answer.get(), candidates_));
  message_ref = pc_message->Serialize();
  LOG(LS_INFO) << "The reference message: " << message_ref;

  // Deserialize Answer
  pc_message.reset(PeerConnectionMessage::Create(message_ref));
  message_result = pc_message->Serialize();
  LOG(LS_INFO) << "The result message: " << message_result;
  EXPECT_EQ(message_ref, message_result);

  // Error
  pc_message.reset(PeerConnectionMessage::CreateErrorMessage(
      PeerConnectionMessage::kParseError));
  message_ref = pc_message->Serialize();
  LOG(LS_INFO) << "The reference message: " << message_ref;

  // Deserialize Error
  pc_message.reset(PeerConnectionMessage::Create(message_ref));
  message_result = pc_message->Serialize();
  LOG(LS_INFO) << "The result message: " << message_result;
  EXPECT_EQ(message_ref, message_result);
}

View File

@ -1,535 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
#include <utility>
#include "talk/app/webrtc_dev/mediastreamtrackproxy.h"
#include "talk/app/webrtc_dev/sessiondescriptionprovider.h"
#include "talk/base/helpers.h"
#include "talk/base/messagequeue.h"
#include "talk/session/phone/channelmanager.h"
using talk_base::scoped_refptr;
namespace webrtc {
enum {
MSG_SEND_QUEUED_OFFER = 1,
MSG_GENERATE_ANSWER = 2,
};
static const int kGlareMinWaitTime = 2 * 1000; // 2 sec
static const int kGlareWaitIntervall = 1 * 1000; // 1 sec
// Returns true if |answer_desc| contains at least one media content (audio or
// video) whose codec list is non-empty, i.e. the answer can actually carry
// media. Used to decide whether a remote offer is acceptable.
static bool VerifyAnswer(const cricket::SessionDescription* answer_desc) {
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(answer_desc);
  if (audio_content != NULL) {
    const cricket::AudioContentDescription* audio =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    if (!audio->codecs().empty())
      return true;
  }
  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(answer_desc);
  if (video_content != NULL) {
    const cricket::VideoContentDescription* video =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    if (!video->codecs().empty())
      return true;
  }
  // Neither audio nor video has a usable codec.
  return false;
}
// Constructs the signaling state machine in state kInitializing; it stays in
// that state until OnCandidatesReady delivers the local candidates.
// |signaling_thread| and |provider| are borrowed, not owned.
PeerConnectionSignaling::PeerConnectionSignaling(
talk_base::Thread* signaling_thread,
SessionDescriptionProvider* provider)
: signaling_thread_(signaling_thread),
provider_(provider),
state_(kInitializing) {
}
PeerConnectionSignaling::~PeerConnectionSignaling() {
// Cleanup the queued_received_offer_ if this object is
// deleted before the offer can be processed.
// That means we have parsed an offer and created the remote
// session description but we have not transferred the ownership
// to the provider yet, so both the description and the message
// are still owned here and must be freed manually.
if (queued_received_offer_.first) {
const cricket::SessionDescription* remote_desc =
queued_received_offer_.first->desc();
delete remote_desc;
delete queued_received_offer_.first;
}
}
// Implements WebRtcSessionObserver. Called once when the local candidates are
// ready; stores them and drains whatever work was queued while the object was
// still in kInitializing (a received remote offer takes priority over any
// locally queued offers).
void PeerConnectionSignaling::OnCandidatesReady(
const cricket::Candidates& candidates) {
ASSERT(state_ == kInitializing);
if (state_ != kInitializing)
return;
// Store the candidates.
candidates_ = candidates;
// If we have a queued remote offer we need to handle this first.
if (queued_received_offer_.first != NULL) {
state_ = kIdle;
signaling_thread_->Post(this, MSG_GENERATE_ANSWER);
} else if (queued_offers_.size() >0) {
// Else if we have local queued offers.
state_ = PeerConnectionSignaling::kWaitingForAnswer;
signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
} else {
state_ = kIdle;
}
}
// Handles a serialized signaling |message| received from the remote peer.
// |local_streams| is the set of streams this side is willing to send if the
// message turns out to be an offer we must answer. Must be called on the
// signaling thread.
void PeerConnectionSignaling::ProcessSignalingMessage(
const std::string& message,
StreamCollectionInterface* local_streams) {
ASSERT(talk_base::Thread::Current() == signaling_thread_);
talk_base::scoped_ptr<PeerConnectionMessage> signaling_message(
PeerConnectionMessage::Create(message));
if (!signaling_message.get()) {
// The message could not be parsed: tell the remote peer, then replace the
// message with a local kParseError message.
// NOTE(review): there is deliberately no early return here — the locally
// created error message falls into the kError case of the switch below,
// which signals the error to our own observers and resets the state.
// Confirm this double-handling is intended.
signaling_message.reset(PeerConnectionMessage::CreateErrorMessage(
PeerConnectionMessage::kParseError));
SignalNewPeerConnectionMessage(signaling_message->Serialize());
}
switch (signaling_message->type()) {
case PeerConnectionMessage::kOffer: {
// Take ownership of the message and remember which local streams to
// include in the answer.
queued_received_offer_ =
RemoteOfferPair(signaling_message.release(), local_streams);
// If we are still Initializing we need to wait before we can handle
// the offer. Queue it and handle it when the state change.
if (state_ == kInitializing) {
break;
}
// Don't handle offers when we are waiting for an answer.
if (state_ == kWaitingForAnswer) {
// Glare: both sides sent an offer simultaneously. Reject the remote
// offer with kWrongState and resend our own offer after a randomized
// 2-3 s back-off to break the tie.
state_ = kGlare;
// Resends our last offer in 2 to 3s.
const int timeout = kGlareMinWaitTime +
talk_base::CreateRandomId() % kGlareWaitIntervall;
signaling_thread_->PostDelayed(
timeout, this, MSG_SEND_QUEUED_OFFER, NULL);
talk_base::scoped_ptr<PeerConnectionMessage> msg(
PeerConnectionMessage::CreateErrorMessage(
PeerConnectionMessage::kWrongState));
SignalNewPeerConnectionMessage(msg->Serialize());
break;
}
if (state_ == kGlare) {
// The remote peer accepted our rejection; answer its offer instead of
// resending ours.
state_ = kIdle;
}
// Clear the MSG_SEND_QUEUED_OFFER we posted delayed.
signaling_thread_->Clear(this, MSG_SEND_QUEUED_OFFER);
signaling_thread_->Post(this, MSG_GENERATE_ANSWER);
break;
}
case PeerConnectionMessage::kAnswer: {
// Answers are only valid while we are waiting for one.
ASSERT(state_ != PeerConnectionSignaling::kIdle);
if (state_ == PeerConnectionSignaling::kIdle)
return;
// Hand the remote description (and candidates) to the provider, which
// takes ownership and returns the pointer we may keep using.
const cricket::SessionDescription* remote_desc =
provider_->SetRemoteSessionDescription(
signaling_message->desc(),
signaling_message->candidates());
provider_->NegotiationDone();
UpdateRemoteStreams(remote_desc);
// Pop the offer this answer corresponds to and update the local stream
// states accordingly.
scoped_refptr<StreamCollectionInterface> streams(queued_offers_.front());
queued_offers_.pop_front();
UpdateSendingLocalStreams(remote_desc, streams);
// Check if we have more offers waiting in the queue.
if (queued_offers_.size() > 0) {
// Send the next offer.
signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
} else {
state_ = PeerConnectionSignaling::kIdle;
}
break;
}
case PeerConnectionMessage::kError: {
// kWrongState is the expected glare rejection and is handled silently;
// anything else is surfaced to the observers and resets the machine.
if (signaling_message->error() != PeerConnectionMessage::kWrongState) {
SignalErrorMessageReceived(signaling_message->error());
// An error have occurred that we can't do anything about.
// Reset the state and wait for user action.
queued_offers_.clear();
state_ = kIdle;
}
break;
}
}
}
// Queues an offer containing all tracks in |local_streams| and, if no other
// negotiation is in flight, kicks off sending it. Must be called on the
// signaling thread.
void PeerConnectionSignaling::CreateOffer(
StreamCollectionInterface* local_streams) {
ASSERT(talk_base::Thread::Current() == signaling_thread_);
queued_offers_.push_back(local_streams);
if (state_ == kIdle) {
// Check if we can send a new offer.
// Only one offer is allowed at a time.
state_ = PeerConnectionSignaling::kWaitingForAnswer;
signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
}
}
// Implements talk_base::MessageHandler: dispatches messages posted to the
// signaling thread to the matching worker routine.
void PeerConnectionSignaling::OnMessage(talk_base::Message* msg) {
  if (msg->message_id == MSG_SEND_QUEUED_OFFER) {
    CreateOffer_s();
  } else if (msg->message_id == MSG_GENERATE_ANSWER) {
    CreateAnswer_s();
  }
}
// Signaling-thread worker for MSG_SEND_QUEUED_OFFER: builds an offer from the
// front of queued_offers_ (the entry stays queued until its answer arrives)
// and emits it via SignalNewPeerConnectionMessage.
void PeerConnectionSignaling::CreateOffer_s() {
ASSERT(queued_offers_.size() > 0);
scoped_refptr<StreamCollectionInterface> local_streams(
queued_offers_.front());
cricket::MediaSessionOptions options;
InitMediaSessionOptions(&options, local_streams);
// The provider owns the returned description.
const cricket::SessionDescription* local_desc =
provider_->ProvideOffer(options);
talk_base::scoped_ptr<PeerConnectionMessage> offer_message(
PeerConnectionMessage::Create(PeerConnectionMessage::kOffer,
local_desc,
candidates_));
SignalNewPeerConnectionMessage(offer_message->Serialize());
}
// Returns the current state of the signaling state machine.
PeerConnectionSignaling::State PeerConnectionSignaling::GetState() {
return state_;
}
// Signaling-thread worker for MSG_GENERATE_ANSWER: answers the queued remote
// offer. Any pending local offers are dropped; their streams are instead
// represented in the answer we create here.
void PeerConnectionSignaling::CreateAnswer_s() {
// Take ownership of the queued message and the streams to answer with.
talk_base::scoped_ptr<PeerConnectionMessage> message(
queued_received_offer_.first);
queued_received_offer_.first = NULL;
scoped_refptr<StreamCollectionInterface> local_streams(
queued_received_offer_.second.release());
// Reset all pending offers. Instead, send the new streams in the answer.
signaling_thread_->Clear(this, MSG_SEND_QUEUED_OFFER, NULL);
queued_offers_.clear();
// Let the provider know about the remote offer.
// The provider takes ownership and return a pointer for us to use.
const cricket::SessionDescription* remote_desc =
provider_->SetRemoteSessionDescription(message->desc(),
message->candidates());
// Create a MediaSessionOptions object with the sources we want to send.
cricket::MediaSessionOptions options;
InitMediaSessionOptions(&options, local_streams);
// Create an local session description based on this.
const cricket::SessionDescription* local_desc =
provider_->ProvideAnswer(options);
talk_base::scoped_ptr<PeerConnectionMessage> answer_message;
if (!VerifyAnswer(local_desc)) {
// No media content with a usable codec: reject the offer without
// completing the negotiation.
answer_message.reset(PeerConnectionMessage::CreateErrorMessage(
PeerConnectionMessage::kOfferNotAcceptable));
// Signal that the new answer is ready to be sent.
SignalNewPeerConnectionMessage(answer_message->Serialize());
return;
}
answer_message.reset(PeerConnectionMessage::Create(
PeerConnectionMessage::kAnswer, local_desc, candidates_));
// Let the provider know the negotiation is done.
provider_->NegotiationDone();
SignalNewPeerConnectionMessage(answer_message->Serialize());
UpdateRemoteStreams(message->desc());
// Update the state of the local streams.
UpdateSendingLocalStreams(answer_message->desc(), local_streams);
}
// Fills |options| with one entry per local media track we want to send: the
// track's label plus its owning MediaStream's label, tagged with the track's
// media type. has_video is always set so the session can receive video even
// when no local video tracks exist.
void PeerConnectionSignaling::InitMediaSessionOptions(
    cricket::MediaSessionOptions* options,
    StreamCollectionInterface* local_streams) {
  // Always advertise video so that we are able to receive it.
  options->has_video = true;
  const size_t num_streams = local_streams->count();
  for (size_t i = 0; i < num_streams; ++i) {
    MediaStreamInterface* stream = local_streams->at(i);
    // Register every audio track of this stream.
    scoped_refptr<AudioTracks> audio(stream->audio_tracks());
    for (size_t a = 0; a < audio->count(); ++a) {
      scoped_refptr<MediaStreamTrackInterface> audio_track(audio->at(a));
      options->AddStream(cricket::MEDIA_TYPE_AUDIO, audio_track->label(),
                         stream->label());
    }
    // Register every video track of this stream.
    scoped_refptr<VideoTracks> video(stream->video_tracks());
    for (size_t v = 0; v < video->count(); ++v) {
      scoped_refptr<MediaStreamTrackInterface> video_track(video->at(v));
      options->AddStream(cricket::MEDIA_TYPE_VIDEO, video_track->label(),
                         stream->label());
    }
  }
}
// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams,
// SignalRemoteStreamAdded is triggered. If a remote MediaStream is missing
// from the remote SessionDescription, SignalRemoteStreamRemoved is triggered.
// NOTE(review): audio streams are keyed by it->sync_label while video streams
// are keyed by it->cname below — if both refer to the same stream they will
// only merge when those fields match; confirm this is intended.
void PeerConnectionSignaling::UpdateRemoteStreams(
const cricket::SessionDescription* remote_desc) {
// Map of streams present in |remote_desc|, built up below.
RemoteStreamMap current_streams;
typedef std::pair<std::string, scoped_refptr<MediaStreamProxy> >
MediaStreamPair;
const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
if (audio_content) {
const cricket::AudioContentDescription* audio_desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
for (cricket::StreamParamsVec::const_iterator it =
audio_desc->streams().begin();
it != audio_desc->streams().end(); ++it) {
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(it->sync_label);
RemoteStreamMap::iterator new_streams_it =
current_streams.find(it->sync_label);
if (old_streams_it == remote_streams_.end()) {
if (new_streams_it == current_streams.end()) {
// New stream: create a proxy for it.
scoped_refptr<MediaStreamProxy> stream(
MediaStreamProxy::Create(it->sync_label, signaling_thread_));
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->sync_label);
}
// Add a live remote audio track to the (new or just-created) stream.
scoped_refptr<AudioTrackInterface> track(
AudioTrackProxy::CreateRemote(it->name, signaling_thread_));
track->set_state(MediaStreamTrackInterface::kLive);
new_streams_it->second->AddTrack(track);
} else {
// Known stream: carry it over unchanged.
scoped_refptr<MediaStreamProxy> stream(old_streams_it->second);
current_streams.insert(MediaStreamPair(stream->label(), stream));
}
}
}
const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
if (video_content) {
const cricket::VideoContentDescription* video_desc =
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
for (cricket::StreamParamsVec::const_iterator it =
video_desc->streams().begin();
it != video_desc->streams().end(); ++it) {
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(it->cname);
RemoteStreamMap::iterator new_streams_it =
current_streams.find(it->cname);
if (old_streams_it == remote_streams_.end()) {
if (new_streams_it == current_streams.end()) {
// New stream: create a proxy for it.
scoped_refptr<MediaStreamProxy> stream(
MediaStreamProxy::Create(it->cname, signaling_thread_));
current_streams.insert(MediaStreamPair(stream->label(), stream));
new_streams_it = current_streams.find(it->cname);
}
// Add a live remote video track to the (new or just-created) stream.
scoped_refptr<VideoTrackInterface> track(
VideoTrackProxy::CreateRemote(it->name, signaling_thread_));
new_streams_it->second->AddTrack(track);
track->set_state(MediaStreamTrackInterface::kLive);
} else {
// Known stream: carry it over unchanged.
scoped_refptr<MediaStreamProxy> stream(old_streams_it->second);
current_streams.insert(MediaStreamPair(stream->label(), stream));
}
}
}
// Iterate current_streams to find all new streams.
// Change the state of the new stream and SignalRemoteStreamAdded.
for (RemoteStreamMap::iterator it = current_streams.begin();
it != current_streams.end();
++it) {
scoped_refptr<MediaStreamProxy> new_stream(it->second);
RemoteStreamMap::iterator old_streams_it =
remote_streams_.find(new_stream->label());
if (old_streams_it == remote_streams_.end()) {
new_stream->set_ready_state(MediaStreamInterface::kLive);
SignalRemoteStreamAdded(new_stream);
}
}
// Iterate the old list of remote streams.
// If a stream is not found in the new list it have been removed.
// Change the state of the removed stream and SignalRemoteStreamRemoved.
for (RemoteStreamMap::iterator it = remote_streams_.begin();
it != remote_streams_.end();
++it) {
scoped_refptr<MediaStreamProxy> old_stream(it->second);
RemoteStreamMap::iterator new_streams_it =
current_streams.find(old_stream->label());
if (new_streams_it == current_streams.end()) {
// Mark the stream and all of its tracks as ended before signaling.
old_stream->set_ready_state(MediaStreamInterface::kEnded);
scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
for (size_t j = 0; j < audio_tracklist->count(); ++j) {
audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
for (size_t j = 0; j < video_tracklist->count(); ++j) {
video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
}
SignalRemoteStreamRemoved(old_stream);
}
}
// Set the remote_streams_ map to the map of MediaStreams we just created to
// be prepared for the next offer.
remote_streams_ = current_streams;
}
// Updates the state of all local streams we have just negotiated. If the
// negotiation succeeded for at least one track, the stream state is changed
// to kLive; otherwise it is changed to kEnded. Streams present in
// local_streams_ but missing from |negotiated_streams| are marked kEnded
// along with all their tracks.
void PeerConnectionSignaling::UpdateSendingLocalStreams(
    const cricket::SessionDescription* answer_desc,
    StreamCollectionInterface* negotiated_streams) {
  typedef std::pair<std::string, scoped_refptr<MediaStreamInterface> >
      MediaStreamPair;
  LocalStreamMap current_local_streams;
  for (size_t i = 0; i < negotiated_streams->count(); ++i) {
    scoped_refptr<MediaStreamInterface> stream(negotiated_streams->at(i));
    scoped_refptr<AudioTracks> audiotracklist(stream->audio_tracks());
    scoped_refptr<VideoTracks> videotracklist(stream->video_tracks());
    bool stream_ok = false;  // A stream is ok if at least one track succeed.
    // Update audio tracks.
    for (size_t j = 0; j < audiotracklist->count(); ++j) {
      scoped_refptr<MediaStreamTrackInterface> track(audiotracklist->at(j));
      const cricket::ContentInfo* audio_content =
          GetFirstAudioContent(answer_desc);
      if (!audio_content) {  // The remote does not accept audio.
        track->set_state(MediaStreamTrackInterface::kFailed);
        continue;
      }
      const cricket::AudioContentDescription* audio_desc =
          static_cast<const cricket::AudioContentDescription*>(
              audio_content->description);
      if (audio_desc->codecs().size() <= 0) {
        // No common codec. BUG FIX: previously execution fell through and
        // unconditionally overwrote kFailed with kLive below; the missing
        // continue made the failure branch a no-op.
        track->set_state(MediaStreamTrackInterface::kFailed);
        continue;
      }
      track->set_state(MediaStreamTrackInterface::kLive);
      stream_ok = true;
    }
    // Update video tracks.
    for (size_t j = 0; j < videotracklist->count(); ++j) {
      scoped_refptr<MediaStreamTrackInterface> track(videotracklist->at(j));
      const cricket::ContentInfo* video_content =
          GetFirstVideoContent(answer_desc);
      if (!video_content) {  // The remote does not accept video.
        track->set_state(MediaStreamTrackInterface::kFailed);
        continue;
      }
      const cricket::VideoContentDescription* video_desc =
          static_cast<const cricket::VideoContentDescription*>(
              video_content->description);
      // TODO(perkj): Do we need to store the codec in the track?
      if (video_desc->codecs().size() <= 0) {
        // No common codec. BUG FIX: same missing continue as the audio loop.
        track->set_state(MediaStreamTrackInterface::kFailed);
        continue;
      }
      track->set_state(MediaStreamTrackInterface::kLive);
      stream_ok = true;
    }
    if (stream_ok) {
      // We have successfully negotiated to send this stream.
      // Change the stream and store it as successfully negotiated.
      stream->set_ready_state(MediaStreamInterface::kLive);
      current_local_streams.insert(MediaStreamPair(stream->label(), stream));
    } else {
      stream->set_ready_state(MediaStreamInterface::kEnded);
    }
  }
  // Iterate the old list of local streams.
  // If a stream is not found in the new list it has been removed.
  // Change the state of the removed stream and all its tracks to kEnded.
  for (LocalStreamMap::iterator it = local_streams_.begin();
       it != local_streams_.end();
       ++it) {
    scoped_refptr<MediaStreamInterface> old_stream(it->second);
    MediaStreamInterface* new_streams =
        negotiated_streams->find(old_stream->label());
    if (new_streams == NULL) {
      old_stream->set_ready_state(MediaStreamInterface::kEnded);
      scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
      for (size_t j = 0; j < audio_tracklist->count(); ++j) {
        audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
      }
      scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
      for (size_t j = 0; j < video_tracklist->count(); ++j) {
        video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
      }
    }
  }
  // Update the local_streams_ for next update.
  local_streams_ = current_local_streams;
}
} // namespace webrtc

View File

@ -1,163 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains classes used for handling signaling between
// two PeerConnections.
#ifndef TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
#define TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
#include <list>
#include <map>
#include <string>
#include <utility>
#include <vector>
#include "talk/app/webrtc_dev/mediastreamproxy.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/peerconnectionmessage.h"
#include "talk/app/webrtc_dev/sessiondescriptionprovider.h"
#include "talk/app/webrtc_dev/webrtcsessionobserver.h"
#include "talk/base/basictypes.h"
#include "talk/base/messagehandler.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/scoped_refptr.h"
#include "talk/base/thread.h"
#include "talk/session/phone/mediasession.h"
#include "talk/p2p/base/sessiondescription.h"
namespace cricket {
class ChannelManager;
class Candidate;
typedef std::vector<Candidate> Candidates;
}
namespace webrtc {
// PeerConnectionSignaling is a class responsible for handling signaling
// between PeerConnection objects.
// It creates remote MediaStream objects when the remote peer signals it wants
// to send a new MediaStream.
// It changes the state of local MediaStreams and tracks
// when a remote peer is ready to receive media.
// OnCandidatesReady must be invoked (with the local candidates) before offers
// or answers can be processed; the last request to create an offer or process
// an answer made before that point is queued and processed afterwards.
// Call CreateOffer to negotiate new local streams to send.
// Call ProcessSignalingMessage when a new PeerConnectionMessage have been
// received from the remote peer.
class PeerConnectionSignaling : public WebRtcSessionObserver,
public talk_base::MessageHandler {
public:
enum State {
// Awaiting the local candidates.
kInitializing,
// Ready to sent new offer or receive a new offer.
kIdle,
// We have sent an offer and expect an answer, or we want to update
// our own offer.
kWaitingForAnswer,
// While waiting for an answer to our offer we received an offer from
// the remote peer.
kGlare
};
// |signaling_thread| and |provider| are borrowed and must outlive this
// object.
PeerConnectionSignaling(talk_base::Thread* signaling_thread,
SessionDescriptionProvider* provider);
~PeerConnectionSignaling();
// Process a received offer/answer from the remote peer.
// |local_streams| are the streams to send back if |message| is an offer.
void ProcessSignalingMessage(const std::string& message,
StreamCollectionInterface* local_streams);
// Creates an offer containing all tracks in local_streams.
// When the offer is ready it is signaled by SignalNewPeerConnectionMessage.
// When the remote peer is ready to receive media on a stream, the state of
// the local stream will change to kLive.
void CreateOffer(StreamCollectionInterface* local_streams);
// Returns the current state.
State GetState();
// New PeerConnectionMessage with an SDP offer/answer is ready to be sent.
// The listener to this signal is expected to serialize and send the
// PeerConnectionMessage to the remote peer.
sigslot::signal1<const std::string&> SignalNewPeerConnectionMessage;
// A new remote stream have been discovered.
sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamAdded;
// Remote stream is no longer available.
sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamRemoved;
// Remote PeerConnection sent an error message.
sigslot::signal1<PeerConnectionMessage::ErrorCode> SignalErrorMessageReceived;
// Implements WebRtcSessionObserver; called when local candidates are ready.
virtual void OnCandidatesReady(const cricket::Candidates& candidates);
// Implements talk_base::MessageHandler.
virtual void OnMessage(talk_base::Message* msg);
private:
// Signaling-thread workers for the posted messages.
void CreateOffer_s();
void CreateAnswer_s();
// Fills |options| with the tracks of |local_streams| we want to send.
void InitMediaSessionOptions(cricket::MediaSessionOptions* options,
StreamCollectionInterface* local_streams);
void UpdateRemoteStreams(const cricket::SessionDescription* remote_desc);
void UpdateSendingLocalStreams(
const cricket::SessionDescription* answer_desc,
StreamCollectionInterface* negotiated_streams);
// Local offers waiting for an answer, oldest first.
typedef std::list<talk_base::scoped_refptr<StreamCollectionInterface> >
StreamCollectionList;
StreamCollectionList queued_offers_;
// A remote offer queued until it can be answered, paired with the local
// streams to include in the answer.
typedef std::pair<PeerConnectionMessage*,
talk_base::scoped_refptr<StreamCollectionInterface> >
RemoteOfferPair;
RemoteOfferPair queued_received_offer_;
talk_base::Thread* signaling_thread_;
SessionDescriptionProvider* provider_;
State state_;
// Remote streams created from the last remote description, keyed by label.
typedef std::map<std::string, talk_base::scoped_refptr<MediaStreamProxy> >
RemoteStreamMap;
RemoteStreamMap remote_streams_;
// Local streams successfully negotiated in the last round, keyed by label.
typedef std::map<std::string, talk_base::scoped_refptr<MediaStreamInterface> >
LocalStreamMap;
LocalStreamMap local_streams_;
cricket::Candidates candidates_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_

View File

@ -1,455 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <map>
#include <string>
#include <utility>
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/app/webrtc_dev/videotrackimpl.h"
#include "talk/app/webrtc_dev/audiotrackimpl.h"
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
#include "talk/app/webrtc_dev/streamcollectionimpl.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/thread.h"
#include "talk/session/phone/channelmanager.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kAudioTrackLabel1[] = "local_audio_1";
static const char kVideoTrackLabel1[] = "local_video_1";
static const int kWaitTime = 5000;
namespace webrtc {
typedef std::map<std::string, talk_base::scoped_refptr<MediaStreamInterface> >
MediaStreamMap;
typedef std::pair<std::string, talk_base::scoped_refptr<MediaStreamInterface> >
RemotePair;
// Test observer that registers itself on a media track and mirrors the
// track's latest state into the public track_state member.
class MockMediaTrackObserver : public webrtc::ObserverInterface {
public:
explicit MockMediaTrackObserver(MediaStreamTrackInterface* track)
: track_(track) {
track_state = track->state();
track->RegisterObserver(this);
}
// Called by the track on every state change.
virtual void OnChanged() {
track_state = track_->state();
}
webrtc::MediaStreamTrackInterface::TrackState track_state;
private:
talk_base::scoped_refptr<MediaStreamTrackInterface> track_;
};
// Test observer that registers itself on a media stream and mirrors the
// stream's latest ready state into the public ready_state member.
class MockMediaStreamObserver : public webrtc::ObserverInterface {
public:
explicit MockMediaStreamObserver(MediaStreamInterface* stream)
: stream_(stream) {
ready_state = stream->ready_state();
stream_->RegisterObserver(this);
}
// Called by the stream on every state change.
virtual void OnChanged() {
ready_state = stream_->ready_state();
}
webrtc::MediaStreamInterface::ReadyState ready_state;
private:
talk_base::scoped_refptr<MediaStreamInterface> stream_;
};
// Test double that listens to one PeerConnectionSignaling instance. It
// records discovered remote streams, optionally relays each outgoing
// signaling message to a second "remote" PeerConnectionSignaling (AnswerPeer),
// and keeps the last successfully parsed non-error message in last_message.
class MockSignalingObserver : public sigslot::has_slots<> {
public:
MockSignalingObserver()
: remote_peer_(NULL) {
}
// New remote stream have been discovered.
virtual void OnRemoteStreamAdded(MediaStreamInterface* remote_stream) {
EXPECT_EQ(MediaStreamInterface::kLive, remote_stream->ready_state());
remote_media_streams_.insert(RemotePair(remote_stream->label(),
remote_stream));
}
// Remote stream is no longer available.
virtual void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream) {
EXPECT_NE(remote_media_streams_.find(remote_stream->label()),
remote_media_streams_.end());
remote_media_streams_.erase(remote_stream->label());
}
// New answer ready to be sent. Forwards it to the configured remote peer
// (if any), then remembers the message unless it failed to parse or was an
// error message.
void OnSignalingMessage(const std::string& smessage) {
if (remote_peer_) {
remote_peer_->ProcessSignalingMessage(smessage, remote_local_collection_);
// Process posted messages to allow the remote peer to process
// the message.
talk_base::Thread::Current()->ProcessMessages(1);
}
talk_base::scoped_ptr<PeerConnectionMessage> message(
PeerConnectionMessage::Create(smessage));
if (message.get() != NULL &&
message->type() != PeerConnectionMessage::kError) {
last_message = smessage;
}
}
// Tell this object to answer the remote_peer.
// remote_local_collection is the local collection the remote peer want to
// send in an answer.
void AnswerPeer(PeerConnectionSignaling* remote_peer,
StreamCollectionImpl* remote_local_collection) {
remote_peer_ = remote_peer;
remote_local_collection_ = remote_local_collection;
}
// Stop relaying messages to the remote peer.
// NOTE(review): release() on a scoped_refptr returns the pointer without
// dropping the reference — this looks like it leaks a ref; = NULL would
// release it. Confirm against the talk_base scoped_refptr API.
void CancelAnswerPeer() {
remote_peer_ = NULL;
remote_local_collection_.release();
}
// Returns the recorded remote stream with |label|, or NULL if unknown.
MediaStreamInterface* RemoteStream(const std::string& label) {
MediaStreamMap::iterator it = remote_media_streams_.find(label);
if (it != remote_media_streams_.end())
return it->second;
return NULL;
}
virtual ~MockSignalingObserver() {}
std::string last_message;
private:
MediaStreamMap remote_media_streams_;
talk_base::scoped_refptr<StreamCollectionImpl> remote_local_collection_;
PeerConnectionSignaling* remote_peer_;
};
// Test implementation of SessionDescriptionProvider backed by a real
// MediaSessionDescriptionFactory. It owns the offer, answer and remote
// description it hands out, and counts NegotiationDone calls so tests can
// verify how many negotiations completed.
class MockSessionDescriptionProvider : public SessionDescriptionProvider {
public:
explicit MockSessionDescriptionProvider(
cricket::ChannelManager* channel_manager)
: update_session_description_counter_(0),
session_description_factory_(
new cricket::MediaSessionDescriptionFactory(channel_manager)) {
}
// Creates and owns an offer for |options|; the caller only borrows it.
virtual const cricket::SessionDescription* ProvideOffer(
const cricket::MediaSessionOptions& options) {
offer_.reset(session_description_factory_->CreateOffer(options));
return offer_.get();
}
// Transfer ownership of remote_offer.
virtual const cricket::SessionDescription* SetRemoteSessionDescription(
const cricket::SessionDescription* remote_offer,
const cricket::Candidates& remote_candidates) {
remote_desc_.reset(remote_offer);
return remote_desc_.get();
}
// Creates and owns an answer to the stored remote description.
virtual const cricket::SessionDescription* ProvideAnswer(
const cricket::MediaSessionOptions& options) {
answer_.reset(session_description_factory_->CreateAnswer(remote_desc_.get(),
options));
return answer_.get();
}
// Counts completed negotiations.
virtual void NegotiationDone() {
++update_session_description_counter_;
}
int update_session_description_counter_;
protected:
talk_base::scoped_ptr<cricket::MediaSessionDescriptionFactory>
session_description_factory_;
talk_base::scoped_ptr<const cricket::SessionDescription> offer_;
talk_base::scoped_ptr<const cricket::SessionDescription> answer_;
talk_base::scoped_ptr<const cricket::SessionDescription> remote_desc_;
};
// Fixture wiring up two PeerConnectionSignaling instances ("peer 1" and
// "peer 2"), each with its own description provider and observer, so the
// tests can negotiate between the two peers on a single thread.
class PeerConnectionSignalingTest: public testing::Test {
 protected:
  virtual void SetUp() {
    // Both providers share one ChannelManager on the current thread.
    channel_manager_.reset(new cricket::ChannelManager(
        talk_base::Thread::Current()));
    EXPECT_TRUE(channel_manager_->Init());
    provider1_.reset(new MockSessionDescriptionProvider(
        channel_manager_.get()));
    provider2_.reset(new MockSessionDescriptionProvider(
        channel_manager_.get()));
    // Peer 1: signaling object plus observer receiving all of its signals.
    signaling1_.reset(new PeerConnectionSignaling(
        talk_base::Thread::Current(), provider1_.get()));
    observer1_.reset(new MockSignalingObserver());
    signaling1_->SignalNewPeerConnectionMessage.connect(
        observer1_.get(), &MockSignalingObserver::OnSignalingMessage);
    signaling1_->SignalRemoteStreamAdded.connect(
        observer1_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
    signaling1_->SignalRemoteStreamRemoved.connect(
        observer1_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
    // Peer 2: identical wiring.
    signaling2_.reset(new PeerConnectionSignaling(
        talk_base::Thread::Current(), provider2_.get()));
    observer2_.reset(new MockSignalingObserver());
    signaling2_->SignalNewPeerConnectionMessage.connect(
        observer2_.get(), &MockSignalingObserver::OnSignalingMessage);
    signaling2_->SignalRemoteStreamAdded.connect(
        observer2_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
    signaling2_->SignalRemoteStreamRemoved.connect(
        observer2_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
  }
  // Default-constructed (empty) candidate list; passing it to
  // OnCandidatesReady moves a signaling object out of kInitializing.
  cricket::Candidates candidates_;
  talk_base::scoped_ptr<MockSignalingObserver> observer1_;
  talk_base::scoped_ptr<MockSignalingObserver> observer2_;
  talk_base::scoped_ptr<MockSessionDescriptionProvider> provider1_;
  talk_base::scoped_ptr<MockSessionDescriptionProvider> provider2_;
  talk_base::scoped_ptr<PeerConnectionSignaling> signaling1_;
  talk_base::scoped_ptr<PeerConnectionSignaling> signaling2_;
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
};
// Peer 1 sends one audio stream to peer 2, which sends nothing back.
// Verifies the offer/answer handshake, stream/track state transitions and
// that negotiation completes exactly once on each side.
TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  MockMediaStreamObserver stream_observer1(stream);
  // Add a local audio track.
  talk_base::scoped_refptr<LocalAudioTrackInterface>
      audio_track(AudioTrack::CreateLocal(kAudioTrackLabel1, NULL));
  stream->AddTrack(audio_track);
  MockMediaTrackObserver track_observer1(audio_track);
  // Peer 1 create an offer with only one audio track.
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection1(
      StreamCollectionImpl::Create());
  local_collection1->AddStream(stream);
  // Verify that the local stream is now initializing.
  EXPECT_EQ(MediaStreamInterface::kInitializing, stream_observer1.ready_state);
  // Verify that the audio track is now initializing.
  EXPECT_EQ(MediaStreamTrackInterface::kInitializing,
            track_observer1.track_state);
  // Peer 2 only receive. Create an empty collection
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection2(
      StreamCollectionImpl::Create());
  // Connect all messages sent from Peer1 to be received on Peer2
  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
  // Connect all messages sent from Peer2 to be received on Peer1
  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
  // Peer 1 generates the offer. It is not sent since there is no
  // local candidates ready.
  signaling1_->CreateOffer(local_collection1);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
  EXPECT_EQ(PeerConnectionSignaling::kInitializing, signaling1_->GetState());
  // Initialize signaling1_ by providing the candidates.
  signaling1_->OnCandidatesReady(candidates_);
  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
            signaling1_->GetState());
  // Process posted messages to allow signaling_1 to send the offer.
  talk_base::Thread::Current()->ProcessMessages(1);
  // Verify that signaling_2 is still not initialized.
  // Even though it have received an offer.
  EXPECT_EQ(PeerConnectionSignaling::kInitializing, signaling2_->GetState());
  // Provide the candidates to signaling_2 and let it process the offer.
  signaling2_->OnCandidatesReady(candidates_);
  talk_base::Thread::Current()->ProcessMessages(1);
  // Verify that the offer/answer have been exchanged and the state is good.
  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
  // Verify that the local stream is now sending.
  EXPECT_EQ(MediaStreamInterface::kLive, stream_observer1.ready_state);
  // Verify that the local audio track is now sending.
  EXPECT_EQ(MediaStreamTrackInterface::kLive, track_observer1.track_state);
  // Verify that PeerConnection2 is aware of the sending stream.
  EXPECT_TRUE(observer2_->RemoteStream(label) != NULL);
  // Verify that both peers have updated the session descriptions.
  EXPECT_EQ(1u, provider1_->update_session_description_counter_);
  EXPECT_EQ(1u, provider2_->update_session_description_counter_);
}
// Both peers create an offer at the same time ("glare"). The signaling
// objects must detect the collision (kGlare) and still converge to kIdle
// once messages are routed between them.
TEST_F(PeerConnectionSignalingTest, Glare) {
  // Initialize signaling1_ and signaling_2 by providing the candidates.
  signaling1_->OnCandidatesReady(candidates_);
  signaling2_->OnCandidatesReady(candidates_);
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  // Add a local audio track.
  talk_base::scoped_refptr<LocalAudioTrackInterface>
      audio_track(AudioTrack::CreateLocal(kAudioTrackLabel1, NULL));
  stream->AddTrack(audio_track);
  // Peer 1 create an offer with only one audio track.
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection1(
      StreamCollectionImpl::Create());
  local_collection1->AddStream(stream);
  signaling1_->CreateOffer(local_collection1);
  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
            signaling1_->GetState());
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
  // Peer 2 only receive. Create an empty collection.
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection2(
      StreamCollectionImpl::Create());
  // Peer 2 create an empty offer.
  signaling2_->CreateOffer(local_collection2);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
  // Peer 2 sends the offer to Peer1 and Peer1 sends its offer to Peer2.
  // Note: AnswerPeer has not been called yet, so the observers only
  // recorded the outgoing offers in last_message; deliver them manually.
  ASSERT_TRUE(!observer1_->last_message.empty());
  ASSERT_TRUE(!observer2_->last_message.empty());
  signaling2_->ProcessSignalingMessage(observer1_->last_message,
                                       local_collection2);
  signaling1_->ProcessSignalingMessage(observer2_->last_message,
                                       local_collection1);
  // Each side now has an outstanding offer plus a received offer: glare.
  EXPECT_EQ(PeerConnectionSignaling::kGlare, signaling1_->GetState());
  EXPECT_EQ(PeerConnectionSignaling::kGlare, signaling2_->GetState());
  // Make sure all messages are send between
  // the two PeerConnectionSignaling objects.
  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
  // Process all delayed posted messages. kWaitTime needs to be long enough
  // to cover the glare resolution back-off.
  talk_base::Thread::Current()->ProcessMessages(kWaitTime);
  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
  // Verify that PeerConnection2 is aware of the sending stream.
  EXPECT_TRUE(observer2_->RemoteStream(label) != NULL);
  // Verify that both peers have updated the session descriptions.
  EXPECT_EQ(1u, provider1_->update_session_description_counter_);
  EXPECT_EQ(1u, provider2_->update_session_description_counter_);
}
// After an initial empty negotiation, peer 2 adds an audio+video stream
// (renegotiation #2) and then removes it again (renegotiation #3). Verifies
// stream/track life-cycle states and the negotiation counters.
TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
  // Initialize signaling1_ and signaling_2 by providing the candidates.
  signaling1_->OnCandidatesReady(candidates_);
  signaling2_->OnCandidatesReady(candidates_);
  // Create a local stream.
  std::string label(kStreamLabel1);
  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
      MediaStream::Create(label));
  MockMediaStreamObserver stream_observer1(stream);
  // Add a local audio track.
  talk_base::scoped_refptr<LocalAudioTrackInterface>
      audio_track(AudioTrack::CreateLocal(kAudioTrackLabel1, NULL));
  stream->AddTrack(audio_track);
  MockMediaTrackObserver track_observer1(audio_track);
  // NOTE(review): SimpleOneWayCall does not call RegisterObserver
  // explicitly; confirm the observer's constructor does not already
  // register, otherwise this is a double registration.
  audio_track->RegisterObserver(&track_observer1);
  // Add a local video track.
  talk_base::scoped_refptr<LocalVideoTrackInterface>
      video_track(VideoTrack::CreateLocal(kVideoTrackLabel1, NULL));
  stream->AddTrack(video_track);
  // Peer 1 create an empty collection
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection1(
      StreamCollectionImpl::Create());
  // Peer 2 create an empty collection
  talk_base::scoped_refptr<StreamCollectionImpl> local_collection2(
      StreamCollectionImpl::Create());
  // Connect all messages sent from Peer1 to be received on Peer2
  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
  // Connect all messages sent from Peer2 to be received on Peer1
  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
  // Peer 1 creates an empty offer and send it to Peer2.
  signaling1_->CreateOffer(local_collection1);
  // Process posted messages.
  talk_base::Thread::Current()->ProcessMessages(1);
  // Verify that both peers have updated the session descriptions.
  EXPECT_EQ(1u, provider1_->update_session_description_counter_);
  EXPECT_EQ(1u, provider2_->update_session_description_counter_);
  // Peer2 add a stream.
  local_collection2->AddStream(stream);
  signaling2_->CreateOffer(local_collection2);
  talk_base::Thread::Current()->ProcessMessages(1);
  // Verify that the PeerConnection 2 local stream is now sending.
  EXPECT_EQ(MediaStreamInterface::kLive, stream_observer1.ready_state);
  EXPECT_EQ(MediaStreamTrackInterface::kLive, track_observer1.track_state);
  // Verify that PeerConnection1 is aware of the sending stream.
  EXPECT_TRUE(observer1_->RemoteStream(label) != NULL);
  // Verify that both peers have updated the session descriptions.
  EXPECT_EQ(2u, provider1_->update_session_description_counter_);
  EXPECT_EQ(2u, provider2_->update_session_description_counter_);
  // Remove the stream
  local_collection2->RemoveStream(stream);
  signaling2_->CreateOffer(local_collection2);
  talk_base::Thread::Current()->ProcessMessages(1);
  // Verify that PeerConnection1 is not aware of the sending stream.
  EXPECT_TRUE(observer1_->RemoteStream(label) == NULL);
  // Verify that the PeerConnection 2 local stream is now ended.
  EXPECT_EQ(MediaStreamInterface::kEnded, stream_observer1.ready_state);
  EXPECT_EQ(MediaStreamTrackInterface::kEnded, track_observer1.track_state);
  // Verify that both peers have updated the session descriptions.
  EXPECT_EQ(3u, provider1_->update_session_description_counter_);
  EXPECT_EQ(3u, provider2_->update_session_description_counter_);
}
} // namespace webrtc

View File

@ -1,58 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_
#define TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_
#include "talk/session/phone/mediasession.h"
#include "talk/p2p/base/candidate.h"
#include "talk/p2p/base/sessiondescription.h"
namespace webrtc {
// Interface used by the signaling layer to obtain offers and answers and to
// install remote session descriptions. Implementations keep ownership of
// every description they return; returned pointers stay valid until the
// next call on the same method.
class SessionDescriptionProvider {
 public:
  // Creates (and retains ownership of) a local offer based on |options|.
  virtual const cricket::SessionDescription* ProvideOffer(
      const cricket::MediaSessionOptions& options) = 0;
  // Transfer ownership of remote_offer.
  virtual const cricket::SessionDescription* SetRemoteSessionDescription(
      const cricket::SessionDescription* remote_offer,
      const std::vector<cricket::Candidate>& remote_candidates) = 0;
  // Creates (and retains ownership of) an answer to the remote description
  // previously set with SetRemoteSessionDescription.
  virtual const cricket::SessionDescription* ProvideAnswer(
      const cricket::MediaSessionOptions& options) = 0;
  // Called when an offer/answer exchange has completed.
  virtual void NegotiationDone() = 0;

 protected:
  // Not deleted through this interface.
  virtual ~SessionDescriptionProvider() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_

View File

@ -1,103 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
#define TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/peerconnection.h"
namespace webrtc {
// Implementation of StreamCollection.
class StreamCollectionImpl : public StreamCollectionInterface {
public:
static talk_base::scoped_refptr<StreamCollectionImpl> Create() {
talk_base::RefCountedObject<StreamCollectionImpl>* implementation =
new talk_base::RefCountedObject<StreamCollectionImpl>();
return implementation;
}
static talk_base::scoped_refptr<StreamCollectionImpl> Create(
StreamCollectionImpl* streams) {
talk_base::RefCountedObject<StreamCollectionImpl>* implementation =
new talk_base::RefCountedObject<StreamCollectionImpl>(streams);
return implementation;
}
virtual size_t count() {
return media_streams_.size();
}
virtual MediaStreamInterface* at(size_t index) {
return media_streams_.at(index);
}
virtual MediaStreamInterface* find(const std::string& label) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(label) == 0) {
return (*it);
}
}
return NULL;
}
void AddStream(MediaStreamInterface* stream) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(stream->label()) == 0)
return;
}
media_streams_.push_back(stream);
}
void RemoveStream(MediaStreamInterface* remove_stream) {
for (StreamVector::iterator it = media_streams_.begin();
it != media_streams_.end(); ++it) {
if ((*it)->label().compare(remove_stream->label()) == 0) {
media_streams_.erase(it);
break;
}
}
}
protected:
StreamCollectionImpl() {}
explicit StreamCollectionImpl(StreamCollectionImpl* original)
: media_streams_(original->media_streams_) {
}
typedef std::vector<talk_base::scoped_refptr<MediaStreamInterface> >
StreamVector;
StreamVector media_streams_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_

View File

@ -1,165 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/test/filevideocapturemodule.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "system_wrappers/interface/ref_count.h"
#else
#include "third_party/webrtc/files/include/ref_count.h"
#endif
FileVideoCaptureModule::~FileVideoCaptureModule() {
  camera_thread_->Stop();
  if (i420_file_ != NULL) {
    fclose(i420_file_);
  }
  // video_capture_ is owned by impl_; nothing to free here.
}
// Factory. Returns NULL on failure; on success the caller holds a reference
// to the new ref-counted module.
webrtc::VideoCaptureModule*
FileVideoCaptureModule::CreateFileVideoCaptureModule(const char* file_name) {
  webrtc::RefCountImpl<FileVideoCaptureModule>* module =
      new webrtc::RefCountImpl<FileVideoCaptureModule>();
  if (module->Init(file_name)) {
    return module;
  }
  // Release() drops the only reference and destroys the half-built module.
  module->Release();
  return NULL;
}
// TODO(henrike): deal with the rounding error.
// Sets the playback frame rate. Returns false for non-positive |fps|
// (previously this divided by zero); on success recomputes the per-frame
// delay in milliseconds.
bool FileVideoCaptureModule::SetFrameRate(int fps) {
  if (fps <= 0) {
    return false;
  }
  fps_ = fps;
  time_per_frame_ms_ = 1000 / fps;
  return true;
}
void FileVideoCaptureModule::SetSize(int width, int height) {
width_ = width;
height_ = height;
image_.reset(new uint8[GetI420FrameLength()]);
}
// Zero-initializes all members; real setup happens in Init().
FileVideoCaptureModule::FileVideoCaptureModule()
    : impl_(),
      i420_file_(NULL),
      camera_thread_(new talk_base::Thread()),
      video_capture_(NULL),
      started_(false),
      sent_frames_(0),
      next_frame_time_(0),
      time_per_frame_ms_(0),
      fps_(0),
      width_(0),
      height_(0),
      image_() {}
// Creates the underlying capture module, configures default rate/size,
// opens the I420 file and starts the camera thread. Returns false on any
// failure; the caller (CreateFileVideoCaptureModule) then releases us.
bool FileVideoCaptureModule::Init(const char* file_name) {
  // The factory fills in video_capture_ with the external-capture interface
  // used to inject frames.
  impl_ = webrtc::VideoCaptureFactory::Create(0,  // id
                                              video_capture_);
  if (impl_.get() == NULL) {
    return false;
  }
  if (video_capture_ == NULL) {
    return false;
  }
  if (!SetFrameRate(kStartFrameRate)) {
    return false;
  }
  SetSize(kStartWidth, kStartHeight);
  i420_file_ = fopen(file_name, "rb");
  if (i420_file_ == NULL) {
    // Not generally unexpected but for this class it is.
    ASSERT(false);
    return false;
  }
  if (!camera_thread_->Start()) {
    return false;
  }
  // Only one post, no need to add any data to post.
  camera_thread_->Post(this);
  return true;
}
// TODO(henrike): handle time wrapparound.
// Reads one I420 frame from the file (looping at EOF), injects it into the
// capture module and schedules the next frame on the camera thread.
void FileVideoCaptureModule::GenerateNewFrame() {
  if (!started_) {
    next_frame_time_ = talk_base::Time();
    started_ = true;
  }
  const int frame_length = GetI420FrameLength();
  // Read one frame; rewind and retry once if the end of file was reached.
  int bytes_read = fread(image_.get(), sizeof(uint8), frame_length,
                         i420_file_);
  if (bytes_read != frame_length) {
    fseek(i420_file_, 0, SEEK_SET);
    bytes_read = fread(image_.get(), sizeof(uint8), frame_length,
                       i420_file_);
    if (bytes_read != frame_length) {
      ASSERT(false);
      return;
    }
  }
  webrtc::VideoCaptureCapability capability;
  capability.width = width_;
  capability.height = height_;
  capability.maxFPS = 0;
  capability.expectedCaptureDelay = 0;
  capability.rawType = webrtc::kVideoI420;
  capability.codecType = webrtc::kVideoCodecUnknown;
  capability.interlaced = false;
  video_capture_->IncomingFrame(image_.get(), frame_length, capability,
                                GetTimestamp());
  ++sent_frames_;
  // Schedule the next frame, compensating for the time already spent.
  next_frame_time_ += time_per_frame_ms_;
  const uint32 now = talk_base::Time();
  const uint32 wait_time =
      (next_frame_time_ > now) ? next_frame_time_ - now : 0;
  camera_thread_->PostDelayed(wait_time, this);
}
// Bytes in one I420 frame: full-size Y plane plus quarter-size U and V
// planes, i.e. width * height * 3 / 2.
int FileVideoCaptureModule::GetI420FrameLength() {
  return (width_ * height_ * 3) / 2;
}
// TODO(henrike): use this function instead of/in addition to reading from a
// file.
// Fills |image| with a constant-color I420 frame (Y=128, U=64, V=32).
void FileVideoCaptureModule::SetFrame(uint8* image) {
  const int y_size = width_ * height_;
  const int chroma_size = y_size / 4;
  memset(image, 128, y_size);                          // Y plane.
  memset(image + y_size, 64, chroma_size);             // U plane.
  memset(image + y_size + chroma_size, 32, chroma_size);  // V plane.
}
// TODO(henrike): handle timestamp wrapparound.
// Timestamp for the next frame: fixed start time plus the media time of the
// frames already sent.
uint32 FileVideoCaptureModule::GetTimestamp() {
  return kStartTimeStamp + sent_frames_ * time_per_frame_ms_;
}

View File

@ -1,203 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This class implements the VideoCaptureModule interface. Instead of capturing
// frames from a camera it captures frames from a file.
#ifndef TALK_APP_WEBRTC_TEST_FILEVIDEOCAPTUREMODULE_H_
#define TALK_APP_WEBRTC_TEST_FILEVIDEOCAPTUREMODULE_H_
#include <stdio.h>
#include "talk/base/common.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/thread.h"
#include "talk/base/time.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "common_types.h"
#include "modules/video_capture/main/interface/video_capture.h"
#include "modules/video_capture/main/interface/video_capture_defines.h"
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "system_wrappers/interface/ref_count.h"
#include "system_wrappers/interface/scoped_refptr.h"
#else
#include "third_party/webrtc/files/include/common_types.h"
#include "third_party/webrtc/files/include/video_capture.h"
#include "third_party/webrtc/files/include/video_capture_defines.h"
#include "third_party/webrtc/files/include/video_capture_factory.h"
#include "third_party/webrtc/files/include/ref_count.h"
#include "third_party/webrtc/files/include/scoped_refptr.h"
#endif
// TODO(henrike): replace playing file with playing a buffer.
// VideoCaptureModule that reads I420 frames from a file on its own thread
// and injects them into a real capture module; every other call is
// forwarded to that module (impl_).
class FileVideoCaptureModule
    : public webrtc::VideoCaptureModule,
      public talk_base::MessageHandler {
 public:
  virtual ~FileVideoCaptureModule();
  // Returns NULL if the underlying module cannot be created or the file
  // cannot be opened.
  static VideoCaptureModule* CreateFileVideoCaptureModule(
      const char* file_name);
  bool SetFrameRate(int fps);
  void SetSize(int width, int height);
  // webrtc::VideoCaptureModule implementation; all calls forward to impl_.
  virtual int32_t Version(char* version,
                          uint32_t& remaining_buffer_in_bytes,
                          uint32_t& position) const {
    return impl_->Version(version, remaining_buffer_in_bytes,
                          position);
  }
  virtual int32_t ChangeUniqueId(const int32_t id) {
    return impl_->ChangeUniqueId(id);
  }
  virtual int32_t TimeUntilNextProcess() {
    return impl_->TimeUntilNextProcess();
  }
  virtual int32_t Process() {
    return impl_->Process();
  }
  virtual WebRtc_Word32 RegisterCaptureDataCallback(
      webrtc::VideoCaptureDataCallback& dataCallback) {
    return impl_->RegisterCaptureDataCallback(dataCallback);
  }
  virtual WebRtc_Word32 DeRegisterCaptureDataCallback() {
    return impl_->DeRegisterCaptureDataCallback();
  }
  virtual WebRtc_Word32 RegisterCaptureCallback(
      webrtc::VideoCaptureFeedBack& callBack) {
    return impl_->RegisterCaptureCallback(callBack);
  }
  virtual WebRtc_Word32 DeRegisterCaptureCallback() {
    return impl_->DeRegisterCaptureCallback();
  }
  virtual WebRtc_Word32 StartCapture(
      const webrtc::VideoCaptureCapability& capability) {
    return impl_->StartCapture(capability);
  }
  virtual WebRtc_Word32 StopCapture() {
    return impl_->StopCapture();
  }
  virtual WebRtc_Word32 StartSendImage(const webrtc::VideoFrame& videoFrame,
                                       WebRtc_Word32 frameRate = 1) {
    // Forward the caller's frame rate. The previous code passed
    // "frameRate = 1" — an assignment that always sent 1 and silently
    // ignored the argument.
    return impl_->StartSendImage(videoFrame, frameRate);
  }
  virtual WebRtc_Word32 StopSendImage() {
    return impl_->StopSendImage();
  }
  virtual const WebRtc_UWord8* CurrentDeviceName() const {
    return impl_->CurrentDeviceName();
  }
  virtual bool CaptureStarted() {
    return impl_->CaptureStarted();
  }
  virtual WebRtc_Word32 CaptureSettings(
      webrtc::VideoCaptureCapability& settings) {
    return impl_->CaptureSettings(settings);
  }
  virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS) {
    return impl_->SetCaptureDelay(delayMS);
  }
  virtual WebRtc_Word32 CaptureDelay() {
    return impl_->CaptureDelay();
  }
  virtual WebRtc_Word32 SetCaptureRotation(
      webrtc::VideoCaptureRotation rotation) {
    return impl_->SetCaptureRotation(rotation);
  }
  virtual VideoCaptureEncodeInterface* GetEncodeInterface(
      const webrtc::VideoCodec& codec) {
    return impl_->GetEncodeInterface(codec);
  }
  virtual WebRtc_Word32 EnableFrameRateCallback(const bool enable) {
    return impl_->EnableFrameRateCallback(enable);
  }
  virtual WebRtc_Word32 EnableNoPictureAlarm(const bool enable) {
    return impl_->EnableNoPictureAlarm(enable);
  }
  // Inherited from MesageHandler. Each posted message produces one frame.
  virtual void OnMessage(talk_base::Message* msg) {
    GenerateNewFrame();
  }

 protected:
  FileVideoCaptureModule();

 private:
  bool Init(const char* file_name);
  void GenerateNewFrame();
  int GetI420FrameLength();
  // Generate an arbitrary frame. (Will be used when file reading is replaced
  // with reading a buffer).
  void SetFrame(uint8* image);
  uint32 GetTimestamp();
  // Module interface implementation.
  webrtc::scoped_refptr<VideoCaptureModule> impl_;
  // File playing implementation.
  static const int kStartFrameRate = 30;
  // CIF
  static const int kStartWidth = 352;
  static const int kStartHeight = 288;
  static const uint32 kStartTimeStamp = 2000;
  FILE* i420_file_;  // Frame source; NULL until Init succeeds.
  talk_base::scoped_ptr<talk_base::Thread> camera_thread_;
  webrtc::VideoCaptureExternal* video_capture_;  // Owned by impl_.
  bool started_;
  int sent_frames_;
  uint32 next_frame_time_;
  uint32 time_per_frame_ms_;
  int fps_;
  int width_;
  int height_;
  talk_base::scoped_array<uint8> image_;  // One I420 frame buffer.
};
#endif // TALK_APP_WEBRTC_TEST_FILEVIDEOCAPTUREMODULE_H_

View File

@ -1,58 +0,0 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/mediastreamimpl.h"
#include "talk/session/phone/videorenderer.h"
namespace webrtc {
// VideoRendererImpl takes ownership of the wrapped cricket::VideoRenderer
// and deletes it when the last reference goes away.
class VideoRendererImpl : public VideoRendererWrapperInterface {
 public:
  explicit VideoRendererImpl(cricket::VideoRenderer* renderer)
      : renderer_(renderer) {}
  virtual cricket::VideoRenderer* renderer() { return renderer_; }

 protected:
  // Protected: instances are destroyed through ref counting only.
  ~VideoRendererImpl() { delete renderer_; }

 private:
  cricket::VideoRenderer* renderer_;  // Owned.
};
// Wraps |renderer| in a ref-counted VideoRendererImpl which takes ownership.
talk_base::scoped_refptr<VideoRendererWrapperInterface> CreateVideoRenderer(
    cricket::VideoRenderer* renderer) {
  return new talk_base::RefCountedObject<VideoRendererImpl>(renderer);
}
} // namespace webrtc

View File

@ -1,79 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/videotrackimpl.h"
#include <string>
namespace webrtc {
static const char kVideoTrackKind[] = "video";
// Constructs a remote track: no capture device, so video_device_ stays NULL.
VideoTrack::VideoTrack(const std::string& label)
    : MediaTrack<LocalVideoTrackInterface>(label),
      video_device_(NULL) {
}

// Constructs a local track bound to |video_device|; the scoped_refptr
// member takes a reference to the capture module.
VideoTrack::VideoTrack(const std::string& label,
                       VideoCaptureModule* video_device)
    : MediaTrack<LocalVideoTrackInterface>(label),
      video_device_(video_device) {
}
// Attaches |renderer| to this track (taking a reference to it) and notifies
// registered observers of the change.
void VideoTrack::SetRenderer(VideoRendererWrapperInterface* renderer) {
  video_renderer_ = renderer;
  Notifier<LocalVideoTrackInterface>::FireOnChanged();
}
// Returns the currently attached renderer wrapper, or NULL if none is set.
// The track keeps its reference; no ownership is transferred.
VideoRendererWrapperInterface* VideoTrack::GetRenderer() {
  return video_renderer_.get();
}
// Get the VideoCapture device associated with this track.
// Returns NULL for remote tracks, which are created without a device.
VideoCaptureModule* VideoTrack::GetVideoCapture() {
  return video_device_.get();
}
// Returns the MediaStreamTrack kind string for video tracks ("video").
std::string VideoTrack::kind() const {
  return kVideoTrackKind;  // Implicitly converted to std::string.
}
// Factory for remote tracks (no capture device). RefCountedObject supplies
// the AddRef/Release implementation; the scoped_refptr takes the first ref.
talk_base::scoped_refptr<VideoTrack> VideoTrack::CreateRemote(
    const std::string& label) {
  return new talk_base::RefCountedObject<VideoTrack>(label);
}
// Factory for local tracks capturing from |video_device|. RefCountedObject
// supplies AddRef/Release; the scoped_refptr takes the first reference.
talk_base::scoped_refptr<VideoTrack> VideoTrack::CreateLocal(
    const std::string& label,
    VideoCaptureModule* video_device) {
  return new talk_base::RefCountedObject<VideoTrack>(label, video_device);
}
} // namespace webrtc

View File

@ -1,73 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_
#define TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_
#include <string>
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/mediatrackimpl.h"
#include "talk/app/webrtc_dev/notifierimpl.h"
#include "talk/base/scoped_refptr.h"
#ifdef WEBRTC_RELATIVE_PATH
#include "modules/video_capture/main/interface/video_capture.h"
#else
#include "third_party/webrtc/files/include/video_capture.h"
#endif
namespace webrtc {
// Implementation of both local and remote video tracks. Remote tracks are
// created without a capture device; local tracks wrap a VideoCaptureModule.
// Instances are reference counted and must be created through the factory
// functions below (constructors are protected).
class VideoTrack : public MediaTrack<LocalVideoTrackInterface> {
 public:
  // Create a video track used for remote video tracks.
  static talk_base::scoped_refptr<VideoTrack> CreateRemote(
      const std::string& label);
  // Create a video track used for local video tracks.
  static talk_base::scoped_refptr<VideoTrack> CreateLocal(
      const std::string& label,
      VideoCaptureModule* video_device);
  // Returns the capture module, or NULL for remote tracks.
  virtual VideoCaptureModule* GetVideoCapture();
  // Renderer attachment; SetRenderer fires the change notification.
  virtual void SetRenderer(VideoRendererWrapperInterface* renderer);
  VideoRendererWrapperInterface* GetRenderer();
  // Returns the track kind string ("video").
  virtual std::string kind() const;
 protected:
  explicit VideoTrack(const std::string& label);
  VideoTrack(const std::string& label, VideoCaptureModule* video_device);
 private:
  talk_base::scoped_refptr<VideoCaptureModule> video_device_;  // NULL if remote.
  talk_base::scoped_refptr<VideoRendererWrapperInterface> video_renderer_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_

View File

@ -1,639 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/webrtcjson.h"
#include <stdio.h>
#include <string>
#include <vector>
#include "talk/base/json.h"
#include "talk/base/logging.h"
#include "talk/base/stringutils.h"
#include "talk/session/phone/codec.h"
#include "talk/session/phone/cryptoparams.h"
#include "talk/session/phone/mediasession.h"
#include "talk/session/phone/mediasessionclient.h"
namespace webrtc {
static const int kIceComponent = 1;
static const int kIceFoundation = 1;

// Indexed by PeerConnectionMessage::PeerConnectionMessageType.
static const char* kMessageType[] = {
  "OFFER",
  "ANSWER",
  "ERROR",
};

// Forward declarations for the file-local helpers defined below.
static std::vector<Json::Value> ReadValues(const Json::Value& value,
                                           const std::string& key);

static void BuildContent(
    const cricket::SessionDescription* sdp,
    const cricket::ContentInfo& content_info,
    const std::vector<cricket::Candidate>& candidates,
    bool video,
    Json::Value* content);
static void BuildCandidate(const std::vector<cricket::Candidate>& candidates,
                           bool video,
                           std::vector<Json::Value>* jcandidates);
static void BuildRtpMapParams(const cricket::ContentInfo& content_info,
                              bool video,
                              std::vector<Json::Value>* rtpmap);
static void BuildCrypto(const cricket::ContentInfo& content_info,
                        bool video,
                        std::vector<Json::Value>* cryptos);
static void BuildTrack(const cricket::SessionDescription* sdp,
                       bool video,
                       std::vector<Json::Value>* track);

static std::string Serialize(const Json::Value& value);
// Fixed: this declaration previously took |Json::Value&| while the definition
// (and its call site) use |Json::Value*|. The mismatch meant the static
// function declared here was never defined and the actual definition was a
// distinct, non-static function.
static bool Deserialize(const std::string& message, Json::Value* value);

bool ParseContent(const Json::Value& jmessage,
                  cricket::SessionDescription* sdp,
                  std::vector<cricket::Candidate>* candidates);
static bool ParseAudioCodec(const Json::Value& value,
                            cricket::AudioContentDescription* content);
static bool ParseVideoCodec(const Json::Value& value,
                            cricket::VideoContentDescription* content);
static bool ParseCrypto(const Json::Value& content,
                        cricket::MediaContentDescription* desc);
static bool ParseCandidates(const Json::Value& content,
                            std::vector<cricket::Candidate>* candidates);
static bool ParseTrack(const Json::Value& content,
                       cricket::MediaContentDescription* content_desc);

// Append() overloads: set |key| on |object| to the given value.
static void Append(Json::Value* object, const std::string& key, bool value);
static void Append(Json::Value* object, const std::string& key,
                   const char* value);
static void Append(Json::Value* object, const std::string& key, int value);
static void Append(Json::Value* object, const std::string& key,
                   const std::string& value);
static void Append(Json::Value* object, const std::string& key, uint32 value);
static void Append(Json::Value* object, const std::string& key,
                   const Json::Value& value);
static void Append(Json::Value* object,
                   const std::string& key,
                   const std::vector<Json::Value>& values);
// Serializes a signaling message into its JSON wire format. For kError
// messages only the error code is emitted; otherwise the audio/video
// contents of |sdp| plus the matching |candidates| are serialized.
std::string JsonSerialize(
    const webrtc::PeerConnectionMessage::PeerConnectionMessageType type,
    int error_code,
    const cricket::SessionDescription* sdp,
    const std::vector<cricket::Candidate>& candidates) {
  Json::Value media;
  // TODO(ronghuawu): Replace magic strings.
  Append(&media, "SDP", kMessageType[type]);
  if (type == webrtc::PeerConnectionMessage::kError) {
    // Error messages carry only the code; no session description.
    Append(&media, "error_code", error_code);
    return Serialize(media);
  }

  const cricket::ContentInfo* audio_content = GetFirstAudioContent(sdp);
  const cricket::ContentInfo* video_content = GetFirstVideoContent(sdp);

  // The "TOGETHER" group always lists both media types.
  std::vector<Json::Value> bundled;
  bundled.push_back("audio");
  bundled.push_back("video");

  std::vector<Json::Value> content_list;
  if (audio_content) {
    Json::Value jcontent;
    BuildContent(sdp, *audio_content, candidates, false, &jcontent);
    content_list.push_back(jcontent);
  }
  if (video_content) {
    Json::Value jcontent;
    BuildContent(sdp, *video_content, candidates, true, &jcontent);
    content_list.push_back(jcontent);
  }
  Append(&media, "content", content_list);
  Append(&media, "TOGETHER", bundled);

  return Serialize(media);
}
// Fills |content| with the JSON representation of one media section:
// media type, rtcp_mux flag, rtpmap, crypto params, candidates and tracks.
void BuildContent(
    const cricket::SessionDescription* sdp,
    const cricket::ContentInfo& content_info,
    const std::vector<cricket::Candidate>& candidates,
    bool video,
    Json::Value* content) {
  std::string label("media");
  // TODO(ronghuawu): Use enum instead of bool video to prepare for other
  // media types such as the data media stream.
  if (video) {
    Append(content, label, "video");
  } else {
    Append(content, label, "audio");
  }

  const cricket::MediaContentDescription* media_info =
      static_cast<const cricket::MediaContentDescription*> (
          content_info.description);
  // rtcp_mux is only emitted when enabled; the parser defaults it to false.
  if (media_info->rtcp_mux()) {
    Append(content, "rtcp_mux", true);
  }

  // rtpmap
  std::vector<Json::Value> rtpmap;
  BuildRtpMapParams(content_info, video, &rtpmap);
  Append(content, "rtpmap", rtpmap);

  // crypto
  std::vector<Json::Value> crypto;
  BuildCrypto(content_info, video, &crypto);
  Append(content, "crypto", crypto);

  // candidate
  std::vector<Json::Value> jcandidates;
  BuildCandidate(candidates, video, &jcandidates);
  Append(content, "candidate", jcandidates);

  // track
  std::vector<Json::Value> track;
  BuildTrack(sdp, video, &track);
  Append(content, "track", track);
}
// Adds one rtpmap entry per codec in |content_info| to |rtpmap|, keyed by
// payload type id. |video| selects between the audio and video codec lists;
// only audio entries carry a clockrate.
void BuildRtpMapParams(const cricket::ContentInfo& content_info,
                       bool video,
                       std::vector<Json::Value>* rtpmap) {
  if (!video) {
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            content_info.description);
    std::vector<cricket::AudioCodec>::const_iterator iter =
        audio_desc->codecs().begin();
    std::vector<cricket::AudioCodec>::const_iterator iter_end =
        audio_desc->codecs().end();
    for (; iter != iter_end; ++iter) {
      Json::Value codec;
      std::string codec_str(std::string("audio/").append(iter->name));
      // adding clockrate
      Append(&codec, "clockrate", iter->clockrate);
      Append(&codec, "codec", codec_str);
      Json::Value codec_id;
      Append(&codec_id, talk_base::ToString(iter->id), codec);
      rtpmap->push_back(codec_id);
    }
  } else {
    // Renamed from |video| to |video_desc|: the local previously shadowed
    // the bool parameter of the same name.
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            content_info.description);
    std::vector<cricket::VideoCodec>::const_iterator iter =
        video_desc->codecs().begin();
    std::vector<cricket::VideoCodec>::const_iterator iter_end =
        video_desc->codecs().end();
    for (; iter != iter_end; ++iter) {
      Json::Value codec;
      std::string codec_str(std::string("video/").append(iter->name));
      Append(&codec, "codec", codec_str);
      Json::Value codec_id;
      Append(&codec_id, talk_base::ToString(iter->id), codec);
      rtpmap->push_back(codec_id);
    }
  }
}
// Serializes every CryptoParams of |content_info| into |cryptos| as
// {"cipher_suite", "key_params"} objects. |video| is unused; the crypto
// list lives on the media description itself.
void BuildCrypto(const cricket::ContentInfo& content_info,
                 bool video,
                 std::vector<Json::Value>* cryptos) {
  const cricket::MediaContentDescription* desc =
      static_cast<const cricket::MediaContentDescription*>(
          content_info.description);
  for (std::vector<cricket::CryptoParams>::const_iterator it =
           desc->cryptos().begin();
       it != desc->cryptos().end(); ++it) {
    Json::Value jcrypto;
    Append(&jcrypto, "cipher_suite", it->cipher_suite);
    Append(&jcrypto, "key_params", it->key_params);
    cryptos->push_back(jcrypto);
  }
}
// Serializes the subset of |candidates| belonging to the requested media
// type into |jcandidates|. Selection is by channel name: "video_rtp"/
// "video_rtcp" when |video| is true, "rtp"/"rtcp" otherwise.
void BuildCandidate(const std::vector<cricket::Candidate>& candidates,
                    bool video,
                    std::vector<Json::Value>* jcandidates) {
  std::vector<cricket::Candidate>::const_iterator iter =
      candidates.begin();
  std::vector<cricket::Candidate>::const_iterator iter_end =
      candidates.end();
  for (; iter != iter_end; ++iter) {
    // compare() == 0 means the names match, hence the negations.
    if ((video && (!iter->name().compare("video_rtcp") ||
                   (!iter->name().compare("video_rtp")))) ||
        (!video && (!iter->name().compare("rtp") ||
                    (!iter->name().compare("rtcp"))))) {
      Json::Value jcandidate;
      // component/foundation are fixed constants in this format.
      Append(&jcandidate, "component", kIceComponent);
      Append(&jcandidate, "foundation", kIceFoundation);
      Append(&jcandidate, "generation", iter->generation());
      Append(&jcandidate, "proto", iter->protocol());
      Append(&jcandidate, "priority", iter->preference_str());
      Append(&jcandidate, "ip", iter->address().IPAsString());
      Append(&jcandidate, "port", iter->address().PortAsString());
      Append(&jcandidate, "type", iter->type());
      Append(&jcandidate, "name", iter->name());
      Append(&jcandidate, "network_name", iter->network_name());
      Append(&jcandidate, "username", iter->username());
      Append(&jcandidate, "password", iter->password());
      jcandidates->push_back(jcandidate);
    }
  }
}
// Serializes the stream parameters (ssrc/cname/labels) of the first audio
// or video content of |sdp| into |tracks|. No-op when the content is absent.
void BuildTrack(const cricket::SessionDescription* sdp,
                bool video,
                std::vector<Json::Value>* tracks) {
  const cricket::ContentInfo* content;
  if (video)
    content = GetFirstVideoContent(sdp);
  else
    content = GetFirstAudioContent(sdp);
  if (!content)
    return;
  const cricket::MediaContentDescription* desc =
      static_cast<const cricket::MediaContentDescription*>(
          content->description);
  for (cricket::StreamParamsVec::const_iterator it = desc->streams().begin();
       it != desc->streams().end();
       ++it) {
    // TODO(ronghuawu): Support ssrcsgroups.
    Json::Value track;
    // Exactly one ssrc per stream is assumed by this wire format.
    ASSERT(it->ssrcs.size() == 1);
    Append(&track, "ssrc", it->ssrcs[0]);
    Append(&track, "cname", it->cname);
    Append(&track, "stream_label", it->sync_label);
    Append(&track, "label", it->name);
    tracks->push_back(track);
  }
}
// Pretty-prints |value| as a JSON string.
std::string Serialize(const Json::Value& value) {
  return Json::StyledWriter().write(value);
}
// Parses |message| into |*value|; returns false on malformed JSON.
bool Deserialize(const std::string& message, Json::Value* value) {
  return Json::Reader().parse(message, *value);
}
// Parses |signaling_message| (produced by JsonSerialize) back into its
// components. Returns false on NULL out-params, malformed JSON, an unknown
// message type, or invalid content. For kError messages only |error_code|
// is filled; otherwise |sdp| and |candidates| are populated.
bool JsonDeserialize(
    webrtc::PeerConnectionMessage::PeerConnectionMessageType* type,
    webrtc::PeerConnectionMessage::ErrorCode* error_code,
    cricket::SessionDescription* sdp,
    std::vector<cricket::Candidate>* candidates,
    const std::string& signaling_message) {
  ASSERT(type);
  ASSERT(error_code);
  ASSERT(sdp);
  ASSERT(candidates);
  // Defensive NULL checks in release builds (ASSERT compiles out).
  if (type == NULL || error_code == NULL || sdp == NULL || candidates == NULL)
    return false;

  // first deserialize message
  Json::Value jmessage;
  if (!Deserialize(signaling_message, &jmessage)) {
    return false;
  }

  // Get the message type by matching against kMessageType; the array index
  // doubles as the enum value.
  std::string message_type;
  bool valid_message_type = false;
  if (!GetStringFromJsonObject(jmessage, "SDP", &message_type))
    return false;
  for (int i = 0; i < ARRAY_SIZE(kMessageType); i++) {
    if (message_type.compare(kMessageType[i]) == 0) {
      *type = static_cast<
          webrtc::PeerConnectionMessage::PeerConnectionMessageType>(i);
      valid_message_type = true;
      break;
    }
  }
  if (!valid_message_type)
    return false;

  if (*type == webrtc::PeerConnectionMessage::kError) {
    int code;
    if (!GetIntFromJsonObject(jmessage, "error_code", &code))
      return false;
    *error_code = static_cast<webrtc::PeerConnectionMessage::ErrorCode>(code);
    return true;
  }

  return ParseContent(jmessage, sdp, candidates);
}
// Parses the "content" array of |jmessage| into |sdp| and |candidates|.
// Each content entry contributes its candidates plus an audio or video
// media description (codecs, rtcp_mux, crypto, tracks). Returns false on
// missing/invalid content.
bool ParseContent(const Json::Value& jmessage,
                  cricket::SessionDescription* sdp,
                  std::vector<cricket::Candidate>* candidates) {
  // Get content
  std::vector<Json::Value> contents = ReadValues(jmessage, "content");
  if (contents.size() == 0)
    return false;
  for (size_t i = 0; i < contents.size(); ++i) {
    Json::Value content = contents[i];
    // candidates
    if (!ParseCandidates(content, candidates))
      return false;
    // rtcp_mux defaults to false when the key is absent.
    bool rtcp_mux;
    if (!GetBoolFromJsonObject(content, "rtcp_mux", &rtcp_mux))
      rtcp_mux = false;
    // rtpmap
    if (content["media"].asString().compare("audio") == 0) {
      cricket::AudioContentDescription* audio_content =
          new cricket::AudioContentDescription();
      if (!ParseAudioCodec(content, audio_content)) {
        // Not yet owned by |sdp|: delete on every failure path to avoid the
        // leak the original early returns had.
        delete audio_content;
        return false;
      }
      audio_content->set_rtcp_mux(rtcp_mux);
      audio_content->SortCodecs();
      // crypto
      if (!ParseCrypto(content, audio_content)) {
        delete audio_content;
        return false;
      }
      // tracks
      if (!ParseTrack(content, audio_content)) {
        delete audio_content;
        return false;
      }
      // AddContent takes ownership of |audio_content|.
      (sdp)->AddContent(cricket::CN_AUDIO,
                        cricket::NS_JINGLE_RTP, audio_content);
    } else if (content["media"].asString().compare("video") == 0) {
      cricket::VideoContentDescription* video_content =
          new cricket::VideoContentDescription();
      if (!ParseVideoCodec(content, video_content)) {
        delete video_content;
        return false;
      }
      video_content->set_rtcp_mux(rtcp_mux);
      video_content->SortCodecs();
      // crypto
      if (!ParseCrypto(content, video_content)) {
        delete video_content;
        return false;
      }
      if (!ParseTrack(content, video_content)) {
        delete video_content;
        return false;
      }
      // AddContent takes ownership of |video_content|.
      (sdp)->AddContent(cricket::CN_VIDEO,
                        cricket::NS_JINGLE_RTP, video_content);
    }
  }
  return true;
}
// Converts the "rtpmap" entries of |value| into AudioCodecs on |content|.
// Each entry maps a payload type id to {"codec": "audio/NAME", "clockrate"}.
// Returns true even when rtpmap is empty.
bool ParseAudioCodec(const Json::Value& value,
                     cricket::AudioContentDescription* content) {
  std::vector<Json::Value> rtpmap(ReadValues(value, "rtpmap"));
  // When there's no codecs in common, rtpmap can be empty.
  if (rtpmap.empty())
    return true;

  std::vector<Json::Value>::const_iterator iter =
      rtpmap.begin();
  std::vector<Json::Value>::const_iterator iter_end =
      rtpmap.end();
  for (; iter != iter_end; ++iter) {
    cricket::AudioCodec codec;
    // The payload type id is the single member name of this entry.
    std::string pltype(iter->begin().memberName());
    talk_base::FromString(pltype, &codec.id);
    Json::Value codec_info((*iter)[pltype]);
    std::string codec_name;
    if (!GetStringFromJsonObject(codec_info, "codec", &codec_name))
      continue;
    std::vector<std::string> tokens;
    talk_base::split(codec_name, '/', &tokens);
    // Expect "audio/<name>". Skip malformed entries instead of indexing
    // past the end of |tokens| (previously undefined behavior).
    if (tokens.size() < 2)
      continue;
    codec.name = tokens[1];
    // clockrate is optional; codec.clockrate keeps its default if absent.
    GetIntFromJsonObject(codec_info, "clockrate", &codec.clockrate);
    content->AddCodec(codec);
  }
  return true;
}
// Converts the "rtpmap" entries of |value| into VideoCodecs on |content|.
// Mirrors ParseAudioCodec but without a clockrate field.
// Returns true even when rtpmap is empty.
bool ParseVideoCodec(const Json::Value& value,
                     cricket::VideoContentDescription* content) {
  std::vector<Json::Value> rtpmap(ReadValues(value, "rtpmap"));
  // When there's no codecs in common, rtpmap can be empty.
  if (rtpmap.empty())
    return true;

  std::vector<Json::Value>::const_iterator iter =
      rtpmap.begin();
  std::vector<Json::Value>::const_iterator iter_end =
      rtpmap.end();
  for (; iter != iter_end; ++iter) {
    cricket::VideoCodec codec;
    // The payload type id is the single member name of this entry.
    std::string pltype(iter->begin().memberName());
    talk_base::FromString(pltype, &codec.id);
    Json::Value codec_info((*iter)[pltype]);
    std::vector<std::string> tokens;
    talk_base::split(codec_info["codec"].asString(), '/', &tokens);
    // Expect "video/<name>". Skip malformed entries instead of indexing
    // past the end of |tokens| (previously undefined behavior).
    if (tokens.size() < 2)
      continue;
    codec.name = tokens[1];
    content->AddCodec(codec);
  }
  return true;
}
// Converts every "candidate" entry of |content| into a cricket::Candidate
// appended to |candidates|. All fields are required; returns false as soon
// as one is missing or unparsable.
bool ParseCandidates(const Json::Value& content,
                     std::vector<cricket::Candidate>* candidates) {
  std::vector<Json::Value> jcandidates(ReadValues(content, "candidate"));
  std::vector<Json::Value>::const_iterator iter =
      jcandidates.begin();
  std::vector<Json::Value>::const_iterator iter_end =
      jcandidates.end();
  for (; iter != iter_end; ++iter) {
    cricket::Candidate cand;

    unsigned int generation;
    if (!GetUIntFromJsonObject(*iter, "generation", &generation))
      return false;
    cand.set_generation_str(talk_base::ToString(generation));

    std::string proto;
    if (!GetStringFromJsonObject(*iter, "proto", &proto))
      return false;
    cand.set_protocol(proto);

    std::string priority;
    if (!GetStringFromJsonObject(*iter, "priority", &priority))
      return false;
    cand.set_preference_str(priority);

    // ip + port are recombined into a SocketAddress.
    std::string str;
    talk_base::SocketAddress addr;
    if (!GetStringFromJsonObject(*iter, "ip", &str))
      return false;
    addr.SetIP(str);
    if (!GetStringFromJsonObject(*iter, "port", &str))
      return false;
    int port;
    if (!talk_base::FromString(str, &port))
      return false;
    addr.SetPort(port);
    cand.set_address(addr);

    if (!GetStringFromJsonObject(*iter, "type", &str))
      return false;
    cand.set_type(str);
    if (!GetStringFromJsonObject(*iter, "name", &str))
      return false;
    cand.set_name(str);
    if (!GetStringFromJsonObject(*iter, "network_name", &str))
      return false;
    cand.set_network_name(str);
    if (!GetStringFromJsonObject(*iter, "username", &str))
      return false;
    cand.set_username(str);
    if (!GetStringFromJsonObject(*iter, "password", &str))
      return false;
    cand.set_password(str);
    candidates->push_back(cand);
  }
  return true;
}
// Converts every "crypto" entry of |content| into CryptoParams on |desc|.
// Both cipher_suite and key_params are required; returns false if either
// is missing.
bool ParseCrypto(const Json::Value& content,
                 cricket::MediaContentDescription* desc) {
  std::vector<Json::Value> jcryptos(ReadValues(content, "crypto"));
  for (std::vector<Json::Value>::const_iterator it = jcryptos.begin();
       it != jcryptos.end(); ++it) {
    cricket::CryptoParams params;
    if (!GetStringFromJsonObject(*it, "cipher_suite", &params.cipher_suite))
      return false;
    if (!GetStringFromJsonObject(*it, "key_params", &params.key_params))
      return false;
    desc->AddCrypto(params);
  }
  return true;
}
// Converts every "track" entry of |content| into a StreamParams on
// |content_desc|. "ssrc" and "cname" are required; "label" defaults to
// empty and "stream_label" falls back to cname when absent.
bool ParseTrack(const Json::Value& content,
                cricket::MediaContentDescription* content_desc) {
  ASSERT(content_desc);
  // Defensive NULL check in release builds (ASSERT compiles out).
  if (!content_desc)
    return false;

  std::vector<Json::Value> tracks(ReadValues(content, "track"));
  std::vector<Json::Value>::const_iterator iter =
      tracks.begin();
  std::vector<Json::Value>::const_iterator iter_end =
      tracks.end();
  for (; iter != iter_end; ++iter) {
    uint32 ssrc;
    std::string label;
    std::string cname;
    std::string stream_label;
    if (!GetUIntFromJsonObject(*iter, "ssrc", &ssrc))
      return false;
    // label is optional, it will be empty string if doesn't exist
    GetStringFromJsonObject(*iter, "label", &label);
    if (!GetStringFromJsonObject(*iter, "cname", &cname))
      return false;
    // stream_label is optional, it will be the same as cname if it
    // doesn't exist.
    GetStringFromJsonObject(*iter, "stream_label", &stream_label);
    if (stream_label.empty())
      stream_label = cname;
    content_desc->AddStream(cricket::StreamParams(label, ssrc, cname,
                                                  stream_label));
  }
  return true;
}
// Returns the elements of the JSON array |value[key]| as a vector.
// Yields an empty vector when the key is absent or the value is empty.
std::vector<Json::Value> ReadValues(
    const Json::Value& value, const std::string& key) {
  const Json::Value& entries = value[key];
  std::vector<Json::Value> objects;
  objects.reserve(entries.size());
  for (Json::Value::ArrayIndex i = 0; i < entries.size(); ++i)
    objects.push_back(entries[i]);
  return objects;
}
// The Append() overloads below set |key| on |object|; Json::Value's
// converting constructors handle each primitive type.
void Append(Json::Value* object, const std::string& key, bool value) {
  (*object)[key] = value;
}

void Append(Json::Value* object, const std::string& key, const char* value) {
  (*object)[key] = value;
}

void Append(Json::Value* object, const std::string& key, int value) {
  (*object)[key] = value;
}

void Append(Json::Value* object, const std::string& key,
            const std::string& value) {
  (*object)[key] = value;
}

void Append(Json::Value* object, const std::string& key, uint32 value) {
  (*object)[key] = value;
}

void Append(Json::Value* object, const std::string& key,
            const Json::Value& value) {
  (*object)[key] = value;
}

// Array variant: appends each element of |values| to the array at |key|.
void Append(Json::Value* object,
            const std::string& key,
            const std::vector<Json::Value>& values) {
  Json::Value& array = (*object)[key];
  for (std::vector<Json::Value>::const_iterator iter = values.begin();
       iter != values.end(); ++iter) {
    array.append(*iter);
  }
}
} // namespace webrtc

View File

@ -1,62 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_WEBRTCJSON_H_
#define TALK_APP_WEBRTC_WEBRTCJSON_H_
#include <string>
#include <vector>
#ifdef WEBRTC_RELATIVE_PATH
#include "json/json.h"
#else
#include "third_party/jsoncpp/json.h"
#endif
#include "talk/app/webrtc_dev/peerconnectionmessage.h"
#include "talk/p2p/base/candidate.h"
namespace cricket {
// Forward declared to avoid pulling in the full session description header.
class SessionDescription;
}

namespace webrtc {
// Serializes a PeerConnection signaling message (|type| plus |sdp| and
// |candidates|, or just |error_code| for kError messages) into its JSON
// wire format.
std::string JsonSerialize(
    const webrtc::PeerConnectionMessage::PeerConnectionMessageType type,
    int error_code,
    const cricket::SessionDescription* sdp,
    const std::vector<cricket::Candidate>& candidates);

// Parses |signaling_message| produced by JsonSerialize back into its parts.
// All out-parameters must be non-NULL; returns false on malformed input.
bool JsonDeserialize(
    webrtc::PeerConnectionMessage::PeerConnectionMessageType* type,
    webrtc::PeerConnectionMessage::ErrorCode* error_code,
    cricket::SessionDescription* sdp,
    std::vector<cricket::Candidate>* candidates,
    const std::string& signaling_message);
}
#endif // TALK_APP_WEBRTC_WEBRTCJSON_H_

View File

@ -1,335 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc_dev/webrtcsession.h"
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
#include "talk/base/helpers.h"
#include "talk/base/logging.h"
#include "talk/session/phone/channel.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/session/phone/mediasession.h"
using cricket::MediaContentDescription;
namespace webrtc {
// Message id for the candidate-gathering watchdog posted to the signaling
// thread (see OnTransportWritable/OnMessage).
enum {
  MSG_CANDIDATE_TIMEOUT = 101,
};

// We allow 30 seconds to establish a connection, otherwise it's an error.
static const int kCallSetupTimeout = 30 * 1000;

// Session will accept one candidate per transport channel and dropping other
// candidates generated for that channel. During the session initialization
// one cricket::VoiceChannel and one cricket::VideoChannel will be created with
// rtcp enabled.
static const int kAllowedCandidates = 4;

// TODO(mallinath) - These are magic string used by cricket::VideoChannel.
// These should be moved to a common place.
// Changed from file-scope std::string objects: globals of class type need
// dynamic initialization (static-init-order hazards, startup cost). The
// std::string::compare(const char*) overload keeps call sites unchanged.
static const char kRtpVideoChannelStr[] = "video_rtp";
static const char kRtcpVideoChannelStr[] = "video_rtcp";
// Constructs the session on top of cricket::BaseSession with a random
// session id and the NS_JINGLE_RTP content type. |channel_manager| is not
// owned and is assumed to outlive this session — TODO confirm with callers.
WebRtcSession::WebRtcSession(cricket::ChannelManager* channel_manager,
                             talk_base::Thread* signaling_thread,
                             talk_base::Thread* worker_thread,
                             cricket::PortAllocator* port_allocator)
    : cricket::BaseSession(signaling_thread, worker_thread, port_allocator,
                           talk_base::ToString(talk_base::CreateRandomId()),
                           cricket::NS_JINGLE_RTP, true),
      channel_manager_(channel_manager),
      observer_(NULL),
      session_desc_factory_(channel_manager) {
}
// Tears down the voice/video channels via Terminate().
WebRtcSession::~WebRtcSession() {
  Terminate();
}
// Creates the audio and video channels; returns false if either fails.
bool WebRtcSession::Initialize() {
  return CreateChannels();
}
// Destroys the voice and video channels through the channel manager.
// Idempotent: release() nulls the scoped_ptrs, so repeated calls are no-ops.
void WebRtcSession::Terminate() {
  if (voice_channel_.get()) {
    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
  }
  if (video_channel_.get()) {
    channel_manager_->DestroyVideoChannel(video_channel_.release());
  }
}
// Creates the rtcp-enabled CN_AUDIO voice channel and CN_VIDEO video channel
// and kicks off ICE candidate gathering. Returns false if either channel
// cannot be created.
bool WebRtcSession::CreateChannels() {
  voice_channel_.reset(channel_manager_->CreateVoiceChannel(
      this, cricket::CN_AUDIO, true));
  if (!voice_channel_.get()) {
    LOG(LS_ERROR) << "Failed to create voice channel";
    return false;
  }

  // The video channel piggybacks on the voice channel for audio/video sync.
  video_channel_.reset(channel_manager_->CreateVideoChannel(
      this, cricket::CN_VIDEO, true, voice_channel_.get()));
  if (!video_channel_.get()) {
    LOG(LS_ERROR) << "Failed to create video channel";
    return false;
  }

  // TransportProxies and TransportChannels will be created when
  // CreateVoiceChannel and CreateVideoChannel are called.
  // Try connecting all transport channels. This is necessary to generate
  // ICE candidates.
  SpeculativelyConnectAllTransportChannels();
  return true;
}
// Routes remote |candidates| to the audio and video transport proxies.
// Candidates are partitioned by channel name: video rtp/rtcp go to the
// CN_VIDEO proxy, everything else to CN_AUDIO. Completes proxy negotiation
// lazily before delivering the first batch.
void WebRtcSession::SetRemoteCandidates(
    const cricket::Candidates& candidates) {
  // First partition the candidates for the proxies. During creation of channels
  // we created CN_AUDIO (audio) and CN_VIDEO (video) proxies.
  cricket::Candidates audio_candidates;
  cricket::Candidates video_candidates;
  for (cricket::Candidates::const_iterator citer = candidates.begin();
       citer != candidates.end(); ++citer) {
    if (((*citer).name().compare(kRtpVideoChannelStr) == 0) ||
        ((*citer).name().compare(kRtcpVideoChannelStr)) == 0) {
      // Candidate names for video rtp and rtcp channel
      video_candidates.push_back(*citer);
    } else {
      // Candidates for audio rtp and rtcp channel
      // Channel name will be "rtp" and "rtcp"
      audio_candidates.push_back(*citer);
    }
  }

  if (!audio_candidates.empty()) {
    cricket::TransportProxy* audio_proxy = GetTransportProxy(cricket::CN_AUDIO);
    if (audio_proxy) {
      // CompleteNegotiation will set actual impl's in Proxy.
      if (!audio_proxy->negotiated())
        audio_proxy->CompleteNegotiation();
      // TODO(mallinath) - Add a interface to TransportProxy to accept
      // remote candidate list.
      audio_proxy->impl()->OnRemoteCandidates(audio_candidates);
    } else {
      LOG(LS_INFO) << "No audio TransportProxy exists";
    }
  }

  if (!video_candidates.empty()) {
    cricket::TransportProxy* video_proxy = GetTransportProxy(cricket::CN_VIDEO);
    if (video_proxy) {
      // CompleteNegotiation will set actual impl's in Proxy.
      if (!video_proxy->negotiated())
        video_proxy->CompleteNegotiation();
      // TODO(mallinath) - Add a interface to TransportProxy to accept
      // remote candidate list.
      video_proxy->impl()->OnRemoteCandidates(video_candidates);
    } else {
      LOG(LS_INFO) << "No video TransportProxy exists";
    }
  }
}
// Transport callback: signaling is always "ready" in this session, so
// immediately acknowledge the request.
void WebRtcSession::OnTransportRequestSignaling(
    cricket::Transport* transport) {
  ASSERT(signaling_thread()->IsCurrent());
  transport->OnSignalingReady();
}
// Transport callback fired when the transport starts connecting.
void WebRtcSession::OnTransportConnecting(cricket::Transport* transport) {
  ASSERT(signaling_thread()->IsCurrent());
  // start monitoring for the write state of the transport.
  OnTransportWritable(transport);
}
// Transport callback fired on write-state changes. Re-arms the 30-second
// watchdog while the transport has channels but is not yet writable; a
// pending watchdog is always cleared first.
void WebRtcSession::OnTransportWritable(cricket::Transport* transport) {
  ASSERT(signaling_thread()->IsCurrent());
  // If the transport is not in writable state, start a timer to monitor
  // the state. If the transport doesn't become writable state in 30 seconds
  // then we are assuming call can't be continued.
  signaling_thread()->Clear(this, MSG_CANDIDATE_TIMEOUT);
  if (transport->HasChannels() && !transport->writable()) {
    signaling_thread()->PostDelayed(
        kCallSetupTimeout, this, MSG_CANDIDATE_TIMEOUT);
  }
}
// Collects locally gathered candidates. local_candidates_ keeps exactly
// one candidate per transport channel; once all kAllowedCandidates slots
// are filled, further callbacks are ignored and the observer is notified.
void WebRtcSession::OnTransportCandidatesReady(
    cricket::Transport* transport, const cricket::Candidates& candidates) {
  ASSERT(signaling_thread()->IsCurrent());
  const bool already_complete =
      (local_candidates_.size() == kAllowedCandidates);
  if (already_complete)
    return;
  InsertTransportCandidates(candidates);
  const bool complete_now =
      (local_candidates_.size() == kAllowedCandidates);
  if (complete_now && observer_) {
    observer_->OnCandidatesReady(local_candidates_);
  }
}
// cricket::BaseSession callback: a transport channel was destroyed.
// Intentionally a no-op beyond the thread check.
void WebRtcSession::OnTransportChannelGone(cricket::Transport* transport) {
  ASSERT(signaling_thread()->IsCurrent());
}
// Message-queue handler. Only the candidate timeout (armed in
// OnTransportWritable) is meaningful; anything else is silently dropped.
void WebRtcSession::OnMessage(talk_base::Message* msg) {
  if (msg->message_id == MSG_CANDIDATE_TIMEOUT) {
    LOG(LS_ERROR) << "Transport is not in writable state.";
    SignalError();
  }
}
// Merges |candidates| into local_candidates_, keeping at most one
// candidate per transport channel name; duplicates for an already-seen
// channel are dropped.
void WebRtcSession::InsertTransportCandidates(
    const cricket::Candidates& candidates) {
  cricket::Candidates::const_iterator it = candidates.begin();
  for (; it != candidates.end(); ++it) {
    const bool have_channel = CheckCandidate(it->name());
    if (!have_channel)
      local_candidates_.push_back(*it);
  }
}
// Check transport candidate already available for transport channel as only
// one cricket::Candidate allower per channel.
bool WebRtcSession::CheckCandidate(const std::string& name) {
bool ret = false;
for (cricket::Candidates::iterator iter = local_candidates_.begin();
iter != local_candidates_.end(); ++iter) {
if ((*iter).name().compare(name) == 0) {
ret = true;
break;
}
}
return ret;
}
// MediaProviderInterface implementation: registers |camera| as the
// capture source of the video channel. |name| is currently unused
// (single-camera support only).
void WebRtcSession::SetCaptureDevice(const std::string& name,
                                     VideoCaptureModule* camera) {
  // should be called from a signaling thread
  ASSERT(signaling_thread()->IsCurrent());
  // TODO(mallinath): Refactor this when there is support for multiple cameras.
  // Register the VideoCapture Module. Only ssrc 0 is supported.
  // TODO(mallinath): Fix SetCaptureDevice.
  video_channel_->SetCaptureDevice(0, camera);
  // Actually associate the video capture module with the ViE channel.
  channel_manager_->SetVideoOptions("");
}
// MediaProviderInterface implementation: attaches |renderer| for local
// video preview. |name| is currently unused (single-stream support only).
void WebRtcSession::SetLocalRenderer(const std::string& name,
                                     cricket::VideoRenderer* renderer) {
  ASSERT(signaling_thread()->IsCurrent());
  // TODO(mallinath): Fix SetLocalRenderer.
  video_channel_->SetLocalRenderer(0, renderer);
}
// MediaProviderInterface implementation: attaches |renderer| for the
// remote video stream. |name| is currently unused.
void WebRtcSession::SetRemoteRenderer(const std::string& name,
                                      cricket::VideoRenderer* renderer) {
  ASSERT(signaling_thread()->IsCurrent());
  // TODO(mallinath): Only the ssrc = 0 is supported at the moment.
  // Only one channel.
  video_channel_->SetRenderer(0, renderer);
}
// SessionDescriptionProvider implementation: builds a local offer from
// |options| and installs it as this session's local description.
// Returns the stored offer (owned by the session, not the caller).
const cricket::SessionDescription* WebRtcSession::ProvideOffer(
    const cricket::MediaSessionOptions& options) {
  // TODO(mallinath) - Sanity check for options.
  cricket::SessionDescription* offer(
      session_desc_factory_.CreateOffer(options, local_description()));
  set_local_description(offer);
  return offer;
}
// SessionDescriptionProvider implementation: installs |remote_offer| as
// the remote description and feeds |remote_candidates| to the transports.
const cricket::SessionDescription* WebRtcSession::SetRemoteSessionDescription(
    const cricket::SessionDescription* remote_offer,
    const std::vector<cricket::Candidate>& remote_candidates) {
  // NOTE(review): const_cast because set_remote_description() takes a
  // non-const pointer; presumably BaseSession assumes ownership — confirm.
  set_remote_description(
      const_cast<cricket::SessionDescription*>(remote_offer));
  SetRemoteCandidates(remote_candidates);
  return remote_offer;
}
// SessionDescriptionProvider implementation: builds an answer to the
// current remote description and installs it as the local description.
const cricket::SessionDescription* WebRtcSession::ProvideAnswer(
    const cricket::MediaSessionOptions& options) {
  cricket::SessionDescription* answer(
      session_desc_factory_.CreateAnswer(remote_description(), options,
                                         local_description()));
  set_local_description(answer);
  return answer;
}
void WebRtcSession::NegotiationDone() {
// SetState of session is called after session receives both local and
// remote descriptions. State transition will happen only when session
// is in INIT state.
if (state() == STATE_INIT) {
SetState(STATE_SENTINITIATE);
SetState(STATE_RECEIVEDACCEPT);
// Enabling voice and video channel.
voice_channel_->Enable(true);
video_channel_->Enable(true);
}
const cricket::ContentInfo* audio_info =
cricket::GetFirstAudioContent(local_description());
if (audio_info) {
const cricket::MediaContentDescription* audio_content =
static_cast<const cricket::MediaContentDescription*>(
audio_info->description);
// Since channels are currently not supporting multiple send streams,
// we can remove stream from a session by muting it.
// TODO(mallinath) - Change needed when multiple send streams support
// is available.
voice_channel_->Mute(audio_content->streams().size() == 0);
}
const cricket::ContentInfo* video_info =
cricket::GetFirstVideoContent(local_description());
if (video_info) {
const cricket::MediaContentDescription* video_content =
static_cast<const cricket::MediaContentDescription*>(
video_info->description);
// Since channels are currently not supporting multiple send streams,
// we can remove stream from a session by muting it.
// TODO(mallinath) - Change needed when multiple send streams support
// is available.
video_channel_->Mute(video_content->streams().size() == 0);
}
}
} // namespace webrtc

View File

@ -1,126 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_WEBRTCSESSION_H_
#define TALK_APP_WEBRTC_WEBRTCSESSION_H_
#include <string>
#include <vector>
#include "talk/app/webrtc_dev/mediastreamprovider.h"
#include "talk/app/webrtc_dev/sessiondescriptionprovider.h"
#include "talk/app/webrtc_dev/webrtcsessionobserver.h"
#include "talk/base/sigslot.h"
#include "talk/base/thread.h"
#include "talk/p2p/base/session.h"
#include "talk/session/phone/mediasession.h"
namespace cricket {
class ChannelManager;
class Transport;
class VideoChannel;
class VoiceChannel;
}
namespace webrtc {
// A cricket::BaseSession that owns the voice and video channels for a
// single peer connection and mediates between the signaling layer
// (SessionDescriptionProvider) and the media layer (MediaProviderInterface).
class WebRtcSession : public cricket::BaseSession,
                      public MediaProviderInterface,
                      public SessionDescriptionProvider {
 public:
  // Does not take ownership of any of the pointers; they must outlive
  // the session.
  WebRtcSession(cricket::ChannelManager* channel_manager,
                talk_base::Thread* signaling_thread,
                talk_base::Thread* worker_thread,
                cricket::PortAllocator* port_allocator);
  ~WebRtcSession();

  // Creates the voice/video channels; must succeed before use.
  bool Initialize();

  // |observer| is notified when local candidates are ready; not owned.
  void RegisterObserver(WebRtcSessionObserver* observer) {
    observer_ = observer;
  }

  const cricket::VoiceChannel* voice_channel() const {
    return voice_channel_.get();
  }

  const cricket::VideoChannel* video_channel() const {
    return video_channel_.get();
  }

  // Generic error message callback from WebRtcSession.
  // TODO(mallinath) - It may be necessary to supply error code as well.
  sigslot::signal0<> SignalError;

 private:
  // Implements SessionDescriptionProvider.
  virtual const cricket::SessionDescription* ProvideOffer(
      const cricket::MediaSessionOptions& options);
  virtual const cricket::SessionDescription* SetRemoteSessionDescription(
      const cricket::SessionDescription* remote_offer,
      const std::vector<cricket::Candidate>& remote_candidates);
  virtual const cricket::SessionDescription* ProvideAnswer(
      const cricket::MediaSessionOptions& options);
  virtual void NegotiationDone();

  // Implements MediaProviderInterface.
  virtual void SetCaptureDevice(const std::string& name,
                                VideoCaptureModule* camera);
  virtual void SetLocalRenderer(const std::string& name,
                                cricket::VideoRenderer* renderer);
  virtual void SetRemoteRenderer(const std::string& name,
                                 cricket::VideoRenderer* renderer);

  // Transport related callbacks, override from cricket::BaseSession.
  virtual void OnTransportRequestSignaling(cricket::Transport* transport);
  virtual void OnTransportConnecting(cricket::Transport* transport);
  virtual void OnTransportWritable(cricket::Transport* transport);
  virtual void OnTransportCandidatesReady(
      cricket::Transport* transport,
      const cricket::Candidates& candidates);
  virtual void OnTransportChannelGone(cricket::Transport* transport);

  // Creates channels for voice and video.
  bool CreateChannels();
  virtual void OnMessage(talk_base::Message* msg);
  // Stores at most one candidate per transport channel name.
  void InsertTransportCandidates(const cricket::Candidates& candidates);
  void Terminate();
  // Get candidate from the local candidates list by the name.
  bool CheckCandidate(const std::string& name);
  void SetRemoteCandidates(const cricket::Candidates& candidates);

 private:
  talk_base::scoped_ptr<cricket::VoiceChannel> voice_channel_;
  talk_base::scoped_ptr<cricket::VideoChannel> video_channel_;
  cricket::ChannelManager* channel_manager_;  // Not owned.
  // Local candidates, one per transport channel.
  cricket::Candidates local_candidates_;
  WebRtcSessionObserver* observer_;  // Not owned.
  cricket::MediaSessionDescriptionFactory session_desc_factory_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_WEBRTCSESSION_H_

View File

@ -1,135 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "gtest/gtest.h"
#include "talk/app/webrtc_dev/webrtcsession.h"
#include "talk/app/webrtc_dev/peerconnectionsignaling.h"
#include "talk/base/thread.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/p2p/client/fakeportallocator.h"
// Test observer that records every candidate the session reports.
class MockWebRtcSessionObserver : public webrtc::WebRtcSessionObserver {
 public:
  virtual void OnCandidatesReady(
      const std::vector<cricket::Candidate>& candidates) {
    // Append the whole batch to the running list.
    candidates_.insert(candidates_.end(),
                       candidates.begin(), candidates.end());
  }
  std::vector<cricket::Candidate> candidates_;
};
// Fixture that wires a WebRtcSession to a fake port allocator so channel
// creation and candidate gathering can be tested in-process.
class WebRtcSessionTest : public testing::Test {
 protected:
  virtual void SetUp() {
    // Both "threads" are the current thread, so everything runs inline.
    signaling_thread_ = talk_base::Thread::Current();
    worker_thread_ = talk_base::Thread::Current();
    channel_manager_.reset(new cricket::ChannelManager(worker_thread_));
    port_allocator_.reset(
        new cricket::FakePortAllocator(worker_thread_, NULL));
    desc_factory_.reset(
        new cricket::MediaSessionDescriptionFactory(channel_manager_.get()));
  }

  bool InitializeSession() {
    return session_.get()->Initialize();
  }

  // True if both media channels were created.
  bool CheckChannels() {
    return (session_->voice_channel() != NULL &&
            session_->video_channel() != NULL);
  }

  // Verifies that all four expected transport channels exist.
  // BUG FIX: the original was declared bool but fell off the end without
  // a return statement (undefined behavior); it now reports whether every
  // channel is present while still recording individual EXPECT failures.
  bool CheckTransportChannels() {
    const bool audio_rtp =
        session_->GetChannel(cricket::CN_AUDIO, "rtp") != NULL;
    const bool audio_rtcp =
        session_->GetChannel(cricket::CN_AUDIO, "rtcp") != NULL;
    const bool video_rtp =
        session_->GetChannel(cricket::CN_VIDEO, "video_rtp") != NULL;
    const bool video_rtcp =
        session_->GetChannel(cricket::CN_VIDEO, "video_rtcp") != NULL;
    EXPECT_TRUE(audio_rtp);
    EXPECT_TRUE(audio_rtcp);
    EXPECT_TRUE(video_rtp);
    EXPECT_TRUE(video_rtcp);
    return audio_rtp && audio_rtcp && video_rtp && video_rtcp;
  }

  // Creates and initializes the session under test.
  void Init() {
    ASSERT_TRUE(channel_manager_.get() != NULL);
    ASSERT_TRUE(session_.get() == NULL);
    EXPECT_TRUE(channel_manager_.get()->Init());
    // NOTE(review): arguments are passed (worker, signaling) while the
    // constructor declares (signaling, worker); harmless here since both
    // are the same thread — confirm.
    session_.reset(new webrtc::WebRtcSession(
        channel_manager_.get(), worker_thread_, signaling_thread_,
        port_allocator_.get()));
    session_->RegisterObserver(&observer_);
    desc_provider_ = session_.get();
    EXPECT_TRUE(InitializeSession());
  }

  void CreateOffer(uint32 ssrc) {
    cricket::MediaSessionOptions options;
    // TODO(mallinath) - Adding test cases for session.
    local_desc_ = desc_provider_->ProvideOffer(options);
    ASSERT_TRUE(local_desc_ != NULL);
  }

  void CreateAnswer(uint32 ssrc) {
    cricket::MediaSessionOptions options;
    // TODO(mallinath) - Adding test cases for session.
    remote_desc_ = desc_factory_->CreateAnswer(local_desc_, options);
    ASSERT_TRUE(remote_desc_ != NULL);
  }

  void SetRemoteContents() {
    desc_provider_->SetRemoteSessionDescription(
        remote_desc_, observer_.candidates_);
  }

  void NegotiationDone() {
    desc_provider_->NegotiationDone();
  }

  const cricket::SessionDescription* local_desc_;
  const cricket::SessionDescription* remote_desc_;
  talk_base::Thread* signaling_thread_;
  talk_base::Thread* worker_thread_;
  talk_base::scoped_ptr<cricket::PortAllocator> port_allocator_;
  talk_base::scoped_ptr<webrtc::WebRtcSession> session_;
  webrtc::SessionDescriptionProvider* desc_provider_;
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
  talk_base::scoped_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
  MockWebRtcSessionObserver observer_;
};
// Creates a session and verifies that the media channels, the transport
// channels, and one local candidate per transport channel (4 total)
// all appear.
TEST_F(WebRtcSessionTest, TestInitialize) {
  WebRtcSessionTest::Init();
  EXPECT_TRUE(CheckChannels());
  CheckTransportChannels();
  // Pump the message queue so the fake allocator can deliver candidates.
  talk_base::Thread::Current()->ProcessMessages(1000);
  EXPECT_EQ(4u, observer_.candidates_.size());
}
// TODO(mallinath) - Adding test cases for session.
// BUG FIX: gtest only skips tests whose name starts with "DISABLED_";
// the old "DISABLE_" prefix meant this unfinished test actually ran.
TEST_F(WebRtcSessionTest, DISABLED_TestOfferAnswer) {
  WebRtcSessionTest::Init();
  EXPECT_TRUE(CheckChannels());
  CheckTransportChannels();
  talk_base::Thread::Current()->ProcessMessages(1);
}

View File

@ -1,47 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_
#define TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_
#include <vector>
#include "talk/p2p/base/candidate.h"
namespace webrtc {
// Observer interface for WebRtcSession: notified once the session has
// gathered its full set of local candidates.
// FIX: parameter name typo "candiddates" corrected.
class WebRtcSessionObserver {
 public:
  virtual void OnCandidatesReady(
      const std::vector<cricket::Candidate>& candidates) = 0;

 protected:
  // Observers are not destroyed through this interface.
  virtual ~WebRtcSessionObserver() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_

View File

@ -1,90 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_BASE_REF_COUNT_H_
#define TALK_BASE_REF_COUNT_H_
#include <cstring>
#include "talk/base/criticalsection.h"
namespace talk_base {
// Reference count interface. Implementations (e.g. RefCountedObject)
// typically destroy themselves when the count reaches zero, so users
// must balance every AddRef() with a Release().
class RefCountInterface {
 public:
  virtual int AddRef() = 0;
  virtual int Release() = 0;
  // BUG FIX: a polymorphic interface needs a virtual destructor so that
  // deleting an implementation through RefCountInterface* is well-defined.
  virtual ~RefCountInterface() {}
};
// Mixes an atomic reference count into T. The object deletes itself when
// the count drops to zero, so instances must be heap-allocated.
// FIX: the base class T is now listed before ref_count_ in every
// mem-initializer list so the written order matches the actual
// initialization order (bases before members), eliminating -Wreorder
// compiler warnings.
template <class T>
class RefCountedObject : public T {
 public:
  RefCountedObject() : ref_count_(0) {
  }

  // Forwarding constructors for up to five base-class arguments.
  template<typename P>
  explicit RefCountedObject(P p) : T(p), ref_count_(0) {
  }

  template<typename P1, typename P2>
  RefCountedObject(P1 p1, P2 p2) : T(p1, p2), ref_count_(0) {
  }

  template<typename P1, typename P2, typename P3>
  RefCountedObject(P1 p1, P2 p2, P3 p3) : T(p1, p2, p3), ref_count_(0) {
  }

  template<typename P1, typename P2, typename P3, typename P4>
  RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4)
      : T(p1, p2, p3, p4), ref_count_(0) {
  }

  template<typename P1, typename P2, typename P3, typename P4, typename P5>
  RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5)
      : T(p1, p2, p3, p4, p5), ref_count_(0) {
  }

  // Returns the incremented count.
  virtual int AddRef() {
    return talk_base::AtomicOps::Increment(&ref_count_);
  }

  // Returns the decremented count; deletes the object when it hits zero.
  virtual int Release() {
    int count = talk_base::AtomicOps::Decrement(&ref_count_);
    if (!count) {
      delete this;
    }
    return count;
  }

 protected:
  int ref_count_;
};
} // namespace talk_base
#endif // TALK_BASE_REF_COUNT_H_

View File

@ -1,132 +0,0 @@
#ifndef TALK_BASE_SCOPED_REFPTR_H_
#define TALK_BASE_SCOPED_REFPTR_H_
// Originally these classes are copied from Chromium.
//
// A smart pointer class for reference counted objects. Use this class instead
// of calling AddRef and Release manually on a reference counted object to
// avoid common memory leaks caused by forgetting to Release an object
// reference. Sample usage:
//
// class MyFoo : public RefCounted<MyFoo> {
// ...
// };
//
// void some_function() {
// scoped_refptr<MyFoo> foo = new MyFoo();
// foo->Method(param);
// // |foo| is released when this function returns
// }
//
// void some_other_function() {
// scoped_refptr<MyFoo> foo = new MyFoo();
// ...
// foo = NULL; // explicitly releases |foo|
// ...
// if (foo)
// foo->Method(param);
// }
//
// The above examples show how scoped_refptr<T> acts like a pointer to T.
// Given two scoped_refptr<T> classes, it is also possible to exchange
// references between the two objects, like so:
//
// {
// scoped_refptr<MyFoo> a = new MyFoo();
// scoped_refptr<MyFoo> b;
//
// b.swap(a);
// // now, |b| references the MyFoo object, and |a| references NULL.
// }
//
// To make both |a| and |b| in the above example reference the same MyFoo
// object, simply use the assignment operator:
//
// {
// scoped_refptr<MyFoo> a = new MyFoo();
// scoped_refptr<MyFoo> b;
//
// b = a;
// // now, |a| and |b| each own a reference to the same MyFoo object.
// }
//
namespace talk_base {
// Smart pointer that manages the reference count of a T exposing
// AddRef()/Release(). Copying adds a reference; destruction and
// reassignment release one. See the usage examples above.
template <class T>
class scoped_refptr {
 public:
  scoped_refptr() : ptr_(NULL) {
  }

  scoped_refptr(T* p) : ptr_(p) {
    if (ptr_)
      ptr_->AddRef();
  }

  scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {
    if (ptr_)
      ptr_->AddRef();
  }

  // Converting copy (e.g. scoped_refptr<Derived> -> scoped_refptr<Base>).
  template <typename U>
  scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
    if (ptr_)
      ptr_->AddRef();
  }

  ~scoped_refptr() {
    if (ptr_)
      ptr_->Release();
  }

  T* get() const { return ptr_; }
  operator T*() const { return ptr_; }
  T* operator->() const { return ptr_; }

  // Relinquishes ownership WITHOUT touching the reference count: the
  // caller becomes responsible for the returned pointer (NULL if none)
  // and this object is left holding NULL.
  T* release() {
    T* released = ptr_;
    ptr_ = NULL;
    return released;
  }

  scoped_refptr<T>& operator=(T* p) {
    // AddRef the incoming pointer before releasing the old one so that
    // self-assignment cannot destroy the object.
    if (p)
      p->AddRef();
    if (ptr_)
      ptr_->Release();
    ptr_ = p;
    return *this;
  }

  scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
    return *this = r.ptr_;
  }

  template <typename U>
  scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
    return *this = r.get();
  }

  // Exchanges the held pointer with *pp; no reference counts change.
  void swap(T** pp) {
    T* tmp = ptr_;
    ptr_ = *pp;
    *pp = tmp;
  }

  void swap(scoped_refptr<T>& r) {
    swap(&r.ptr_);
  }

 protected:
  T* ptr_;
};
} // namespace talk_base
#endif // TALK_BASE_SCOPED_REFPTR_H_

View File

@ -1,378 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "talk/examples/peerconnection_client/conductor.h"
#include <utility>
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/examples/peerconnection_client/defaults.h"
#include "talk/base/common.h"
#include "talk/base/logging.h"
#include "talk/p2p/client/basicportallocator.h"
#include "talk/session/phone/videorendererfactory.h"
// Hooks this conductor up as the observer of both the signaling client
// and the UI. peer_id_ == -1 means "not in a call". Neither pointer is
// owned; both must outlive the conductor.
Conductor::Conductor(PeerConnectionClient* client, MainWindow* main_wnd)
  : peer_id_(-1),
    client_(client),
    main_wnd_(main_wnd) {
  client_->RegisterObserver(this);
  main_wnd->RegisterObserver(this);
}
// The PeerConnection must already be torn down (via Close() /
// DeletePeerConnection()) before the conductor is destroyed.
Conductor::~Conductor() {
  ASSERT(peer_connection_.get() == NULL);
}
// True while a PeerConnection instance exists (i.e. a call is in
// progress or being set up).
bool Conductor::connection_active() const {
  return peer_connection_.get() != NULL;
}
// Signs out of the signaling server and drops the current connection,
// if any.
void Conductor::Close() {
  client_->SignOut();
  DeletePeerConnection();
}
// Creates the PeerConnectionFactory and a PeerConnection for the next
// call. On any failure, shows an error, cleans up, and returns false.
bool Conductor::InitializePeerConnection() {
  ASSERT(peer_connection_factory_.get() == NULL);
  ASSERT(peer_connection_.get() == NULL);

  peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();
  if (!peer_connection_factory_.get()) {
    main_wnd_->MessageBox("Error",
                          "Failed to initialize PeerConnectionFactory", true);
    DeletePeerConnection();
    return false;
  }

  peer_connection_ = peer_connection_factory_->CreatePeerConnection(
      GetPeerConnectionString(), this);
  if (!peer_connection_.get()) {
    main_wnd_->MessageBox("Error",
                          "CreatePeerConnection failed", true);
    DeletePeerConnection();
    return false;
  }
  return true;
}
// Drops our references to the connection, its streams, and the factory
// (assigning NULL to a scoped_refptr releases it) and forgets the peer.
void Conductor::DeletePeerConnection() {
  peer_connection_ = NULL;
  active_streams_.clear();
  peer_connection_factory_ = NULL;
  peer_id_ = -1;
}
// Switches the window to the streaming UI unless the window is gone or
// it is already showing it. Requires an active connection.
void Conductor::EnsureStreamingUI() {
  ASSERT(peer_connection_.get() != NULL);
  if (main_wnd_->IsWindow() &&
      main_wnd_->current_ui() != MainWindow::STREAMING) {
    main_wnd_->SwitchToStreamingUI();
  }
}
//
// PeerConnectionObserver implementation.
//

// Forwarded to the UI thread; the callback shows an error message box.
void Conductor::OnError() {
  LOG(LS_ERROR) << __FUNCTION__;
  main_wnd_->QueueUIThreadCallback(PEER_CONNECTION_ERROR, NULL);
}
// A local signaling message is ready. Copy it to the heap and hand it to
// the UI thread, which takes ownership and deletes it in
// UIThreadCallback(SEND_MESSAGE_TO_PEER).
void Conductor::OnSignalingMessage(const std::string& msg) {
  LOG(INFO) << __FUNCTION__;
  std::string* msg_copy = new std::string(msg);
  main_wnd_->QueueUIThreadCallback(SEND_MESSAGE_TO_PEER, msg_copy);
}
// Called when a remote stream is added. AddRef keeps the stream alive
// across the thread hop; balanced by Release() in
// UIThreadCallback(NEW_STREAM_ADDED).
void Conductor::OnAddStream(webrtc::MediaStreamInterface* stream) {
  LOG(INFO) << __FUNCTION__ << " " << stream->label();
  stream->AddRef();
  main_wnd_->QueueUIThreadCallback(NEW_STREAM_ADDED,
                                   stream);
}
// Called when a remote stream is removed. AddRef keeps the stream alive
// across the thread hop; balanced by Release() in
// UIThreadCallback(STREAM_REMOVED).
void Conductor::OnRemoveStream(webrtc::MediaStreamInterface* stream) {
  LOG(INFO) << __FUNCTION__ << " " << stream->label();
  stream->AddRef();
  main_wnd_->QueueUIThreadCallback(STREAM_REMOVED,
                                   stream);
}
//
// PeerConnectionClientObserver implementation.
//

// Login to the signaling server completed: show the list of peers.
void Conductor::OnSignedIn() {
  LOG(INFO) << __FUNCTION__;
  main_wnd_->SwitchToPeerList(client_->peers());
}
// Server connection lost: drop any active call and return to the
// connect UI.
void Conductor::OnDisconnected() {
  LOG(INFO) << __FUNCTION__;

  DeletePeerConnection();

  if (main_wnd_->IsWindow())
    main_wnd_->SwitchToConnectUI();
}
// A new peer appeared on the server. |id| and |name| are unused here;
// we just refresh the visible peer list.
void Conductor::OnPeerConnected(int id, const std::string& name) {
  LOG(INFO) << __FUNCTION__;
  // Refresh the list if we're showing it.
  if (main_wnd_->current_ui() == MainWindow::LIST_PEERS)
    main_wnd_->SwitchToPeerList(client_->peers());
}
// A peer left the server. If it was our call partner, tear down the call
// on the UI thread; otherwise just refresh the peer list if visible.
void Conductor::OnPeerDisconnected(int id) {
  LOG(INFO) << __FUNCTION__;
  if (id != peer_id_) {
    // Refresh the list if we're showing it.
    if (main_wnd_->current_ui() == MainWindow::LIST_PEERS)
      main_wnd_->SwitchToPeerList(client_->peers());
    return;
  }
  LOG(INFO) << "Our peer disconnected";
  main_wnd_->QueueUIThreadCallback(PEER_CONNECTION_CLOSED, NULL);
}
// Routes a signaling message from |peer_id|. Lazily creates the
// PeerConnection on the first message (an incoming offer); messages from
// a peer other than the current one are rejected.
void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
  ASSERT(peer_id_ == peer_id || peer_id_ == -1);
  ASSERT(!message.empty());

  if (!peer_connection_.get()) {
    ASSERT(peer_id_ == -1);
    peer_id_ = peer_id;

    // Got an offer. Give it to the PeerConnection instance.
    // Once processed, we will get a callback to OnSignalingMessage with
    // our 'answer' which we'll send to the peer.
    LOG(INFO) << "Got an offer from our peer: " << peer_id;
    if (!InitializePeerConnection()) {
      LOG(LS_ERROR) << "Failed to initialize our PeerConnection instance";
      client_->SignOut();
      return;
    }
  } else if (peer_id != peer_id_) {
    ASSERT(peer_id_ != -1);
    LOG(WARNING) << "Received an offer from a peer while already in a "
                    "conversation with a different peer.";
    return;
  }

  peer_connection_->ProcessSignalingMessage(message);
}
// An async send to the server finished (|err| is ignored here).
void Conductor::OnMessageSent(int err) {
  // Process the next pending message if any.
  main_wnd_->QueueUIThreadCallback(SEND_MESSAGE_TO_PEER, NULL);
}
//
// MainWndCallback implementation.
//

// UI request to log in to the signaling server. Returns false when
// already connected or the connection attempt could not be started.
bool Conductor::StartLogin(const std::string& server, int port) {
  if (client_->is_connected())
    return false;

  const bool started = client_->Connect(server, port, GetPeerName());
  if (!started) {
    main_wnd_->MessageBox("Error", ("Failed to connect to " + server).c_str(),
                          true);
  }
  return started;
}
// UI request to sign out of the signaling server, if connected.
void Conductor::DisconnectFromServer() {
  if (client_->is_connected())
    client_->SignOut();
}
// UI request to call |peer_id|. Only one simultaneous peer connection
// is supported.
void Conductor::ConnectToPeer(int peer_id) {
  ASSERT(peer_id_ == -1);
  ASSERT(peer_id != -1);

  if (peer_connection_.get()) {
    main_wnd_->MessageBox("Error",
        "We only support connecting to one peer at a time", true);
    return;
  }

  if (InitializePeerConnection()) {
    peer_id_ = peer_id;
    // Sending our streams triggers the offer/answer exchange.
    AddStreams();
  } else {
    main_wnd_->MessageBox("Error", "Failed to initialize PeerConnection", true);
  }
}
// Enumerates the capture devices reported by VideoCaptureFactory and
// returns the first one that opens successfully (a NULL refptr if none).
talk_base::scoped_refptr<webrtc::VideoCaptureModule>
Conductor::OpenVideoCaptureDevice() {
  webrtc::VideoCaptureModule::DeviceInfo* device_info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0));
  talk_base::scoped_refptr<webrtc::VideoCaptureModule> video_device;

  const size_t kMaxDeviceNameLength = 128;
  const size_t kMaxUniqueIdLength = 256;
  uint8 device_name[kMaxDeviceNameLength];
  uint8 unique_id[kMaxUniqueIdLength];

  const size_t device_count = device_info->NumberOfDevices();
  for (size_t i = 0; i < device_count; ++i) {
    // Get the name of the video capture device.
    device_info->GetDeviceName(i, device_name, kMaxDeviceNameLength, unique_id,
                               kMaxUniqueIdLength);
    // Try to open this device.
    video_device =
        webrtc::VideoCaptureFactory::Create(0, unique_id);
    if (video_device.get())
      break;
  }
  // We own the DeviceInfo returned by CreateDeviceInfo.
  delete device_info;
  return video_device;
}
// Creates the local media stream (one audio track plus one video track
// fed by the first openable capture device) and adds it to the
// PeerConnection. Idempotent: does nothing if the stream already exists.
void Conductor::AddStreams() {
  if (active_streams_.find(kStreamLabel) != active_streams_.end())
    return;  // Already added.

  talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
      peer_connection_factory_->CreateLocalAudioTrack(kAudioLabel, NULL));

  talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
      peer_connection_factory_->CreateLocalVideoTrack(
          kVideoLabel, OpenVideoCaptureDevice()));
  // Show our own video in the local renderer.
  video_track->SetRenderer(main_wnd_->local_renderer());

  talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> stream =
      peer_connection_factory_->CreateLocalMediaStream(kStreamLabel);
  stream->AddTrack(audio_track);
  stream->AddTrack(video_track);
  peer_connection_->AddStream(stream);
  // Commit so the added stream takes effect on the connection.
  peer_connection_->CommitStreamChanges();

  typedef std::pair<std::string,
                    talk_base::scoped_refptr<webrtc::MediaStreamInterface> >
      MediaStreamPair;
  active_streams_.insert(MediaStreamPair(stream->label(), stream));
  main_wnd_->SwitchToStreamingUI();
}
// Hangs up the current call (if any) and returns to the peer list.
void Conductor::DisconnectFromCurrentPeer() {
  LOG(INFO) << __FUNCTION__;
  if (peer_connection_.get()) {
    client_->SendHangUp(peer_id_);
    DeletePeerConnection();
  }

  if (main_wnd_->IsWindow())
    main_wnd_->SwitchToPeerList(client_->peers());
}
// Runs on the UI thread and dispatches the work queued by the observer
// callbacks above. |data| ownership depends on |msg_id| (see cases).
void Conductor::UIThreadCallback(int msg_id, void* data) {
  switch (msg_id) {
    case PEER_CONNECTION_CLOSED:
      LOG(INFO) << "PEER_CONNECTION_CLOSED";
      DeletePeerConnection();

      ASSERT(active_streams_.empty());

      if (main_wnd_->IsWindow()) {
        if (client_->is_connected()) {
          main_wnd_->SwitchToPeerList(client_->peers());
        } else {
          main_wnd_->SwitchToConnectUI();
        }
      } else {
        DisconnectFromServer();
      }
      break;

    case SEND_MESSAGE_TO_PEER: {
      LOG(INFO) << "SEND_MESSAGE_TO_PEER";
      // |data| (possibly NULL) is the heap string from OnSignalingMessage;
      // we own it from here and delete it after sending.
      std::string* msg = reinterpret_cast<std::string*>(data);
      if (msg) {
        // For convenience, we always run the message through the queue.
        // This way we can be sure that messages are sent to the server
        // in the same order they were signaled without much hassle.
        pending_messages_.push_back(msg);
      }

      if (!pending_messages_.empty() && !client_->IsSendingMessage()) {
        msg = pending_messages_.front();
        pending_messages_.pop_front();

        if (!client_->SendToPeer(peer_id_, *msg) && peer_id_ != -1) {
          LOG(LS_ERROR) << "SendToPeer failed";
          DisconnectFromServer();
        }
        delete msg;
      }

      if (!peer_connection_.get())
        peer_id_ = -1;

      break;
    }

    case PEER_CONNECTION_ADDSTREAMS:
      AddStreams();
      break;

    case PEER_CONNECTION_ERROR:
      main_wnd_->MessageBox("Error", "an unknown error occurred", true);
      break;

    case NEW_STREAM_ADDED: {
      webrtc::MediaStreamInterface* stream =
          reinterpret_cast<webrtc::MediaStreamInterface*>(
          data);
      talk_base::scoped_refptr<webrtc::VideoTracks> tracks =
          stream->video_tracks();
      // Route every video track of the new remote stream to the remote
      // renderer.
      for (size_t i = 0; i < tracks->count(); ++i) {
        webrtc::VideoTrackInterface* track = tracks->at(i);
        LOG(INFO) << "Setting video renderer for track: " << track->label();
        track->SetRenderer(main_wnd_->remote_renderer());
      }
      // If we haven't shared any streams with this peer (we're the receiver)
      // then do so now.
      if (active_streams_.empty())
        AddStreams();
      // Balances the AddRef in OnAddStream.
      stream->Release();
      break;
    }

    case STREAM_REMOVED: {
      webrtc::MediaStreamInterface* stream =
          reinterpret_cast<webrtc::MediaStreamInterface*>(
          data);
      active_streams_.erase(stream->label());
      // Balances the AddRef in OnRemoveStream.
      stream->Release();
      if (active_streams_.empty()) {
        LOG(INFO) << "All streams have been closed.";
        main_wnd_->QueueUIThreadCallback(PEER_CONNECTION_CLOSED, NULL);
      }
      break;
    }

    default:
      ASSERT(false);
      break;
  }
}

View File

@ -1,116 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PEERCONNECTION_SAMPLES_CLIENT_CONDUCTOR_H_
#define PEERCONNECTION_SAMPLES_CLIENT_CONDUCTOR_H_
#pragma once
#include <deque>
#include <map>
#include <set>
#include <string>
#include "talk/examples/peerconnection_client/main_wnd.h"
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/app/webrtc_dev/peerconnection.h"
#include "talk/base/scoped_ptr.h"
namespace webrtc {
class VideoCaptureModule;
} // namespace webrtc
namespace cricket {
class VideoRenderer;
} // namespace cricket
// Conductor glues together the signaling client (PeerConnectionClient),
// the WebRTC PeerConnection, and the UI (MainWindow).  It owns the
// PeerConnection's lifetime and forwards events between the three.
class Conductor
  : public webrtc::PeerConnectionObserver,
    public PeerConnectionClientObserver,
    public MainWndCallback {
 public:
  // Message ids posted back to the UI thread through
  // MainWindow::QueueUIThreadCallback and handled by UIThreadCallback.
  enum CallbackID {
    MEDIA_CHANNELS_INITIALIZED = 1,
    PEER_CONNECTION_CLOSED,
    SEND_MESSAGE_TO_PEER,
    PEER_CONNECTION_ADDSTREAMS,
    PEER_CONNECTION_ERROR,
    NEW_STREAM_ADDED,
    STREAM_REMOVED,
  };
  Conductor(PeerConnectionClient* client, MainWindow* main_wnd);
  ~Conductor();
  // True while a PeerConnection instance exists (call being set up or
  // in progress).
  bool connection_active() const;
  virtual void Close();
 protected:
  bool InitializePeerConnection();
  void DeletePeerConnection();
  void EnsureStreamingUI();
  void AddStreams();
  talk_base::scoped_refptr<webrtc::VideoCaptureModule> OpenVideoCaptureDevice();
  //
  // PeerConnectionObserver implementation.
  //
  virtual void OnError();
  virtual void OnMessage(const std::string& msg) {}
  virtual void OnSignalingMessage(const std::string& msg);
  virtual void OnStateChange(Readiness state) {}
  virtual void OnAddStream(webrtc::MediaStreamInterface* stream);
  virtual void OnRemoveStream(webrtc::MediaStreamInterface* stream);
  //
  // PeerConnectionClientObserver implementation.
  //
  virtual void OnSignedIn();
  virtual void OnDisconnected();
  virtual void OnPeerConnected(int id, const std::string& name);
  virtual void OnPeerDisconnected(int id);
  virtual void OnMessageFromPeer(int peer_id, const std::string& message);
  virtual void OnMessageSent(int err);
  //
  // MainWndCallback implementation.
  //
  virtual bool StartLogin(const std::string& server, int port);
  virtual void DisconnectFromServer();
  virtual void ConnectToPeer(int peer_id);
  virtual void DisconnectFromCurrentPeer();
  virtual void UIThreadCallback(int msg_id, void* data);
 protected:
  // Id of the remote peer for the current call; -1 when idle (see
  // UIThreadCallback, which resets it).
  int peer_id_;
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
  talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
      peer_connection_factory_;
  // Non-owning pointers supplied at construction.
  PeerConnectionClient* client_;
  MainWindow* main_wnd_;
  // FIFO of heap-allocated signaling messages awaiting transmission;
  // entries are deleted by UIThreadCallback after they are sent.
  std::deque<std::string*> pending_messages_;
  // Local streams currently shared with the peer, keyed by stream label.
  std::map<std::string, talk_base::scoped_refptr<webrtc::MediaStreamInterface> >
      active_streams_;
};
#endif // PEERCONNECTION_SAMPLES_CLIENT_CONDUCTOR_H_

View File

@ -1,58 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "talk/examples/peerconnection_client/defaults.h"
#include <stdlib.h>
#include <string.h>
#ifdef WIN32
#include <winsock2.h>
#else
#include <unistd.h>
#endif
#include "talk/base/common.h"
const char kAudioLabel[] = "audio_label";
const char kVideoLabel[] = "video_label";
const char kStreamLabel[] = "stream_label";
const uint16 kDefaultServerPort = 8888;
// Reads |env_var_name| from the environment.  Returns |default_value|
// when the variable is unset or set to the empty string.
std::string GetEnvVarOrDefault(const char* env_var_name,
                               const char* default_value) {
  const char* raw = getenv(env_var_name);
  if (raw != NULL && raw[0] != '\0')
    return std::string(raw);
  return std::string(default_value);
}
// Returns the ICE server configuration string; the WEBRTC_CONNECT
// environment variable overrides the built-in Google STUN server.
std::string GetPeerConnectionString() {
  static const char kDefaultStunServer[] = "STUN stun.l.google.com:19302";
  return GetEnvVarOrDefault("WEBRTC_CONNECT", kDefaultStunServer);
}
// Signaling server host name; the WEBRTC_SERVER environment variable
// wins over the "localhost" fallback.
std::string GetDefaultServerName() {
  const char* const kEnvName = "WEBRTC_SERVER";
  const char* const kFallback = "localhost";
  return GetEnvVarOrDefault(kEnvName, kFallback);
}
// Builds this client's display name in the form "<user>@<host>".  The
// user part comes from the USERNAME environment variable (fallback
// "user"); the host part from gethostname() (fallback "host").
std::string GetPeerName() {
  char host[256];
  if (gethostname(host, ARRAY_SIZE(host)) != 0) {
    strcpy(host, "host");
  }
  std::string name = GetEnvVarOrDefault("USERNAME", "user");
  name.append("@");
  name.append(host);
  return name;
}

View File

@ -1,30 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
#define PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
#pragma once
#include <string>
#include "talk/base/basictypes.h"
// Labels used when the local audio/video tracks and media stream are
// created (defined in defaults.cc).
extern const char kAudioLabel[];
extern const char kVideoLabel[];
extern const char kStreamLabel[];
// Port the example signaling server listens on by default.
extern const uint16 kDefaultServerPort;
// Returns the value of |env_var_name|, or |default_value| when the
// variable is unset or empty.
std::string GetEnvVarOrDefault(const char* env_var_name,
                               const char* default_value);
// ICE server configuration string (WEBRTC_CONNECT overrides).
std::string GetPeerConnectionString();
// Signaling server host name (WEBRTC_SERVER overrides "localhost").
std::string GetDefaultServerName();
// "<user>@<host>" display name for this client.
std::string GetPeerName();
#endif // PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_

View File

@ -1,85 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <gtk/gtk.h>
#include "talk/examples/peerconnection_client/conductor.h"
#include "talk/examples/peerconnection_client/linux/main_wnd.h"
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "talk/base/thread.h"
// Socket server that also pumps the GTK message loop from inside
// Wait(), so the UI and the networking share a single thread.
class CustomSocketServer : public talk_base::PhysicalSocketServer {
 public:
  CustomSocketServer(talk_base::Thread* thread, GtkMainWnd* wnd)
      : thread_(thread), wnd_(wnd), conductor_(NULL), client_(NULL) {}
  virtual ~CustomSocketServer() {}
  // Non-owning; the client and conductor must outlive this object.
  void set_client(PeerConnectionClient* client) { client_ = client; }
  void set_conductor(Conductor* conductor) { conductor_ = conductor; }
  // Override so that we can also pump the GTK message loop.
  virtual bool Wait(int cms, bool process_io) {
    // Pump GTK events.
    // TODO(tommi): We really should move either the socket server or UI to a
    // different thread. Alternatively we could look at merging the two loops
    // by implementing a dispatcher for the socket server and/or use
    // g_main_context_set_poll_func.
    while (gtk_events_pending())
      gtk_main_iteration();
    // Once the window is closed and all connections are torn down, quit
    // the thread's run loop so main() can return.
    if (!wnd_->IsWindow() && !conductor_->connection_active() &&
        client_ != NULL && !client_->is_connected()) {
      thread_->Quit();
    }
    // Wait 0 ms regardless of |cms| so control returns promptly and the
    // GTK loop above keeps being serviced.
    return talk_base::PhysicalSocketServer::Wait(0/*cms == -1 ? 1 : cms*/,
                                                 process_io);
  }
 protected:
  talk_base::Thread* thread_;
  GtkMainWnd* wnd_;
  Conductor* conductor_;
  PeerConnectionClient* client_;
};
// Entry point for the Linux client: creates the GTK window, installs a
// CustomSocketServer on the current talk_base thread so GTK events and
// sockets are serviced together, and runs until the window closes and
// all connections are down.
int main(int argc, char* argv[]) {
  gtk_init(&argc, &argv);
  g_type_init();
  g_thread_init(NULL);
  GtkMainWnd wnd;
  wnd.Create();
  talk_base::AutoThread auto_thread;
  talk_base::Thread* thread = talk_base::Thread::Current();
  CustomSocketServer socket_server(thread, &wnd);
  thread->set_socketserver(&socket_server);
  // Must be constructed after we set the socketserver.
  PeerConnectionClient client;
  Conductor conductor(&client, &wnd);
  socket_server.set_client(&client);
  socket_server.set_conductor(&conductor);
  thread->Run();
  // gtk_main();
  wnd.Destroy();
  // Detach the socket server before it goes out of scope.
  thread->set_socketserver(NULL);
  // TODO(tommi): Run the Gtk main loop to tear down the connection.
  //while (gtk_events_pending()) {
  //  gtk_main_iteration();
  //}
  return 0;
}

View File

@ -1,464 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "talk/examples/peerconnection_client/linux/main_wnd.h"
#include <gdk/gdkkeysyms.h>
#include <gtk/gtk.h>
#include <stddef.h>
#include "talk/examples/peerconnection_client/defaults.h"
#include "talk/base/common.h"
#include "talk/base/logging.h"
#include "talk/base/stringutils.h"
using talk_base::sprintfn;
namespace {
//
// Simple static functions that simply forward the callback to the
// GtkMainWnd instance.
//
gboolean OnDestroyedCallback(GtkWidget* widget, GdkEvent* event,
                             gpointer data) {
  reinterpret_cast<GtkMainWnd*>(data)->OnDestroyed(widget, event);
  return FALSE;  // FALSE lets GTK continue with default destroy handling.
}
void OnClickedCallback(GtkWidget* widget, gpointer data) {
  reinterpret_cast<GtkMainWnd*>(data)->OnClicked(widget);
}
gboolean OnKeyPressCallback(GtkWidget* widget, GdkEventKey* key,
                            gpointer data) {
  reinterpret_cast<GtkMainWnd*>(data)->OnKeyPress(widget, key);
  return false;  // Do not stop further propagation of the key event.
}
void OnRowActivatedCallback(GtkTreeView* tree_view, GtkTreePath* path,
                            GtkTreeViewColumn* column, gpointer data) {
  reinterpret_cast<GtkMainWnd*>(data)->OnRowActivated(tree_view, path, column);
}
// Creates a tree view, that we use to display the list of peers.
// Column 0 holds the display string, column 1 the peer id.
void InitializeList(GtkWidget* list) {
  GtkCellRenderer* renderer = gtk_cell_renderer_text_new();
  GtkTreeViewColumn* column = gtk_tree_view_column_new_with_attributes(
      "List Items", renderer, "text", 0, NULL);
  gtk_tree_view_append_column(GTK_TREE_VIEW(list), column);
  GtkListStore* store = gtk_list_store_new(2, G_TYPE_STRING, G_TYPE_INT);
  gtk_tree_view_set_model(GTK_TREE_VIEW(list), GTK_TREE_MODEL(store));
  g_object_unref(store);  // The view now holds its own reference.
}
// Adds an entry to a tree view.  |value| is the peer id stored in the
// hidden second column (-1 marks non-selectable header rows).
void AddToList(GtkWidget* list, const gchar* str, int value) {
  GtkListStore* store = GTK_LIST_STORE(
      gtk_tree_view_get_model(GTK_TREE_VIEW(list)));
  GtkTreeIter iter;
  gtk_list_store_append(store, &iter);
  gtk_list_store_set(store, &iter, 0, str, 1, value, -1);
}
// Bundles a MainWndCallback invocation so it can cross to the GTK main
// loop through g_idle_add; deleted by HandleUIThreadCallback.
struct UIThreadCallbackData {
  explicit UIThreadCallbackData(MainWndCallback* cb, int id, void* d)
      : callback(cb), msg_id(id), data(d) {}
  MainWndCallback* callback;
  int msg_id;
  void* data;
};
// g_idle_add trampoline: runs the queued callback on the UI thread.
gboolean HandleUIThreadCallback(gpointer data) {
  UIThreadCallbackData* cb_data = reinterpret_cast<UIThreadCallbackData*>(data);
  cb_data->callback->UIThreadCallback(cb_data->msg_id, cb_data->data);
  delete cb_data;
  return false;  // One-shot idle handler.
}
// g_idle_add trampoline used by the video renderer to repaint.
gboolean Redraw(gpointer data) {
  GtkMainWnd* wnd = reinterpret_cast<GtkMainWnd*>(data);
  wnd->OnRedraw();
  return false;  // One-shot idle handler.
}
}  // end anonymous
//
// GtkMainWnd implementation.
//
// All widget pointers start NULL; which ones are non-NULL doubles as
// the UI-state indicator (see current_ui).
GtkMainWnd::GtkMainWnd()
    : window_(NULL), draw_area_(NULL), vbox_(NULL), server_edit_(NULL),
      port_edit_(NULL), peer_list_(NULL), callback_(NULL),
      server_("localhost") {
  // Pre-format the default port for the connect dialog's text entry.
  char buffer[10];
  sprintfn(buffer, sizeof(buffer), "%i", kDefaultServerPort);
  port_ = buffer;
}
GtkMainWnd::~GtkMainWnd() {
  ASSERT(!IsWindow());
}
void GtkMainWnd::RegisterObserver(MainWndCallback* callback) {
  callback_ = callback;
}
bool GtkMainWnd::IsWindow() {
  return window_ != NULL && GTK_IS_WINDOW(window_);
}
// Shows a modal message dialog and blocks until it is dismissed.
void GtkMainWnd::MessageBox(const char* caption, const char* text,
                            bool is_error) {
  GtkWidget* dialog = gtk_message_dialog_new(GTK_WINDOW(window_),
      GTK_DIALOG_DESTROY_WITH_PARENT,
      is_error ? GTK_MESSAGE_ERROR : GTK_MESSAGE_INFO,
      GTK_BUTTONS_CLOSE, "%s", text);
  gtk_window_set_title(GTK_WINDOW(dialog), caption);
  gtk_dialog_run(GTK_DIALOG(dialog));
  gtk_widget_destroy(dialog);
}
// Infers the current UI state from which widget is alive.
MainWindow::UI GtkMainWnd::current_ui() {
  if (vbox_)
    return CONNECT_TO_SERVER;
  if (peer_list_)
    return LIST_PEERS;
  return STREAMING;
}
// Lazily creates the wrapped renderer for the local preview.
webrtc::VideoRendererWrapperInterface* GtkMainWnd::local_renderer() {
  if (!local_renderer_wrapper_.get())
    local_renderer_wrapper_ =
        webrtc::CreateVideoRenderer(new VideoRenderer(this));
  return local_renderer_wrapper_.get();
}
// Lazily creates the wrapped renderer for the remote video.
webrtc::VideoRendererWrapperInterface* GtkMainWnd::remote_renderer() {
  if (!remote_renderer_wrapper_.get())
    remote_renderer_wrapper_ =
        webrtc::CreateVideoRenderer(new VideoRenderer(this));
  return remote_renderer_wrapper_.get();
}
// Marshals a callback onto the GTK main loop; the UIThreadCallbackData
// is freed by HandleUIThreadCallback after dispatch.
void GtkMainWnd::QueueUIThreadCallback(int msg_id, void* data) {
  g_idle_add(HandleUIThreadCallback,
             new UIThreadCallbackData(callback_, msg_id, data));
}
// Creates and shows the top-level window with the Connect UI active.
bool GtkMainWnd::Create() {
  ASSERT(window_ == NULL);
  window_ = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  if (window_) {
    gtk_window_set_position(GTK_WINDOW(window_), GTK_WIN_POS_CENTER);
    gtk_window_set_default_size(GTK_WINDOW(window_), 640, 480);
    gtk_window_set_title(GTK_WINDOW(window_), "PeerConnection client");
    g_signal_connect(G_OBJECT(window_), "delete-event",
                     G_CALLBACK(&OnDestroyedCallback), this);
    g_signal_connect(window_, "key-press-event", G_CALLBACK(OnKeyPressCallback),
                     this);
    SwitchToConnectUI();
  }
  return window_ != NULL;
}
bool GtkMainWnd::Destroy() {
  if (!IsWindow())
    return false;
  gtk_widget_destroy(window_);
  window_ = NULL;
  return true;
}
// Replaces the window contents with the server/port entry form.
// Destroys the peer list if it was showing; creating |vbox_| is what
// flips current_ui() to CONNECT_TO_SERVER.
void GtkMainWnd::SwitchToConnectUI() {
  LOG(INFO) << __FUNCTION__;
  ASSERT(IsWindow());
  ASSERT(vbox_ == NULL);
  gtk_container_set_border_width(GTK_CONTAINER(window_), 10);
  if (peer_list_) {
    gtk_widget_destroy(peer_list_);
    peer_list_ = NULL;
  }
  vbox_ = gtk_vbox_new(FALSE, 5);
  GtkWidget* valign = gtk_alignment_new(0, 1, 0, 0);
  gtk_container_add(GTK_CONTAINER(vbox_), valign);
  gtk_container_add(GTK_CONTAINER(window_), vbox_);
  GtkWidget* hbox = gtk_hbox_new(FALSE, 5);
  GtkWidget* label = gtk_label_new("Server");
  gtk_container_add(GTK_CONTAINER(hbox), label);
  server_edit_ = gtk_entry_new();
  gtk_entry_set_text(GTK_ENTRY(server_edit_), server_.c_str());
  gtk_widget_set_size_request(server_edit_, 400, 30);
  gtk_container_add(GTK_CONTAINER(hbox), server_edit_);
  port_edit_ = gtk_entry_new();
  gtk_entry_set_text(GTK_ENTRY(port_edit_), port_.c_str());
  gtk_widget_set_size_request(port_edit_, 70, 30);
  gtk_container_add(GTK_CONTAINER(hbox), port_edit_);
  GtkWidget* button = gtk_button_new_with_label("Connect");
  gtk_widget_set_size_request(button, 70, 30);
  g_signal_connect(button, "clicked", G_CALLBACK(OnClickedCallback), this);
  gtk_container_add(GTK_CONTAINER(hbox), button);
  GtkWidget* halign = gtk_alignment_new(1, 0, 0, 0);
  gtk_container_add(GTK_CONTAINER(halign), hbox);
  gtk_box_pack_start(GTK_BOX(vbox_), halign, FALSE, FALSE, 0);
  gtk_widget_show_all(window_);
}
// Shows the peer list, creating it on first use and otherwise just
// repopulating the existing list store.  Tears down whichever of the
// connect form or the streaming surface was showing.
void GtkMainWnd::SwitchToPeerList(const Peers& peers) {
  LOG(INFO) << __FUNCTION__;
  // Clean up buffers from a potential previous session.
  local_renderer_wrapper_ = NULL;
  remote_renderer_wrapper_ = NULL;
  if (!peer_list_) {
    gtk_container_set_border_width(GTK_CONTAINER(window_), 0);
    if (vbox_) {
      gtk_widget_destroy(vbox_);
      vbox_ = NULL;
      server_edit_ = NULL;
      port_edit_ = NULL;
    } else if (draw_area_) {
      gtk_widget_destroy(draw_area_);
      draw_area_ = NULL;
      draw_buffer_.reset();
    }
    peer_list_ = gtk_tree_view_new();
    g_signal_connect(peer_list_, "row-activated",
                     G_CALLBACK(OnRowActivatedCallback), this);
    gtk_tree_view_set_headers_visible(GTK_TREE_VIEW(peer_list_), FALSE);
    InitializeList(peer_list_);
    gtk_container_add(GTK_CONTAINER(window_), peer_list_);
    gtk_widget_show_all(window_);
  } else {
    GtkListStore* store =
        GTK_LIST_STORE(gtk_tree_view_get_model(GTK_TREE_VIEW(peer_list_)));
    gtk_list_store_clear(store);
  }
  // Header row gets id -1 so activating it never dials a peer.
  AddToList(peer_list_, "List of currently connected peers:", -1);
  for (Peers::const_iterator i = peers.begin(); i != peers.end(); ++i)
    AddToList(peer_list_, i->second.c_str(), i->first);
}
// Replaces the peer list with the video drawing surface; creating
// |draw_area_| is what flips current_ui() to STREAMING.
void GtkMainWnd::SwitchToStreamingUI() {
  LOG(INFO) << __FUNCTION__;
  ASSERT(draw_area_ == NULL);
  gtk_container_set_border_width(GTK_CONTAINER(window_), 0);
  if (peer_list_) {
    gtk_widget_destroy(peer_list_);
    peer_list_ = NULL;
  }
  draw_area_ = gtk_drawing_area_new();
  gtk_container_add(GTK_CONTAINER(window_), draw_area_);
  gtk_widget_show_all(window_);
}
// "delete-event" handler: notifies the app and clears all widget
// pointers (GTK destroys the child widgets along with the window).
void GtkMainWnd::OnDestroyed(GtkWidget* widget, GdkEvent* event) {
  callback_->Close();
  window_ = NULL;
  draw_area_ = NULL;
  vbox_ = NULL;
  server_edit_ = NULL;
  port_edit_ = NULL;
  peer_list_ = NULL;
}
// "Connect" button handler: reads the form fields and starts login.
void GtkMainWnd::OnClicked(GtkWidget* widget) {
  server_ = gtk_entry_get_text(GTK_ENTRY(server_edit_));
  port_ = gtk_entry_get_text(GTK_ENTRY(port_edit_));
  int port = port_.length() ? atoi(port_.c_str()) : 0;
  callback_->StartLogin(server_, port);
}
// Keyboard handler: Esc hangs up (while streaming) or disconnects from
// the server (while the peer list shows); Enter submits the connect
// form when it is visible.
void GtkMainWnd::OnKeyPress(GtkWidget* widget, GdkEventKey* key) {
  if (key->type != GDK_KEY_PRESS)
    return;
  const guint keyval = key->keyval;
  if (keyval == GDK_Escape) {
    if (draw_area_) {
      callback_->DisconnectFromCurrentPeer();
    } else if (peer_list_) {
      callback_->DisconnectFromServer();
    }
  } else if (keyval == GDK_KP_Enter || keyval == GDK_Return) {
    if (vbox_) {
      OnClicked(NULL);
    }
    // When the peer list is showing, GTK emits row-activated on Enter,
    // so OnRowActivated handles it automatically; nothing to do here.
  }
}
// Double-click / Enter on a peer row: reads the peer id from the hidden
// second column and dials it.  The header row carries id -1 and is
// ignored.
void GtkMainWnd::OnRowActivated(GtkTreeView* tree_view, GtkTreePath* path,
                                GtkTreeViewColumn* column) {
  ASSERT(peer_list_ != NULL);
  GtkTreeIter iter;
  GtkTreeModel* model;
  GtkTreeSelection* selection =
      gtk_tree_view_get_selection(GTK_TREE_VIEW(tree_view));
  if (gtk_tree_selection_get_selected(selection, &model, &iter)) {
    char* text;
    int id = -1;
    gtk_tree_model_get(model, &iter, 0, &text, 1, &id, -1);
    if (id != -1)
      callback_->ConnectToPeer(id);
    g_free(text);  // gtk_tree_model_get allocated the string copy.
  }
}
// Repaints the streaming view.  The remote frame is scaled up 2x by
// pixel doubling into |draw_buffer_|, the local preview is decimated
// 2x and blitted into the bottom-right corner, then the whole buffer
// is pushed to the drawing area.  Runs on the GTK thread (scheduled via
// the Redraw idle handler).
void GtkMainWnd::OnRedraw() {
  gdk_threads_enter();
  VideoRenderer* remote_renderer =
      static_cast<VideoRenderer*>(remote_renderer_wrapper_->renderer());
  if (remote_renderer && remote_renderer->image() != NULL &&
      draw_area_ != NULL) {
    int width = remote_renderer->width();
    int height = remote_renderer->height();
    if (!draw_buffer_.get()) {
      // 2x width * 2x height * 4 bytes per pixel.
      draw_buffer_size_ = (width * height * 4) * 4;
      draw_buffer_.reset(new uint8[draw_buffer_size_]);
      gtk_widget_set_size_request(draw_area_, width * 2, height * 2);
    }
    const uint32* image = reinterpret_cast<const uint32*>(
        remote_renderer->image());
    uint32* scaled = reinterpret_cast<uint32*>(draw_buffer_.get());
    for (int r = 0; r < height; ++r) {
      // Double each pixel horizontally...
      for (int c = 0; c < width; ++c) {
        int x = c * 2;
        scaled[x] = scaled[x + 1] = image[c];
      }
      // ...then duplicate the whole row vertically.
      uint32* prev_line = scaled;
      scaled += width * 2;
      memcpy(scaled, prev_line, (width * 2) * 4);
      image += width;
      scaled += width * 2;
    }
    VideoRenderer* local_renderer =
        static_cast<VideoRenderer*>(local_renderer_wrapper_->renderer());
    if (local_renderer && local_renderer->image()) {
      image = reinterpret_cast<const uint32*>(local_renderer->image());
      scaled = reinterpret_cast<uint32*>(draw_buffer_.get());
      // Position the local preview on the right side.
      scaled += (width * 2) - (local_renderer->width() / 2);
      // right margin...
      scaled -= 10;
      // ... towards the bottom.
      scaled += (height * width * 4) -
                ((local_renderer->height() / 2) *
                 (local_renderer->width() / 2) * 4);
      // bottom margin...
      scaled -= (width * 2) * 5;
      // Copy every second pixel of every second row (2x decimation).
      for (int r = 0; r < local_renderer->height(); r += 2) {
        for (int c = 0; c < local_renderer->width(); c += 2) {
          scaled[c / 2] = image[c + r * local_renderer->width()];
        }
        scaled += width * 2;
      }
    }
    gdk_draw_rgb_32_image(draw_area_->window,
                          draw_area_->style->fg_gc[GTK_STATE_NORMAL],
                          0,
                          0,
                          width * 2,
                          height * 2,
                          GDK_RGB_DITHER_MAX,
                          draw_buffer_.get(),
                          (width * 2) * 4);
  }
  gdk_threads_leave();
}
GtkMainWnd::VideoRenderer::VideoRenderer(GtkMainWnd* main_wnd)
    : width_(0), height_(0), main_wnd_(main_wnd) {
}
GtkMainWnd::VideoRenderer::~VideoRenderer() {
}
// Allocates a 32-bit-per-pixel buffer for the new frame dimensions.
// Called by the media engine; guarded by the GDK lock.
bool GtkMainWnd::VideoRenderer::SetSize(int width, int height, int reserved) {
  gdk_threads_enter();
  width_ = width;
  height_ = height;
  image_.reset(new uint8[width * height * 4]);
  gdk_threads_leave();
  return true;
}
// Converts an incoming frame to RGBA in |image_| and schedules a
// repaint on the GTK thread via g_idle_add.  May be called from a
// non-UI thread — presumably the capture/decode thread; hence the GDK
// lock and the idle-callback indirection (verify against the media
// engine's threading contract).
bool GtkMainWnd::VideoRenderer::RenderFrame(const cricket::VideoFrame* frame) {
  gdk_threads_enter();
  int size = width_ * height_ * 4;
  frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
                            image_.get(),
                            size,
                            width_ * 4);
  // Convert the B,G,R,A frame to R,G,B,A, which is accepted by GTK.
  // The 'A' is just padding for GTK, so we can use it as temp.
  uint8* pix = image_.get();
  uint8* end = image_.get() + size;
  while (pix < end) {
    pix[3] = pix[0];  // Save B to A.
    pix[0] = pix[2];  // Set Red.
    pix[2] = pix[3];  // Set Blue.
    pix[3] = 0xFF;    // Fixed Alpha.
    pix += 4;
  }
  gdk_threads_leave();
  g_idle_add(Redraw, main_wnd_);
  return true;
}

View File

@ -1,118 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
#define PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
#include "talk/examples/peerconnection_client/main_wnd.h"
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "talk/base/scoped_refptr.h"
// Forward declarations.
typedef struct _GtkWidget GtkWidget;
typedef union _GdkEvent GdkEvent;
typedef struct _GdkEventKey GdkEventKey;
typedef struct _GtkTreeView GtkTreeView;
typedef struct _GtkTreePath GtkTreePath;
typedef struct _GtkTreeViewColumn GtkTreeViewColumn;
// Implements the main UI of the peer connection client.
// This is functionally equivalent to the MainWnd class in the Windows
// implementation.
class GtkMainWnd : public MainWindow {
 public:
  GtkMainWnd();
  ~GtkMainWnd();
  virtual void RegisterObserver(MainWndCallback* callback);
  virtual bool IsWindow();
  virtual void SwitchToConnectUI();
  virtual void SwitchToPeerList(const Peers& peers);
  virtual void SwitchToStreamingUI();
  virtual void MessageBox(const char* caption, const char* text,
                          bool is_error);
  virtual MainWindow::UI current_ui();
  // Lazily created renderer wrappers for local preview / remote video.
  virtual webrtc::VideoRendererWrapperInterface* local_renderer();
  virtual webrtc::VideoRendererWrapperInterface* remote_renderer();
  // Marshals a callback onto the GTK main loop (thread-safe entry).
  virtual void QueueUIThreadCallback(int msg_id, void* data);
  // Creates and shows the main window with the |Connect UI| enabled.
  bool Create();
  // Destroys the window.  When the window is destroyed, it ends the
  // main message loop.
  bool Destroy();
  // Callback for when the main window is destroyed.
  void OnDestroyed(GtkWidget* widget, GdkEvent* event);
  // Callback for when the user clicks the "Connect" button.
  void OnClicked(GtkWidget* widget);
  // Callback for keystrokes.  Used to capture Esc and Return.
  void OnKeyPress(GtkWidget* widget, GdkEventKey* key);
  // Callback when the user double clicks a peer in order to initiate a
  // connection.
  void OnRowActivated(GtkTreeView* tree_view, GtkTreePath* path,
                      GtkTreeViewColumn* column);
  // Repaints the streaming view; runs on the GTK thread.
  void OnRedraw();
 protected:
  // Receives frames from the media engine, converts them to RGBA and
  // triggers OnRedraw via an idle callback.
  class VideoRenderer : public cricket::VideoRenderer {
   public:
    explicit VideoRenderer(GtkMainWnd* main_wnd);
    virtual ~VideoRenderer();
    virtual bool SetSize(int width, int height, int reserved);
    virtual bool RenderFrame(const cricket::VideoFrame* frame);
    // Last converted frame as RGBA bytes; NULL until SetSize ran.
    const uint8* image() const {
      return image_.get();
    }
    int width() const {
      return width_;
    }
    int height() const {
      return height_;
    }
   protected:
    talk_base::scoped_array<uint8> image_;
    int width_;
    int height_;
    GtkMainWnd* main_wnd_;
  };
 protected:
  // Which of these widget pointers are non-NULL encodes the current UI
  // state (see current_ui()).
  GtkWidget* window_;     // Our main window.
  GtkWidget* draw_area_;  // The drawing surface for rendering video streams.
  GtkWidget* vbox_;       // Container for the Connect UI.
  GtkWidget* server_edit_;
  GtkWidget* port_edit_;
  GtkWidget* peer_list_;  // The list of peers.
  MainWndCallback* callback_;
  // Last-entered server/port, used to pre-fill the connect form.
  std::string server_;
  std::string port_;
  talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
      local_renderer_wrapper_;
  talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
      remote_renderer_wrapper_;
  // 2x-scaled composite frame built by OnRedraw.
  talk_base::scoped_ptr<uint8> draw_buffer_;
  int draw_buffer_size_;
};
#endif // PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_

View File

@ -1,58 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <windows.h>
#include "talk/examples/peerconnection_client/conductor.h"
#include "talk/examples/peerconnection_client/main_wnd.h"
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "system_wrappers/source/trace_impl.h"
#include "talk/base/win32socketinit.h"
// Entry point for the Windows client: sets up Winsock and WebRTC
// tracing, creates the main window and runs the message loop.  After
// the window closes, keeps pumping messages until any active call and
// the server connection have finished tearing down.
int PASCAL wWinMain(HINSTANCE instance, HINSTANCE prev_instance,
                    wchar_t* cmd_line, int cmd_show) {
  talk_base::EnsureWinsockInit();
  webrtc::Trace::CreateTrace();
  webrtc::Trace::SetTraceFile("peerconnection_client.log");
  webrtc::Trace::SetLevelFilter(webrtc::kTraceWarning);
  MainWnd wnd;
  if (!wnd.Create()) {
    ASSERT(false);
    return -1;
  }
  PeerConnectionClient client;
  Conductor conductor(&client, &wnd);
  // Main loop.  GetMessage returns -1 on error, 0 on WM_QUIT.
  MSG msg;
  BOOL gm;
  while ((gm = ::GetMessage(&msg, NULL, 0, 0)) && gm != -1) {
    if (!wnd.PreTranslateMessage(&msg)) {
      ::TranslateMessage(&msg);
      ::DispatchMessage(&msg);
    }
  }
  // Drain remaining messages while teardown completes.
  if (conductor.connection_active() || client.is_connected()) {
    while ((conductor.connection_active() || client.is_connected()) &&
           (gm = ::GetMessage(&msg, NULL, 0, 0)) && gm != -1) {
      if (!wnd.PreTranslateMessage(&msg)) {
        ::TranslateMessage(&msg);
        ::DispatchMessage(&msg);
      }
    }
  }
  return 0;
}

View File

@ -1,582 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "talk/examples/peerconnection_client/main_wnd.h"
#include <math.h>
#include "talk/base/common.h"
#include "talk/base/logging.h"
ATOM MainWnd::wnd_class_ = 0;
const wchar_t MainWnd::kClassName[] = L"WebRTC_MainWnd";
namespace {
// Status strings painted while waiting for video (see MainWnd::OnPaint).
const char kConnecting[] = "Connecting... ";
const char kNoVideoStreams[] = "(no video streams either way)";
const char kNoIncomingStream[] = "(no incoming video)";
// Computes the outer window size needed so the client area exactly
// fits |text| on a single line.
void CalculateWindowSizeForText(HWND wnd, const wchar_t* text,
                                size_t* width, size_t* height) {
  HDC dc = ::GetDC(wnd);
  RECT text_rc = {0};
  ::DrawText(dc, text, -1, &text_rc, DT_CALCRECT | DT_SINGLELINE);
  ::ReleaseDC(wnd, dc);
  RECT client, window;
  ::GetClientRect(wnd, &client);
  ::GetWindowRect(wnd, &window);
  // Add the non-client (frame/title bar) size to the measured text size.
  *width = text_rc.right - text_rc.left;
  *width += (window.right - window.left) -
            (client.right - client.left);
  *height = text_rc.bottom - text_rc.top;
  *height += (window.bottom - window.top) -
             (client.bottom - client.top);
}
// Stock GUI font; GetStockObject handles need not be freed.
HFONT GetDefaultFont() {
  static HFONT font = reinterpret_cast<HFONT>(GetStockObject(DEFAULT_GUI_FONT));
  return font;
}
std::string GetWindowText(HWND wnd) {
  char text[MAX_PATH] = {0};
  ::GetWindowTextA(wnd, &text[0], ARRAYSIZE(text));
  return text;
}
// Appends |str| to a Win32 list box, attaching |item_data| (the peer
// id; -1 marks the non-selectable header row).
void AddListBoxItem(HWND listbox, const std::string& str, LPARAM item_data) {
  LRESULT index = ::SendMessageA(listbox, LB_ADDSTRING, 0,
      reinterpret_cast<LPARAM>(str.c_str()));
  ::SendMessageA(listbox, LB_SETITEMDATA, index, item_data);
}
}  // namespace
MainWnd::MainWnd()
  : ui_(CONNECT_TO_SERVER), wnd_(NULL), edit1_(NULL), edit2_(NULL),
    label1_(NULL), label2_(NULL), button_(NULL), listbox_(NULL),
    destroyed_(false), callback_(NULL), nested_msg_(NULL) {
}
MainWnd::~MainWnd() {
  ASSERT(!IsWindow());
}
// Registers the window class (once), creates the top-level window and
// its child controls, and shows the Connect UI.
bool MainWnd::Create() {
  ASSERT(wnd_ == NULL);
  if (!RegisterWindowClass())
    return false;
  // Remembered so QueueUIThreadCallback can PostThreadMessage here.
  ui_thread_id_ = ::GetCurrentThreadId();
  wnd_ = ::CreateWindowExW(WS_EX_OVERLAPPEDWINDOW, kClassName, L"WebRTC",
      WS_OVERLAPPEDWINDOW | WS_VISIBLE | WS_CLIPCHILDREN,
      CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
      NULL, NULL, GetModuleHandle(NULL), this);
  ::SendMessage(wnd_, WM_SETFONT, reinterpret_cast<WPARAM>(GetDefaultFont()),
                TRUE);
  CreateChildWindows();
  SwitchToConnectUI();
  return wnd_ != NULL;
}
bool MainWnd::Destroy() {
  BOOL ret = FALSE;
  if (IsWindow()) {
    ret = ::DestroyWindow(wnd_);
  }
  return ret != FALSE;
}
void MainWnd::RegisterObserver(MainWndCallback* callback) {
  callback_ = callback;
}
bool MainWnd::IsWindow() {
  return wnd_ && ::IsWindow(wnd_) != FALSE;
}
// Filters the message loop: handles Tab/Return/Esc keyboard shortcuts
// and dispatches UI_THREAD_CALLBACK thread messages (posted by
// QueueUIThreadCallback).  Returns true when the message was consumed
// and must not be translated/dispatched.
bool MainWnd::PreTranslateMessage(MSG* msg) {
  bool ret = false;
  if (msg->message == WM_CHAR) {
    if (msg->wParam == VK_TAB) {
      HandleTabbing();
      ret = true;
    } else if (msg->wParam == VK_RETURN) {
      OnDefaultAction();
      ret = true;
    } else if (msg->wParam == VK_ESCAPE) {
      // Esc hangs up during a call, otherwise logs off the server.
      if (callback_) {
        if (ui_ == STREAMING) {
          callback_->DisconnectFromCurrentPeer();
        } else {
          callback_->DisconnectFromServer();
        }
      }
    }
  } else if (msg->hwnd == NULL && msg->message == UI_THREAD_CALLBACK) {
    // Thread message (no window target) carrying a queued callback.
    callback_->UIThreadCallback(static_cast<int>(msg->wParam),
                                reinterpret_cast<void*>(msg->lParam));
    ret = true;
  }
  return ret;
}
// Shows the server/port entry form and hides the peer list.
void MainWnd::SwitchToConnectUI() {
  ASSERT(IsWindow());
  LayoutPeerListUI(false);
  ui_ = CONNECT_TO_SERVER;
  LayoutConnectUI(true);
  ::SetFocus(edit1_);
}
// Repopulates and shows the peer list box; releases any video
// renderers left over from a previous call.
void MainWnd::SwitchToPeerList(const Peers& peers) {
  remote_video_.reset();
  local_video_.reset();
  LayoutConnectUI(false);
  ::SendMessage(listbox_, LB_RESETCONTENT, 0, 0);
  // Header row gets item data -1 so selecting it never dials a peer.
  AddListBoxItem(listbox_, "List of currently connected peers:", -1);
  Peers::const_iterator i = peers.begin();
  for (; i != peers.end(); ++i)
    AddListBoxItem(listbox_, i->second.c_str(), i->first);
  ui_ = LIST_PEERS;
  LayoutPeerListUI(true);
  ::SetFocus(listbox_);
}
// Hides all controls; OnPaint draws the video directly on the client
// area while ui_ == STREAMING.
void MainWnd::SwitchToStreamingUI() {
  LayoutConnectUI(false);
  LayoutPeerListUI(false);
  ui_ = STREAMING;
}
void MainWnd::MessageBox(const char* caption, const char* text, bool is_error) {
  DWORD flags = MB_OK;
  if (is_error)
    flags |= MB_ICONERROR;
  ::MessageBoxA(handle(), text, caption, flags);
}
// Lazily created renderer for the local preview thumbnail.
cricket::VideoRenderer* MainWnd::local_renderer() {
  if (!local_video_.get())
    local_video_.reset(new VideoRenderer(handle(), 1, 1));
  return local_video_.get();
}
// Lazily created renderer for the remote video.
cricket::VideoRenderer* MainWnd::remote_renderer() {
  if (!remote_video_.get())
    remote_video_.reset(new VideoRenderer(handle(), 1, 1));
  return remote_video_.get();
}
// Thread-safe: posts the callback to the UI thread's message queue,
// where PreTranslateMessage picks it up.
void MainWnd::QueueUIThreadCallback(int msg_id, void* data) {
  ::PostThreadMessage(ui_thread_id_, UI_THREAD_CALLBACK,
      static_cast<WPARAM>(msg_id), reinterpret_cast<LPARAM>(data));
}
// Paints the client area.  While streaming, the remote frame is
// stretch-blitted through an off-screen DC (to avoid flicker) to fill
// the window, with the local frame as a thumbnail in the bottom-right
// corner; otherwise a status message or plain background is drawn.
void MainWnd::OnPaint() {
  PAINTSTRUCT ps;
  ::BeginPaint(handle(), &ps);
  RECT rc;
  ::GetClientRect(handle(), &rc);
  if (ui_ == STREAMING && remote_video_.get() && local_video_.get()) {
    // Hold both renderer locks so the frame buffers can't change mid-paint.
    AutoLock<VideoRenderer> local_lock(local_video_.get());
    AutoLock<VideoRenderer> remote_lock(remote_video_.get());
    const BITMAPINFO& bmi = remote_video_->bmi();
    // biHeight is negative for a top-down bitmap, so use its magnitude.
    int height = abs(bmi.bmiHeader.biHeight);
    int width = bmi.bmiHeader.biWidth;
    const uint8* image = remote_video_->image();
    if (image != NULL) {
      HDC dc_mem = ::CreateCompatibleDC(ps.hdc);
      ::SetStretchBltMode(dc_mem, HALFTONE);
      // Set the map mode so that the ratio will be maintained for us.
      HDC all_dc[] = { ps.hdc, dc_mem };
      for (int i = 0; i < ARRAY_SIZE(all_dc); ++i) {
        SetMapMode(all_dc[i], MM_ISOTROPIC);
        SetWindowExtEx(all_dc[i], width, height, NULL);
        SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
      }
      HBITMAP bmp_mem = ::CreateCompatibleBitmap(ps.hdc, rc.right, rc.bottom);
      HGDIOBJ bmp_old = ::SelectObject(dc_mem, bmp_mem);
      POINT logical_area = { rc.right, rc.bottom };
      DPtoLP(ps.hdc, &logical_area, 1);
      // Clear the back buffer to black before drawing the frame.
      HBRUSH brush = ::CreateSolidBrush(RGB(0, 0, 0));
      RECT logical_rect = {0, 0, logical_area.x, logical_area.y };
      ::FillRect(dc_mem, &logical_rect, brush);
      ::DeleteObject(brush);
      // NOTE(review): max_unit is computed but never used.
      int max_unit = std::max(width, height);
      // Center the remote frame in the logical area.
      int x = (logical_area.x / 2) - (width / 2);
      int y = (logical_area.y / 2) - (height / 2);
      StretchDIBits(dc_mem, x, y, width, height,
                    0, 0, width, height, image, &bmi, DIB_RGB_COLORS, SRCCOPY);
      // Draw the local preview thumbnail only when the window is big enough.
      if ((rc.right - rc.left) > 200 && (rc.bottom - rc.top) > 200) {
        const BITMAPINFO& bmi = local_video_->bmi();
        image = local_video_->image();
        int thumb_width = bmi.bmiHeader.biWidth / 4;
        int thumb_height = abs(bmi.bmiHeader.biHeight) / 4;
        StretchDIBits(dc_mem,
                      logical_area.x - thumb_width - 10,
                      logical_area.y - thumb_height - 10,
                      thumb_width, thumb_height,
                      0, 0, bmi.bmiHeader.biWidth, -bmi.bmiHeader.biHeight,
                      image, &bmi, DIB_RGB_COLORS, SRCCOPY);
      }
      // Copy the composed back buffer to the screen in one blit.
      BitBlt(ps.hdc, 0, 0, logical_area.x, logical_area.y,
             dc_mem, 0, 0, SRCCOPY);
      // Cleanup.
      ::SelectObject(dc_mem, bmp_old);
      ::DeleteObject(bmp_mem);
      ::DeleteDC(dc_mem);
    } else {
      // We're still waiting for the video stream to be initialized.
      HBRUSH brush = ::CreateSolidBrush(RGB(0, 0, 0));
      ::FillRect(ps.hdc, &rc, brush);
      ::DeleteObject(brush);
      HGDIOBJ old_font = ::SelectObject(ps.hdc, GetDefaultFont());
      ::SetTextColor(ps.hdc, RGB(0xff, 0xff, 0xff));
      ::SetBkMode(ps.hdc, TRANSPARENT);
      std::string text(kConnecting);
      if (!local_video_->image()) {
        text += kNoVideoStreams;
      } else {
        text += kNoIncomingStream;
      }
      ::DrawTextA(ps.hdc, text.c_str(), -1, &rc,
                  DT_SINGLELINE | DT_CENTER | DT_VCENTER);
      ::SelectObject(ps.hdc, old_font);
    }
  } else {
    // Not streaming: fill with the default window background color.
    HBRUSH brush = ::CreateSolidBrush(::GetSysColor(COLOR_WINDOW));
    ::FillRect(ps.hdc, &rc, brush);
    ::DeleteObject(brush);
  }
  ::EndPaint(handle(), &ps);
}
// Ends the application's message loop once the window is destroyed.
void MainWnd::OnDestroyed() {
  PostQuitMessage(0);
}
// Runs the action associated with the current UI state: start login
// from the connect screen, or connect to the selected peer from the
// peer list.
void MainWnd::OnDefaultAction() {
  if (callback_ == NULL)
    return;
  switch (ui_) {
    case CONNECT_TO_SERVER: {
      std::string server(GetWindowText(edit1_));
      std::string port_str(GetWindowText(edit2_));
      int port = port_str.empty() ? 0 : atoi(port_str.c_str());
      callback_->StartLogin(server, port);
      break;
    }
    case LIST_PEERS: {
      LRESULT sel = ::SendMessage(listbox_, LB_GETCURSEL, 0, 0);
      if (sel == LB_ERR)
        break;
      LRESULT peer_id = ::SendMessage(listbox_, LB_GETITEMDATA, sel, 0);
      if (peer_id != -1 && callback_)
        callback_->ConnectToPeer(peer_id);
      break;
    }
    default:
      MessageBoxA(wnd_, "OK!", "Yeah", MB_OK);
      break;
  }
}
// Window-message handler invoked from WndProc.  Returns true (filling
// |*result|) when the message was fully handled, false to fall through
// to DefWindowProc.
bool MainWnd::OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT* result) {
  switch (msg) {
    case WM_ERASEBKGND:
      // OnPaint covers the whole client area; skip background erase to
      // reduce flicker.
      *result = TRUE;
      return true;
    case WM_PAINT:
      OnPaint();
      return true;
    case WM_SETFOCUS:
      // Forward focus to the control that is active in the current UI.
      if (ui_ == CONNECT_TO_SERVER) {
        SetFocus(edit1_);
      } else if (ui_ == LIST_PEERS) {
        SetFocus(listbox_);
      }
      return true;
    case WM_SIZE:
      // Re-layout the visible UI; note this falls through to
      // DefWindowProc (break, not return).
      if (ui_ == CONNECT_TO_SERVER) {
        LayoutConnectUI(true);
      } else if (ui_ == LIST_PEERS) {
        LayoutPeerListUI(true);
      }
      break;
    case WM_CTLCOLORSTATIC:
      // Paint static controls with the standard window background.
      *result = reinterpret_cast<LRESULT>(GetSysColorBrush(COLOR_WINDOW));
      return true;
    case WM_COMMAND:
      // Button click or list-box double-click both trigger the default
      // action for the current UI state.
      if (button_ == reinterpret_cast<HWND>(lp)) {
        if (BN_CLICKED == HIWORD(wp))
          OnDefaultAction();
      } else if (listbox_ == reinterpret_cast<HWND>(lp)) {
        if (LBN_DBLCLK == HIWORD(wp)) {
          OnDefaultAction();
        }
      }
      return true;
    case WM_CLOSE:
      if (callback_)
        callback_->Close();
      break;
  }
  return false;
}
// static
// Trampoline window procedure.  Recovers the MainWnd instance stored in
// the window's user data (stashed there on WM_CREATE), forwards
// messages to OnMessage(), and defers OnDestroyed() until the outermost
// nested message has unwound so the instance is not torn down while a
// nested dispatch is still on the stack.
LRESULT CALLBACK MainWnd::WndProc(HWND hwnd, UINT msg, WPARAM wp, LPARAM lp) {
  MainWnd* me = reinterpret_cast<MainWnd*>(
      ::GetWindowLongPtr(hwnd, GWL_USERDATA));
  if (!me && WM_CREATE == msg) {
    // First message: bind the HWND to the instance passed through
    // CreateWindow's lpCreateParams.
    CREATESTRUCT* cs = reinterpret_cast<CREATESTRUCT*>(lp);
    me = reinterpret_cast<MainWnd*>(cs->lpCreateParams);
    me->wnd_ = hwnd;
    ::SetWindowLongPtr(hwnd, GWL_USERDATA, reinterpret_cast<LONG_PTR>(me));
  }
  LRESULT result = 0;
  if (me) {
    // nested_msg_ is NULL only at the outermost dispatch level; save and
    // restore it to track nesting depth.
    void* prev_nested_msg = me->nested_msg_;
    me->nested_msg_ = &msg;
    bool handled = me->OnMessage(msg, wp, lp, &result);
    if (WM_NCDESTROY == msg) {
      me->destroyed_ = true;
    } else if (!handled) {
      result = ::DefWindowProc(hwnd, msg, wp, lp);
    }
    // Run destruction cleanup only once we're back at the outermost level.
    if (me->destroyed_ && prev_nested_msg == NULL) {
      me->OnDestroyed();
      me->wnd_ = NULL;
      me->destroyed_ = false;
    }
    me->nested_msg_ = prev_nested_msg;
  } else {
    result = ::DefWindowProc(hwnd, msg, wp, lp);
  }
  return result;
}
// static
// Registers the window class on first use; later calls are no-ops.
bool MainWnd::RegisterWindowClass() {
  if (wnd_class_ != 0)
    return true;
  WNDCLASSEX wcex = { sizeof(WNDCLASSEX) };
  wcex.lpszClassName = kClassName;
  wcex.lpfnWndProc = &WndProc;
  wcex.style = CS_DBLCLKS;
  wcex.hInstance = GetModuleHandle(NULL);
  wcex.hCursor = ::LoadCursor(NULL, IDC_ARROW);
  wcex.hbrBackground = reinterpret_cast<HBRUSH>(COLOR_WINDOW + 1);
  wnd_class_ = ::RegisterClassEx(&wcex);
  ASSERT(wnd_class_ != 0);
  return wnd_class_ != 0;
}
// Creates one child control with the given class/style if it does not
// already exist, and assigns it the default UI font.
void MainWnd::CreateChildWindow(HWND* wnd, MainWnd::ChildWindowID id,
                                const wchar_t* class_name, DWORD control_style,
                                DWORD ex_style) {
  if (::IsWindow(*wnd))
    return;
  // Child windows are invisible at first, and shown after being resized.
  const DWORD style = WS_CHILD | control_style;
  *wnd = ::CreateWindowEx(ex_style, class_name, L"", style,
                          100, 100, 100, 100, wnd_,
                          reinterpret_cast<HMENU>(id),
                          GetModuleHandle(NULL), NULL);
  ASSERT(::IsWindow(*wnd) != FALSE);
  const WPARAM font = reinterpret_cast<WPARAM>(GetDefaultFont());
  ::SendMessage(*wnd, WM_SETFONT, font, TRUE);
}
// Creates all child controls (labels, edits, button, list box) and
// seeds the server/port fields with defaults.
// NOTE(review): both edit controls are created with EDIT_ID — harmless
// here since controls are looked up by HWND, but confirm distinct ids
// were not intended.
void MainWnd::CreateChildWindows() {
  // Create the child windows in tab order.
  CreateChildWindow(&label1_, LABEL1_ID, L"Static", ES_CENTER | ES_READONLY, 0);
  CreateChildWindow(&edit1_, EDIT_ID, L"Edit",
                    ES_LEFT | ES_NOHIDESEL | WS_TABSTOP, WS_EX_CLIENTEDGE);
  CreateChildWindow(&label2_, LABEL2_ID, L"Static", ES_CENTER | ES_READONLY, 0);
  CreateChildWindow(&edit2_, EDIT_ID, L"Edit",
                    ES_LEFT | ES_NOHIDESEL | WS_TABSTOP, WS_EX_CLIENTEDGE);
  CreateChildWindow(&button_, BUTTON_ID, L"Button", BS_CENTER | WS_TABSTOP, 0);
  CreateChildWindow(&listbox_, LISTBOX_ID, L"ListBox",
                    LBS_HASSTRINGS | LBS_NOTIFY, WS_EX_CLIENTEDGE);
  ::SetWindowTextA(edit1_, GetDefaultServerName().c_str());
  ::SetWindowTextA(edit2_, "8888");
}
// Lays out (or hides) the connect screen: a horizontal row of
// label/edit/button controls centered in the client area.  The |text|
// values starting with 'X' are width-sizing placeholders only and are
// never set as window text.
void MainWnd::LayoutConnectUI(bool show) {
  struct Windows {
    HWND wnd;
    const wchar_t* text;
    size_t width;
    size_t height;
  } windows[] = {
    { label1_, L"Server" },
    { edit1_, L"XXXyyyYYYgggXXXyyyYYYggg" },
    { label2_, L":" },
    { edit2_, L"XyXyX" },
    { button_, L"Connect" },
  };
  if (show) {
    const size_t kSeparator = 5;
    size_t total_width = (ARRAYSIZE(windows) - 1) * kSeparator;
    // Measure each control from its text and accumulate the row width.
    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
      CalculateWindowSizeForText(windows[i].wnd, windows[i].text,
                                 &windows[i].width, &windows[i].height);
      total_width += windows[i].width;
    }
    RECT rc;
    ::GetClientRect(wnd_, &rc);
    size_t x = (rc.right / 2) - (total_width / 2);
    size_t y = rc.bottom / 2;
    // Position each control left-to-right, vertically centered.
    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
      size_t top = y - (windows[i].height / 2);
      ::MoveWindow(windows[i].wnd, x, top, windows[i].width, windows[i].height,
                   TRUE);
      x += kSeparator + windows[i].width;
      if (windows[i].text[0] != 'X')
        ::SetWindowText(windows[i].wnd, windows[i].text);
      ::ShowWindow(windows[i].wnd, SW_SHOWNA);
    }
  } else {
    for (size_t i = 0; i < ARRAYSIZE(windows); ++i) {
      ::ShowWindow(windows[i].wnd, SW_HIDE);
    }
  }
}
// Shows the peer list filling the entire client area, or hides it and
// forces a repaint of the now-empty window.
void MainWnd::LayoutPeerListUI(bool show) {
  if (!show) {
    ::ShowWindow(listbox_, SW_HIDE);
    InvalidateRect(wnd_, NULL, TRUE);
    return;
  }
  RECT rc;
  ::GetClientRect(wnd_, &rc);
  ::MoveWindow(listbox_, 0, 0, rc.right, rc.bottom, TRUE);
  ::ShowWindow(listbox_, SW_SHOWNA);
}
// Moves keyboard focus to the next (or, with Shift held, previous)
// visible sibling window that has WS_TABSTOP, wrapping around at the
// ends of the Z-order.
// NOTE(review): if no visible window with WS_TABSTOP exists, this loop
// never terminates — confirm at least one tab stop is always shown.
void MainWnd::HandleTabbing() {
  bool shift = ((::GetAsyncKeyState(VK_SHIFT) & 0x8000) != 0);
  UINT next_cmd = shift ? GW_HWNDPREV : GW_HWNDNEXT;
  UINT loop_around_cmd = shift ? GW_HWNDLAST : GW_HWNDFIRST;
  HWND focus = GetFocus(), next;
  do {
    next = ::GetWindow(focus, next_cmd);
    if (IsWindowVisible(next) &&
        (GetWindowLong(next, GWL_STYLE) & WS_TABSTOP)) {
      break;
    }
    if (!next) {
      // Hit the end of the Z-order; wrap around to the other end.
      next = ::GetWindow(focus, loop_around_cmd);
      if (IsWindowVisible(next) &&
          (GetWindowLong(next, GWL_STYLE) & WS_TABSTOP)) {
        break;
      }
    }
    focus = next;
  } while (true);
  ::SetFocus(next);
}
//
// MainWnd::VideoRenderer
//

// Prepares a 32-bit ARGB top-down DIB description sized width x height
// and the critical section guarding the pixel buffer.
MainWnd::VideoRenderer::VideoRenderer(HWND wnd, int width, int height)
    : wnd_(wnd) {
  ::InitializeCriticalSection(&buffer_lock_);
  ZeroMemory(&bmi_, sizeof(bmi_));
  BITMAPINFOHEADER* hdr = &bmi_.bmiHeader;
  hdr->biSize = sizeof(BITMAPINFOHEADER);
  hdr->biPlanes = 1;
  hdr->biBitCount = 32;
  hdr->biCompression = BI_RGB;
  hdr->biWidth = width;
  // A negative height marks the bitmap as top-down.
  hdr->biHeight = -height;
  hdr->biSizeImage = width * height * (hdr->biBitCount >> 3);
}
// Releases the critical section created in the constructor.
MainWnd::VideoRenderer::~VideoRenderer() {
  ::DeleteCriticalSection(&buffer_lock_);
}
bool MainWnd::VideoRenderer::SetSize(int width, int height, int reserved) {
AutoLock<VideoRenderer> lock(this);
bmi_.bmiHeader.biWidth = width;
bmi_.bmiHeader.biHeight = -height;
bmi_.bmiHeader.biSizeImage = width * height *
(bmi_.bmiHeader.biBitCount >> 3);
image_.reset(new uint8[bmi_.bmiHeader.biSizeImage]);
return true;
}
// Converts |frame| to ARGB into the back buffer (under the buffer lock)
// and invalidates the window so OnPaint will draw it.
bool MainWnd::VideoRenderer::RenderFrame(const cricket::VideoFrame* frame) {
  if (!frame)
    return false;
  {
    AutoLock<VideoRenderer> lock(this);
    // SetSize() must have allocated the buffer before frames arrive.
    ASSERT(image_.get() != NULL);
    frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB, image_.get(),
                              bmi_.bmiHeader.biSizeImage,
                              bmi_.bmiHeader.biWidth *
                              (bmi_.bmiHeader.biBitCount >> 3));
  }
  InvalidateRect(wnd_, NULL, TRUE);
  return true;
}

View File

@ -1,191 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
#define PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
#pragma once
#include <map>
#include <string>
#include "talk/app/webrtc_dev/mediastream.h"
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "talk/base/win32.h"
#include "talk/session/phone/mediachannel.h"
#include "talk/session/phone/videocommon.h"
#include "talk/session/phone/videoframe.h"
#include "talk/session/phone/videorenderer.h"
// Observer interface implemented by the application logic so the UI can
// report user actions (login, connect to peer, shutdown).
class MainWndCallback {
 public:
  virtual bool StartLogin(const std::string& server, int port) = 0;
  virtual void DisconnectFromServer() = 0;
  virtual void ConnectToPeer(int peer_id) = 0;
  virtual void DisconnectFromCurrentPeer() = 0;
  // Invoked on the UI thread for messages queued via
  // MainWindow::QueueUIThreadCallback().
  virtual void UIThreadCallback(int msg_id, void* data) = 0;
  virtual void Close() = 0;
 protected:
  virtual ~MainWndCallback() {}
};
// Pure virtual interface for the main window.
class MainWindow {
 public:
  virtual ~MainWindow() {}

  // The three UI states the window can present.
  enum UI {
    CONNECT_TO_SERVER,
    LIST_PEERS,
    STREAMING,
  };

  virtual void RegisterObserver(MainWndCallback* callback) = 0;
  virtual bool IsWindow() = 0;
  virtual void MessageBox(const char* caption, const char* text,
                          bool is_error) = 0;
  virtual UI current_ui() = 0;
  virtual void SwitchToConnectUI() = 0;
  virtual void SwitchToPeerList(const Peers& peers) = 0;
  virtual void SwitchToStreamingUI() = 0;
  // Renderers used to display local and remote video.
  virtual webrtc::VideoRendererWrapperInterface* local_renderer() = 0;
  virtual webrtc::VideoRendererWrapperInterface* remote_renderer() = 0;
  // Queues |msg_id|/|data| for delivery to
  // MainWndCallback::UIThreadCallback on the UI thread.
  virtual void QueueUIThreadCallback(int msg_id, void* data) = 0;
};
#ifdef WIN32
// Win32 implementation of MainWindow.
class MainWnd : public MainWindow {
 public:
  static const wchar_t kClassName[];

  enum WindowMessages {
    // Posted by QueueUIThreadCallback(); handled on the UI thread.
    UI_THREAD_CALLBACK = WM_APP + 1,
  };

  MainWnd();
  ~MainWnd();

  bool Create();
  bool Destroy();
  bool PreTranslateMessage(MSG* msg);

  virtual void RegisterObserver(MainWndCallback* callback);
  virtual bool IsWindow();
  virtual void SwitchToConnectUI();
  virtual void SwitchToPeerList(const Peers& peers);
  virtual void SwitchToStreamingUI();
  virtual void MessageBox(const char* caption, const char* text,
                          bool is_error);
  virtual UI current_ui() { return ui_; }

  // NOTE(review): MainWindow declares these as returning
  // webrtc::VideoRendererWrapperInterface*, so these declarations do not
  // override the base pure virtuals — confirm which signature is intended.
  virtual cricket::VideoRenderer* local_renderer();
  virtual cricket::VideoRenderer* remote_renderer();

  virtual void QueueUIThreadCallback(int msg_id, void* data);

  HWND handle() const { return wnd_; }

  // Renders frames into an in-memory ARGB bitmap that OnPaint blits to
  // the screen.  Lock()/Unlock() guard access to the pixel buffer.
  class VideoRenderer : public cricket::VideoRenderer {
   public:
    VideoRenderer(HWND wnd, int width, int height);
    virtual ~VideoRenderer();

    void Lock() {
      ::EnterCriticalSection(&buffer_lock_);
    }

    void Unlock() {
      ::LeaveCriticalSection(&buffer_lock_);
    }

    virtual bool SetSize(int width, int height, int reserved);

    // Called when a new frame is available for display.
    virtual bool RenderFrame(const cricket::VideoFrame* frame);

    const BITMAPINFO& bmi() const { return bmi_; }
    const uint8* image() const { return image_.get(); }

   protected:
    enum {
      SET_SIZE,
      RENDER_FRAME,
    };

    HWND wnd_;
    BITMAPINFO bmi_;
    talk_base::scoped_array<uint8> image_;
    CRITICAL_SECTION buffer_lock_;
  };

  // A little helper class to make sure we always do proper locking and
  // unlocking when working with VideoRenderer buffers.
  template <typename T>
  class AutoLock {
   public:
    explicit AutoLock(T* obj) : obj_(obj) { obj_->Lock(); }
    ~AutoLock() { obj_->Unlock(); }
   protected:
    T* obj_;
  };

 protected:
  enum ChildWindowID {
    EDIT_ID = 1,
    BUTTON_ID,
    LABEL1_ID,
    LABEL2_ID,
    LISTBOX_ID,
  };

  void OnPaint();
  void OnDestroyed();
  void OnDefaultAction();
  bool OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT* result);
  static LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wp, LPARAM lp);
  static bool RegisterWindowClass();
  void CreateChildWindow(HWND* wnd, ChildWindowID id, const wchar_t* class_name,
                         DWORD control_style, DWORD ex_style);
  void CreateChildWindows();
  void LayoutConnectUI(bool show);
  void LayoutPeerListUI(bool show);
  void HandleTabbing();

 private:
  talk_base::scoped_ptr<VideoRenderer> remote_video_;
  talk_base::scoped_ptr<VideoRenderer> local_video_;
  UI ui_;
  HWND wnd_;
  DWORD ui_thread_id_;
  HWND edit1_;
  HWND edit2_;
  HWND label1_;
  HWND label2_;
  HWND button_;
  HWND listbox_;
  bool destroyed_;
  // Non-NULL while a window message is being dispatched; used by
  // WndProc to detect nested message processing.
  void* nested_msg_;
  MainWndCallback* callback_;
  static ATOM wnd_class_;
};
#endif // WIN32
#endif // PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_

View File

@ -1,482 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "talk/examples/peerconnection_client/peer_connection_client.h"
#include "talk/examples/peerconnection_client/defaults.h"
#include "talk/base/common.h"
#include "talk/base/nethelpers.h"
#include "talk/base/logging.h"
#include "talk/base/stringutils.h"
#ifdef WIN32
#include "talk/base/win32socketserver.h"
#endif
using talk_base::sprintfn;
namespace {

// This is our magical hangup signal.
const char kByeMessage[] = "BYE";

// Creates an async client socket appropriate for the platform.
talk_base::AsyncSocket* CreateClientSocket() {
#ifdef WIN32
  return new talk_base::Win32Socket();
#elif defined(POSIX)
  talk_base::Thread* thread = talk_base::Thread::Current();
  ASSERT(thread != NULL);
  return thread->socketserver()->CreateAsyncSocket(SOCK_STREAM);
#else
#error Platform not supported.
#endif
}

}  // namespace
// Wires up connect/read/close notifications for both sockets.  The
// control socket issues one-shot requests; the hanging-get socket
// long-polls the server for notifications.
PeerConnectionClient::PeerConnectionClient()
    : callback_(NULL),
      control_socket_(CreateClientSocket()),
      hanging_get_(CreateClientSocket()),
      state_(NOT_CONNECTED),
      my_id_(-1) {
  control_socket_->SignalConnectEvent.connect(
      this, &PeerConnectionClient::OnConnect);
  control_socket_->SignalReadEvent.connect(
      this, &PeerConnectionClient::OnRead);
  control_socket_->SignalCloseEvent.connect(
      this, &PeerConnectionClient::OnClose);
  hanging_get_->SignalConnectEvent.connect(
      this, &PeerConnectionClient::OnHangingGetConnect);
  hanging_get_->SignalReadEvent.connect(
      this, &PeerConnectionClient::OnHangingGetRead);
  hanging_get_->SignalCloseEvent.connect(
      this, &PeerConnectionClient::OnClose);
}
// The scoped_ptr members release the sockets.
PeerConnectionClient::~PeerConnectionClient() {
}
// Server-assigned peer id, or -1 when not signed in.
int PeerConnectionClient::id() const {
  return my_id_;
}
// True once the server has assigned us an id.
bool PeerConnectionClient::is_connected() const {
  return my_id_ != -1;
}
// Map of currently known peers (id -> name).
const Peers& PeerConnectionClient::peers() const {
  return peers_;
}
// Registers the single observer; must be called exactly once.
void PeerConnectionClient::RegisterObserver(
    PeerConnectionClientObserver* callback) {
  ASSERT(!callback_);
  callback_ = callback;
}
// Resolves |server| and starts signing in as |client_name| via an async
// GET on the control socket.  Returns false on bad arguments, DNS
// failure, or when a connection attempt is already in progress.
bool PeerConnectionClient::Connect(const std::string& server, int port,
                                   const std::string& client_name) {
  ASSERT(!server.empty());
  ASSERT(!client_name.empty());
  if (state_ != NOT_CONNECTED) {
    LOG(WARNING)
        << "The client must not be connected before you can call Connect()";
    return false;
  }
  if (server.empty() || client_name.empty())
    return false;
  if (port <= 0)
    port = kDefaultServerPort;
  server_address_.SetIP(server);
  server_address_.SetPort(port);
  if (server_address_.IsUnresolved()) {
    // Blocking DNS lookup of the server host name.
    int errcode = 0;
    hostent* h = talk_base::SafeGetHostByName(
        server_address_.IPAsString().c_str(), &errcode);
    if (!h) {
      LOG(LS_ERROR) << "Failed to resolve host name: "
                    << server_address_.IPAsString();
      return false;
    } else {
      server_address_.SetResolvedIP(
          ntohl(*reinterpret_cast<uint32*>(h->h_addr_list[0])));
      talk_base::FreeHostEnt(h);
    }
  }
  // The sign-in request is sent by OnConnect() once the socket is up.
  char buffer[1024];
  sprintfn(buffer, sizeof(buffer),
           "GET /sign_in?%s HTTP/1.0\r\n\r\n", client_name.c_str());
  onconnect_data_ = buffer;
  bool ret = ConnectControlSocket();
  if (ret)
    state_ = SIGNING_IN;
  return ret;
}
// Sends |message| to |peer_id| via a one-shot POST on the control
// socket.  Returns false when not connected, when a send is already in
// flight (socket not closed), or when |peer_id| is invalid.  The actual
// write happens in OnConnect() once the socket connects.
bool PeerConnectionClient::SendToPeer(int peer_id, const std::string& message) {
  if (state_ != CONNECTED)
    return false;
  ASSERT(is_connected());
  ASSERT(control_socket_->GetState() == talk_base::Socket::CS_CLOSED);
  if (!is_connected() || peer_id == -1)
    return false;
  char headers[1024];
  // Cast length to int: passing a size_t through "..." for %i is
  // undefined and breaks on LP64 platforms.
  sprintfn(headers, sizeof(headers),
           "POST /message?peer_id=%i&to=%i HTTP/1.0\r\n"
           "Content-Length: %i\r\n"
           "Content-Type: text/plain\r\n"
           "\r\n",
           my_id_, peer_id, static_cast<int>(message.length()));
  onconnect_data_ = headers;
  onconnect_data_ += message;
  return ConnectControlSocket();
}
// Sends the "BYE" sentinel, which the receiving client maps to a
// disconnect notification (see OnMessageFromPeer).
bool PeerConnectionClient::SendHangUp(int peer_id) {
  return SendToPeer(peer_id, kByeMessage);
}
// True while a control-socket request is still in flight.
bool PeerConnectionClient::IsSendingMessage() {
  return state_ == CONNECTED &&
      control_socket_->GetState() != talk_base::Socket::CS_CLOSED;
}
// Initiates sign-out.  If the control socket is busy, the request is
// deferred (SIGNING_OUT_WAITING) and re-issued from OnRead() once the
// pending send completes.
bool PeerConnectionClient::SignOut() {
  if (state_ == NOT_CONNECTED || state_ == SIGNING_OUT)
    return true;
  if (hanging_get_->GetState() != talk_base::Socket::CS_CLOSED)
    hanging_get_->Close();
  if (control_socket_->GetState() == talk_base::Socket::CS_CLOSED) {
    state_ = SIGNING_OUT;
    if (my_id_ != -1) {
      char buffer[1024];
      sprintfn(buffer, sizeof(buffer),
               "GET /sign_out?peer_id=%i HTTP/1.0\r\n\r\n", my_id_);
      onconnect_data_ = buffer;
      return ConnectControlSocket();
    } else {
      // Can occur if the app is closed before we finish connecting.
      return true;
    }
  } else {
    state_ = SIGNING_OUT_WAITING;
  }
  return true;
}
// Closes both sockets and resets all connection state to the initial
// signed-out values.
void PeerConnectionClient::Close() {
  control_socket_->Close();
  hanging_get_->Close();
  onconnect_data_.clear();
  peers_.clear();
  my_id_ = -1;
  state_ = NOT_CONNECTED;
}
// Starts an async connect of the control socket to the server.  On an
// immediate failure, resets all state and returns false.
bool PeerConnectionClient::ConnectControlSocket() {
  ASSERT(control_socket_->GetState() == talk_base::Socket::CS_CLOSED);
  if (control_socket_->Connect(server_address_) == SOCKET_ERROR) {
    Close();
    return false;
  }
  return true;
}
// Control socket connected: send the request staged in onconnect_data_
// (sign-in, sign-out, or a peer message).
void PeerConnectionClient::OnConnect(talk_base::AsyncSocket* socket) {
  ASSERT(!onconnect_data_.empty());
  // NOTE(review): Send() returns int; a -1 failure stored into size_t
  // wraps and is only caught by the debug-build ASSERT below.
  size_t sent = socket->Send(onconnect_data_.c_str(), onconnect_data_.length());
  ASSERT(sent == onconnect_data_.length());
  UNUSED(sent);
  onconnect_data_.clear();
}
// Hanging-get socket connected: immediately issue the long-poll "wait"
// request for notifications addressed to us.
void PeerConnectionClient::OnHangingGetConnect(talk_base::AsyncSocket* socket) {
  char request[1024];
  sprintfn(request, sizeof(request),
           "GET /wait?peer_id=%i HTTP/1.0\r\n\r\n", my_id_);
  const int len = static_cast<int>(strlen(request));
  const int sent = socket->Send(request, len);
  ASSERT(sent == len);
  UNUSED2(sent, len);
}
// Routes a raw payload from a peer: the BYE sentinel becomes a
// disconnect notification, anything else is forwarded verbatim.
void PeerConnectionClient::OnMessageFromPeer(int peer_id,
                                             const std::string& message) {
  const bool is_bye = message.length() == (sizeof(kByeMessage) - 1) &&
                      message.compare(kByeMessage) == 0;
  if (is_bye) {
    callback_->OnPeerDisconnected(peer_id);
  } else {
    callback_->OnMessageFromPeer(peer_id, message);
  }
}
// Parses the integer value following |header_pattern| within the header
// section (before offset |eoh|) of |data|.  Returns false if the header
// is absent.
bool PeerConnectionClient::GetHeaderValue(const std::string& data,
                                          size_t eoh,
                                          const char* header_pattern,
                                          size_t* value) {
  ASSERT(value != NULL);
  const size_t found = data.find(header_pattern);
  if (found == std::string::npos || found >= eoh)
    return false;
  *value = atoi(&data[found + strlen(header_pattern)]);
  return true;
}
// String overload: extracts the text following |header_pattern| up to
// the end of that header line (or end of headers).  Returns false if
// the header is absent from the header section.
bool PeerConnectionClient::GetHeaderValue(const std::string& data, size_t eoh,
                                          const char* header_pattern,
                                          std::string* value) {
  ASSERT(value != NULL);
  const size_t found = data.find(header_pattern);
  if (found == std::string::npos || found >= eoh)
    return false;
  const size_t begin = found + strlen(header_pattern);
  size_t end = data.find("\r\n", begin);
  if (end == std::string::npos)
    end = eoh;
  value->assign(data.substr(begin, end - begin));
  return true;
}
// Drains |socket| into |data| and returns true once a complete HTTP
// response (headers plus Content-Length body bytes) has accumulated.
// Honors "Connection: close" by closing the socket and synthesizing the
// OnClose notification ourselves.
bool PeerConnectionClient::ReadIntoBuffer(talk_base::AsyncSocket* socket,
                                          std::string* data,
                                          size_t* content_length) {
  LOG(INFO) << __FUNCTION__;
  char buffer[0xffff];
  do {
    int bytes = socket->Recv(buffer, sizeof(buffer));
    if (bytes <= 0)
      break;
    data->append(buffer, bytes);
  } while (true);
  bool ret = false;
  size_t i = data->find("\r\n\r\n");
  if (i != std::string::npos) {
    LOG(INFO) << "Headers received";
    if (GetHeaderValue(*data, i, "\r\nContent-Length: ", content_length)) {
      LOG(INFO) << "Expecting " << *content_length << " bytes.";
      // Headers end at i + 4 (past "\r\n\r\n"); body follows.
      size_t total_response_size = (i + 4) + *content_length;
      if (data->length() >= total_response_size) {
        ret = true;
        std::string should_close;
        const char kConnection[] = "\r\nConnection: ";
        if (GetHeaderValue(*data, i, kConnection, &should_close) &&
            should_close.compare("close") == 0) {
          socket->Close();
          // Since we closed the socket, there was no notification delivered
          // to us.  Compensate by letting ourselves know.
          OnClose(socket, 0);
        }
      } else {
        // We haven't received everything.  Just continue to accept data.
      }
    } else {
      LOG(LS_ERROR) << "No content length field specified by the server.";
    }
  }
  return ret;
}
// Read handler for the control socket.  Handles the sign-in response
// (storing our id and the initial peer list), completes a deferred or
// in-progress sign-out, and starts the hanging get once signed in.
void PeerConnectionClient::OnRead(talk_base::AsyncSocket* socket) {
  LOG(INFO) << __FUNCTION__;
  size_t content_length = 0;
  if (ReadIntoBuffer(socket, &control_data_, &content_length)) {
    size_t peer_id = 0, eoh = 0;
    bool ok = ParseServerResponse(control_data_, content_length, &peer_id,
                                  &eoh);
    if (ok) {
      if (my_id_ == -1) {
        // First response.  Let's store our server assigned ID.
        ASSERT(state_ == SIGNING_IN);
        my_id_ = peer_id;
        ASSERT(my_id_ != -1);
        // The body of the response will be a list of already connected peers.
        if (content_length) {
          size_t pos = eoh + 4;
          while (pos < control_data_.size()) {
            size_t eol = control_data_.find('\n', pos);
            if (eol == std::string::npos)
              break;
            int id = 0;
            std::string name;
            bool connected;
            // Skip our own entry; notify the observer about everyone else.
            if (ParseEntry(control_data_.substr(pos, eol - pos), &name, &id,
                           &connected) && id != my_id_) {
              peers_[id] = name;
              callback_->OnPeerConnected(id, name);
            }
            pos = eol + 1;
          }
        }
        ASSERT(is_connected());
        callback_->OnSignedIn();
      } else if (state_ == SIGNING_OUT) {
        Close();
        callback_->OnDisconnected();
      } else if (state_ == SIGNING_OUT_WAITING) {
        // The pending send finished; issue the deferred sign-out now.
        SignOut();
      }
    }
    control_data_.clear();
    if (state_ == SIGNING_IN) {
      // Sign-in complete: transition to CONNECTED and start long-polling.
      ASSERT(hanging_get_->GetState() == talk_base::Socket::CS_CLOSED);
      state_ = CONNECTED;
      hanging_get_->Connect(server_address_);
    }
  }
}
// Read handler for the long-poll socket.  A response whose Pragma peer
// id equals our own id is a presence notification; anything else is a
// message from that peer.  Re-issues the poll when the socket closed.
void PeerConnectionClient::OnHangingGetRead(talk_base::AsyncSocket* socket) {
  LOG(INFO) << __FUNCTION__;
  size_t content_length = 0;
  if (ReadIntoBuffer(socket, &notification_data_, &content_length)) {
    size_t peer_id = 0, eoh = 0;
    bool ok = ParseServerResponse(notification_data_, content_length,
                                  &peer_id, &eoh);
    if (ok) {
      // Store the position where the body begins.
      size_t pos = eoh + 4;
      if (my_id_ == static_cast<int>(peer_id)) {
        // A notification about a new member or a member that just
        // disconnected.
        int id = 0;
        std::string name;
        bool connected = false;
        if (ParseEntry(notification_data_.substr(pos), &name, &id,
                       &connected)) {
          if (connected) {
            peers_[id] = name;
            callback_->OnPeerConnected(id, name);
          } else {
            peers_.erase(id);
            callback_->OnPeerDisconnected(id);
          }
        }
      } else {
        OnMessageFromPeer(peer_id, notification_data_.substr(pos));
      }
    }
    notification_data_.clear();
  }
  if (hanging_get_->GetState() == talk_base::Socket::CS_CLOSED &&
      state_ == CONNECTED) {
    hanging_get_->Connect(server_address_);
  }
}
// Parses a "<name>,<id>,<connected>" entry.  Succeeds when at least a
// non-empty name was found; |*connected| defaults to false when the
// third field is missing.
bool PeerConnectionClient::ParseEntry(const std::string& entry,
                                      std::string* name,
                                      int* id,
                                      bool* connected) {
  ASSERT(name != NULL);
  ASSERT(id != NULL);
  ASSERT(connected != NULL);
  ASSERT(!entry.empty());
  *connected = false;
  const size_t first_comma = entry.find(',');
  if (first_comma != std::string::npos) {
    name->assign(entry.substr(0, first_comma));
    *id = atoi(&entry[first_comma + 1]);
    const size_t second_comma = entry.find(',', first_comma + 1);
    if (second_comma != std::string::npos)
      *connected = atoi(&entry[second_comma + 1]) != 0;
  }
  return !name->empty();
}
// Extracts the numeric status code from an HTTP status line
// ("HTTP/1.0 200 OK" -> 200), or -1 when no space is found.
int PeerConnectionClient::GetResponseStatus(const std::string& response) {
  const size_t space = response.find(' ');
  if (space == std::string::npos)
    return -1;
  return atoi(&response[space + 1]);
}
// Validates an HTTP response: checks for status 200 (otherwise tears
// the connection down), locates the end of headers (|*eoh|), and reads
// the sender's peer id from the Pragma header into |*peer_id|.
bool PeerConnectionClient::ParseServerResponse(const std::string& response,
                                               size_t content_length,
                                               size_t* peer_id,
                                               size_t* eoh) {
  LOG(INFO) << response;

  // Pass |response| directly; the original called response.c_str(),
  // which built a needless temporary std::string for a const& parameter.
  int status = GetResponseStatus(response);
  if (status != 200) {
    LOG(LS_ERROR) << "Received error from server";
    Close();
    callback_->OnDisconnected();
    return false;
  }

  *eoh = response.find("\r\n\r\n");
  ASSERT(*eoh != std::string::npos);
  if (*eoh == std::string::npos)
    return false;

  *peer_id = -1;

  // See comment in peer_channel.cc for why we use the Pragma header and
  // not e.g. "X-Peer-Id".
  GetHeaderValue(response, *eoh, "\r\nPragma: ", peer_id);

  return true;
}
// Close handler shared by both sockets.  A normal close either restarts
// the long poll or reports the send result; a connection refusal tears
// down the whole session.
void PeerConnectionClient::OnClose(talk_base::AsyncSocket* socket, int err) {
  LOG(INFO) << __FUNCTION__;
  socket->Close();
#ifdef WIN32
  if (err != WSAECONNREFUSED) {
#else
  if (err != ECONNREFUSED) {
#endif
    if (socket == hanging_get_.get()) {
      if (state_ == CONNECTED) {
        // Long poll completed; immediately issue the next one.
        LOG(INFO) << "Issuing a new hanging get";
        hanging_get_->Close();
        hanging_get_->Connect(server_address_);
      }
    } else {
      // Control socket closed: the pending request finished (err == 0 on
      // success).
      callback_->OnMessageSent(err);
    }
  } else {
    LOG(WARNING) << "Failed to connect to the server";
    Close();
    callback_->OnDisconnected();
  }
}

View File

@ -1,109 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
#define PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
#pragma once
#include <map>
#include <string>
#include "talk/base/sigslot.h"
#include "talk/base/physicalsocketserver.h"
#include "talk/base/scoped_ptr.h"
typedef std::map<int, std::string> Peers;
// Callback interface for server and peer events, implemented by the
// application logic.
struct PeerConnectionClientObserver {
  virtual void OnSignedIn() = 0;  // Called when we're logged on.
  virtual void OnDisconnected() = 0;
  virtual void OnPeerConnected(int id, const std::string& name) = 0;
  virtual void OnPeerDisconnected(int peer_id) = 0;
  virtual void OnMessageFromPeer(int peer_id, const std::string& message) = 0;
  // Reports the result of an asynchronous SendToPeer/SendHangUp.
  virtual void OnMessageSent(int err) = 0;
 protected:
  virtual ~PeerConnectionClientObserver() {}
};
// Client for the peerconnection signaling server.  Uses two async
// sockets: a control socket for one-shot requests (sign-in, sign-out,
// message sends) and a "hanging get" socket that long-polls the server
// for notifications and incoming peer messages.
class PeerConnectionClient : public sigslot::has_slots<> {
 public:
  enum State {
    NOT_CONNECTED,
    SIGNING_IN,
    CONNECTED,
    SIGNING_OUT_WAITING,  // Sign-out requested while a send was pending.
    SIGNING_OUT,
  };
  PeerConnectionClient();
  ~PeerConnectionClient();
  // Server-assigned id, or -1 when not signed in.
  int id() const;
  bool is_connected() const;
  const Peers& peers() const;
  void RegisterObserver(PeerConnectionClientObserver* callback);
  bool Connect(const std::string& server, int port,
               const std::string& client_name);
  bool SendToPeer(int peer_id, const std::string& message);
  bool SendHangUp(int peer_id);
  // True while a control-socket request is in flight.
  bool IsSendingMessage();
  bool SignOut();
 protected:
  void Close();
  bool ConnectControlSocket();
  void OnConnect(talk_base::AsyncSocket* socket);
  void OnHangingGetConnect(talk_base::AsyncSocket* socket);
  void OnMessageFromPeer(int peer_id, const std::string& message);
  // Quick and dirty support for parsing HTTP header values.
  bool GetHeaderValue(const std::string& data, size_t eoh,
                      const char* header_pattern, size_t* value);
  bool GetHeaderValue(const std::string& data, size_t eoh,
                      const char* header_pattern, std::string* value);
  // Returns true if the whole response has been read.
  bool ReadIntoBuffer(talk_base::AsyncSocket* socket, std::string* data,
                      size_t* content_length);
  void OnRead(talk_base::AsyncSocket* socket);
  void OnHangingGetRead(talk_base::AsyncSocket* socket);
  // Parses a single line entry in the form "<name>,<id>,<connected>"
  bool ParseEntry(const std::string& entry, std::string* name, int* id,
                  bool* connected);
  int GetResponseStatus(const std::string& response);
  bool ParseServerResponse(const std::string& response, size_t content_length,
                           size_t* peer_id, size_t* eoh);
  void OnClose(talk_base::AsyncSocket* socket, int err);
  PeerConnectionClientObserver* callback_;
  talk_base::SocketAddress server_address_;
  talk_base::scoped_ptr<talk_base::AsyncSocket> control_socket_;
  talk_base::scoped_ptr<talk_base::AsyncSocket> hanging_get_;
  // Request payload sent by OnConnect() when the control socket is up.
  std::string onconnect_data_;
  std::string control_data_;
  std::string notification_data_;
  Peers peers_;
  State state_;
  int my_id_;
};
#endif // PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_

View File

@ -1,59 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Builds the Linux dev-branch peerconnection sample client against
# libjingle_app and the WebRTC capture/system-wrapper modules.
{
  'conditions': [
    ['OS=="linux"', {
      'targets': [
        {
          'target_name': 'peerconnection_client_dev',
          'type': 'executable',
          'sources': [
            'conductor.cc',
            'conductor.h',
            'defaults.cc',
            'defaults.h',
            'linux/main.cc',
            'linux/main_wnd.cc',
            'linux/main_wnd.h',
            'peer_connection_client.cc',
            'peer_connection_client.h',
          ],
          'dependencies': [
            '../../../../libjingle.gyp:libjingle_app',
            '../../../../../../src/modules/modules.gyp:video_capture_module',
            '../../../../../../src/system_wrappers/source/'
            'system_wrappers.gyp:system_wrappers',
            # TODO(tommi): Switch to this and remove specific gtk dependency
            # sections below for cflags and link_settings.
            # '<(DEPTH)/build/linux/system.gyp:gtk',
          ],
          'include_dirs': [
            '../../../',
            '../../../../../../src', # webrtc modules
            #TODO(perkj): Remove when this project is in the correct folder.
            '../../../../../../third_party/libjingle/source/',
          ],
          # The Linux UI is GTK-based.
          'cflags': [
            '<!@(pkg-config --cflags gtk+-2.0)',
          ],
          'link_settings': {
            'ldflags': [
              '<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
            ],
            'libraries': [
              '<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
              '-lX11',
              '-lXext',
            ],
          },
        },
      ], # targets
    }, ], # OS="linux"
  ], # conditions
}

View File

@ -1,231 +0,0 @@
<html>
<head>
<title>PeerConnection server test page</title>
<script>
var request = null;       // XHR used for the sign-in request.
var hangingGet = null;    // Long-poll XHR waiting for server notifications.
var localName;            // Name this client signed in with.
var server;               // Base URL of the peerconnection server.
var my_id = -1;           // Peer id assigned by the server; -1 = signed out.
var other_peers = {};     // Maps peer id -> peer name for known peers.
var message_counter = 0;  // Generates unique DOM ids per displayed message.
// Appends a line of text to the on-page debug log.
function trace(txt) {
  document.getElementById("debug").innerHTML += txt + "<br>";
}
// Handles a presence notification ("<name>,<id>,<connected>") from the
// server; remembers the peer while its connected flag is non-zero.
function handleServerNotification(data) {
  trace("Server notification: " + data);
  var fields = data.split(',');
  var connected = parseInt(fields[2]) != 0;
  if (connected)
    other_peers[parseInt(fields[1])] = fields[0];
}
// Displays an incoming peer message as a collapsible log entry.  In
// loopback mode the message is echoed back with its SDP type flipped
// from OFFER to ANSWER.
function handlePeerMessage(peer_id, data) {
  ++message_counter;
  var str = "Message from '" + other_peers[peer_id] + "'&nbsp;";
  str += "<span id='toggle_" + message_counter + "' onclick='toggleMe(this);' ";
  str += "style='cursor: pointer'>+</span><br>";
  str += "<blockquote id='msg_" + message_counter + "' style='display:none'>";
  str += data + "</blockquote>";
  trace(str);
  if (document.getElementById("loopback").checked) {
    // Fix: offer_tag was referenced without ever being defined, which
    // threw a ReferenceError and broke loopback mode.
    var offer_tag = "\"SDP\" : \"OFFER\"";
    var answer_tag = "\"SDP\" : \"ANSWER\"";
    data = data.replace(offer_tag, answer_tag);
    sendToPeer(peer_id, data);
  }
}
// Returns the named response header of |r| parsed as an integer, or -1
// when the header is absent or empty.
function GetIntHeader(r, name) {
  var val = r.getResponseHeader(name);
  if (val == null || !val.length)
    return -1;
  return parseInt(val);
}
// Completion handler for the long poll: dispatches the payload either as
// a server presence notification (Pragma header equals our id) or as a
// peer message, then re-issues the poll while still signed in.
function hangingGetCallback() {
  try {
    if (hangingGet.readyState != 4)
      return;
    if (hangingGet.status != 200) {
      trace("server error: " + hangingGet.statusText);
      disconnect();
    } else {
      // The Pragma response header carries the sender's peer id.
      var peer_id = GetIntHeader(hangingGet, "Pragma");
      if (peer_id == my_id) {
        handleServerNotification(hangingGet.responseText);
      } else {
        handlePeerMessage(peer_id, hangingGet.responseText);
      }
    }
    if (hangingGet) {
      hangingGet.abort();
      hangingGet = null;
    }
    if (my_id != -1)
      window.setTimeout(startHangingGet, 0);
  } catch (e) {
    trace("Hanging get error: " + e.description);
  }
}
// Issues the next long-poll ("hanging GET") request to the server.
function startHangingGet() {
  try {
    hangingGet = new XMLHttpRequest();
    hangingGet.onreadystatechange = hangingGetCallback;
    hangingGet.ontimeout = onHangingGetTimeout;
    var url = server + "/wait?peer_id=" + my_id;
    hangingGet.open("GET", url, true);
    hangingGet.send();
  } catch (e) {
    trace("error" + e.description);
  }
}
// Restarts the long poll after a timeout, as long as we are signed in.
function onHangingGetTimeout() {
  trace("hanging get timeout. issuing again.");
  hangingGet.abort();
  hangingGet = null;
  if (my_id != -1) {
    window.setTimeout(startHangingGet, 0);
  }
}
// Handles the /sign_in response: line 0 carries our own "name,id" pair,
// the remaining lines list the peers already signed in.
function signInCallback() {
  try {
    if (request.readyState != 4 || request.status != 200) {
      return;
    }
    var peers = request.responseText.split("\n");
    my_id = parseInt(peers[0].split(',')[1]);
    trace("My id: " + my_id);
    for (var i = 1; i < peers.length; ++i) {
      if (peers[i].length > 0) {
        trace("Peer " + i + ": " + peers[i]);
        var fields = peers[i].split(',');
        other_peers[parseInt(fields[1])] = fields[0];
      }
    }
    startHangingGet();
    request = null;
  } catch (e) {
    trace("error: " + e.description);
  }
}
// Registers localName with the server; signInCallback finishes the job.
function signIn() {
  try {
    request = new XMLHttpRequest();
    request.onreadystatechange = signInCallback;
    var url = server + "/sign_in?" + localName;
    request.open("GET", url, true);
    request.send();
  } catch (e) {
    trace("error: " + e.description);
  }
}
// Relays |data| to |peer_id| through the server with a synchronous POST.
function sendToPeer(peer_id, data) {
  if (my_id == -1) {
    alert("Not connected");
    return;
  }
  if (peer_id == my_id) {
    alert("Can't send a message to oneself :)");
    return;
  }
  var xhr = new XMLHttpRequest();
  var url = server + "/message?peer_id=" + my_id + "&to=" + peer_id;
  xhr.open("POST", url, false);
  xhr.setRequestHeader("Content-Type", "text/plain");
  xhr.send(data);
  xhr = null;
}
// Validates the name field, toggles the UI into connected mode, and
// starts the sign-in handshake.
function connect() {
  localName = document.getElementById("local").value.toLowerCase();
  server = document.getElementById("server").value.toLowerCase();
  if (!localName.length) {
    alert("I need a name please.");
    document.getElementById("local").focus();
    return;
  }
  document.getElementById("connect").disabled = true;
  document.getElementById("disconnect").disabled = false;
  document.getElementById("send").disabled = false;
  signIn();
}
// Aborts any in-flight requests, signs out synchronously, and resets
// the UI to its disconnected state.
function disconnect() {
  if (request) {
    request.abort();
    request = null;
  }
  if (hangingGet) {
    hangingGet.abort();
    hangingGet = null;
  }
  if (my_id != -1) {
    request = new XMLHttpRequest();
    request.open("GET", server + "/sign_out?peer_id=" + my_id, false);
    request.send();
    request = null;
    my_id = -1;
  }
  document.getElementById("connect").disabled = false;
  document.getElementById("disconnect").disabled = true;
  document.getElementById("send").disabled = true;
}
// Sign out cleanly when the page is closed or navigated away from.
window.onbeforeunload = disconnect;
// Reads the message form fields and forwards the text to the chosen peer.
function send() {
  var text = document.getElementById("message").value;
  var peer_id = parseInt(document.getElementById("peer_id").value);
  if (!text.length || peer_id == 0) {
    alert("No text supplied or invalid peer id");
    return;
  }
  sendToPeer(peer_id, text);
}
// Expands or collapses the message blockquote paired with the clicked
// +/- toggle span (ids differ only by their "toggle"/"msg" prefix).
function toggleMe(obj) {
  var msgId = obj.id.replace("toggle", "msg");
  var msgElem = document.getElementById(msgId);
  var expanding = (obj.innerText == "+");
  obj.innerText = expanding ? "-" : "+";
  msgElem.style.display = expanding ? "block" : "none";
}
</script>
</head>
<body>
Server: <input type="text" id="server" value="http://localhost:8888" /><br>
<input type="checkbox" id="loopback" checked="checked"/> Loopback (just send
received messages right back)<br>
Your name: <input type="text" id="local" value="my_name"/>
<button id="connect" onclick="connect();">Connect</button>
<button disabled="true" id="disconnect"
onclick="disconnect();">Disconnect</button>
<br>
<table><tr><td>
Target peer id: <input type="text" id="peer_id" size="3"/></td><td>
Message: <input type="text" id="message"/></td><td>
<button disabled="true" id="send" onclick="send();">Send</button>
</td></tr></table>
<button onclick="document.getElementById('debug').innerHTML='';">
Clear log</button>
<pre id="debug">
</pre>
<br><hr>
</body>
</html>

View File

@ -1,92 +0,0 @@
// Copyright 2010 Google Inc. All Rights Reserved,
//
// Author: Justin Uberti (juberti@google.com)
#ifndef TALK_P2P_CLIENT_FAKEPORTALLOCATOR_H_
#define TALK_P2P_CLIENT_FAKEPORTALLOCATOR_H_
#include <string>
#include "talk/base/basicpacketsocketfactory.h"
#include "talk/base/scoped_ptr.h"
#include "talk/p2p/base/portallocator.h"
#include "talk/p2p/base/udpport.h"
namespace talk_base {
class SocketFactory;
class Thread;
}
namespace cricket {
// Test-only PortAllocatorSession: instead of real candidate discovery it
// creates exactly one local UDP port on 127.0.0.1 (0x7F000001) and signals
// that port's candidates once its address is ready.
class FakePortAllocatorSession : public PortAllocatorSession {
 public:
  FakePortAllocatorSession(talk_base::Thread* worker_thread,
                           talk_base::PacketSocketFactory* factory,
                           const std::string& name,
                           const std::string& session_type)
      : PortAllocatorSession(0), worker_thread_(worker_thread),
        factory_(factory), name_(name),
        network_("network", "unittest", 0x7F000001, 0),
        port_(NULL), running_(false) {
  }

  // Lazily creates the single UDP port; subsequent calls are no-ops.
  virtual void GetInitialPorts() {
    if (!port_.get()) {
      port_.reset(cricket::UDPPort::Create(worker_thread_, factory_,
                                           &network_, network_.ip(), 0, 0));
      AddPort(port_.get());
    }
  }

  // "Getting all ports" is simulated with a plain flag; no work is done.
  virtual void StartGetAllPorts() { running_ = true; }
  virtual void StopGetAllPorts() { running_ = false; }
  virtual bool IsGettingAllPorts() { return running_; }

  // Configures |port|, subscribes to its address-ready signal, kicks off
  // address preparation, and announces the port to observers.
  void AddPort(cricket::Port* port) {
    port->set_name(name_);
    port->set_preference(1.0);
    port->set_generation(0);
    port->SignalAddressReady.connect(
        this, &FakePortAllocatorSession::OnAddressReady);
    port->PrepareAddress();
    SignalPortReady(this, port);
  }

  // Relays the port's candidate list once its local address is resolved.
  void OnAddressReady(cricket::Port* port) {
    SignalCandidatesReady(this, port->candidates());
  }

 private:
  talk_base::Thread* worker_thread_;
  talk_base::PacketSocketFactory* factory_;  // Not owned.
  std::string name_;
  talk_base::Network network_;
  talk_base::scoped_ptr<cricket::Port> port_;  // The one fake port, owned.
  bool running_;
};
// Test-only PortAllocator that hands out FakePortAllocatorSessions.
class FakePortAllocator : public cricket::PortAllocator {
 public:
  // If |factory| is NULL, a BasicPacketSocketFactory is created and owned.
  FakePortAllocator(talk_base::Thread* worker_thread,
                    talk_base::PacketSocketFactory* factory)
      : worker_thread_(worker_thread), factory_(factory) {
    if (!factory_) {
      owned_factory_.reset(
          new talk_base::BasicPacketSocketFactory(worker_thread_));
      factory_ = owned_factory_.get();
    }
  }

  // Returns a new fake session; caller takes ownership.
  virtual cricket::PortAllocatorSession* CreateSession(
      const std::string &name, const std::string &session_type) {
    return new FakePortAllocatorSession(worker_thread_, factory_, name,
                                        session_type);
  }

 private:
  talk_base::Thread* worker_thread_;
  talk_base::PacketSocketFactory* factory_;  // Points at owned_factory_ or caller's.
  talk_base::scoped_ptr<talk_base::BasicPacketSocketFactory> owned_factory_;
};
} // namespace cricket
#endif // TALK_P2P_CLIENT_FAKEPORTALLOCATOR_H_

File diff suppressed because it is too large Load Diff

View File

@ -1,515 +0,0 @@
/*
* libjingle
* Copyright 2004--2007, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_SESSION_PHONE_CHANNEL_H_
#define TALK_SESSION_PHONE_CHANNEL_H_
#include <string>
#include <vector>
#include "talk/base/asyncudpsocket.h"
#include "talk/base/criticalsection.h"
#include "talk/base/network.h"
#include "talk/base/sigslot.h"
#include "talk/p2p/client/socketmonitor.h"
#include "talk/p2p/base/session.h"
#include "talk/session/phone/audiomonitor.h"
#include "talk/session/phone/mediachannel.h"
#include "talk/session/phone/mediaengine.h"
#include "talk/session/phone/mediamonitor.h"
#include "talk/session/phone/rtcpmuxfilter.h"
#include "talk/session/phone/srtpfilter.h"
namespace webrtc {
class VideoCaptureModule;
}
namespace cricket {
class MediaContentDescription;
struct CryptoParams;
// Ids of the messages BaseChannel and its subclasses post to marshal work
// between the signaling thread and the worker thread (see OnMessage).
// NOTE(review): the gaps in the numbering (7, 16-17, 21) look like retired
// message ids -- confirm before reusing a value.
enum {
  MSG_ENABLE = 1,
  MSG_DISABLE = 2,
  MSG_MUTE = 3,
  MSG_UNMUTE = 4,
  MSG_SETREMOTECONTENT = 5,
  MSG_SETLOCALCONTENT = 6,
  MSG_EARLYMEDIATIMEOUT = 8,
  MSG_PRESSDTMF = 9,
  MSG_SETRENDERER = 10,
  MSG_ADDSTREAM = 11,
  MSG_REMOVESTREAM = 12,
  MSG_SETRINGBACKTONE = 13,
  MSG_PLAYRINGBACKTONE = 14,
  MSG_SETMAXSENDBANDWIDTH = 15,
  MSG_SETRTCPCNAME = 18,
  MSG_SENDINTRAFRAME = 19,
  MSG_REQUESTINTRAFRAME = 20,
  MSG_RTPPACKET = 22,
  MSG_RTCPPACKET = 23,
  MSG_CHANNEL_ERROR = 24,
  MSG_ENABLECPUADAPTATION = 25,
  MSG_DISABLECPUADAPTATION = 26,
  MSG_SCALEVOLUME = 27
};
// BaseChannel contains logic common to voice and video, including
// enable/mute, marshaling calls to a worker thread, and
// connection and media monitors.
class BaseChannel
    : public talk_base::MessageHandler, public sigslot::has_slots<>,
      public MediaChannel::NetworkInterface {
 public:
  // |thread| is the worker thread; |rtcp| selects whether a separate RTCP
  // transport channel is used. Init() must be called before use.
  BaseChannel(talk_base::Thread* thread, MediaEngineInterface* media_engine,
              MediaChannel* channel, BaseSession* session,
              const std::string& content_name, bool rtcp);
  virtual ~BaseChannel();
  bool Init(TransportChannel* transport_channel,
            TransportChannel* rtcp_transport_channel);

  // Simple accessors.
  talk_base::Thread* worker_thread() const { return worker_thread_; }
  BaseSession* session() const { return session_; }
  const std::string& content_name() { return content_name_; }
  TransportChannel* transport_channel() const {
    return transport_channel_;
  }
  TransportChannel* rtcp_transport_channel() const {
    return rtcp_transport_channel_;
  }
  bool enabled() const { return enabled_; }
  // True once SRTP has been activated by key negotiation.
  bool secure() const { return srtp_filter_.IsActive(); }

  // Channel control
  bool SetRtcpCName(const std::string& cname);
  bool SetLocalContent(const MediaContentDescription* content,
                       ContentAction action);
  bool SetRemoteContent(const MediaContentDescription* content,
                        ContentAction action);
  bool SetMaxSendBandwidth(int max_bandwidth);
  bool Enable(bool enable);
  bool Mute(bool mute);

  // Multiplexing
  bool RemoveStream(uint32 ssrc);

  // Monitoring
  void StartConnectionMonitor(int cms);
  void StopConnectionMonitor();

  void set_srtp_signal_silent_time(uint32 silent_time) {
    srtp_filter_.set_signal_silent_time(silent_time);
  }

  // Sink registration: mirrors outgoing (Send) / incoming (Recv) packets to
  // |sink|. disconnect-before-connect keeps a sink from being hooked twice;
  // the critical sections guard the signals across threads.
  template <class T>
  void RegisterSendSink(T* sink,
                        void (T::*OnPacket)(const void*, size_t, bool)) {
    talk_base::CritScope cs(&signal_send_packet_cs_);
    SignalSendPacket.disconnect(sink);
    SignalSendPacket.connect(sink, OnPacket);
  }
  void UnregisterSendSink(sigslot::has_slots<>* sink) {
    talk_base::CritScope cs(&signal_send_packet_cs_);
    SignalSendPacket.disconnect(sink);
  }
  bool HasSendSinks() {
    talk_base::CritScope cs(&signal_send_packet_cs_);
    return !SignalSendPacket.is_empty();
  }
  template <class T>
  void RegisterRecvSink(T* sink,
                        void (T::*OnPacket)(const void*, size_t, bool)) {
    talk_base::CritScope cs(&signal_recv_packet_cs_);
    SignalRecvPacket.disconnect(sink);
    SignalRecvPacket.connect(sink, OnPacket);
  }
  void UnregisterRecvSink(sigslot::has_slots<>* sink) {
    talk_base::CritScope cs(&signal_recv_packet_cs_);
    SignalRecvPacket.disconnect(sink);
  }
  bool HasRecvSinks() {
    talk_base::CritScope cs(&signal_recv_packet_cs_);
    return !SignalRecvPacket.is_empty();
  }

 protected:
  // NOTE(review): the _w suffix appears to mean "runs on the worker thread"
  // (cf. the class comment about marshaling calls to a worker thread) --
  // confirm against the .cc file.
  MediaEngineInterface* media_engine() const { return media_engine_; }
  virtual MediaChannel* media_channel() const { return media_channel_; }
  void set_rtcp_transport_channel(TransportChannel* transport);
  bool writable() const { return writable_; }
  bool was_ever_writable() const { return was_ever_writable_; }
  bool has_local_content() const { return has_local_content_; }
  bool has_remote_content() const { return has_remote_content_; }
  void set_has_local_content(bool has) { has_local_content_ = has; }
  void set_has_remote_content(bool has) { has_remote_content_ = has; }
  bool muted() const { return muted_; }
  talk_base::Thread* signaling_thread() { return session_->signaling_thread(); }
  SrtpFilter* srtp_filter() { return &srtp_filter_; }
  bool rtcp() const { return rtcp_; }

  // Message-queue helpers for marshaling onto the worker thread.
  void Send(uint32 id, talk_base::MessageData *pdata = NULL);
  void Post(uint32 id, talk_base::MessageData *pdata = NULL);
  void PostDelayed(int cmsDelay, uint32 id = 0,
                   talk_base::MessageData *pdata = NULL);
  void Clear(uint32 id = talk_base::MQID_ANY,
             talk_base::MessageList* removed = NULL);
  void FlushRtcpMessages();

  // NetworkInterface implementation, called by MediaEngine
  virtual bool SendPacket(talk_base::Buffer* packet);
  virtual bool SendRtcp(talk_base::Buffer* packet);
  virtual int SetOption(SocketType type, talk_base::Socket::Option o, int val);

  // From TransportChannel
  void OnWritableState(TransportChannel* channel);
  virtual void OnChannelRead(TransportChannel* channel, const char* data,
                             size_t len);
  bool PacketIsRtcp(const TransportChannel* channel, const char* data,
                    size_t len);
  bool SendPacket(bool rtcp, talk_base::Buffer* packet);
  void HandlePacket(bool rtcp, talk_base::Buffer* packet);

  // Setting the send codec based on the remote description.
  void OnSessionState(BaseSession* session, BaseSession::State state);
  void OnRemoteDescriptionUpdate(BaseSession* session);

  void EnableMedia_w();
  void DisableMedia_w();
  void MuteMedia_w();
  void UnmuteMedia_w();
  void ChannelWritable_w();
  void ChannelNotWritable_w();

  // Payload for stream add/remove messages.
  struct StreamMessageData : public talk_base::MessageData {
    StreamMessageData(uint32 s1, uint32 s2) : ssrc1(s1), ssrc2(s2) {}
    uint32 ssrc1;
    uint32 ssrc2;
  };
  virtual void RemoveStream_w(uint32 ssrc) = 0;
  virtual void ChangeState() = 0;

  struct SetRtcpCNameData : public talk_base::MessageData {
    explicit SetRtcpCNameData(const std::string& cname)
        : cname(cname), result(false) {}
    std::string cname;
    bool result;
  };
  bool SetRtcpCName_w(const std::string& cname);

  struct SetContentData : public talk_base::MessageData {
    SetContentData(const MediaContentDescription* content,
                   ContentAction action)
        : content(content), action(action), result(false) {}
    const MediaContentDescription* content;
    ContentAction action;
    bool result;
  };

  // Gets the content appropriate to the channel (audio or video).
  virtual const MediaContentDescription* GetFirstContent(
      const SessionDescription* sdesc) = 0;
  virtual bool SetLocalContent_w(const MediaContentDescription* content,
                                 ContentAction action) = 0;
  virtual bool SetRemoteContent_w(const MediaContentDescription* content,
                                  ContentAction action) = 0;
  bool SetSrtp_w(const std::vector<CryptoParams>& params, ContentAction action,
                 ContentSource src);
  bool SetRtcpMux_w(bool enable, ContentAction action, ContentSource src);

  struct SetBandwidthData : public talk_base::MessageData {
    explicit SetBandwidthData(int value) : value(value), result(false) {}
    int value;
    bool result;
  };
  bool SetMaxSendBandwidth_w(int max_bandwidth);

  // From MessageHandler
  virtual void OnMessage(talk_base::Message *pmsg);

  // Handled in derived classes
  virtual void OnConnectionMonitorUpdate(SocketMonitor *monitor,
      const std::vector<ConnectionInfo> &infos) = 0;

 private:
  sigslot::signal3<const void*, size_t, bool> SignalSendPacket;
  sigslot::signal3<const void*, size_t, bool> SignalRecvPacket;
  talk_base::CriticalSection signal_send_packet_cs_;
  talk_base::CriticalSection signal_recv_packet_cs_;

  talk_base::Thread *worker_thread_;
  MediaEngineInterface *media_engine_;
  BaseSession *session_;
  MediaChannel *media_channel_;
  std::string content_name_;
  bool rtcp_;
  TransportChannel *transport_channel_;
  TransportChannel *rtcp_transport_channel_;
  SrtpFilter srtp_filter_;
  RtcpMuxFilter rtcp_mux_filter_;
  talk_base::scoped_ptr<SocketMonitor> socket_monitor_;
  bool enabled_;
  bool writable_;
  bool was_ever_writable_;
  bool has_local_content_;
  bool has_remote_content_;
  bool muted_;
};
// VoiceChannel is a specialization that adds support for early media, DTMF,
// and input/output level monitoring.
// Audio specialization of BaseChannel: early media, ringback tones, DTMF,
// output scaling, and input/output level monitoring.
class VoiceChannel : public BaseChannel {
 public:
  VoiceChannel(talk_base::Thread *thread, MediaEngineInterface *media_engine,
               VoiceMediaChannel *channel, BaseSession *session,
               const std::string& content_name, bool rtcp);
  ~VoiceChannel();
  bool Init();

  // downcasts a MediaChannel
  virtual VoiceMediaChannel* media_channel() const {
    return static_cast<VoiceMediaChannel*>(BaseChannel::media_channel());
  }

  // Add an incoming stream with the specified SSRC.
  bool AddStream(uint32 ssrc);

  bool SetRingbackTone(const void* buf, int len);
  void SetEarlyMedia(bool enable);
  // This signal is emitted when we have gone a period of time without
  // receiving early media. When received, a UI should start playing its
  // own ringing sound
  sigslot::signal1<VoiceChannel*> SignalEarlyMediaTimeout;

  bool PlayRingbackTone(uint32 ssrc, bool play, bool loop);
  bool PressDTMF(int digit, bool playout);
  bool SetOutputScaling(uint32 ssrc, double left, double right);

  // Monitoring functions
  sigslot::signal2<VoiceChannel*, const std::vector<ConnectionInfo> &>
      SignalConnectionMonitor;
  void StartMediaMonitor(int cms);
  void StopMediaMonitor();
  sigslot::signal2<VoiceChannel*, const VoiceMediaInfo&> SignalMediaMonitor;

  void StartAudioMonitor(int cms);
  void StopAudioMonitor();
  bool IsAudioMonitorRunning() const;
  sigslot::signal2<VoiceChannel*, const AudioInfo&> SignalAudioMonitor;

  int GetInputLevel_w();
  int GetOutputLevel_w();
  void GetActiveStreams_w(AudioInfo::StreamList* actives);

  // Signal errors from VoiceMediaChannel. Arguments are:
  // ssrc(uint32), and error(VoiceMediaChannel::Error).
  sigslot::signal3<VoiceChannel*, uint32, VoiceMediaChannel::Error>
      SignalMediaError;

 private:
  // Payload structs for messages marshaled to the worker thread.
  struct SetRingbackToneMessageData : public talk_base::MessageData {
    SetRingbackToneMessageData(const void* b, int l)
        : buf(b),
          len(l),
          result(false) {
    }
    const void* buf;
    int len;
    bool result;
  };
  struct PlayRingbackToneMessageData : public talk_base::MessageData {
    PlayRingbackToneMessageData(uint32 s, bool p, bool l)
        : ssrc(s),
          play(p),
          loop(l),
          result(false) {
    }
    uint32 ssrc;
    bool play;
    bool loop;
    bool result;
  };
  struct DtmfMessageData : public talk_base::MessageData {
    DtmfMessageData(int d, bool p)
        : digit(d),
          playout(p),
          result(false) {
    }
    int digit;
    bool playout;
    bool result;
  };
  struct ScaleVolumeMessageData : public talk_base::MessageData {
    ScaleVolumeMessageData(uint32 s, double l, double r)
        : ssrc(s),
          left(l),
          right(r),
          result(false) {
    }
    uint32 ssrc;
    double left;
    double right;
    bool result;
  };

  // overrides from BaseChannel
  virtual void OnChannelRead(TransportChannel* channel,
                             const char *data, size_t len);
  virtual void ChangeState();
  virtual const MediaContentDescription* GetFirstContent(
      const SessionDescription* sdesc);
  virtual bool SetLocalContent_w(const MediaContentDescription* content,
                                 ContentAction action);
  virtual bool SetRemoteContent_w(const MediaContentDescription* content,
                                  ContentAction action);
  void AddStream_w(uint32 ssrc);
  void RemoveStream_w(uint32 ssrc);

  bool SetRingbackTone_w(const void* buf, int len);
  bool PlayRingbackTone_w(uint32 ssrc, bool play, bool loop);
  void HandleEarlyMediaTimeout();
  bool PressDTMF_w(int digit, bool playout);
  bool SetOutputScaling_w(uint32 ssrc, double left, double right);

  virtual void OnMessage(talk_base::Message *pmsg);
  virtual void OnConnectionMonitorUpdate(
      SocketMonitor *monitor, const std::vector<ConnectionInfo> &infos);
  virtual void OnMediaMonitorUpdate(
      VoiceMediaChannel *media_channel, const VoiceMediaInfo& info);
  void OnAudioMonitorUpdate(AudioMonitor *monitor, const AudioInfo& info);
  void OnVoiceChannelError(uint32 ssrc, VoiceMediaChannel::Error error);
  void SendLastMediaError();
  void OnSrtpError(uint32 ssrc, SrtpFilter::Mode mode, SrtpFilter::Error error);

  // Milliseconds without early media before SignalEarlyMediaTimeout fires.
  static const int kEarlyMediaTimeout = 1000;
  bool received_media_;
  talk_base::scoped_ptr<VoiceMediaMonitor> media_monitor_;
  talk_base::scoped_ptr<AudioMonitor> audio_monitor_;
};
// VideoChannel is a specialization for video.
// Video specialization of BaseChannel: rendering, capture device wiring,
// intra-frame requests, and CPU adaptation. Optionally paired with a
// VoiceChannel (e.g. for lip sync -- TODO confirm against the .cc file).
class VideoChannel : public BaseChannel {
 public:
  VideoChannel(talk_base::Thread *thread, MediaEngineInterface *media_engine,
               VideoMediaChannel *channel, BaseSession *session,
               const std::string& content_name, bool rtcp,
               VoiceChannel *voice_channel);
  ~VideoChannel();
  bool Init();

  // downcasts a MediaChannel
  virtual VideoMediaChannel* media_channel() const {
    return static_cast<VideoMediaChannel*>(BaseChannel::media_channel());
  }

  // Add an incoming stream with the specified SSRC.
  bool AddStream(uint32 ssrc, uint32 voice_ssrc);

  bool SetRenderer(uint32 ssrc, VideoRenderer* renderer);

  sigslot::signal2<VideoChannel*, const std::vector<ConnectionInfo> &>
      SignalConnectionMonitor;

  void StartMediaMonitor(int cms);
  void StopMediaMonitor();
  sigslot::signal2<VideoChannel*, const VideoMediaInfo&> SignalMediaMonitor;

  bool SendIntraFrame();
  bool RequestIntraFrame();
  void EnableCpuAdaptation(bool enable);

  sigslot::signal3<VideoChannel*, uint32, VideoMediaChannel::Error>
      SignalMediaError;

  void SetCaptureDevice(uint32 ssrc, webrtc::VideoCaptureModule* camera);
  void SetLocalRenderer(uint32 ssrc, VideoRenderer* renderer);

 private:
  // overrides from BaseChannel
  virtual void ChangeState();
  virtual const MediaContentDescription* GetFirstContent(
      const SessionDescription* sdesc);
  virtual bool SetLocalContent_w(const MediaContentDescription* content,
                                 ContentAction action);
  virtual bool SetRemoteContent_w(const MediaContentDescription* content,
                                  ContentAction action);
  void AddStream_w(uint32 ssrc, uint32 voice_ssrc);
  void RemoveStream_w(uint32 ssrc);

  void SendIntraFrame_w() {
    media_channel()->SendIntraFrame();
  }
  void RequestIntraFrame_w() {
    media_channel()->RequestIntraFrame();
  }
  void EnableCpuAdaptation_w(bool enable) {
    // TODO: The following call will clear all other options, which is
    // OK now since SetOptions is not used in video media channel. In the
    // future, add GetOptions() method and change the options.
    media_channel()->SetOptions(enable ? OPT_CPU_ADAPTATION : 0);
  }

  // Payload for MSG_SETRENDERER, marshaled to the worker thread.
  struct RenderMessageData : public talk_base::MessageData {
    RenderMessageData(uint32 s, VideoRenderer* r) : ssrc(s), renderer(r) {}
    uint32 ssrc;
    VideoRenderer* renderer;
  };

  void SetRenderer_w(uint32 ssrc, VideoRenderer* renderer);

  virtual void OnMessage(talk_base::Message *pmsg);
  virtual void OnConnectionMonitorUpdate(
      SocketMonitor *monitor, const std::vector<ConnectionInfo> &infos);
  virtual void OnMediaMonitorUpdate(
      VideoMediaChannel *media_channel, const VideoMediaInfo& info);
  void OnVideoChannelError(uint32 ssrc, VideoMediaChannel::Error error);
  void OnSrtpError(uint32 ssrc, SrtpFilter::Mode mode, SrtpFilter::Error error);

  VoiceChannel *voice_channel_;  // Not owned.
  VideoRenderer *renderer_;
  talk_base::scoped_ptr<VideoMediaMonitor> media_monitor_;
};
} // namespace cricket
#endif // TALK_SESSION_PHONE_CHANNEL_H_

View File

@ -1,591 +0,0 @@
/*
* libjingle
* Copyright 2004--2005, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/session/phone/mediasession.h"
#include "talk/base/helpers.h"
#include "talk/base/logging.h"
#include "talk/base/scoped_ptr.h"
#include "talk/p2p/base/constants.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/session/phone/cryptoparams.h"
#include "talk/session/phone/srtpfilter.h"
#include "talk/xmpp/constants.h"
namespace {
const char kInline[] = "inline:";
}
namespace cricket {
using talk_base::scoped_ptr;
// Fills |out| with the given tag and cipher suite plus a freshly generated
// random base64 SRTP master key. Returns false if key generation fails.
static bool CreateCryptoParams(int tag, const std::string& cipher,
                               CryptoParams *out) {
  std::string master_key;
  master_key.reserve(SRTP_MASTER_KEY_BASE64_LEN);
  if (!talk_base::CreateRandomString(SRTP_MASTER_KEY_BASE64_LEN,
                                     &master_key)) {
    return false;
  }
  out->tag = tag;
  out->cipher_suite = cipher;
  out->key_params = kInline + master_key;
  return true;
}
#ifdef HAVE_SRTP
// Appends a new CryptoParams for |cipher_suite| to |out|, using the
// element's index as its tag. Returns false if key generation fails.
static bool AddCryptoParams(const std::string& cipher_suite,
                            CryptoParamsVec *out) {
  // Use size_t to match the container's size type (the original implicit
  // size_t -> int conversion triggers narrowing warnings on 64-bit builds);
  // cast explicitly only at the int-tag boundary.
  const size_t size = out->size();
  out->resize(size + 1);
  return CreateCryptoParams(static_cast<int>(size), cipher_suite,
                            &out->at(size));
}
#endif
// For audio, HMAC 32 is prefered because of the low overhead.
// Audio offers HMAC-SHA1-32 first (preferred for its low overhead),
// then HMAC-SHA1-80. Returns false when built without SRTP.
static bool GetSupportedAudioCryptos(CryptoParamsVec* cryptos) {
#ifdef HAVE_SRTP
  if (!AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_32, cryptos)) {
    return false;
  }
  return AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_80, cryptos);
#else
  return false;
#endif
}
// Video supports only the 80-bit HMAC suite. Returns false without SRTP.
static bool GetSupportedVideoCryptos(CryptoParamsVec* cryptos) {
#ifdef HAVE_SRTP
  const bool added = AddCryptoParams(CS_AES_CM_128_HMAC_SHA1_80, cryptos);
  return added;
#else
  return false;
#endif
}
// For video support only 80-bit SHA1 HMAC. For audio 32-bit HMAC is
// tolerated because it is low overhead. Pick the crypto in the list
// that is supported.
static bool SelectCrypto(const MediaContentDescription* offer,
CryptoParams *crypto) {
bool audio = offer->type() == MEDIA_TYPE_AUDIO;
const CryptoParamsVec& cryptos = offer->cryptos();
for (CryptoParamsVec::const_iterator i = cryptos.begin();
i != cryptos.end(); ++i) {
if (CS_AES_CM_128_HMAC_SHA1_80 == i->cipher_suite ||
(CS_AES_CM_128_HMAC_SHA1_32 == i->cipher_suite && audio)) {
return CreateCryptoParams(i->tag, i->cipher_suite, crypto);
}
}
return false;
}
// Returns the StreamParams in |params_vec| whose name equals |name|,
// or NULL if there is none.
static const StreamParams* FindStreamParamsByName(
    const StreamParamsVec& params_vec,
    const std::string& name) {
  StreamParamsVec::const_iterator it;
  for (it = params_vec.begin(); it != params_vec.end(); ++it) {
    if (name == it->name) {
      return &*it;
    }
  }
  return NULL;
}
// Returns the first StreamParams in |params_vec| carrying |cname|,
// or NULL if no stream uses that CNAME.
static const StreamParams* FindFirstStreamParamsByCname(
    const StreamParamsVec& params_vec,
    const std::string& cname) {
  StreamParamsVec::const_iterator it;
  for (it = params_vec.begin(); it != params_vec.end(); ++it) {
    if (it->cname == cname) {
      return &*it;
    }
  }
  return NULL;
}
// Returns the first StreamParams whose ssrc list contains |ssrc|,
// or NULL if no stream carries it.
static const StreamParams* FindStreamParamsBySsrc(
    const StreamParamsVec& params_vec,
    uint32 ssrc) {
  for (StreamParamsVec::const_iterator stream = params_vec.begin();
       stream != params_vec.end(); ++stream) {
    const std::vector<uint32>& ssrc_list = stream->ssrcs;
    for (size_t i = 0; i < ssrc_list.size(); ++i) {
      if (ssrc_list[i] == ssrc) {
        return &*stream;
      }
    }
  }
  return NULL;
}
// Generates a new CNAME or the CNAME of an already existing StreamParams
// if a StreamParams exist for another Stream in streams with sync_label
// sync_label.
// Produces a CNAME for a stream with sync label |synch_label|: reuses the
// CNAME of any already-created stream sharing that label, otherwise makes
// a fresh random 16-char CNAME not used by any stream in |params_vec|.
static bool GenerateCname(const StreamParamsVec& params_vec,
                          const MediaSessionOptions::Streams& streams,
                          const std::string& synch_label,
                          std::string* cname) {
  ASSERT(cname);
  if (!cname)
    return false;
  // First look for an already-synched stream we can borrow a CNAME from.
  for (MediaSessionOptions::Streams::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    if (it->sync_label != synch_label)
      continue;
    const StreamParams* existing =
        FindStreamParamsByName(params_vec, it->name);
    if (existing) {
      *cname = existing->cname;
      return true;
    }
  }
  // No stream to sync with: generate an opaque random CNAME (RFC 6222 --
  // the value is only used for synchronization) that is not yet taken.
  do {
    if (!talk_base::CreateRandomString(16, cname)) {
      ASSERT(false);
      return false;
    }
  } while (FindFirstStreamParamsByCname(params_vec, *cname) != NULL);
  return true;
}
// Generate a new SSRC and make sure it does not exist in params_vec.
// Returns a random nonzero SSRC that no stream in |params_vec| uses yet.
static uint32 GenerateSsrc(const StreamParamsVec& params_vec) {
  uint32 candidate;
  do {
    candidate = talk_base::CreateRandomNonZeroId();
  } while (FindStreamParamsBySsrc(params_vec, candidate) != NULL);
  return candidate;
}
// Finds all StreamParams of all media types and attach them to stream_params.
static void GetCurrentStreamParams(const SessionDescription* sdesc,
StreamParamsVec* stream_params) {
if (!sdesc)
return;
const ContentInfos& contents = sdesc->contents();
for (ContentInfos::const_iterator content = contents.begin();
content != contents.end(); content++) {
if (!IsAudioContent(&*content) && !IsVideoContent(&*content))
continue;
const MediaContentDescription* media =
static_cast<const MediaContentDescription*>(
content->description);
const StreamParamsVec& streams = media->streams();
for (StreamParamsVec::const_iterator it = streams.begin();
it != streams.end(); ++it) {
stream_params->push_back(*it);
}
}
}
// Adds a StreamParams for each Stream in Streams with media type
// media_type to content_description.
// current_parms - All currently known StreamParams of any media type.
// Adds a StreamParams to |content_description| for every requested stream
// of |media_type|. New streams get a fresh SSRC and a CNAME that is shared
// with any synched stream; newly created params are also appended to
// |current_params| so later media types can reuse the same CNAME.
static bool AddStreamParams(
    MediaType media_type,
    const MediaSessionOptions::Streams& streams,
    StreamParamsVec* current_params,
    MediaContentDescription* content_description) {
  for (MediaSessionOptions::Streams::const_iterator stream_it = streams.begin();
       stream_it != streams.end(); ++stream_it) {
    if (stream_it->type != media_type)
      continue;  // Wrong media type.
    const StreamParams* params = FindStreamParamsByName(*current_params,
                                                        stream_it->name);
    if (!params) {
      // This is a new stream.
      // Get a CNAME. Either new or same as one of the other synched streams.
      std::string cname;
      if (!GenerateCname(*current_params, streams, stream_it->sync_label,
                         &cname)) {
        return false;
      }
      uint32 ssrc = GenerateSsrc(*current_params);
      // TODO(perkj): Generate the more complex types of stream_params.
      StreamParams stream_param(stream_it->name, ssrc, cname,
                                stream_it->sync_label);
      content_description->AddStream(stream_param);
      // Store the new StreamParams in current_params.
      // This is necessary so that we can use the CNAME for other media types.
      current_params->push_back(stream_param);
    } else {
      // Known stream: reuse its existing parameters verbatim.
      content_description->AddStream(*params);
    }
  }
  return true;
}
// Records a stream to offer and marks its medium as present in the session.
void MediaSessionOptions::AddStream(MediaType type,
                                    const std::string& name,
                                    const std::string& sync_label) {
  streams.push_back(Stream(type, name, sync_label));
  if (type == MEDIA_TYPE_AUDIO) {
    has_audio = true;
  } else if (type == MEDIA_TYPE_VIDEO) {
    has_video = true;
  }
}
// Removes the stream identified by |type| and |name|. Asserts (debug
// builds) if no such stream exists.
void MediaSessionOptions::RemoveStream(MediaType type,
                                       const std::string& name) {
  // BUG FIX: the previous version erased the element and then evaluated
  // ASSERT(stream_it != streams.end()) on the now-invalidated iterator,
  // which is undefined behavior. Return immediately after a successful
  // erase and assert only when the stream was never found.
  for (Streams::iterator stream_it = streams.begin();
       stream_it != streams.end(); ++stream_it) {
    if (stream_it->type == type && stream_it->name == name) {
      streams.erase(stream_it);
      return;
    }
  }
  ASSERT(false);  // Stream not found.
}
// Creates a factory with empty codec lists; crypto is disabled by default.
MediaSessionDescriptionFactory::MediaSessionDescriptionFactory()
    : secure_(SEC_DISABLED) {
}
// Creates a factory whose audio/video codec lists are seeded from
// |channel_manager|; crypto is disabled by default.
MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
    ChannelManager* channel_manager)
    : secure_(SEC_DISABLED) {
  channel_manager->GetSupportedAudioCodecs(&audio_codecs_);
  channel_manager->GetSupportedVideoCodecs(&video_codecs_);
}
// Convenience overload: builds an offer with no previous description to
// carry state (SSRCs/CNAMEs/cryptos) over from.
SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
    const MediaSessionOptions& options) {
  return CreateOffer(options, NULL);
}
// Builds an offer SessionDescription from |options|. If
// |current_description| is non-NULL (a re-offer within an existing
// session), its crypto parameters and stream params are reused so that
// CNAMEs, SSRCs and keys stay stable. Returns NULL on failure (e.g.
// crypto required but unavailable). Caller owns the returned object.
SessionDescription* MediaSessionDescriptionFactory::CreateOffer(
    const MediaSessionOptions& options,
    const SessionDescription* current_description) {
  scoped_ptr<SessionDescription> offer(new SessionDescription());

  // Collect the StreamParams already in use so new streams can share
  // CNAMEs and avoid SSRC collisions.
  StreamParamsVec current_params;
  GetCurrentStreamParams(current_description, &current_params);

  if (options.has_audio) {
    scoped_ptr<AudioContentDescription> audio(new AudioContentDescription());
    // Offer every audio codec we support, most-preferred first.
    for (AudioCodecs::const_iterator codec = audio_codecs_.begin();
         codec != audio_codecs_.end(); ++codec) {
      audio->AddCodec(*codec);
    }
    audio->SortCodecs();
    if (!AddStreamParams(MEDIA_TYPE_AUDIO, options.streams, &current_params,
                         audio.get())) {
      return NULL;  // Abort, something went seriously wrong.
    }
    if (options.streams.empty()) {
      // TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
      audio->set_ssrc(talk_base::CreateRandomNonZeroId());
    }
    audio->set_rtcp_mux(true);
    audio->set_lang(lang_);

    if (secure() != SEC_DISABLED) {
      CryptoParamsVec audio_cryptos;
      if (current_description) {
        // Copy crypto parameters from the previous offer.
        const ContentInfo* info =
            GetFirstAudioContent(current_description);
        if (info) {
          const AudioContentDescription* desc =
              static_cast<const AudioContentDescription*>(info->description);
          audio_cryptos = desc->cryptos();
        }
      }
      if (audio_cryptos.empty())
        GetSupportedAudioCryptos(&audio_cryptos);  // Generate new cryptos.
      for (CryptoParamsVec::const_iterator crypto = audio_cryptos.begin();
           crypto != audio_cryptos.end(); ++crypto) {
        audio->AddCrypto(*crypto);
      }
      if (secure() == SEC_REQUIRED) {
        if (audio->cryptos().empty()) {
          return NULL;  // Abort, crypto required but none found.
        }
        audio->set_crypto_required(true);
      }
    }
    offer->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio.release());
  }

  // add video codecs, if this is a video call
  if (options.has_video) {
    scoped_ptr<VideoContentDescription> video(new VideoContentDescription());
    for (VideoCodecs::const_iterator codec = video_codecs_.begin();
         codec != video_codecs_.end(); ++codec) {
      video->AddCodec(*codec);
    }
    video->SortCodecs();
    if (!AddStreamParams(MEDIA_TYPE_VIDEO, options.streams, &current_params,
                         video.get())) {
      return NULL;  // Abort, something went seriously wrong.
    }
    if (options.streams.empty()) {
      // TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
      video->set_ssrc(talk_base::CreateRandomNonZeroId());
    }
    // Honor the caller-requested bandwidth cap (kAutoBandwidth by default).
    video->set_bandwidth(options.video_bandwidth);
    video->set_rtcp_mux(true);

    if (secure() != SEC_DISABLED) {
      CryptoParamsVec video_cryptos;
      if (current_description) {
        // Copy crypto parameters from the previous offer.
        const ContentInfo* info =
            GetFirstVideoContent(current_description);
        if (info) {
          const VideoContentDescription* desc =
              static_cast<const VideoContentDescription*>(info->description);
          video_cryptos = desc->cryptos();
        }
      }
      if (video_cryptos.empty())
        GetSupportedVideoCryptos(&video_cryptos);  // Generate new crypto.
      for (CryptoParamsVec::const_iterator crypto = video_cryptos.begin();
           crypto != video_cryptos.end(); ++crypto) {
        video->AddCrypto(*crypto);
      }
      if (secure() == SEC_REQUIRED) {
        if (video->cryptos().empty()) {
          return NULL;  // Abort, crypto required but none found.
        }
        video->set_crypto_required(true);
      }
    }
    offer->AddContent(CN_VIDEO, NS_JINGLE_RTP, video.release());
  }
  return offer.release();
}
// Convenience overload: creates an answer with no current session
// description. See the three-argument overload.
SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
    const SessionDescription* offer,
    const MediaSessionOptions& options) {
  return CreateAnswer(offer, options, NULL);
}
// Builds an answer to |offer| according to |options|. Codecs are the
// intersection of the offered codecs and our supported codecs; crypto
// parameters from |current_description| (may be NULL) are reused when they
// still match the offer. Returns NULL when crypto is required (by either
// side) but cannot be negotiated. Caller owns the returned object.
SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
    const SessionDescription* offer, const MediaSessionOptions& options,
    const SessionDescription* current_description) {
  // The answer contains the intersection of the codecs in the offer with the
  // codecs we support, ordered by our local preference. As indicated by
  // XEP-0167, we retain the same payload ids from the offer in the answer.
  scoped_ptr<SessionDescription> accept(new SessionDescription());

  // Existing StreamParams, so CNAMEs/SSRCs stay stable across re-answers.
  StreamParamsVec current_params;
  GetCurrentStreamParams(current_description, &current_params);

  const ContentInfo* audio_content = GetFirstAudioContent(offer);
  if (audio_content && options.has_audio) {
    const AudioContentDescription* audio_offer =
        static_cast<const AudioContentDescription*>(audio_content->description);
    scoped_ptr<AudioContentDescription> audio_accept(
        new AudioContentDescription());
    // Keep every offered codec that we also support, using the offerer's
    // payload id for each match.
    for (AudioCodecs::const_iterator ours = audio_codecs_.begin();
         ours != audio_codecs_.end(); ++ours) {
      for (AudioCodecs::const_iterator theirs = audio_offer->codecs().begin();
           theirs != audio_offer->codecs().end(); ++theirs) {
        if (ours->Matches(*theirs)) {
          AudioCodec negotiated(*ours);
          negotiated.id = theirs->id;
          audio_accept->AddCodec(negotiated);
        }
      }
    }
    audio_accept->SortCodecs();
    if (!AddStreamParams(MEDIA_TYPE_AUDIO, options.streams, &current_params,
                         audio_accept.get())) {
      return NULL;  // Abort, something went seriously wrong.
    }
    if (options.streams.empty()) {
      // TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
      audio_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
    }
    // Mirror the offerer's rtcp-mux choice.
    audio_accept->set_rtcp_mux(audio_offer->rtcp_mux());

    if (secure() != SEC_DISABLED) {
      CryptoParams crypto;
      if (SelectCrypto(audio_offer, &crypto)) {
        if (current_description) {
          // Check if this crypto already exist in the previous
          // session description. Use it in that case.
          const ContentInfo* info =
              GetFirstAudioContent(current_description);
          if (info) {
            const AudioContentDescription* desc =
                static_cast<const AudioContentDescription*>(info->description);
            const CryptoParamsVec& cryptos = desc->cryptos();
            for (CryptoParamsVec::const_iterator it = cryptos.begin();
                 it != cryptos.end(); ++it) {
              if (crypto.Matches(*it)) {
                crypto = *it;
                break;
              }
            }
          }
        }
        audio_accept->AddCrypto(crypto);
      }
    }
    // Reject the session if crypto was mandatory but none was negotiated.
    if (audio_accept->cryptos().empty() &&
        (audio_offer->crypto_required() || secure() == SEC_REQUIRED)) {
      return NULL;  // Fails the session setup.
    }
    accept->AddContent(audio_content->name, audio_content->type,
                       audio_accept.release());
  } else {
    LOG(LS_INFO) << "Audio is not supported in answer";
  }

  const ContentInfo* video_content = GetFirstVideoContent(offer);
  if (video_content && options.has_video) {
    const VideoContentDescription* video_offer =
        static_cast<const VideoContentDescription*>(video_content->description);
    scoped_ptr<VideoContentDescription> video_accept(
        new VideoContentDescription());
    // Same intersection/payload-id logic as audio above.
    for (VideoCodecs::const_iterator ours = video_codecs_.begin();
         ours != video_codecs_.end(); ++ours) {
      for (VideoCodecs::const_iterator theirs = video_offer->codecs().begin();
           theirs != video_offer->codecs().end(); ++theirs) {
        if (ours->Matches(*theirs)) {
          VideoCodec negotiated(*ours);
          negotiated.id = theirs->id;
          video_accept->AddCodec(negotiated);
        }
      }
    }
    if (!AddStreamParams(MEDIA_TYPE_VIDEO, options.streams, &current_params,
                         video_accept.get())) {
      return NULL;  // Abort, something went seriously wrong.
    }
    if (options.streams.empty()) {
      // TODO(perkj): Remove this legacy ssrc when all apps use StreamParams.
      video_accept->set_ssrc(talk_base::CreateRandomNonZeroId());
    }
    video_accept->set_bandwidth(options.video_bandwidth);
    video_accept->set_rtcp_mux(video_offer->rtcp_mux());
    video_accept->SortCodecs();

    if (secure() != SEC_DISABLED) {
      CryptoParams crypto;
      if (SelectCrypto(video_offer, &crypto)) {
        if (current_description) {
          // Check if this crypto already exist in the previous
          // session description. Use it in that case.
          const ContentInfo* info = GetFirstVideoContent(current_description);
          if (info) {
            const VideoContentDescription* desc =
                static_cast<const VideoContentDescription*>(info->description);
            const CryptoParamsVec& cryptos = desc->cryptos();
            for (CryptoParamsVec::const_iterator it = cryptos.begin();
                 it != cryptos.end(); ++it) {
              if (crypto.Matches(*it)) {
                crypto = *it;
                break;
              }
            }
          }
        }
        video_accept->AddCrypto(crypto);
      }
    }
    if (video_accept->cryptos().empty() &&
        (video_offer->crypto_required() || secure() == SEC_REQUIRED)) {
      return NULL;  // Fails the session setup.
    }
    accept->AddContent(video_content->name, video_content->type,
                       video_accept.release());
  } else {
    LOG(LS_INFO) << "Video is not supported in answer";
  }
  return accept.release();
}
// Returns true iff |content| is a non-NULL Jingle RTP content whose
// media description carries the given |media_type|.
static bool IsMediaContent(const ContentInfo* content, MediaType media_type) {
  if (content != NULL && content->type == NS_JINGLE_RTP) {
    const MediaContentDescription* media =
        static_cast<const MediaContentDescription*>(content->description);
    return media->type() == media_type;
  }
  return false;
}
// Returns true iff |content| is an RTP audio content. NULL-safe.
bool IsAudioContent(const ContentInfo* content) {
  return IsMediaContent(content, MEDIA_TYPE_AUDIO);
}
// Returns true iff |content| is an RTP video content. NULL-safe.
bool IsVideoContent(const ContentInfo* content) {
  return IsMediaContent(content, MEDIA_TYPE_VIDEO);
}
// Returns the first content of the given |media_type| in |sdesc|, or NULL
// if |sdesc| is NULL or contains no such content.
static const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc,
                                               MediaType media_type) {
  if (sdesc != NULL) {
    const ContentInfos& contents = sdesc->contents();
    for (ContentInfos::const_iterator it = contents.begin();
         it != contents.end(); ++it) {
      const ContentInfo* content = &*it;
      if (IsMediaContent(content, media_type)) {
        return content;
      }
    }
  }
  return NULL;
}
// Returns the first audio content in |sdesc|, or NULL if none. NULL-safe.
const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc) {
  return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO);
}
// Returns the first video content in |sdesc|, or NULL if none. NULL-safe.
const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc) {
  return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO);
}
} // namespace cricket

View File

@ -1,284 +0,0 @@
/*
* libjingle
* Copyright 2004--2005, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Types and classes used in media session descriptions.
#ifndef TALK_SESSION_PHONE_MEDIASESSION_H_
#define TALK_SESSION_PHONE_MEDIASESSION_H_
#include <string>
#include <vector>
#include <algorithm>
#include "talk/session/phone/codec.h"
#include "talk/session/phone/cryptoparams.h"
#include "talk/session/phone/mediachannel.h"
#include "talk/session/phone/streamparams.h"
#include "talk/p2p/base/sessiondescription.h"
namespace cricket {
class ChannelManager;
typedef std::vector<AudioCodec> AudioCodecs;
typedef std::vector<VideoCodec> VideoCodecs;
typedef std::vector<CryptoParams> CryptoParamsVec;
typedef std::vector<StreamParams> StreamParamsVec;
// SEC_ENABLED and SEC_REQUIRED should only be used if the session
// was negotiated over TLS, to protect the inline crypto material
// exchange.
// SEC_DISABLED: No crypto in outgoing offer and answer. Fail any
// offer with crypto required.
// SEC_ENABLED: Crypto in outgoing offer and answer. Fail any offer
// with unsupported required crypto. Crypto set but not
// required in outgoing offer.
// SEC_REQUIRED: Crypto in outgoing offer and answer with
// required='true'. Fail any offer with no or
// unsupported crypto (implicit crypto required='true'
// in the offer.)
// Crypto policy applied when generating offers and answers; see the
// detailed semantics in the comment block above.
enum SecureMediaPolicy {
  SEC_DISABLED,  // No crypto offered; reject offers that require it.
  SEC_ENABLED,   // Crypto offered but not marked required.
  SEC_REQUIRED   // Crypto offered with required='true'; reject without it.
};
// Kind of media a content description or stream carries.
enum MediaType {
  MEDIA_TYPE_AUDIO,
  MEDIA_TYPE_VIDEO
};
// Options to control how session descriptions are generated.
const int kAutoBandwidth = -1;  // Sentinel: let the engine pick the bandwidth.
struct MediaSessionOptions {
  MediaSessionOptions() :
      has_audio(true),  // Audio enabled by default.
      has_video(false),
      is_muc(false),
      video_bandwidth(kAutoBandwidth) {
  }

  // Add a stream with MediaType type and id name.
  // All streams with the same sync_label will get the same CNAME.
  // All names must be unique.
  void AddStream(MediaType type,
                 const std::string& name,
                 const std::string& sync_label);
  // Removes the stream with the given type and name; the stream must exist.
  void RemoveStream(MediaType type, const std::string& name);

  bool has_audio;
  bool has_video;
  // presumably true for multi-user conference (MUC) sessions — TODO confirm.
  bool is_muc;
  // bps. -1 == auto.
  int video_bandwidth;

  // One media source to be sent, identified by type and a unique name.
  struct Stream {
    Stream(MediaType type,
           const std::string& name,
           const std::string& sync_label)
        : type(type), name(name), sync_label(sync_label) {
    }
    MediaType type;
    std::string name;
    std::string sync_label;  // Streams sharing this label share a CNAME.
  };

  typedef std::vector<Stream> Streams;
  Streams streams;
};
// "content" (as used in XEP-0166) descriptions for voice and video.
// Abstract base for audio/video content descriptions: holds the fields
// common to both (legacy ssrc, rtcp-mux, bandwidth, crypto params, RTP
// header extensions and per-source StreamParams).
class MediaContentDescription : public ContentDescription {
 public:
  MediaContentDescription()
      : ssrc_(0),
        ssrc_set_(false),
        rtcp_mux_(false),
        bandwidth_(kAutoBandwidth),
        crypto_required_(false),
        rtp_header_extensions_set_(false) {
  }

  // MEDIA_TYPE_AUDIO or MEDIA_TYPE_VIDEO, supplied by the subclass.
  virtual MediaType type() const = 0;

  // Legacy single-SSRC accessors; see the TODO(perkj) notes in
  // mediasession.cc — superseded by streams()/AddStream().
  uint32 ssrc() const { return ssrc_; }
  bool ssrc_set() const { return ssrc_set_; }
  void set_ssrc(uint32 ssrc) {
    ssrc_ = ssrc;
    ssrc_set_ = true;
  }

  // Whether RTP and RTCP are multiplexed on one transport.
  bool rtcp_mux() const { return rtcp_mux_; }
  void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; }

  // Bandwidth cap in bps; kAutoBandwidth (-1) means automatic.
  int bandwidth() const { return bandwidth_; }
  void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; }

  // SRTP crypto suites offered/accepted for this content.
  const std::vector<CryptoParams>& cryptos() const { return cryptos_; }
  void AddCrypto(const CryptoParams& params) {
    cryptos_.push_back(params);
  }
  // True when crypto is mandatory (required='true' in the signaling).
  bool crypto_required() const { return crypto_required_; }
  void set_crypto_required(bool crypto) {
    crypto_required_ = crypto;
  }

  const std::vector<RtpHeaderExtension>& rtp_header_extensions() const {
    return rtp_header_extensions_;
  }
  void AddRtpHeaderExtension(const RtpHeaderExtension& ext) {
    rtp_header_extensions_.push_back(ext);
    rtp_header_extensions_set_ = true;
  }
  void ClearRtpHeaderExtensions() {
    rtp_header_extensions_.clear();
    rtp_header_extensions_set_ = true;
  }
  // We can't always tell if an empty list of header extensions is
  // because the other side doesn't support them, or just isn't hooked up to
  // signal them. For now we assume an empty list means no signaling, but
  // provide the ClearRtpHeaderExtensions method to allow "no support" to be
  // clearly indicated (i.e. when derived from other information).
  bool rtp_header_extensions_set() const {
    return rtp_header_extensions_set_;
  }

  // Per-source stream parameters (SSRCs, CNAME, sync label).
  const StreamParamsVec& streams() const {
    return streams_;
  }
  void AddStream(const StreamParams& stream) {
    streams_.push_back(stream);
  }

 protected:
  uint32 ssrc_;
  bool ssrc_set_;
  bool rtcp_mux_;
  int bandwidth_;
  std::vector<CryptoParams> cryptos_;
  bool crypto_required_;
  std::vector<RtpHeaderExtension> rtp_header_extensions_;
  bool rtp_header_extensions_set_;
  StreamParamsVec streams_;
};
// Codec-list implementation shared by the audio and video content
// descriptions; C is the codec type (AudioCodec or VideoCodec).
template <class C>
class MediaContentDescriptionImpl : public MediaContentDescription {
 public:
  // Orders codecs by descending preference, for SortCodecs().
  struct PreferenceSort {
    // Takes the codecs by const reference; the previous version took them
    // by value, copying two codec objects on every comparison during sort.
    bool operator()(const C& a, const C& b) const {
      return a.preference > b.preference;
    }
  };

  const std::vector<C>& codecs() const { return codecs_; }
  void AddCodec(const C& codec) {
    codecs_.push_back(codec);
  }
  // Sorts codecs_ so the most-preferred codec comes first.
  void SortCodecs() {
    std::sort(codecs_.begin(), codecs_.end(), PreferenceSort());
  }

 private:
  std::vector<C> codecs_;
};
// Audio-specific content description: adds conference mode and language.
class AudioContentDescription : public MediaContentDescriptionImpl<AudioCodec> {
 public:
  AudioContentDescription() :
      conference_mode_(false) {}

  virtual MediaType type() const { return MEDIA_TYPE_AUDIO; }

  // Whether this audio content is part of a conference-style call.
  bool conference_mode() const { return conference_mode_; }
  void set_conference_mode(bool enable) {
    conference_mode_ = enable;
  }

  // Language of the audio content; populated from the factory's lang_
  // in CreateOffer.
  const std::string &lang() const { return lang_; }
  void set_lang(const std::string &lang) { lang_ = lang; }

 private:
  bool conference_mode_;
  std::string lang_;
};
// Video-specific content description; no fields beyond the shared base.
class VideoContentDescription : public MediaContentDescriptionImpl<VideoCodec> {
 public:
  virtual MediaType type() const { return MEDIA_TYPE_VIDEO; }
};
// Creates media session descriptions according to the supplied codecs and
// other fields, as well as the supplied per-call options.
// When creating answers, performs the appropriate negotiation
// of the various fields to determine the proper result.
class MediaSessionDescriptionFactory {
 public:
  // Default ctor; use methods below to set configuration.
  MediaSessionDescriptionFactory();
  // Helper, to allow configuration to be loaded from a ChannelManager.
  explicit MediaSessionDescriptionFactory(ChannelManager* manager);

  // Codecs placed in offers and matched against incoming offers in answers.
  const AudioCodecs& audio_codecs() const { return audio_codecs_; }
  void set_audio_codecs(const AudioCodecs& codecs) { audio_codecs_ = codecs; }
  const VideoCodecs& video_codecs() const { return video_codecs_; }
  void set_video_codecs(const VideoCodecs& codecs) { video_codecs_ = codecs; }
  // Crypto policy applied to all generated offers and answers.
  SecureMediaPolicy secure() const { return secure_; }
  void set_secure(SecureMediaPolicy s) { secure_ = s; }

  // TODO(perkj) Deprecate this version of CreateOffer and
  // force to use the second alternative.
  SessionDescription* CreateOffer(
      const MediaSessionOptions& options);
  // Creates an offer; |current_description| (may be NULL) describes the
  // ongoing session so crypto params and stream params can stay stable
  // across re-offers. Caller owns the returned description.
  SessionDescription* CreateOffer(
      const MediaSessionOptions& options,
      const SessionDescription* current_description);
  // TODO(perkj) Deprecate this version of CreateAnswer and
  // force to use the second alternative.
  SessionDescription* CreateAnswer(
      const SessionDescription* offer,
      const MediaSessionOptions& options);
  // Creates an answer to |offer|; see CreateOffer for the meaning of
  // |current_description|. Caller owns the returned description.
  SessionDescription* CreateAnswer(
      const SessionDescription* offer,
      const MediaSessionOptions& options,
      const SessionDescription* current_description);

 private:
  AudioCodecs audio_codecs_;
  VideoCodecs video_codecs_;
  SecureMediaPolicy secure_;
  std::string lang_;  // Applied to audio contents in CreateOffer.
};
// Convenience functions.
bool IsAudioContent(const ContentInfo* content);
bool IsVideoContent(const ContentInfo* content);
const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc);
const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc);
} // namespace cricket
#endif // TALK_SESSION_PHONE_MEDIASESSION_H_

View File

@ -1,105 +0,0 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains structures for describing SSRCs from a media source such
// as a MediaStreamTrack when it is sent across an RTP session. Multiple media
// sources may be sent across the same RTP session; each of them is
// described by one StreamParams object.
// SsrcGroup is used to describe the relationship between the SSRCs that
// are used for this media source.
// E.g.: Consider a source that is sent as 3 simulcast streams.
// Let the simulcast elements have SSRCs 10, 20, 30.
// Let each simulcast element use FEC and let the protection packets have
// SSRCs 11, 21, 31.
// To describe this, 4 SsrcGroups are needed.
// StreamParams would then contain ssrcs = {10,11,20,21,30,31} and
// ssrc_groups = {{SIM,{10,20,30}}, {FEC,{10,11}}, {FEC,{20,21}}, {FEC,{30,31}}}.
// Please see RFC 5576.
#ifndef TALK_SESSION_PHONE_STREAMPARAMS_H_
#define TALK_SESSION_PHONE_STREAMPARAMS_H_
#include <string>
#include <vector>
namespace cricket {
// A group of SSRCs that share a semantic, per RFC 5576 "ssrc-group".
struct SsrcGroup {
  SsrcGroup(const std::string& usage, const std::vector<uint32>& ssrcs)
      : semantics(usage), ssrcs(ssrcs) {
  }

  bool operator==(const SsrcGroup& other) const {
    return (semantics == other.semantics && ssrcs == other.ssrcs);
  }
  bool operator!=(const SsrcGroup &other) const {
    return !(*this == other);
  }

  std::string semantics;  // e.g. FID, FEC, SIM.
  std::vector<uint32> ssrcs;  // SSRCs of this type.
};
// Describes one media source sent over an RTP session: all SSRCs used for
// it, how those SSRCs relate (ssrc_groups), its RTCP CNAME, and the label
// used to synchronize it with other sources.
struct StreamParams {
  StreamParams(const std::string& name,
               const std::vector<uint32>& ssrcs,
               const std::vector<SsrcGroup>& ssrc_groups,
               const std::string& cname,
               const std::string& sync_label)
      : name(name),
        ssrcs(ssrcs),
        ssrc_groups(ssrc_groups),
        cname(cname),
        sync_label(sync_label) {
  }
  // Convenience ctor for a source with a single SSRC and no SSRC groups.
  StreamParams(const std::string& name,
               uint32 ssrc,
               const std::string& cname,
               const std::string& sync_label)
      : name(name),
        cname(cname),
        sync_label(sync_label) {
    ssrcs.push_back(ssrc);
  }
  bool operator==(const StreamParams& other) const {
    // Fixed: the previous version compared sync_label with itself
    // (always true), so two StreamParams differing only in sync_label
    // incorrectly compared equal.
    return (name == other.name && ssrcs == other.ssrcs &&
            ssrc_groups == other.ssrc_groups && cname == other.cname &&
            sync_label == other.sync_label);
  }
  bool operator!=(const StreamParams &other) const {
    return !(*this == other);
  }

  std::string name;  // Unique name of this source.
  std::vector<uint32> ssrcs;  // All SSRCs for this source
  std::vector<SsrcGroup> ssrc_groups;  // e.g. FID, FEC, SIM
  std::string cname;  // RTCP CNAME
  std::string sync_label;  // Friendly name of cname.
};
} // namespace cricket
#endif // TALK_SESSION_PHONE_STREAMPARAMS_H_