Adds trunk/talk folder of revision 359 from libjingle's Google Code repository to

trunk/talk


git-svn-id: http://webrtc.googlecode.com/svn/trunk@4318 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
henrike@webrtc.org 2013-07-10 00:45:36 +00:00
parent 6aa6229953
commit 28e2075280
1067 changed files with 275209 additions and 0 deletions

1
talk/OWNERS Normal file
View File

@ -0,0 +1 @@
henrike@webrtc.org

View File

@ -0,0 +1,53 @@
/*
* libjingle
* Copyright 2004--2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/audiotrack.h"
#include <string>
namespace webrtc {

// Kind string returned by AudioTrack::kind(); identifies this track as audio.
static const char kAudioTrackKind[] = "audio";

// Constructs an AudioTrack with the given |label| and |audio_source|.
// The AudioTrackRenderer is created eagerly and owned by this track.
AudioTrack::AudioTrack(const std::string& label,
                       AudioSourceInterface* audio_source)
    : MediaStreamTrack<AudioTrackInterface>(label),
      audio_source_(audio_source),
      renderer_(new AudioTrackRenderer()) {
}

// Implements MediaStreamTrack; always reports the "audio" kind.
std::string AudioTrack::kind() const {
  return kAudioTrackKind;
}

// Factory: wraps a new AudioTrack in a ref-counted object so callers hold it
// via talk_base::scoped_refptr.
talk_base::scoped_refptr<AudioTrack> AudioTrack::Create(
    const std::string& id, AudioSourceInterface* source) {
  talk_base::RefCountedObject<AudioTrack>* track =
      new talk_base::RefCountedObject<AudioTrack>(id, source);
  return track;
}

}  // namespace webrtc

View File

@ -0,0 +1,66 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_AUDIOTRACK_H_
#define TALK_APP_WEBRTC_AUDIOTRACK_H_
#include "talk/app/webrtc/audiotrackrenderer.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/mediastreamtrack.h"
#include "talk/app/webrtc/notifier.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/scoped_ref_ptr.h"
namespace webrtc {
// Local audio track backed by an AudioSourceInterface. Owns an
// AudioTrackRenderer used to associate the track with a WebRtc voice channel
// (see audiotrackrenderer.h).
class AudioTrack : public MediaStreamTrack<AudioTrackInterface> {
 public:
  // Creates a ref-counted AudioTrack with the given |id| and |source|.
  // NOTE(review): |source| appears to be allowed to be NULL -- confirm
  // against callers.
  static talk_base::scoped_refptr<AudioTrack> Create(
      const std::string& id, AudioSourceInterface* source);

  // Returns the source supplied at construction time.
  virtual AudioSourceInterface* GetSource() const {
    return audio_source_.get();
  }

  // Returns the renderer through which the voice channel id is communicated.
  virtual cricket::AudioRenderer* FrameInput() {
    return renderer_.get();
  }

  // Implement MediaStreamTrack
  virtual std::string kind() const;

 protected:
  AudioTrack(const std::string& label, AudioSourceInterface* audio_source);

 private:
  talk_base::scoped_refptr<AudioSourceInterface> audio_source_;
  talk_base::scoped_ptr<AudioTrackRenderer> renderer_;  // Owned by the track.
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_AUDIOTRACK_H_

View File

@ -0,0 +1,48 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/audiotrackrenderer.h"
#include "talk/base/common.h"
namespace webrtc {

// Starts with no channel assigned; -1 means "unset".
AudioTrackRenderer::AudioTrackRenderer() : channel_id_(-1) {
}

AudioTrackRenderer::~AudioTrackRenderer() {
}

// Assigns the voice channel id. Per the class contract (see header), this
// may be called only once -- the ASSERT enforces that the id is still unset.
void AudioTrackRenderer::SetChannelId(int channel_id) {
  ASSERT(channel_id_ == -1);
  channel_id_ = channel_id;
}

// Returns the id set via SetChannelId(), or -1 if none has been set yet.
int AudioTrackRenderer::GetChannelId() const {
  return channel_id_;
}

}  // namespace webrtc

View File

@ -0,0 +1,55 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_AUDIOTRACKRENDERER_H_
#define TALK_APP_WEBRTC_AUDIOTRACKRENDERER_H_
#include "talk/media/base/audiorenderer.h"
namespace webrtc {
// Class used for AudioTrack to get the ID of WebRtc voice channel that
// the AudioTrack is connecting to.
// Each AudioTrack owns a AudioTrackRenderer instance.
// SetChannelID() should be called only when a AudioTrack is added to a
// MediaStream and should not be changed afterwards.
class AudioTrackRenderer : public cricket::AudioRenderer {
 public:
  // Constructs with no channel assigned (id is -1).
  AudioTrackRenderer();
  ~AudioTrackRenderer();

  // Implements cricket::AudioRenderer.
  // Sets the channel id; must be called at most once (asserted in the
  // implementation).
  virtual void SetChannelId(int channel_id);
  // Returns the assigned channel id, or -1 if not yet set.
  virtual int GetChannelId() const;

 private:
  int channel_id_;  // WebRtc voice channel id; -1 while unset.
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_AUDIOTRACKRENDERER_H_

View File

@ -0,0 +1,295 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/datachannel.h"
#include <string>
#include "talk/app/webrtc/webrtcsession.h"
#include "talk/base/logging.h"
#include "talk/base/refcount.h"
namespace webrtc {
// Cap on packets held in |queued_data_| before an observer is attached.
// Declared const: this is a fixed limit, not mutable state (the original
// `static size_t` was accidentally writable).
static const size_t kMaxQueuedDataPackets = 100;
// Creates a DataChannel and runs Init() on it. Returns NULL when |config| is
// rejected for the session's data channel type (see Init() for the rules).
talk_base::scoped_refptr<DataChannel> DataChannel::Create(
    WebRtcSession* session,
    const std::string& label,
    const DataChannelInit* config) {
  talk_base::scoped_refptr<DataChannel> channel(
      new talk_base::RefCountedObject<DataChannel>(session, label));
  if (!channel->Init(config)) {
    return NULL;
  }
  return channel;
}

// Starts in kConnecting with no send/receive SSRCs; those arrive later via
// SetSendSsrc()/SetReceiveSsrc() as negotiation completes.
DataChannel::DataChannel(WebRtcSession* session, const std::string& label)
    : label_(label),
      observer_(NULL),
      state_(kConnecting),
      was_ever_writable_(false),
      session_(session),
      data_session_(NULL),
      send_ssrc_set_(false),
      send_ssrc_(0),
      receive_ssrc_set_(false),
      receive_ssrc_(0) {
}
// Validates |config| against the session's data channel type and stores it.
// - RTP channels support none of the reliability options: setting reliable,
//   id, maxRetransmits, or maxRetransmitTime is an error.
// - SCTP channels reject values below -1 (-1 means "unset") and reject
//   setting both maxRetransmits and maxRetransmitTime at the same time.
// A NULL |config| is accepted and keeps the default DataChannelInit.
bool DataChannel::Init(const DataChannelInit* config) {
  if (config) {
    if (session_->data_channel_type() == cricket::DCT_RTP &&
        (config->reliable ||
         config->id != -1 ||
         config->maxRetransmits != -1 ||
         config->maxRetransmitTime != -1)) {
      LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
                    << "invalid DataChannelInit.";
      return false;
    } else if (session_->data_channel_type() == cricket::DCT_SCTP) {
      if (config->id < -1 ||
          config->maxRetransmits < -1 ||
          config->maxRetransmitTime < -1) {
        LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
                      << "invalid DataChannelInit.";
        return false;
      }
      if (config->maxRetransmits != -1 && config->maxRetransmitTime != -1) {
        LOG(LS_ERROR) <<
            "maxRetransmits and maxRetransmitTime should not be both set.";
        return false;
      }
    }
    config_ = *config;
  }
  return true;
}

// Negotiation is complete when both sides agree: either both SSRCs are set
// (channel opening) or neither is set (channel closing).
bool DataChannel::HasNegotiationCompleted() {
  return send_ssrc_set_ == receive_ssrc_set_;
}
DataChannel::~DataChannel() {
  // Packets still queued for delivery are dropped and freed.
  ClearQueuedData();
}

// Attaches |observer| and immediately flushes any packets that were queued
// while no observer was registered.
void DataChannel::RegisterObserver(DataChannelObserver* observer) {
  observer_ = observer;
  DeliverQueuedData();
}

void DataChannel::UnregisterObserver() {
  observer_ = NULL;
}

// RTP data channels are always unreliable; SCTP channels are reliable unless
// a retransmit limit (count or time) was configured.
bool DataChannel::reliable() const {
  if (session_->data_channel_type() == cricket::DCT_RTP) {
    return false;
  } else {
    return config_.maxRetransmits == -1 &&
           config_.maxRetransmitTime == -1;
  }
}

// Outbound buffering is not implemented (Send() forwards directly), so the
// buffered amount is always zero.
uint64 DataChannel::buffered_amount() const {
  return 0;
}
// Starts the closing procedure: clears the send SSRC and lets UpdateState()
// drive the channel to kClosed once teardown negotiation completes.
void DataChannel::Close() {
  if (state_ == kClosed)
    return;
  send_ssrc_ = 0;
  send_ssrc_set_ = false;
  SetState(kClosing);
  UpdateState();
}

// Sends |buffer| to the remote peer via the session's data channel.
// Returns false when the channel is not open. There is no local buffering;
// a send the transport refuses is reported as failure.
bool DataChannel::Send(const DataBuffer& buffer) {
  if (state_ != kOpen) {
    return false;
  }
  cricket::SendDataParams send_params;
  send_params.ssrc = send_ssrc_;
  // Reliability parameters only apply to SCTP data channels.
  if (session_->data_channel_type() == cricket::DCT_SCTP) {
    send_params.ordered = config_.ordered;
    send_params.max_rtx_count = config_.maxRetransmits;
    send_params.max_rtx_ms = config_.maxRetransmitTime;
  }
  send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
  cricket::SendDataResult send_result;
  // TODO(pthatcher): Use send_result.would_block for buffering.
  return session_->data_channel()->SendData(
      send_params, buffer.data, &send_result);
}
// Sets the SSRC used to receive data from the underlying data engine.
// The SSRC may only be set once; later calls are ignored (the ASSERT checks
// the repeat is consistent for RTP, or matches the send SSRC).
void DataChannel::SetReceiveSsrc(uint32 receive_ssrc) {
  if (receive_ssrc_set_) {
    ASSERT(session_->data_channel_type() == cricket::DCT_RTP ||
           receive_ssrc_ == send_ssrc_);
    return;
  }
  receive_ssrc_ = receive_ssrc;
  receive_ssrc_set_ = true;
  UpdateState();
}

// The remote peer requested that this channel be closed.
void DataChannel::RemotePeerRequestClose() {
  DoClose();
}

// Sets the SSRC used to send data on the underlying data engine.
// Like the receive SSRC, this may only be set once.
void DataChannel::SetSendSsrc(uint32 send_ssrc) {
  if (send_ssrc_set_) {
    ASSERT(session_->data_channel_type() == cricket::DCT_RTP ||
           receive_ssrc_ == send_ssrc_);
    return;
  }
  send_ssrc_ = send_ssrc;
  send_ssrc_set_ = true;
  UpdateState();
}

// The underlying data engine is closing.
// This function makes sure the DataChannel is disconnected and changes state
// to kClosed.
void DataChannel::OnDataEngineClose() {
  DoClose();
}

// Shared teardown path: forgets both SSRCs and moves towards kClosed via
// the state machine.
void DataChannel::DoClose() {
  receive_ssrc_set_ = false;
  send_ssrc_set_ = false;
  SetState(kClosing);
  UpdateState();
}
// Central state machine, re-run whenever one of its inputs changes (an SSRC
// is set/cleared, the transport becomes writable, or Close() is called):
// - kConnecting -> kOpen once both SSRCs are set, the channel is attached to
//   the data session, and the transport has been writable at least once.
// - kClosing -> kClosed once detached and both SSRCs are cleared.
// kOpen and kClosed are stable until an external event calls DoClose()/Close().
void DataChannel::UpdateState() {
  switch (state_) {
    case kConnecting: {
      if (HasNegotiationCompleted()) {
        if (!IsConnectedToDataSession()) {
          ConnectToDataSession();
        }
        if (was_ever_writable_) {
          SetState(kOpen);
          // If we have received buffers before the channel got writable.
          // Deliver them now.
          DeliverQueuedData();
        }
      }
      break;
    }
    case kOpen: {
      break;
    }
    case kClosing: {
      if (IsConnectedToDataSession()) {
        DisconnectFromDataSession();
      }
      if (HasNegotiationCompleted()) {
        SetState(kClosed);
      }
      break;
    }
    case kClosed:
      break;
  }
}

// Records the new state and notifies the observer, if one is attached.
void DataChannel::SetState(DataState state) {
  state_ = state;
  if (observer_) {
    observer_->OnStateChange();
  }
}
// Attaches this channel to the session's cricket data channel and subscribes
// to its readiness and incoming-data signals.
void DataChannel::ConnectToDataSession() {
  ASSERT(session_->data_channel() != NULL);
  if (!session_->data_channel()) {
    LOG(LS_ERROR) << "The DataEngine does not exist.";
    return;
  }
  data_session_ = session_->data_channel();
  data_session_->SignalReadyToSendData.connect(this,
                                               &DataChannel::OnChannelReady);
  data_session_->SignalDataReceived.connect(this, &DataChannel::OnDataReceived);
}

// Reverses ConnectToDataSession(): unsubscribes both signals and drops the
// data session pointer. Caller must ensure |data_session_| is non-NULL.
void DataChannel::DisconnectFromDataSession() {
  data_session_->SignalReadyToSendData.disconnect(this);
  data_session_->SignalDataReceived.disconnect(this);
  data_session_ = NULL;
}
void DataChannel::DeliverQueuedData() {
if (was_ever_writable_ && observer_) {
while (!queued_data_.empty()) {
DataBuffer* buffer = queued_data_.front();
observer_->OnMessage(*buffer);
queued_data_.pop();
delete buffer;
}
}
}
void DataChannel::ClearQueuedData() {
while (!queued_data_.empty()) {
DataBuffer* buffer = queued_data_.front();
queued_data_.pop();
delete buffer;
}
}
// Handles a packet arriving from the data engine. Packets for other SSRCs
// are ignored. If the channel cannot deliver yet (transport never writable,
// or no observer attached), the packet is queued; the queue is capped at
// kMaxQueuedDataPackets and cleared wholesale when full.
void DataChannel::OnDataReceived(cricket::DataChannel* channel,
                                 const cricket::ReceiveDataParams& params,
                                 const talk_base::Buffer& payload) {
  if (params.ssrc == receive_ssrc_) {
    // NOTE(review): incoming data is always marked non-binary here --
    // confirm whether params carries a text/binary type that should be used.
    bool binary = false;
    talk_base::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
    if (was_ever_writable_ && observer_) {
      observer_->OnMessage(*buffer.get());
    } else {
      // ">=" keeps the queue at no more than kMaxQueuedDataPackets entries;
      // the previous ">" was off by one and allowed a 101st packet.
      if (queued_data_.size() >= kMaxQueuedDataPackets) {
        ClearQueuedData();
      }
      queued_data_.push(buffer.release());
    }
  }
}
// Latches |was_ever_writable_| the first time the transport reports itself
// writable and re-runs the state machine (this is what lets kConnecting
// advance to kOpen).
void DataChannel::OnChannelReady(bool writable) {
  if (was_ever_writable_ || !writable) {
    return;
  }
  was_ever_writable_ = true;
  UpdateState();
}
} // namespace webrtc

View File

@ -0,0 +1,154 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_DATACHANNEL_H_
#define TALK_APP_WEBRTC_DATACHANNEL_H_
#include <string>
#include <queue>
#include "talk/app/webrtc/datachannelinterface.h"
#include "talk/app/webrtc/proxy.h"
#include "talk/base/scoped_ref_ptr.h"
#include "talk/base/sigslot.h"
#include "talk/session/media/channel.h"
namespace webrtc {
class WebRtcSession;
// DataChannel is a an implementation of the DataChannelInterface based on
// libjingle's data engine. It provides an implementation of unreliable data
// channels. Currently this class is specifically designed to use RtpDataEngine,
// and will changed to use SCTP in the future.
// DataChannel states:
// kConnecting: The channel has been created but SSRC for sending and receiving
// has not yet been set and the transport might not yet be ready.
// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc
// and a remote SSRC set by call to UpdateReceiveSsrc and the transport
// has been writable once.
// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc
// has been called with SSRC==0
// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with
// SSRC==0.
class DataChannel : public DataChannelInterface,
                    public sigslot::has_slots<> {
 public:
  // Creates and Init()s a DataChannel; returns NULL if |config| fails
  // validation for the session's data channel type.
  static talk_base::scoped_refptr<DataChannel> Create(
      WebRtcSession* session,
      const std::string& label,
      const DataChannelInit* config);

  // DataChannelInterface implementation.
  virtual void RegisterObserver(DataChannelObserver* observer);
  virtual void UnregisterObserver();
  virtual std::string label() const { return label_; }
  virtual bool reliable() const;
  virtual int id() const { return config_.id; }
  virtual uint64 buffered_amount() const;
  virtual void Close();
  virtual DataState state() const { return state_; }
  virtual bool Send(const DataBuffer& buffer);

  // Set the SSRC this channel should use to receive data from the
  // underlying data engine.
  void SetReceiveSsrc(uint32 receive_ssrc);
  // The remote peer requested that this channel should be closed.
  void RemotePeerRequestClose();
  // Set the SSRC this channel should use to send data on the
  // underlying data engine. |send_ssrc| == 0 means that the channel is no
  // longer part of the session negotiation.
  void SetSendSsrc(uint32 send_ssrc);
  // Called if the underlying data engine is closing.
  void OnDataEngineClose();

 protected:
  DataChannel(WebRtcSession* session, const std::string& label);
  virtual ~DataChannel();
  // Validates and stores |config|; false on invalid settings.
  bool Init(const DataChannelInit* config);
  // True once the send/receive SSRC flags agree (both set or both unset).
  bool HasNegotiationCompleted();

  // Sigslots from cricket::DataChannel
  void OnDataReceived(cricket::DataChannel* channel,
                      const cricket::ReceiveDataParams& params,
                      const talk_base::Buffer& payload);
  void OnChannelReady(bool writable);

 private:
  void DoClose();
  // Drives the kConnecting/kOpen/kClosing/kClosed state machine.
  void UpdateState();
  void SetState(DataState state);
  void ConnectToDataSession();
  void DisconnectFromDataSession();
  bool IsConnectedToDataSession() { return data_session_ != NULL; }
  // Flushes |queued_data_| to the observer once delivery is possible.
  void DeliverQueuedData();
  void ClearQueuedData();

  std::string label_;
  DataChannelInit config_;
  DataChannelObserver* observer_;
  DataState state_;
  // True once the transport has ever reported itself writable.
  bool was_ever_writable_;
  WebRtcSession* session_;
  cricket::DataChannel* data_session_;
  bool send_ssrc_set_;
  uint32 send_ssrc_;
  bool receive_ssrc_set_;
  uint32 receive_ssrc_;
  // Packets received before the channel could deliver them; owned pointers.
  std::queue<DataBuffer*> queued_data_;
};
// Abstract factory for creating DataChannel objects.
class DataChannelFactory {
 public:
  // Returns a new DataChannel, or NULL if |config| is invalid.
  virtual talk_base::scoped_refptr<DataChannel> CreateDataChannel(
      const std::string& label,
      const DataChannelInit* config) = 0;

 protected:
  virtual ~DataChannelFactory() {}
};
// Define proxy for DataChannelInterface.
// NOTE(review): the proxy methods are generated by the macros in
// talk/app/webrtc/proxy.h; presumably they marshal each call onto the
// correct thread -- confirm against proxy.h.
BEGIN_PROXY_MAP(DataChannel)
PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
PROXY_METHOD0(void, UnregisterObserver)
PROXY_CONSTMETHOD0(std::string, label)
PROXY_CONSTMETHOD0(bool, reliable)
PROXY_CONSTMETHOD0(int, id)
PROXY_CONSTMETHOD0(DataState, state)
PROXY_CONSTMETHOD0(uint64, buffered_amount)
PROXY_METHOD0(void, Close)
PROXY_METHOD1(bool, Send, const DataBuffer&)
END_PROXY()
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DATACHANNEL_H_

View File

@ -0,0 +1,127 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains interfaces for DataChannels
// http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcdatachannel
#ifndef TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_
#define TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_
#include <string>
#include "talk/base/basictypes.h"
#include "talk/base/buffer.h"
#include "talk/base/refcount.h"
namespace webrtc {
// Application-supplied configuration for a data channel, mirroring the
// fields of the W3C RTCDataChannelInit dictionary (hence the camelCase
// member names). -1 means "unset" for the integer fields.
struct DataChannelInit {
  DataChannelInit() {
    reliable = false;
    ordered = true;
    maxRetransmitTime = -1;
    maxRetransmits = -1;
    negotiated = false;
    id = -1;
  }

  bool reliable;          // Deprecated.
  bool ordered;           // True if ordered delivery is required.
  int maxRetransmitTime;  // The max period of time in milliseconds in which
                          // retransmissions will be sent. After this time, no
                          // more retransmissions will be sent. -1 if unset.
  int maxRetransmits;     // The max number of retransmissions. -1 if unset.
  std::string protocol;   // This is set by the application and opaque to the
                          // WebRTC implementation.
  bool negotiated;        // True if the channel has been externally negotiated
                          // and we do not send an in-band signalling in the
                          // form of an "open" message.
  int id;                 // The stream id, or SID, for SCTP data channels. -1
                          // if unset.
};
// A chunk of application data together with a flag saying whether it is
// binary or UTF-8 text.
struct DataBuffer {
  DataBuffer(const talk_base::Buffer& data, bool binary)
      : data(data),
        binary(binary) {
  }
  // For convenience for unit tests.
  explicit DataBuffer(const std::string& text)
      : data(text.data(), text.length()),
        binary(false) {
  }

  talk_base::Buffer data;
  // Indicates if the received data contains UTF-8 or binary data.
  // Note that the upper layers are left to verify the UTF-8 encoding.
  // TODO(jiayl): prefer to use an enum instead of a bool.
  bool binary;
};
// Callback interface an application implements to observe a data channel.
class DataChannelObserver {
 public:
  // The data channel state has changed.
  virtual void OnStateChange() = 0;
  //  A data buffer was successfully received.
  virtual void OnMessage(const DataBuffer& buffer) = 0;

 protected:
  virtual ~DataChannelObserver() {}
};
// Abstract, ref-counted interface for data channels (see the W3C
// RTCDataChannel spec referenced at the top of this file).
class DataChannelInterface : public talk_base::RefCountInterface {
 public:
  // Channel lifecycle states.
  enum DataState {
    kConnecting,
    kOpen,  // The DataChannel is ready to send data.
    kClosing,
    kClosed
  };

  virtual void RegisterObserver(DataChannelObserver* observer) = 0;
  virtual void UnregisterObserver() = 0;
  // The label attribute represents a label that can be used to distinguish this
  // DataChannel object from other DataChannel objects.
  virtual std::string label() const = 0;
  virtual bool reliable() const = 0;
  // The stream id (SID) for SCTP data channels; -1 if unset.
  virtual int id() const = 0;
  virtual DataState state() const = 0;
  // The buffered_amount returns the number of bytes of application data
  // (UTF-8 text and binary data) that have been queued using SendBuffer but
  // have not yet been transmitted to the network.
  virtual uint64 buffered_amount() const = 0;
  virtual void Close() = 0;
  // Sends |data| to the remote peer.
  virtual bool Send(const DataBuffer& buffer) = 0;

 protected:
  virtual ~DataChannelInterface() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DATACHANNELINTERFACE_H_

View File

@ -0,0 +1,257 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/dtmfsender.h"
#include <ctype.h>
#include <string>
#include "talk/base/logging.h"
#include "talk/base/thread.h"
namespace webrtc {
// Message ids posted to the signaling thread (see DtmfSender::OnMessage).
enum {
  MSG_DO_INSERT_DTMF = 0,
};
// RFC4733
// +-------+--------+------+---------+
// | Event | Code | Type | Volume? |
// +-------+--------+------+---------+
// | 0--9 | 0--9 | tone | yes |
// | * | 10 | tone | yes |
// | # | 11 | tone | yes |
// | A--D | 12--15 | tone | yes |
// +-------+--------+------+---------+
// The "," is a special event defined by the WebRTC spec. It means to delay for
// 2 seconds before processing the next tone. We use -1 as its code.
static const int kDtmfCodeTwoSecondDelay = -1;
static const int kDtmfTwoSecondInMs = 2000;
static const char kDtmfValidTones[] = ",0123456789*#ABCDabcd";
static const char kDtmfTonesTable[] = ",0123456789*#ABCD";
// The duration cannot be more than 6000ms or less than 70ms. The gap between
// tones must be at least 50 ms.
static const int kDtmfDefaultDurationMs = 100;
static const int kDtmfMinDurationMs = 70;
static const int kDtmfMaxDurationMs = 6000;
static const int kDtmfDefaultGapMs = 50;
static const int kDtmfMinGapMs = 50;

// Get DTMF code from the DTMF event character.
// ',' (table index 0) maps to kDtmfCodeTwoSecondDelay (-1); '0'-'9' map to
// 0-9, '*' to 10, '#' to 11 and 'A'-'D' (case-insensitive) to 12-15, per
// the RFC 4733 table above. Returns false for any other character.
bool GetDtmfCode(char tone, int* code) {
  // Convert a-d to A-D.
  char event = toupper(tone);
  // Reject NUL explicitly: strchr() would otherwise match the table's
  // terminating '\0' and produce a bogus code of 16.
  if (event == '\0') {
    return false;
  }
  const char* p = strchr(kDtmfTonesTable, event);
  if (!p) {
    return false;
  }
  *code = p - kDtmfTonesTable - 1;
  return true;
}
// Factory. Returns NULL when |track| or |signaling_thread| is missing.
// |provider| may be NULL; the sender then cannot send (see CanInsertDtmf()).
talk_base::scoped_refptr<DtmfSender> DtmfSender::Create(
    AudioTrackInterface* track,
    talk_base::Thread* signaling_thread,
    DtmfProviderInterface* provider) {
  if (!track || !signaling_thread) {
    return NULL;
  }
  talk_base::scoped_refptr<DtmfSender> dtmf_sender(
      new talk_base::RefCountedObject<DtmfSender>(track, signaling_thread,
                                                  provider));
  return dtmf_sender;
}

// Starts with default duration/gap values; these are overwritten by each
// successful InsertDtmf() call.
DtmfSender::DtmfSender(AudioTrackInterface* track,
                       talk_base::Thread* signaling_thread,
                       DtmfProviderInterface* provider)
    : track_(track),
      observer_(NULL),
      signaling_thread_(signaling_thread),
      provider_(provider),
      duration_(kDtmfDefaultDurationMs),
      inter_tone_gap_(kDtmfDefaultGapMs) {
  ASSERT(track_ != NULL);
  ASSERT(signaling_thread_ != NULL);
  // Watch for the provider's destruction so sending can be stopped before
  // the pointer dangles (see OnProviderDestroyed()).
  if (provider_) {
    ASSERT(provider_->GetOnDestroyedSignal() != NULL);
    provider_->GetOnDestroyedSignal()->connect(
        this, &DtmfSender::OnProviderDestroyed);
  }
}
DtmfSender::~DtmfSender() {
  if (provider_) {
    ASSERT(provider_->GetOnDestroyedSignal() != NULL);
    provider_->GetOnDestroyedSignal()->disconnect(this);
  }
  // Drop any tones still pending on the signaling thread.
  StopSending();
}

void DtmfSender::RegisterObserver(DtmfSenderObserverInterface* observer) {
  observer_ = observer;
}

void DtmfSender::UnregisterObserver() {
  observer_ = NULL;
}

// True when a provider is attached and it can currently send DTMF for this
// sender's track. Must be called on the signaling thread.
bool DtmfSender::CanInsertDtmf() {
  ASSERT(signaling_thread_->IsCurrent());
  if (!provider_) {
    return false;
  }
  return provider_->CanInsertDtmf(track_->id());
}
// Queues |tones| for sending, replacing any tones already queued. Returns
// false if |duration|/|inter_tone_gap| are out of range or DTMF cannot
// currently be sent. Must be called on the signaling thread; the actual
// sending happens asynchronously via DoInsertDtmf().
bool DtmfSender::InsertDtmf(const std::string& tones, int duration,
                            int inter_tone_gap) {
  ASSERT(signaling_thread_->IsCurrent());
  if (duration > kDtmfMaxDurationMs ||
      duration < kDtmfMinDurationMs ||
      inter_tone_gap < kDtmfMinGapMs) {
    LOG(LS_ERROR) << "InsertDtmf is called with invalid duration or tones gap. "
        << "The duration cannot be more than " << kDtmfMaxDurationMs
        << "ms or less than " << kDtmfMinDurationMs << "ms. "
        << "The gap between tones must be at least " << kDtmfMinGapMs << "ms.";
    return false;
  }
  if (!CanInsertDtmf()) {
    LOG(LS_ERROR)
        << "InsertDtmf is called on DtmfSender that can't send DTMF.";
    return false;
  }
  tones_ = tones;
  duration_ = duration;
  inter_tone_gap_ = inter_tone_gap;
  // Clear the previous queue.
  signaling_thread_->Clear(this, MSG_DO_INSERT_DTMF);
  // Kick off a new DTMF task queue.
  signaling_thread_->Post(this, MSG_DO_INSERT_DTMF);
  return true;
}

// Simple accessors for the current configuration and remaining tones.
const AudioTrackInterface* DtmfSender::track() const {
  return track_;
}

std::string DtmfSender::tones() const {
  return tones_;
}

int DtmfSender::duration() const {
  return duration_;
}

int DtmfSender::inter_tone_gap() const {
  return inter_tone_gap_;
}
// Dispatches messages posted to the signaling thread. Only
// MSG_DO_INSERT_DTMF is expected; anything else trips the assert.
void DtmfSender::OnMessage(talk_base::Message* msg) {
  if (msg->message_id == MSG_DO_INSERT_DTMF) {
    DoInsertDtmf();
  } else {
    ASSERT(false);
  }
}
// Processes the next tone from |tones_| and re-posts itself (with a delay)
// to handle the one after it. Once no valid tone remains, fires
// OnToneChange("") and stops.
void DtmfSender::DoInsertDtmf() {
  ASSERT(signaling_thread_->IsCurrent());
  // Get the first DTMF tone from the tone buffer. Unrecognized characters will
  // be ignored and skipped.
  size_t first_tone_pos = tones_.find_first_of(kDtmfValidTones);
  int code = 0;
  if (first_tone_pos == std::string::npos) {
    tones_.clear();
    // Fire an "OnToneChange" event with an empty string and stop.
    if (observer_) {
      observer_->OnToneChange(std::string());
    }
    return;
  } else {
    char tone = tones_[first_tone_pos];
    if (!GetDtmfCode(tone, &code)) {
      // The find_first_of(kDtmfValidTones) should have guaranteed |tone| is
      // a valid DTMF tone.
      ASSERT(false);
    }
  }
  int tone_gap = inter_tone_gap_;
  if (code == kDtmfCodeTwoSecondDelay) {
    // Special case defined by WebRTC - The character ',' indicates a delay of
    // 2 seconds before processing the next character in the tones parameter.
    tone_gap = kDtmfTwoSecondInMs;
  } else {
    if (!provider_) {
      LOG(LS_ERROR) << "The DtmfProvider has been destroyed.";
      return;
    }
    // The provider starts playout of the given tone on the
    // associated RTP media stream, using the appropriate codec.
    if (!provider_->InsertDtmf(track_->id(), code, duration_)) {
      LOG(LS_ERROR) << "The DtmfProvider can no longer send DTMF.";
      return;
    }
    // Wait for the number of milliseconds specified by |duration_|.
    tone_gap += duration_;
  }
  // Fire an "OnToneChange" event with the tone that's just processed.
  if (observer_) {
    observer_->OnToneChange(tones_.substr(first_tone_pos, 1));
  }
  // Erase the unrecognized characters plus the tone that's just processed.
  tones_.erase(0, first_tone_pos + 1);
  // Continue with the next tone.
  signaling_thread_->PostDelayed(tone_gap, this, MSG_DO_INSERT_DTMF);
}
// Slot connected to the provider's destroyed signal. Cancels queued work and
// drops the (now dangling) provider pointer so later calls fail gracefully.
void DtmfSender::OnProviderDestroyed() {
  LOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue.";
  StopSending();
  provider_ = NULL;
}
// Removes every message this sender has queued on the signaling thread,
// discontinuing any tone sequence in progress.
void DtmfSender::StopSending() {
  signaling_thread_->Clear(this);
}
} // namespace webrtc

View File

@ -0,0 +1,138 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_DTMFSENDER_H_
#define TALK_APP_WEBRTC_DTMFSENDER_H_
#include <string>
#include "talk/app/webrtc/dtmfsenderinterface.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/proxy.h"
#include "talk/base/common.h"
#include "talk/base/messagehandler.h"
#include "talk/base/refcount.h"
// DtmfSender is the native implementation of the RTCDTMFSender defined by
// the WebRTC W3C Editor's Draft.
// http://dev.w3.org/2011/webrtc/editor/webrtc.html
namespace talk_base {
class Thread;
}
namespace webrtc {
// This interface is called by DtmfSender to talk to the actual audio channel
// to send DTMF.
class DtmfProviderInterface {
 public:
  // Returns true if the audio track with given id (|track_id|) is capable
  // of sending DTMF. Otherwise returns false.
  virtual bool CanInsertDtmf(const std::string& track_id) = 0;

  // Sends DTMF |code| via the audio track with given id (|track_id|).
  // The |duration| indicates the length of the DTMF tone in ms.
  // Returns true on success and false on failure.
  virtual bool InsertDtmf(const std::string& track_id,
                          int code, int duration) = 0;

  // Returns a |sigslot::signal0<>| signal. The signal should fire before
  // the provider is destroyed, letting DtmfSender drop its raw pointer.
  virtual sigslot::signal0<>* GetOnDestroyedSignal() = 0;

 protected:
  // Destruction through this interface is not supported; deletion happens
  // via the concrete type.
  virtual ~DtmfProviderInterface() {}
};
// Native DtmfSenderInterface implementation. Tone playout is driven by
// messages posted to |signaling_thread_|; CanInsertDtmf/InsertDtmf assert
// they run on that thread.
class DtmfSender
    : public DtmfSenderInterface,
      public sigslot::has_slots<>,
      public talk_base::MessageHandler {
 public:
  // Creates a ref-counted sender that sends DTMF for |track| through
  // |provider|, scheduling work on |signaling_thread|.
  static talk_base::scoped_refptr<DtmfSender> Create(
      AudioTrackInterface* track,
      talk_base::Thread* signaling_thread,
      DtmfProviderInterface* provider);

  // Implements DtmfSenderInterface.
  virtual void RegisterObserver(DtmfSenderObserverInterface* observer) OVERRIDE;
  virtual void UnregisterObserver() OVERRIDE;
  virtual bool CanInsertDtmf() OVERRIDE;
  virtual bool InsertDtmf(const std::string& tones, int duration,
                          int inter_tone_gap) OVERRIDE;
  virtual const AudioTrackInterface* track() const OVERRIDE;
  virtual std::string tones() const OVERRIDE;
  virtual int duration() const OVERRIDE;
  virtual int inter_tone_gap() const OVERRIDE;

 protected:
  DtmfSender(AudioTrackInterface* track,
             talk_base::Thread* signaling_thread,
             DtmfProviderInterface* provider);
  virtual ~DtmfSender();

 private:
  DtmfSender();

  // Implements MessageHandler.
  virtual void OnMessage(talk_base::Message* msg);

  // The DTMF sending task; sends one tone and reposts itself.
  void DoInsertDtmf();

  // Slot for the provider's destroyed signal; clears |provider_|.
  void OnProviderDestroyed();

  // Cancels all queued DTMF messages.
  void StopSending();

  talk_base::scoped_refptr<AudioTrackInterface> track_;
  DtmfSenderObserverInterface* observer_;  // Not owned; may be NULL.
  talk_base::Thread* signaling_thread_;    // Not owned.
  DtmfProviderInterface* provider_;        // Not owned; NULL once destroyed.
  std::string tones_;                      // Tones remaining to be played.
  int duration_;                           // Per-tone duration, ms.
  int inter_tone_gap_;                     // Gap between tones, ms.

  DISALLOW_COPY_AND_ASSIGN(DtmfSender);
};
// Define proxy for DtmfSenderInterface. The generated proxy forwards each
// call (marshalled by the proxy macros) to the real DtmfSender.
BEGIN_PROXY_MAP(DtmfSender)
  PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
  PROXY_METHOD0(void, UnregisterObserver)
  PROXY_METHOD0(bool, CanInsertDtmf)
  PROXY_METHOD3(bool, InsertDtmf, const std::string&, int, int)
  PROXY_CONSTMETHOD0(const AudioTrackInterface*, track)
  PROXY_CONSTMETHOD0(std::string, tones)
  PROXY_CONSTMETHOD0(int, duration)
  PROXY_CONSTMETHOD0(int, inter_tone_gap)
END_PROXY()

// Get DTMF code from the DTMF event character. Returns false for characters
// that are not valid DTMF events.
bool GetDtmfCode(char tone, int* code);
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DTMFSENDER_H_

View File

@ -0,0 +1,356 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/dtmfsender.h"
#include <set>
#include <string>
#include <vector>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/base/gunit.h"
#include "talk/base/logging.h"
#include "talk/base/timeutils.h"
using webrtc::AudioTrackInterface;
using webrtc::AudioTrack;
using webrtc::DtmfProviderInterface;
using webrtc::DtmfSender;
using webrtc::DtmfSenderObserverInterface;
static const char kTestAudioLabel[] = "test_audio_track";
static const int kMaxWaitMs = 3000;
// Records every OnToneChange callback so tests can verify the exact sequence
// of tones reported by the DtmfSender. An empty tone marks completion.
class FakeDtmfObserver : public DtmfSenderObserverInterface {
 public:
  FakeDtmfObserver() : completed_(false) {}

  // Implements DtmfSenderObserverInterface.
  virtual void OnToneChange(const std::string& tone) OVERRIDE {
    LOG(LS_VERBOSE) << "FakeDtmfObserver::OnToneChange '" << tone << "'.";
    tones_.push_back(tone);
    if (tone.empty()) {
      completed_ = true;
    }
  }

  // getters
  const std::vector<std::string>& tones() const {
    return tones_;
  }
  bool completed() const {
    return completed_;
  }

 private:
  std::vector<std::string> tones_;  // Tones in arrival order.
  bool completed_;                  // True once the empty sentinel arrives.
};
// Fake DtmfProviderInterface that records every InsertDtmf call (code,
// duration, and the measured wall-clock gap since the previous call) and
// fires SignalDestroyed from its destructor, as the interface requires.
class FakeDtmfProvider : public DtmfProviderInterface {
 public:
  // One recorded InsertDtmf call.
  struct DtmfInfo {
    DtmfInfo(int code, int duration, int gap)
        : code(code),
          duration(duration),
          gap(gap) {}
    int code;      // DTMF event code passed to InsertDtmf.
    int duration;  // Tone duration in ms passed to InsertDtmf.
    int gap;       // Measured ms since the previous InsertDtmf call.
  };

  FakeDtmfProvider() : last_insert_dtmf_call_(0) {}
  ~FakeDtmfProvider() {
    SignalDestroyed();
  }

  // Implements DtmfProviderInterface.
  virtual bool CanInsertDtmf(const std::string& track_label) OVERRIDE {
    return (can_insert_dtmf_tracks_.count(track_label) != 0);
  }

  virtual bool InsertDtmf(const std::string& track_label,
                          int code, int duration) OVERRIDE {
    int gap = 0;
    // TODO(ronghuawu): Make the timer (basically the talk_base::TimeNanos)
    // mockable and use a fake timer in the unit tests.
    if (last_insert_dtmf_call_ > 0) {
      gap = static_cast<int>(talk_base::Time() - last_insert_dtmf_call_);
    }
    last_insert_dtmf_call_ = talk_base::Time();

    LOG(LS_VERBOSE) << "FakeDtmfProvider::InsertDtmf code=" << code
                    << " duration=" << duration
                    << " gap=" << gap << ".";
    dtmf_info_queue_.push_back(DtmfInfo(code, duration, gap));
    return true;
  }

  virtual sigslot::signal0<>* GetOnDestroyedSignal() {
    return &SignalDestroyed;
  }

  // getter and setter
  const std::vector<DtmfInfo>& dtmf_info_queue() const {
    return dtmf_info_queue_;
  }

  // helper functions: control which track labels report DTMF capability.
  void AddCanInsertDtmfTrack(const std::string& label) {
    can_insert_dtmf_tracks_.insert(label);
  }
  void RemoveCanInsertDtmfTrack(const std::string& label) {
    can_insert_dtmf_tracks_.erase(label);
  }

 private:
  std::set<std::string> can_insert_dtmf_tracks_;  // Labels that can send DTMF.
  std::vector<DtmfInfo> dtmf_info_queue_;         // Recorded calls, in order.
  int64 last_insert_dtmf_call_;  // Timestamp (ms) of the previous call, 0 if none.
  sigslot::signal0<> SignalDestroyed;
};
// Test fixture: wires a DtmfSender to a FakeDtmfObserver and FakeDtmfProvider
// on the current thread, and provides helpers to verify what the provider and
// observer saw for a given tone string.
class DtmfSenderTest : public testing::Test {
 protected:
  DtmfSenderTest()
      : track_(AudioTrack::Create(kTestAudioLabel, NULL)),
        observer_(new talk_base::RefCountedObject<FakeDtmfObserver>()),
        provider_(new FakeDtmfProvider()) {
    provider_->AddCanInsertDtmfTrack(kTestAudioLabel);
    dtmf_ = DtmfSender::Create(track_, talk_base::Thread::Current(),
                               provider_.get());
    dtmf_->RegisterObserver(observer_.get());
  }

  ~DtmfSenderTest() {
    if (dtmf_.get()) {
      dtmf_->UnregisterObserver();
    }
  }

  // Constructs a list of DtmfInfo from |tones|, |duration| and
  // |inter_tone_gap|, mirroring what FakeDtmfProvider should have recorded.
  void GetDtmfInfoFromString(const std::string& tones, int duration,
                             int inter_tone_gap,
                             std::vector<FakeDtmfProvider::DtmfInfo>* dtmfs) {
    // Init extra_delay as -inter_tone_gap - duration to ensure the first
    // DtmfInfo's gap field will be 0.
    int extra_delay = -1 * (inter_tone_gap + duration);

    std::string::const_iterator it = tones.begin();
    for (; it != tones.end(); ++it) {
      char tone = *it;
      int code = 0;
      webrtc::GetDtmfCode(tone, &code);
      if (tone == ',') {
        // ',' produces no provider call; it delays the next tone instead.
        extra_delay = 2000;  // 2 seconds
      } else {
        dtmfs->push_back(FakeDtmfProvider::DtmfInfo(code, duration,
                         duration + inter_tone_gap + extra_delay));
        extra_delay = 0;
      }
    }
  }

  // Checks the sender's public state (track, remaining tones, timing) against
  // the expected values.
  void VerifyExpectedState(AudioTrackInterface* track,
                           const std::string& tones,
                           int duration, int inter_tone_gap) {
    EXPECT_EQ(track, dtmf_->track());
    EXPECT_EQ(tones, dtmf_->tones());
    EXPECT_EQ(duration, dtmf_->duration());
    EXPECT_EQ(inter_tone_gap, dtmf_->inter_tone_gap());
  }

  // Verify the provider got all the expected calls.
  void VerifyOnProvider(const std::string& tones, int duration,
                        int inter_tone_gap) {
    std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
    GetDtmfInfoFromString(tones, duration, inter_tone_gap, &dtmf_queue_ref);
    VerifyOnProvider(dtmf_queue_ref);
  }

  void VerifyOnProvider(
      const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue_ref) {
    const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue =
        provider_->dtmf_info_queue();
    ASSERT_EQ(dtmf_queue_ref.size(), dtmf_queue.size());
    std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it_ref =
        dtmf_queue_ref.begin();
    std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it =
        dtmf_queue.begin();
    while (it_ref != dtmf_queue_ref.end() && it != dtmf_queue.end()) {
      EXPECT_EQ(it_ref->code, it->code);
      EXPECT_EQ(it_ref->duration, it->duration);
      // Allow ~20ms error since the recorded gap uses a real wall clock.
      EXPECT_GE(it_ref->gap, it->gap - 20);
      EXPECT_LE(it_ref->gap, it->gap + 20);
      ++it_ref;
      ++it;
    }
  }

  // Verify the observer got all the expected callbacks.
  void VerifyOnObserver(const std::string& tones_ref) {
    const std::vector<std::string>& tones = observer_->tones();
    // The observer will get an empty string at the end.
    EXPECT_EQ(tones_ref.size() + 1, tones.size());
    EXPECT_TRUE(tones.back().empty());
    std::string::const_iterator it_ref = tones_ref.begin();
    std::vector<std::string>::const_iterator it = tones.begin();
    while (it_ref != tones_ref.end() && it != tones.end()) {
      EXPECT_EQ(*it_ref, it->at(0));
      ++it_ref;
      ++it;
    }
  }

  talk_base::scoped_refptr<AudioTrackInterface> track_;
  talk_base::scoped_ptr<FakeDtmfObserver> observer_;
  talk_base::scoped_ptr<FakeDtmfProvider> provider_;
  talk_base::scoped_refptr<DtmfSender> dtmf_;
};
// CanInsertDtmf must track the provider's per-track capability.
TEST_F(DtmfSenderTest, CanInsertDtmf) {
  EXPECT_TRUE(dtmf_->CanInsertDtmf());
  provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
  EXPECT_FALSE(dtmf_->CanInsertDtmf());
}
// Sends a buffer mixing valid and invalid characters; only the recognized
// tones ("1a*") should reach the provider and observer.
TEST_F(DtmfSenderTest, InsertDtmf) {
  std::string tones = "@1%a&*$";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);

  // The unrecognized characters should be ignored.
  std::string known_tones = "1a*";
  VerifyOnProvider(known_tones, duration, inter_tone_gap);
  VerifyOnObserver(known_tones);
}
// A second InsertDtmf call must cancel the remainder of the first sequence
// and start the new one; here "2" is dropped in favor of "ab".
TEST_F(DtmfSenderTest, InsertDtmfTwice) {
  std::string tones1 = "12";
  std::string tones2 = "ab";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
  VerifyExpectedState(track_, tones1, duration, inter_tone_gap);
  // Wait until the first tone got sent.
  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
  VerifyExpectedState(track_, "2", duration, inter_tone_gap);
  // Insert with another tone buffer.
  EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
  VerifyExpectedState(track_, tones2, duration, inter_tone_gap);
  // Wait until it's completed.
  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);

  std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
  GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
  GetDtmfInfoFromString("ab", duration, inter_tone_gap, &dtmf_queue_ref);
  VerifyOnProvider(dtmf_queue_ref);
  VerifyOnObserver("1ab");
}
// Destroying the provider mid-sequence must stop the task queue; no further
// tone callbacks should arrive after the deletion.
TEST_F(DtmfSenderTest, InsertDtmfWhileProviderIsDeleted) {
  std::string tones = "@1%a&*$";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
  // Wait until the first tone got sent.
  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
  // Delete provider.
  provider_.reset();
  // The queue should be discontinued so no more tone callbacks.
  WAIT(false, 200);
  EXPECT_EQ(1U, observer_->tones().size());
}
// Releasing the sender mid-sequence must stop the task queue; the destructor
// is expected to clear pending messages so no further callbacks arrive.
TEST_F(DtmfSenderTest, InsertDtmfWhileSenderIsDeleted) {
  std::string tones = "@1%a&*$";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
  // Wait until the first tone got sent.
  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
  // Delete the sender.
  dtmf_ = NULL;
  // The queue should be discontinued so no more tone callbacks.
  WAIT(false, 200);
  EXPECT_EQ(1U, observer_->tones().size());
}
// Inserting an empty tone string cancels the previous task; only the first
// tone of "12" plays, then the sequence completes.
TEST_F(DtmfSenderTest, InsertEmptyTonesToCancelPreviousTask) {
  std::string tones1 = "12";
  std::string tones2 = "";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
  // Wait until the first tone got sent.
  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
  // Insert with another tone buffer.
  EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
  // Wait until it's completed.
  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);

  std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
  GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
  VerifyOnProvider(dtmf_queue_ref);
  VerifyOnObserver("1");
}
// ',' must insert a two-second pause between '3' and '4' rather than being
// sent as a tone itself.
TEST_F(DtmfSenderTest, InsertDtmfWithCommaAsDelay) {
  std::string tones = "3,4";
  int duration = 100;
  int inter_tone_gap = 50;
  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);

  VerifyOnProvider(tones, duration, inter_tone_gap);
  VerifyOnObserver(tones);
}
// InsertDtmf must fail when the provider reports the track as incapable.
TEST_F(DtmfSenderTest, TryInsertDtmfWhenItDoesNotWork) {
  std::string tones = "3,4";
  int duration = 100;
  int inter_tone_gap = 50;
  provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
  EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
}
// Timing bounds: duration must be within [70, 6000] ms and the inter-tone
// gap at least 50 ms; values just outside those bounds must be rejected.
TEST_F(DtmfSenderTest, InsertDtmfWithInvalidDurationOrGap) {
  std::string tones = "3,4";
  int duration = 100;
  int inter_tone_gap = 50;

  EXPECT_FALSE(dtmf_->InsertDtmf(tones, 6001, inter_tone_gap));
  EXPECT_FALSE(dtmf_->InsertDtmf(tones, 69, inter_tone_gap));
  EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, 49));

  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
}

View File

@ -0,0 +1,105 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_
#define TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_
#include <string>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/base/common.h"
#include "talk/base/refcount.h"
// This file contains interfaces for DtmfSender.
namespace webrtc {
// DtmfSender callback interface. Application should implement this interface
// to get notifications from the DtmfSender.
class DtmfSenderObserverInterface {
 public:
  // Triggered when DTMF |tone| is sent.
  // If |tone| is empty that means the DtmfSender has sent out all the given
  // tones.
  virtual void OnToneChange(const std::string& tone) = 0;

 protected:
  // Observers are not deleted through this interface.
  virtual ~DtmfSenderObserverInterface() {}
};
// The interface of native implementation of the RTCDTMFSender defined by the
// WebRTC W3C Editor's Draft.
class DtmfSenderInterface : public talk_base::RefCountInterface {
 public:
  // Registers/replaces the single observer notified of tone changes.
  virtual void RegisterObserver(DtmfSenderObserverInterface* observer) = 0;
  virtual void UnregisterObserver() = 0;

  // Returns true if this DtmfSender is capable of sending DTMF.
  // Otherwise returns false.
  virtual bool CanInsertDtmf() = 0;

  // Queues a task that sends the DTMF |tones|. The |tones| parameter is treated
  // as a series of characters. The characters 0 through 9, A through D, #, and
  // * generate the associated DTMF tones. The characters a to d are equivalent
  // to A to D. The character ',' indicates a delay of 2 seconds before
  // processing the next character in the tones parameter.
  // Unrecognized characters are ignored.
  // The |duration| parameter indicates the duration in ms to use for each
  // character passed in the |tones| parameter.
  // The duration cannot be more than 6000 or less than 70.
  // The |inter_tone_gap| parameter indicates the gap between tones in ms.
  // The |inter_tone_gap| must be at least 50 ms but should be as short as
  // possible.
  // If InsertDtmf is called on the same object while an existing task for this
  // object to generate DTMF is still running, the previous task is canceled.
  // Returns true on success and false on failure.
  virtual bool InsertDtmf(const std::string& tones, int duration,
                          int inter_tone_gap) = 0;

  // Returns the track given as argument to the constructor.
  virtual const AudioTrackInterface* track() const = 0;

  // Returns the tones remaining to be played out.
  virtual std::string tones() const = 0;

  // Returns the current tone duration value in ms.
  // This value will be the value last set via the InsertDtmf() method, or the
  // default value of 100 ms if InsertDtmf() was never called.
  virtual int duration() const = 0;

  // Returns the current value of the between-tone gap in ms.
  // This value will be the value last set via the InsertDtmf() method, or the
  // default value of 50 ms if InsertDtmf() was never called.
  virtual int inter_tone_gap() const = 0;

 protected:
  // Ref-counted; deletion happens through Release(), not this destructor.
  virtual ~DtmfSenderInterface() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_DTMFSENDERINTERFACE_H_

View File

@ -0,0 +1,74 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file defines a fake port allocator factory used for testing.
// This implementation creates instances of cricket::FakePortAllocator.
#ifndef TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
#define TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/p2p/client/fakeportallocator.h"
namespace webrtc {
// Test-only PortAllocatorFactoryInterface: records the STUN/TURN
// configurations passed to CreatePortAllocator and returns a
// cricket::FakePortAllocator instead of a real allocator.
class FakePortAllocatorFactory : public PortAllocatorFactoryInterface {
 public:
  // Returns a new ref-counted factory instance.
  static FakePortAllocatorFactory* Create() {
    talk_base::RefCountedObject<FakePortAllocatorFactory>* allocator =
        new talk_base::RefCountedObject<FakePortAllocatorFactory>();
    return allocator;
  }

  // Stores the configurations for later inspection and returns a fake
  // allocator bound to the current thread. NOTE(review): the allocator is
  // heap-allocated here; presumably the caller takes ownership — confirm
  // against PortAllocatorFactoryInterface's contract.
  virtual cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_configurations,
      const std::vector<TurnConfiguration>& turn_configurations) {
    stun_configs_ = stun_configurations;
    turn_configs_ = turn_configurations;
    return new cricket::FakePortAllocator(talk_base::Thread::Current(), NULL);
  }

  // Returns the STUN servers passed to the last CreatePortAllocator call.
  const std::vector<StunConfiguration>& stun_configs() const {
    return stun_configs_;
  }

  // Returns the TURN servers passed to the last CreatePortAllocator call.
  const std::vector<TurnConfiguration>& turn_configs() const {
    return turn_configs_;
  }

 protected:
  FakePortAllocatorFactory() {}
  ~FakePortAllocatorFactory() {}

 private:
  std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_configs_;
  std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_configs_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_

View File

@ -0,0 +1,23 @@
This directory holds a Java implementation of the webrtc::PeerConnection API, as
well as the JNI glue C++ code that lets the Java implementation reuse the C++
implementation of the same API.
To build the Java API and related tests, build with
OS=linux or OS=android and include
build_with_libjingle=1 build_with_chromium=0
in $GYP_DEFINES.
To use the Java API, start by looking at the public interface of
org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
To understand the implementation of the API, see the native code in jni/.
An example command-line to build & run the unittest:
cd path/to/trunk
GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 java_home=path/to/JDK" gclient runhooks && \
ninja -C out/Debug libjingle_peerconnection_java_unittest && \
./out/Debug/libjingle_peerconnection_java_unittest
(where path/to/JDK should contain include/jni.h)
During development it can be helpful to run the JVM with the -Xcheck:jni flag.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,38 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/**
 * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
 * more {@code AudioTrack} objects.
 */
public class AudioSource extends MediaSource {
  /**
   * @param nativeSource native pointer (as a long) to the wrapped
   *     AudioSourceInterface; handed to {@code MediaSource} for management.
   */
  public AudioSource(long nativeSource) {
    super(nativeSource);
  }
}

View File

@ -0,0 +1,35 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java wrapper for a C++ AudioTrackInterface */
public class AudioTrack extends MediaStreamTrack {
  /**
   * @param nativeTrack native pointer (as a long) to the wrapped
   *     AudioTrackInterface; handed to {@code MediaStreamTrack}.
   */
  public AudioTrack(long nativeTrack) {
    super(nativeTrack);
  }
}

View File

@ -0,0 +1,48 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/**
 * Representation of a single ICE Candidate, mirroring
 * {@code IceCandidateInterface} in the C++ API.
 */
public class IceCandidate {
  public final String sdpMid;
  public final int sdpMLineIndex;
  public final String sdp;

  /**
   * @param sdpMid media stream identification ("mid") this candidate belongs
   *     to.
   * @param sdpMLineIndex zero-based index of the m-line in the SDP this
   *     candidate is associated with.
   * @param sdp the candidate-attribute SDP string.
   */
  public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
    this.sdpMid = sdpMid;
    this.sdpMLineIndex = sdpMLineIndex;
    this.sdp = sdp;
  }

  /** Returns "sdpMid:sdpMLineIndex:sdp", for logging/debugging. */
  @Override
  public String toString() {
    return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
  }
}

View File

@ -0,0 +1,85 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.util.LinkedList;
import java.util.List;
/**
 * Description of media constraints for {@code MediaStream} and
 * {@code PeerConnection}.
 */
public class MediaConstraints {
  /** Simple String key/value pair. */
  public static class KeyValuePair {
    private final String key;
    private final String value;

    public KeyValuePair(String key, String value) {
      this.key = key;
      this.value = value;
    }

    public String getKey() {
      return key;
    }

    public String getValue() {
      return value;
    }

    /** Renders as "key: value". */
    @Override
    public String toString() {
      return key + ": " + value;
    }
  }

  public final List<KeyValuePair> mandatory;
  public final List<KeyValuePair> optional;

  public MediaConstraints() {
    mandatory = new LinkedList<KeyValuePair>();
    optional = new LinkedList<KeyValuePair>();
  }

  // Renders |pairs| as "[k1: v1, k2: v2, ...]".
  private static String stringifyKeyValuePairList(List<KeyValuePair> pairs) {
    StringBuilder rendered = new StringBuilder("[");
    boolean first = true;
    for (KeyValuePair pair : pairs) {
      if (!first) {
        rendered.append(", ");
      }
      rendered.append(pair.toString());
      first = false;
    }
    return rendered.append("]").toString();
  }

  @Override
  public String toString() {
    return "mandatory: " + stringifyKeyValuePairList(mandatory) +
        ", optional: " + stringifyKeyValuePairList(optional);
  }
}

View File

@ -0,0 +1,55 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource {
  /** Tracks MediaSourceInterface.SourceState */
  public enum State {
    INITIALIZING, LIVE, ENDED, MUTED
  }

  // Raw handle to the native source; freed by dispose().
  final long nativeSource; // Package-protected for PeerConnectionFactory.

  // |nativeSource| is a native pointer produced by the JNI layer (see
  // PeerConnectionFactory.createVideoSource()).
  public MediaSource(long nativeSource) {
    this.nativeSource = nativeSource;
  }

  /** Returns the current state of the native source. */
  public State state() {
    return nativeState(nativeSource);
  }

  // Frees the native source. |this| must not be used afterwards.
  void dispose() {
    free(nativeSource);
  }

  private static native State nativeState(long pointer);

  private static native void free(long nativeSource);
}

View File

@ -0,0 +1,114 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.util.LinkedList;
import java.util.List;
/** Java wrapper for a C++ MediaStreamInterface. */
public class MediaStream {
  public final List<AudioTrack> audioTracks;
  public final List<VideoTrack> videoTracks;

  // Package-protected for LocalMediaStream and PeerConnection.
  final long nativeStream;

  public MediaStream(long nativeStream) {
    this.nativeStream = nativeStream;
    audioTracks = new LinkedList<AudioTrack>();
    videoTracks = new LinkedList<VideoTrack>();
  }

  /** Adds |track| to the native stream, mirroring it locally on success. */
  public boolean addTrack(AudioTrack track) {
    boolean added = nativeAddAudioTrack(nativeStream, track.nativeTrack);
    if (added) {
      audioTracks.add(track);
    }
    return added;
  }

  /** Adds |track| to the native stream, mirroring it locally on success. */
  public boolean addTrack(VideoTrack track) {
    boolean added = nativeAddVideoTrack(nativeStream, track.nativeTrack);
    if (added) {
      videoTracks.add(track);
    }
    return added;
  }

  /** Removes |track| from the native stream; un-mirrors it on success. */
  public boolean removeTrack(AudioTrack track) {
    boolean removed = nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
    if (removed) {
      audioTracks.remove(track);
    }
    return removed;
  }

  /** Removes |track| from the native stream; un-mirrors it on success. */
  public boolean removeTrack(VideoTrack track) {
    boolean removed = nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
    if (removed) {
      videoTracks.remove(track);
    }
    return removed;
  }

  /**
   * Disposes every contained track, then frees the native stream.
   * |this| must not be used afterwards.
   */
  public void dispose() {
    for (AudioTrack audioTrack : audioTracks) {
      audioTrack.dispose();
    }
    audioTracks.clear();
    for (VideoTrack videoTrack : videoTracks) {
      videoTrack.dispose();
    }
    videoTracks.clear();
    free(nativeStream);
  }

  /** Returns the stream's label, as reported by the native layer. */
  public String label() {
    return nativeLabel(nativeStream);
  }

  /** Renders as "[label:A=<#audio>:V=<#video>]". */
  @Override
  public String toString() {
    StringBuilder description = new StringBuilder("[");
    description.append(label());
    description.append(":A=").append(audioTracks.size());
    description.append(":V=").append(videoTracks.size());
    return description.append("]").toString();
  }

  private static native boolean nativeAddAudioTrack(
      long nativeStream, long nativeAudioTrack);

  private static native boolean nativeAddVideoTrack(
      long nativeStream, long nativeVideoTrack);

  private static native boolean nativeRemoveAudioTrack(
      long nativeStream, long nativeAudioTrack);

  private static native boolean nativeRemoveVideoTrack(
      long nativeStream, long nativeVideoTrack);

  private static native String nativeLabel(long nativeStream);

  private static native void free(long nativeStream);
}

View File

@ -0,0 +1,86 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaStreamTrackInterface. */
public class MediaStreamTrack {
  /** Tracks MediaStreamTrackInterface.TrackState */
  public enum State {
    INITIALIZING, LIVE, ENDED, FAILED
  }

  // Raw handle to the wrapped native track; package-protected so sibling
  // wrappers (e.g. MediaStream, PeerConnection) can pass it across JNI.
  final long nativeTrack;

  public MediaStreamTrack(long nativeTrack) {
    this.nativeTrack = nativeTrack;
  }

  /** Returns the track's id string. */
  public String id() {
    return nativeId(nativeTrack);
  }

  /** Returns the track's kind string, as supplied by the native layer. */
  public String kind() {
    return nativeKind(nativeTrack);
  }

  /** Returns whether the track is currently enabled. */
  public boolean enabled() {
    return nativeEnabled(nativeTrack);
  }

  /** Enables or disables the track; returns success. */
  public boolean setEnabled(boolean enable) {
    return nativeSetEnabled(nativeTrack, enable);
  }

  /** Returns the track's current State. */
  public State state() {
    return nativeState(nativeTrack);
  }

  // |newState| is marshalled as its ordinal, so the declaration order of the
  // State enum must stay in sync with the native TrackState values.
  public boolean setState(State newState) {
    return nativeSetState(nativeTrack, newState.ordinal());
  }

  /** Frees the native track. |this| must not be used afterwards. */
  public void dispose() {
    free(nativeTrack);
  }

  private static native String nativeId(long nativeTrack);

  private static native String nativeKind(long nativeTrack);

  private static native boolean nativeEnabled(long nativeTrack);

  private static native boolean nativeSetEnabled(
      long nativeTrack, boolean enabled);

  private static native State nativeState(long nativeTrack);

  private static native boolean nativeSetState(
      long nativeTrack, int newState);

  private static native void free(long nativeTrack);
}

View File

@ -0,0 +1,194 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.util.LinkedList;
import java.util.List;
/**
 * Java-land version of the PeerConnection APIs; wraps the C++ API
 * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
 * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
 * http://www.w3.org/TR/mediacapture-streams/
 */
public class PeerConnection {
  static {
    // Load the JNI library backing all native methods in this class.
    System.loadLibrary("jingle_peerconnection_so");
  }

  /** Tracks PeerConnectionInterface::IceGatheringState */
  public enum IceGatheringState { NEW, GATHERING, COMPLETE };

  /** Tracks PeerConnectionInterface::IceConnectionState */
  public enum IceConnectionState {
    NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
  };

  /** Tracks PeerConnectionInterface::SignalingState */
  public enum SignalingState {
    STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
    HAVE_REMOTE_PRANSWER, CLOSED
  };

  /** Java version of PeerConnectionObserver. */
  public static interface Observer {
    /** Triggered when the SignalingState changes. */
    public void onSignalingChange(SignalingState newState);

    /** Triggered when the IceConnectionState changes. */
    public void onIceConnectionChange(IceConnectionState newState);

    /** Triggered when the IceGatheringState changes. */
    public void onIceGatheringChange(IceGatheringState newState);

    /** Triggered when a new ICE candidate has been found. */
    public void onIceCandidate(IceCandidate candidate);

    /** Triggered on any error. */
    public void onError();

    /** Triggered when media is received on a new stream from remote peer. */
    public void onAddStream(MediaStream stream);

    /** Triggered when a remote peer close a stream. */
    public void onRemoveStream(MediaStream stream);
  }

  /** Java version of PeerConnectionInterface.IceServer. */
  public static class IceServer {
    public final String uri;
    public final String username;
    public final String password;

    /** Convenience constructor for STUN servers. */
    public IceServer(String uri) {
      this(uri, "", "");
    }

    // Full constructor, for servers requiring credentials.
    public IceServer(String uri, String username, String password) {
      this.uri = uri;
      this.username = username;
      this.password = password;
    }

    /** Renders as "uri[username:password]". */
    public String toString() {
      return uri + "[" + username + ":" + password + "]";
    }
  }

  // Streams added via addStream(); disposed along with this connection.
  private final List<MediaStream> localStreams;
  // Raw handles to the native PeerConnectionInterface and the native observer
  // created by PeerConnectionFactory; both freed in dispose().
  private final long nativePeerConnection;
  private final long nativeObserver;

  // Package-protected: instances are created by PeerConnectionFactory.
  PeerConnection(long nativePeerConnection, long nativeObserver) {
    this.nativePeerConnection = nativePeerConnection;
    this.nativeObserver = nativeObserver;
    localStreams = new LinkedList<MediaStream>();
  }

  // JsepInterface.
  public native SessionDescription getLocalDescription();

  public native SessionDescription getRemoteDescription();

  public native void createOffer(
      SdpObserver observer, MediaConstraints constraints);

  public native void createAnswer(
      SdpObserver observer, MediaConstraints constraints);

  public native void setLocalDescription(
      SdpObserver observer, SessionDescription sdp);

  public native void setRemoteDescription(
      SdpObserver observer, SessionDescription sdp);

  public native boolean updateIce(
      List<IceServer> iceServers, MediaConstraints constraints);

  /** Hands |candidate| to the native connection; returns success. */
  public boolean addIceCandidate(IceCandidate candidate) {
    return nativeAddIceCandidate(
        candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
  }

  /** Adds a local stream; on success the stream is tracked for dispose(). */
  public boolean addStream(
      MediaStream stream, MediaConstraints constraints) {
    boolean ret = nativeAddLocalStream(stream.nativeStream, constraints);
    if (!ret) {
      return false;
    }
    localStreams.add(stream);
    return true;
  }

  /** Removes a previously added local stream (does not dispose it). */
  public void removeStream(MediaStream stream) {
    nativeRemoveLocalStream(stream.nativeStream);
    localStreams.remove(stream);
  }

  // Requests stats for |track| (or for the whole connection when |track| is
  // null); results are reported via |observer| (see StatsObserver).
  public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
    return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
  }

  // TODO(fischman): add support for DTMF-related methods once that API
  // stabilizes.
  public native SignalingState signalingState();

  public native IceConnectionState iceConnectionState();

  public native IceGatheringState iceGatheringState();

  public native void close();

  // Closes the connection, disposes all streams added via addStream(), and
  // frees the native objects. |this| must not be used afterwards.
  public void dispose() {
    close();
    for (MediaStream stream : localStreams) {
      stream.dispose();
    }
    localStreams.clear();
    freePeerConnection(nativePeerConnection);
    freeObserver(nativeObserver);
  }

  private static native void freePeerConnection(long nativePeerConnection);

  private static native void freeObserver(long nativeObserver);

  private native boolean nativeAddIceCandidate(
      String sdpMid, int sdpMLineIndex, String iceCandidateSdp);

  private native boolean nativeAddLocalStream(
      long nativeStream, MediaConstraints constraints);

  private native void nativeRemoveLocalStream(long nativeStream);

  private native boolean nativeGetStats(
      StatsObserver observer, long nativeTrack);
}

View File

@ -0,0 +1,119 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.util.List;
/**
 * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
 * the PeerConnection API for clients.
 */
public class PeerConnectionFactory {
  static {
    // Load the JNI library backing all native methods in this class.
    System.loadLibrary("jingle_peerconnection_so");
  }

  // Raw handle to the native factory; freed by dispose().
  private final long nativeFactory;

  // |context| is an android.content.Context object, but we keep it untyped here
  // to allow building on non-Android platforms.
  public static native boolean initializeAndroidGlobals(Object context);

  // Creates the native factory; throws RuntimeException if the native layer
  // fails to produce one.
  public PeerConnectionFactory() {
    nativeFactory = nativeCreatePeerConnectionFactory();
    if (nativeFactory == 0) {
      throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
    }
  }

  // Creates a PeerConnection observed by |observer|; returns null if either
  // the native observer or the native connection could not be created.
  // NOTE(review): if nativeCreatePeerConnection() fails, |nativeObserver| is
  // never passed to a free function on this path — looks like a leak; confirm.
  public PeerConnection createPeerConnection(
      List<PeerConnection.IceServer> iceServers,
      MediaConstraints constraints,
      PeerConnection.Observer observer) {
    long nativeObserver = nativeCreateObserver(observer);
    if (nativeObserver == 0) {
      return null;
    }
    long nativePeerConnection = nativeCreatePeerConnection(
        nativeFactory, iceServers, constraints, nativeObserver);
    if (nativePeerConnection == 0) {
      return null;
    }
    return new PeerConnection(nativePeerConnection, nativeObserver);
  }

  /** Creates a local MediaStream with the given label. */
  public MediaStream createLocalMediaStream(String label) {
    return new MediaStream(
        nativeCreateLocalMediaStream(nativeFactory, label));
  }

  /** Creates a VideoSource backed by |capturer|, subject to |constraints|. */
  public VideoSource createVideoSource(
      VideoCapturer capturer, MediaConstraints constraints) {
    return new VideoSource(nativeCreateVideoSource(
        nativeFactory, capturer.nativeVideoCapturer, constraints));
  }

  /** Creates a VideoTrack with the given id, fed by |source|. */
  public VideoTrack createVideoTrack(String id, VideoSource source) {
    return new VideoTrack(nativeCreateVideoTrack(
        nativeFactory, id, source.nativeSource));
  }

  /** Creates an AudioTrack with the given id. */
  public AudioTrack createAudioTrack(String id) {
    return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id));
  }

  /** Frees the native factory. |this| must not be used afterwards. */
  public void dispose() {
    freeFactory(nativeFactory);
  }

  private static native long nativeCreatePeerConnectionFactory();

  private static native long nativeCreateObserver(
      PeerConnection.Observer observer);

  private static native long nativeCreatePeerConnection(
      long nativeFactory, List<PeerConnection.IceServer> iceServers,
      MediaConstraints constraints, long nativeObserver);

  private static native long nativeCreateLocalMediaStream(
      long nativeFactory, String label);

  private static native long nativeCreateVideoSource(
      long nativeFactory, long nativeVideoCapturer,
      MediaConstraints constraints);

  private static native long nativeCreateVideoTrack(
      long nativeFactory, String id, long nativeVideoSource);

  private static native long nativeCreateAudioTrack(
      long nativeFactory, String id);

  private static native void freeFactory(long nativeFactory);
}

View File

@ -0,0 +1,43 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Interface for observing SDP-related events. */
public interface SdpObserver {
  /** Called on success of Create{Offer,Answer}(), with the created |sdp|. */
  public void onCreateSuccess(SessionDescription sdp);

  /** Called on success of Set{Local,Remote}Description(). */
  public void onSetSuccess();

  /** Called on error of Create{Offer,Answer}(); |error| describes the failure. */
  public void onCreateFailure(String error);

  /** Called on error of Set{Local,Remote}Description(); |error| describes the failure. */
  public void onSetFailure(String error);
}

View File

@ -0,0 +1,57 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/**
 * Description of an RFC 4566 Session.
 * SDPs are passed as serialized Strings in Java-land and are materialized
 * to SessionDescriptionInterface as appropriate in the JNI layer.
 */
public class SessionDescription {
  /** Java-land enum version of SessionDescriptionInterface's type() string. */
  public static enum Type {
    OFFER, PRANSWER, ANSWER;

    /** Returns the lower-case wire-format name of this type, e.g. "offer". */
    public String canonicalForm() {
      // Use a fixed locale so case conversion cannot be affected by the
      // device's default locale (e.g. Turkish dotless-i rules).
      return name().toLowerCase(java.util.Locale.US);
    }

    /**
     * Parses a lower-case wire-format name (e.g. "offer") back into a Type.
     * Throws IllegalArgumentException for unknown names.
     */
    public static Type fromCanonicalForm(String canonical) {
      return Type.valueOf(Type.class, canonical.toUpperCase(java.util.Locale.US));
    }
  }

  // The type of this description (offer/pranswer/answer).
  public final Type type;
  // Serialized SDP body.
  public final String description;

  public SessionDescription(Type type, String description) {
    this.type = type;
    this.description = description;
  }
}

View File

@ -0,0 +1,34 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Interface for observing Stats reports (see webrtc::StatsObservers). */
public interface StatsObserver {
  /** Called when the reports are ready; see PeerConnection#getStats. */
  public void onComplete(StatsReport[] reports);
}

View File

@ -0,0 +1,72 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java version of webrtc::StatsReport. */
public class StatsReport {
  /** Java version of webrtc::StatsReport::Value. */
  public static class Value {
    public final String name;
    public final String value;

    public Value(String name, String value) {
      this.name = name;
      this.value = value;
    }

    /** Renders as "[name: value]". */
    @Override
    public String toString() {
      return "[" + name + ": " + value + "]";
    }
  }

  public final String id;
  public final String type;
  // Time since 1970-01-01T00:00:00Z in milliseconds.
  public final double timestamp;
  public final Value[] values;

  public StatsReport(String id, String type, double timestamp, Value[] values) {
    this.id = id;
    this.type = type;
    this.timestamp = timestamp;
    this.values = values;
  }

  /** Renders id, type, timestamp and each value; every value is followed by ", ". */
  @Override
  public String toString() {
    String rendered = "id: " + id + ", type: " + type +
        ", timestamp: " + timestamp + ", values: ";
    for (Value reportValue : values) {
      rendered += reportValue.toString() + ", ";
    }
    return rendered;
  }
}

View File

@ -0,0 +1,53 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java version of VideoCapturerInterface. */
public class VideoCapturer {
  // Raw handle to the native capturer; package-protected so
  // PeerConnectionFactory.createVideoSource() can pass it across JNI.
  final long nativeVideoCapturer;

  // Private: instances are obtained via the create() factory below.
  private VideoCapturer(long nativeVideoCapturer) {
    this.nativeVideoCapturer = nativeVideoCapturer;
  }

  /**
   * Creates a capturer for the named device, or returns null if the native
   * layer could not create one.
   */
  public static VideoCapturer create(String deviceName) {
    long nativeVideoCapturer = nativeCreateVideoCapturer(deviceName);
    if (nativeVideoCapturer == 0) {
      return null;
    }
    return new VideoCapturer(nativeVideoCapturer);
  }

  /** Frees the native capturer. |this| must not be used afterwards. */
  public void dispose() {
    free(nativeVideoCapturer);
  }

  private static native long nativeCreateVideoCapturer(String deviceName);

  private static native void free(long nativeVideoCapturer);
}

View File

@ -0,0 +1,136 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
* Java version of VideoRendererInterface. In addition to allowing clients to
* define their own rendering behavior (by passing in a Callbacks object), this
* class also provides a createGui() method for creating a GUI-rendering window
* on various platforms.
*/
public class VideoRenderer {
/** Java version of cricket::VideoFrame. */
public static class I420Frame {
  public final int width;
  public final int height;
  public final int[] yuvStrides;   // Row strides, in bytes, for Y, U, V.
  public final ByteBuffer[] yuvPlanes;  // Planar pixel data: Y, U, V.

  /**
   * Construct a frame of the given dimensions with the specified planar
   * data. If |yuvPlanes| is null, new planes of the appropriate sizes are
   * allocated.
   */
  public I420Frame(
      int width, int height, int[] yuvStrides, ByteBuffer[] yuvPlanes) {
    this.width = width;
    this.height = height;
    this.yuvStrides = yuvStrides;
    if (yuvPlanes == null) {
      // NOTE(review): all three planes are sized with the full |height| even
      // though I420 chroma planes have height/2 rows, so the U/V buffers are
      // over-allocated. copyPlane() copies src.capacity() bytes, so sizes must
      // match between frames — confirm before changing.
      yuvPlanes = new ByteBuffer[3];
      yuvPlanes[0] = ByteBuffer.allocateDirect(yuvStrides[0] * height);
      yuvPlanes[1] = ByteBuffer.allocateDirect(yuvStrides[1] * height);
      yuvPlanes[2] = ByteBuffer.allocateDirect(yuvStrides[2] * height);
    }
    this.yuvPlanes = yuvPlanes;
  }

  /**
   * Copy the planes out of |source| into |this| and return |this|. Calling
   * this with mismatched frame dimensions is a programming error and will
   * likely crash.
   */
  public I420Frame copyFrom(I420Frame source) {
    if (!Arrays.equals(yuvStrides, source.yuvStrides) ||
        width != source.width || height != source.height) {
      throw new RuntimeException("Mismatched dimensions! Source: " +
          source.toString() + ", destination: " + toString());
    }
    copyPlane(source.yuvPlanes[0], yuvPlanes[0]);
    copyPlane(source.yuvPlanes[1], yuvPlanes[1]);
    copyPlane(source.yuvPlanes[2], yuvPlanes[2]);
    return this;
  }

  /** Renders as "WxH:strideY:strideU:strideV". */
  @Override
  public String toString() {
    return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
        ":" + yuvStrides[2];
  }

  // Copy the bytes out of |src| and into |dst|, ignoring and overwriting
  // position & limit in both buffers.
  private void copyPlane(ByteBuffer src, ByteBuffer dst) {
    src.position(0).limit(src.capacity());
    dst.put(src);
    dst.position(0).limit(dst.capacity());
  }
}
/** The real meat of VideoRendererInterface. */
public static interface Callbacks {
public void setSize(int width, int height);
public void renderFrame(I420Frame frame);
}
// |this| either wraps a native (GUI) renderer or a client-supplied Callbacks
// (Java) implementation; so exactly one of these will be non-0/null.
final long nativeVideoRenderer;
private final Callbacks callbacks;
public static VideoRenderer createGui(int x, int y) {
long nativeVideoRenderer = nativeCreateGuiVideoRenderer(x, y);
if (nativeVideoRenderer == 0) {
return null;
}
return new VideoRenderer(nativeVideoRenderer);
}
public VideoRenderer(Callbacks callbacks) {
nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
this.callbacks = callbacks;
}
private VideoRenderer(long nativeVideoRenderer) {
this.nativeVideoRenderer = nativeVideoRenderer;
callbacks = null;
}
public void dispose() {
free(nativeVideoRenderer);
}
private static native long nativeCreateGuiVideoRenderer(int x, int y);
private static native long nativeWrapVideoRenderer(Callbacks callbacks);
private static native void free(long nativeVideoRenderer);
}

View File

@ -0,0 +1,36 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
/** Java version of VideoSourceInterface. */
public class VideoSource extends MediaSource {
  // Thin wrapper: simply forwards the native pointer to the MediaSource
  // base class, which holds it.
  public VideoSource(long nativeSource) {
    super(nativeSource);
  }
}

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.util.LinkedList;
/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
  // Renderers currently attached to this track; mirrored on the native side.
  private final LinkedList<VideoRenderer> renderers =
      new LinkedList<VideoRenderer>();

  public VideoTrack(long nativeTrack) {
    super(nativeTrack);
  }

  /** Attach |renderer| to this track, both in Java and natively. */
  public void addRenderer(VideoRenderer renderer) {
    renderers.add(renderer);
    nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
  }

  /**
   * Detach |renderer| and release it.  A renderer that was never attached is
   * silently ignored.
   */
  public void removeRenderer(VideoRenderer renderer) {
    if (renderers.remove(renderer)) {
      nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
      renderer.dispose();
    }
  }

  /** Detach and release every attached renderer. */
  public void dispose() {
    while (!renderers.isEmpty()) {
      removeRenderer(renderers.getFirst());
    }
  }

  private static native void nativeAddRenderer(
      long nativeTrack, long nativeRenderer);

  private static native void nativeRemoveRenderer(
      long nativeTrack, long nativeRenderer);
}

View File

@ -0,0 +1,47 @@
#!/bin/bash
#
# libjingle
# Copyright 2013, Google Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Wrapper script for running the Java tests under this directory.

# Exit with error immediately if any subcommand fails.
set -e

# Change directory to the PRODUCT_DIR (e.g. out/Debug).  All expansions are
# quoted so the script still works when the build path contains spaces.
cd -P "$(dirname "$0")"

export CLASSPATH="$PWD/junit-4.11.jar"
CLASSPATH="$CLASSPATH:$PWD/libjingle_peerconnection_test.jar"
CLASSPATH="$CLASSPATH:$PWD/libjingle_peerconnection.jar"

export LD_LIBRARY_PATH="$PWD"

# The RHS value is replaced by the build action that copies this script to
# <(PRODUCT_DIR).
export JAVA_HOME=GYP_JAVA_HOME

"${JAVA_HOME}/bin/java" -Xcheck:jni -classpath "$CLASSPATH" \
    junit.textui.TestRunner org.webrtc.PeerConnectionTest

View File

@ -0,0 +1,532 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import junit.framework.TestCase;
import org.junit.Test;
import org.webrtc.PeerConnection.IceConnectionState;
import org.webrtc.PeerConnection.IceGatheringState;
import org.webrtc.PeerConnection.SignalingState;
import java.lang.ref.WeakReference;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/** End-to-end tests for PeerConnection.java. */
public class PeerConnectionTest extends TestCase {
// Set to true to render video.
private static final boolean RENDER_TO_GUI = false;
  // Records expected callback activity for one PeerConnection (plus its
  // renderer and stats callbacks) and lets the test poll until every
  // registered expectation has been observed.  Expectations are queued by the
  // expectXxx() methods and consumed by the corresponding onXxx()/callback
  // overrides; mutable state is guarded by synchronizing on |this|.
  private static class ObserverExpectations implements PeerConnection.Observer,
                                            VideoRenderer.Callbacks,
                                            StatsObserver {
    // Counters / queues of still-outstanding expected events.
    private int expectedIceCandidates = 0;
    private int expectedErrors = 0;
    private LinkedList<Integer> expectedSetSizeDimensions =
        new LinkedList<Integer>();  // Alternating width/height.
    private int expectedFramesDelivered = 0;
    private LinkedList<SignalingState> expectedSignalingChanges =
        new LinkedList<SignalingState>();
    private LinkedList<IceConnectionState> expectedIceConnectionChanges =
        new LinkedList<IceConnectionState>();
    private LinkedList<IceGatheringState> expectedIceGatheringChanges =
        new LinkedList<IceGatheringState>();
    private LinkedList<String> expectedAddStreamLabels =
        new LinkedList<String>();
    private LinkedList<String> expectedRemoveStreamLabels =
        new LinkedList<String>();
    // Candidates received so far; the test drains these to feed the peer.
    public LinkedList<IceCandidate> gotIceCandidates =
        new LinkedList<IceCandidate>();
    // Renderer attached per added stream, held weakly so the test can verify
    // ownership lies with the PeerConnection rather than with test code.
    private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
        new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
    private int expectedStatsCallbacks = 0;
    private LinkedList<StatsReport[]> gotStatsReports =
        new LinkedList<StatsReport[]>();

    public synchronized void expectIceCandidates(int count) {
      expectedIceCandidates += count;
    }

    public synchronized void onIceCandidate(IceCandidate candidate) {
      --expectedIceCandidates;
      // We don't assert expectedIceCandidates >= 0 because it's hard to know
      // how many to expect, in general.  We only use expectIceCandidates to
      // assert a minimal count.
      gotIceCandidates.add(candidate);
    }

    public synchronized void expectError() {
      ++expectedErrors;
    }

    public synchronized void onError() {
      // Every error must have been announced via expectError().
      assertTrue(--expectedErrors >= 0);
    }

    public synchronized void expectSetSize(int width, int height) {
      expectedSetSizeDimensions.add(width);
      expectedSetSizeDimensions.add(height);
    }

    @Override
    public synchronized void setSize(int width, int height) {
      // Dimensions are queued as alternating width/height pairs.
      assertEquals(width, expectedSetSizeDimensions.removeFirst().intValue());
      assertEquals(height, expectedSetSizeDimensions.removeFirst().intValue());
    }

    public synchronized void expectFramesDelivered(int count) {
      expectedFramesDelivered += count;
    }

    @Override
    public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
      // May go negative: extra frames beyond the expected minimum are fine.
      --expectedFramesDelivered;
    }

    public synchronized void expectSignalingChange(SignalingState newState) {
      expectedSignalingChanges.add(newState);
    }

    @Override
    public synchronized void onSignalingChange(SignalingState newState) {
      assertEquals(expectedSignalingChanges.removeFirst(), newState);
    }

    public synchronized void expectIceConnectionChange(
        IceConnectionState newState) {
      expectedIceConnectionChanges.add(newState);
    }

    // NOTE(review): unlike the other callback overrides this one is not
    // synchronized — looks like an oversight; confirm.
    @Override
    public void onIceConnectionChange(IceConnectionState newState) {
      assertEquals(expectedIceConnectionChanges.removeFirst(), newState);
    }

    public synchronized void expectIceGatheringChange(
        IceGatheringState newState) {
      expectedIceGatheringChanges.add(newState);
    }

    // NOTE(review): also not synchronized, unlike sibling overrides; confirm.
    @Override
    public void onIceGatheringChange(IceGatheringState newState) {
      // It's fine to get a variable number of GATHERING messages before
      // COMPLETE fires (depending on how long the test runs) so we don't assert
      // any particular count.
      if (newState == IceGatheringState.GATHERING) {
        return;
      }
      assertEquals(expectedIceGatheringChanges.removeFirst(), newState);
    }

    public synchronized void expectAddStream(String label) {
      expectedAddStreamLabels.add(label);
    }

    public synchronized void onAddStream(MediaStream stream) {
      assertEquals(expectedAddStreamLabels.removeFirst(), stream.label());
      // Each stream in this test carries exactly one video and one audio
      // track, with ids following the <label>[av]0 scheme.
      assertEquals(1, stream.videoTracks.size());
      assertEquals(1, stream.audioTracks.size());
      assertTrue(stream.videoTracks.get(0).id().endsWith("LMSv0"));
      assertTrue(stream.audioTracks.get(0).id().endsWith("LMSa0"));
      assertEquals("video", stream.videoTracks.get(0).kind());
      assertEquals("audio", stream.audioTracks.get(0).kind());
      VideoRenderer renderer = createVideoRenderer(this);
      stream.videoTracks.get(0).addRenderer(renderer);
      // put() must return null: at most one renderer per stream.
      assertNull(renderers.put(
          stream, new WeakReference<VideoRenderer>(renderer)));
    }

    public synchronized void expectRemoveStream(String label) {
      expectedRemoveStreamLabels.add(label);
    }

    public synchronized void onRemoveStream(MediaStream stream) {
      assertEquals(expectedRemoveStreamLabels.removeFirst(), stream.label());
      // The renderer attached in onAddStream must still be alive; detach it.
      WeakReference<VideoRenderer> renderer = renderers.remove(stream);
      assertNotNull(renderer);
      assertNotNull(renderer.get());
      assertEquals(1, stream.videoTracks.size());
      stream.videoTracks.get(0).removeRenderer(renderer.get());
    }

    @Override
    public synchronized void onComplete(StatsReport[] reports) {
      if (--expectedStatsCallbacks < 0) {
        throw new RuntimeException("Unexpected stats report: " + reports);
      }
      gotStatsReports.add(reports);
    }

    public synchronized void expectStatsCallback() {
      ++expectedStatsCallbacks;
    }

    // Hands back all accumulated stats reports and resets the accumulator.
    public synchronized LinkedList<StatsReport[]> takeStatsReports() {
      LinkedList<StatsReport[]> got = gotStatsReports;
      gotStatsReports = new LinkedList<StatsReport[]>();
      return got;
    }

    // True once every registered expectation has been consumed.
    public synchronized boolean areAllExpectationsSatisfied() {
      return expectedIceCandidates <= 0 &&  // See comment in onIceCandidate.
          expectedErrors == 0 &&
          expectedSignalingChanges.size() == 0 &&
          expectedIceConnectionChanges.size() == 0 &&
          expectedIceGatheringChanges.size() == 0 &&
          expectedAddStreamLabels.size() == 0 &&
          expectedRemoveStreamLabels.size() == 0 &&
          expectedSetSizeDimensions.isEmpty() &&
          expectedFramesDelivered <= 0 &&
          expectedStatsCallbacks == 0;
    }

    // Poll (10ms granularity) until all expectations are satisfied.
    public void waitForAllExpectationsToBeSatisfied() {
      // TODO(fischman): problems with this approach:
      // - come up with something better than a poll loop
      // - avoid serializing expectations explicitly; the test is not as robust
      //   as it could be because it must place expectations between wait
      //   statements very precisely (e.g. frame must not arrive before its
      //   expectation, and expectation must not be registered so early as to
      //   stall a wait).  Use callbacks to fire off dependent steps instead of
      //   explicitly waiting, so there can be just a single wait at the end of
      //   the test.
      while (!areAllExpectationsSatisfied()) {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }
private static class SdpObserverLatch implements SdpObserver {
private boolean success = false;
private SessionDescription sdp = null;
private String error = null;
private CountDownLatch latch = new CountDownLatch(1);
public SdpObserverLatch() {}
public void onCreateSuccess(SessionDescription sdp) {
this.sdp = sdp;
onSetSuccess();
}
public void onSetSuccess() {
success = true;
latch.countDown();
}
public void onCreateFailure(String error) {
onSetFailure(error);
}
public void onSetFailure(String error) {
this.error = error;
latch.countDown();
}
public boolean await() {
try {
assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
return getSuccess();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public boolean getSuccess() {
return success;
}
public SessionDescription getSdp() {
return sdp;
}
public String getError() {
return error;
}
}
static int videoWindowsMapped = -1;
private static class TestRenderer implements VideoRenderer.Callbacks {
public int width = -1;
public int height = -1;
public int numFramesDelivered = 0;
public void setSize(int width, int height) {
assertEquals(this.width, -1);
assertEquals(this.height, -1);
this.width = width;
this.height = height;
}
public void renderFrame(VideoRenderer.I420Frame frame) {
++numFramesDelivered;
}
}
private static VideoRenderer createVideoRenderer(
ObserverExpectations observer) {
if (!RENDER_TO_GUI) {
return new VideoRenderer(observer);
}
++videoWindowsMapped;
assertTrue(videoWindowsMapped < 4);
int x = videoWindowsMapped % 2 != 0 ? 700 : 0;
int y = videoWindowsMapped >= 2 ? 0 : 500;
return VideoRenderer.createGui(x, y);
}
// Return a weak reference to test that ownership is correctly held by
// PeerConnection, not by test code.
private static WeakReference<MediaStream> addTracksToPC(
PeerConnectionFactory factory, PeerConnection pc,
VideoSource videoSource,
String streamLabel, String videoTrackId, String audioTrackId,
ObserverExpectations observer) {
MediaStream lMS = factory.createLocalMediaStream(streamLabel);
VideoTrack videoTrack =
factory.createVideoTrack(videoTrackId, videoSource);
assertNotNull(videoTrack);
VideoRenderer videoRenderer = createVideoRenderer(observer);
assertNotNull(videoRenderer);
videoTrack.addRenderer(videoRenderer);
lMS.addTrack(videoTrack);
// Just for fun, let's remove and re-add the track.
lMS.removeTrack(videoTrack);
lMS.addTrack(videoTrack);
lMS.addTrack(factory.createAudioTrack(audioTrackId));
pc.addStream(lMS, new MediaConstraints());
return new WeakReference<MediaStream>(lMS);
}
  // Convenience overload asserting that two SessionDescriptions have equal
  // |type| and |description| fields (compared field-by-field).
  private static void assertEquals(
      SessionDescription lhs, SessionDescription rhs) {
    assertEquals(lhs.type, rhs.type);
    assertEquals(lhs.description, rhs.description);
  }
  /**
   * Drives a complete offer/answer exchange between two in-process
   * PeerConnections (offerer and answerer, sharing one capturer), exchanges
   * ICE candidates, waits for connection/frame-delivery expectations, then
   * shuts both connections down.
   */
  @Test
  public void testCompleteSession() throws Exception {
    // NOTE(review): |testDone| appears unused in this method — confirm.
    CountDownLatch testDone = new CountDownLatch(1);
    PeerConnectionFactory factory = new PeerConnectionFactory();
    MediaConstraints constraints = new MediaConstraints();
    LinkedList<PeerConnection.IceServer> iceServers =
        new LinkedList<PeerConnection.IceServer>();
    iceServers.add(new PeerConnection.IceServer(
        "stun:stun.l.google.com:19302"));
    iceServers.add(new PeerConnection.IceServer(
        "turn:fake.example.com", "fakeUsername", "fakePassword"));
    ObserverExpectations offeringExpectations = new ObserverExpectations();
    PeerConnection offeringPC = factory.createPeerConnection(
        iceServers, constraints, offeringExpectations);
    assertNotNull(offeringPC);
    ObserverExpectations answeringExpectations = new ObserverExpectations();
    PeerConnection answeringPC = factory.createPeerConnection(
        iceServers, constraints, answeringExpectations);
    assertNotNull(answeringPC);
    // We want to use the same camera for offerer & answerer, so create it here
    // instead of in addTracksToPC.
    VideoSource videoSource = factory.createVideoSource(
        VideoCapturer.create(""), new MediaConstraints());
    // TODO(fischman): the track ids here and in the addTracksToPC() call
    // below hard-code the <mediaStreamLabel>[av]<index> scheme used in the
    // serialized SDP, because the C++ API doesn't auto-translate.
    // Drop |label| params from {Audio,Video}Track-related APIs once
    // https://code.google.com/p/webrtc/issues/detail?id=1253 is fixed.
    WeakReference<MediaStream> oLMS = addTracksToPC(
        factory, offeringPC, videoSource, "oLMS", "oLMSv0", "oLMSa0",
        offeringExpectations);
    // Offerer creates its offer.
    SdpObserverLatch sdpLatch = new SdpObserverLatch();
    offeringPC.createOffer(sdpLatch, constraints);
    assertTrue(sdpLatch.await());
    SessionDescription offerSdp = sdpLatch.getSdp();
    assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
    assertFalse(offerSdp.description.isEmpty());
    // Answerer receives the offer.
    sdpLatch = new SdpObserverLatch();
    answeringExpectations.expectSignalingChange(
        SignalingState.HAVE_REMOTE_OFFER);
    answeringExpectations.expectAddStream("oLMS");
    answeringPC.setRemoteDescription(sdpLatch, offerSdp);
    answeringExpectations.waitForAllExpectationsToBeSatisfied();
    assertEquals(
        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
    assertTrue(sdpLatch.await());
    // Set operations report no SDP.
    assertNull(sdpLatch.getSdp());
    WeakReference<MediaStream> aLMS = addTracksToPC(
        factory, answeringPC, videoSource, "aLMS", "aLMSv0", "aLMSa0",
        answeringExpectations);
    // Answerer creates its answer.
    sdpLatch = new SdpObserverLatch();
    answeringPC.createAnswer(sdpLatch, constraints);
    assertTrue(sdpLatch.await());
    SessionDescription answerSdp = sdpLatch.getSdp();
    assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
    assertFalse(answerSdp.description.isEmpty());
    // Candidate gathering starts once local descriptions are set; expect a
    // minimal count on each side (see onIceCandidate).
    offeringExpectations.expectIceCandidates(2);
    answeringExpectations.expectIceCandidates(2);
    sdpLatch = new SdpObserverLatch();
    answeringExpectations.expectSignalingChange(SignalingState.STABLE);
    answeringPC.setLocalDescription(sdpLatch, answerSdp);
    assertTrue(sdpLatch.await());
    assertNull(sdpLatch.getSdp());
    sdpLatch = new SdpObserverLatch();
    offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
    offeringPC.setLocalDescription(sdpLatch, offerSdp);
    assertTrue(sdpLatch.await());
    assertNull(sdpLatch.getSdp());
    sdpLatch = new SdpObserverLatch();
    offeringExpectations.expectSignalingChange(SignalingState.STABLE);
    offeringExpectations.expectAddStream("aLMS");
    offeringPC.setRemoteDescription(sdpLatch, answerSdp);
    assertTrue(sdpLatch.await());
    assertNull(sdpLatch.getSdp());
    offeringExpectations.waitForAllExpectationsToBeSatisfied();
    answeringExpectations.waitForAllExpectationsToBeSatisfied();
    // Both sides must now agree on local/remote description types.
    assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
    assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
    assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
    assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
    if (!RENDER_TO_GUI) {
      offeringExpectations.expectSetSize(640, 480);
      offeringExpectations.expectSetSize(640, 480);
      answeringExpectations.expectSetSize(640, 480);
      answeringExpectations.expectSetSize(640, 480);
      // Wait for at least some frames to be delivered at each end (number
      // chosen arbitrarily).
      offeringExpectations.expectFramesDelivered(10);
      answeringExpectations.expectFramesDelivered(10);
    }
    offeringExpectations.expectIceConnectionChange(
        IceConnectionState.CHECKING);
    offeringExpectations.expectIceConnectionChange(
        IceConnectionState.CONNECTED);
    answeringExpectations.expectIceConnectionChange(
        IceConnectionState.CHECKING);
    answeringExpectations.expectIceConnectionChange(
        IceConnectionState.CONNECTED);
    offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
    answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
    // Cross-feed each side's gathered candidates to the other.
    for (IceCandidate candidate : offeringExpectations.gotIceCandidates) {
      answeringPC.addIceCandidate(candidate);
    }
    offeringExpectations.gotIceCandidates.clear();
    for (IceCandidate candidate : answeringExpectations.gotIceCandidates) {
      offeringPC.addIceCandidate(candidate);
    }
    answeringExpectations.gotIceCandidates.clear();
    offeringExpectations.waitForAllExpectationsToBeSatisfied();
    answeringExpectations.waitForAllExpectationsToBeSatisfied();
    assertEquals(
        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
    assertEquals(
        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
    if (RENDER_TO_GUI) {
      // Leave the GUI windows up for a few seconds of eyeball-verification.
      try {
        Thread.sleep(3000);
      } catch (Throwable t) {
        throw new RuntimeException(t);
      }
    }
    // TODO(fischman) MOAR test ideas:
    // - Test that PC.removeStream() works; requires a second
    //   createOffer/createAnswer dance.
    // - audit each place that uses |constraints| for specifying non-trivial
    //   constraints (and ensure they're honored).
    // - test error cases
    // - ensure reasonable coverage of _jni.cc is achieved.  Coverage is
    //   extra-important because of all the free-text (class/method names, etc)
    //   in JNI-style programming; make sure no typos!
    // - Test that shutdown mid-interaction is crash-free.
    // Free the Java-land objects, collect them, and sleep a bit to make sure we
    // don't get late-arrival crashes after the Java-land objects have been
    // freed.
    shutdownPC(offeringPC, offeringExpectations);
    offeringPC = null;
    shutdownPC(answeringPC, answeringExpectations);
    answeringPC = null;
    System.gc();
    Thread.sleep(100);
  }
  // Close and dispose |pc|, asserting that stats can be fetched both before
  // and after close(), and that exactly two stats reports were collected.
  private static void shutdownPC(
      PeerConnection pc, ObserverExpectations expectations) {
    // Stats while still open.
    expectations.expectStatsCallback();
    assertTrue(pc.getStats(expectations, null));
    expectations.waitForAllExpectationsToBeSatisfied();
    // close() must fire CLOSED on both the ICE and signaling observers.
    expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
    expectations.expectSignalingChange(SignalingState.CLOSED);
    pc.close();
    expectations.waitForAllExpectationsToBeSatisfied();
    // Stats must still work after close().
    expectations.expectStatsCallback();
    assertTrue(pc.getStats(expectations, null));
    expectations.waitForAllExpectationsToBeSatisfied();
    System.out.println("FYI stats: ");
    int reportIndex = -1;
    for (StatsReport[] reports : expectations.takeStatsReports()) {
      System.out.println(" Report #" + (++reportIndex));
      for (int i = 0; i < reports.length; ++i) {
        System.out.println("  " + reports[i].toString());
      }
    }
    // Two getStats() calls above => two reports => final index of 1.
    assertEquals(1, reportIndex);
    System.out.println("End stats.");
    pc.dispose();
  }
}

164
talk/app/webrtc/jsep.h Normal file
View File

@ -0,0 +1,164 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Interfaces matching the draft-ietf-rtcweb-jsep-01.
#ifndef TALK_APP_WEBRTC_JSEP_H_
#define TALK_APP_WEBRTC_JSEP_H_
#include <string>
#include <vector>
#include "talk/base/basictypes.h"
#include "talk/base/refcount.h"
namespace cricket {
class SessionDescription;
class Candidate;
} // namespace cricket
namespace webrtc {
// Describes a failure to parse an SDP blob: the offending line and a
// human-readable explanation.
struct SdpParseError {
 public:
  // The sdp line that causes the error.
  std::string line;
  // Explains the error.
  std::string description;
};
// Class representation of an ICE candidate.
// An instance of this interface is supposed to be owned by one class at
// a time and is therefore not expected to be thread safe.
class IceCandidateInterface {
 public:
  virtual ~IceCandidateInterface() {}
  // If present, this contains the identifier of the "media stream
  // identification" (as defined in [RFC 3388]) for the m-line this candidate
  // is associated with.
  virtual std::string sdp_mid() const = 0;
  // This indicates the index (starting at zero) of the m-line in the SDP this
  // candidate is associated with.
  virtual int sdp_mline_index() const = 0;
  // The underlying libjingle candidate.
  virtual const cricket::Candidate& candidate() const = 0;
  // Creates an SDP-formatted representation of this candidate in |out|.
  virtual bool ToString(std::string* out) const = 0;
};
// Creates an IceCandidateInterface based on an SDP string.
// Returns NULL if the sdp string can't be parsed.
// TODO(ronghuawu): Deprecated.
IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
                                          int sdp_mline_index,
                                          const std::string& sdp);

// As above, but additionally reports the failure reason via |error|.
// |error| can be NULL if the caller doesn't care about the failure reason.
IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
                                          int sdp_mline_index,
                                          const std::string& sdp,
                                          SdpParseError* error);

// This class represents a collection of candidates for a specific m-line.
// This class is used in SessionDescriptionInterface to represent all known
// candidates for a certain m-line.
class IceCandidateCollection {
 public:
  virtual ~IceCandidateCollection() {}
  // Number of candidates in the collection.
  virtual size_t count() const = 0;
  // Returns true if an equivalent |candidate| exists in the collection.
  virtual bool HasCandidate(const IceCandidateInterface* candidate) const = 0;
  // Returns the candidate at |index|.
  virtual const IceCandidateInterface* at(size_t index) const = 0;
};
// Class representation of a Session description.
// An instance of this interface is supposed to be owned by one class at
// a time and is therefore not expected to be thread safe.
class SessionDescriptionInterface {
 public:
  // Supported types:
  static const char kOffer[];
  static const char kPrAnswer[];
  static const char kAnswer[];
  virtual ~SessionDescriptionInterface() {}
  // The underlying libjingle session description.
  virtual cricket::SessionDescription* description() = 0;
  virtual const cricket::SessionDescription* description() const = 0;
  // Get the session id and session version, which are defined based on
  // RFC 4566 for the SDP o= line.
  virtual std::string session_id() const = 0;
  virtual std::string session_version() const = 0;
  // One of the supported type constants declared above.
  virtual std::string type() const = 0;
  // Adds the specified candidate to the description.
  // Ownership is not transferred.
  // Returns false if the session description does not have a media section
  // that corresponds to the |candidate| label.
  virtual bool AddCandidate(const IceCandidateInterface* candidate) = 0;
  // Returns the number of m-lines in the session description.
  virtual size_t number_of_mediasections() const = 0;
  // Returns a collection of all candidates that belong to a certain m-line.
  virtual const IceCandidateCollection* candidates(
      size_t mediasection_index) const = 0;
  // Serializes the description to SDP.
  virtual bool ToString(std::string* out) const = 0;
};
// Creates a SessionDescriptionInterface based on SDP string and the type.
// Returns NULL if the sdp string can't be parsed or the type is unsupported.
// TODO(ronghuawu): Deprecated.
SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
const std::string& sdp);
// |error| can be NULL if doesn't care about the failure reason.
SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
const std::string& sdp,
SdpParseError* error);
// Jsep CreateOffer and CreateAnswer callback interface.
class CreateSessionDescriptionObserver : public talk_base::RefCountInterface {
 public:
  // The implementation of the CreateSessionDescriptionObserver takes
  // the ownership of the |desc|.
  virtual void OnSuccess(SessionDescriptionInterface* desc) = 0;
  virtual void OnFailure(const std::string& error) = 0;
 protected:
  // Protected destructor: lifetime is managed via the ref-counting interface.
  ~CreateSessionDescriptionObserver() {}
};
// Jsep SetLocalDescription and SetRemoteDescription callback interface.
class SetSessionDescriptionObserver : public talk_base::RefCountInterface {
 public:
  virtual void OnSuccess() = 0;
  virtual void OnFailure(const std::string& error) = 0;
 protected:
  // Protected destructor: lifetime is managed via the ref-counting interface.
  ~SetSessionDescriptionObserver() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_JSEP_H_

View File

@ -0,0 +1,105 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/jsepicecandidate.h"
#include <vector>
#include "talk/app/webrtc/webrtcsdp.h"
#include "talk/base/stringencode.h"
namespace webrtc {
// Convenience overload that discards parse-error details.
IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
                                          int sdp_mline_index,
                                          const std::string& sdp) {
  return CreateIceCandidate(sdp_mid, sdp_mline_index, sdp, NULL);
}
// Parses |sdp| into a new JsepIceCandidate. Returns NULL (and fills in
// |error|, when non-NULL) if the candidate line cannot be deserialized.
// The caller takes ownership of the returned object.
IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
                                          int sdp_mline_index,
                                          const std::string& sdp,
                                          SdpParseError* error) {
  JsepIceCandidate* candidate = new JsepIceCandidate(sdp_mid, sdp_mline_index);
  const bool parsed = candidate->Initialize(sdp, error);
  if (parsed)
    return candidate;
  delete candidate;
  return NULL;
}
// Constructs a candidate shell with only the mid / m-line index; the
// cricket::Candidate payload is filled in later by Initialize() or
// SetCandidate().
JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
                                   int sdp_mline_index)
    : sdp_mid_(sdp_mid),
      sdp_mline_index_(sdp_mline_index) {
}

// Constructs a fully-populated candidate.
JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
                                   int sdp_mline_index,
                                   const cricket::Candidate& candidate)
    : sdp_mid_(sdp_mid),
      sdp_mline_index_(sdp_mline_index),
      candidate_(candidate) {
}

JsepIceCandidate::~JsepIceCandidate() {
}

// Parses the candidate payload from an SDP candidate line into this object.
// |err| may be NULL if the failure reason is not needed.
bool JsepIceCandidate::Initialize(const std::string& sdp, SdpParseError* err) {
  return SdpDeserializeCandidate(sdp, this, err);
}
// Serializes this candidate to an SDP candidate line.
// Returns false when |out| is NULL or serialization produces nothing.
bool JsepIceCandidate::ToString(std::string* out) const {
  if (out == NULL)
    return false;
  const std::string serialized = SdpSerializeCandidate(*this);
  *out = serialized;
  return !serialized.empty();
}
// The collection owns its JsepIceCandidates; free them on destruction.
JsepCandidateCollection::~JsepCandidateCollection() {
  for (size_t i = 0; i < candidates_.size(); ++i) {
    delete candidates_[i];
  }
}
bool JsepCandidateCollection::HasCandidate(
const IceCandidateInterface* candidate) const {
bool ret = false;
for (std::vector<JsepIceCandidate*>::const_iterator it = candidates_.begin();
it != candidates_.end(); ++it) {
if ((*it)->sdp_mid() == candidate->sdp_mid() &&
(*it)->sdp_mline_index() == candidate->sdp_mline_index() &&
(*it)->candidate().IsEquivalent(candidate->candidate())) {
ret = true;
break;
}
}
return ret;
}
} // namespace webrtc

View File

@ -0,0 +1,92 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Implements the IceCandidateInterface.
#ifndef TALK_APP_WEBRTC_JSEPICECANDIDATE_H_
#define TALK_APP_WEBRTC_JSEPICECANDIDATE_H_
#include <string>
#include "talk/app/webrtc/jsep.h"
#include "talk/base/constructormagic.h"
#include "talk/p2p/base/candidate.h"
namespace webrtc {
class JsepIceCandidate : public IceCandidateInterface {
 public:
  JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index);
  JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
                   const cricket::Candidate& candidate);
  ~JsepIceCandidate();
  // |error| can be NULL if don't care about the failure reason.
  bool Initialize(const std::string& sdp, SdpParseError* err);
  // Replaces the wrapped cricket-level candidate.
  void SetCandidate(const cricket::Candidate& candidate) {
    candidate_ = candidate;
  }

  virtual std::string sdp_mid() const { return sdp_mid_; }
  virtual int sdp_mline_index() const { return sdp_mline_index_; }
  virtual const cricket::Candidate& candidate() const {
    return candidate_;
  }

  // Serializes to an SDP candidate line; see IceCandidateInterface.
  virtual bool ToString(std::string* out) const;

 private:
  std::string sdp_mid_;
  int sdp_mline_index_;
  cricket::Candidate candidate_;

  DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate);
};
// Implementation of IceCandidateCollection.
// This implementation stores JsepIceCandidates.
class JsepCandidateCollection : public IceCandidateCollection {
 public:
  // Deletes all owned candidates.
  ~JsepCandidateCollection();
  virtual size_t count() const {
    return candidates_.size();
  }
  virtual bool HasCandidate(const IceCandidateInterface* candidate) const;
  // Adds and takes ownership of the JsepIceCandidate.
  virtual void add(JsepIceCandidate* candidate) {
    candidates_.push_back(candidate);
  }
  virtual const IceCandidateInterface* at(size_t index) const {
    return candidates_[index];
  }

 private:
  std::vector<JsepIceCandidate*> candidates_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_JSEPICECANDIDATE_H_

View File

@ -0,0 +1,193 @@
/* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/webrtcsdp.h"
#include "talk/base/stringencode.h"
#include "talk/session/media/mediasession.h"
using talk_base::scoped_ptr;
using cricket::SessionDescription;
namespace webrtc {
// The description types that CreateSessionDescription() accepts.
static const char* kSupportedTypes[] = {
    JsepSessionDescription::kOffer,
    JsepSessionDescription::kPrAnswer,
    JsepSessionDescription::kAnswer
};
// Returns true when |type| is one of the JSEP description types listed in
// kSupportedTypes (offer/pranswer/answer).
static bool IsTypeSupported(const std::string& type) {
  for (size_t i = 0; i < ARRAY_SIZE(kSupportedTypes); ++i) {
    if (type == kSupportedTypes[i])
      return true;
  }
  return false;
}
// String values returned by SessionDescriptionInterface::type().
const char SessionDescriptionInterface::kOffer[] = "offer";
const char SessionDescriptionInterface::kPrAnswer[] = "pranswer";
const char SessionDescriptionInterface::kAnswer[] = "answer";

// Default video encoder settings declared in jsepsessiondescription.h.
const int JsepSessionDescription::kDefaultVideoCodecId = 100;
const int JsepSessionDescription::kDefaultVideoCodecFramerate = 30;
const char JsepSessionDescription::kDefaultVideoCodecName[] = "VP8";
const int JsepSessionDescription::kMaxVideoCodecWidth = 1280;
const int JsepSessionDescription::kMaxVideoCodecHeight = 720;
const int JsepSessionDescription::kDefaultVideoCodecPreference = 1;
// Convenience overload that discards parse-error details.
SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
                                                      const std::string& sdp) {
  return CreateSessionDescription(type, sdp, NULL);
}
// Builds a JsepSessionDescription of the given |type| from an SDP blob.
// Returns NULL if |type| is not offer/pranswer/answer or if |sdp| fails to
// parse (in which case |error|, when non-NULL, describes the failure).
// The caller takes ownership of the returned object.
SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
                                                      const std::string& sdp,
                                                      SdpParseError* error) {
  if (!IsTypeSupported(type)) {
    return NULL;
  }
  JsepSessionDescription* desc = new JsepSessionDescription(type);
  const bool parsed = desc->Initialize(sdp, error);
  if (parsed)
    return desc;
  delete desc;
  return NULL;
}
// Constructs an empty description of the given |type|; the content is
// supplied later via one of the Initialize() overloads.
JsepSessionDescription::JsepSessionDescription(const std::string& type)
    : type_(type) {
}

JsepSessionDescription::~JsepSessionDescription() {}
// Takes ownership of |description| and records the o= line identifiers.
// Returns false (without taking ownership) if |description| is NULL.
bool JsepSessionDescription::Initialize(
    cricket::SessionDescription* description,
    const std::string& session_id,
    const std::string& session_version) {
  if (!description)
    return false;
  session_id_ = session_id;
  session_version_ = session_version;
  description_.reset(description);
  // One candidate collection per m-line, so candidates can be added later.
  candidate_collection_.resize(number_of_mediasections());
  return true;
}
// Populates this object by deserializing an SDP blob.
// |error| may be NULL if the failure reason is not needed.
bool JsepSessionDescription::Initialize(const std::string& sdp,
                                        SdpParseError* error) {
  return SdpDeserialize(sdp, this, error);
}
// Adds a copy of |candidate| to the collection of the media section it
// belongs to (matched by mid when present, otherwise by m-line index).
// Ownership of |candidate| stays with the caller. Returns false when the
// candidate is NULL, has a negative m-line index, or does not match any
// media section/transport of this description.
bool JsepSessionDescription::AddCandidate(
    const IceCandidateInterface* candidate) {
  if (!candidate || candidate->sdp_mline_index() < 0)
    return false;
  size_t mediasection_index = 0;
  if (!GetMediasectionIndex(candidate, &mediasection_index)) {
    return false;
  }
  if (mediasection_index >= number_of_mediasections())
    return false;
  if (candidate_collection_[mediasection_index].HasCandidate(candidate)) {
    return true;  // Silently ignore this candidate if we already have it.
  }
  const std::string content_name =
      description_->contents()[mediasection_index].name;
  const cricket::TransportInfo* transport_info =
      description_->GetTransportInfoByName(content_name);
  if (!transport_info) {
    return false;
  }
  // If the candidate carries no ICE credentials, inherit the ufrag/pwd of
  // the transport of its media section.
  cricket::Candidate updated_candidate = candidate->candidate();
  if (updated_candidate.username().empty()) {
    updated_candidate.set_username(transport_info->description.ice_ufrag);
  }
  if (updated_candidate.password().empty()) {
    updated_candidate.set_password(transport_info->description.ice_pwd);
  }
  // The stored copy uses the resolved m-line index, which may differ from
  // the input candidate's index when the mid was used for matching.
  candidate_collection_[mediasection_index].add(
      new JsepIceCandidate(candidate->sdp_mid(),
                           mediasection_index,
                           updated_candidate));
  return true;
}
// Number of m-lines; zero until a cricket description has been set.
size_t JsepSessionDescription::number_of_mediasections() const {
  if (description_.get() == NULL)
    return 0;
  return description_->contents().size();
}
// Returns the candidate collection for the given m-line, or NULL when the
// index is out of range.
const IceCandidateCollection* JsepSessionDescription::candidates(
    size_t mediasection_index) const {
  const bool in_range = mediasection_index < candidate_collection_.size();
  return in_range ? &candidate_collection_[mediasection_index] : NULL;
}
// Serializes this description to SDP. Fails when the description has not
// been initialized, |out| is NULL, or serialization yields an empty string.
bool JsepSessionDescription::ToString(std::string* out) const {
  if (description_.get() == NULL || out == NULL)
    return false;
  const std::string sdp = SdpSerialize(*this);
  *out = sdp;
  return !sdp.empty();
}
// Resolves the media-section index for |candidate|: the candidate's own
// m-line index by default, overridden by a content-name match when the
// candidate carries a non-empty mid. A mid that matches no content name is
// treated as an error.
bool JsepSessionDescription::GetMediasectionIndex(
    const IceCandidateInterface* candidate,
    size_t* index) {
  if (candidate == NULL || index == NULL) {
    return false;
  }
  *index = static_cast<size_t>(candidate->sdp_mline_index());
  const std::string mid = candidate->sdp_mid();
  if (!description_ || mid.empty()) {
    return true;
  }
  // Try to match the sdp_mid with a content name.
  for (size_t i = 0; i < description_->contents().size(); ++i) {
    if (description_->contents().at(i).name == mid) {
      *index = i;
      return true;
    }
  }
  return false;
}
} // namespace webrtc

View File

@ -0,0 +1,106 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Implements the SessionDescriptionInterface.
#ifndef TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_
#define TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_
#include <string>
#include <vector>
#include "talk/app/webrtc/jsep.h"
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/base/scoped_ptr.h"
namespace cricket {
class SessionDescription;
}
namespace webrtc {
// Wraps a cricket::SessionDescription plus one candidate collection per
// m-line, and implements SDP (de)serialization for it.
class JsepSessionDescription : public SessionDescriptionInterface {
 public:
  explicit JsepSessionDescription(const std::string& type);
  virtual ~JsepSessionDescription();

  // |error| can be NULL if don't care about the failure reason.
  bool Initialize(const std::string& sdp, SdpParseError* error);

  // Takes ownership of |description|.
  bool Initialize(cricket::SessionDescription* description,
      const std::string& session_id,
      const std::string& session_version);

  virtual cricket::SessionDescription* description() {
    return description_.get();
  }
  virtual const cricket::SessionDescription* description() const {
    return description_.get();
  }
  virtual std::string session_id() const {
    return session_id_;
  }
  virtual std::string session_version() const {
    return session_version_;
  }
  virtual std::string type() const {
    return type_;
  }
  // Allow changing the type. Used for testing.
  void set_type(const std::string& type) { type_ = type; }
  virtual bool AddCandidate(const IceCandidateInterface* candidate);
  virtual size_t number_of_mediasections() const;
  virtual const IceCandidateCollection* candidates(
      size_t mediasection_index) const;
  virtual bool ToString(std::string* out) const;

  // Default video encoder settings. The resolution is the max resolution.
  // TODO(perkj): Implement proper negotiation of video resolution.
  static const int kDefaultVideoCodecId;
  static const int kDefaultVideoCodecFramerate;
  static const char kDefaultVideoCodecName[];
  static const int kMaxVideoCodecWidth;
  static const int kMaxVideoCodecHeight;
  static const int kDefaultVideoCodecPreference;

 private:
  talk_base::scoped_ptr<cricket::SessionDescription> description_;
  std::string session_id_;
  std::string session_version_;
  std::string type_;
  // One collection per m-line; sized by Initialize().
  std::vector<JsepCandidateCollection> candidate_collection_;

  // Resolves the media-section index for |candidate|; see the .cc file.
  bool GetMediasectionIndex(const IceCandidateInterface* candidate,
                            size_t* index);

  DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription);
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_JSEPSESSIONDESCRIPTION_H_

View File

@ -0,0 +1,223 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/base/gunit.h"
#include "talk/base/helpers.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/stringencode.h"
#include "talk/p2p/base/candidate.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"
#include "talk/session/media/mediasession.h"
using webrtc::IceCandidateCollection;
using webrtc::IceCandidateInterface;
using webrtc::JsepIceCandidate;
using webrtc::JsepSessionDescription;
using webrtc::SessionDescriptionInterface;
using talk_base::scoped_ptr;
static const char kCandidateUfrag[] = "ufrag";
static const char kCandidatePwd[] = "pwd";
static const char kCandidateUfragVoice[] = "ufrag_voice";
static const char kCandidatePwdVoice[] = "pwd_voice";
static const char kCandidateUfragVideo[] = "ufrag_video";
static const char kCandidatePwdVideo[] = "pwd_video";
// This creates a session description with both audio and video media contents.
// In SDP this is described by two m lines, one audio and one video.
static cricket::SessionDescription* CreateCricketSessionDescription() {
  cricket::SessionDescription* desc(new cricket::SessionDescription());
  // AudioContentDescription
  scoped_ptr<cricket::AudioContentDescription> audio(
      new cricket::AudioContentDescription());
  // VideoContentDescription
  scoped_ptr<cricket::VideoContentDescription> video(
      new cricket::VideoContentDescription());

  audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0, 0));
  desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
                   audio.release());

  video->AddCodec(cricket::VideoCodec(120, "VP8", 640, 480, 30, 0));
  desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
                   video.release());

  // Give each content its own transport with distinct ICE credentials so
  // the tests can verify which transport a candidate inherited from.
  EXPECT_TRUE(desc->AddTransportInfo(
      cricket::TransportInfo(
                             cricket::CN_AUDIO,
                             cricket::TransportDescription(
                                 cricket::NS_GINGLE_P2P,
                                 std::vector<std::string>(),
                                 kCandidateUfragVoice, kCandidatePwdVoice,
                                 cricket::ICEMODE_FULL, NULL,
                                 cricket::Candidates()))));
  EXPECT_TRUE(desc->AddTransportInfo(
      cricket::TransportInfo(cricket::CN_VIDEO,
                             cricket::TransportDescription(
                                 cricket::NS_GINGLE_P2P,
                                 std::vector<std::string>(),
                                 kCandidateUfragVideo, kCandidatePwdVideo,
                                 cricket::ICEMODE_FULL, NULL,
                                 cricket::Candidates()))));
  return desc;
}
// Fixture that owns a two-m-line JsepSessionDescription and a sample
// cricket::Candidate for the tests to add.
class JsepSessionDescriptionTest : public testing::Test {
 protected:
  virtual void SetUp() {
    int port = 1234;
    talk_base::SocketAddress address("127.0.0.1", port++);
    // A minimal RTP candidate with empty ICE credentials, so tests can
    // observe credential inheritance from the transport.
    cricket::Candidate candidate("rtp", cricket::ICE_CANDIDATE_COMPONENT_RTP,
                                 "udp", address, 1, "",
                                 "", "local", "eth0", 0, "1");
    candidate_ = candidate;
    const std::string session_id =
        talk_base::ToString(talk_base::CreateRandomId64());
    const std::string session_version =
        talk_base::ToString(talk_base::CreateRandomId());
    jsep_desc_.reset(new JsepSessionDescription("dummy"));
    ASSERT_TRUE(jsep_desc_->Initialize(CreateCricketSessionDescription(),
                                       session_id, session_version));
  }

  // Serializes |desc| to SDP, asserting that serialization succeeds.
  std::string Serialize(const SessionDescriptionInterface* desc) {
    std::string sdp;
    EXPECT_TRUE(desc->ToString(&sdp));
    EXPECT_FALSE(sdp.empty());
    return sdp;
  }

  // Parses |sdp| into a new description owned by the caller.
  SessionDescriptionInterface* DeSerialize(const std::string& sdp) {
    JsepSessionDescription* desc(new JsepSessionDescription("dummy"));
    EXPECT_TRUE(desc->Initialize(sdp, NULL));
    return desc;
  }

  cricket::Candidate candidate_;
  talk_base::scoped_ptr<JsepSessionDescription> jsep_desc_;
};
// Test that number_of_mediasections() returns the number of media contents in
// a session description.
TEST_F(JsepSessionDescriptionTest, CheckSessionDescription) {
  EXPECT_EQ(2u, jsep_desc_->number_of_mediasections());
}

// Test that we can add a candidate to a session description.
TEST_F(JsepSessionDescriptionTest, AddCandidateWithoutMid) {
  JsepIceCandidate jsep_candidate("", 0, candidate_);
  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
  ASSERT_TRUE(ice_candidates != NULL);
  EXPECT_EQ(1u, ice_candidates->count());
  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
  ASSERT_TRUE(ice_candidate != NULL);
  // With no mid, the candidate maps to m-line 0 and should have inherited
  // the audio transport's ufrag/pwd since it carried none of its own.
  candidate_.set_username(kCandidateUfragVoice);
  candidate_.set_password(kCandidatePwdVoice);
  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
  EXPECT_EQ(0, ice_candidate->sdp_mline_index());
  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
}

TEST_F(JsepSessionDescriptionTest, AddCandidateWithMid) {
  // mid and m-line index don't match, in this case mid is preferred.
  JsepIceCandidate jsep_candidate("video", 0, candidate_);
  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
  EXPECT_EQ(0u, jsep_desc_->candidates(0)->count());
  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(1);
  ASSERT_TRUE(ice_candidates != NULL);
  EXPECT_EQ(1u, ice_candidates->count());
  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
  ASSERT_TRUE(ice_candidate != NULL);
  // Credentials come from the video transport, since the mid matched video.
  candidate_.set_username(kCandidateUfragVideo);
  candidate_.set_password(kCandidatePwdVideo);
  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
  // The mline index should have been updated according to mid.
  EXPECT_EQ(1, ice_candidate->sdp_mline_index());
}

TEST_F(JsepSessionDescriptionTest, AddCandidateAlreadyHasUfrag) {
  candidate_.set_username(kCandidateUfrag);
  candidate_.set_password(kCandidatePwd);
  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
  ASSERT_TRUE(ice_candidates != NULL);
  EXPECT_EQ(1u, ice_candidates->count());
  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
  ASSERT_TRUE(ice_candidate != NULL);
  // Pre-set credentials must be preserved, not overwritten by the
  // transport's ufrag/pwd.
  candidate_.set_username(kCandidateUfrag);
  candidate_.set_password(kCandidatePwd);
  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
}
// Test that we can not add a candidate if there is no corresponding media
// content in the session description.
TEST_F(JsepSessionDescriptionTest, AddBadCandidate) {
  // m-line index 55 is out of range and there is no mid to match on.
  JsepIceCandidate bad_candidate1("", 55, candidate_);
  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate1));
  // A non-empty mid that matches no content name is rejected.
  JsepIceCandidate bad_candidate2("some weird mid", 0, candidate_);
  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate2));
}

// Test that we can serialize a JsepSessionDescription and deserialize it again.
TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) {
  std::string sdp = Serialize(jsep_desc_.get());

  scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(DeSerialize(sdp));
  EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());

  // Round-tripping should be lossless: re-serializing the parsed
  // description must produce identical SDP.
  std::string parsed_sdp = Serialize(parsed_jsep_desc.get());
  EXPECT_EQ(sdp, parsed_sdp);
}

TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) {
  std::string sdp = Serialize(jsep_desc_.get());

  // Add a candidate and check that the serialized result is different.
  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
  std::string sdp_with_candidate = Serialize(jsep_desc_.get());
  EXPECT_NE(sdp, sdp_with_candidate);

  scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(
      DeSerialize(sdp_with_candidate));
  std::string parsed_sdp_with_candidate = Serialize(parsed_jsep_desc.get());

  EXPECT_EQ(sdp_with_candidate, parsed_sdp_with_candidate);
}

View File

@ -0,0 +1,127 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localaudiosource.h"
#include <vector>
#include "talk/media/base/mediaengine.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
namespace webrtc {
// Constraint keys.
// They are declared as static members in mediaconstraintsinterface.h
// Out-of-line definitions of the audio-processing constraint keys declared
// in mediaconstraintsinterface.h. The string values are the constraint
// names exposed to applications.
const char MediaConstraintsInterface::kEchoCancellation[] =
    "googEchoCancellation";
const char MediaConstraintsInterface::kExperimentalEchoCancellation[] =
    "googEchoCancellation2";
const char MediaConstraintsInterface::kAutoGainControl[] =
    "googAutoGainControl";
const char MediaConstraintsInterface::kExperimentalAutoGainControl[] =
    "googAutoGainControl2";
const char MediaConstraintsInterface::kNoiseSuppression[] =
    "googNoiseSuppression";
const char MediaConstraintsInterface::kHighpassFilter[] =
    "googHighpassFilter";
const char MediaConstraintsInterface::kInternalAecDump[] = "internalAecDump";
namespace {
// Convert constraints to audio options. Return false if constraints are
// invalid.
// Converts audio constraints into cricket::AudioOptions. All audio
// constraints here are boolean-valued "options"; a value that does not
// parse as a bool, or an unrecognized key, marks the conversion as failed,
// but the remaining constraints are still applied. If the constraints are
// extended to include non-boolean values or actual format constraints, a
// different algorithm will be required.
bool FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
                     cricket::AudioOptions* options) {
  bool success = true;
  typedef MediaConstraintsInterface::Constraints::const_iterator
      ConstraintIter;
  for (ConstraintIter iter = constraints.begin(); iter != constraints.end();
       ++iter) {
    bool value = false;
    if (!talk_base::FromString(iter->value, &value)) {
      // Non-boolean value: record the failure and keep going.
      success = false;
      continue;
    }
    if (iter->key == MediaConstraintsInterface::kEchoCancellation) {
      options->echo_cancellation.Set(value);
    } else if (iter->key ==
               MediaConstraintsInterface::kExperimentalEchoCancellation) {
      options->experimental_aec.Set(value);
    } else if (iter->key == MediaConstraintsInterface::kAutoGainControl) {
      options->auto_gain_control.Set(value);
    } else if (iter->key ==
               MediaConstraintsInterface::kExperimentalAutoGainControl) {
      options->experimental_agc.Set(value);
    } else if (iter->key == MediaConstraintsInterface::kNoiseSuppression) {
      options->noise_suppression.Set(value);
    } else if (iter->key == MediaConstraintsInterface::kHighpassFilter) {
      options->highpass_filter.Set(value);
    } else if (iter->key == MediaConstraintsInterface::kInternalAecDump) {
      options->aec_dump.Set(value);
    } else {
      // Unrecognized audio constraint key.
      success = false;
    }
  }
  return success;
}
} // namespace
// Factory for a ref-counted LocalAudioSource configured from |constraints|
// (which may be NULL).
talk_base::scoped_refptr<LocalAudioSource> LocalAudioSource::Create(
    const MediaConstraintsInterface* constraints) {
  talk_base::scoped_refptr<LocalAudioSource> source(
      new talk_base::RefCountedObject<LocalAudioSource>());
  source->Initialize(constraints);
  return source;
}
// Applies |constraints| to options_ and sets the source state: kLive when
// all mandatory constraints are valid, kEnded otherwise. A NULL
// |constraints| leaves the source in its initial state.
void LocalAudioSource::Initialize(
    const MediaConstraintsInterface* constraints) {
  if (!constraints)
    return;

  // Apply optional constraints first, they will be overwritten by mandatory
  // constraints.
  FromConstraints(constraints->GetOptional(), &options_);

  cricket::AudioOptions options;
  if (!FromConstraints(constraints->GetMandatory(), &options)) {
    source_state_ = kEnded;
    return;
  }
  options_.SetAll(options);
  source_state_ = kLive;
}
} // namespace webrtc

View File

@ -0,0 +1,69 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_
#define TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
#include "talk/base/scoped_ptr.h"
#include "talk/media/base/mediachannel.h"
// LocalAudioSource implements AudioSourceInterface.
// This contains settings for switching audio processing on and off.
namespace webrtc {
class MediaConstraintsInterface;
// LocalAudioSource holds the audio-processing options derived from the
// media constraints it was created with. Its state reflects whether the
// mandatory constraints could be satisfied.
class LocalAudioSource : public Notifier<AudioSourceInterface> {
 public:
  // Creates an instance of LocalAudioSource.
  // |constraints| may be NULL, in which case no options are set.
  static talk_base::scoped_refptr<LocalAudioSource> Create(
      const MediaConstraintsInterface* constraints);

  // kLive once constraints were applied successfully; kEnded if a
  // mandatory constraint was invalid.
  virtual SourceState state() const { return source_state_; }

  // Audio options accumulated from the constraints at creation time.
  virtual const cricket::AudioOptions& options() const { return options_; }

 protected:
  LocalAudioSource()
      : source_state_(kInitializing) {
  }
  ~LocalAudioSource() {
  }

 private:
  // Parses |constraints| into options_ and updates source_state_.
  void Initialize(const MediaConstraintsInterface* constraints);

  cricket::AudioOptions options_;  // Accumulated audio-processing settings.
  SourceState source_state_;       // Current MediaSourceInterface state.
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_LOCALAUDIOSOURCE_H_

View File

@ -0,0 +1,118 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localaudiosource.h"
#include <string>
#include <vector>
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/base/gunit.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/base/fakevideorenderer.h"
#include "talk/media/devices/fakedevicemanager.h"
using webrtc::LocalAudioSource;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
// Verifies that valid boolean audio constraints — both mandatory and
// optional — are all reflected with their given values in options().
TEST(LocalAudioSourceTest, SetValidOptions) {
  webrtc::FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kEchoCancellation, false);
  constraints.AddOptional(
      MediaConstraintsInterface::kExperimentalEchoCancellation, true);
  constraints.AddOptional(MediaConstraintsInterface::kAutoGainControl, true);
  constraints.AddOptional(
      MediaConstraintsInterface::kExperimentalAutoGainControl, true);
  constraints.AddMandatory(MediaConstraintsInterface::kNoiseSuppression, false);
  constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, true);

  talk_base::scoped_refptr<LocalAudioSource> source =
      LocalAudioSource::Create(&constraints);

  // Every constraint set above must come back with the same value.
  bool value;
  EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
  EXPECT_FALSE(value);
  EXPECT_TRUE(source->options().experimental_aec.Get(&value));
  EXPECT_TRUE(value);
  EXPECT_TRUE(source->options().auto_gain_control.Get(&value));
  EXPECT_TRUE(value);
  EXPECT_TRUE(source->options().experimental_agc.Get(&value));
  EXPECT_TRUE(value);
  EXPECT_TRUE(source->options().noise_suppression.Get(&value));
  EXPECT_FALSE(value);
  EXPECT_TRUE(source->options().highpass_filter.Get(&value));
  EXPECT_TRUE(value);
}
// With empty constraints, no option should be set in the resulting source.
TEST(LocalAudioSourceTest, OptionNotSet) {
  webrtc::FakeConstraints constraints;
  talk_base::scoped_refptr<LocalAudioSource> source =
      LocalAudioSource::Create(&constraints);
  bool value;
  EXPECT_FALSE(source->options().highpass_filter.Get(&value));
}
// A mandatory constraint must win over an optional constraint that sets
// the same key to a different value.
TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
  webrtc::FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kEchoCancellation, false);
  constraints.AddOptional(MediaConstraintsInterface::kEchoCancellation, true);

  talk_base::scoped_refptr<LocalAudioSource> source =
      LocalAudioSource::Create(&constraints);

  bool value;
  EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
  // The mandatory value (false) must override the optional value (true).
  EXPECT_FALSE(value);
}
// An unknown optional constraint is ignored: the source still becomes
// kLive and the recognized keys are applied.
TEST(LocalAudioSourceTest, InvalidOptional) {
  webrtc::FakeConstraints constraints;
  constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, false);
  constraints.AddOptional("invalidKey", false);

  talk_base::scoped_refptr<LocalAudioSource> source =
      LocalAudioSource::Create(&constraints);

  EXPECT_EQ(MediaSourceInterface::kLive, source->state());
  bool value;
  EXPECT_TRUE(source->options().highpass_filter.Get(&value));
  EXPECT_FALSE(value);
}
// An unknown mandatory constraint must end the source and discard all
// options — even ones that were themselves valid.
TEST(LocalAudioSourceTest, InvalidMandatory) {
  webrtc::FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
  constraints.AddMandatory("invalidKey", false);

  talk_base::scoped_refptr<LocalAudioSource> source =
      LocalAudioSource::Create(&constraints);

  EXPECT_EQ(MediaSourceInterface::kEnded, source->state());
  bool value;
  EXPECT_FALSE(source->options().highpass_filter.Get(&value));
}

View File

@ -0,0 +1,442 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localvideosource.h"
#include <vector>
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/session/media/channelmanager.h"
using cricket::CaptureState;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
namespace webrtc {
// Constraint keys. Specified by draft-alvestrand-constraints-resolution-00b
// They are declared as static members in mediastreaminterface.h
// The string values must match the keys clients pass in constraints
// verbatim; they are compared literally in NewFormatWithConstraints.
const char MediaConstraintsInterface::kMinAspectRatio[] = "minAspectRatio";
const char MediaConstraintsInterface::kMaxAspectRatio[] = "maxAspectRatio";
const char MediaConstraintsInterface::kMaxWidth[] = "maxWidth";
const char MediaConstraintsInterface::kMinWidth[] = "minWidth";
const char MediaConstraintsInterface::kMaxHeight[] = "maxHeight";
const char MediaConstraintsInterface::kMinHeight[] = "minHeight";
const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
// Google-specific keys (treated as options, not format constraints).
const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction";
const char MediaConstraintsInterface::kLeakyBucket[] = "googLeakyBucket";
const char MediaConstraintsInterface::kTemporalLayeredScreencast[] =
    "googTemporalLayeredScreencast";
} // namespace webrtc
namespace {

// Margin used when comparing aspect ratios that have survived a
// double -> string -> double round trip (see NewFormatWithConstraints).
const double kRoundingTruncation = 0.0005;

// Thread-message ids for capture-state notifications.
// NOTE(review): these are not referenced anywhere in this file —
// presumably consumed by a messaging path elsewhere; confirm before
// assuming they are dead.
enum {
  MSG_VIDEOCAPTURESTATECONNECT,
  MSG_VIDEOCAPTURESTATEDISCONNECT,
  MSG_VIDEOCAPTURESTATECHANGE,
};

// Default resolution. If no constraint is specified, this is the resolution we
// will use.
static const cricket::VideoFormatPod kDefaultResolution =
    {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};

// List of formats used if the camera doesn't support capability enumeration.
static const cricket::VideoFormatPod kVideoFormats[] = {
  {1920, 1080, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {1280, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {960, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {640, 360, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {320, 240, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
  {320, 180, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY}
};
// Maps a cricket capture state onto the corresponding
// MediaSourceInterface ready state. Unknown states assert in debug
// builds and fall back to kEnded.
MediaSourceInterface::SourceState
GetReadyState(cricket::CaptureState state) {
  if (state == cricket::CS_STARTING)
    return MediaSourceInterface::kInitializing;
  if (state == cricket::CS_RUNNING)
    return MediaSourceInterface::kLive;
  if (state == cricket::CS_PAUSED)
    return MediaSourceInterface::kMuted;
  if (state == cricket::CS_FAILED ||
      state == cricket::CS_NO_DEVICE ||
      state == cricket::CS_STOPPED)
    return MediaSourceInterface::kEnded;
  ASSERT(false && "GetReadyState unknown state");
  return MediaSourceInterface::kEnded;
}
// Lowers |*original_limit| to |new_limit| when the current limit is
// unset (negative) or larger than |new_limit|. Never raises the limit.
void SetUpperLimit(int new_limit, int* original_limit) {
  const bool limit_is_unset = *original_limit < 0;
  if (limit_is_unset || new_limit < *original_limit) {
    *original_limit = new_limit;
  }
}
// Updates |format_upper_limit| from |constraint|.
// If constraint.maxFoo is smaller than format_upper_limit.foo,
// set format_upper_limit.foo to constraint.maxFoo.
// Keys other than maxWidth/maxHeight are ignored here.
void SetUpperLimitFromConstraint(
    const MediaConstraintsInterface::Constraint& constraint,
    cricket::VideoFormat* format_upper_limit) {
  if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
    int value = talk_base::FromString<int>(constraint.value);
    SetUpperLimit(value, &(format_upper_limit->width));
  } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
    int value = talk_base::FromString<int>(constraint.value);
    SetUpperLimit(value, &(format_upper_limit->height));
  }
}
// Fills |format_out| with the max width and height allowed by |constraints|.
// Fields with no applicable max constraint are left untouched.
void FromConstraintsForScreencast(
    const MediaConstraintsInterface::Constraints& constraints,
    cricket::VideoFormat* format_out) {
  typedef MediaConstraintsInterface::Constraints::const_iterator
      ConstraintsIterator;
  // Start with -1 (unset) so SetUpperLimit accepts the first max value.
  cricket::VideoFormat upper_limit(-1, -1, 0, 0);
  for (ConstraintsIterator constraints_it = constraints.begin();
       constraints_it != constraints.end(); ++constraints_it)
    SetUpperLimitFromConstraint(*constraints_it, &upper_limit);
  if (upper_limit.width >= 0)
    format_out->width = upper_limit.width;
  if (upper_limit.height >= 0)
    format_out->height = upper_limit.height;
}
// Returns true if |constraint| is fulfilled. |format_out| can differ from
// |format_in| if the format is changed by the constraint. Ie - the frame rate
// can be changed by setting maxFrameRate.
// Unknown constraint keys are rejected (returns false) after logging a
// warning, which makes unknown mandatory constraints fatal upstream.
bool NewFormatWithConstraints(
    const MediaConstraintsInterface::Constraint& constraint,
    const cricket::VideoFormat& format_in,
    bool mandatory,
    cricket::VideoFormat* format_out) {
  ASSERT(format_out != NULL);
  *format_out = format_in;

  if (constraint.key == MediaConstraintsInterface::kMinWidth) {
    int value = talk_base::FromString<int>(constraint.value);
    return (value <= format_in.width);
  } else if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
    int value = talk_base::FromString<int>(constraint.value);
    return (value >= format_in.width);
  } else if (constraint.key == MediaConstraintsInterface::kMinHeight) {
    int value = talk_base::FromString<int>(constraint.value);
    return (value <= format_in.height);
  } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
    int value = talk_base::FromString<int>(constraint.value);
    return (value >= format_in.height);
  } else if (constraint.key == MediaConstraintsInterface::kMinFrameRate) {
    int value = talk_base::FromString<int>(constraint.value);
    return (value <= cricket::VideoFormat::IntervalToFps(format_in.interval));
  } else if (constraint.key == MediaConstraintsInterface::kMaxFrameRate) {
    int value = talk_base::FromString<int>(constraint.value);
    if (value == 0) {
      if (mandatory) {
        // TODO(ronghuawu): Convert the constraint value to float when sub-1fps
        // is supported by the capturer.
        return false;
      } else {
        value = 1;
      }
    }
    // Cap the frame rate (by stretching the interval) when the format
    // supports at least the requested rate.
    if (value <= cricket::VideoFormat::IntervalToFps(format_in.interval)) {
      format_out->interval = cricket::VideoFormat::FpsToInterval(value);
      return true;
    } else {
      return false;
    }
  } else if (constraint.key == MediaConstraintsInterface::kMinAspectRatio) {
    double value = talk_base::FromString<double>(constraint.value);
    // The aspect ratio in |constraint.value| has been converted to a string and
    // back to a double, so it may have a rounding error.
    // E.g if the value 1/3 is converted to a string, the string will not have
    // infinite length.
    // We add a margin of 0.0005 which is high enough to detect the same aspect
    // ratio but small enough to avoid matching wrong aspect ratios.
    double ratio = static_cast<double>(format_in.width) / format_in.height;
    return (value <= ratio + kRoundingTruncation);
  } else if (constraint.key == MediaConstraintsInterface::kMaxAspectRatio) {
    double value = talk_base::FromString<double>(constraint.value);
    double ratio = static_cast<double>(format_in.width) / format_in.height;
    // Subtract the rounding margin to avoid rounding problems. Same as
    // above; uses the file-scope kRoundingTruncation (this branch used
    // to shadow it with an identical local constant).
    return (value >= ratio - kRoundingTruncation);
  } else if (constraint.key == MediaConstraintsInterface::kNoiseReduction ||
             constraint.key == MediaConstraintsInterface::kLeakyBucket ||
             constraint.key ==
                 MediaConstraintsInterface::kTemporalLayeredScreencast) {
    // These are actually options, not constraints, so they can be satisfied
    // regardless of the format.
    return true;
  }
  LOG(LS_WARNING) << "Found unknown MediaStream constraint. Name:"
      << constraint.key << " Value:" << constraint.value;
  return false;
}
// Removes cricket::VideoFormats from |formats| that don't meet |constraint|.
// Surviving formats may be modified in place by NewFormatWithConstraints
// (e.g. the frame rate capped by a maxFrameRate constraint).
void FilterFormatsByConstraint(
    const MediaConstraintsInterface::Constraint& constraint,
    bool mandatory,
    std::vector<cricket::VideoFormat>* formats) {
  std::vector<cricket::VideoFormat>::iterator format_it =
      formats->begin();
  while (format_it != formats->end()) {
    // Modify the format_it to fulfill the constraint if possible.
    // Delete it otherwise.
    if (!NewFormatWithConstraints(constraint, (*format_it),
                                  mandatory, &(*format_it))) {
      // erase() returns the iterator to the next element.
      format_it = formats->erase(format_it);
    } else {
      ++format_it;
    }
  }
}
// Returns a vector of cricket::VideoFormat that best match |constraints|.
// All mandatory constraints are applied first; an empty result signals
// that they could not be satisfied. Optional constraints are then applied
// one at a time and silently skipped if applying one would remove every
// remaining candidate.
std::vector<cricket::VideoFormat> FilterFormats(
    const MediaConstraintsInterface::Constraints& mandatory,
    const MediaConstraintsInterface::Constraints& optional,
    const std::vector<cricket::VideoFormat>& supported_formats) {
  typedef MediaConstraintsInterface::Constraints::const_iterator
      ConstraintsIterator;
  std::vector<cricket::VideoFormat> candidates = supported_formats;

  for (ConstraintsIterator constraints_it = mandatory.begin();
       constraints_it != mandatory.end(); ++constraints_it)
    FilterFormatsByConstraint(*constraints_it, true, &candidates);

  if (candidates.size() == 0)
    return candidates;

  // Ok - all mandatory checked and we still have a candidate.
  // Let's try filtering using the optional constraints.
  for (ConstraintsIterator constraints_it = optional.begin();
       constraints_it != optional.end(); ++constraints_it) {
    std::vector<cricket::VideoFormat> current_candidates = candidates;
    FilterFormatsByConstraint(*constraints_it, false, &current_candidates);
    if (current_candidates.size() > 0) {
      candidates = current_candidates;
    }
  }

  // We have done as good as we can to filter the supported resolutions.
  return candidates;
}
// Find the format that best matches the default video size.
// Constraints are optional and since the performance of a video call
// might be bad due to bitrate limitations, CPU, and camera performance,
// it is better to select a resolution that is as close as possible to our
// default and still meets the contraints. "Closest" is measured by the
// absolute difference in pixel area. |formats| must be non-empty.
const cricket::VideoFormat& GetBestCaptureFormat(
    const std::vector<cricket::VideoFormat>& formats) {
  ASSERT(formats.size() > 0);
  const int target_area = kDefaultResolution.width * kDefaultResolution.height;

  std::vector<cricket::VideoFormat>::const_iterator best = formats.begin();
  int smallest_diff = abs(target_area - best->width * best->height);
  for (std::vector<cricket::VideoFormat>::const_iterator candidate =
           formats.begin();
       candidate != formats.end(); ++candidate) {
    const int diff = abs(target_area - candidate->width * candidate->height);
    if (diff < smallest_diff) {
      smallest_diff = diff;
      best = candidate;
    }
  }
  return *best;
}
// Set |option| to the highest-priority value of |key| in the constraints.
// Return false if the key is mandatory, and the value is invalid.
// NOTE(review): relies on FindConstraint updating |mandatory| (count of
// matching mandatory constraints) even when it returns false — confirm
// against FindConstraint's contract.
bool ExtractOption(const MediaConstraintsInterface* all_constraints,
                   const std::string& key, cricket::Settable<bool>* option) {
  size_t mandatory = 0;
  bool value;
  if (FindConstraint(all_constraints, key, &value, &mandatory)) {
    option->Set(value);
    return true;
  }
  // Not found or unparsable: only fatal if a mandatory constraint matched.
  return mandatory == 0;
}
// Search |all_constraints| for known video options. Apply all options that are
// found with valid values, and return false if any mandatory video option was
// found with an invalid value. Every option is evaluated unconditionally —
// no short-circuiting — so all valid options get applied even when an
// earlier one failed.
bool ExtractVideoOptions(const MediaConstraintsInterface* all_constraints,
                         cricket::VideoOptions* options) {
  const bool noise_ok =
      ExtractOption(all_constraints,
                    MediaConstraintsInterface::kNoiseReduction,
                    &(options->video_noise_reduction));
  const bool bucket_ok =
      ExtractOption(all_constraints,
                    MediaConstraintsInterface::kLeakyBucket,
                    &(options->video_leaky_bucket));
  const bool layered_ok =
      ExtractOption(all_constraints,
                    MediaConstraintsInterface::kTemporalLayeredScreencast,
                    &(options->video_temporal_layer_screencast));
  return noise_ok && bucket_ok && layered_ok;
}
} // anonymous namespace
namespace webrtc {
// Factory for LocalVideoSource. Takes ownership of |capturer| and applies
// |constraints| (which may be NULL) before returning the ref-counted
// source. |channel_manager| and |capturer| must be non-NULL.
talk_base::scoped_refptr<LocalVideoSource> LocalVideoSource::Create(
    cricket::ChannelManager* channel_manager,
    cricket::VideoCapturer* capturer,
    const webrtc::MediaConstraintsInterface* constraints) {
  ASSERT(channel_manager != NULL);
  ASSERT(capturer != NULL);
  talk_base::scoped_refptr<LocalVideoSource> video_source(
      new talk_base::RefCountedObject<LocalVideoSource>(channel_manager,
                                                        capturer));
  video_source->Initialize(constraints);
  return video_source;
}
// Constructs the source in the kInitializing state and subscribes to the
// channel manager's capture-state signal; state transitions happen in
// OnStateChange once the capturer reports a state.
LocalVideoSource::LocalVideoSource(cricket::ChannelManager* channel_manager,
                                   cricket::VideoCapturer* capturer)
    : channel_manager_(channel_manager),
      video_capturer_(capturer),  // Takes ownership (scoped_ptr member).
      state_(kInitializing) {
  channel_manager_->SignalVideoCaptureStateChange.connect(
      this, &LocalVideoSource::OnStateChange);
}
// Stops capturing and disconnects the state-change signal before the
// owned capturer is destroyed.
LocalVideoSource::~LocalVideoSource() {
  channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
  channel_manager_->SignalVideoCaptureStateChange.disconnect(this);
}
// Picks a capture format that honors |constraints| (may be NULL) and
// starts the capturer with it. On any failure — no acceptable format,
// invalid mandatory option, or StartVideoCapture failing — the source is
// moved to kEnded. On success the state stays kInitializing until a
// capture-state change arrives via OnStateChange.
void LocalVideoSource::Initialize(
    const webrtc::MediaConstraintsInterface* constraints) {

  std::vector<cricket::VideoFormat> formats;
  if (video_capturer_->GetSupportedFormats() &&
      video_capturer_->GetSupportedFormats()->size() > 0) {
    // Preferred path: the capturer enumerates its own capabilities.
    formats = *video_capturer_->GetSupportedFormats();
  } else if (video_capturer_->IsScreencast()) {
    // The screen capturer can accept any resolution and we will derive the
    // format from the constraints if any.
    // Note that this only affects tab capturing, not desktop capturing,
    // since desktop capturer does not respect the VideoFormat passed in.
    formats.push_back(cricket::VideoFormat(kDefaultResolution));
  } else {
    // The VideoCapturer implementation doesn't support capability enumeration.
    // We need to guess what the camera support.
    for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
      formats.push_back(cricket::VideoFormat(kVideoFormats[i]));
    }
  }

  if (constraints) {
    MediaConstraintsInterface::Constraints mandatory_constraints =
        constraints->GetMandatory();
    MediaConstraintsInterface::Constraints optional_constraints;
    optional_constraints = constraints->GetOptional();

    if (video_capturer_->IsScreencast()) {
      // Use the maxWidth and maxHeight allowed by constraints for screencast.
      FromConstraintsForScreencast(mandatory_constraints, &(formats[0]));
    }

    formats = FilterFormats(mandatory_constraints, optional_constraints,
                            formats);
  }

  if (formats.size() == 0) {
    LOG(LS_WARNING) << "Failed to find a suitable video format.";
    SetState(kEnded);
    return;
  }

  cricket::VideoOptions options;
  if (!ExtractVideoOptions(constraints, &options)) {
    LOG(LS_WARNING) << "Could not satisfy mandatory options.";
    SetState(kEnded);
    return;
  }
  options_.SetAll(options);

  format_ = GetBestCaptureFormat(formats);
  // Start the camera with our best guess.
  // TODO(perkj): Should we try again with another format it it turns out that
  // the camera doesn't produce frames with the correct format? Or will
  // cricket::VideCapturer be able to re-scale / crop to the requested
  // resolution?
  if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
    SetState(kEnded);
    return;
  }
  // Initialize hasn't succeeded until a successful state change has occurred.
}
// Registers |output| to receive video frames while the capturer runs.
void LocalVideoSource::AddSink(cricket::VideoRenderer* output) {
  channel_manager_->AddVideoRenderer(video_capturer_.get(), output);
}
// Unregisters |output|; it stops receiving frames from this source.
void LocalVideoSource::RemoveSink(cricket::VideoRenderer* output) {
  channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output);
}
// OnStateChange listens to the ChannelManager::SignalVideoCaptureStateChange.
// This signal is triggered for all video capturers. Not only the one we are
// interested in.
void LocalVideoSource::OnStateChange(cricket::VideoCapturer* capturer,
                                     cricket::CaptureState capture_state) {
  // Ignore notifications about capturers other than our own.
  if (capturer == video_capturer_.get()) {
    SetState(GetReadyState(capture_state));
  }
}
// Records |new_state| and notifies observers. VERIFY flags (in debug
// builds) redundant transitions: callers are expected to report only
// actual state changes.
void LocalVideoSource::SetState(SourceState new_state) {
  if (VERIFY(state_ != new_state)) {
    state_ = new_state;
    FireOnChanged();
  }
}
} // namespace webrtc

View File

@ -0,0 +1,100 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_
#define TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/sigslot.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
// LocalVideoSource implements VideoSourceInterface. It owns a
// cricket::VideoCapturer and make sure the camera is started at a resolution
// that honors the constraints.
// The state is set depending on the result of starting the capturer.
// If the constraint can't be met or the capturer fails to start, the state
// transition to kEnded, otherwise it transitions to kLive.
namespace cricket {
class ChannelManager;
} // namespace cricket
namespace webrtc {
class MediaConstraintsInterface;
class LocalVideoSource : public Notifier<VideoSourceInterface>,
    public sigslot::has_slots<> {
 public:
  // Creates an instance of LocalVideoSource.
  // LocalVideoSource take ownership of |capturer|.
  // |constraints| can be NULL and in that case the camera is opened using a
  // default resolution.
  static talk_base::scoped_refptr<LocalVideoSource> Create(
      cricket::ChannelManager* channel_manager,
      cricket::VideoCapturer* capturer,
      const webrtc::MediaConstraintsInterface* constraints);

  // Current ready state; updated from capture-state change signals.
  virtual SourceState state() const { return state_; }

  // Video options derived from the constraints at creation time.
  virtual const cricket::VideoOptions* options() const { return &options_; }

  // The owned capturer; callers must not take ownership.
  virtual cricket::VideoCapturer* GetVideoCapturer() {
    return video_capturer_.get();
  }

  // |output| will be served video frames as long as the underlying capturer
  // is running video frames.
  virtual void AddSink(cricket::VideoRenderer* output);
  virtual void RemoveSink(cricket::VideoRenderer* output);

 protected:
  LocalVideoSource(cricket::ChannelManager* channel_manager,
                   cricket::VideoCapturer* capturer);
  ~LocalVideoSource();

 private:
  // Chooses a capture format honoring |constraints| and starts the capturer.
  void Initialize(const webrtc::MediaConstraintsInterface* constraints);

  // Slot for ChannelManager::SignalVideoCaptureStateChange.
  void OnStateChange(cricket::VideoCapturer* capturer,
                     cricket::CaptureState capture_state);
  // Updates state_ and fires the change notification.
  void SetState(SourceState new_state);

  cricket::ChannelManager* channel_manager_;  // Not owned.
  talk_base::scoped_ptr<cricket::VideoCapturer> video_capturer_;  // Owned.
  cricket::VideoFormat format_;    // Format the capturer was started with.
  cricket::VideoOptions options_;  // Options extracted from constraints.
  SourceState state_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_

View File

@ -0,0 +1,523 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localvideosource.h"
#include <string>
#include <vector>
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/base/gunit.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/base/fakevideorenderer.h"
#include "talk/media/devices/fakedevicemanager.h"
#include "talk/session/media/channelmanager.h"
using webrtc::FakeConstraints;
using webrtc::LocalVideoSource;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
using webrtc::ObserverInterface;
using webrtc::VideoSourceInterface;
namespace {
// Max wait time for a test.
const int kMaxWaitMs = 100;
} // anonymous namespace
// TestVideoCapturer extends cricket::FakeVideoCapturer so it can be used for
// testing without known camera formats.
// It keeps its own lists of cricket::VideoFormats for the unit tests in this
// file.
class TestVideoCapturer : public cricket::FakeVideoCapturer {
 public:
  // Seeds the capturer with a fixed set of common camera resolutions.
  TestVideoCapturer() : test_without_formats_(false) {
    std::vector<cricket::VideoFormat> formats;
    formats.push_back(cricket::VideoFormat(1280, 720,
        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
    formats.push_back(cricket::VideoFormat(640, 480,
        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
    formats.push_back(cricket::VideoFormat(640, 400,
        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
    formats.push_back(cricket::VideoFormat(320, 240,
        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
    formats.push_back(cricket::VideoFormat(352, 288,
        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
    ResetSupportedFormats(formats);
  }

  // This function is used for resetting the supported capture formats and
  // simulating a cricket::VideoCapturer implementation that don't support
  // capture format enumeration. This is used to simulate the current
  // Chrome implementation.
  void TestWithoutCameraFormats() {
    test_without_formats_ = true;
    std::vector<cricket::VideoFormat> formats;
    ResetSupportedFormats(formats);
  }

  // In no-enumeration mode, pretend the requested format is the only one
  // supported, then delegate to the fake capturer.
  virtual cricket::CaptureState Start(
      const cricket::VideoFormat& capture_format) {
    if (test_without_formats_) {
      std::vector<cricket::VideoFormat> formats;
      formats.push_back(capture_format);
      ResetSupportedFormats(formats);
    }
    return FakeVideoCapturer::Start(capture_format);
  }

  // In no-enumeration mode any desired format is accepted as-is.
  virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
                                    cricket::VideoFormat* best_format) {
    if (test_without_formats_) {
      *best_format = desired;
      return true;
    }
    return FakeVideoCapturer::GetBestCaptureFormat(desired,
                                                   best_format);
  }

 private:
  bool test_without_formats_;  // True when simulating no format enumeration.
};
// Observer that records the most recently reported source state, so tests
// can wait for asynchronous state transitions. Holds a reference to the
// source in order to query state() on every change notification.
class StateObserver : public ObserverInterface {
 public:
  explicit StateObserver(VideoSourceInterface* source)
      : state_(source->state()),
        source_(source) {
  }
  virtual void OnChanged() {
    state_ = source_->state();
  }
  // Last state seen (initial state until OnChanged fires).
  MediaSourceInterface::SourceState state() const { return state_; }

 private:
  MediaSourceInterface::SourceState state_;
  talk_base::scoped_refptr<VideoSourceInterface> source_;
};
// Fixture wiring a TestVideoCapturer into a LocalVideoSource backed by
// fake media/device engines, with a StateObserver and a fake renderer
// attached for assertions.
class LocalVideoSourceTest : public testing::Test {
 protected:
  LocalVideoSourceTest()
      : channel_manager_(new cricket::ChannelManager(
          new cricket::FakeMediaEngine(),
          new cricket::FakeDeviceManager(), talk_base::Thread::Current())) {
  }

  void SetUp() {
    ASSERT_TRUE(channel_manager_->Init());
    capturer_ = new TestVideoCapturer();
  }

  void CreateLocalVideoSource() {
    CreateLocalVideoSource(NULL);
  }

  void CreateLocalVideoSource(
      const webrtc::MediaConstraintsInterface* constraints) {
    // VideoSource take ownership of |capturer_|
    local_source_ = LocalVideoSource::Create(channel_manager_.get(),
                                             capturer_,
                                             constraints);

    ASSERT_TRUE(local_source_.get() != NULL);
    EXPECT_EQ(capturer_, local_source_->GetVideoCapturer());

    state_observer_.reset(new StateObserver(local_source_));
    local_source_->RegisterObserver(state_observer_.get());
    local_source_->AddSink(&renderer_);
  }

  TestVideoCapturer* capturer_;  // Raw pointer. Owned by local_source_.
  cricket::FakeVideoRenderer renderer_;
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
  talk_base::scoped_ptr<StateObserver> state_observer_;
  talk_base::scoped_refptr<LocalVideoSource> local_source_;
};
// Test that a LocalVideoSource transition to kLive state when the capture
// device have started and kEnded if it is stopped.
// It also test that an output can receive video frames.
TEST_F(LocalVideoSourceTest, StartStop) {
  // Initialize without constraints.
  CreateLocalVideoSource();
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);

  // A captured frame must reach the attached renderer.
  ASSERT_TRUE(capturer_->CaptureFrame());
  EXPECT_EQ(1, renderer_.num_rendered_frames());

  capturer_->Stop();
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
}
// Tests that a LocalVideoSource transitions to kEnded when the capture
// device reports a failure.
TEST_F(LocalVideoSourceTest, CameraFailed) {
  CreateLocalVideoSource();
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  capturer_->SignalStateChange(capturer_, cricket::CS_FAILED);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
}
// Tests that the capture output is CIF at 5 fps when mandatory max
// constraints request it and the capture device supports CIF.
TEST_F(LocalVideoSourceTest, MandatoryConstraintCif5Fps) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
  constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 5);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(352, format->width);
  EXPECT_EQ(288, format->height);
  EXPECT_EQ(5, format->framerate());
}
// Tests that the capture output is 720p when the camera supports it, the
// mandatory constraints require at least VGA, and the optional constraints
// request 720p width and 16:9 aspect ratio.
TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
  constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
  constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
  constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio,
                          1280.0 / 720);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(1280, format->width);
  EXPECT_EQ(720, format->height);
  EXPECT_EQ(30, format->framerate());
}
// Tests that the capture output has a 4:3 aspect ratio when a mandatory
// constraint requires it, even when an optional constraint requests a higher
// resolution that does not have that aspect ratio.
TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
  constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
  constraints.AddMandatory(MediaConstraintsInterface::kMaxAspectRatio,
                           640.0 / 480);
  constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(640, format->width);
  EXPECT_EQ(480, format->height);
  EXPECT_EQ(30, format->framerate());
}
// Tests that the source transitions to kEnded when the mandatory minimum
// aspect ratio is higher than any supported format can provide.
TEST_F(LocalVideoSourceTest, MandatoryAspectRatioTooHigh) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, 2);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
}
// Tests that the source ignores an optional aspect-ratio constraint that is
// higher than supported and still goes live with a lower ratio.
TEST_F(LocalVideoSourceTest, OptionalAspectRatioTooHigh) {
  FakeConstraints constraints;
  constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 2);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  double aspect_ratio = static_cast<double>(format->width) / format->height;
  EXPECT_LT(aspect_ratio, 2);
}
// Tests that the source starts video with the default VGA/30fps resolution
// when the camera does not support capability enumeration and no constraints
// are given.
TEST_F(LocalVideoSourceTest, NoCameraCapability) {
  capturer_->TestWithoutCameraFormats();
  CreateLocalVideoSource();
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(640, format->width);
  EXPECT_EQ(480, format->height);
  EXPECT_EQ(30, format->framerate());
}
// Tests that the source can start video and honor the requested aspect ratio
// when the camera does not support capability enumeration and a minimum
// aspect ratio is set.
TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) {
  capturer_->TestWithoutCameraFormats();
  FakeConstraints constraints;
  double requested_aspect_ratio = 640.0 / 360;
  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
  constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio,
                           requested_aspect_ratio);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  // Guard against dereferencing a NULL format on failure; every other test
  // in this file performs this check before using |format|.
  ASSERT_TRUE(format != NULL);
  double aspect_ratio = static_cast<double>(format->width) / format->height;
  EXPECT_LE(requested_aspect_ratio, aspect_ratio);
}
// Tests that the source transitions to kEnded when an unknown mandatory
// constraint key is present.
TEST_F(LocalVideoSourceTest, InvalidMandatoryConstraint) {
  FakeConstraints constraints;
  constraints.AddMandatory("weird key", 640);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
}
// Tests that the source ignores an unknown optional constraint key and still
// goes live.
TEST_F(LocalVideoSourceTest, InvalidOptionalConstraint) {
  FakeConstraints constraints;
  constraints.AddOptional("weird key", 640);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
}
// Tests that well-formed boolean constraint values are translated into the
// corresponding source options (noise reduction, temporal layered screencast,
// leaky bucket).
TEST_F(LocalVideoSourceTest, SetValidOptionValues) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kNoiseReduction, "false");
  constraints.AddMandatory(
      MediaConstraintsInterface::kTemporalLayeredScreencast, "false");
  constraints.AddOptional(
      MediaConstraintsInterface::kLeakyBucket, "true");
  CreateLocalVideoSource(&constraints);
  bool value = true;
  EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
  EXPECT_FALSE(value);
  EXPECT_TRUE(local_source_->options()->
              video_temporal_layer_screencast.Get(&value));
  EXPECT_FALSE(value);
  EXPECT_TRUE(local_source_->options()->video_leaky_bucket.Get(&value));
  EXPECT_TRUE(value);
}
// Tests that an option remains unset when no corresponding constraint is
// provided.
TEST_F(LocalVideoSourceTest, OptionNotSet) {
  FakeConstraints constraints;
  CreateLocalVideoSource(&constraints);
  bool value;
  EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
}
// Tests that a mandatory constraint value wins over a conflicting optional
// value for the same key.
TEST_F(LocalVideoSourceTest, MandatoryOptionOverridesOptional) {
  FakeConstraints constraints;
  constraints.AddMandatory(
      MediaConstraintsInterface::kNoiseReduction, true);
  constraints.AddOptional(
      MediaConstraintsInterface::kNoiseReduction, false);
  CreateLocalVideoSource(&constraints);
  bool value = false;
  EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
  EXPECT_TRUE(value);
  EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
}
// Tests that an unknown optional key is skipped while valid optional keys in
// the same constraint set still take effect.
TEST_F(LocalVideoSourceTest, InvalidOptionKeyOptional) {
  FakeConstraints constraints;
  constraints.AddOptional(
      MediaConstraintsInterface::kNoiseReduction, false);
  constraints.AddOptional("invalidKey", false);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  bool value = true;
  EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
  EXPECT_FALSE(value);
}
// Tests that an unknown mandatory key ends the source and discards even the
// valid options supplied alongside it.
TEST_F(LocalVideoSourceTest, InvalidOptionKeyMandatory) {
  FakeConstraints constraints;
  constraints.AddMandatory(
      MediaConstraintsInterface::kNoiseReduction, false);
  constraints.AddMandatory("invalidKey", false);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
  bool value;
  EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
}
// Tests that an optional constraint with a non-boolean value is ignored while
// other valid optional constraints still apply.
TEST_F(LocalVideoSourceTest, InvalidOptionValueOptional) {
  FakeConstraints constraints;
  constraints.AddOptional(
      MediaConstraintsInterface::kNoiseReduction, "true");
  constraints.AddOptional(
      MediaConstraintsInterface::kLeakyBucket, "not boolean");
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  bool value = false;
  EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
  EXPECT_TRUE(value);
  EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
}
// Tests that a mandatory constraint with a malformed boolean value ends the
// source, and that optional constraints are then ignored entirely.
TEST_F(LocalVideoSourceTest, InvalidOptionValueMandatory) {
  FakeConstraints constraints;
  // Optional constraints should be ignored if the mandatory constraints fail.
  constraints.AddOptional(
      MediaConstraintsInterface::kNoiseReduction, "false");
  // Values are case-sensitive and must be all lower-case.
  constraints.AddMandatory(
      MediaConstraintsInterface::kLeakyBucket, "True");
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
  bool value;
  EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
}
// Tests that format constraints and boolean option constraints can be mixed
// in one set: the format follows the resolution/frame-rate constraints while
// the mandatory option value overrides the optional one.
TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
  constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 5);
  constraints.AddMandatory(
      MediaConstraintsInterface::kNoiseReduction, false);
  constraints.AddOptional(
      MediaConstraintsInterface::kNoiseReduction, true);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(352, format->width);
  EXPECT_EQ(288, format->height);
  EXPECT_EQ(5, format->framerate());
  bool value = true;
  EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
  EXPECT_FALSE(value);
  EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
}
// Tests that the source starts screencast video with the default resolution
// when no constraint is set.
TEST_F(LocalVideoSourceTest, ScreencastResolutionNoConstraint) {
  capturer_->TestWithoutCameraFormats();
  capturer_->SetScreencast(true);
  CreateLocalVideoSource();
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(640, format->width);
  EXPECT_EQ(480, format->height);
  EXPECT_EQ(30, format->framerate());
}
// Tests that the source starts screencast video with the max width and height
// requested by the constraints.
TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 480);
  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 270);
  capturer_->TestWithoutCameraFormats();
  capturer_->SetScreencast(true);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(480, format->width);
  EXPECT_EQ(270, format->height);
  EXPECT_EQ(30, format->framerate());
}
// Tests that a mandatory frame rate below 1 fps cannot be satisfied: the
// source ends and capture never starts.
TEST_F(LocalVideoSourceTest, MandatorySubOneFpsConstraints) {
  FakeConstraints constraints;
  constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 0.5);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
                 kMaxWaitMs);
  ASSERT_TRUE(capturer_->GetCaptureFormat() == NULL);
}
// Tests that an optional frame rate below 1 fps is clamped: the source goes
// live at 1 fps instead of failing.
TEST_F(LocalVideoSourceTest, OptionalSubOneFpsConstraints) {
  FakeConstraints constraints;
  constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 0.5);
  CreateLocalVideoSource(&constraints);
  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
                 kMaxWaitMs);
  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
  ASSERT_TRUE(format != NULL);
  EXPECT_EQ(1, format->framerate());
}

View File

@ -0,0 +1,78 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/base/stringencode.h"
namespace webrtc {
// Canonical lower-case string forms for boolean constraint values; value
// parsing elsewhere is case-sensitive.
const char MediaConstraintsInterface::kValueTrue[] = "true";
const char MediaConstraintsInterface::kValueFalse[] = "false";
// Set |value| to the value associated with the first appearance of |key|, or
// return false if |key| is not found.
bool MediaConstraintsInterface::Constraints::FindFirst(
const std::string& key, std::string* value) const {
for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
if (iter->key == key) {
*value = iter->value;
return true;
}
}
return false;
}
// Find the highest-priority instance of the boolean-valued constraint named
// by |key| and return its value as |value|. |constraints| can be null.
// If |mandatory_constraints| is non-null, it is incremented if the key appears
// among the mandatory constraints.
// Returns true if the key was found and has a valid boolean value.
// If the key appears multiple times as an optional constraint, appearances
// after the first are ignored.
// Note: Because this uses FindFirst, repeated optional constraints whose
// first instance has an unrecognized value are not handled precisely in
// accordance with the specification.
bool FindConstraint(const MediaConstraintsInterface* constraints,
                    const std::string& key, bool* value,
                    size_t* mandatory_constraints) {
  std::string string_value;
  if (!constraints) {
    return false;
  }
  // Mandatory constraints take precedence; the counter is bumped even when
  // the value fails to parse as a boolean.
  if (constraints->GetMandatory().FindFirst(key, &string_value)) {
    if (mandatory_constraints)
      ++*mandatory_constraints;
    return talk_base::FromString(string_value, value);
  }
  if (constraints->GetOptional().FindFirst(key, &string_value)) {
    return talk_base::FromString(string_value, value);
  }
  return false;
}
} // namespace webrtc

View File

@ -0,0 +1,129 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains the interface for MediaConstraints, corresponding to
// the definition at
// http://www.w3.org/TR/mediacapture-streams/#mediastreamconstraints and also
// used in WebRTC: http://dev.w3.org/2011/webrtc/editor/webrtc.html#constraints.
#ifndef TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_
#define TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_
#include <string>
#include <vector>
namespace webrtc {
// MediaConstraintsInterface
// Interface used for passing arguments about media constraints
// to the MediaStream and PeerConnection implementation.
// MediaConstraintsInterface
// Interface used for passing arguments about media constraints
// to the MediaStream and PeerConnection implementation.
class MediaConstraintsInterface {
 public:
  // A single key/value constraint, e.g. ("maxWidth", "640").
  struct Constraint {
    Constraint() {}
    // Fix: |value| is now passed by const reference (it was passed by value,
    // forcing an extra std::string copy); callers are unaffected.
    Constraint(const std::string& key, const std::string& value)
        : key(key), value(value) {
    }
    std::string key;
    std::string value;
  };

  // Ordered list of constraints with lookup by key.
  class Constraints : public std::vector<Constraint> {
   public:
    // Sets |*value| to the value of the first entry matching |key|;
    // returns false if |key| is absent. Defined in the .cc file.
    bool FindFirst(const std::string& key, std::string* value) const;
  };

  virtual const Constraints& GetMandatory() const = 0;
  virtual const Constraints& GetOptional() const = 0;

  // Constraint keys used by a local video source.
  // Specified by draft-alvestrand-constraints-resolution-00b
  static const char kMinAspectRatio[];  // minAspectRatio
  static const char kMaxAspectRatio[];  // maxAspectRatio
  static const char kMaxWidth[];  // maxWidth
  static const char kMinWidth[];  // minWidth
  static const char kMaxHeight[];  // maxHeight
  static const char kMinHeight[];  // minHeight
  static const char kMaxFrameRate[];  // maxFrameRate
  static const char kMinFrameRate[];  // minFrameRate

  // Constraint keys used by a local audio source.
  // These keys are google specific.
  static const char kEchoCancellation[];  // googEchoCancellation
  static const char kExperimentalEchoCancellation[];  // googEchoCancellation2
  static const char kAutoGainControl[];  // googAutoGainControl
  static const char kExperimentalAutoGainControl[];  // googAutoGainControl2
  static const char kNoiseSuppression[];  // googNoiseSuppression
  static const char kHighpassFilter[];  // googHighpassFilter

  // Google-specific constraint keys for a local video source
  static const char kNoiseReduction[];  // googNoiseReduction
  static const char kLeakyBucket[];  // googLeakyBucket
  // googTemporalLayeredScreencast
  static const char kTemporalLayeredScreencast[];

  // Constraint keys for CreateOffer / CreateAnswer
  // Specified by the W3C PeerConnection spec
  static const char kOfferToReceiveVideo[];  // OfferToReceiveVideo
  static const char kOfferToReceiveAudio[];  // OfferToReceiveAudio
  static const char kVoiceActivityDetection[];  // VoiceActivityDetection
  static const char kIceRestart[];  // IceRestart
  // These keys are google specific.
  static const char kUseRtpMux[];  // googUseRtpMUX

  // Constraints values.
  static const char kValueTrue[];  // true
  static const char kValueFalse[];  // false

  // Temporary pseudo-constraints used to enable DTLS-SRTP
  static const char kEnableDtlsSrtp[];  // Enable DTLS-SRTP
  // Temporary pseudo-constraints used to enable DataChannels
  static const char kEnableRtpDataChannels[];  // Enable RTP DataChannels
  static const char kEnableSctpDataChannels[];  // Enable SCTP DataChannels

  // The prefix of internal-only constraints whose JS set values should be
  // stripped by Chrome before passed down to Libjingle.
  static const char kInternalConstraintPrefix[];

  // This constraint is for internal use only, representing the Chrome command
  // line flag. So it is prefixed with "internal" so JS values will be removed.
  // Used by a local audio source.
  static const char kInternalAecDump[];  // internalAecDump

 protected:
  // Dtor protected as objects shouldn't be deleted via this interface
  virtual ~MediaConstraintsInterface() {}
};
bool FindConstraint(const MediaConstraintsInterface* constraints,
const std::string& key, bool* value,
size_t* mandatory_constraints);
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIACONSTRAINTSINTERFACE_H_

View File

@ -0,0 +1,112 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/mediastream.h"
#include "talk/base/logging.h"
namespace webrtc {
// Returns an iterator to the first track in |vector| whose id() equals
// |track_id|, or vector->end() if no such track exists. V must be a container
// of pointer-like elements exposing id().
// Fix: removed the stray ';' after the closing brace — an extra semicolon
// after a function definition is ill-formed in C++03 and triggers
// -Wextra-semi / pedantic warnings.
template <class V>
static typename V::iterator FindTrack(V* vector,
                                      const std::string& track_id) {
  typename V::iterator it = vector->begin();
  for (; it != vector->end(); ++it) {
    if ((*it)->id() == track_id) {
      break;
    }
  }
  return it;
}
// Factory for ref-counted MediaStream instances. The RefCountedObject shim
// supplies the AddRef/Release implementation; the returned scoped_refptr
// holds the initial reference.
talk_base::scoped_refptr<MediaStream> MediaStream::Create(
    const std::string& label) {
  return new talk_base::RefCountedObject<MediaStream>(label);
}
// Protected constructor; use MediaStream::Create(). Track vectors start
// empty.
MediaStream::MediaStream(const std::string& label)
    : label_(label) {
}
// The four public overloads below forward to the private templated
// AddTrack/RemoveTrack helpers, selecting the matching track vector.
// Each returns false on duplicate add / missing remove (see helper docs).
bool MediaStream::AddTrack(AudioTrackInterface* track) {
  return AddTrack<AudioTrackVector, AudioTrackInterface>(&audio_tracks_, track);
}

bool MediaStream::AddTrack(VideoTrackInterface* track) {
  return AddTrack<VideoTrackVector, VideoTrackInterface>(&video_tracks_, track);
}

bool MediaStream::RemoveTrack(AudioTrackInterface* track) {
  return RemoveTrack<AudioTrackVector>(&audio_tracks_, track);
}

bool MediaStream::RemoveTrack(VideoTrackInterface* track) {
  return RemoveTrack<VideoTrackVector>(&video_tracks_, track);
}
// Looks up an audio track by id; returns NULL when no match exists.
talk_base::scoped_refptr<AudioTrackInterface>
MediaStream::FindAudioTrack(const std::string& track_id) {
  AudioTrackVector::iterator match = FindTrack(&audio_tracks_, track_id);
  if (match != audio_tracks_.end())
    return *match;
  return NULL;
}
// Looks up a video track by id; returns NULL when no match exists.
talk_base::scoped_refptr<VideoTrackInterface>
MediaStream::FindVideoTrack(const std::string& track_id) {
  VideoTrackVector::iterator match = FindTrack(&video_tracks_, track_id);
  if (match != video_tracks_.end())
    return *match;
  return NULL;
}
// Appends |track| to |tracks| and fires a change notification. Tracks are
// keyed by id(): adding a track whose id is already present is rejected and
// returns false without notifying observers.
template <typename TrackVector, typename Track>
bool MediaStream::AddTrack(TrackVector* tracks, Track* track) {
  if (FindTrack(tracks, track->id()) != tracks->end())
    return false;
  tracks->push_back(track);
  FireOnChanged();
  return true;
}
// Erases the track matching |track|'s id from |tracks| and fires a change
// notification. Returns false (without notifying) when |track| is NULL or
// not present.
template <typename TrackVector>
bool MediaStream::RemoveTrack(TrackVector* tracks,
                              MediaStreamTrackInterface* track) {
  ASSERT(tracks != NULL);
  if (!track)
    return false;
  typename TrackVector::iterator pos = FindTrack(tracks, track->id());
  if (pos == tracks->end())
    return false;
  tracks->erase(pos);
  FireOnChanged();
  return true;
}
} // namespace webrtc

View File

@ -0,0 +1,75 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains the implementation of MediaStreamInterface interface.
#ifndef TALK_APP_WEBRTC_MEDIASTREAM_H_
#define TALK_APP_WEBRTC_MEDIASTREAM_H_
#include <string>
#include <vector>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
namespace webrtc {
// Concrete MediaStreamInterface implementation. Holds audio/video tracks by
// id and notifies registered observers (via Notifier) on every successful
// add/remove.
class MediaStream : public Notifier<MediaStreamInterface> {
 public:
  // Returns a new ref-counted stream; the only way to construct one.
  static talk_base::scoped_refptr<MediaStream> Create(const std::string& label);

  virtual std::string label() const OVERRIDE { return label_; }

  // Add/Remove return false on duplicate add or missing/NULL remove.
  virtual bool AddTrack(AudioTrackInterface* track) OVERRIDE;
  virtual bool AddTrack(VideoTrackInterface* track) OVERRIDE;
  virtual bool RemoveTrack(AudioTrackInterface* track) OVERRIDE;
  virtual bool RemoveTrack(VideoTrackInterface* track) OVERRIDE;
  // Return NULL when no track with |track_id| exists.
  // NOTE(review): these two lack the OVERRIDE annotation the neighboring
  // methods carry — confirm whether they override MediaStreamInterface
  // members and annotate them if so.
  virtual talk_base::scoped_refptr<AudioTrackInterface>
      FindAudioTrack(const std::string& track_id);
  virtual talk_base::scoped_refptr<VideoTrackInterface>
      FindVideoTrack(const std::string& track_id);

  // Return copies of the track vectors (snapshot semantics).
  virtual AudioTrackVector GetAudioTracks() OVERRIDE { return audio_tracks_; }
  virtual VideoTrackVector GetVideoTracks() OVERRIDE { return video_tracks_; }

 protected:
  explicit MediaStream(const std::string& label);

 private:
  // Templated helpers shared by the public Add/Remove overloads.
  template <typename TrackVector, typename Track>
  bool AddTrack(TrackVector* Tracks, Track* track);
  template <typename TrackVector>
  bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track);

  std::string label_;
  AudioTrackVector audio_tracks_;
  VideoTrackVector video_tracks_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAM_H_

View File

@ -0,0 +1,162 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/gunit.h"
#include "testing/base/public/gmock.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "dummy_video_cam_1";
static const char kAudioTrackId[] = "dummy_microphone_1";
using talk_base::scoped_refptr;
using ::testing::Exactly;
namespace webrtc {
// gmock helper used to verify that ObserverInterface::OnChanged is fired the
// expected number of times.
class MockObserver : public ObserverInterface {
 public:
  MockObserver() {}
  MOCK_METHOD0(OnChanged, void());
};
// Fixture: builds a MediaStream containing one audio and one video track and
// verifies during setup that duplicate adds are rejected.
class MediaStreamTest: public testing::Test {
 protected:
  virtual void SetUp() {
    stream_ = MediaStream::Create(kStreamLabel1);
    ASSERT_TRUE(stream_.get() != NULL);
    video_track_ = VideoTrack::Create(kVideoTrackId, NULL);
    ASSERT_TRUE(video_track_.get() != NULL);
    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
    ASSERT_TRUE(audio_track_.get() != NULL);
    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
    // Each track may be added exactly once; the second add must fail.
    EXPECT_TRUE(stream_->AddTrack(video_track_));
    EXPECT_FALSE(stream_->AddTrack(video_track_));
    EXPECT_TRUE(stream_->AddTrack(audio_track_));
    EXPECT_FALSE(stream_->AddTrack(audio_track_));
  }
  // Toggles |track|'s enabled flag and state, expecting one OnChanged
  // notification per mutation.
  void ChangeTrack(MediaStreamTrackInterface* track) {
    MockObserver observer;
    track->RegisterObserver(&observer);
    EXPECT_CALL(observer, OnChanged())
        .Times(Exactly(1));
    track->set_enabled(false);
    EXPECT_FALSE(track->enabled());
    EXPECT_CALL(observer, OnChanged())
        .Times(Exactly(1));
    track->set_state(MediaStreamTrackInterface::kLive);
    EXPECT_EQ(MediaStreamTrackInterface::kLive, track->state());
  }
  scoped_refptr<MediaStreamInterface> stream_;
  scoped_refptr<AudioTrackInterface> audio_track_;
  scoped_refptr<VideoTrackInterface> video_track_;
};
// Verifies that tracks added in SetUp are retrievable by index and by id,
// carry the expected ids, and default to enabled.
TEST_F(MediaStreamTest, GetTrackInfo) {
  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
  // Verify the video track.
  scoped_refptr<webrtc::MediaStreamTrackInterface> video_track(
      stream_->GetVideoTracks()[0]);
  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
  EXPECT_TRUE(video_track->enabled());
  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
  EXPECT_TRUE(stream_->GetVideoTracks()[0].get() == video_track.get());
  EXPECT_TRUE(stream_->FindVideoTrack(video_track->id()).get()
              == video_track.get());
  video_track = stream_->GetVideoTracks()[0];
  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
  EXPECT_TRUE(video_track->enabled());
  // Verify the audio track.
  scoped_refptr<webrtc::MediaStreamTrackInterface> audio_track(
      stream_->GetAudioTracks()[0]);
  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
  EXPECT_TRUE(audio_track->enabled());
  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
  EXPECT_TRUE(stream_->GetAudioTracks()[0].get() == audio_track.get());
  EXPECT_TRUE(stream_->FindAudioTrack(audio_track->id()).get()
              == audio_track.get());
  audio_track = stream_->GetAudioTracks()[0];
  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
  EXPECT_TRUE(audio_track->enabled());
}
// Tests that each track can be removed exactly once, that the stream
// notifies its observer once per successful removal, and that removing a
// NULL track fails.
TEST_F(MediaStreamTest, RemoveTrack) {
  MockObserver observer;
  stream_->RegisterObserver(&observer);
  // One notification per successful removal (audio + video).
  EXPECT_CALL(observer, OnChanged())
      .Times(Exactly(2));
  EXPECT_TRUE(stream_->RemoveTrack(audio_track_));
  EXPECT_FALSE(stream_->RemoveTrack(audio_track_));
  EXPECT_EQ(0u, stream_->GetAudioTracks().size());
  // Fixed copy-paste duplication: after removing only the audio track, the
  // video track must still be present (the original repeated the audio
  // check twice).
  EXPECT_EQ(1u, stream_->GetVideoTracks().size());
  EXPECT_TRUE(stream_->RemoveTrack(video_track_));
  EXPECT_FALSE(stream_->RemoveTrack(video_track_));
  EXPECT_EQ(0u, stream_->GetVideoTracks().size());
  // Fixed copy-paste duplication: the audio list must remain empty too.
  EXPECT_EQ(0u, stream_->GetAudioTracks().size());
  EXPECT_FALSE(stream_->RemoveTrack(static_cast<AudioTrackInterface*>(NULL)));
  EXPECT_FALSE(stream_->RemoveTrack(static_cast<VideoTrackInterface*>(NULL)));
}
// Exercises enabled/state change notifications on the stream's video track.
TEST_F(MediaStreamTest, ChangeVideoTrack) {
  scoped_refptr<webrtc::VideoTrackInterface> track(
      stream_->GetVideoTracks()[0]);
  ChangeTrack(track.get());
}
// Exercises enabled/state change notifications on the stream's audio track.
TEST_F(MediaStreamTest, ChangeAudioTrack) {
  scoped_refptr<webrtc::AudioTrackInterface> track(
      stream_->GetAudioTracks()[0]);
  ChangeTrack(track.get());
}
} // namespace webrtc

View File

@ -0,0 +1,440 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/mediastreamhandler.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/videosourceinterface.h"
namespace webrtc {
// Takes a reference to |track| (stored in a scoped_refptr), caches its
// current state/enabled flag, and registers for change notifications.
TrackHandler::TrackHandler(MediaStreamTrackInterface* track, uint32 ssrc)
    : track_(track),
      ssrc_(ssrc),
      state_(track->state()),
      enabled_(track->enabled()) {
  track_->RegisterObserver(this);
}

TrackHandler::~TrackHandler() {
  // Stop observing before the handler goes away.
  track_->UnregisterObserver(this);
}
void TrackHandler::OnChanged() {
if (state_ != track_->state()) {
state_ = track_->state();
OnStateChanged();
}
if (enabled_ != track_->enabled()) {
enabled_ = track_->enabled();
OnEnabledChanged();
}
}
// Starts forwarding the local audio track to |provider|; the initial
// enabled state is pushed immediately via OnEnabledChanged().
LocalAudioTrackHandler::LocalAudioTrackHandler(
    AudioTrackInterface* track,
    uint32 ssrc,
    AudioProviderInterface* provider)
    : TrackHandler(track, ssrc),
      audio_track_(track),
      provider_(provider) {
  OnEnabledChanged();
}

LocalAudioTrackHandler::~LocalAudioTrackHandler() {
}

void LocalAudioTrackHandler::OnStateChanged() {
  // TODO(perkj): What should happen when the state change?
}

// Tells the provider to stop sending audio for this ssrc.
void LocalAudioTrackHandler::Stop() {
  cricket::AudioOptions options;
  provider_->SetAudioSend(ssrc(), false, options);
}

// Pushes the track's enabled flag to the provider; when enabled and a
// source exists, the source's audio options are forwarded as well.
// NOTE(review): assumes the source is always a LocalAudioSource — confirm
// before other AudioSourceInterface implementations are introduced.
void LocalAudioTrackHandler::OnEnabledChanged() {
  cricket::AudioOptions options;
  if (audio_track_->enabled() && audio_track_->GetSource()) {
    options = static_cast<LocalAudioSource*>(
        audio_track_->GetSource())->options();
  }
  provider_->SetAudioSend(ssrc(), audio_track_->enabled(), options);
}
// Starts playout of the remote audio track; the track's frame input is
// handed to the provider as the renderer for |ssrc|.
RemoteAudioTrackHandler::RemoteAudioTrackHandler(
    AudioTrackInterface* track,
    uint32 ssrc,
    AudioProviderInterface* provider)
    : TrackHandler(track, ssrc),
      audio_track_(track),
      provider_(provider) {
  OnEnabledChanged();
  provider_->SetAudioRenderer(ssrc, audio_track_->FrameInput());
}

RemoteAudioTrackHandler::~RemoteAudioTrackHandler() {
}

// Stops playout for this ssrc.
void RemoteAudioTrackHandler::Stop() {
  provider_->SetAudioPlayout(ssrc(), false);
}

void RemoteAudioTrackHandler::OnStateChanged() {
}

// Mirrors the track's enabled flag into the playout state.
void RemoteAudioTrackHandler::OnEnabledChanged() {
  provider_->SetAudioPlayout(ssrc(), audio_track_->enabled());
}
// Connects the local video track's capturer to |provider| and pushes the
// initial enabled state.
LocalVideoTrackHandler::LocalVideoTrackHandler(
    VideoTrackInterface* track,
    uint32 ssrc,
    VideoProviderInterface* provider)
    : TrackHandler(track, ssrc),
      local_video_track_(track),
      provider_(provider) {
  VideoSourceInterface* source = local_video_track_->GetSource();
  if (source)
    provider_->SetCaptureDevice(ssrc, source->GetVideoCapturer());
  OnEnabledChanged();
}

LocalVideoTrackHandler::~LocalVideoTrackHandler() {
}

void LocalVideoTrackHandler::OnStateChanged() {
}

// Detaches the capturer and stops sending video for this ssrc.
void LocalVideoTrackHandler::Stop() {
  provider_->SetCaptureDevice(ssrc(), NULL);
  provider_->SetVideoSend(ssrc(), false, NULL);
}

// Pushes the track's enabled flag to the provider; when enabled and a
// source exists, the source's video options are forwarded too.
void LocalVideoTrackHandler::OnEnabledChanged() {
  const cricket::VideoOptions* options = NULL;
  VideoSourceInterface* source = local_video_track_->GetSource();
  if (local_video_track_->enabled() && source) {
    options = source->options();
  }
  provider_->SetVideoSend(ssrc(), local_video_track_->enabled(), options);
}
// Starts playout of the remote video track; the initial enabled state is
// pushed immediately via OnEnabledChanged().
RemoteVideoTrackHandler::RemoteVideoTrackHandler(
    VideoTrackInterface* track,
    uint32 ssrc,
    VideoProviderInterface* provider)
    : TrackHandler(track, ssrc),
      remote_video_track_(track),
      provider_(provider) {
  OnEnabledChanged();
}

RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
}

void RemoteVideoTrackHandler::Stop() {
  // Since cricket::VideoRenderer is not reference counted
  // we need to remove the renderer before we are deleted.
  provider_->SetVideoPlayout(ssrc(), false, NULL);
}

void RemoteVideoTrackHandler::OnStateChanged() {
}

// Mirrors the track's enabled flag into the playout state, re-attaching
// the track's frame input as the renderer.
void RemoteVideoTrackHandler::OnEnabledChanged() {
  provider_->SetVideoPlayout(ssrc(),
                             remote_video_track_->enabled(),
                             remote_video_track_->FrameInput());
}
// Holds a reference to |stream|; the providers are borrowed and must
// outlive this handler.
MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,
                                       AudioProviderInterface* audio_provider,
                                       VideoProviderInterface* video_provider)
    : stream_(stream),
      audio_provider_(audio_provider),
      video_provider_(video_provider) {
}

// Deletes all owned track handlers. Note that Stop() is NOT called here;
// callers are expected to stop handlers first (see
// MediaStreamHandlerContainer::TearDown / DeleteStreamHandler).
MediaStreamHandler::~MediaStreamHandler() {
  for (TrackHandlers::iterator it = track_handlers_.begin();
       it != track_handlers_.end(); ++it) {
    delete *it;
  }
}
// Stops, deletes and unregisters the TrackHandler associated with |track|.
// No-op when no handler for |track| exists.
void MediaStreamHandler::RemoveTrack(MediaStreamTrackInterface* track) {
  for (TrackHandlers::iterator it = track_handlers_.begin();
       it != track_handlers_.end(); ++it) {
    if ((*it)->track() == track) {
      // Renamed from |track| to avoid shadowing the parameter.
      TrackHandler* handler = *it;
      handler->Stop();
      delete handler;
      track_handlers_.erase(it);
      break;
    }
  }
}
// Returns the TrackHandler managing |track|, or NULL if none is registered.
TrackHandler* MediaStreamHandler::FindTrackHandler(
    MediaStreamTrackInterface* track) {
  TrackHandlers::iterator it = track_handlers_.begin();
  for (; it != track_handlers_.end(); ++it) {
    if ((*it)->track() == track) {
      return *it;  // Removed the unreachable |break| after the return.
    }
  }
  return NULL;
}
// Returns the stream this handler manages (borrowed pointer; the handler
// keeps its own reference).
MediaStreamInterface* MediaStreamHandler::stream() {
  return stream_.get();
}

// ObserverInterface callback from the stream; intentionally a no-op.
void MediaStreamHandler::OnChanged() {
}
void MediaStreamHandler::Stop() {
for (TrackHandlers::const_iterator it = track_handlers_.begin();
it != track_handlers_.end(); ++it) {
(*it)->Stop();
}
}
LocalMediaStreamHandler::LocalMediaStreamHandler(
    MediaStreamInterface* stream,
    AudioProviderInterface* audio_provider,
    VideoProviderInterface* video_provider)
    : MediaStreamHandler(stream, audio_provider, video_provider) {
}

LocalMediaStreamHandler::~LocalMediaStreamHandler() {
}

// Creates a LocalAudioTrackHandler tying |audio_track| to |ssrc|.
// A given track must only be added once.
void LocalMediaStreamHandler::AddAudioTrack(AudioTrackInterface* audio_track,
                                            uint32 ssrc) {
  ASSERT(!FindTrackHandler(audio_track));
  TrackHandler* handler(new LocalAudioTrackHandler(audio_track, ssrc,
                                                   audio_provider_));
  track_handlers_.push_back(handler);
}

// Creates a LocalVideoTrackHandler tying |video_track| to |ssrc|.
void LocalMediaStreamHandler::AddVideoTrack(VideoTrackInterface* video_track,
                                            uint32 ssrc) {
  ASSERT(!FindTrackHandler(video_track));
  TrackHandler* handler(new LocalVideoTrackHandler(video_track, ssrc,
                                                   video_provider_));
  track_handlers_.push_back(handler);
}
RemoteMediaStreamHandler::RemoteMediaStreamHandler(
    MediaStreamInterface* stream,
    AudioProviderInterface* audio_provider,
    VideoProviderInterface* video_provider)
    : MediaStreamHandler(stream, audio_provider, video_provider) {
}

RemoteMediaStreamHandler::~RemoteMediaStreamHandler() {
}

// Creates a RemoteAudioTrackHandler tying |audio_track| to |ssrc|.
// A given track must only be added once.
void RemoteMediaStreamHandler::AddAudioTrack(AudioTrackInterface* audio_track,
                                             uint32 ssrc) {
  ASSERT(!FindTrackHandler(audio_track));
  TrackHandler* handler(
      new RemoteAudioTrackHandler(audio_track, ssrc, audio_provider_));
  track_handlers_.push_back(handler);
}

// Creates a RemoteVideoTrackHandler tying |video_track| to |ssrc|.
void RemoteMediaStreamHandler::AddVideoTrack(VideoTrackInterface* video_track,
                                             uint32 ssrc) {
  ASSERT(!FindTrackHandler(video_track));
  TrackHandler* handler(
      new RemoteVideoTrackHandler(video_track, ssrc, video_provider_));
  track_handlers_.push_back(handler);
}
// The providers are borrowed; TearDown() must run before they go away.
MediaStreamHandlerContainer::MediaStreamHandlerContainer(
    AudioProviderInterface* audio_provider,
    VideoProviderInterface* video_provider)
    : audio_provider_(audio_provider),
      video_provider_(video_provider) {
}

// TearDown() is required before destruction; both handler lists must be
// empty here.
MediaStreamHandlerContainer::~MediaStreamHandlerContainer() {
  ASSERT(remote_streams_handlers_.empty());
  ASSERT(local_streams_handlers_.empty());
}

// Stops and deletes every remote and local stream handler. Must be called
// prior to destroying this container or the providers.
void MediaStreamHandlerContainer::TearDown() {
  for (StreamHandlerList::iterator it = remote_streams_handlers_.begin();
       it != remote_streams_handlers_.end(); ++it) {
    (*it)->Stop();
    delete *it;
  }
  remote_streams_handlers_.clear();
  for (StreamHandlerList::iterator it = local_streams_handlers_.begin();
       it != local_streams_handlers_.end(); ++it) {
    (*it)->Stop();
    delete *it;
  }
  local_streams_handlers_.clear();
}
// Stops and deletes all track handlers for the remote |stream|.
void MediaStreamHandlerContainer::RemoveRemoteStream(
    MediaStreamInterface* stream) {
  DeleteStreamHandler(&remote_streams_handlers_, stream);
}

// Associates |audio_track| with |ssrc|, creating the stream's remote
// handler on first use.
void MediaStreamHandlerContainer::AddRemoteAudioTrack(
    MediaStreamInterface* stream,
    AudioTrackInterface* audio_track,
    uint32 ssrc) {
  MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
                                                  stream);
  if (handler == NULL) {
    handler = CreateRemoteStreamHandler(stream);
  }
  handler->AddAudioTrack(audio_track, ssrc);
}

// Associates |video_track| with |ssrc|, creating the stream's remote
// handler on first use.
void MediaStreamHandlerContainer::AddRemoteVideoTrack(
    MediaStreamInterface* stream,
    VideoTrackInterface* video_track,
    uint32 ssrc) {
  MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
                                                  stream);
  if (handler == NULL) {
    handler = CreateRemoteStreamHandler(stream);
  }
  handler->AddVideoTrack(video_track, ssrc);
}
// Removes the TrackHandler for |track| from the remote |stream|'s handler.
// Logs a warning and returns if the stream has no handler.
void MediaStreamHandlerContainer::RemoveRemoteTrack(
    MediaStreamInterface* stream,
    MediaStreamTrackInterface* track) {
  MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
                                                  stream);
  if (!VERIFY(handler != NULL)) {
    // Fixed: this message previously said "Local" even though the remote
    // list is searched here, was missing the space before "doesn't", and
    // misspelled it as "doesnt't".
    LOG(LS_WARNING) << "Remote MediaStreamHandler for stream with id "
                    << stream->label() << " doesn't exist.";
    return;
  }
  handler->RemoveTrack(track);
}
// Stops and deletes all track handlers for the local |stream|.
void MediaStreamHandlerContainer::RemoveLocalStream(
    MediaStreamInterface* stream) {
  DeleteStreamHandler(&local_streams_handlers_, stream);
}

// Associates |audio_track| with |ssrc|, creating the stream's local
// handler on first use.
void MediaStreamHandlerContainer::AddLocalAudioTrack(
    MediaStreamInterface* stream,
    AudioTrackInterface* audio_track,
    uint32 ssrc) {
  MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
                                                  stream);
  if (handler == NULL) {
    handler = CreateLocalStreamHandler(stream);
  }
  handler->AddAudioTrack(audio_track, ssrc);
}

// Associates |video_track| with |ssrc|, creating the stream's local
// handler on first use.
void MediaStreamHandlerContainer::AddLocalVideoTrack(
    MediaStreamInterface* stream,
    VideoTrackInterface* video_track,
    uint32 ssrc) {
  MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
                                                  stream);
  if (handler == NULL) {
    handler = CreateLocalStreamHandler(stream);
  }
  handler->AddVideoTrack(video_track, ssrc);
}
// Removes the TrackHandler for |track| from the local |stream|'s handler.
// Logs a warning and returns if the stream has no handler.
void MediaStreamHandlerContainer::RemoveLocalTrack(
    MediaStreamInterface* stream,
    MediaStreamTrackInterface* track) {
  MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
                                                  stream);
  if (!VERIFY(handler != NULL)) {
    // Fixed: this message previously said "Remote" even though the local
    // list is searched here, was missing the space before "doesn't", and
    // misspelled it as "doesnt't".
    LOG(LS_WARNING) << "Local MediaStreamHandler for stream with id "
                    << stream->label() << " doesn't exist.";
    return;
  }
  handler->RemoveTrack(track);
}
// Allocates and registers a handler for the remote |stream|. Must not be
// called when the stream already has a handler.
MediaStreamHandler* MediaStreamHandlerContainer::CreateRemoteStreamHandler(
    MediaStreamInterface* stream) {
  ASSERT(!FindStreamHandler(remote_streams_handlers_, stream));
  RemoteMediaStreamHandler* handler =
      new RemoteMediaStreamHandler(stream, audio_provider_, video_provider_);
  remote_streams_handlers_.push_back(handler);
  return handler;
}

// Allocates and registers a handler for the local |stream|. Must not be
// called when the stream already has a handler.
MediaStreamHandler* MediaStreamHandlerContainer::CreateLocalStreamHandler(
    MediaStreamInterface* stream) {
  ASSERT(!FindStreamHandler(local_streams_handlers_, stream));
  LocalMediaStreamHandler* handler =
      new LocalMediaStreamHandler(stream, audio_provider_, video_provider_);
  local_streams_handlers_.push_back(handler);
  return handler;
}
// Linear search over |handlers| for the handler managing |stream|.
// Returns NULL when |stream| has no handler in the list.
MediaStreamHandler* MediaStreamHandlerContainer::FindStreamHandler(
    const StreamHandlerList& handlers,
    MediaStreamInterface* stream) {
  for (StreamHandlerList::const_iterator handler = handlers.begin();
       handler != handlers.end(); ++handler) {
    if ((*handler)->stream() == stream)
      return *handler;
  }
  return NULL;
}
// Stops, deletes and unregisters the handler for |stream| found in
// |streamhandlers|. No-op when |stream| has no handler in the list.
void MediaStreamHandlerContainer::DeleteStreamHandler(
    StreamHandlerList* streamhandlers, MediaStreamInterface* stream) {
  for (StreamHandlerList::iterator handler = streamhandlers->begin();
       handler != streamhandlers->end(); ++handler) {
    if ((*handler)->stream() == stream) {
      (*handler)->Stop();
      delete *handler;
      streamhandlers->erase(handler);
      return;
    }
  }
}
} // namespace webrtc

View File

@ -0,0 +1,264 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains classes for listening on changes on MediaStreams and
// MediaTracks that are connected to a certain PeerConnection.
// Example: If a user sets a rendererer on a remote video track the renderer is
// connected to the appropriate remote video stream.
#ifndef TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
#define TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
#include <list>
#include <vector>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/mediastreamprovider.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/base/thread.h"
namespace webrtc {
// TrackHandler listens to events on a MediaStreamTrackInterface that is
// connected to a certain PeerConnection.
class TrackHandler : public ObserverInterface {
 public:
  TrackHandler(MediaStreamTrackInterface* track, uint32 ssrc);
  virtual ~TrackHandler();
  // ObserverInterface implementation: diffs the track's state and enabled
  // flag against the cached copies and dispatches to the hooks below.
  virtual void OnChanged();
  // Stop using |track_| on this PeerConnection.
  virtual void Stop() = 0;
  MediaStreamTrackInterface* track() { return track_; }
  uint32 ssrc() const { return ssrc_; }

 protected:
  // Called when the corresponding cached value below went stale.
  virtual void OnStateChanged() = 0;
  virtual void OnEnabledChanged() = 0;

 private:
  talk_base::scoped_refptr<MediaStreamTrackInterface> track_;
  uint32 ssrc_;
  MediaStreamTrackInterface::TrackState state_;  // Cached track state.
  bool enabled_;  // Cached enabled flag.
};
// LocalAudioTrackHandler listens to events on a local AudioTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
class LocalAudioTrackHandler : public TrackHandler {
 public:
  LocalAudioTrackHandler(AudioTrackInterface* track,
                         uint32 ssrc,
                         AudioProviderInterface* provider);
  virtual ~LocalAudioTrackHandler();
  // Disables sending on this track's ssrc.
  virtual void Stop() OVERRIDE;

 protected:
  virtual void OnStateChanged() OVERRIDE;
  virtual void OnEnabledChanged() OVERRIDE;

 private:
  AudioTrackInterface* audio_track_;  // Borrowed; base class holds the ref.
  AudioProviderInterface* provider_;  // Borrowed; must outlive the handler.
};
// RemoteAudioTrackHandler listens to events on a remote AudioTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
class RemoteAudioTrackHandler : public TrackHandler {
 public:
  RemoteAudioTrackHandler(AudioTrackInterface* track,
                          uint32 ssrc,
                          AudioProviderInterface* provider);
  virtual ~RemoteAudioTrackHandler();
  // Disables playout of this track's ssrc.
  virtual void Stop() OVERRIDE;

 protected:
  virtual void OnStateChanged() OVERRIDE;
  virtual void OnEnabledChanged() OVERRIDE;

 private:
  AudioTrackInterface* audio_track_;  // Borrowed; base class holds the ref.
  AudioProviderInterface* provider_;  // Borrowed; must outlive the handler.
};
// LocalVideoTrackHandler listens to events on a local VideoTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
class LocalVideoTrackHandler : public TrackHandler {
 public:
  LocalVideoTrackHandler(VideoTrackInterface* track,
                         uint32 ssrc,
                         VideoProviderInterface* provider);
  virtual ~LocalVideoTrackHandler();
  // Detaches the capture device and disables sending on this track's ssrc.
  virtual void Stop() OVERRIDE;

 protected:
  virtual void OnStateChanged() OVERRIDE;
  virtual void OnEnabledChanged() OVERRIDE;

 private:
  VideoTrackInterface* local_video_track_;  // Borrowed; base holds the ref.
  VideoProviderInterface* provider_;  // Borrowed; must outlive the handler.
};
// RemoteVideoTrackHandler listens to events on a remote VideoTrack instance
// connected to a PeerConnection and orders the |provider| to execute
// requested changes.
class RemoteVideoTrackHandler : public TrackHandler {
 public:
  RemoteVideoTrackHandler(VideoTrackInterface* track,
                          uint32 ssrc,
                          VideoProviderInterface* provider);
  virtual ~RemoteVideoTrackHandler();
  // Disables playout and detaches the renderer for this track's ssrc.
  virtual void Stop() OVERRIDE;

 protected:
  virtual void OnStateChanged() OVERRIDE;
  virtual void OnEnabledChanged() OVERRIDE;

 private:
  VideoTrackInterface* remote_video_track_;  // Borrowed; base holds the ref.
  VideoProviderInterface* provider_;  // Borrowed; must outlive the handler.
};
// Base class owning the TrackHandlers for one MediaStream and observing
// that stream.
class MediaStreamHandler : public ObserverInterface {
 public:
  MediaStreamHandler(MediaStreamInterface* stream,
                     AudioProviderInterface* audio_provider,
                     VideoProviderInterface* video_provider);
  // Declared virtual: subclasses (Local/RemoteMediaStreamHandler) are
  // deleted through MediaStreamHandler pointers in
  // MediaStreamHandlerContainer, which requires a virtual destructor.
  virtual ~MediaStreamHandler();
  MediaStreamInterface* stream();
  // Calls Stop() on every owned TrackHandler.
  void Stop();
  virtual void AddAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc) = 0;
  virtual void AddVideoTrack(VideoTrackInterface* video_track, uint32 ssrc) = 0;
  // Stops, deletes and unregisters the handler for |track|, if any.
  virtual void RemoveTrack(MediaStreamTrackInterface* track);
  virtual void OnChanged() OVERRIDE;

 protected:
  TrackHandler* FindTrackHandler(MediaStreamTrackInterface* track);
  talk_base::scoped_refptr<MediaStreamInterface> stream_;
  AudioProviderInterface* audio_provider_;
  VideoProviderInterface* video_provider_;
  typedef std::vector<TrackHandler*> TrackHandlers;
  TrackHandlers track_handlers_;  // Owned; deleted in the destructor.
};
// MediaStreamHandler for locally-originated streams; creates
// Local*TrackHandlers for added tracks.
class LocalMediaStreamHandler : public MediaStreamHandler {
 public:
  LocalMediaStreamHandler(MediaStreamInterface* stream,
                          AudioProviderInterface* audio_provider,
                          VideoProviderInterface* video_provider);
  ~LocalMediaStreamHandler();

  virtual void AddAudioTrack(AudioTrackInterface* audio_track,
                             uint32 ssrc) OVERRIDE;
  virtual void AddVideoTrack(VideoTrackInterface* video_track,
                             uint32 ssrc) OVERRIDE;
};
// MediaStreamHandler for remotely-originated streams; creates
// Remote*TrackHandlers for added tracks.
class RemoteMediaStreamHandler : public MediaStreamHandler {
 public:
  RemoteMediaStreamHandler(MediaStreamInterface* stream,
                           AudioProviderInterface* audio_provider,
                           VideoProviderInterface* video_provider);
  ~RemoteMediaStreamHandler();

  virtual void AddAudioTrack(AudioTrackInterface* audio_track,
                             uint32 ssrc) OVERRIDE;
  virtual void AddVideoTrack(VideoTrackInterface* video_track,
                             uint32 ssrc) OVERRIDE;
};
// Container for MediaStreamHandlers of currently known local and remote
// MediaStreams.
class MediaStreamHandlerContainer {
 public:
  MediaStreamHandlerContainer(AudioProviderInterface* audio_provider,
                              VideoProviderInterface* video_provider);
  ~MediaStreamHandlerContainer();

  // Notify all referenced objects that MediaStreamHandlerContainer will be
  // destroyed. This method must be called prior to the dtor and prior to
  // destruction of |audio_provider| and |video_provider|.
  void TearDown();

  // Remove all TrackHandlers for tracks in |stream| and make sure the
  // audio_provider and video_provider are notified that the tracks have
  // been removed.
  void RemoveRemoteStream(MediaStreamInterface* stream);
  // Create a RemoteAudioTrackHandler and associate |audio_track| with |ssrc|.
  void AddRemoteAudioTrack(MediaStreamInterface* stream,
                           AudioTrackInterface* audio_track,
                           uint32 ssrc);
  // Create a RemoteVideoTrackHandler and associate |video_track| with |ssrc|.
  void AddRemoteVideoTrack(MediaStreamInterface* stream,
                           VideoTrackInterface* video_track,
                           uint32 ssrc);
  // Remove the TrackHandler for |track|.
  void RemoveRemoteTrack(MediaStreamInterface* stream,
                         MediaStreamTrackInterface* track);

  // Remove all TrackHandlers for tracks in |stream| and make sure the
  // audio_provider and video_provider are notified that the tracks have
  // been removed.
  void RemoveLocalStream(MediaStreamInterface* stream);
  // Create a LocalAudioTrackHandler and associate |audio_track| with |ssrc|.
  void AddLocalAudioTrack(MediaStreamInterface* stream,
                          AudioTrackInterface* audio_track,
                          uint32 ssrc);
  // Create a LocalVideoTrackHandler and associate |video_track| with |ssrc|.
  void AddLocalVideoTrack(MediaStreamInterface* stream,
                          VideoTrackInterface* video_track,
                          uint32 ssrc);
  // Remove the TrackHandler for |track|.
  void RemoveLocalTrack(MediaStreamInterface* stream,
                        MediaStreamTrackInterface* track);

 private:
  typedef std::list<MediaStreamHandler*> StreamHandlerList;
  // Returns the handler for |stream| in |handlers|, or NULL.
  MediaStreamHandler* FindStreamHandler(const StreamHandlerList& handlers,
                                        MediaStreamInterface* stream);
  MediaStreamHandler* CreateRemoteStreamHandler(MediaStreamInterface* stream);
  MediaStreamHandler* CreateLocalStreamHandler(MediaStreamInterface* stream);
  // Stops, deletes and unregisters the handler for |stream|, if present.
  void DeleteStreamHandler(StreamHandlerList* streamhandlers,
                           MediaStreamInterface* stream);

  StreamHandlerList local_streams_handlers_;   // Owned handlers.
  StreamHandlerList remote_streams_handlers_;  // Owned handlers.
  AudioProviderInterface* audio_provider_;     // Borrowed.
  VideoProviderInterface* video_provider_;     // Borrowed.
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_

View File

@ -0,0 +1,297 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/mediastreamhandler.h"
#include <string>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/media/base/fakevideocapturer.h"
#include "talk/media/base/mediachannel.h"
#include "testing/base/public/gmock.h"
using ::testing::_;
using ::testing::Exactly;

// Stream/track identifiers and ssrcs shared by all tests in this file.
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "video_1";
static const char kAudioTrackId[] = "audio_1";
static const uint32 kVideoSsrc = 98;
static const uint32 kAudioSsrc = 99;
namespace webrtc {
// Helper class to test MediaStreamHandler: mocks AudioProviderInterface so
// tests can verify the provider calls made by the audio track handlers.
class MockAudioProvider : public AudioProviderInterface {
 public:
  virtual ~MockAudioProvider() {}
  MOCK_METHOD2(SetAudioPlayout, void(uint32 ssrc, bool enable));
  MOCK_METHOD3(SetAudioSend, void(uint32 ssrc, bool enable,
                                  const cricket::AudioOptions& options));
  MOCK_METHOD2(SetAudioRenderer, bool(uint32, cricket::AudioRenderer*));
};
// Helper class to test MediaStreamHandler: mocks VideoProviderInterface so
// tests can verify the provider calls made by the video track handlers.
class MockVideoProvider : public VideoProviderInterface {
 public:
  virtual ~MockVideoProvider() {}
  MOCK_METHOD2(SetCaptureDevice, bool(uint32 ssrc,
                                      cricket::VideoCapturer* camera));
  MOCK_METHOD3(SetVideoPlayout, void(uint32 ssrc,
                                     bool enable,
                                     cricket::VideoRenderer* renderer));
  MOCK_METHOD3(SetVideoSend, void(uint32 ssrc, bool enable,
                                  const cricket::VideoOptions* options));
};
// Minimal VideoSourceInterface backed by a cricket::FakeVideoCapturer.
// Always reports the kLive state and default video options.
class FakeVideoSource : public Notifier<VideoSourceInterface> {
 public:
  static talk_base::scoped_refptr<FakeVideoSource> Create() {
    return new talk_base::RefCountedObject<FakeVideoSource>();
  }
  virtual cricket::VideoCapturer* GetVideoCapturer() {
    return &fake_capturer_;
  }
  // Sinks are ignored; this fake never delivers frames.
  virtual void AddSink(cricket::VideoRenderer* output) {}
  virtual void RemoveSink(cricket::VideoRenderer* output) {}
  virtual SourceState state() const { return state_; }
  virtual const cricket::VideoOptions* options() const { return &options_; }

 protected:
  FakeVideoSource() : state_(kLive) {}
  ~FakeVideoSource() {}

 private:
  cricket::FakeVideoCapturer fake_capturer_;
  SourceState state_;
  cricket::VideoOptions options_;
};
// Fixture wiring a MediaStream (one audio + one video track) to a
// MediaStreamHandlerContainer backed by mock providers. The helper methods
// below install the EXPECT_CALLs for the provider side effects of each
// container operation, then perform the operation.
class MediaStreamHandlerTest : public testing::Test {
 public:
  MediaStreamHandlerTest()
      : handlers_(&audio_provider_, &video_provider_) {
  }
  virtual void SetUp() {
    stream_ = MediaStream::Create(kStreamLabel1);
    talk_base::scoped_refptr<VideoSourceInterface> source(
        FakeVideoSource::Create());
    video_track_ = VideoTrack::Create(kVideoTrackId, source);
    EXPECT_TRUE(stream_->AddTrack(video_track_));
    // The audio track is created without a source.
    audio_track_ = AudioTrack::Create(kAudioTrackId,
                                      NULL);
    EXPECT_TRUE(stream_->AddTrack(audio_track_));
  }
  // Adding a local audio track must enable sending on its ssrc.
  void AddLocalAudioTrack() {
    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _));
    handlers_.AddLocalAudioTrack(stream_, stream_->GetAudioTracks()[0],
                                 kAudioSsrc);
  }
  // Adding a local video track must attach the capturer and enable sending.
  void AddLocalVideoTrack() {
    EXPECT_CALL(video_provider_, SetCaptureDevice(
        kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
    handlers_.AddLocalVideoTrack(stream_, stream_->GetVideoTracks()[0],
                                 kVideoSsrc);
  }
  // Removing the local audio track must disable sending exactly once.
  void RemoveLocalAudioTrack() {
    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _))
        .Times(1);
    handlers_.RemoveLocalTrack(stream_, audio_track_);
  }
  // Removing the local video track must detach the capturer and disable
  // sending, each exactly once.
  void RemoveLocalVideoTrack() {
    EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL))
        .Times(1);
    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _))
        .Times(1);
    handlers_.RemoveLocalTrack(stream_, video_track_);
  }
  // Adding a remote audio track must set the renderer and enable playout.
  void AddRemoteAudioTrack() {
    EXPECT_CALL(audio_provider_, SetAudioRenderer(kAudioSsrc, _));
    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
    handlers_.AddRemoteAudioTrack(stream_, stream_->GetAudioTracks()[0],
                                  kAudioSsrc);
  }
  // Adding a remote video track must enable playout to the track's renderer.
  void AddRemoteVideoTrack() {
    EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true,
                                                 video_track_->FrameInput()));
    handlers_.AddRemoteVideoTrack(stream_, stream_->GetVideoTracks()[0],
                                  kVideoSsrc);
  }
  // Removing the remote audio track must disable playout.
  void RemoveRemoteAudioTrack() {
    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
    handlers_.RemoveRemoteTrack(stream_, stream_->GetAudioTracks()[0]);
  }
  // Removing the remote video track must disable playout and detach the
  // renderer.
  void RemoveRemoteVideoTrack() {
    EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL));
    handlers_.RemoveRemoteTrack(stream_, stream_->GetVideoTracks()[0]);
  }

 protected:
  MockAudioProvider audio_provider_;
  MockVideoProvider video_provider_;
  MediaStreamHandlerContainer handlers_;
  talk_base::scoped_refptr<MediaStreamInterface> stream_;
  talk_base::scoped_refptr<VideoTrackInterface> video_track_;
  talk_base::scoped_refptr<AudioTrackInterface> audio_track_;
};
// Test that |audio_provider_| is notified when an audio track is associated
// and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveLocalAudioTrack) {
  AddLocalAudioTrack();
  RemoveLocalAudioTrack();
  handlers_.RemoveLocalStream(stream_);
}

// Test that |video_provider_| is notified when a video track is associated
// and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveLocalVideoTrack) {
  AddLocalVideoTrack();
  RemoveLocalVideoTrack();
  handlers_.RemoveLocalStream(stream_);
}

// Test that |video_provider_| and |audio_provider_| are notified when audio
// and video tracks are disassociated with a MediaStreamHandler by calling
// RemoveLocalStream.
TEST_F(MediaStreamHandlerTest, RemoveLocalStream) {
  AddLocalAudioTrack();
  AddLocalVideoTrack();
  // Removing the stream must tear down capture/send for both tracks.
  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL))
      .Times(1);
  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _))
      .Times(1);
  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _))
      .Times(1);
  handlers_.RemoveLocalStream(stream_);
}

// Test that |audio_provider_| is notified when a remote audio track is
// associated and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveRemoteAudioTrack) {
  AddRemoteAudioTrack();
  RemoveRemoteAudioTrack();
  handlers_.RemoveRemoteStream(stream_);
}

// Test that |video_provider_| is notified when a remote
// video track is associated and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveRemoteVideoTrack) {
  AddRemoteVideoTrack();
  RemoveRemoteVideoTrack();
  handlers_.RemoveRemoteStream(stream_);
}
// Test that |audio_provider_| and |video_provider_| are notified when audio
// and video tracks are disassociated with a MediaStreamHandler by calling
// RemoveRemoteStream.
TEST_F(MediaStreamHandlerTest, RemoveRemoteStream) {
  AddRemoteAudioTrack();
  AddRemoteVideoTrack();
  // Removing the stream must tear down playout for both tracks.
  EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL))
      .Times(1);
  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false))
      .Times(1);
  handlers_.RemoveRemoteStream(stream_);
}

// Toggling a local audio track's enabled flag must toggle sending.
TEST_F(MediaStreamHandlerTest, LocalAudioTrackDisable) {
  AddLocalAudioTrack();
  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _));
  audio_track_->set_enabled(false);
  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _));
  audio_track_->set_enabled(true);
  RemoveLocalAudioTrack();
  handlers_.TearDown();
}

// Toggling a remote audio track's enabled flag must toggle playout.
TEST_F(MediaStreamHandlerTest, RemoteAudioTrackDisable) {
  AddRemoteAudioTrack();
  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
  audio_track_->set_enabled(false);
  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
  audio_track_->set_enabled(true);
  RemoveRemoteAudioTrack();
  handlers_.TearDown();
}

// Toggling a local video track's enabled flag must toggle sending.
TEST_F(MediaStreamHandlerTest, LocalVideoTrackDisable) {
  AddLocalVideoTrack();
  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _));
  video_track_->set_enabled(false);
  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
  video_track_->set_enabled(true);
  RemoveLocalVideoTrack();
  handlers_.TearDown();
}

// Toggling a remote video track's enabled flag must toggle playout.
TEST_F(MediaStreamHandlerTest, RemoteVideoTrackDisable) {
  AddRemoteVideoTrack();
  EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, _));
  video_track_->set_enabled(false);
  EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true,
                                               video_track_->FrameInput()));
  video_track_->set_enabled(true);
  RemoveRemoteVideoTrack();
  handlers_.TearDown();
}
} // namespace webrtc

View File

@ -0,0 +1,196 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file contains interfaces for MediaStream, MediaTrack and MediaSource.
// These interfaces are used for implementing MediaStream and MediaTrack as
// defined in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These
// interfaces must be used only with PeerConnection. PeerConnectionManager
// interface provides the factory methods to create MediaStream and MediaTracks.
#ifndef TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_
#define TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_
#include <string>
#include <vector>
#include "talk/base/basictypes.h"
#include "talk/base/refcount.h"
#include "talk/base/scoped_ref_ptr.h"
namespace cricket {
class AudioRenderer;
class VideoCapturer;
class VideoRenderer;
class VideoFrame;
} // namespace cricket
namespace webrtc {
// Generic observer interface. Implementations are notified via OnChanged()
// when the observed object (see NotifierInterface) changes.
class ObserverInterface {
 public:
  virtual void OnChanged() = 0;

 protected:
  // Protected: observers are never deleted through this interface.
  virtual ~ObserverInterface() {}
};
// Interface implemented by objects that can be observed. Registered
// observers receive OnChanged() callbacks; they must be unregistered before
// being destroyed.
class NotifierInterface {
 public:
  virtual void RegisterObserver(ObserverInterface* observer) = 0;
  virtual void UnregisterObserver(ObserverInterface* observer) = 0;

  virtual ~NotifierInterface() {}
};
// Base class for sources. A MediaStreamTrack has an underlying source that
// provides media. A source can be shared by multiple tracks.
// TODO(perkj): Implement sources for local and remote audio tracks and
// remote video tracks.
class MediaSourceInterface : public talk_base::RefCountInterface,
                             public NotifierInterface {
 public:
  // Lifecycle state of the source.
  enum SourceState {
    kInitializing,
    kLive,
    kEnded,
    kMuted
  };

  virtual SourceState state() const = 0;

 protected:
  // Protected: lifetime is managed through reference counting.
  virtual ~MediaSourceInterface() {}
};
// Information about a track. Base interface for audio and video tracks as
// defined by the WebRTC MediaStream API.
class MediaStreamTrackInterface : public talk_base::RefCountInterface,
                                  public NotifierInterface {
 public:
  enum TrackState {
    kInitializing,  // Track is being negotiated.
    kLive = 1,  // Track is alive.
    kEnded = 2,  // Track has ended.
    kFailed = 3,  // Track negotiation failed.
  };

  // Kind of the track, e.g. "audio" or "video".
  virtual std::string kind() const = 0;
  virtual std::string id() const = 0;
  virtual bool enabled() const = 0;
  virtual TrackState state() const = 0;
  virtual bool set_enabled(bool enable) = 0;
  // These methods should be called by implementation only.
  virtual bool set_state(TrackState new_state) = 0;
};
// Interface for rendering VideoFrames from a VideoTrack.
class VideoRendererInterface {
 public:
  // Tells the renderer the size of the frames that will follow.
  virtual void SetSize(int width, int height) = 0;
  // Delivers a single frame to the renderer.
  virtual void RenderFrame(const cricket::VideoFrame* frame) = 0;

 protected:
  // The destructor is protected to prevent deletion via the interface.
  // This is so that we allow reference counted classes, where the destructor
  // should never be public, to implement the interface.
  virtual ~VideoRendererInterface() {}
};
class VideoSourceInterface;
// A video track. Frames arriving on the track are forwarded to every
// registered renderer.
class VideoTrackInterface : public MediaStreamTrackInterface {
 public:
  // Register a renderer that will render all frames received on this track.
  virtual void AddRenderer(VideoRendererInterface* renderer) = 0;
  // Deregister a renderer.
  virtual void RemoveRenderer(VideoRendererInterface* renderer) = 0;

  // Gets a pointer to the frame input of this VideoTrack.
  // The pointer is valid for the lifetime of this VideoTrack.
  // VideoFrames rendered to the cricket::VideoRenderer will be rendered on all
  // registered renderers.
  virtual cricket::VideoRenderer* FrameInput() = 0;

  virtual VideoSourceInterface* GetSource() const = 0;

 protected:
  virtual ~VideoTrackInterface() {}
};
// AudioSourceInterface is a reference counted source used for AudioTracks.
// The same source can be used in multiple AudioTracks.
// TODO(perkj): Extend this class with necessary methods to allow separate
// sources for each audio track.
class AudioSourceInterface : public MediaSourceInterface {
};
// An audio track.
class AudioTrackInterface : public MediaStreamTrackInterface {
 public:
  // TODO(xians): Figure out if the following interface should be const or not.
  virtual AudioSourceInterface* GetSource() const = 0;

  // Gets a pointer to the frame input of this AudioTrack.
  // The pointer is valid for the lifetime of this AudioTrack.
  // TODO(xians): Make the following interface pure virtual once Chrome has its
  // implementation.
  virtual cricket::AudioRenderer* FrameInput() { return NULL; }

 protected:
  virtual ~AudioTrackInterface() {}
};
typedef std::vector<talk_base::scoped_refptr<AudioTrackInterface> >
AudioTrackVector;
typedef std::vector<talk_base::scoped_refptr<VideoTrackInterface> >
VideoTrackVector;
// A collection of audio and video tracks identified by a label, as defined
// by the WebRTC MediaStream API.
class MediaStreamInterface : public talk_base::RefCountInterface,
                             public NotifierInterface {
 public:
  virtual std::string label() const = 0;

  virtual AudioTrackVector GetAudioTracks() = 0;
  virtual VideoTrackVector GetVideoTracks() = 0;
  // Look up a track by its id. NOTE(review): presumably returns NULL when no
  // track with |track_id| exists — verify against implementations.
  virtual talk_base::scoped_refptr<AudioTrackInterface>
      FindAudioTrack(const std::string& track_id) = 0;
  virtual talk_base::scoped_refptr<VideoTrackInterface>
      FindVideoTrack(const std::string& track_id) = 0;

  // Add/remove a track. The bool return signals success; exact failure
  // semantics are implementation-defined.
  virtual bool AddTrack(AudioTrackInterface* track) = 0;
  virtual bool AddTrack(VideoTrackInterface* track) = 0;
  virtual bool RemoveTrack(AudioTrackInterface* track) = 0;
  virtual bool RemoveTrack(VideoTrackInterface* track) = 0;

 protected:
  // Protected: lifetime is managed through reference counting.
  virtual ~MediaStreamInterface() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMINTERFACE_H_

View File

@ -0,0 +1,81 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
#define TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
namespace cricket {
class AudioRenderer;
class VideoCapturer;
class VideoRenderer;
struct AudioOptions;
struct VideoOptions;
} // namespace cricket
namespace webrtc {
// This interface is called by AudioTrackHandler classes in mediastreamhandler.h
// to change the settings of an audio track connected to a certain
// PeerConnection.
class AudioProviderInterface {
 public:
  // Enable/disable the audio playout of a remote audio track with |ssrc|.
  virtual void SetAudioPlayout(uint32 ssrc, bool enable) = 0;
  // Enable/disable sending audio on the local audio track with |ssrc|.
  // When |enable| is true |options| should be applied to the audio track.
  virtual void SetAudioSend(uint32 ssrc, bool enable,
                            const cricket::AudioOptions& options) = 0;
  // Sets the renderer to be used for the specified |ssrc|.
  virtual bool SetAudioRenderer(uint32 ssrc,
                                cricket::AudioRenderer* renderer) = 0;

 protected:
  // Protected: providers are never deleted through this interface.
  virtual ~AudioProviderInterface() {}
};
// This interface is called by VideoTrackHandler classes in mediastreamhandler.h
// to change the settings of a video track connected to a certain
// PeerConnection.
class VideoProviderInterface {
 public:
  // Sets the capture device used for the local video track with |ssrc|.
  virtual bool SetCaptureDevice(uint32 ssrc,
                                cricket::VideoCapturer* camera) = 0;
  // Enable/disable the video playout of a remote video track with |ssrc|.
  virtual void SetVideoPlayout(uint32 ssrc, bool enable,
                               cricket::VideoRenderer* renderer) = 0;
  // Enable sending video on the local video track with |ssrc|.
  virtual void SetVideoSend(uint32 ssrc, bool enable,
                            const cricket::VideoOptions* options) = 0;

 protected:
  // Protected: providers are never deleted through this interface.
  virtual ~VideoProviderInterface() {}
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_

View File

@ -0,0 +1,54 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
#define TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/proxy.h"
namespace webrtc {
// Thread-safe proxy for MediaStreamInterface, generated by the macros in
// talk/app/webrtc/proxy.h. Each entry forwards the corresponding
// MediaStreamInterface method call to the signaling thread.
BEGIN_PROXY_MAP(MediaStream)
  PROXY_CONSTMETHOD0(std::string, label)
  PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
  PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
  PROXY_METHOD1(talk_base::scoped_refptr<AudioTrackInterface>,
                FindAudioTrack, const std::string&)
  PROXY_METHOD1(talk_base::scoped_refptr<VideoTrackInterface>,
                FindVideoTrack, const std::string&)
  PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*)
  PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*)
  PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*)
  PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_

View File

@ -0,0 +1,883 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/mediastreamsignaling.h"
#include <vector>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/videotrack.h"
// Label and track ids used for the default remote MediaStream created when
// the remote description signals media but contains no MSID information.
static const char kDefaultStreamLabel[] = "default";
static const char kDefaultAudioTrackLabel[] = "defaulta0";
static const char kDefaultVideoTrackLabel[] = "defaultv0";
namespace webrtc {
using talk_base::scoped_ptr;
using talk_base::scoped_refptr;
// Supported MediaConstraints.
// String keys for the constraints declared in MediaConstraintsInterface;
// these are matched against constraints supplied by the application.
const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
    "OfferToReceiveAudio";
const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
    "OfferToReceiveVideo";
const char MediaConstraintsInterface::kIceRestart[] =
    "IceRestart";
const char MediaConstraintsInterface::kUseRtpMux[] =
    "googUseRtpMUX";
const char MediaConstraintsInterface::kVoiceActivityDetection[] =
    "VoiceActivityDetection";
// Applies the media-related entries of |constraints| to |options|, counting
// how many mandatory constraints were satisfied. |is_answer| selects the
// defaults that apply when creating an answer rather than an offer.
// Returns false if a mandatory constraint is present but unsupported.
static bool ParseConstraints(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options, bool is_answer) {
  bool value;
  size_t mandatory_constraints_satisfied = 0;

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveAudio,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_audio| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_audio |= value;
  } else {
    // kOfferToReceiveAudio defaults to true according to spec.
    options->has_audio = true;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveVideo,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_video| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_video |= value;
  } else {
    // kOfferToReceiveVideo defaults to false according to spec. But
    // if it is an answer and video is offered, we should still accept video
    // per default.
    options->has_video |= is_answer;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kVoiceActivityDetection,
                     &value, &mandatory_constraints_satisfied)) {
    options->vad_enabled = value;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kUseRtpMux,
                     &value, &mandatory_constraints_satisfied)) {
    options->bundle_enabled = value;
  } else {
    // kUseRtpMux defaults to true according to spec.
    options->bundle_enabled = true;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kIceRestart,
                     &value, &mandatory_constraints_satisfied)) {
    options->transport_options.ice_restart = value;
  } else {
    // kIceRestart defaults to false according to spec.
    options->transport_options.ice_restart = false;
  }
  // A NULL |constraints| trivially satisfies all (zero) mandatory
  // constraints.
  if (!constraints) {
    return true;
  }
  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
}
// Returns true if at least one media content is present and
// |options.bundle_enabled| is true.
// Bundle will be enabled by default if at least one media content is present
// and the constraint kUseRtpMux has not disabled bundle.
static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
  if (!options.bundle_enabled) {
    return false;
  }
  return options.has_audio || options.has_video || options.has_data();
}
// Factory class for creating remote MediaStreams and MediaStreamTracks.
// All created objects are wrapped in proxies that marshal calls to
// |signaling_thread_|.
class RemoteMediaStreamFactory {
 public:
  explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread)
      : signaling_thread_(signaling_thread) {
  }

  // Creates a proxied MediaStream with the given label.
  talk_base::scoped_refptr<MediaStreamInterface> CreateMediaStream(
      const std::string& stream_label) {
    return MediaStreamProxy::Create(
        signaling_thread_, MediaStream::Create(stream_label));
  }

  // Creates a proxied audio track with |track_id| and adds it to |stream|.
  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(stream,
                                                                      track_id);
  }

  // Creates a proxied video track with |track_id| and adds it to |stream|.
  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(stream,
                                                                      track_id);
  }

 private:
  // Shared implementation: TI = interface type, T = concrete track type,
  // TP = proxy type. The track is created with a NULL source, marked live,
  // and added to |stream|. Returns NULL if the stream rejects the track.
  template <typename TI, typename T, typename TP>
  TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id) {
    talk_base::scoped_refptr<TI> track(
        TP::Create(signaling_thread_, T::Create(track_id, NULL)));
    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
    if (stream->AddTrack(track)) {
      return track;
    }
    return NULL;
  }

  talk_base::Thread* signaling_thread_;
};
// Constructs the signaling helper. |signaling_thread| is the thread remote
// stream/track proxies marshal calls to; |stream_observer| is notified
// about stream and track changes derived from session descriptions.
MediaStreamSignaling::MediaStreamSignaling(
    talk_base::Thread* signaling_thread,
    MediaStreamSignalingObserver* stream_observer)
    : signaling_thread_(signaling_thread),
      data_channel_factory_(NULL),
      stream_observer_(stream_observer),
      local_streams_(StreamCollection::Create()),
      remote_streams_(StreamCollection::Create()),
      remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread)),
      last_allocated_sctp_id_(0) {
  // No media is requested until local streams or constraints say otherwise.
  options_.has_video = false;
  options_.has_audio = false;
}
// Nothing to do explicitly; members release their resources on destruction.
MediaStreamSignaling::~MediaStreamSignaling() {
}
// Shuts down all remote media as if the underlying channels had closed:
// remote audio and video tracks are marked ended and data channels are told
// that the data engine is gone.
void MediaStreamSignaling::TearDown() {
  OnAudioChannelClose();
  OnVideoChannelClose();
  OnDataChannelClose();
}
bool MediaStreamSignaling::IsSctpIdAvailable(int id) const {
if (id < 0 || id > static_cast<int>(cricket::kMaxSctpSid))
return false;
for (DataChannels::const_iterator iter = data_channels_.begin();
iter != data_channels_.end();
++iter) {
if (iter->second->id() == id) {
return false;
}
}
return true;
}
// Gets the first id that has not been taken by existing data
// channels. Starting from 1.
// Returns false if no id can be allocated.
// TODO(jiayl): Update to some kind of even/odd random number selection when
// the rules are fully standardized.
bool MediaStreamSignaling::AllocateSctpId(int* id) {
  // Scan forward from the previously allocated id until a free id is found
  // or the valid range is exhausted.
  do {
    last_allocated_sctp_id_++;
  } while (last_allocated_sctp_id_ <= static_cast<int>(cricket::kMaxSctpSid) &&
           !IsSctpIdAvailable(last_allocated_sctp_id_));
  if (last_allocated_sctp_id_ > static_cast<int>(cricket::kMaxSctpSid)) {
    // Clamp back into range so the counter does not keep growing on
    // subsequent failed calls.
    last_allocated_sctp_id_ = cricket::kMaxSctpSid;
    return false;
  }
  *id = last_allocated_sctp_id_;
  return true;
}
// Registers |data_channel| under its label. Returns false (after logging an
// error) if a channel with the same label is already registered.
bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
  ASSERT(data_channel != NULL);
  if (data_channels_.find(data_channel->label()) != data_channels_.end()) {
    LOG(LS_ERROR) << "DataChannel with label " << data_channel->label()
                  << " already exists.";
    return false;
  }
  data_channels_[data_channel->label()] = data_channel;
  return true;
}
// Adds |local_stream| to the set of streams to send. Returns false if a
// stream with the same label is already registered. For tracks already
// described in the local SDP, the observer is immediately notified via
// OnLocalTrackSeen.
bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
  if (local_streams_->find(local_stream->label()) != NULL) {
    LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
                    << "already exist.";
    return false;
  }
  local_streams_->AddStream(local_stream);

  // Find tracks that have already been configured in SDP. This can occur if a
  // local session description that contains the MSID of these tracks is set
  // before AddLocalStream is called. It can also occur if the local session
  // description is not changed and RemoveLocalStream
  // is called and later AddLocalStream is called again with the same stream.
  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
       it != audio_tracks.end(); ++it) {
    TrackInfos::const_iterator track_info_it =
        local_audio_tracks_.find((*it)->id());
    if (track_info_it != local_audio_tracks_.end()) {
      const TrackInfo& info = track_info_it->second;
      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
                       cricket::MEDIA_TYPE_AUDIO);
    }
  }

  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
  for (VideoTrackVector::const_iterator it = video_tracks.begin();
       it != video_tracks.end(); ++it) {
    TrackInfos::const_iterator track_info_it =
        local_video_tracks_.find((*it)->id());
    if (track_info_it != local_video_tracks_.end()) {
      const TrackInfo& info = track_info_it->second;
      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
                       cricket::MEDIA_TYPE_VIDEO);
    }
  }
  return true;
}
// Removes |local_stream| from the set of streams to send and notifies the
// observer so the corresponding tracks can be torn down.
void MediaStreamSignaling::RemoveLocalStream(
    MediaStreamInterface* local_stream) {
  local_streams_->RemoveStream(local_stream);
  stream_observer_->OnRemoveLocalStream(local_stream);
}
// Fills |options| with the session options to use when creating an offer,
// combining the current local streams and data channels with |constraints|.
// Returns false if a mandatory constraint could not be satisfied.
// Note: the parsed constraints are merged into the member |options_|, so
// flags such as has_audio persist for subsequent offers.
bool MediaStreamSignaling::GetOptionsForOffer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  UpdateSessionOptions();
  if (!ParseConstraints(constraints, &options_, false)) {
    return false;
  }
  options_.bundle_enabled = EvaluateNeedForBundle(options_);
  *options = options_;
  return true;
}
// Fills |options| with the session options to use when creating an answer.
// Unlike GetOptionsForOffer, constraints are parsed into a local copy so
// the answer does not permanently alter the state used for offers.
// Returns false if a mandatory constraint could not be satisfied.
bool MediaStreamSignaling::GetOptionsForAnswer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  UpdateSessionOptions();

  // Copy the |options_| to not let the flag MediaSessionOptions::has_audio and
  // MediaSessionOptions::has_video affect subsequent offers.
  cricket::MediaSessionOptions current_options = options_;
  if (!ParseConstraints(constraints, &current_options, true)) {
    return false;
  }
  current_options.bundle_enabled = EvaluateNeedForBundle(current_options);
  *options = current_options;
  return true;
}
// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams
// the observer OnAddStream method is called. If a remote MediaStream is missing
// from the remote SessionDescription OnRemoveStream is called.
void MediaStreamSignaling::OnRemoteDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::SessionDescription* remote_desc = desc->description();
  talk_base::scoped_refptr<StreamCollection> new_streams(
      StreamCollection::Create());

  // Find all audio rtp streams and create corresponding remote AudioTracks
  // and MediaStreams.
  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
  if (audio_content) {
    const cricket::AudioContentDescription* desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    // A sendrecv direction with no declared streams means the remote side
    // sends media without MSID; a default audio track will be needed.
    remote_info_.default_audio_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Find all video rtp streams and create corresponding remote VideoTracks
  // and MediaStreams.
  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
  if (video_content) {
    const cricket::VideoContentDescription* desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_video_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Update the DataChannels with the information from the remote peer.
  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (data_desc->protocol() == cricket::kMediaProtocolDtlsSctp) {
      UpdateRemoteSctpDataChannels();
    } else {
      UpdateRemoteRtpDataChannels(data_desc->streams());
    }
  }

  // Iterate new_streams and notify the observer about new MediaStreams.
  for (size_t i = 0; i < new_streams->count(); ++i) {
    MediaStreamInterface* new_stream = new_streams->at(i);
    stream_observer_->OnAddRemoteStream(new_stream);
  }

  // Find removed MediaStreams.
  if (remote_info_.IsDefaultMediaStreamNeeded() &&
      remote_streams_->find(kDefaultStreamLabel) != NULL) {
    // The default media stream already exists. No need to do anything.
  } else {
    UpdateEndedRemoteMediaStreams();
    // Seeing any MSID-signaled stream tells us the remote supports MSID.
    remote_info_.msid_supported |= remote_streams_->count() > 0;
  }
  MaybeCreateDefaultStream();
}
// Updates the state of local tracks and data channels based on a new local
// session description. If a content is rejected, the corresponding remote
// tracks are marked as ended.
void MediaStreamSignaling::OnLocalDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(desc->description());
  if (audio_content) {
    if (audio_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
    }
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
  }

  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(desc->description());
  if (video_content) {
    if (video_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
    }
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateLocalTracks(video_desc->streams(), video_desc->type());
  }

  const cricket::ContentInfo* data_content =
      GetFirstDataContent(desc->description());
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (data_desc->protocol() == cricket::kMediaProtocolDtlsSctp) {
      UpdateLocalSctpDataChannels();
    } else {
      UpdateLocalRtpDataChannels(data_desc->streams());
    }
  }
}
// Called when the audio channel closes; marks all remote audio tracks ended.
void MediaStreamSignaling::OnAudioChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
}
// Called when the video channel closes; marks all remote video tracks ended.
void MediaStreamSignaling::OnVideoChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
}
// Notifies every registered DataChannel that the underlying data engine has
// closed.
void MediaStreamSignaling::OnDataChannelClose() {
  for (DataChannels::iterator it = data_channels_.begin();
       it != data_channels_.end(); ++it) {
    it->second->OnDataEngineClose();
  }
}
bool MediaStreamSignaling::GetRemoteAudioTrackSsrc(
const std::string& track_id, uint32* ssrc) const {
TrackInfos::const_iterator it = remote_audio_tracks_.find(track_id);
if (it == remote_audio_tracks_.end()) {
return false;
}
*ssrc = it->second.ssrc;
return true;
}
bool MediaStreamSignaling::GetRemoteVideoTrackSsrc(
const std::string& track_id, uint32* ssrc) const {
TrackInfos::const_iterator it = remote_video_tracks_.find(track_id);
if (it == remote_video_tracks_.end()) {
return false;
}
*ssrc = it->second.ssrc;
return true;
}
// Rebuilds |options_.streams| from the current local streams and data
// channels, and updates the has_audio/has_video flags. Note that the flags
// are only ever raised here, never cleared.
void MediaStreamSignaling::UpdateSessionOptions() {
  options_.streams.clear();
  if (local_streams_ != NULL) {
    for (size_t i = 0; i < local_streams_->count(); ++i) {
      MediaStreamInterface* stream = local_streams_->at(i);

      AudioTrackVector audio_tracks(stream->GetAudioTracks());
      if (!audio_tracks.empty()) {
        options_.has_audio = true;
      }

      // For each audio track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < audio_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
                           stream->label());
      }

      VideoTrackVector video_tracks(stream->GetVideoTracks());
      if (!video_tracks.empty()) {
        options_.has_video = true;
      }

      // For each video track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < video_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
                           stream->label());
      }
    }
  }

  // Check for data channels.
  DataChannels::const_iterator data_channel_it = data_channels_.begin();
  for (; data_channel_it != data_channels_.end(); ++data_channel_it) {
    const DataChannel* channel = data_channel_it->second;
    if (channel->state() == DataChannel::kConnecting ||
        channel->state() == DataChannel::kOpen) {
      // |streamid| and |sync_label| are both set to the DataChannel label
      // here so they can be signaled the same way as MediaStreams and Tracks.
      // For MediaStreams, the sync_label is the MediaStream label and the
      // track label is the same as |streamid|.
      const std::string& streamid = channel->label();
      const std::string& sync_label = channel->label();
      options_.AddStream(cricket::MEDIA_TYPE_DATA, streamid, sync_label);
    }
  }
}
// Brings the set of remote tracks for |media_type| in sync with |streams|
// from the remote session description. Tracks that disappeared trigger
// OnRemoteTrackRemoved; newly seen tracks trigger OnRemoteTrackSeen, and any
// MediaStream created along the way is added both to |remote_streams_| and
// to |new_streams|.
void MediaStreamSignaling::UpdateRemoteStreamsList(
    const cricket::StreamParamsVec& streams,
    cricket::MediaType media_type,
    StreamCollection* new_streams) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);

  // Find removed tracks, i.e. tracks where the track id or ssrc don't match
  // the new StreamParam.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    // Copy by value: the map entry may be erased below while the info is
    // still needed for the observer callback.
    TrackInfo info = track_it->second;
    cricket::StreamParams params;
    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
        params.id != info.track_id) {
      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
      // Post-increment keeps a valid iterator across the erase.
      current_tracks->erase(track_it++);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    uint32 ssrc = it->first_ssrc();
    talk_base::scoped_refptr<MediaStreamInterface> stream =
        remote_streams_->find(stream_label);
    if (!stream) {
      // This is a new MediaStream. Create a new remote MediaStream.
      stream = remote_stream_factory_->CreateMediaStream(stream_label);
      remote_streams_->AddStream(stream);
      new_streams->AddStream(stream);
    }
    // Renamed from |track_it| to avoid shadowing the iterator used in the
    // removal loop above.
    TrackInfos::iterator new_track_it = current_tracks->find(track_id);
    if (new_track_it == current_tracks->end()) {
      (*current_tracks)[track_id] =
          TrackInfo(stream_label, track_id, ssrc);
      OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
    }
  }
}
// Called when a new remote track (identified by |track_id| and |ssrc|) is
// seen in |stream_label|'s stream. Creates the track, adds it to the stream,
// and notifies the observer. NOTE(review): assumes the stream was already
// created by UpdateRemoteStreamsList, i.e. |stream| is non-NULL.
void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
                                             const std::string& track_id,
                                             uint32 ssrc,
                                             cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track =
        remote_stream_factory_->AddAudioTrack(stream, track_id);
    stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track =
        remote_stream_factory_->AddVideoTrack(stream, track_id);
    stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}
// Called when a remote track disappeared from the remote description:
// marks the track ended, removes it from its stream, and notifies the
// observer. NOTE(review): assumes both the stream and the track still exist
// (non-NULL) — verify against callers.
void MediaStreamSignaling::OnRemoteTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    talk_base::scoped_refptr<AudioTrackInterface> audio_track =
        stream->FindAudioTrack(track_id);
    // The local scoped_refptr keeps the track alive while observers are
    // notified after it is removed from the stream.
    audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
    stream->RemoveTrack(audio_track);
    stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    talk_base::scoped_refptr<VideoTrackInterface> video_track =
        stream->FindVideoTrack(track_id);
    video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
    stream->RemoveTrack(video_track);
    stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track);
  } else {
    ASSERT(false && "Invalid media type");
  }
}
// Marks every currently negotiated remote track of |media_type| as ended.
// Used when the corresponding media section is rejected.
// Guards against NULL from find()/FindAudioTrack()/FindVideoTrack() instead
// of dereferencing unconditionally.
void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);
  for (TrackInfos::iterator track_it = current_tracks->begin();
       track_it != current_tracks->end(); ++track_it) {
    const TrackInfo& info = track_it->second;
    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
    if (!stream) {
      // The stream has already been removed; nothing to end.
      continue;
    }
    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
  }
}
void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() {
std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove;
for (size_t i = 0; i < remote_streams_->count(); ++i) {
MediaStreamInterface*stream = remote_streams_->at(i);
if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
streams_to_remove.push_back(stream);
}
}
std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it;
for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) {
remote_streams_->RemoveStream(*it);
stream_observer_->OnRemoveRemoteStream(*it);
}
}
void MediaStreamSignaling::MaybeCreateDefaultStream() {
if (!remote_info_.IsDefaultMediaStreamNeeded())
return;
bool default_created = false;
scoped_refptr<MediaStreamInterface> default_remote_stream =
remote_streams_->find(kDefaultStreamLabel);
if (default_remote_stream == NULL) {
default_created = true;
default_remote_stream =
remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
remote_streams_->AddStream(default_remote_stream);
}
if (remote_info_.default_audio_track_needed &&
default_remote_stream->GetAudioTracks().size() == 0) {
remote_audio_tracks_[kDefaultAudioTrackLabel] =
TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0);
OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
cricket::MEDIA_TYPE_AUDIO);
}
if (remote_info_.default_video_track_needed &&
default_remote_stream->GetVideoTracks().size() == 0) {
remote_video_tracks_[kDefaultVideoTrackLabel] =
TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0);
OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
cricket::MEDIA_TYPE_VIDEO);
}
if (default_created) {
stream_observer_->OnAddRemoteStream(default_remote_stream);
}
}
// Maps a cricket media type to the bookkeeping map of negotiated remote
// tracks of that type. Returns NULL (after asserting) for unknown types.
MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks(
    cricket::MediaType type) {
  switch (type) {
    case cricket::MEDIA_TYPE_AUDIO:
      return &remote_audio_tracks_;
    case cricket::MEDIA_TYPE_VIDEO:
      return &remote_video_tracks_;
    default:
      ASSERT(false && "Unknown MediaType");
      return NULL;
  }
}
// Maps a cricket media type to the bookkeeping map of negotiated local
// tracks of that type. Only audio and video are valid here.
MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks(
    cricket::MediaType media_type) {
  ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO ||
         media_type == cricket::MEDIA_TYPE_VIDEO);
  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    return &local_audio_tracks_;
  }
  return &local_video_tracks_;
}
// Diffs |streams| against the currently negotiated local tracks of
// |media_type|: fires OnLocalTrackRemoved for tracks whose ssrc vanished or
// whose track id changed, and OnLocalTrackSeen for tracks appearing for the
// first time.
void MediaStreamSignaling::UpdateLocalTracks(
    const std::vector<cricket::StreamParams>& streams,
    cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetLocalTracks(media_type);
  // First pass: drop tracks no longer present (or renamed) in |streams|.
  TrackInfos::iterator existing = current_tracks->begin();
  while (existing != current_tracks->end()) {
    const TrackInfo info = existing->second;  // Copy: erase invalidates it.
    cricket::StreamParams params;
    const bool still_present =
        cricket::GetStreamBySsrc(streams, info.ssrc, &params) &&
        params.id == info.track_id;
    if (still_present) {
      ++existing;
    } else {
      OnLocalTrackRemoved(info.stream_label, info.track_id, media_type);
      current_tracks->erase(existing++);
    }
  }
  // Second pass: register tracks seen for the first time.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // |sync_label| carries the MediaStream label; |id| carries the track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    if (current_tracks->find(track_id) == current_tracks->end()) {
      (*current_tracks)[track_id] =
          TrackInfo(stream_label, track_id, it->first_ssrc());
      OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
    }
  }
}
// Called when a local track appears for the first time in a local session
// description. Maps the rtp stream to a track in |local_streams_| and
// notifies the observer. Logs a warning and returns if the stream or track
// is unknown, i.e. the SDP does not match the streams added with
// AddLocalStream.
// Fix: the warning messages previously read "with id , <id>" — a stray comma
// was embedded in the literal before the streamed track id.
void MediaStreamSignaling::OnLocalTrackSeen(
    const std::string& stream_label,
    const std::string& track_id,
    uint32 ssrc,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    LOG(LS_WARNING) << "An unknown local MediaStream with label "
                    << stream_label << " has been configured.";
    return;
  }
  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      LOG(LS_WARNING) << "An unknown local AudioTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      LOG(LS_WARNING) << "An unknown local VideoTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}
void MediaStreamSignaling::OnLocalTrackRemoved(
const std::string& stream_label,
const std::string& track_id,
cricket::MediaType media_type) {
MediaStreamInterface* stream = local_streams_->find(stream_label);
if (!stream) {
// This is the normal case. Ie RemoveLocalStream has been called and the
// SessionDescriptions has been renegotiated.
return;
}
// A track has been removed from the SessionDescription but the MediaStream
// is still associated with MediaStreamSignaling. This only occurs if the SDP
// doesn't match with the calls to AddLocalStream and RemoveLocalStream.
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
if (!audio_track) {
return;
}
stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track);
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
if (!video_track) {
return;
}
stream_observer_->OnRemoveLocalVideoTrack(stream, video_track);
} else {
ASSERT(false && "Invalid media type.");
}
}
// Updates the send SSRC of each local RTP data channel listed in |streams|
// and closes channels that are no longer listed.
void MediaStreamSignaling::UpdateLocalRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // Data channels reuse the MediaStream/track naming scheme: |sync_label|
    // holds the data channel label and the track label equals |streamid|.
    const std::string& channel_label = it->sync_label;
    DataChannels::iterator found = data_channels_.find(channel_label);
    if (!VERIFY(found != data_channels_.end())) {
      continue;
    }
    // Record the SSRC this channel must use when sending.
    found->second->SetSendSsrc(it->first_ssrc());
    existing_channels.push_back(found->first);
  }
  UpdateClosingDataChannels(existing_channels, true);
}
// Creates or updates remote RTP data channels announced in |streams| and
// closes channels the remote peer no longer announces.
void MediaStreamSignaling::UpdateRemoteRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The channel label is the mslabel when present, otherwise the SSRC.
    // Ex a=ssrc:444330170 mslabel:test1.
    const std::string label = it->sync_label.empty()
        ? talk_base::ToString(it->first_ssrc())
        : it->sync_label;
    DataChannels::iterator found = data_channels_.find(label);
    if (found != data_channels_.end()) {
      found->second->SetReceiveSsrc(it->first_ssrc());
    } else {
      // A data channel we have not seen before.
      CreateRemoteDataChannel(label, it->first_ssrc());
    }
    existing_channels.push_back(label);
  }
  UpdateClosingDataChannels(existing_channels, false);
}
// Closes every data channel whose label is missing from |active_channels|.
// For a local update the channel loses its send SSRC; for a remote update
// the remote peer is considered to have requested the close. Channels that
// reach kClosed are dropped from |data_channels_|.
void MediaStreamSignaling::UpdateClosingDataChannels(
    const std::vector<std::string>& active_channels, bool is_local_update) {
  DataChannels::iterator it = data_channels_.begin();
  while (it != data_channels_.end()) {
    DataChannel* data_channel = it->second;
    if (std::find(active_channels.begin(), active_channels.end(),
                  data_channel->label()) != active_channels.end()) {
      ++it;
      continue;
    }
    if (is_local_update) {
      data_channel->SetSendSsrc(0);
    } else {
      data_channel->RemotePeerRequestClose();
    }
    if (data_channel->state() == DataChannel::kClosed) {
      // erase(it++) keeps the iterator valid for std::map. The previous
      // implementation restarted from begin() after every erase, which was
      // O(n^2) and re-invoked SetSendSsrc(0)/RemotePeerRequestClose() on
      // inactive channels that had already been processed.
      data_channels_.erase(it++);
    } else {
      ++it;
    }
  }
}
void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
uint32 remote_ssrc) {
if (!data_channel_factory_) {
LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
<< "are not supported.";
return;
}
scoped_refptr<DataChannel> channel(
data_channel_factory_->CreateDataChannel(label, NULL));
channel->SetReceiveSsrc(remote_ssrc);
stream_observer_->OnAddDataChannel(channel);
}
// Stamps each SCTP data channel's id onto its send side as the SSRC value.
void MediaStreamSignaling::UpdateLocalSctpDataChannels() {
  for (DataChannels::iterator it = data_channels_.begin();
       it != data_channels_.end(); ++it) {
    it->second->SetSendSsrc(it->second->id());
  }
}
// Stamps each SCTP data channel's id onto its receive side as the SSRC value.
void MediaStreamSignaling::UpdateRemoteSctpDataChannels() {
  for (DataChannels::iterator it = data_channels_.begin();
       it != data_channels_.end(); ++it) {
    it->second->SetReceiveSsrc(it->second->id());
  }
}
} // namespace webrtc

View File

@ -0,0 +1,385 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_
#define TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_
#include <string>
#include <vector>
#include <map>
#include "talk/app/webrtc/datachannel.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/base/scoped_ref_ptr.h"
#include "talk/session/media/mediasession.h"
namespace talk_base {
class Thread;
} // namespace talk_base
namespace webrtc {
class RemoteMediaStreamFactory;
// A MediaStreamSignalingObserver is notified when events happen to
// MediaStreams, MediaStreamTracks or DataChannels associated with the observed
// MediaStreamSignaling object. The notifications identify the stream, track or
// channel.
class MediaStreamSignalingObserver {
 public:
  // Triggered when the remote SessionDescription has a new stream.
  virtual void OnAddRemoteStream(MediaStreamInterface* stream) = 0;
  // Triggered when the remote SessionDescription removes a stream.
  virtual void OnRemoveRemoteStream(MediaStreamInterface* stream) = 0;
  // Triggered when the remote SessionDescription has a new data channel.
  virtual void OnAddDataChannel(DataChannelInterface* data_channel) = 0;
  // Triggered when the remote SessionDescription has a new audio track.
  virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
                                     AudioTrackInterface* audio_track,
                                     uint32 ssrc) = 0;
  // Triggered when the remote SessionDescription has a new video track.
  virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
                                     VideoTrackInterface* video_track,
                                     uint32 ssrc) = 0;
  // Triggered when the remote SessionDescription has removed an audio track.
  virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
                                        AudioTrackInterface* audio_track) = 0;
  // Triggered when the remote SessionDescription has removed a video track.
  virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
                                        VideoTrackInterface* video_track) = 0;
  // Triggered when the local SessionDescription has a new audio track.
  virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
                                    AudioTrackInterface* audio_track,
                                    uint32 ssrc) = 0;
  // Triggered when the local SessionDescription has a new video track.
  virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
                                    VideoTrackInterface* video_track,
                                    uint32 ssrc) = 0;
  // Triggered when the local SessionDescription has removed an audio track.
  virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
                                       AudioTrackInterface* audio_track) = 0;
  // Triggered when the local SessionDescription has removed a video track.
  virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
                                       VideoTrackInterface* video_track) = 0;
  // Triggered when RemoveLocalStream is called. |stream| is no longer used
  // when negotiating and all tracks in |stream| should stop providing data to
  // this PeerConnection. This doesn't mean that the local session description
  // has changed and OnRemoveLocalAudioTrack and OnRemoveLocalVideoTrack is not
  // called for each individual track.
  virtual void OnRemoveLocalStream(MediaStreamInterface* stream) = 0;
 protected:
  // Protected and non-virtual: observers are never deleted through this
  // interface pointer.
  ~MediaStreamSignalingObserver() {}
};
// MediaStreamSignaling works as glue between MediaStreams and the cricket
// classes for SessionDescriptions.
// It is used for creating cricket::MediaSessionOptions given the local
// MediaStreams and data channels.
//
// It is responsible for creating remote MediaStreams given a remote
// SessionDescription and creating cricket::MediaSessionOptions given
// local MediaStreams.
//
// To signal that a DataChannel should be established:
// 1. Call AddDataChannel with the new DataChannel. Next time
//    GetMediaSessionOptions will include the description of the DataChannel.
// 2. When a local session description is set, call UpdateLocalStreams with the
//    session description. This will set the SSRC used for sending data on
//    this DataChannel.
// 3. When remote session description is set, call UpdateRemoteStream with the
//    session description. If the DataChannel label and a SSRC is included in
//    the description, the DataChannel is updated with SSRC that will be used
//    for receiving data.
// 4. When both the local and remote SSRC of a DataChannel is set the state of
//    the DataChannel change to kOpen.
//
// To setup a DataChannel initialized by the remote end.
// 1. When remote session description is set, call UpdateRemoteStream with the
//    session description. If a label and a SSRC of a new DataChannel is found
//    MediaStreamSignalingObserver::OnAddDataChannel with the label and SSRC is
//    triggered.
// 2. Create a DataChannel instance with the label and set the remote SSRC.
// 3. Call AddDataChannel with this new DataChannel. GetMediaSessionOptions
//    will include the description of the DataChannel.
// 4. Create a local session description and call UpdateLocalStreams. This will
//    set the local SSRC used by the DataChannel.
// 5. When both the local and remote SSRC of a DataChannel is set the state of
//    the DataChannel change to kOpen.
//
// To close a DataChannel:
// 1. Call DataChannel::Close. This will change the state of the DataChannel to
//    kClosing. GetMediaSessionOptions will not
//    include the description of the DataChannel.
// 2. When a local session description is set, call UpdateLocalStreams with the
//    session description. The description will no longer contain the
//    DataChannel label or SSRC.
// 3. When remote session description is set, call UpdateRemoteStream with the
//    session description. The description will no longer contain the
//    DataChannel label or SSRC. The DataChannel SSRC is updated with SSRC=0.
//    The DataChannel change state to kClosed.
class MediaStreamSignaling {
 public:
  MediaStreamSignaling(talk_base::Thread* signaling_thread,
                       MediaStreamSignalingObserver* stream_observer);
  virtual ~MediaStreamSignaling();
  // Notify all referenced objects that MediaStreamSignaling will be teared
  // down. This method must be called prior to the dtor.
  void TearDown();
  // Set a factory for creating data channels that are initiated by the remote
  // peer.
  void SetDataChannelFactory(DataChannelFactory* data_channel_factory) {
    data_channel_factory_ = data_channel_factory;
  }
  // Checks if |id| is available to be assigned to a new SCTP data channel.
  bool IsSctpIdAvailable(int id) const;
  // Gets the first available SCTP id that is not assigned to any existing
  // data channels.
  bool AllocateSctpId(int* id);
  // Adds |local_stream| to the collection of known MediaStreams that will be
  // offered in a SessionDescription.
  bool AddLocalStream(MediaStreamInterface* local_stream);
  // Removes |local_stream| from the collection of known MediaStreams that will
  // be offered in a SessionDescription.
  void RemoveLocalStream(MediaStreamInterface* local_stream);
  // Adds |data_channel| to the collection of DataChannels that will be
  // offered in a SessionDescription.
  bool AddDataChannel(DataChannel* data_channel);
  // Returns a MediaSessionOptions struct with options decided by
  // |constraints|, the local MediaStreams and DataChannels.
  virtual bool GetOptionsForOffer(
      const MediaConstraintsInterface* constraints,
      cricket::MediaSessionOptions* options);
  // Returns a MediaSessionOptions struct with options decided by
  // |constraints|, the local MediaStreams and DataChannels.
  virtual bool GetOptionsForAnswer(
      const MediaConstraintsInterface* constraints,
      cricket::MediaSessionOptions* options);
  // Called when the remote session description has changed. The purpose is to
  // update remote MediaStreams and DataChannels with the current
  // session state.
  // If the remote SessionDescription contain information about a new remote
  // MediaStreams a new remote MediaStream is created and
  // MediaStreamSignalingObserver::OnAddStream is called.
  // If a remote MediaStream is missing from
  // the remote SessionDescription MediaStreamSignalingObserver::OnRemoveStream
  // is called.
  // If the SessionDescription contains information about a new DataChannel,
  // MediaStreamSignalingObserver::OnAddDataChannel is called with the
  // DataChannel.
  void OnRemoteDescriptionChanged(const SessionDescriptionInterface* desc);
  // Called when the local session description has changed. The purpose is to
  // update local and remote MediaStreams and DataChannels with the current
  // session state.
  // If |desc| indicates that the media type should be rejected, the method
  // ends the remote MediaStreamTracks.
  // It also updates local DataChannels with information about its local SSRC.
  void OnLocalDescriptionChanged(const SessionDescriptionInterface* desc);
  // Called when the audio channel closes.
  void OnAudioChannelClose();
  // Called when the video channel closes.
  void OnVideoChannelClose();
  // Called when the data channel closes.
  void OnDataChannelClose();
  // Returns the SSRC for a given track.
  bool GetRemoteAudioTrackSsrc(const std::string& track_id,
                               uint32* ssrc) const;
  bool GetRemoteVideoTrackSsrc(const std::string& track_id,
                               uint32* ssrc) const;
  // Returns all current known local MediaStreams.
  // Fix: use .get() for consistency with remote_streams() below.
  StreamCollectionInterface* local_streams() const {
    return local_streams_.get();
  }
  // Returns all current remote MediaStreams.
  StreamCollectionInterface* remote_streams() const {
    return remote_streams_.get();
  }

 private:
  struct RemotePeerInfo {
    RemotePeerInfo()
        : msid_supported(false),
          default_audio_track_needed(false),
          default_video_track_needed(false) {
    }
    // True if it has been discovered that the remote peer support MSID.
    bool msid_supported;
    // The remote peer indicates in the session description that audio will be
    // sent but no MSID is given.
    bool default_audio_track_needed;
    // The remote peer indicates in the session description that video will be
    // sent but no MSID is given.
    bool default_video_track_needed;
    // Const: a pure query of the three flags above.
    bool IsDefaultMediaStreamNeeded() const {
      return !msid_supported && (default_audio_track_needed ||
                                 default_video_track_needed);
    }
  };

  // Bookkeeping entry for a negotiated track.
  struct TrackInfo {
    TrackInfo() : ssrc(0) {}
    // Fix: |track_id| is now passed by const reference; the previous
    // by-value const std::string forced a needless copy.
    TrackInfo(const std::string& stream_label,
              const std::string& track_id,
              uint32 ssrc)
        : stream_label(stream_label),
          track_id(track_id),
          ssrc(ssrc) {
    }
    std::string stream_label;
    std::string track_id;
    uint32 ssrc;
  };
  typedef std::map<std::string, TrackInfo> TrackInfos;

  void UpdateSessionOptions();
  // Makes sure a MediaStream Track is created for each StreamParam in
  // |streams|. |media_type| is the type of the |streams| and can be either
  // audio or video.
  // If a new MediaStream is created it is added to |new_streams|.
  void UpdateRemoteStreamsList(
      const std::vector<cricket::StreamParams>& streams,
      cricket::MediaType media_type,
      StreamCollection* new_streams);
  // Triggered when a remote track has been seen for the first time in a remote
  // session description. It creates a remote MediaStreamTrackInterface
  // implementation and triggers MediaStreamSignaling::OnAddRemoteAudioTrack or
  // MediaStreamSignaling::OnAddRemoteVideoTrack.
  void OnRemoteTrackSeen(const std::string& stream_label,
                         const std::string& track_id,
                         uint32 ssrc,
                         cricket::MediaType media_type);
  // Triggered when a remote track has been removed from a remote session
  // description. It removes the remote track with id |track_id| from a remote
  // MediaStream and triggers MediaStreamSignaling::OnRemoveRemoteAudioTrack or
  // MediaStreamSignaling::OnRemoveRemoteVideoTrack.
  void OnRemoteTrackRemoved(const std::string& stream_label,
                            const std::string& track_id,
                            cricket::MediaType media_type);
  // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
  // tracks of type |media_type|.
  void RejectRemoteTracks(cricket::MediaType media_type);
  // Finds remote MediaStreams without any tracks and removes them from
  // |remote_streams_| and notifies the observer that the MediaStream no longer
  // exist.
  void UpdateEndedRemoteMediaStreams();
  void MaybeCreateDefaultStream();
  TrackInfos* GetRemoteTracks(cricket::MediaType type);
  // Returns a map of currently negotiated LocalTrackInfo of type |type|.
  TrackInfos* GetLocalTracks(cricket::MediaType type);
  bool FindLocalTrack(const std::string& track_id, cricket::MediaType type);
  // Loops through the vector of |streams| and finds added and removed
  // StreamParams since last time this method was called.
  // For each new or removed StreamParam NotifyLocalTrackAdded or
  // NotifyLocalTrackRemoved in invoked.
  void UpdateLocalTracks(const std::vector<cricket::StreamParams>& streams,
                         cricket::MediaType media_type);
  // Triggered when a local track has been seen for the first time in a local
  // session description.
  // This method triggers MediaStreamSignaling::OnAddLocalAudioTrack or
  // MediaStreamSignaling::OnAddLocalVideoTrack if the rtp streams in the local
  // SessionDescription can be mapped to a MediaStreamTrack in a MediaStream in
  // |local_streams_|
  void OnLocalTrackSeen(const std::string& stream_label,
                        const std::string& track_id,
                        uint32 ssrc,
                        cricket::MediaType media_type);
  // Triggered when a local track has been removed from a local session
  // description.
  // This method triggers MediaStreamSignaling::OnRemoveLocalAudioTrack or
  // MediaStreamSignaling::OnRemoveLocalVideoTrack if a stream has been removed
  // from the local SessionDescription and the stream can be mapped to a
  // MediaStreamTrack in a MediaStream in |local_streams_|.
  void OnLocalTrackRemoved(const std::string& stream_label,
                           const std::string& track_id,
                           cricket::MediaType media_type);
  void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
  void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
  void UpdateClosingDataChannels(
      const std::vector<std::string>& active_channels, bool is_local_update);
  void CreateRemoteDataChannel(const std::string& label, uint32 remote_ssrc);
  void UpdateLocalSctpDataChannels();
  void UpdateRemoteSctpDataChannels();

  RemotePeerInfo remote_info_;
  talk_base::Thread* signaling_thread_;
  DataChannelFactory* data_channel_factory_;
  cricket::MediaSessionOptions options_;
  MediaStreamSignalingObserver* stream_observer_;
  talk_base::scoped_refptr<StreamCollection> local_streams_;
  talk_base::scoped_refptr<StreamCollection> remote_streams_;
  talk_base::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;

  TrackInfos remote_audio_tracks_;
  TrackInfos remote_video_tracks_;
  TrackInfos local_audio_tracks_;
  TrackInfos local_video_tracks_;

  int last_allocated_sctp_id_;
  typedef std::map<std::string, talk_base::scoped_refptr<DataChannel> >
      DataChannels;
  DataChannels data_channels_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_

View File

@ -0,0 +1,949 @@
/*
* libjingle
* Copyright 2012, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/stringutils.h"
#include "talk/base/thread.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"
// Stream labels and track ids used throughout the tests below. They match
// the ids embedded in the reference SDP strings that follow.
static const char kStreams[][8] = {"stream1", "stream2"};
static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
using webrtc::AudioTrack;
using webrtc::AudioTrackInterface;
using webrtc::AudioTrackVector;
using webrtc::VideoTrack;
using webrtc::VideoTrackInterface;
using webrtc::VideoTrackVector;
using webrtc::DataChannelInterface;
using webrtc::FakeConstraints;
using webrtc::IceCandidateInterface;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::SdpParseError;
using webrtc::SessionDescriptionInterface;
using webrtc::StreamCollection;
using webrtc::StreamCollectionInterface;
// Reference SDP with a MediaStream with label "stream1", an audio track with
// id "audiotrack0" and a video track with id "videotrack0".
static const char kSdpStringWithStream1[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 mslabel:stream1\r\n"
    "a=ssrc:1 label:audiotrack0\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 mslabel:stream1\r\n"
    "a=ssrc:2 label:videotrack0\r\n";
// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
// Each MediaStream has one audio track and one video track.
// This uses MSID.
static const char kSdpStringWith2Stream[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS stream1 stream2\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
    "a=ssrc:3 cname:stream2\r\n"
    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    // NOTE(review): clock rate 0 differs from the VP8/90000 used in every
    // other fixture in this file — confirm this is intentional.
    "a=rtpmap:120 VP8/0\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n"
    "a=ssrc:4 cname:stream2\r\n"
    "a=ssrc:4 msid:stream2 videotrack1\r\n";
// Reference SDP without MediaStreams. Msid is not supported.
static const char kSdpStringWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
// Reference SDP without MediaStreams. Msid is supported.
static const char kSdpStringWithMsidWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    // NOTE(review): "a:" looks like a typo for "a=" (SDP attribute lines are
    // "a=..."), which would make this line malformed — confirm whether the
    // tests rely on the current literal before changing it.
    "a:msid-semantic: WMS\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
// Reference SDP without MediaStreams and audio only.
static const char kSdpStringWithoutStreamsAudioOnly[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";
// Building blocks for composing SDP in tests: session prefix, one media
// section per type, and per-track ssrc/msid attribute groups.
static const char kSdpStringInit[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n";
static const char kSdpStringAudio[] =
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";
static const char kSdpStringVideo[] =
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
static const char kSdpStringMs1Audio0[] =
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n";
static const char kSdpStringMs1Video0[] =
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n";
static const char kSdpStringMs1Audio1[] =
    "a=ssrc:3 cname:stream1\r\n"
    "a=ssrc:3 msid:stream1 audiotrack1\r\n";
static const char kSdpStringMs1Video1[] =
    "a=ssrc:4 cname:stream1\r\n"
    "a=ssrc:4 msid:stream1 videotrack1\r\n";
// Checks that every track of every stream in |collection| appears, in order,
// in |options.streams|, and that the has_audio/has_video flags are set for
// each media type that is present.
static void VerifyMediaOptions(StreamCollectionInterface* collection,
                               const cricket::MediaSessionOptions& options) {
  if (collection == NULL) {
    return;
  }

  size_t next_stream = 0;
  for (size_t i = 0; i < collection->count(); ++i) {
    MediaStreamInterface* stream = collection->at(i);

    AudioTrackVector audio_tracks = stream->GetAudioTracks();
    ASSERT_GE(options.streams.size(), next_stream + audio_tracks.size());
    for (size_t j = 0; j < audio_tracks.size(); ++j) {
      webrtc::AudioTrackInterface* audio_track = audio_tracks[j];
      EXPECT_EQ(options.streams[next_stream].sync_label, stream->label());
      EXPECT_EQ(options.streams[next_stream++].id, audio_track->id());
      EXPECT_TRUE(options.has_audio);
    }

    VideoTrackVector video_tracks = stream->GetVideoTracks();
    ASSERT_GE(options.streams.size(), next_stream + video_tracks.size());
    for (size_t j = 0; j < video_tracks.size(); ++j) {
      webrtc::VideoTrackInterface* video_track = video_tracks[j];
      EXPECT_EQ(options.streams[next_stream].sync_label, stream->label());
      EXPECT_EQ(options.streams[next_stream++].id, video_track->id());
      EXPECT_TRUE(options.has_video);
    }
  }
}
// Returns true iff |s1| and |s2| contain the same streams in the same order,
// with identical labels and identically-ordered track ids. NULL collections
// never compare equal.
static bool CompareStreamCollections(StreamCollectionInterface* s1,
                                     StreamCollectionInterface* s2) {
  if (s1 == NULL || s2 == NULL || s1->count() != s2->count())
    return false;

  for (size_t i = 0; i != s1->count(); ++i) {
    MediaStreamInterface* stream1 = s1->at(i);
    MediaStreamInterface* stream2 = s2->at(i);
    if (stream1->label() != stream2->label())
      return false;

    webrtc::AudioTrackVector audio1 = stream1->GetAudioTracks();
    webrtc::AudioTrackVector audio2 = stream2->GetAudioTracks();
    if (audio1.size() != audio2.size())
      return false;
    for (size_t j = 0; j != audio1.size(); ++j) {
      if (audio1[j]->id() != audio2[j]->id())
        return false;
    }

    webrtc::VideoTrackVector video1 = stream1->GetVideoTracks();
    webrtc::VideoTrackVector video2 = stream2->GetVideoTracks();
    if (video1.size() != video2.size())
      return false;
    for (size_t j = 0; j != video1.size(); ++j) {
      if (video1[j]->id() != video2[j]->id())
        return false;
    }
  }
  return true;
}
class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
public:
MockSignalingObserver()
: remote_media_streams_(StreamCollection::Create()) {
}
virtual ~MockSignalingObserver() {
}
// New remote stream have been discovered.
virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) {
remote_media_streams_->AddStream(remote_stream);
}
// Remote stream is no longer available.
virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) {
remote_media_streams_->RemoveStream(remote_stream);
}
virtual void OnAddDataChannel(DataChannelInterface* data_channel) {
}
virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32 ssrc) {
AddTrack(&local_audio_tracks_, stream, audio_track, ssrc);
}
virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
uint32 ssrc) {
AddTrack(&local_video_tracks_, stream, video_track, ssrc);
}
virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track) {
RemoveTrack(&local_audio_tracks_, stream, audio_track);
}
virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track) {
RemoveTrack(&local_video_tracks_, stream, video_track);
}
virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32 ssrc) {
AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc);
}
virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
uint32 ssrc) {
AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
}
virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track) {
RemoveTrack(&remote_audio_tracks_, stream, audio_track);
}
virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track) {
RemoveTrack(&remote_video_tracks_, stream, video_track);
}
virtual void OnRemoveLocalStream(MediaStreamInterface* stream) {
}
MediaStreamInterface* RemoteStream(const std::string& label) {
return remote_media_streams_->find(label);
}
StreamCollectionInterface* remote_streams() const {
return remote_media_streams_;
}
size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); }
void VerifyRemoteAudioTrack(const std::string& stream_label,
const std::string& track_id,
uint32 ssrc) {
VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
}
size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); }
void VerifyRemoteVideoTrack(const std::string& stream_label,
const std::string& track_id,
uint32 ssrc) {
VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
}
size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
void VerifyLocalAudioTrack(const std::string& stream_label,
const std::string& track_id,
uint32 ssrc) {
VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
}
size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); }
void VerifyLocalVideoTrack(const std::string& stream_label,
const std::string& track_id,
uint32 ssrc) {
VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
}
private:
struct TrackInfo {
TrackInfo() {}
TrackInfo(const std::string& stream_label, const std::string track_id,
uint32 ssrc)
: stream_label(stream_label),
track_id(track_id),
ssrc(ssrc) {
}
std::string stream_label;
std::string track_id;
uint32 ssrc;
};
typedef std::map<std::string, TrackInfo> TrackInfos;
void AddTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
MediaStreamTrackInterface* track,
uint32 ssrc) {
(*track_infos)[track->id()] = TrackInfo(stream->label(), track->id(),
ssrc);
}
void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
MediaStreamTrackInterface* track) {
TrackInfos::iterator it = track_infos->find(track->id());
ASSERT_TRUE(it != track_infos->end());
ASSERT_EQ(it->second.stream_label, stream->label());
track_infos->erase(it);
}
void VerifyTrack(const TrackInfos& track_infos,
const std::string& stream_label,
const std::string& track_id,
uint32 ssrc) {
TrackInfos::const_iterator it = track_infos.find(track_id);
ASSERT_TRUE(it != track_infos.end());
EXPECT_EQ(stream_label, it->second.stream_label);
EXPECT_EQ(ssrc, it->second.ssrc);
}
TrackInfos remote_audio_tracks_;
TrackInfos remote_video_tracks_;
TrackInfos local_audio_tracks_;
TrackInfos local_video_tracks_;
talk_base::scoped_refptr<StreamCollection> remote_media_streams_;
};
// Subclass that binds MediaStreamSignaling to the current thread and exposes
// the protected methods under test.
class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling {
 public:
  explicit MediaStreamSignalingForTest(MockSignalingObserver* observer)
      : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer) {
  }  // BUG FIX: removed the stray ';' that followed the constructor body.

  using webrtc::MediaStreamSignaling::GetOptionsForOffer;
  using webrtc::MediaStreamSignaling::GetOptionsForAnswer;
  using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged;
  using webrtc::MediaStreamSignaling::remote_streams;
};
// Test fixture: creates a fresh MediaStreamSignalingForTest wired to a
// MockSignalingObserver before every test.
class MediaStreamSignalingTest: public testing::Test {
 protected:
  virtual void SetUp() {
    observer_.reset(new MockSignalingObserver());
    signaling_.reset(new MediaStreamSignalingForTest(observer_.get()));
  }

  // Create a collection of streams.
  // CreateStreamCollection(1) creates a collection that
  // corresponds to kSdpString1.
  // CreateStreamCollection(2) corresponds to kSdpString2.
  // Each stream kStreams[i] carries one audio track kAudioTracks[i] and one
  // video track kVideoTracks[i].
  talk_base::scoped_refptr<StreamCollection>
  CreateStreamCollection(int number_of_streams) {
    talk_base::scoped_refptr<StreamCollection> local_collection(
        StreamCollection::Create());

    for (int i = 0; i < number_of_streams; ++i) {
      talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
          webrtc::MediaStream::Create(kStreams[i]));

      // Add a local audio track.
      talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
          webrtc::AudioTrack::Create(kAudioTracks[i], NULL));
      stream->AddTrack(audio_track);

      // Add a local video track.
      talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          webrtc::VideoTrack::Create(kVideoTracks[i], NULL));
      stream->AddTrack(video_track);

      local_collection->AddStream(stream);
    }
    return local_collection;
  }

  // This function creates a MediaStream with label kStreams[0] and
  // |number_of_audio_tracks| and |number_of_video_tracks| tracks and the
  // corresponding SessionDescriptionInterface. The SessionDescriptionInterface
  // is returned in |desc| (caller takes ownership) and the MediaStream is
  // stored in |reference_collection_|. At most two tracks of each kind are
  // supported, matching the kSdpStringMs1* fragments above.
  void CreateSessionDescriptionAndReference(
      size_t number_of_audio_tracks,
      size_t number_of_video_tracks,
      SessionDescriptionInterface** desc) {
    ASSERT_TRUE(desc != NULL);
    ASSERT_LE(number_of_audio_tracks, 2u);
    ASSERT_LE(number_of_video_tracks, 2u);

    reference_collection_ = StreamCollection::Create();
    std::string sdp_ms1 = std::string(kSdpStringInit);

    std::string mediastream_label = kStreams[0];

    talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
        webrtc::MediaStream::Create(mediastream_label));
    reference_collection_->AddStream(stream);

    // Append the audio m-line, then one a=ssrc section per audio track.
    if (number_of_audio_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringAudio);
      sdp_ms1 += std::string(kSdpStringMs1Audio0);
      AddAudioTrack(kAudioTracks[0], stream);
    }
    if (number_of_audio_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Audio1;
      AddAudioTrack(kAudioTracks[1], stream);
    }

    // Append the video m-line, then one a=ssrc section per video track.
    if (number_of_video_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringVideo);
      sdp_ms1 += std::string(kSdpStringMs1Video0);
      AddVideoTrack(kVideoTracks[0], stream);
    }
    if (number_of_video_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Video1;
      AddVideoTrack(kVideoTracks[1], stream);
    }

    *desc = webrtc::CreateSessionDescription(
        SessionDescriptionInterface::kOffer, sdp_ms1, NULL);
  }

  // Creates an AudioTrack with |track_id| and adds it to |stream|.
  void AddAudioTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        webrtc::AudioTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(audio_track));
  }

  // Creates a VideoTrack with |track_id| and adds it to |stream|.
  void AddVideoTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        webrtc::VideoTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(video_track));
  }

  talk_base::scoped_refptr<StreamCollection> reference_collection_;
  talk_base::scoped_ptr<MockSignalingObserver> observer_;
  talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_;
};
// Test that a MediaSessionOptions is created for an offer if
// kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set but no
// MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  constraints.SetMandatoryReceiveVideo(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  // Both media types were requested; BUNDLE defaults to enabled.
  EXPECT_TRUE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}
// Test that a correct MediaSessionOptions is created for an offer if
// kOfferToReceiveAudio constraints is set but no MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  // Only audio was requested, so video must not be offered.
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}
// Test that a correct MediaSessionOptions is created for an offer if
// no constraints or MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) {
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  // Default offer: receive audio only, with BUNDLE enabled.
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}
// Test that a correct MediaSessionOptions is created for an offer if
// kOfferToReceiveVideo constraints is set but no MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(false);
  constraints.SetMandatoryReceiveVideo(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  // Audio was explicitly declined, so only video is offered.
  EXPECT_FALSE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}
// Test that a correct MediaSessionOptions is created for an offer if
// kUseRtpMux constraints is set to false.
TEST_F(MediaStreamSignalingTest,
       GetMediaSessionOptionsForOfferWithBundleDisabled) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  constraints.SetMandatoryReceiveVideo(true);
  // Disabling RTP mux must turn BUNDLE off in the resulting options.
  constraints.SetMandatoryUseRtpMux(false);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_FALSE(options.bundle_enabled);
}
// Test that a correct MediaSessionOptions is created to restart ice if
// kIceRestart constraints is set. It also tests that subsequent
// MediaSessionOptions don't have |transport_options.ice_restart| set.
TEST_F(MediaStreamSignalingTest,
       GetMediaSessionOptionsForOfferWithIceRestart) {
  FakeConstraints constraints;
  constraints.SetMandatoryIceRestart(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.transport_options.ice_restart);

  // Without the constraint, the ice_restart flag must not persist.
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  EXPECT_FALSE(options.transport_options.ice_restart);
}
// Test that GetMediaSessionOptionsForOffer and GetOptionsForAnswer work as
// expected if unknown constraints are used.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsWithBadConstraints) {
  FakeConstraints mandatory;
  mandatory.AddMandatory("bad_key", "bad_value");
  cricket::MediaSessionOptions options;
  // Unknown mandatory constraints must cause failure...
  EXPECT_FALSE(signaling_->GetOptionsForOffer(&mandatory, &options));
  EXPECT_FALSE(signaling_->GetOptionsForAnswer(&mandatory, &options));

  // ...while unknown optional constraints are ignored.
  FakeConstraints optional;
  optional.AddOptional("bad_key", "bad_value");
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&optional, &options));
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&optional, &options));
}
// Test that a correct MediaSessionOptions are created for an offer if
// a MediaStream is sent and later updated with a new track.
// MediaConstraints are not used.
TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) {
  talk_base::scoped_refptr<StreamCollection> local_streams(
      CreateStreamCollection(1));
  MediaStreamInterface* local_stream = local_streams->at(0);
  EXPECT_TRUE(signaling_->AddLocalStream(local_stream));
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  VerifyMediaOptions(local_streams, options);

  cricket::MediaSessionOptions updated_options;
  local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL));
  // BUG FIX: the second GetOptionsForOffer previously wrote into |options|
  // and left |updated_options| unused, so the updated options were never
  // actually exercised.
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &updated_options));
  VerifyMediaOptions(local_streams, updated_options);
}
// Test that the MediaConstraints in an answer don't affect if audio and video
// is offered in an offer but that if kOfferToReceiveAudio or
// kOfferToReceiveVideo constraints are true in an offer, the media type will be
// included in subsequent answers.
TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) {
  FakeConstraints answer_c;
  answer_c.SetMandatoryReceiveAudio(true);
  answer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions answer_options;
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options));
  EXPECT_TRUE(answer_options.has_audio);
  EXPECT_TRUE(answer_options.has_video);

  FakeConstraints offer_c;
  offer_c.SetMandatoryReceiveAudio(false);
  offer_c.SetMandatoryReceiveVideo(false);

  cricket::MediaSessionOptions offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&offer_c, &offer_options));
  EXPECT_FALSE(offer_options.has_audio);
  EXPECT_FALSE(offer_options.has_video);

  FakeConstraints updated_offer_c;
  updated_offer_c.SetMandatoryReceiveAudio(true);
  updated_offer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions updated_offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&updated_offer_c,
                                             &updated_offer_options));
  EXPECT_TRUE(updated_offer_options.has_audio);
  EXPECT_TRUE(updated_offer_options.has_video);

  // Since an offer has been created with both audio and video, subsequent
  // offers and answers should contain both audio and video.
  // Answers will only contain the media types that exist in the offer
  // regardless of the value of |updated_answer_options.has_audio| and
  // |updated_answer_options.has_video|.
  FakeConstraints updated_answer_c;
  // BUG FIX: these constraints were previously set on |answer_c| instead of
  // |updated_answer_c|, so the updated answer was generated from an empty
  // constraint set and the "regardless of constraints" claim went untested.
  updated_answer_c.SetMandatoryReceiveAudio(false);
  updated_answer_c.SetMandatoryReceiveVideo(false);

  cricket::MediaSessionOptions updated_answer_options;
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c,
                                              &updated_answer_options));
  EXPECT_TRUE(updated_answer_options.has_audio);
  EXPECT_TRUE(updated_answer_options.has_video);

  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL,
                                             &updated_offer_options));
  EXPECT_TRUE(updated_offer_options.has_audio);
  EXPECT_TRUE(updated_offer_options.has_video);
}
// This test verifies that the remote MediaStreams corresponding to a received
// SDP string is created. In this test the two separate MediaStreams are
// signaled.
TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  // The single remote stream must match stream1: one audio track on ssrc 1
  // and one video track on ssrc 2.
  talk_base::scoped_refptr<StreamCollection> reference(
      CreateStreamCollection(1));
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference.get()));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference.get()));
  EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks());
  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
  EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks());
  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);

  // Create a session description based on another SDP with another
  // MediaStream.
  talk_base::scoped_ptr<SessionDescriptionInterface> update_desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWith2Stream, NULL));
  EXPECT_TRUE(update_desc != NULL);
  signaling_->OnRemoteDescriptionChanged(update_desc.get());

  // Both streams are now known, with tracks on ssrcs 1-4.
  talk_base::scoped_refptr<StreamCollection> reference2(
      CreateStreamCollection(2));
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference2.get()));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference2.get()));
  EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks());
  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3);
  EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks());
  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4);
}
// This test verifies that the remote MediaStreams corresponding to a received
// SDP string is created. In this test the same remote MediaStream is signaled
// but MediaStream tracks are added and removed.
TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1;
  CreateSessionDescriptionAndReference(1, 1, desc_ms1.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));

  // Add extra audio and video tracks to the same MediaStream.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));

  // Remove the extra audio and video tracks again.
  CreateSessionDescriptionAndReference(1, 1, desc_ms1.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));
}
// This test verifies that remote tracks are ended if a
// local session description is set that rejects the media content type.
TEST_F(MediaStreamSignalingTest, RejectMediaContent) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());

  // Both remote tracks start out live.
  talk_base::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
      remote_stream->GetVideoTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
  talk_base::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
      remote_stream->GetAudioTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  // Reject the video content locally: only the video track should end.
  cricket::ContentInfo* video_info =
      desc->description()->GetContentByName("video");
  ASSERT_TRUE(video_info != NULL);
  video_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  // Reject the audio content as well: the audio track ends too.
  cricket::ContentInfo* audio_info =
      desc->description()->GetContentByName("audio");
  ASSERT_TRUE(audio_info != NULL);
  audio_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
}
// This tests that a default MediaStream is created if a remote session
// description doesn't contain any streams and no MSID support.
// It also tests that the default stream is updated if a video m-line is added
// in a subsequent session description.
TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreamsAudioOnly,
                                       NULL));
  ASSERT_TRUE(desc_audio_only != NULL);
  signaling_->OnRemoteDescriptionChanged(desc_audio_only.get());

  // One default stream with a single audio track is synthesized.
  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);

  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("default", remote_stream->label());

  // Add a video m-line: the existing default stream gains a video track.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams, NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());
  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
  // Default tracks are reported with ssrc 0.
  observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0);
  observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0);
}
// This tests that a default MediaStream is created if the remote session
// description doesn't contain any streams and doesn't contain an indication
// of whether MSID is supported.
TEST_F(MediaStreamSignalingTest,
       SdpWithoutMsidAndStreamsCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams,
                                       NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  // A default stream with one audio and one video track is synthesized.
  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
}
// This tests that a default MediaStream is not created if the remote session
// description doesn't contain any streams but does support MSID.
TEST_F(MediaStreamSignalingTest, SdpWitMsidDontCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithMsidWithoutStreams,
                                       NULL));
  // BUG FIX: guard against a parse failure, as every sibling test does;
  // OnRemoteDescriptionChanged must not be handed a NULL description.
  ASSERT_TRUE(desc_msid_without_streams != NULL);
  signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get());
  EXPECT_EQ(0u, observer_->remote_streams()->count());
}
// This test verifies that a default MediaStream is not created if a remote
// session description is updated to not have any MediaStreams.
TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1,
                                       NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());
  talk_base::scoped_refptr<StreamCollection> reference(
      CreateStreamCollection(1));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference.get()));

  // Update to a description without streams: the known stream is removed and
  // no "default" stream takes its place (MSID support was already signaled).
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_without_streams(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams,
                                       NULL));
  signaling_->OnRemoteDescriptionChanged(desc_without_streams.get());
  EXPECT_EQ(0u, observer_->remote_streams()->count());
}
// This test verifies that the correct MediaStreamSignalingObserver methods
// are called when MediaStreamSignaling::OnLocalDescriptionChanged is called
// with an updated local session description.
TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_1;
  CreateSessionDescriptionAndReference(2, 2, desc_1.use());

  signaling_->AddLocalStream(reference_collection_->at(0));
  signaling_->OnLocalDescriptionChanged(desc_1.get());
  // All four tracks are reported with the ssrcs from the kSdpStringMs1*
  // fragments (audio 1 and 3, video 2 and 4).
  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);

  // Remove an audio and video track.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_2;
  CreateSessionDescriptionAndReference(1, 1, desc_2.use());
  signaling_->OnLocalDescriptionChanged(desc_2.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
}
// This test verifies that the correct MediaStreamSignalingObserver methods
// are called when MediaStreamSignaling::AddLocalStream is called after
// MediaStreamSignaling::OnLocalDescriptionChanged is called.
TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_1;
  CreateSessionDescriptionAndReference(2, 2, desc_1.use());

  // The description mentions tracks, but the stream is not known yet, so no
  // local tracks may be reported.
  signaling_->OnLocalDescriptionChanged(desc_1.get());
  EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks());

  // Once the stream is added, all tracks in the description are reported.
  signaling_->AddLocalStream(reference_collection_->at(0));
  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);
}
// This test verifies that the correct MediaStreamSignalingObserver methods
// are called if the ssrc on a local track is changed when
// MediaStreamSignaling::OnLocalDescriptionChanged is called.
TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc;
  CreateSessionDescriptionAndReference(1, 1, desc.use());

  signaling_->AddLocalStream(reference_collection_->at(0));
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);

  // Change the ssrc of the audio and video track.
  std::string sdp;
  desc->ToString(&sdp);
  // Rewrite "a=ssrc:1" (audio track) to "a=ssrc:97" in the serialized SDP.
  std::string ssrc_org = "a=ssrc:1";
  std::string ssrc_to = "a=ssrc:97";
  talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
                             ssrc_to.c_str(), ssrc_to.length(),
                             &sdp);
  // Rewrite "a=ssrc:2" (video track) to "a=ssrc:98".
  ssrc_org = "a=ssrc:2";
  ssrc_to = "a=ssrc:98";
  talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
                             ssrc_to.c_str(), ssrc_to.length(),
                             &sdp);
  talk_base::scoped_ptr<SessionDescriptionInterface> updated_desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       sdp, NULL));

  // The same tracks must now be reported with the new ssrcs.
  signaling_->OnLocalDescriptionChanged(updated_desc.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98);
}

View File

@ -0,0 +1,81 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_
#define TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_
#include <string>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
namespace webrtc {
// MediaTrack implements the interface common to AudioTrackInterface and
// VideoTrackInterface.
template <typename T>
class MediaStreamTrack : public Notifier<T> {
 public:
  typedef typename T::TrackState TypedTrackState;

  virtual std::string id() const { return id_; }
  virtual MediaStreamTrackInterface::TrackState state() const {
    return state_;
  }
  virtual bool enabled() const { return enabled_; }
  // Enables or disables the track. Observers are notified only on an actual
  // change. Returns true iff the value changed.
  virtual bool set_enabled(bool enable) {
    bool fire_on_change = (enable != enabled_);
    enabled_ = enable;
    if (fire_on_change) {
      Notifier<T>::FireOnChanged();
    }
    return fire_on_change;
  }
  // Sets the track state, notifying observers only on an actual change.
  // NOTE(review): unlike set_enabled(), this always returns true rather than
  // whether the state changed — confirm callers rely on this before unifying.
  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state) {
    bool fire_on_change = (state_ != new_state);
    state_ = new_state;
    if (fire_on_change)
      Notifier<T>::FireOnChanged();
    return true;
  }

 protected:
  // Tracks start enabled and in the kInitializing state.
  explicit MediaStreamTrack(const std::string& id)
      : enabled_(true),
        id_(id),
        state_(MediaStreamTrackInterface::kInitializing) {
  }

 private:
  bool enabled_;
  std::string id_;
  MediaStreamTrackInterface::TrackState state_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMTRACK_H_

View File

@ -0,0 +1,73 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// This file includes proxy classes for tracks. The purpose is
// to make sure tracks are only accessed from the signaling thread.
#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
#define TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/proxy.h"
namespace webrtc {
// Proxy for AudioTrackInterface: forwards every call below to the
// wrapped track on the signaling thread (the BEGIN_PROXY_MAP/PROXY_*
// macros are defined in talk/app/webrtc/proxy.h).
BEGIN_PROXY_MAP(AudioTrack)
PROXY_CONSTMETHOD0(std::string, kind)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(TrackState, state)
PROXY_CONSTMETHOD0(bool, enabled)
PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
PROXY_METHOD0(cricket::AudioRenderer*, FrameInput)
PROXY_METHOD1(bool, set_enabled, bool)
PROXY_METHOD1(bool, set_state, TrackState)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()
// Same idea for VideoTrackInterface, with the renderer-management
// methods added on top of the common track methods.
BEGIN_PROXY_MAP(VideoTrack)
PROXY_CONSTMETHOD0(std::string, kind)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(TrackState, state)
PROXY_CONSTMETHOD0(bool, enabled)
PROXY_METHOD1(bool, set_enabled, bool)
PROXY_METHOD1(bool, set_state, TrackState)
PROXY_METHOD1(void, AddRenderer, VideoRendererInterface*)
PROXY_METHOD1(void, RemoveRenderer, VideoRendererInterface*)
PROXY_METHOD0(cricket::VideoRenderer*, FrameInput)
PROXY_CONSTMETHOD0(VideoSourceInterface*, GetSource)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()
} // namespace webrtc
#endif // TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_

View File

@ -0,0 +1,77 @@
/*
* libjingle
* Copyright 2011, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_NOTIFIER_H_
#define TALK_APP_WEBRTC_NOTIFIER_H_
#include <list>
#include "talk/base/common.h"
#include "talk/app/webrtc/mediastreaminterface.h"
namespace webrtc {
// Implement a template version of a notifier.
template <class T>
class Notifier : public T {
public:
Notifier() {
}
virtual void RegisterObserver(ObserverInterface* observer) {
ASSERT(observer != NULL);
observers_.push_back(observer);
}
virtual void UnregisterObserver(ObserverInterface* observer) {
for (std::list<ObserverInterface*>::iterator it = observers_.begin();
it != observers_.end(); it++) {
if (*it == observer) {
observers_.erase(it);
break;
}
}
}
void FireOnChanged() {
// Copy the list of observers to avoid a crash if the observer object
// unregisters as a result of the OnChanged() call. If the same list is used
// UnregisterObserver will affect the list make the iterator invalid.
std::list<ObserverInterface*> observers = observers_;
for (std::list<ObserverInterface*>::iterator it = observers.begin();
it != observers.end(); ++it) {
(*it)->OnChanged();
}
}
protected:
std::list<ObserverInterface*> observers_;
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_NOTIFIER_H_

View File

@ -0,0 +1,45 @@
This directory contains the Objective-C implementation of the
webrtc::PeerConnection API. This can be built for Mac or iOS.
Prerequisites:
- Make sure gclient is checking out tools necessary to target iOS: your
.gclient file should contain a line like:
target_os = ['ios', 'mac']
Make sure to re-run gclient sync after adding this to download the tools.
- Set up webrtc-related GYP variables:
- For Mac:
export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 OS=mac
target_arch=x64 libjingle_objc=1 libpeer_target_type=static_library
$GYP_DEFINES"
- For iOS:
export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 OS=ios
libjingle_enable_video=0 libjingle_objc=1 enable_video=0 $GYP_DEFINES"
- Finally, run "gclient runhooks" to generate iOS or Mac targeting Xcode
projects.
Example of building & using the app:
cd <path/to/libjingle>/trunk/talk
- Open libjingle.xcproj. Select iPhone or iPad simulator and build everything.
Then switch to iOS device and build everything. This creates x86 and ARM
archives.
cd examples/ios
./makeLibs.sh
- This will generate fat archives containing both targets and copy them to
./libs.
- This step must be rerun every time you run gclient sync or build the API
libraries.
- Open AppRTCDemo.xcodeproj, select your device or simulator and run.
- If you have any problems deploying for the first time, check the project
properties to ensure that the Bundle Identifier matches your phone
provisioning profile. Or use the simulator as it doesn't require a profile.
- In desktop chrome, navigate to http://apprtc.appspot.com and note the r=<NNN>
room number in the resulting URL.
- Enter that number into the text field on the phone.
- Alternatively, you can background the app and launch Safari. In Safari, open
the URL apprtc://apprtc.appspot.com/?r=<NNN>, where <NNN> is the room name.
Another option is to put the link in an email and send it to yourself;
clicking on it will launch AppRTCDemo and navigate to the room.

View File

@ -0,0 +1,37 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCAudioTrack.h"
#include "talk/app/webrtc/mediastreaminterface.h"
// Exposes the native track object backing an RTCAudioTrack to other
// classes inside the Objective-C wrapper library.
@interface RTCAudioTrack (Internal)
// The wrapped native audio track (a downcast of the generic media track
// held by the superclass -- see the getter in RTCAudioTrack.mm).
@property(nonatomic, assign, readonly)
talk_base::scoped_refptr<webrtc::AudioTrackInterface> audioTrack;
@end

View File

@ -0,0 +1,45 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCAudioTrack+internal.h"
#import "RTCMediaStreamTrack+internal.h"
// RTCAudioTrack adds no behavior of its own; everything is inherited
// from RTCMediaStreamTrack.
@implementation RTCAudioTrack
@end
@implementation RTCAudioTrack (Internal)
// Returns the wrapped native track. Downcasts the generic track held by
// the superclass; assumes every RTCAudioTrack wraps an
// AudioTrackInterface -- TODO confirm against the factory that creates
// these objects.
- (talk_base::scoped_refptr<webrtc::AudioTrackInterface>)audioTrack {
return static_cast<webrtc::AudioTrackInterface *>(self.mediaTrack.get());
}
@end

View File

@ -0,0 +1,54 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import <Foundation/Foundation.h>
#import "RTCTypes.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
// Static helpers translating between the native WebRTC enums and their
// Objective-C (RTC*) mirror enums.
@interface RTCEnumConverter : NSObject
+ (RTCICEConnectionState)convertIceConnectionStateToObjC:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+ (RTCICEGatheringState)convertIceGatheringStateToObjC:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+ (RTCSignalingState)convertSignalingStateToObjC:
(webrtc::PeerConnectionInterface::SignalingState)nativeState;
+ (RTCSourceState)convertSourceStateToObjC:
(webrtc::MediaSourceInterface::SourceState)nativeState;
// Track state is the only enum converted in both directions.
+ (webrtc::MediaStreamTrackInterface::TrackState)convertTrackStateToNative:
(RTCTrackState)state;
+ (RTCTrackState)convertTrackStateToObjC:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState;
@end

View File

@ -0,0 +1,126 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCEnumConverter.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
// Pure mapping functions between the native WebRTC enums and their
// Objective-C (RTC*) mirrors. Each switch lists every enumerator and
// deliberately has no default case, so the compiler can warn when a new
// native value is added without a mapping here.
@implementation RTCEnumConverter
+ (RTCICEConnectionState)convertIceConnectionStateToObjC:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceConnectionNew:
return RTCICEConnectionNew;
case webrtc::PeerConnectionInterface::kIceConnectionChecking:
return RTCICEConnectionChecking;
case webrtc::PeerConnectionInterface::kIceConnectionConnected:
return RTCICEConnectionConnected;
case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
return RTCICEConnectionCompleted;
case webrtc::PeerConnectionInterface::kIceConnectionFailed:
return RTCICEConnectionFailed;
case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
return RTCICEConnectionDisconnected;
case webrtc::PeerConnectionInterface::kIceConnectionClosed:
return RTCICEConnectionClosed;
}
}
+ (RTCICEGatheringState)convertIceGatheringStateToObjC:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceGatheringNew:
return RTCICEGatheringNew;
case webrtc::PeerConnectionInterface::kIceGatheringGathering:
return RTCICEGatheringGathering;
case webrtc::PeerConnectionInterface::kIceGatheringComplete:
return RTCICEGatheringComplete;
}
}
+ (RTCSignalingState)convertSignalingStateToObjC:
(webrtc::PeerConnectionInterface::SignalingState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kStable:
return RTCSignalingStable;
case webrtc::PeerConnectionInterface::kHaveLocalOffer:
return RTCSignalingHaveLocalOffer;
case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
return RTCSignalingHaveLocalPrAnswer;
case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
return RTCSignalingHaveRemoteOffer;
case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
return RTCSignalingHaveRemotePrAnswer;
case webrtc::PeerConnectionInterface::kClosed:
return RTCSignalingClosed;
}
}
+ (RTCSourceState)convertSourceStateToObjC:
(webrtc::MediaSourceInterface::SourceState)nativeState {
switch (nativeState) {
case webrtc::MediaSourceInterface::kInitializing:
return RTCSourceStateInitializing;
case webrtc::MediaSourceInterface::kLive:
return RTCSourceStateLive;
case webrtc::MediaSourceInterface::kEnded:
return RTCSourceStateEnded;
case webrtc::MediaSourceInterface::kMuted:
return RTCSourceStateMuted;
}
}
// Track state converts in both directions (ObjC -> native below, and
// native -> ObjC in the next method).
+ (webrtc::MediaStreamTrackInterface::TrackState)
convertTrackStateToNative:(RTCTrackState)state {
switch (state) {
case RTCTrackStateInitializing:
return webrtc::MediaStreamTrackInterface::kInitializing;
case RTCTrackStateLive:
return webrtc::MediaStreamTrackInterface::kLive;
case RTCTrackStateEnded:
return webrtc::MediaStreamTrackInterface::kEnded;
case RTCTrackStateFailed:
return webrtc::MediaStreamTrackInterface::kFailed;
}
}
+ (RTCTrackState)convertTrackStateToObjC:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState {
switch (nativeState) {
case webrtc::MediaStreamTrackInterface::kInitializing:
return RTCTrackStateInitializing;
case webrtc::MediaStreamTrackInterface::kLive:
return RTCTrackStateLive;
case webrtc::MediaStreamTrackInterface::kEnded:
return RTCTrackStateEnded;
case webrtc::MediaStreamTrackInterface::kFailed:
return RTCTrackStateFailed;
}
}
@end

View File

@ -0,0 +1,34 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCI420Frame.h"
// Placeholder wrapper for an I420 video frame; no functionality yet.
@implementation RTCI420Frame
// TODO(hughv): Should this just be a cricket::VideoFrame wrapper object?
@end

View File

@ -0,0 +1,39 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCICECandidate.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
// Bridges RTCICECandidate to the native webrtc::IceCandidateInterface.
@interface RTCICECandidate (Internal)
// Native form of this candidate. NOTE(review): the getter constructs a
// brand-new IceCandidateInterface via webrtc::CreateIceCandidate on
// every call (see RTCICECandidate.mm), so despite the `assign`
// attribute the caller takes ownership and must delete the result.
@property(nonatomic, assign, readonly) const
webrtc::IceCandidateInterface *candidate;
// Wraps an existing native candidate; returns nil if it cannot be
// serialized to SDP.
- (id)initWithCandidate:(const webrtc::IceCandidateInterface *)candidate;
@end

View File

@ -0,0 +1,86 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCICECandidate+internal.h"
@implementation RTCICECandidate {
  NSString *_sdpMid;
  NSInteger _sdpMLineIndex;
  NSString *_sdp;
}

// Designated initializer. sdpMid and sdp are required; asserts in debug
// builds and returns nil when either is missing.
- (id)initWithMid:(NSString *)sdpMid
            index:(NSInteger)sdpMLineIndex
              sdp:(NSString *)sdp {
  if (sdpMid == nil || sdp == nil) {
    NSAssert(NO, @"nil arguments not allowed");
    return nil;
  }
  self = [super init];
  if (self != nil) {
    _sdpMid = [sdpMid copy];
    _sdpMLineIndex = sdpMLineIndex;
    _sdp = [sdp copy];
  }
  return self;
}

// Human-readable form: "<mid>:<m-line index>:<sdp>".
- (NSString *)description {
  return [NSString stringWithFormat:@"%@:%ld:%@",
                                    self.sdpMid,
                                    (long)self.sdpMLineIndex,
                                    self.sdp];
}

@end
@implementation RTCICECandidate (Internal)
// Wraps an existing native candidate by capturing its mid, m-line index
// and serialized SDP string. Returns nil (and asserts in debug builds)
// if serialization fails.
- (id)initWithCandidate:(const webrtc::IceCandidateInterface *)candidate {
if ((self = [super init])) {
std::string sdp;
if (candidate->ToString(&sdp)) {
_sdpMid = @(candidate->sdp_mid().c_str());
_sdpMLineIndex = candidate->sdp_mline_index();
_sdp = @(sdp.c_str());
} else {
self = nil;
NSAssert(NO, @"ICECandidateInterface->ToString failed");
}
}
return self;
}
// NOTE(review): builds a brand-new IceCandidateInterface on every call,
// so the caller takes ownership of the returned object and must delete
// it -- otherwise it leaks. The `assign` property attribute makes this
// ownership transfer easy to miss.
- (const webrtc::IceCandidateInterface *)candidate {
return webrtc::CreateIceCandidate(
[self.sdpMid UTF8String], self.sdpMLineIndex, [self.sdp UTF8String]);
}
@end

View File

@ -0,0 +1,37 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCICEServer.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
// Bridges RTCICEServer to the native PeerConnection configuration type.
@interface RTCICEServer (Internal)
// Native equivalent of this server description (returned by value;
// built fresh on each access -- see the getter in RTCICEServer.mm).
@property(nonatomic, assign, readonly)
webrtc::PeerConnectionInterface::IceServer iceServer;
@end

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCICEServer+internal.h"
@implementation RTCICEServer

// Designated initializer. Both arguments are required: asserts in debug
// builds and returns nil if either is nil.
// Fix: dropped the redundant `self = nil;` dead store before the early
// `return nil;` (it had no effect, and the other initializers in this
// directory, e.g. RTCICECandidate's, just return nil).
- (id)initWithURI:(NSURL *)URI password:(NSString *)password {
  if (!URI || !password) {
    NSAssert(NO, @"nil arguments not allowed");
    return nil;
  }
  if ((self = [super init])) {
    _URI = URI;
    _password = [password copy];
  }
  return self;
}

- (NSString *)description {
  return [NSString stringWithFormat:@"Server: [%@]\nPassword: [%@]",
                                    [self.URI absoluteString], self.password];
}

@end
@implementation RTCICEServer (Internal)
// Builds a native IceServer struct (returned by value) from the URI and
// password. Both properties are non-nil per the initializer's guard.
- (webrtc::PeerConnectionInterface::IceServer)iceServer {
webrtc::PeerConnectionInterface::IceServer iceServer;
iceServer.uri = [[self.URI absoluteString] UTF8String];
iceServer.password = [self.password UTF8String];
return iceServer;
}
@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCMediaConstraints.h"
#import "RTCMediaConstraintsNative.h"
#include "talk/app/webrtc/mediastreaminterface.h"
// Exposes the native constraints object to the rest of the wrapper
// library.
@interface RTCMediaConstraints (Internal)
// Ownership is retained for the lifetime of this object.
// (Backed by a scoped_ptr in RTCMediaConstraints.mm; callers must not
// delete the returned pointer.)
@property(nonatomic, assign, readonly) const
webrtc::RTCMediaConstraintsNative *constraints;
@end

View File

@ -0,0 +1,76 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCMediaConstraints+internal.h"
#import "RTCPair.h"
#include "talk/base/scoped_ptr.h"
// TODO(hughv): Add accessors for mandatory and optional constraints.
// TODO(hughv): Add description.
// ObjC wrapper around webrtc::RTCMediaConstraintsNative.  The native object
// is built once at init time from the mandatory/optional RTCPair arrays and
// owned by this instance via a scoped_ptr.
@implementation RTCMediaConstraints {
  talk_base::scoped_ptr<webrtc::RTCMediaConstraintsNative> _constraints;
  webrtc::MediaConstraintsInterface::Constraints _mandatory;
  webrtc::MediaConstraintsInterface::Constraints _optional;
}

// Converts the two RTCPair arrays into native constraint lists and builds
// the backing native constraints object from them.
- (id)initWithMandatoryConstraints:(NSArray *)mandatory
               optionalConstraints:(NSArray *)optional {
  self = [super init];
  if (self != nil) {
    _mandatory = [[self class] constraintsFromArray:mandatory];
    _optional = [[self class] constraintsFromArray:optional];
    _constraints.reset(
        new webrtc::RTCMediaConstraintsNative(_mandatory, _optional));
  }
  return self;
}

// Translates an NSArray of RTCPair objects into the equivalent native
// constraint list; key and value strings are copied as UTF-8.
+ (webrtc::MediaConstraintsInterface::Constraints)
    constraintsFromArray:(NSArray *)array {
  webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
  for (RTCPair *constraintPair in array) {
    nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
        [constraintPair.key UTF8String], [constraintPair.value UTF8String]));
  }
  return nativeConstraints;
}

@end
// Internal category implementation: exposes the raw native constraints
// pointer.  The pointer is owned by _constraints and is valid only while
// this RTCMediaConstraints instance is alive.
@implementation RTCMediaConstraints (internal)
- (const webrtc::RTCMediaConstraintsNative *)constraints {
  return _constraints.get();
}
@end

View File

@ -0,0 +1,51 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/objc/RTCMediaConstraintsNative.h"
namespace webrtc {

RTCMediaConstraintsNative::~RTCMediaConstraintsNative() {}

RTCMediaConstraintsNative::RTCMediaConstraintsNative() {}

// Copies both constraint lists into this object; the caller's vectors are
// not referenced after construction.
RTCMediaConstraintsNative::RTCMediaConstraintsNative(
    const MediaConstraintsInterface::Constraints& mandatory,
    const MediaConstraintsInterface::Constraints& optional)
    : mandatory_(mandatory), optional_(optional) {}

// MediaConstraintsInterface accessor: the mandatory constraint list.
const MediaConstraintsInterface::Constraints&
RTCMediaConstraintsNative::GetMandatory() const {
  return mandatory_;
}

// MediaConstraintsInterface accessor: the optional constraint list.
const MediaConstraintsInterface::Constraints&
RTCMediaConstraintsNative::GetOptional() const {
  return optional_;
}

}  // namespace webrtc

View File

@ -0,0 +1,50 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_
#define TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_

#include "talk/app/webrtc/mediaconstraintsinterface.h"

namespace webrtc {

// Concrete MediaConstraintsInterface backed by two constraint lists copied
// at construction time.  Used by the ObjC RTCMediaConstraints wrapper.
class RTCMediaConstraintsNative : public MediaConstraintsInterface {
 public:
  virtual ~RTCMediaConstraintsNative();
  RTCMediaConstraintsNative();
  // Copies both constraint lists into the new object.
  RTCMediaConstraintsNative(
      const MediaConstraintsInterface::Constraints& mandatory,
      const MediaConstraintsInterface::Constraints& optional);
  // MediaConstraintsInterface overrides.
  virtual const Constraints& GetMandatory() const;
  virtual const Constraints& GetOptional() const;

 private:
  MediaConstraintsInterface::Constraints mandatory_;
  MediaConstraintsInterface::Constraints optional_;
};

}  // namespace webrtc

#endif  // TALK_APP_WEBRTC_OBJC_RTCMEDIACONSTRAINTSNATIVE_H_

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCMediaSource.h"
#include "talk/app/webrtc/mediastreaminterface.h"
// Internal category exposing the native MediaSourceInterface backing an
// RTCMediaSource to other wrapper code.
@interface RTCMediaSource (Internal)

// The underlying native source (ref-counted; retained by this wrapper).
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::MediaSourceInterface> mediaSource;

// Internal initializer; |mediaSource| must be non-NULL (asserts otherwise).
- (id)initWithMediaSource:
    (talk_base::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource;

@end

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCMediaSource+internal.h"
#import "RTCEnumConverter.h"
// ObjC wrapper around webrtc::MediaSourceInterface.
@implementation RTCMediaSource {
  talk_base::scoped_refptr<webrtc::MediaSourceInterface> _mediaSource;
}

// Maps the native source's current state to the ObjC RTCSourceState enum.
- (RTCSourceState)state {
  return [RTCEnumConverter convertSourceStateToObjC:self.mediaSource->state()];
}

@end
// Internal category implementation: wraps and exposes the native source.
@implementation RTCMediaSource (Internal)

// Retains |mediaSource|; a NULL argument is a programmer error (asserts in
// debug builds, returns nil otherwise).
- (id)initWithMediaSource:
    (talk_base::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource {
  if (!mediaSource) {
    NSAssert(NO, @"nil arguments not allowed");
    self = nil;
    return nil;
  }
  self = [super init];
  if (self != nil) {
    _mediaSource = mediaSource;
  }
  return self;
}

// Accessor for the retained native source.
- (talk_base::scoped_refptr<webrtc::MediaSourceInterface>)mediaSource {
  return _mediaSource;
}

@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCMediaStream.h"
#include "talk/app/webrtc/mediastreamtrack.h"
// Internal category exposing the native MediaStreamInterface backing an
// RTCMediaStream to other wrapper code.
@interface RTCMediaStream (Internal)

// The underlying native stream (ref-counted; retained by this wrapper).
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::MediaStreamInterface> mediaStream;

// Internal initializer; |mediaStream| must be non-NULL (asserts otherwise).
- (id)initWithMediaStream:
    (talk_base::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream;

@end

View File

@ -0,0 +1,145 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCMediaStream+internal.h"
#import "RTCAudioTrack+internal.h"
#import "RTCMediaStreamTrack+internal.h"
#import "RTCVideoTrack+internal.h"
#include "talk/app/webrtc/mediastreaminterface.h"
// ObjC wrapper around webrtc::MediaStreamInterface.  _audioTracks and
// _videoTracks mirror the native stream's track lists with ObjC wrapper
// objects so callers get RTCAudioTrack/RTCVideoTrack instances back.
@implementation RTCMediaStream {
  NSMutableArray *_audioTracks;
  NSMutableArray *_videoTracks;
  talk_base::scoped_refptr<webrtc::MediaStreamInterface> _mediaStream;
}

- (NSString *)description {
  return [NSString stringWithFormat:@"[%@:A=%lu:V=%lu]",
                                    [self label],
                                    (unsigned long)[self.audioTracks count],
                                    (unsigned long)[self.videoTracks count]];
}

// Returns an immutable snapshot so callers cannot mutate internal state.
- (NSArray *)audioTracks {
  return [_audioTracks copy];
}

// Returns an immutable snapshot so callers cannot mutate internal state.
- (NSArray *)videoTracks {
  return [_videoTracks copy];
}

- (NSString *)label {
  return @(self.mediaStream->label().c_str());
}

// Adds |track| to the native stream; mirrors it in _audioTracks on success.
- (BOOL)addAudioTrack:(RTCAudioTrack *)track {
  if (self.mediaStream->AddTrack(track.audioTrack)) {
    [_audioTracks addObject:track];
    return YES;
  }
  return NO;
}

// Adds |track| to the native stream; mirrors it in _videoTracks on success.
- (BOOL)addVideoTrack:(RTCVideoTrack *)track {
  if (self.mediaStream->AddTrack(track.videoTrack)) {
    [_videoTracks addObject:track];
    return YES;
  }
  return NO;
}

// Removes |track| from both the native stream and the mirror array.
// Asserts (debug builds) if |track| was never added through this wrapper.
- (BOOL)removeAudioTrack:(RTCAudioTrack *)track {
  NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:track];
  NSAssert(index != NSNotFound,
           @"|removeAudioTrack| called on unexpected RTCAudioTrack");
  if (index != NSNotFound && self.mediaStream->RemoveTrack(track.audioTrack)) {
    [_audioTracks removeObjectAtIndex:index];
    return YES;
  }
  return NO;
}

// Removes |track| from both the native stream and the mirror array.
// Asserts (debug builds) if |track| was never added through this wrapper.
- (BOOL)removeVideoTrack:(RTCVideoTrack *)track {
  NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track];
  // Fixed: this assert message previously said |removeAudioTrack|, which
  // made failures here look like they came from the audio path.
  NSAssert(index != NSNotFound,
           @"|removeVideoTrack| called on unexpected RTCVideoTrack");
  if (index != NSNotFound && self.mediaStream->RemoveTrack(track.videoTrack)) {
    [_videoTracks removeObjectAtIndex:index];
    return YES;
  }
  return NO;
}

@end
// Internal category implementation: wraps a native stream and builds ObjC
// wrappers for its existing tracks.
@implementation RTCMediaStream (Internal)

// Retains |mediaStream| and wraps each existing native audio track in an
// RTCAudioTrack.  Video-track wrapping is not implemented yet (see the
// commented-out TODO block below).  |mediaStream| must be non-NULL.
- (id)initWithMediaStream:
    (talk_base::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
  if (!mediaStream) {
    NSAssert(NO, @"nil arguments not allowed");
    self = nil;
    return nil;
  }
  if ((self = [super init])) {
    webrtc::AudioTrackVector audio_tracks = mediaStream->GetAudioTracks();
    webrtc::VideoTrackVector video_tracks = mediaStream->GetVideoTracks();
    // Capacities are hints only; the arrays grow as tracks are added later.
    _audioTracks = [NSMutableArray arrayWithCapacity:audio_tracks.size()];
    _videoTracks = [NSMutableArray arrayWithCapacity:video_tracks.size()];
    _mediaStream = mediaStream;
    for (size_t i = 0; i < audio_tracks.size(); ++i) {
      talk_base::scoped_refptr<webrtc::AudioTrackInterface> track =
          audio_tracks[i];
      RTCAudioTrack *audioTrack =
          [[RTCAudioTrack alloc] initWithMediaTrack:track];
      [_audioTracks addObject:audioTrack];
    }
    // TODO(hughv): Add video.
    // for (size_t i = 0; i < video_tracks.size(); ++i) {
    //   talk_base::scoped_refptr<webrtc::VideoTrackInterface> track =
    //       video_tracks[i];
    //   RTCVideoTrack *videoTrack =
    //       [[RTCVideoTrack alloc] initWithMediaTrack:track];
    //   [_videoTracks addObject:videoTrack];
    // }
  }
  return self;
}

// Accessor for the retained native stream.
- (talk_base::scoped_refptr<webrtc::MediaStreamInterface>)mediaStream {
  return _mediaStream;
}

@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCMediaStreamTrack.h"
#include "talk/app/webrtc/mediastreaminterface.h"
// Internal category exposing the native MediaStreamTrackInterface backing
// an RTCMediaStreamTrack to other wrapper code.
@interface RTCMediaStreamTrack (Internal)

// The underlying native track (ref-counted; retained by this wrapper).
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface> mediaTrack;

// Internal initializer; |mediaTrack| must be non-NULL (asserts otherwise).
- (id)initWithMediaTrack:
    (talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack;

@end

View File

@ -0,0 +1,103 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCMediaStreamTrack+internal.h"
#import "RTCEnumConverter.h"
// ObjC wrapper around webrtc::MediaStreamTrackInterface.  All accessors
// forward to the native track.
@implementation RTCMediaStreamTrack {
  talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface> _mediaTrack;
}

// NOTE(review): the synthesized |label| ivar is assigned in the internal
// initializer but shadowed by the custom -label getter below, which always
// reads the native track's id.  The stored value appears unused — confirm
// before removing.
@synthesize label;

- (BOOL)isEqual:(id)other {
  // Equality is purely based on the label just like the C++ implementation.
  if (self == other) return YES;
  if (![other isKindOfClass:[self class]] ||
      ![self isKindOfClass:[other class]]) {
    return NO;
  }
  RTCMediaStreamTrack *otherMediaStream = (RTCMediaStreamTrack *)other;
  return [self.label isEqual:otherMediaStream.label];
}

// Hash must agree with -isEqual:, so it is derived from the label.
- (NSUInteger)hash {
  return [self.label hash];
}

- (NSString *)kind {
  return @(self.mediaTrack->kind().c_str());
}

// Custom getter: returns the native track's id, not the synthesized ivar.
- (NSString *)label {
  return @(self.mediaTrack->id().c_str());
}

- (BOOL)isEnabled {
  return self.mediaTrack->enabled();
}

// Returns YES if the native track accepted the new enabled state.
- (BOOL)setEnabled:(BOOL)enabled {
  return self.mediaTrack->set_enabled(enabled);
}

- (RTCTrackState)state {
  return [RTCEnumConverter convertTrackStateToObjC:self.mediaTrack->state()];
}

// Returns YES if the native track accepted the state transition.
- (BOOL)setState:(RTCTrackState)state {
  return self.mediaTrack->set_state(
      [RTCEnumConverter convertTrackStateToNative:state]);
}

@end
// Internal category implementation: wraps and exposes the native track.
@implementation RTCMediaStreamTrack (Internal)

// Retains |mediaTrack|; a NULL argument is a programmer error (asserts in
// debug builds, returns nil otherwise).
- (id)initWithMediaTrack:(
    talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
  if (!mediaTrack) {
    NSAssert(NO, @"nil arguments not allowed");
    self = nil;
    return nil;
  }
  if ((self = [super init])) {
    _mediaTrack = mediaTrack;
    // Caches the native id in the synthesized ivar; note the custom -label
    // getter reads the native track directly rather than this ivar.
    label = @(mediaTrack->id().c_str());
  }
  return self;
}

// Accessor for the retained native track.
- (talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
  return _mediaTrack;
}

@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCPair.h"
// Simple immutable key/value string pair.
@implementation RTCPair

// Copies both strings so later mutation of the caller's arguments cannot
// affect this pair.
- (id)initWithKey:(NSString *)key value:(NSString *)value {
  self = [super init];
  if (self != nil) {
    _key = [key copy];
    _value = [value copy];
  }
  return self;
}

@end

View File

@ -0,0 +1,44 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCPeerConnection.h"
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionObserver.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
// Internal category exposing the native PeerConnectionInterface and the
// internal initializer used by wrapper code that creates connections.
@interface RTCPeerConnection (Internal)

// The underlying native connection (ref-counted; retained by this wrapper).
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection;

// Internal initializer.  Takes ownership of |observer|; both arguments
// must be non-NULL (asserts otherwise).
- (id)initWithPeerConnection:(
    talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection
                    observer:(webrtc::RTCPeerConnectionObserver *)observer;

@end

View File

@ -0,0 +1,247 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCPeerConnection+internal.h"
#import "RTCEnumConverter.h"
#import "RTCICECandidate+internal.h"
#import "RTCICEServer+internal.h"
#import "RTCMediaConstraints+internal.h"
#import "RTCMediaStream+internal.h"
#import "RTCSessionDescription+internal.h"
#import "RTCSessionDescriptonDelegate.h"
#import "RTCSessionDescription.h"
#include "talk/app/webrtc/jsep.h"
// Error domain/code reported to RTCSessionDescriptonDelegate when SDP
// creation or application fails.
NSString* const kRTCSessionDescriptionDelegateErrorDomain = @"RTCSDPError";
int const kRTCSessionDescriptionDelegateErrorCode = -1;

namespace webrtc {

// Adapts native CreateSessionDescriptionObserver callbacks to the ObjC
// RTCSessionDescriptonDelegate.  Members are ARC-managed ObjC pointers, so
// the delegate and connection are kept alive while the async call runs.
class RTCCreateSessionDescriptionObserver
    : public CreateSessionDescriptionObserver {
 public:
  RTCCreateSessionDescriptionObserver(id<RTCSessionDescriptonDelegate> delegate,
                                      RTCPeerConnection *peerConnection) {
    _delegate = delegate;
    _peerConnection = peerConnection;
  }

  // Wraps |desc| in an RTCSessionDescription and reports it with nil error.
  virtual void OnSuccess(SessionDescriptionInterface *desc) OVERRIDE {
    RTCSessionDescription *session =
        [[RTCSessionDescription alloc] initWithSessionDescription:desc];
    [_delegate peerConnection:_peerConnection
        didCreateSessionDescription:session
                              error:nil];
  }

  // Converts the native error string into an NSError in the SDP domain and
  // reports it with a nil session description.
  virtual void OnFailure(const std::string &error) OVERRIDE {
    NSString *str = @(error.c_str());
    NSError *err =
        [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
                            code:kRTCSessionDescriptionDelegateErrorCode
                        userInfo:@{ @"error" : str }];
    [_delegate peerConnection:_peerConnection
        didCreateSessionDescription:nil
                              error:err];
  }

 private:
  id<RTCSessionDescriptonDelegate> _delegate;
  RTCPeerConnection *_peerConnection;
};

// Same adaptation for SetSessionDescriptionObserver (used when applying a
// local or remote description).
class RTCSetSessionDescriptionObserver : public SetSessionDescriptionObserver {
 public:
  RTCSetSessionDescriptionObserver(id<RTCSessionDescriptonDelegate> delegate,
                                   RTCPeerConnection *peerConnection) {
    _delegate = delegate;
    _peerConnection = peerConnection;
  }

  virtual void OnSuccess() OVERRIDE {
    [_delegate peerConnection:_peerConnection
        didSetSessionDescriptionWithError:nil];
  }

  virtual void OnFailure(const std::string &error) OVERRIDE {
    NSString *str = @(error.c_str());
    NSError *err =
        [NSError errorWithDomain:kRTCSessionDescriptionDelegateErrorDomain
                            code:kRTCSessionDescriptionDelegateErrorCode
                        userInfo:@{ @"error" : str }];
    [_delegate peerConnection:_peerConnection
        didSetSessionDescriptionWithError:err];
  }

 private:
  id<RTCSessionDescriptonDelegate> _delegate;
  RTCPeerConnection *_peerConnection;
};

}  // namespace webrtc
// ObjC wrapper around webrtc::PeerConnectionInterface.  All operations
// forward to the native connection; _localStreams mirrors the streams added
// through this wrapper so -localStreams can return ObjC objects.
@implementation RTCPeerConnection {
  NSMutableArray *_localStreams;
  talk_base::scoped_ptr<webrtc::RTCPeerConnectionObserver> _observer;
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
}

// Adds |candidate| to the native connection.  AddIceCandidate does not take
// ownership of the candidate, so the object produced by candidate.candidate
// must be freed here.  Bug fix: the original code placed |delete| after
// |return|, making it unreachable and leaking one IceCandidateInterface per
// call.
- (BOOL)addICECandidate:(RTCICECandidate *)candidate {
  const webrtc::IceCandidateInterface *iceCandidate = candidate.candidate;
  BOOL ret = self.peerConnection->AddIceCandidate(iceCandidate);
  delete iceCandidate;
  return ret;
}

// Adds a local stream.  On native success the stream is also recorded in
// _localStreams so it can be reported and removed later.
- (BOOL)addStream:(RTCMediaStream *)stream
      constraints:(RTCMediaConstraints *)constraints {
  BOOL ret = self.peerConnection->AddStream(stream.mediaStream,
                                            constraints.constraints);
  if (!ret) {
    return NO;
  }
  [_localStreams addObject:stream];
  return YES;
}

// Asynchronously creates an SDP answer; the result (or error) is delivered
// through |delegate|.
- (void)createAnswerWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
                     constraints:(RTCMediaConstraints *)constraints {
  talk_base::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
      observer(new talk_base::RefCountedObject<
          webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
  self.peerConnection->CreateAnswer(observer, constraints.constraints);
}

// Asynchronously creates an SDP offer; the result (or error) is delivered
// through |delegate|.
- (void)createOfferWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
                    constraints:(RTCMediaConstraints *)constraints {
  talk_base::scoped_refptr<webrtc::RTCCreateSessionDescriptionObserver>
      observer(new talk_base::RefCountedObject<
          webrtc::RTCCreateSessionDescriptionObserver>(delegate, self));
  self.peerConnection->CreateOffer(observer, constraints.constraints);
}

// Removes |stream| from the native connection and the local mirror array.
- (void)removeStream:(RTCMediaStream *)stream {
  self.peerConnection->RemoveStream(stream.mediaStream);
  [_localStreams removeObject:stream];
}

// Applies |sdp| as the local description; completion is reported to
// |delegate| via didSetSessionDescriptionWithError:.
- (void)
    setLocalDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
                 sessionDescription:(RTCSessionDescription *)sdp {
  talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
      new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
          delegate, self));
  self.peerConnection->SetLocalDescription(observer, sdp.sessionDescription);
}

// Applies |sdp| as the remote description; completion is reported to
// |delegate| via didSetSessionDescriptionWithError:.
- (void)
    setRemoteDescriptionWithDelegate:(id<RTCSessionDescriptonDelegate>)delegate
                  sessionDescription:(RTCSessionDescription *)sdp {
  talk_base::scoped_refptr<webrtc::RTCSetSessionDescriptionObserver> observer(
      new talk_base::RefCountedObject<webrtc::RTCSetSessionDescriptionObserver>(
          delegate, self));
  self.peerConnection->SetRemoteDescription(observer, sdp.sessionDescription);
}

// Replaces the ICE server list (and constraints) on the native connection.
- (BOOL)updateICEServers:(NSArray *)servers
             constraints:(RTCMediaConstraints *)constraints {
  webrtc::PeerConnectionInterface::IceServers iceServers;
  for (RTCICEServer *server in servers) {
    iceServers.push_back(server.iceServer);
  }
  return self.peerConnection->UpdateIce(iceServers, constraints.constraints);
}

// Returns the current local SDP wrapped in an RTCSessionDescription, or nil
// if no local description has been set yet.
- (RTCSessionDescription *)localDescription {
  const webrtc::SessionDescriptionInterface *sdi =
      self.peerConnection->local_description();
  return sdi ?
      [[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
      nil;
}

// Immutable snapshot of the streams added through this wrapper.
- (NSArray *)localStreams {
  return [_localStreams copy];
}

// Returns the current remote SDP wrapped in an RTCSessionDescription, or
// nil if no remote description has been set yet.
- (RTCSessionDescription *)remoteDescription {
  const webrtc::SessionDescriptionInterface *sdi =
      self.peerConnection->remote_description();
  return sdi ?
      [[RTCSessionDescription alloc] initWithSessionDescription:sdi] :
      nil;
}

- (RTCICEConnectionState)iceConnectionState {
  return [RTCEnumConverter convertIceConnectionStateToObjC:
      self.peerConnection->ice_connection_state()];
}

- (RTCICEGatheringState)iceGatheringState {
  return [RTCEnumConverter convertIceGatheringStateToObjC:
      self.peerConnection->ice_gathering_state()];
}

- (RTCSignalingState)signalingState {
  return [RTCEnumConverter
      convertSignalingStateToObjC:self.peerConnection->signaling_state()];
}

// Terminates the native connection.
- (void)close {
  self.peerConnection->Close();
}

@end
@implementation RTCPeerConnection (Internal)

// Internal initializer used by RTCPeerConnectionFactory.  Retains
// |peerConnection| and takes ownership of |observer| (stored in a
// scoped_ptr, so it is deleted with this wrapper).
- (id)initWithPeerConnection:(
    talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection
                    observer:(webrtc::RTCPeerConnectionObserver *)observer {
  if (!peerConnection || !observer) {
    NSAssert(NO, @"nil arguments not allowed");
    self = nil;
    return nil;
  }
  self = [super init];
  if (self) {
    _peerConnection = peerConnection;
    _localStreams = [[NSMutableArray alloc] init];
    _observer.reset(observer);
  }
  return self;
}

// Exposes the wrapped native connection to other internal classes.
- (talk_base::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection {
  return _peerConnection;
}

@end

View File

@ -0,0 +1,127 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCPeerConnectionFactory.h"
#include <vector>
#import "RTCAudioTrack+internal.h"
#import "RTCICEServer+internal.h"
#import "RTCMediaConstraints+internal.h"
#import "RTCMediaSource+internal.h"
#import "RTCMediaStream+internal.h"
#import "RTCMediaStreamTrack+internal.h"
#import "RTCPeerConnection+internal.h"
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionObserver.h"
#import "RTCVideoCapturer+internal.h"
#import "RTCVideoSource+internal.h"
#import "RTCVideoTrack+internal.h"
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnectionfactory.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/logging.h"
@interface RTCPeerConnectionFactory ()

// Native factory backing every object this class creates.
// NOTE(review): |assign| is appropriate here because the property's type is
// a C++ scoped_refptr, not an Objective-C object; the scoped_refptr itself
// manages the native refcount.
@property(nonatomic, assign) talk_base::scoped_refptr<
    webrtc::PeerConnectionFactoryInterface> nativeFactory;

@end
@implementation RTCPeerConnectionFactory

// Creates the native factory that backs all subsequently created objects.
- (id)init {
  self = [super init];
  if (self) {
    _nativeFactory = webrtc::CreatePeerConnectionFactory();
    NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
    // Uncomment to get sensitive logs emitted (to stderr or logcat).
    // talk_base::LogMessage::LogToDebug(talk_base::LS_SENSITIVE);
  }
  return self;
}

// Builds an RTCPeerConnection wrapper around a new native PeerConnection
// that reports its events to |delegate|.
- (RTCPeerConnection *)
    peerConnectionWithICEServers:(NSArray *)servers
                     constraints:(RTCMediaConstraints *)constraints
                        delegate:(id<RTCPeerConnectionDelegate>)delegate {
  webrtc::PeerConnectionInterface::IceServers nativeServers;
  for (RTCICEServer *iceServer in servers) {
    nativeServers.push_back(iceServer.iceServer);
  }
  // Ownership of |observer| transfers to the RTCPeerConnection initializer.
  webrtc::RTCPeerConnectionObserver *observer =
      new webrtc::RTCPeerConnectionObserver(delegate);
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> nativeConnection =
      self.nativeFactory->CreatePeerConnection(
          nativeServers, constraints.constraints, observer);
  RTCPeerConnection *connection =
      [[RTCPeerConnection alloc] initWithPeerConnection:nativeConnection
                                               observer:observer];
  observer->SetPeerConnection(connection);
  return connection;
}

// Creates an empty local media stream identified by |label|.
- (RTCMediaStream *)mediaStreamWithLabel:(NSString *)label {
  talk_base::scoped_refptr<webrtc::MediaStreamInterface> nativeStream =
      self.nativeFactory->CreateLocalMediaStream([label UTF8String]);
  return [[RTCMediaStream alloc] initWithMediaStream:nativeStream];
}

// Wraps a native video source fed by |capturer|, or returns nil when no
// capturer is supplied.
- (RTCVideoSource *)videoSourceWithCapturer:(RTCVideoCapturer *)capturer
                                constraints:(RTCMediaConstraints *)constraints {
  if (!capturer) {
    return nil;
  }
  talk_base::scoped_refptr<webrtc::VideoSourceInterface> nativeSource =
      self.nativeFactory->CreateVideoSource(capturer.capturer.get(),
                                            constraints.constraints);
  return [[RTCVideoSource alloc] initWithMediaSource:nativeSource];
}

// Creates a video track drawing frames from |source|.
- (RTCVideoTrack *)videoTrackWithID:(NSString *)videoId
                             source:(RTCVideoSource *)source {
  talk_base::scoped_refptr<webrtc::VideoTrackInterface> nativeTrack =
      self.nativeFactory->CreateVideoTrack([videoId UTF8String],
                                           source.videoSource);
  return [[RTCVideoTrack alloc] initWithMediaTrack:nativeTrack];
}

// Creates an audio track with default options (NULL audio source).
- (RTCAudioTrack *)audioTrackWithID:(NSString *)audioId {
  talk_base::scoped_refptr<webrtc::AudioTrackInterface> nativeTrack =
      self.nativeFactory->CreateAudioTrack([audioId UTF8String], NULL);
  return [[RTCAudioTrack alloc] initWithMediaTrack:nativeTrack];
}

@end

View File

@ -0,0 +1,79 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/peerconnectioninterface.h"
#import "RTCPeerConnection.h"
#import "RTCPeerConnectionDelegate.h"
// These objects are created by RTCPeerConnectionFactory to wrap an
// id<RTCPeerConnectionDelegate> and call methods on that interface.
namespace webrtc {

// Bridges native PeerConnectionObserver callbacks to an Objective-C
// id<RTCPeerConnectionDelegate>.  Created by RTCPeerConnectionFactory,
// which then hands ownership to the RTCPeerConnection wrapper.
class RTCPeerConnectionObserver : public PeerConnectionObserver {
 public:
  explicit RTCPeerConnectionObserver(id<RTCPeerConnectionDelegate> delegate);
  // Sets the wrapper that is passed back as the sender argument in every
  // delegate callback.
  void SetPeerConnection(RTCPeerConnection *peerConnection);
  virtual void OnError() OVERRIDE;
  // Triggered when the SignalingState changed.
  virtual void OnSignalingChange(
      PeerConnectionInterface::SignalingState new_state) OVERRIDE;
  // Triggered when media is received on a new stream from remote peer.
  virtual void OnAddStream(MediaStreamInterface* stream) OVERRIDE;
  // Triggered when a remote peer close a stream.
  virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE;
  // Triggered when a remote peer open a data channel.
  virtual void OnDataChannel(DataChannelInterface* data_channel) OVERRIDE;
  // Triggered when renegotation is needed, for example the ICE has restarted.
  virtual void OnRenegotiationNeeded() OVERRIDE;
  // Called any time the ICEConnectionState changes
  virtual void OnIceConnectionChange(
      PeerConnectionInterface::IceConnectionState new_state) OVERRIDE;
  // Called any time the ICEGatheringState changes
  virtual void OnIceGatheringChange(
      PeerConnectionInterface::IceGatheringState new_state) OVERRIDE;
  // New Ice candidate have been found.
  virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE;
 private:
  // NOTE(review): under ARC these id/object ivars are implicitly __strong.
  // RTCPeerConnection owns this observer (scoped_ptr) while this observer
  // retains the RTCPeerConnection via _peerConnection, which looks like a
  // retain cycle -- verify intended ownership.
  id<RTCPeerConnectionDelegate> _delegate;
  RTCPeerConnection *_peerConnection;
};
} // namespace webrtc

View File

@ -0,0 +1,103 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCPeerConnectionObserver.h"
#import "RTCICECandidate+internal.h"
#import "RTCMediaStream+internal.h"
#import "RTCEnumConverter.h"
namespace webrtc {

RTCPeerConnectionObserver::RTCPeerConnectionObserver(
    id<RTCPeerConnectionDelegate> delegate) {
  _delegate = delegate;
}

void RTCPeerConnectionObserver::SetPeerConnection(
    RTCPeerConnection *peerConnection) {
  _peerConnection = peerConnection;
}

// Each override below forwards the native event to |_delegate|, first
// converting native arguments into their Objective-C wrappers.

void RTCPeerConnectionObserver::OnError() {
  [_delegate peerConnectionOnError:_peerConnection];
}

void RTCPeerConnectionObserver::OnSignalingChange(
    PeerConnectionInterface::SignalingState new_state) {
  RTCSignalingState state =
      [RTCEnumConverter convertSignalingStateToObjC:new_state];
  [_delegate peerConnection:_peerConnection signalingStateChanged:state];
}

void RTCPeerConnectionObserver::OnAddStream(MediaStreamInterface* stream) {
  RTCMediaStream* wrapper =
      [[RTCMediaStream alloc] initWithMediaStream:stream];
  [_delegate peerConnection:_peerConnection addedStream:wrapper];
}

void RTCPeerConnectionObserver::OnRemoveStream(MediaStreamInterface* stream) {
  RTCMediaStream* wrapper =
      [[RTCMediaStream alloc] initWithMediaStream:stream];
  [_delegate peerConnection:_peerConnection removedStream:wrapper];
}

void RTCPeerConnectionObserver::OnDataChannel(
    DataChannelInterface* data_channel) {
  // TODO(hughv): Implement for future version.
}

void RTCPeerConnectionObserver::OnRenegotiationNeeded() {
  [_delegate peerConnectionOnRenegotiationNeeded:_peerConnection];
}

void RTCPeerConnectionObserver::OnIceConnectionChange(
    PeerConnectionInterface::IceConnectionState new_state) {
  RTCICEConnectionState state =
      [RTCEnumConverter convertIceConnectionStateToObjC:new_state];
  [_delegate peerConnection:_peerConnection iceConnectionChanged:state];
}

void RTCPeerConnectionObserver::OnIceGatheringChange(
    PeerConnectionInterface::IceGatheringState new_state) {
  RTCICEGatheringState state =
      [RTCEnumConverter convertIceGatheringStateToObjC:new_state];
  [_delegate peerConnection:_peerConnection iceGatheringChanged:state];
}

void RTCPeerConnectionObserver::OnIceCandidate(
    const IceCandidateInterface* candidate) {
  RTCICECandidate* wrapper =
      [[RTCICECandidate alloc] initWithCandidate:candidate];
  [_delegate peerConnection:_peerConnection gotICECandidate:wrapper];
}

}  // namespace webrtc

View File

@ -0,0 +1,41 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCSessionDescription.h"
#include "talk/app/webrtc/jsep.h"
#include "talk/app/webrtc/webrtcsession.h"
@interface RTCSessionDescription (Internal)
// Builds a new native SessionDescriptionInterface from this object's type
// and SDP strings.
// Caller assumes ownership of this object!
- (webrtc::SessionDescriptionInterface *)sessionDescription;
// Copies the type and SDP strings out of an existing native description;
// the native object itself is not retained.
- (id)initWithSessionDescription:
    (const webrtc::SessionDescriptionInterface*)sessionDescription;
@end

View File

@ -0,0 +1,81 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCSessionDescription+internal.h"
@implementation RTCSessionDescription {
  // Backing storage for the SDP body and its type tag.
  NSString *_description;
  NSString *_type;
}

// Public initializer: stores |type| and |sdp| verbatim.  Both arguments are
// required; passing nil asserts in debug builds and returns nil.
- (id)initWithType:(NSString *)type sdp:(NSString *)sdp {
  if (!type || !sdp) {
    NSAssert(NO, @"nil arguments not allowed");
    return nil;
  }
  self = [super init];
  if (self) {
    _type = type;
    _description = sdp;
  }
  return self;
}

@end
@implementation RTCSessionDescription (Internal)

// Copies the type and SDP strings out of a native description.  Returns nil
// (after asserting in debug builds) when the native object is NULL or its
// SDP cannot be serialized.
- (id)initWithSessionDescription:
    (const webrtc::SessionDescriptionInterface *)sessionDescription {
  if (!sessionDescription) {
    NSAssert(NO, @"nil arguments not allowed");
    self = nil;
    return nil;
  }
  if ((self = [super init])) {
    const std::string &type = sessionDescription->type();
    std::string sdp;
    if (!sessionDescription->ToString(&sdp)) {
      NSAssert(NO, @"Invalid SessionDescriptionInterface.");
      self = nil;
    } else {
      _description = @(sdp.c_str());
      _type = @(type.c_str());
    }
  }
  return self;
}

// Creates a native description from the stored strings; the caller owns the
// returned object.  The NULL error argument means parse failures are not
// reported -- the result is simply NULL in that case.
// NOTE(review): |self.description| shadows NSObject's -description here;
// verify the header declares a |description| property backed by _description.
- (webrtc::SessionDescriptionInterface *)sessionDescription {
  return webrtc::CreateSessionDescription(
      [self.type UTF8String], [self.description UTF8String], NULL);
}

@end

View File

@ -0,0 +1,38 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCVideoCapturer.h"
#include "talk/app/webrtc/videosourceinterface.h"
@interface RTCVideoCapturer (Internal)

// Borrowed reference to the native capturer owned by this wrapper.
@property(nonatomic, assign, readonly)
    const talk_base::scoped_ptr<cricket::VideoCapturer> &capturer;

- (id)initWithCapturer:(cricket::VideoCapturer*)capturer;

@end

View File

@ -0,0 +1,76 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCVideoCapturer+internal.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/devices/devicemanager.h"
@implementation RTCVideoCapturer {
  // Owns the native capturer for the lifetime of this wrapper.
  talk_base::scoped_ptr<cricket::VideoCapturer> _capturer;
}

// Creates a capturer for the named capture device, or nil when the device
// cannot be found.
+ (RTCVideoCapturer *)capturerWithDeviceName:(NSString *)deviceName {
  const std::string &device_name = std::string([deviceName UTF8String]);
  talk_base::scoped_ptr<cricket::DeviceManagerInterface> device_manager(
      cricket::DeviceManagerFactory::Create());
  bool initialized = device_manager->Init();
  NSAssert(initialized, @"DeviceManager::Init() failed");
  (void)initialized;  // Avoid unused-variable warning when NSAssert is compiled out.
  cricket::Device device;
  if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
    LOG(LS_ERROR) << "GetVideoCaptureDevice failed";
    // Was "return 0"; nil is the idiomatic (and equivalent) object return.
    return nil;
  }
  // Ownership of the native capturer transfers to the new wrapper.
  talk_base::scoped_ptr<cricket::VideoCapturer> capturer(
      device_manager->CreateVideoCapturer(device));
  RTCVideoCapturer *rtcCapturer =
      [[RTCVideoCapturer alloc] initWithCapturer:capturer.release()];
  return rtcCapturer;
}

@end
@implementation RTCVideoCapturer (Internal)

// Takes ownership of |capturer|; it is deleted with this wrapper.
- (id)initWithCapturer:(cricket::VideoCapturer *)capturer {
  if ((self = [super init])) {
    _capturer.reset(capturer);
  }
  return self;
}

// Borrowed reference to the owned capturer; this wrapper keeps ownership.
// TODO(hughv): When capturer is implemented, this needs to return
// _capturer.release() instead.  For now, this isn't used.
- (const talk_base::scoped_ptr<cricket::VideoCapturer> &)capturer {
  return _capturer;
}

@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCVideoRenderer.h"
#include "talk/app/webrtc/mediastreaminterface.h"
@interface RTCVideoRenderer (Internal)
// The wrapped native renderer.  Currently always NULL (see the TODOs in the
// implementation file).
// TODO(hughv): Use smart pointer.
@property(nonatomic, assign, readonly)
    webrtc::VideoRendererInterface *videoRenderer;
- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer;
@end

View File

@ -0,0 +1,72 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCVideoRenderer+internal.h"
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif
#import "RTCI420Frame.h"
#import "RTCVideoRendererDelegate.h"
@implementation RTCVideoRenderer

// Placeholder factory for an on-screen renderer; not yet implemented and
// always returns nil.
+ (RTCVideoRenderer *)videoRenderGUIWithFrame:(CGRect)frame {
  // TODO (hughv): Implement.
  return nil;
}

// Stores |delegate| for frame callbacks; the renderer itself is not yet
// created (see TODO).
- (id)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
  if ((self = [super init])) {
    _delegate = delegate;
    // TODO (hughv): Create video renderer.
  }
  return self;
}

@end
@implementation RTCVideoRenderer (Internal)

// Placeholder: |videoRenderer| is currently ignored.
- (id)initWithVideoRenderer:(webrtc::VideoRendererInterface *)videoRenderer {
  if ((self = [super init])) {
    // TODO (hughv): Implement.
  }
  return self;
}

// Always NULL until rendering is implemented.
- (webrtc::VideoRendererInterface *)videoRenderer {
  // TODO (hughv): Implement.
  return NULL;
}

@end

View File

@ -0,0 +1,37 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCVideoSource.h"
#include "talk/app/webrtc/videosourceinterface.h"
@interface RTCVideoSource (Internal)

// The wrapped media source, downcast to its video-specific native interface.
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::VideoSourceInterface> videoSource;

@end

View File

@ -0,0 +1,44 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCVideoSource+internal.h"
#import "RTCMediaSource+internal.h"
// No overrides: all public behavior is inherited from the superclass
// (see RTCMediaSource).
@implementation RTCVideoSource
@end
@implementation RTCVideoSource (Internal)

// Downcasts the stored media source to the video interface.
// NOTE(review): the static_cast assumes every RTCVideoSource wraps a
// VideoSourceInterface -- true for instances created by
// RTCPeerConnectionFactory; verify no other construction path exists.
- (talk_base::scoped_refptr<webrtc::VideoSourceInterface>)videoSource {
  return static_cast<webrtc::VideoSourceInterface *>(self.mediaSource.get());
}

@end

View File

@ -0,0 +1,40 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCVideoTrack.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
@class RTCVideoRenderer;
@interface RTCVideoTrack (Internal)
// The wrapped media track, downcast to its video-specific native interface.
@property(nonatomic, assign, readonly)
    talk_base::scoped_refptr<webrtc::VideoTrackInterface> videoTrack;
@end

View File

@ -0,0 +1,77 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "RTCVideoTrack+internal.h"
#import "RTCMediaStreamTrack+internal.h"
#import "RTCVideoRenderer+internal.h"
@implementation RTCVideoTrack {
  // Strong references to every RTCVideoRenderer currently attached to the
  // native track, in attachment order.
  NSMutableArray *_rendererArray;
}

// Wraps |mediaTrack| and starts with no renderers attached.
- (id)initWithMediaTrack:(
    talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface>)mediaTrack {
  self = [super initWithMediaTrack:mediaTrack];
  if (self) {
    _rendererArray = [NSMutableArray array];
  }
  return self;
}

// Attaches |renderer| to both the local bookkeeping array and the native
// track. A renderer must not be added more than once (debug-asserted).
- (void)addRenderer:(RTCVideoRenderer *)renderer {
  BOOL alreadyAttached = [self.renderers containsObject:renderer];
  NSAssert1(!alreadyAttached,
            @"renderers already contains object [%@]",
            [renderer description]);
  [_rendererArray addObject:renderer];
  self.videoTrack->AddRenderer(renderer.videoRenderer);
}

// Detaches |renderer| from the native track and releases the local
// reference. A no-op if the renderer was never attached (identity compare).
- (void)removeRenderer:(RTCVideoRenderer *)renderer {
  NSUInteger location = [_rendererArray indexOfObjectIdenticalTo:renderer];
  if (location == NSNotFound) {
    return;
  }
  [_rendererArray removeObjectAtIndex:location];
  self.videoTrack->RemoveRenderer(renderer.videoRenderer);
}

// Returns an immutable snapshot of the attached renderers so callers cannot
// mutate internal state.
- (NSArray *)renderers {
  return [NSArray arrayWithArray:_rendererArray];
}

@end
@implementation RTCVideoTrack (Internal)

// Returns the wrapped native track downcast to the video-specific
// interface. The base class stores it as a generic
// MediaStreamTrackInterface; the static_cast assumes this wrapper is only
// ever constructed around a video track — NOTE(review): confirm no
// non-video track can reach this class.
- (talk_base::scoped_refptr<webrtc::VideoTrackInterface>)videoTrack {
  return static_cast<webrtc::VideoTrackInterface *>(self.mediaTrack.get());
}

@end

/* ---- (diff-viewer extraction artifact removed) End of the RTCVideoTrack
 * implementation file; the next section is RTCAudioSource.h. ---- */
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "RTCMediaSource.h"

// RTCAudioSource is an ObjectiveC wrapper for AudioSourceInterface. It is
// used as the source for one or more RTCAudioTrack objects.
// Instances are created by the factory/peer-connection layer, never
// directly; hence init is unavailable below.
@interface RTCAudioSource : RTCMediaSource

#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation: a compile-time error is
// raised if a caller attempts [[RTCAudioSource alloc] init].
- (id)init __attribute__(
    (unavailable("init is not a supported initializer for this class.")));
#endif /* DOXYGEN_SHOULD_SKIP_THIS */

@end

/* ---- (diff-viewer extraction artifact removed) The remaining files of
 * this revision were truncated by the diff viewer and are not shown
 * here. ---- */