Update talk to 52534915.

R=sergeyu@chromium.org

Review URL: https://webrtc-codereview.appspot.com/2251004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4786 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
wu@webrtc.org 2013-09-19 05:49:50 +00:00
parent 532f3dc548
commit 967bfff54d
44 changed files with 647 additions and 199 deletions

View File

@ -76,7 +76,7 @@ class SctpDataChannelTest : public testing::Test {
talk_base::Thread::Current())),
media_stream_signaling_(
new webrtc::MediaStreamSignaling(talk_base::Thread::Current(),
NULL)),
NULL, channel_manager_.get())),
session_(channel_manager_.get(),
talk_base::Thread::Current(),
talk_base::Thread::Current(),

View File

@ -28,7 +28,7 @@
#include "talk/app/webrtc/mediastreamhandler.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videosourceinterface.h"
namespace webrtc {
@ -154,6 +154,8 @@ RemoteVideoTrackHandler::RemoteVideoTrackHandler(
remote_video_track_(track),
provider_(provider) {
OnEnabledChanged();
provider_->SetVideoPlayout(ssrc, true,
remote_video_track_->GetSource()->FrameInput());
}
RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
@ -169,9 +171,6 @@ void RemoteVideoTrackHandler::OnStateChanged() {
}
void RemoteVideoTrackHandler::OnEnabledChanged() {
provider_->SetVideoPlayout(ssrc(),
remote_video_track_->enabled(),
remote_video_track_->FrameInput());
}
MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,

View File

@ -30,9 +30,9 @@
#include <string>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/media/base/fakevideocapturer.h"
@ -86,6 +86,7 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
virtual void RemoveSink(cricket::VideoRenderer* output) {}
virtual SourceState state() const { return state_; }
virtual const cricket::VideoOptions* options() const { return &options_; }
virtual cricket::VideoRenderer* FrameInput() { return NULL; }
protected:
FakeVideoSource() : state_(kLive) {}
@ -149,8 +150,8 @@ class MediaStreamHandlerTest : public testing::Test {
}
void AddRemoteVideoTrack() {
EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true,
video_track_->FrameInput()));
EXPECT_CALL(video_provider_, SetVideoPlayout(
kVideoSsrc, true, video_track_->GetSource()->FrameInput()));
handlers_.AddRemoteVideoTrack(stream_, stream_->GetVideoTracks()[0],
kVideoSsrc);
}
@ -283,11 +284,8 @@ TEST_F(MediaStreamHandlerTest, LocalVideoTrackDisable) {
TEST_F(MediaStreamHandlerTest, RemoteVideoTrackDisable) {
AddRemoteVideoTrack();
EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, _));
video_track_->set_enabled(false);
EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true,
video_track_->FrameInput()));
video_track_->set_enabled(true);
RemoveRemoteVideoTrack();

View File

@ -134,12 +134,6 @@ class VideoTrackInterface : public MediaStreamTrackInterface {
// Deregister a renderer.
virtual void RemoveRenderer(VideoRendererInterface* renderer) = 0;
// Gets a pointer to the frame input of this VideoTrack.
// The pointer is valid for the lifetime of this VideoTrack.
// VideoFrames rendered to the cricket::VideoRenderer will be rendered on all
// registered renderers.
virtual cricket::VideoRenderer* FrameInput() = 0;
virtual VideoSourceInterface* GetSource() const = 0;
protected:

View File

@ -33,6 +33,8 @@
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/bytebuffer.h"
@ -132,8 +134,10 @@ static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
// Factory class for creating remote MediaStreams and MediaStreamTracks.
class RemoteMediaStreamFactory {
public:
explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread)
: signaling_thread_(signaling_thread) {
explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread,
cricket::ChannelManager* channel_manager)
: signaling_thread_(signaling_thread),
channel_manager_(channel_manager) {
}
talk_base::scoped_refptr<MediaStreamInterface> CreateMediaStream(
@ -144,21 +148,24 @@ class RemoteMediaStreamFactory {
AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
const std::string& track_id) {
return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(stream,
track_id);
return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
stream, track_id, static_cast<AudioSourceInterface*>(NULL));
}
VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
const std::string& track_id) {
return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(stream,
track_id);
return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
stream, track_id, VideoSource::Create(channel_manager_,
new RemoteVideoCapturer(),
NULL).get());
}
private:
template <typename TI, typename T, typename TP>
TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id) {
template <typename TI, typename T, typename TP, typename S>
TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id,
S* source) {
talk_base::scoped_refptr<TI> track(
TP::Create(signaling_thread_, T::Create(track_id, NULL)));
TP::Create(signaling_thread_, T::Create(track_id, source)));
track->set_state(webrtc::MediaStreamTrackInterface::kLive);
if (stream->AddTrack(track)) {
return track;
@ -167,17 +174,20 @@ class RemoteMediaStreamFactory {
}
talk_base::Thread* signaling_thread_;
cricket::ChannelManager* channel_manager_;
};
MediaStreamSignaling::MediaStreamSignaling(
talk_base::Thread* signaling_thread,
MediaStreamSignalingObserver* stream_observer)
MediaStreamSignalingObserver* stream_observer,
cricket::ChannelManager* channel_manager)
: signaling_thread_(signaling_thread),
data_channel_factory_(NULL),
stream_observer_(stream_observer),
local_streams_(StreamCollection::Create()),
remote_streams_(StreamCollection::Create()),
remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread)),
remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread,
channel_manager)),
last_allocated_sctp_id_(0) {
options_.has_video = false;
options_.has_audio = false;

View File

@ -159,7 +159,8 @@ class MediaStreamSignalingObserver {
class MediaStreamSignaling {
public:
MediaStreamSignaling(talk_base::Thread* signaling_thread,
MediaStreamSignalingObserver* stream_observer);
MediaStreamSignalingObserver* stream_observer,
cricket::ChannelManager* channel_manager);
virtual ~MediaStreamSignaling();
// Notify all referenced objects that MediaStreamSignaling will be torn

View File

@ -37,8 +37,11 @@
#include "talk/base/scoped_ptr.h"
#include "talk/base/stringutils.h"
#include "talk/base/thread.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/devices/fakedevicemanager.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"
#include "talk/session/media/channelmanager.h"
static const char kStreams[][8] = {"stream1", "stream2"};
static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
@ -285,13 +288,13 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
}
virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
uint32 ssrc) {
VideoTrackInterface* video_track,
uint32 ssrc) {
AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
}
virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track) {
AudioTrackInterface* audio_track) {
RemoveTrack(&remote_audio_tracks_, stream, audio_track);
}
@ -392,8 +395,10 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling {
public:
explicit MediaStreamSignalingForTest(MockSignalingObserver* observer)
: webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer) {
MediaStreamSignalingForTest(MockSignalingObserver* observer,
cricket::ChannelManager* channel_manager)
: webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer,
channel_manager) {
};
using webrtc::MediaStreamSignaling::GetOptionsForOffer;
@ -406,7 +411,12 @@ class MediaStreamSignalingTest: public testing::Test {
protected:
virtual void SetUp() {
observer_.reset(new MockSignalingObserver());
signaling_.reset(new MediaStreamSignalingForTest(observer_.get()));
channel_manager_.reset(
new cricket::ChannelManager(new cricket::FakeMediaEngine(),
new cricket::FakeDeviceManager(),
talk_base::Thread::Current()));
signaling_.reset(new MediaStreamSignalingForTest(observer_.get(),
channel_manager_.get()));
}
// Create a collection of streams.
@ -497,6 +507,9 @@ class MediaStreamSignalingTest: public testing::Test {
ASSERT_TRUE(stream->AddTrack(video_track));
}
// ChannelManager is used by VideoSource, so it should be released after all
// the video tracks. Putting it as the first private variable should ensure that.
talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
talk_base::scoped_refptr<StreamCollection> reference_collection_;
talk_base::scoped_ptr<MockSignalingObserver> observer_;
talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_;
@ -688,6 +701,9 @@ TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) {
observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks());
observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
ASSERT_EQ(1u, observer_->remote_streams()->count());
MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL);
// Create a session description based on another SDP with another
// MediaStream.

View File

@ -61,7 +61,6 @@ BEGIN_PROXY_MAP(VideoTrack)
PROXY_METHOD1(void, AddRenderer, VideoRendererInterface*)
PROXY_METHOD1(void, RemoveRenderer, VideoRendererInterface*)
PROXY_METHOD0(cricket::VideoRenderer*, FrameInput)
PROXY_CONSTMETHOD0(VideoSourceInterface*, GetSource)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)

View File

@ -303,7 +303,7 @@ bool PeerConnection::DoInitialize(
port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
mediastream_signaling_.reset(new MediaStreamSignaling(
factory_->signaling_thread(), this));
factory_->signaling_thread(), this, factory_->channel_manager()));
session_.reset(new WebRtcSession(factory_->channel_manager(),
factory_->signaling_thread(),

View File

@ -29,12 +29,12 @@
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/peerconnection.h"
#include "talk/app/webrtc/peerconnectionproxy.h"
#include "talk/app/webrtc/portallocatorfactory.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videosourceproxy.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/media/devices/dummydevicemanager.h"
@ -269,9 +269,8 @@ talk_base::scoped_refptr<VideoSourceInterface>
PeerConnectionFactory::CreateVideoSource_s(
cricket::VideoCapturer* capturer,
const MediaConstraintsInterface* constraints) {
talk_base::scoped_refptr<LocalVideoSource> source(
LocalVideoSource::Create(channel_manager_.get(), capturer,
constraints));
talk_base::scoped_refptr<VideoSource> source(
VideoSource::Create(channel_manager_.get(), capturer, constraints));
return VideoSourceProxy::Create(signaling_thread_, source);
}

View File

@ -29,12 +29,12 @@
#include "talk/app/webrtc/fakeportallocatorfactory.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
#include "talk/app/webrtc/test/testsdpstrings.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/base/gunit.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/sslstreamadapter.h"

View File

@ -0,0 +1,95 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/base/logging.h"
#include "talk/media/base/videoframe.h"
namespace webrtc {
// RemoteVideoCapturer keeps no resources of its own; the capture state and
// the current capture format live entirely in the cricket::VideoCapturer
// base class, so construction and destruction are no-ops.
RemoteVideoCapturer::RemoteVideoCapturer() {}
RemoteVideoCapturer::~RemoteVideoCapturer() {}
// Starts "capturing" by recording the requested format. No device is opened:
// frames arrive from the media channel, so Start only transitions state.
// Returns the resulting capture state; calling Start while already running is
// a warned no-op that leaves the state unchanged.
cricket::CaptureState RemoteVideoCapturer::Start(
    const cricket::VideoFormat& capture_format) {
  const cricket::CaptureState current_state = capture_state();
  if (current_state == cricket::CS_RUNNING) {
    LOG(LS_WARNING)
        << "RemoteVideoCapturer::Start called when it's already started.";
    return current_state;
  }

  LOG(LS_INFO) << "RemoteVideoCapturer::Start";
  // Remember the format; GetCaptureFormat() reports it while running.
  SetCaptureFormat(&capture_format);
  return cricket::CS_RUNNING;
}
// Stops the capturer: clears the stored capture format and publishes
// CS_STOPPED through the base class. Calling Stop while already stopped is a
// warned no-op.
void RemoteVideoCapturer::Stop() {
  if (capture_state() != cricket::CS_STOPPED) {
    LOG(LS_INFO) << "RemoteVideoCapturer::Stop";
    SetCaptureFormat(NULL);  // GetCaptureFormat() returns NULL when stopped.
    SetCaptureState(cricket::CS_STOPPED);
    return;
  }

  LOG(LS_WARNING)
      << "RemoteVideoCapturer::Stop called when it's already stopped.";
}
// A remote capturer is "running" exactly when the base-class capture state
// says so; there is no underlying device to query.
bool RemoteVideoCapturer::IsRunning() {
  return cricket::CS_RUNNING == capture_state();
}
bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
if (!fourccs)
return false;
fourccs->push_back(cricket::FOURCC_I420);
return true;
}
// Fills |best_format| for the requested |desired| format. Remote capture has
// no capability enumeration, so the desired resolution and frame interval are
// echoed back verbatim while the fourcc is pinned to I420 (the only format
// the media channel delivers). Returns false only when |best_format| is NULL.
bool RemoteVideoCapturer::GetBestCaptureFormat(
    const cricket::VideoFormat& desired, cricket::VideoFormat* best_format) {
  if (best_format == NULL) {
    return false;
  }

  best_format->fourcc = cricket::FOURCC_I420;
  best_format->width = desired.width;
  best_format->height = desired.height;
  best_format->interval = desired.interval;
  return true;
}
bool RemoteVideoCapturer::IsScreencast() const {
// Remote frames are currently always treated as camera video, regardless of
// what the sender captured.
// TODO(ronghuawu): what about remote screencast stream.
return false;
}
} // namespace webrtc

View File

@ -0,0 +1,65 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_
#define TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_
#include <vector>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videorenderer.h"
namespace webrtc {
// RemoteVideoCapturer implements a simple cricket::VideoCapturer which
// gets decoded remote video frames from media channel.
// It's used as the remote video source's VideoCapturer so that the remote video
// can be used as a cricket::VideoCapturer and in that way a remote video stream
// can implement the MediaStreamSourceInterface.
// Frames are injected by firing SignalVideoFrame on this capturer; Start/Stop
// only track state, no device is opened.
class RemoteVideoCapturer : public cricket::VideoCapturer {
public:
RemoteVideoCapturer();
virtual ~RemoteVideoCapturer();
// cricket::VideoCapturer implementation.
// Records |capture_format| and reports CS_RUNNING; warned no-op if already
// started.
virtual cricket::CaptureState Start(
const cricket::VideoFormat& capture_format) OVERRIDE;
// Clears the capture format and transitions to CS_STOPPED; warned no-op if
// already stopped.
virtual void Stop() OVERRIDE;
virtual bool IsRunning() OVERRIDE;
// Appends FOURCC_I420, the only delivered format; false if |fourccs| is NULL.
virtual bool GetPreferredFourccs(std::vector<uint32>* fourccs) OVERRIDE;
// Echoes |desired| back with fourcc forced to I420; false if |best_format|
// is NULL.
virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
cricket::VideoFormat* best_format) OVERRIDE;
// Always false for now; remote screencast is not distinguished.
virtual bool IsScreencast() const OVERRIDE;
private:
DISALLOW_COPY_AND_ASSIGN(RemoteVideoCapturer);
};
} // namespace webrtc
#endif // TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_

View File

@ -0,0 +1,132 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <string>
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/base/gunit.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
using cricket::CaptureState;
using cricket::VideoCapturer;
using cricket::VideoFormat;
using cricket::VideoFormatPod;
using cricket::VideoFrame;
static const int kMaxWaitMs = 1000;
static const VideoFormatPod kTestFormat =
{640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
// Test fixture for RemoteVideoCapturer. Connects to the capturer's state and
// frame signals so tests can observe state transitions and count delivered
// frames.
class RemoteVideoCapturerTest : public testing::Test,
public sigslot::has_slots<> {
protected:
RemoteVideoCapturerTest()
: captured_frame_num_(0),
capture_state_(cricket::CS_STOPPED) {}
virtual void SetUp() {
// Subscribe to the capturer's signals; handlers record the observations.
capturer_.SignalStateChange.connect(
this, &RemoteVideoCapturerTest::OnStateChange);
capturer_.SignalVideoFrame.connect(
this, &RemoteVideoCapturerTest::OnVideoFrame);
}
~RemoteVideoCapturerTest() {
// Disconnect explicitly so no signal fires into a destroyed fixture.
capturer_.SignalStateChange.disconnect(this);
capturer_.SignalVideoFrame.disconnect(this);
}
// Number of frames observed via SignalVideoFrame so far.
int captured_frame_num() const {
return captured_frame_num_;
}
// Last capture state observed via SignalStateChange.
CaptureState capture_state() const {
return capture_state_;
}
webrtc::RemoteVideoCapturer capturer_;
private:
void OnStateChange(VideoCapturer* capturer,
CaptureState capture_state) {
// The signal must originate from the fixture's own capturer.
EXPECT_EQ(&capturer_, capturer);
capture_state_ = capture_state;
}
void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) {
EXPECT_EQ(&capturer_, capturer);
++captured_frame_num_;
}
int captured_frame_num_;
CaptureState capture_state_;
};
// Start must publish CS_RUNNING through SignalStateChange and retain the
// requested format; Stop must publish CS_STOPPED and clear the format.
TEST_F(RemoteVideoCapturerTest, StartStop) {
// Start
EXPECT_TRUE(
capturer_.StartCapturing(VideoFormat(kTestFormat)));
EXPECT_TRUE_WAIT((cricket::CS_RUNNING == capture_state()), kMaxWaitMs);
EXPECT_EQ(VideoFormat(kTestFormat),
*capturer_.GetCaptureFormat());
EXPECT_TRUE(capturer_.IsRunning());
// Stop
capturer_.Stop();
EXPECT_TRUE_WAIT((cricket::CS_STOPPED == capture_state()), kMaxWaitMs);
// Stopped capturers report no capture format.
EXPECT_TRUE(NULL == capturer_.GetCaptureFormat());
}
// GetPreferredFourccs must reject a NULL output pointer and otherwise report
// exactly one supported format: I420.
TEST_F(RemoteVideoCapturerTest, GetPreferredFourccs) {
  EXPECT_FALSE(capturer_.GetPreferredFourccs(NULL));

  std::vector<uint32> supported_formats;
  EXPECT_TRUE(capturer_.GetPreferredFourccs(&supported_formats));
  EXPECT_EQ(1u, supported_formats.size());
  EXPECT_EQ(cricket::FOURCC_I420, supported_formats.at(0));
}
// GetBestCaptureFormat must reject a NULL output pointer, and otherwise echo
// the desired format back with the fourcc forced to I420.
TEST_F(RemoteVideoCapturerTest, GetBestCaptureFormat) {
  const VideoFormat desired(kTestFormat);
  EXPECT_FALSE(capturer_.GetBestCaptureFormat(desired, NULL));

  VideoFormat best_format;
  EXPECT_TRUE(capturer_.GetBestCaptureFormat(desired, &best_format));
  VideoFormat expected_format(kTestFormat);
  expected_format.fourcc = cricket::FOURCC_I420;
  EXPECT_EQ(expected_format, best_format);
}
// Each frame fired through SignalVideoFrame must reach connected observers
// exactly once, so the fixture's frame counter increments per injection.
TEST_F(RemoteVideoCapturerTest, InputFrame) {
EXPECT_EQ(0, captured_frame_num());
cricket::WebRtcVideoFrame test_frame;
capturer_.SignalVideoFrame(&capturer_, &test_frame);
EXPECT_EQ(1, captured_frame_num());
capturer_.SignalVideoFrame(&capturer_, &test_frame);
EXPECT_EQ(2, captured_frame_num());
}

View File

@ -94,6 +94,7 @@ const char StatsReport::kStatsValueNameRemoteAddress[] = "googRemoteAddress";
const char StatsReport::kStatsValueNameRetransmitBitrate[] =
"googRetransmitBitrate";
const char StatsReport::kStatsValueNameRtt[] = "googRtt";
const char StatsReport::kStatsValueNameSsrc[] = "ssrc";
const char StatsReport::kStatsValueNameTargetEncBitrate[] =
"googTargetEncBitrate";
const char StatsReport::kStatsValueNameTransmitBitrate[] =
@ -101,7 +102,8 @@ const char StatsReport::kStatsValueNameTransmitBitrate[] =
const char StatsReport::kStatsValueNameTransportId[] = "transportId";
const char StatsReport::kStatsValueNameTransportType[] = "googTransportType";
const char StatsReport::kStatsValueNameTrackId[] = "googTrackId";
const char StatsReport::kStatsValueNameSsrc[] = "ssrc";
const char StatsReport::kStatsValueNameTypingNoiseState[] =
"googTypingNoiseState";
const char StatsReport::kStatsValueNameWritable[] = "googWritable";
const char StatsReport::kStatsReportTypeSession[] = "googLibjingleSession";
@ -115,6 +117,7 @@ const char StatsReport::kStatsReportTypeCandidatePair[] = "googCandidatePair";
const char StatsReport::kStatsReportVideoBweId[] = "bweforvideo";
// Implementations of functions in statstypes.h
void StatsReport::AddValue(const std::string& name, const std::string& value) {
Value temp;
@ -200,6 +203,8 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
report->AddValue(StatsReport::kStatsValueNameEchoReturnLossEnhancement,
info.echo_return_loss_enhancement);
report->AddValue(StatsReport::kStatsValueNameCodecName, info.codec_name);
report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState,
info.typing_noise_detected);
}
void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {

View File

@ -151,6 +151,7 @@ class StatsReport {
static const char kStatsValueNameChannelId[];
static const char kStatsValueNameTrackId[];
static const char kStatsValueNameSsrc[];
static const char kStatsValueNameTypingNoiseState[];
};
typedef std::vector<StatsReport> StatsReports;

View File

@ -44,8 +44,9 @@ static const char kAudioTrack2[] = "audio2";
class FakeMediaStreamSignaling : public webrtc::MediaStreamSignaling,
public webrtc::MediaStreamSignalingObserver {
public:
FakeMediaStreamSignaling() :
webrtc::MediaStreamSignaling(talk_base::Thread::Current(), this) {
explicit FakeMediaStreamSignaling(cricket::ChannelManager* channel_manager) :
webrtc::MediaStreamSignaling(talk_base::Thread::Current(), this,
channel_manager) {
}
void SendAudioVideoStream1() {

View File

@ -25,7 +25,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localvideosource.h"
#include "talk/app/webrtc/videosource.h"
#include <vector>
@ -72,7 +72,7 @@ enum {
// Default resolution. If no constraint is specified, this is the resolution we
// will use.
static const cricket::VideoFormatPod kDefaultResolution =
static const cricket::VideoFormatPod kDefaultFormat =
{640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
// List of formats used if the camera doesn't support capability enumeration.
@ -276,7 +276,7 @@ const cricket::VideoFormat& GetBestCaptureFormat(
const std::vector<cricket::VideoFormat>& formats) {
ASSERT(formats.size() > 0);
int default_area = kDefaultResolution.width * kDefaultResolution.height;
int default_area = kDefaultFormat.width * kDefaultFormat.height;
std::vector<cricket::VideoFormat>::const_iterator it = formats.begin();
std::vector<cricket::VideoFormat>::const_iterator best_it = formats.begin();
@ -328,38 +328,72 @@ bool ExtractVideoOptions(const MediaConstraintsInterface* all_constraints,
return all_valid;
}
// Adapts a cricket::VideoCapturer into a cricket::VideoRenderer: frames
// rendered into this object are re-emitted through the capturer's
// SignalVideoFrame, letting a remote media channel feed a VideoSource.
// Fix: drop the unused private members width_ and height_, which were
// declared but never read or written anywhere in the class.
class FrameInputWrapper : public cricket::VideoRenderer {
 public:
  // Does not take ownership of |capturer|; it must outlive this wrapper.
  explicit FrameInputWrapper(cricket::VideoCapturer* capturer)
      : capturer_(capturer) {
    ASSERT(capturer_ != NULL);
  }

  virtual ~FrameInputWrapper() {}

  // VideoRenderer implementation.
  virtual bool SetSize(int width, int height, int reserved) OVERRIDE {
    // Size is ignored; each frame carries its own dimensions.
    return true;
  }

  virtual bool RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
    // Drop frames while the capturer is not running.
    if (!capturer_->IsRunning()) {
      return true;
    }

    // This signal will be made on media engine render thread. The clients
    // of this signal should have no assumptions on what thread this signal
    // come from.
    capturer_->SignalVideoFrame(capturer_, frame);
    return true;
  }

 private:
  cricket::VideoCapturer* capturer_;  // Not owned.

  DISALLOW_COPY_AND_ASSIGN(FrameInputWrapper);
};
} // anonymous namespace
namespace webrtc {
talk_base::scoped_refptr<LocalVideoSource> LocalVideoSource::Create(
talk_base::scoped_refptr<VideoSource> VideoSource::Create(
cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer,
const webrtc::MediaConstraintsInterface* constraints) {
ASSERT(channel_manager != NULL);
ASSERT(capturer != NULL);
talk_base::scoped_refptr<LocalVideoSource> source(
new talk_base::RefCountedObject<LocalVideoSource>(channel_manager,
capturer));
talk_base::scoped_refptr<VideoSource> source(
new talk_base::RefCountedObject<VideoSource>(channel_manager,
capturer));
source->Initialize(constraints);
return source;
}
LocalVideoSource::LocalVideoSource(cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer)
VideoSource::VideoSource(cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer)
: channel_manager_(channel_manager),
video_capturer_(capturer),
state_(kInitializing) {
channel_manager_->SignalVideoCaptureStateChange.connect(
this, &LocalVideoSource::OnStateChange);
this, &VideoSource::OnStateChange);
}
LocalVideoSource::~LocalVideoSource() {
VideoSource::~VideoSource() {
channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
channel_manager_->SignalVideoCaptureStateChange.disconnect(this);
}
void LocalVideoSource::Initialize(
void VideoSource::Initialize(
const webrtc::MediaConstraintsInterface* constraints) {
std::vector<cricket::VideoFormat> formats;
@ -371,7 +405,7 @@ void LocalVideoSource::Initialize(
// format from the constraints if any.
// Note that this only affects tab capturing, not desktop capturing,
// since desktop capturer does not respect the VideoFormat passed in.
formats.push_back(cricket::VideoFormat(kDefaultResolution));
formats.push_back(cricket::VideoFormat(kDefaultFormat));
} else {
// The VideoCapturer implementation doesn't support capability enumeration.
// We need to guess what the camera support.
@ -422,25 +456,34 @@ void LocalVideoSource::Initialize(
// Initialize hasn't succeeded until a successful state change has occurred.
}
void LocalVideoSource::AddSink(cricket::VideoRenderer* output) {
// Returns a renderer that forwards frames into this source's capturer.
// The returned pointer is owned by this VideoSource and stays valid for its
// lifetime.
cricket::VideoRenderer* VideoSource::FrameInput() {
// Defer creation of frame_input_ until it's needed, e.g. the local video
// sources will never need it.
if (!frame_input_) {
frame_input_.reset(new FrameInputWrapper(video_capturer_.get()));
}
return frame_input_.get();
}
void VideoSource::AddSink(cricket::VideoRenderer* output) {
channel_manager_->AddVideoRenderer(video_capturer_.get(), output);
}
void LocalVideoSource::RemoveSink(cricket::VideoRenderer* output) {
void VideoSource::RemoveSink(cricket::VideoRenderer* output) {
channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output);
}
// OnStateChange listens to the ChannelManager::SignalVideoCaptureStateChange.
// This signal is triggered for all video capturers. Not only the one we are
// interested in.
void LocalVideoSource::OnStateChange(cricket::VideoCapturer* capturer,
void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
cricket::CaptureState capture_state) {
if (capturer == video_capturer_.get()) {
SetState(GetReadyState(capture_state));
}
}
void LocalVideoSource::SetState(SourceState new_state) {
void VideoSource::SetState(SourceState new_state) {
if (VERIFY(state_ != new_state)) {
state_ = new_state;
FireOnChanged();

View File

@ -25,18 +25,19 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_
#define TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_
#ifndef TALK_APP_WEBRTC_VIDEOSOURCE_H_
#define TALK_APP_WEBRTC_VIDEOSOURCE_H_
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "talk/app/webrtc/videotrackrenderers.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/sigslot.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
// LocalVideoSource implements VideoSourceInterface. It owns a
// VideoSource implements VideoSourceInterface. It owns a
// cricket::VideoCapturer and make sure the camera is started at a resolution
// that honors the constraints.
// The state is set depending on the result of starting the capturer.
@ -53,20 +54,21 @@ namespace webrtc {
class MediaConstraintsInterface;
class LocalVideoSource : public Notifier<VideoSourceInterface>,
public sigslot::has_slots<> {
class VideoSource : public Notifier<VideoSourceInterface>,
public sigslot::has_slots<> {
public:
// Creates an instance of LocalVideoSource.
// LocalVideoSource take ownership of |capturer|.
// Creates an instance of VideoSource.
// VideoSource take ownership of |capturer|.
// |constraints| can be NULL and in that case the camera is opened using a
// default resolution.
static talk_base::scoped_refptr<LocalVideoSource> Create(
static talk_base::scoped_refptr<VideoSource> Create(
cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer,
const webrtc::MediaConstraintsInterface* constraints);
virtual SourceState state() const { return state_; }
virtual const cricket::VideoOptions* options() const { return &options_; }
virtual cricket::VideoRenderer* FrameInput();
virtual cricket::VideoCapturer* GetVideoCapturer() {
return video_capturer_.get();
@ -77,18 +79,19 @@ class LocalVideoSource : public Notifier<VideoSourceInterface>,
virtual void RemoveSink(cricket::VideoRenderer* output);
protected:
LocalVideoSource(cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer);
~LocalVideoSource();
VideoSource(cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer);
virtual ~VideoSource();
void Initialize(const webrtc::MediaConstraintsInterface* constraints);
private:
void Initialize(const webrtc::MediaConstraintsInterface* constraints);
void OnStateChange(cricket::VideoCapturer* capturer,
cricket::CaptureState capture_state);
void SetState(SourceState new_state);
cricket::ChannelManager* channel_manager_;
talk_base::scoped_ptr<cricket::VideoCapturer> video_capturer_;
talk_base::scoped_ptr<cricket::VideoRenderer> frame_input_;
cricket::VideoFormat format_;
cricket::VideoOptions options_;
@ -97,4 +100,4 @@ class LocalVideoSource : public Notifier<VideoSourceInterface>,
} // namespace webrtc
#endif // TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_
#endif // TALK_APP_WEBRTC_VIDEOSOURCE_H_

View File

@ -25,20 +25,21 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/app/webrtc/localvideosource.h"
#include <string>
#include <vector>
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/base/gunit.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/base/fakevideorenderer.h"
#include "talk/media/devices/fakedevicemanager.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/session/media/channelmanager.h"
using webrtc::FakeConstraints;
using webrtc::LocalVideoSource;
using webrtc::VideoSource;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
using webrtc::ObserverInterface;
@ -123,9 +124,9 @@ class StateObserver : public ObserverInterface {
talk_base::scoped_refptr<VideoSourceInterface> source_;
};
class LocalVideoSourceTest : public testing::Test {
class VideoSourceTest : public testing::Test {
protected:
LocalVideoSourceTest()
VideoSourceTest()
: channel_manager_(new cricket::ChannelManager(
new cricket::FakeMediaEngine(),
new cricket::FakeDeviceManager(), talk_base::Thread::Current())) {
@ -136,39 +137,39 @@ class LocalVideoSourceTest : public testing::Test {
capturer_ = new TestVideoCapturer();
}
void CreateLocalVideoSource() {
CreateLocalVideoSource(NULL);
void CreateVideoSource() {
CreateVideoSource(NULL);
}
void CreateLocalVideoSource(
void CreateVideoSource(
const webrtc::MediaConstraintsInterface* constraints) {
// VideoSource take ownership of |capturer_|
local_source_ = LocalVideoSource::Create(channel_manager_.get(),
capturer_,
constraints);
source_ = VideoSource::Create(channel_manager_.get(),
capturer_,
constraints);
ASSERT_TRUE(local_source_.get() != NULL);
EXPECT_EQ(capturer_, local_source_->GetVideoCapturer());
ASSERT_TRUE(source_.get() != NULL);
EXPECT_EQ(capturer_, source_->GetVideoCapturer());
state_observer_.reset(new StateObserver(local_source_));
local_source_->RegisterObserver(state_observer_.get());
local_source_->AddSink(&renderer_);
state_observer_.reset(new StateObserver(source_));
source_->RegisterObserver(state_observer_.get());
source_->AddSink(&renderer_);
}
TestVideoCapturer* capturer_; // Raw pointer. Owned by local_source_.
TestVideoCapturer* capturer_; // Raw pointer. Owned by source_.
cricket::FakeVideoRenderer renderer_;
talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
talk_base::scoped_ptr<StateObserver> state_observer_;
talk_base::scoped_refptr<LocalVideoSource> local_source_;
talk_base::scoped_refptr<VideoSource> source_;
};
// Test that a LocalVideoSource transition to kLive state when the capture
// Test that a VideoSource transition to kLive state when the capture
// device have started and kEnded if it is stopped.
// It also test that an output can receive video frames.
TEST_F(LocalVideoSourceTest, StartStop) {
TEST_F(VideoSourceTest, StartStop) {
// Initialize without constraints.
CreateLocalVideoSource();
CreateVideoSource();
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
@ -180,10 +181,41 @@ TEST_F(LocalVideoSourceTest, StartStop) {
kMaxWaitMs);
}
// Test that a LocalVideoSource transition to kEnded if the capture device
// Test start stop with a remote VideoSource - the video source that has a
// RemoteVideoCapturer and takes video frames from FrameInput.
TEST_F(VideoSourceTest, StartStopRemote) {
// Will use RemoteVideoCapturer.
delete capturer_;
source_ = VideoSource::Create(channel_manager_.get(),
new webrtc::RemoteVideoCapturer(),
NULL);
ASSERT_TRUE(source_.get() != NULL);
EXPECT_TRUE(NULL != source_->GetVideoCapturer());
state_observer_.reset(new StateObserver(source_));
source_->RegisterObserver(state_observer_.get());
source_->AddSink(&renderer_);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
cricket::VideoRenderer* frameinput = source_->FrameInput();
cricket::WebRtcVideoFrame test_frame;
frameinput->SetSize(1280, 720, 0);
frameinput->RenderFrame(&test_frame);
EXPECT_EQ(1, renderer_.num_rendered_frames());
source_->GetVideoCapturer()->Stop();
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
}
// Test that a VideoSource transition to kEnded if the capture device
// fails.
TEST_F(LocalVideoSourceTest, CameraFailed) {
CreateLocalVideoSource();
TEST_F(VideoSourceTest, CameraFailed) {
CreateVideoSource();
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
@ -194,13 +226,13 @@ TEST_F(LocalVideoSourceTest, CameraFailed) {
// Test that the capture output is CIF if we set max constraints to CIF.
// and the capture device support CIF.
TEST_F(LocalVideoSourceTest, MandatoryConstraintCif5Fps) {
TEST_F(VideoSourceTest, MandatoryConstraintCif5Fps) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 5);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -212,7 +244,7 @@ TEST_F(LocalVideoSourceTest, MandatoryConstraintCif5Fps) {
// Test that the capture output is 720P if the camera support it and the
// optional constraint is set to 720P.
TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) {
TEST_F(VideoSourceTest, MandatoryMinVgaOptional720P) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
@ -220,7 +252,7 @@ TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) {
constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio,
1280.0 / 720);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -233,7 +265,7 @@ TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) {
// Test that the capture output have aspect ratio 4:3 if a mandatory constraint
// require it even if an optional constraint request a higher resolution
// that don't have this aspect ratio.
TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) {
TEST_F(VideoSourceTest, MandatoryAspectRatio4To3) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
@ -241,7 +273,7 @@ TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) {
640.0 / 480);
constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -254,20 +286,20 @@ TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) {
// Test that the source state transition to kEnded if the mandatory aspect ratio
// is set higher than supported.
TEST_F(LocalVideoSourceTest, MandatoryAspectRatioTooHigh) {
TEST_F(VideoSourceTest, MandatoryAspectRatioTooHigh) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, 2);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
}
// Test that the source ignores an optional aspect ratio that is higher than
// supported.
TEST_F(LocalVideoSourceTest, OptionalAspectRatioTooHigh) {
TEST_F(VideoSourceTest, OptionalAspectRatioTooHigh) {
FakeConstraints constraints;
constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 2);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -278,10 +310,10 @@ TEST_F(LocalVideoSourceTest, OptionalAspectRatioTooHigh) {
// Test that the source starts video with the default resolution if the
// camera doesn't support capability enumeration and there are no constraints.
TEST_F(LocalVideoSourceTest, NoCameraCapability) {
TEST_F(VideoSourceTest, NoCameraCapability) {
capturer_->TestWithoutCameraFormats();
CreateLocalVideoSource();
CreateVideoSource();
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -294,7 +326,7 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability) {
// Test that the source can start the video and get the requested aspect ratio
// if the camera doesn't support capability enumeration and the aspect ratio is
// set.
TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) {
TEST_F(VideoSourceTest, NoCameraCapability16To9Ratio) {
capturer_->TestWithoutCameraFormats();
FakeConstraints constraints;
@ -303,7 +335,7 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) {
constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio,
requested_aspect_ratio);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -313,26 +345,26 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) {
// Test that the source state transitions to kEnded if an unknown mandatory
// constraint is found.
TEST_F(LocalVideoSourceTest, InvalidMandatoryConstraint) {
TEST_F(VideoSourceTest, InvalidMandatoryConstraint) {
FakeConstraints constraints;
constraints.AddMandatory("weird key", 640);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
}
// Test that the source ignores an unknown optional constraint.
TEST_F(LocalVideoSourceTest, InvalidOptionalConstraint) {
TEST_F(VideoSourceTest, InvalidOptionalConstraint) {
FakeConstraints constraints;
constraints.AddOptional("weird key", 640);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
}
TEST_F(LocalVideoSourceTest, SetValidOptionValues) {
TEST_F(VideoSourceTest, SetValidOptionValues) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kNoiseReduction, "false");
constraints.AddMandatory(
@ -342,90 +374,90 @@ TEST_F(LocalVideoSourceTest, SetValidOptionValues) {
constraints.AddOptional(
MediaConstraintsInterface::kCpuOveruseDetection, "true");
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
bool value = true;
EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(value);
EXPECT_TRUE(local_source_->options()->
EXPECT_TRUE(source_->options()->
video_temporal_layer_screencast.Get(&value));
EXPECT_FALSE(value);
EXPECT_TRUE(local_source_->options()->video_leaky_bucket.Get(&value));
EXPECT_TRUE(source_->options()->video_leaky_bucket.Get(&value));
EXPECT_TRUE(value);
EXPECT_TRUE(local_source_->options()->
EXPECT_TRUE(source_->options()->
cpu_overuse_detection.GetWithDefaultIfUnset(false));
}
TEST_F(LocalVideoSourceTest, OptionNotSet) {
TEST_F(VideoSourceTest, OptionNotSet) {
FakeConstraints constraints;
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
bool value;
EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(local_source_->options()->cpu_overuse_detection.Get(&value));
EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(source_->options()->cpu_overuse_detection.Get(&value));
}
TEST_F(LocalVideoSourceTest, MandatoryOptionOverridesOptional) {
TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) {
FakeConstraints constraints;
constraints.AddMandatory(
MediaConstraintsInterface::kNoiseReduction, true);
constraints.AddOptional(
MediaConstraintsInterface::kNoiseReduction, false);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
bool value = false;
EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(value);
EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value));
}
TEST_F(LocalVideoSourceTest, InvalidOptionKeyOptional) {
TEST_F(VideoSourceTest, InvalidOptionKeyOptional) {
FakeConstraints constraints;
constraints.AddOptional(
MediaConstraintsInterface::kNoiseReduction, false);
constraints.AddOptional("invalidKey", false);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
bool value = true;
EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(value);
}
TEST_F(LocalVideoSourceTest, InvalidOptionKeyMandatory) {
TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) {
FakeConstraints constraints;
constraints.AddMandatory(
MediaConstraintsInterface::kNoiseReduction, false);
constraints.AddMandatory("invalidKey", false);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
bool value;
EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
}
TEST_F(LocalVideoSourceTest, InvalidOptionValueOptional) {
TEST_F(VideoSourceTest, InvalidOptionValueOptional) {
FakeConstraints constraints;
constraints.AddOptional(
MediaConstraintsInterface::kNoiseReduction, "true");
constraints.AddOptional(
MediaConstraintsInterface::kLeakyBucket, "not boolean");
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
bool value = false;
EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(value);
EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value));
}
TEST_F(LocalVideoSourceTest, InvalidOptionValueMandatory) {
TEST_F(VideoSourceTest, InvalidOptionValueMandatory) {
FakeConstraints constraints;
// Optional constraints should be ignored if the mandatory constraints fail.
constraints.AddOptional(
@ -434,15 +466,15 @@ TEST_F(LocalVideoSourceTest, InvalidOptionValueMandatory) {
constraints.AddMandatory(
MediaConstraintsInterface::kLeakyBucket, "True");
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
bool value;
EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
}
TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) {
TEST_F(VideoSourceTest, MixedOptionsAndConstraints) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
@ -453,7 +485,7 @@ TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) {
constraints.AddOptional(
MediaConstraintsInterface::kNoiseReduction, true);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -463,18 +495,18 @@ TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) {
EXPECT_EQ(5, format->framerate());
bool value = true;
EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value));
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
EXPECT_FALSE(value);
EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value));
EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value));
}
// Tests that the source starts video with the default resolution for
// screencast if no constraint is set.
TEST_F(LocalVideoSourceTest, ScreencastResolutionNoConstraint) {
TEST_F(VideoSourceTest, ScreencastResolutionNoConstraint) {
capturer_->TestWithoutCameraFormats();
capturer_->SetScreencast(true);
CreateLocalVideoSource();
CreateVideoSource();
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -486,7 +518,7 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionNoConstraint) {
// Tests that the source starts video with the max width and height set by
// constraints for screencast.
TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) {
TEST_F(VideoSourceTest, ScreencastResolutionWithConstraint) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 480);
constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 270);
@ -494,7 +526,7 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) {
capturer_->TestWithoutCameraFormats();
capturer_->SetScreencast(true);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
@ -504,21 +536,21 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) {
EXPECT_EQ(30, format->framerate());
}
TEST_F(LocalVideoSourceTest, MandatorySubOneFpsConstraints) {
TEST_F(VideoSourceTest, MandatorySubOneFpsConstraints) {
FakeConstraints constraints;
constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 0.5);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
ASSERT_TRUE(capturer_->GetCaptureFormat() == NULL);
}
TEST_F(LocalVideoSourceTest, OptionalSubOneFpsConstraints) {
TEST_F(VideoSourceTest, OptionalSubOneFpsConstraints) {
FakeConstraints constraints;
constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 0.5);
CreateLocalVideoSource(&constraints);
CreateVideoSource(&constraints);
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();

View File

@ -47,6 +47,7 @@ class VideoSourceInterface : public MediaSourceInterface {
virtual void AddSink(cricket::VideoRenderer* output) = 0;
virtual void RemoveSink(cricket::VideoRenderer* output) = 0;
virtual const cricket::VideoOptions* options() const = 0;
virtual cricket::VideoRenderer* FrameInput() = 0;
protected:
virtual ~VideoSourceInterface() {}

View File

@ -42,6 +42,7 @@ BEGIN_PROXY_MAP(VideoSource)
PROXY_METHOD1(void, AddSink, cricket::VideoRenderer*)
PROXY_METHOD1(void, RemoveSink, cricket::VideoRenderer*)
PROXY_CONSTMETHOD0(const cricket::VideoOptions*, options)
PROXY_METHOD0(cricket::VideoRenderer*, FrameInput)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()

View File

@ -39,12 +39,12 @@ VideoTrack::VideoTrack(const std::string& label,
: MediaStreamTrack<VideoTrackInterface>(label),
video_source_(video_source) {
if (video_source_)
video_source_->AddSink(FrameInput());
video_source_->AddSink(&renderers_);
}
VideoTrack::~VideoTrack() {
if (video_source_)
video_source_->RemoveSink(FrameInput());
video_source_->RemoveSink(&renderers_);
}
std::string VideoTrack::kind() const {
@ -59,10 +59,6 @@ void VideoTrack::RemoveRenderer(VideoRendererInterface* renderer) {
renderers_.RemoveRenderer(renderer);
}
cricket::VideoRenderer* VideoTrack::FrameInput() {
return &renderers_;
}
bool VideoTrack::set_enabled(bool enable) {
renderers_.SetEnabled(enable);
return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);

View File

@ -44,7 +44,6 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface> {
virtual void AddRenderer(VideoRendererInterface* renderer);
virtual void RemoveRenderer(VideoRendererInterface* renderer);
virtual cricket::VideoRenderer* FrameInput();
virtual VideoSourceInterface* GetSource() const {
return video_source_.get();
}

View File

@ -28,38 +28,53 @@
#include <string>
#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/base/scoped_ptr.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/devices/fakedevicemanager.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/session/media/channelmanager.h"
using webrtc::FakeVideoTrackRenderer;
using webrtc::VideoSource;
using webrtc::VideoTrack;
using webrtc::VideoTrackInterface;
// Test adding renderers to a video track and render to them by providing
// VideoFrames to the track frame input.
// frames to the source.
TEST(VideoTrack, RenderVideo) {
static const char kVideoTrackId[] = "track_id";
talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
channel_manager_.reset(
new cricket::ChannelManager(new cricket::FakeMediaEngine(),
new cricket::FakeDeviceManager(),
talk_base::Thread::Current()));
ASSERT_TRUE(channel_manager_->Init());
talk_base::scoped_refptr<VideoTrackInterface> video_track(
VideoTrack::Create(kVideoTrackId, NULL));
VideoTrack::Create(kVideoTrackId,
VideoSource::Create(channel_manager_.get(),
new webrtc::RemoteVideoCapturer(),
NULL)));
// FakeVideoTrackRenderer register itself to |video_track|
talk_base::scoped_ptr<FakeVideoTrackRenderer> renderer_1(
new FakeVideoTrackRenderer(video_track.get()));
cricket::VideoRenderer* render_input = video_track->FrameInput();
cricket::VideoRenderer* render_input = video_track->GetSource()->FrameInput();
ASSERT_FALSE(render_input == NULL);
render_input->SetSize(123, 123, 0);
EXPECT_EQ(1, renderer_1->num_set_sizes());
EXPECT_EQ(123, renderer_1->width());
EXPECT_EQ(123, renderer_1->height());
cricket::WebRtcVideoFrame frame;
frame.InitToBlack(123, 123, 1, 1, 0, 0);
render_input->RenderFrame(&frame);
EXPECT_EQ(1, renderer_1->num_rendered_frames());
EXPECT_EQ(1, renderer_1->num_set_sizes());
EXPECT_EQ(123, renderer_1->width());
EXPECT_EQ(123, renderer_1->height());
// FakeVideoTrackRenderer register itself to |video_track|
talk_base::scoped_ptr<FakeVideoTrackRenderer> renderer_2(
new FakeVideoTrackRenderer(video_track.get()));

View File

@ -2121,7 +2121,7 @@ bool ParseMediaDescription(const std::string& message,
// this for backwards-compatibility. Once we don't need that any
// more, remove this.
bool support_dc_sdp_bandwidth_temporarily = true;
if (!support_dc_sdp_bandwidth_temporarily) {
if (content.get() && !support_dc_sdp_bandwidth_temporarily) {
content->set_bandwidth(cricket::kAutoBandwidth);
}
} else {

View File

@ -275,7 +275,8 @@ class WebRtcSessionTest : public testing::Test {
ss_scope_(fss_.get()),
stun_server_(talk_base::Thread::Current(), kStunAddr),
allocator_(&network_manager_, kStunAddr,
SocketAddress(), SocketAddress(), SocketAddress()) {
SocketAddress(), SocketAddress(), SocketAddress()),
mediastream_signaling_(channel_manager_.get()) {
tdesc_factory_->set_protocol(cricket::ICEPROTO_HYBRID);
allocator_.set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
cricket::PORTALLOCATOR_DISABLE_RELAY |
@ -2485,9 +2486,9 @@ TEST_F(WebRtcSessionTest, TestRtpDataChannelConstraintTakesPrecedence) {
EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type());
}
// Test fails on windows. https://code.google.com/p/webrtc/issues/detail?id=2374
TEST_F(WebRtcSessionTest,
DISABLED_TestCreateOfferWithSctpEnabledWithoutStreams) {
TEST_F(WebRtcSessionTest, TestCreateOfferWithSctpEnabledWithoutStreams) {
MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp);
constraints_.reset(new FakeConstraints());
constraints_->AddOptional(
webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true);

View File

@ -28,7 +28,11 @@
#ifndef TALK_BASE_GUNIT_PROD_H_
#define TALK_BASE_GUNIT_PROD_H_
#if defined(ANDROID) || defined (GTEST_RELATIVE_PATH)
#if defined(ANDROID)
// Android doesn't use gtest at all, so anything that relies on gtest should
// check this define first.
#define NO_GTEST
#elif defined (GTEST_RELATIVE_PATH)
#include "gtest/gtest_prod.h"
#else
#include "testing/base/gunit_prod.h"

View File

@ -1265,7 +1265,14 @@ void PhysicalSocketServer::Remove(Dispatcher *pdispatcher) {
DispatcherList::iterator pos = std::find(dispatchers_.begin(),
dispatchers_.end(),
pdispatcher);
ASSERT(pos != dispatchers_.end());
// We silently ignore duplicate calls to Add, so we should silently ignore
// the (expected) symmetric calls to Remove. Note that this may still hide
// a real issue, so we at least log a warning about it.
if (pos == dispatchers_.end()) {
LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown "
<< "dispatcher, potentially from a duplicate call to Add.";
return;
}
size_t index = pos - dispatchers_.begin();
dispatchers_.erase(pos);
for (IteratorList::iterator it = iterators_.begin(); it != iterators_.end();

View File

@ -1136,8 +1136,6 @@
'app/webrtc/jsepsessiondescription.h',
'app/webrtc/localaudiosource.cc',
'app/webrtc/localaudiosource.h',
'app/webrtc/localvideosource.cc',
'app/webrtc/localvideosource.h',
'app/webrtc/mediaconstraintsinterface.cc',
'app/webrtc/mediaconstraintsinterface.h',
'app/webrtc/mediastream.cc',
@ -1161,10 +1159,14 @@
'app/webrtc/portallocatorfactory.cc',
'app/webrtc/portallocatorfactory.h',
'app/webrtc/proxy.h',
'app/webrtc/remotevideocapturer.cc',
'app/webrtc/remotevideocapturer.h',
'app/webrtc/statscollector.cc',
'app/webrtc/statscollector.h',
'app/webrtc/statstypes.h',
'app/webrtc/streamcollection.h',
'app/webrtc/videosource.cc',
'app/webrtc/videosource.h',
'app/webrtc/videosourceinterface.h',
'app/webrtc/videosourceproxy.h',
'app/webrtc/videotrack.cc',

View File

@ -379,7 +379,6 @@
'app/webrtc/dtmfsender_unittest.cc',
'app/webrtc/jsepsessiondescription_unittest.cc',
'app/webrtc/localaudiosource_unittest.cc',
'app/webrtc/localvideosource_unittest.cc',
# 'app/webrtc/mediastream_unittest.cc',
# 'app/webrtc/mediastreamhandler_unittest.cc',
'app/webrtc/mediastreamsignaling_unittest.cc',
@ -387,6 +386,7 @@
'app/webrtc/peerconnectionfactory_unittest.cc',
'app/webrtc/peerconnectioninterface_unittest.cc',
# 'app/webrtc/peerconnectionproxy_unittest.cc',
'app/webrtc/remotevideocapturer_unittest.cc',
'app/webrtc/test/fakeaudiocapturemodule.cc',
'app/webrtc/test/fakeaudiocapturemodule.h',
'app/webrtc/test/fakeaudiocapturemodule_unittest.cc',
@ -397,6 +397,7 @@
'app/webrtc/test/fakevideotrackrenderer.h',
'app/webrtc/test/mockpeerconnectionobservers.h',
'app/webrtc/test/testsdpstrings.h',
'app/webrtc/videosource_unittest.cc',
'app/webrtc/videotrack_unittest.cc',
'app/webrtc/webrtcsdp_unittest.cc',
'app/webrtc/webrtcsession_unittest.cc',

View File

@ -519,7 +519,8 @@ struct VoiceSenderInfo {
echo_delay_median_ms(0),
echo_delay_std_ms(0),
echo_return_loss(0),
echo_return_loss_enhancement(0) {
echo_return_loss_enhancement(0),
typing_noise_detected(false) {
}
uint32 ssrc;
@ -537,6 +538,7 @@ struct VoiceSenderInfo {
int echo_delay_std_ms;
int echo_return_loss;
int echo_return_loss_enhancement;
bool typing_noise_detected;
};
struct VoiceReceiverInfo {

View File

@ -259,10 +259,10 @@ class CompositeMediaEngine : public MediaEngineInterface {
}
virtual void SetVoiceLogging(int min_sev, const char* filter) {
return voice_.SetLogging(min_sev, filter);
voice_.SetLogging(min_sev, filter);
}
virtual void SetVideoLogging(int min_sev, const char* filter) {
return video_.SetLogging(min_sev, filter);
video_.SetLogging(min_sev, filter);
}
virtual bool RegisterVoiceProcessor(uint32 ssrc,

View File

@ -149,9 +149,9 @@ bool DeviceManager::GetAudioOutputDevice(const std::string& name, Device* out) {
bool DeviceManager::GetVideoCaptureDevices(std::vector<Device>* devices) {
devices->clear();
#if defined(IOS)
// On iOS, we treat the camera(s) as a single device. Even if there are
// multiple cameras, that's abstracted away at a higher level.
#if defined(ANDROID) || defined(IOS)
// On Android and iOS, we treat the camera(s) as a single device. Even if
// there are multiple cameras, that's abstracted away at a higher level.
Device dev("camera", "1"); // name and ID
devices->push_back(dev);
return true;

View File

@ -1498,6 +1498,7 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
desired_playout_(false),
nack_enabled_(false),
playout_(false),
typing_noise_detected_(false),
desired_send_(SEND_NOTHING),
send_(SEND_NOTHING),
default_receive_ssrc_(0) {
@ -2818,6 +2819,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
sinfo.echo_return_loss_enhancement = echo_return_loss_enhancement;
sinfo.echo_delay_median_ms = echo_delay_median_ms;
sinfo.echo_delay_std_ms = echo_delay_std_ms;
sinfo.typing_noise_detected = typing_noise_detected_;
info->senders.push_back(sinfo);
}
@ -2926,6 +2928,13 @@ bool WebRtcVoiceMediaChannel::FindSsrc(int channel_num, uint32* ssrc) {
}
void WebRtcVoiceMediaChannel::OnError(uint32 ssrc, int error) {
#ifdef USE_WEBRTC_DEV_BRANCH
if (error == VE_TYPING_NOISE_WARNING) {
typing_noise_detected_ = true;
} else if (error == VE_TYPING_NOISE_OFF_WARNING) {
typing_noise_detected_ = false;
}
#endif
SignalMediaError(ssrc, WebRtcErrorToChannelError(error));
}

View File

@ -404,6 +404,7 @@ class WebRtcVoiceMediaChannel
bool desired_playout_;
bool nack_enabled_;
bool playout_;
bool typing_noise_detected_;
SendFlags desired_send_;
SendFlags send_;

View File

@ -210,6 +210,12 @@ void SessionManager::OnIncomingResponse(const buzz::XmlElement* orig_stanza,
}
Session* session = FindSession(msg.sid, msg.to);
if (!session) {
// Also try the QN_FROM in the response stanza, in case we sent the request
// to a bare JID but got the response from a full JID.
std::string ack_from = response_stanza->Attr(buzz::QN_FROM);
session = FindSession(msg.sid, ack_from);
}
if (session) {
session->OnIncomingResponse(orig_stanza, response_stanza, msg);
}

View File

@ -32,7 +32,6 @@
#include <string>
#include <vector>
#include "talk/base/gunit_prod.h"
#include "talk/p2p/client/basicportallocator.h"
class HttpPortAllocatorTest_TestSessionRequestUrl_Test;
@ -129,6 +128,9 @@ class HttpPortAllocatorSessionBase : public BasicPortAllocatorSession {
virtual void SendSessionRequest(const std::string& host, int port) = 0;
virtual void ReceiveSessionResponse(const std::string& response);
// Made public for testing. Should be protected.
std::string GetSessionRequestUrl();
protected:
virtual void GetPortConfigurations();
void TryCreateRelaySession();
@ -137,11 +139,7 @@ class HttpPortAllocatorSessionBase : public BasicPortAllocatorSession {
BasicPortAllocatorSession::allocator());
}
std::string GetSessionRequestUrl();
private:
FRIEND_TEST(::HttpPortAllocatorTest, TestSessionRequestUrl);
std::vector<std::string> relay_hosts_;
std::vector<talk_base::SocketAddress> stun_hosts_;
std::string relay_token_;

View File

@ -163,9 +163,6 @@ class ChannelManager : public talk_base::MessageHandler,
bool monitoring() const { return monitoring_; }
// Sets the local renderer where to renderer the local camera.
bool SetLocalRenderer(VideoRenderer* renderer);
// Sets the externally provided video capturer. The ssrc is the ssrc of the
// (video) stream for which the video capturer should be set.
bool SetVideoCapturer(VideoCapturer* capturer);
bool capturing() const { return capturing_; }
// Configures the logging output of the mediaengine(s).

View File

@ -363,6 +363,7 @@ const StaticQName QN_ATTR_STATUS = { STR_EMPTY, "status" };
// Presence connection status
const char STR_PSTN_CONFERENCE_STATUS_CONNECTING[] = "connecting";
const char STR_PSTN_CONFERENCE_STATUS_JOINING[] = "joining";
const char STR_PSTN_CONFERENCE_STATUS_CONNECTED[] = "connected";
const char STR_PSTN_CONFERENCE_STATUS_HANGUP[] = "hangup";

View File

@ -322,6 +322,7 @@ extern const StaticQName QN_ATTR_STATUS;
// Presence connection status
extern const char STR_PSTN_CONFERENCE_STATUS_CONNECTING[];
extern const char STR_PSTN_CONFERENCE_STATUS_JOINING[];
extern const char STR_PSTN_CONFERENCE_STATUS_CONNECTED[];
extern const char STR_PSTN_CONFERENCE_STATUS_HANGUP[];

View File

@ -238,7 +238,10 @@ class PubSubStateClient : public sigslot::has_slots<> {
}
PubSubStateChange<C> change;
change.publisher_nick = info.publisher_nick;
if (!retracted) {
// Retracts do not have publisher information.
change.publisher_nick = info.publisher_nick;
}
change.published_nick = info.published_nick;
change.old_state = old_state;
change.new_state = new_state;

View File

@ -81,9 +81,14 @@ enum XmppPresenceAvailable {
enum XmppPresenceConnectionStatus {
XMPP_CONNECTION_STATUS_UNKNOWN = 0,
// Status set by the server while the user is being rung.
XMPP_CONNECTION_STATUS_CONNECTING = 1,
XMPP_CONNECTION_STATUS_CONNECTED = 2,
XMPP_CONNECTION_STATUS_HANGUP = 3,
// Status set by the client when the user has accepted the ring but before
// the client has joined the call.
XMPP_CONNECTION_STATUS_JOINING = 2,
// Status set by the client as part of joining the call.
XMPP_CONNECTION_STATUS_CONNECTED = 3,
XMPP_CONNECTION_STATUS_HANGUP = 4,
};
//! Presence Information

View File

@ -300,6 +300,8 @@ XmppPresenceImpl::connection_status() const {
return XMPP_CONNECTION_STATUS_CONNECTING;
else if (status == STR_PSTN_CONFERENCE_STATUS_CONNECTED)
return XMPP_CONNECTION_STATUS_CONNECTED;
else if (status == STR_PSTN_CONFERENCE_STATUS_JOINING)
return XMPP_CONNECTION_STATUS_JOINING;
else if (status == STR_PSTN_CONFERENCE_STATUS_HANGUP)
return XMPP_CONNECTION_STATUS_HANGUP;
}
@ -349,8 +351,11 @@ XmppPresenceImpl::set_raw_xml(const XmlElement * xml) {
xml->Name() != QN_PRESENCE)
return XMPP_RETURN_BADARGUMENT;
raw_xml_.reset(new XmlElement(*xml));
const std::string& type = xml->Attr(QN_TYPE);
if (type != STR_EMPTY && type != "unavailable")
return XMPP_RETURN_BADARGUMENT;
raw_xml_.reset(new XmlElement(*xml));
return XMPP_RETURN_OK;
}