Remove HybridVideoEngine.

This is currently unused dead code.

R=pthatcher@webrtc.org
BUG=

Review URL: https://webrtc-codereview.appspot.com/24409004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7055 4adac7df-926f-26a2-2b94-8c16560cd09d
pbos@webrtc.org 2014-09-04 07:32:26 +00:00
parent 9d453931c5
commit bcb6bcfe6c
4 changed files with 0 additions and 1127 deletions


@@ -487,8 +487,6 @@
'media/base/filemediaengine.cc',
'media/base/filemediaengine.h',
'media/base/hybriddataengine.h',
'media/base/hybridvideoengine.cc',
'media/base/hybridvideoengine.h',
'media/base/mediachannel.h',
'media/base/mediacommon.h',
'media/base/mediaengine.cc',

talk/media/base/hybridvideoengine.cc

@@ -1,356 +0,0 @@
/*
* libjingle
* Copyright 2004 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/media/base/hybridvideoengine.h"
#include "webrtc/base/logging.h"
namespace cricket {
HybridVideoMediaChannel::HybridVideoMediaChannel(
HybridVideoEngineInterface* engine,
VideoMediaChannel* channel1,
VideoMediaChannel* channel2)
: engine_(engine),
channel1_(channel1),
channel2_(channel2),
active_channel_(NULL),
sending_(false) {
}
HybridVideoMediaChannel::~HybridVideoMediaChannel() {
}
void HybridVideoMediaChannel::SetInterface(NetworkInterface* iface) {
if (channel1_) {
channel1_->SetInterface(iface);
}
if (channel2_) {
channel2_->SetInterface(iface);
}
}
bool HybridVideoMediaChannel::SetOptions(const VideoOptions &options) {
bool ret = true;
if (channel1_) {
ret = channel1_->SetOptions(options);
}
if (channel2_ && ret) {
ret = channel2_->SetOptions(options);
}
return ret;
}
bool HybridVideoMediaChannel::GetOptions(VideoOptions *options) const {
if (active_channel_) {
return active_channel_->GetOptions(options);
}
if (channel1_) {
return channel1_->GetOptions(options);
}
if (channel2_) {
return channel2_->GetOptions(options);
}
return false;
}
bool HybridVideoMediaChannel::SetRecvCodecs(
const std::vector<VideoCodec>& codecs) {
// Only give each channel the codecs it knows about.
bool ret = true;
std::vector<VideoCodec> codecs1, codecs2;
SplitCodecs(codecs, &codecs1, &codecs2);
if (channel1_) {
ret = channel1_->SetRecvCodecs(codecs1);
}
if (channel2_ && ret) {
ret = channel2_->SetRecvCodecs(codecs2);
}
return ret;
}
bool HybridVideoMediaChannel::SetRecvRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions) {
bool ret = true;
if (channel1_) {
ret = channel1_->SetRecvRtpHeaderExtensions(extensions);
}
if (channel2_ && ret) {
ret = channel2_->SetRecvRtpHeaderExtensions(extensions);
}
return ret;
}
bool HybridVideoMediaChannel::SetRenderer(uint32 ssrc,
VideoRenderer* renderer) {
bool ret = true;
if (channel1_) {
ret = channel1_->SetRenderer(ssrc, renderer);
}
if (channel2_ && ret) {
ret = channel2_->SetRenderer(ssrc, renderer);
}
return ret;
}
bool HybridVideoMediaChannel::SetRender(bool render) {
bool ret = true;
if (channel1_) {
ret = channel1_->SetRender(render);
}
if (channel2_ && ret) {
ret = channel2_->SetRender(render);
}
return ret;
}
bool HybridVideoMediaChannel::MuteStream(uint32 ssrc, bool muted) {
bool ret = true;
if (channel1_) {
ret = channel1_->MuteStream(ssrc, muted);
}
if (channel2_ && ret) {
ret = channel2_->MuteStream(ssrc, muted);
}
return ret;
}
bool HybridVideoMediaChannel::SetSendCodecs(
const std::vector<VideoCodec>& codecs) {
// Use the input to this function to decide what impl we're going to use.
if (!active_channel_ && !SelectActiveChannel(codecs)) {
LOG(LS_WARNING) << "Failed to select active channel";
return false;
}
// Only give the active channel the codecs it knows about.
std::vector<VideoCodec> codecs1, codecs2;
SplitCodecs(codecs, &codecs1, &codecs2);
const std::vector<VideoCodec>& codecs_to_set =
(active_channel_ == channel1_.get()) ? codecs1 : codecs2;
bool return_value = active_channel_->SetSendCodecs(codecs_to_set);
if (!return_value) {
return false;
}
VideoCodec send_codec;
return_value = active_channel_->GetSendCodec(&send_codec);
if (!return_value) {
return false;
}
engine_->OnNewSendResolution(send_codec.width, send_codec.height);
active_channel_->UpdateAspectRatio(send_codec.width, send_codec.height);
return true;
}
bool HybridVideoMediaChannel::GetSendCodec(VideoCodec* send_codec) {
if (!active_channel_) {
return false;
}
return active_channel_->GetSendCodec(send_codec);
}
bool HybridVideoMediaChannel::SetSendStreamFormat(uint32 ssrc,
const VideoFormat& format) {
return active_channel_ && active_channel_->SetSendStreamFormat(ssrc, format);
}
bool HybridVideoMediaChannel::SetSendRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions) {
return active_channel_ &&
active_channel_->SetSendRtpHeaderExtensions(extensions);
}
bool HybridVideoMediaChannel::SetStartSendBandwidth(int bps) {
return active_channel_ && active_channel_->SetStartSendBandwidth(bps);
}
bool HybridVideoMediaChannel::SetMaxSendBandwidth(int bps) {
return active_channel_ && active_channel_->SetMaxSendBandwidth(bps);
}
bool HybridVideoMediaChannel::SetSend(bool send) {
if (send == sending()) {
return true; // no action required if already set.
}
bool ret = active_channel_ &&
active_channel_->SetSend(send);
// Return an error and don't connect the signal if starting up.
// Disconnect the signal anyway if shutting down.
if (ret || !send) {
// TODO(juberti): Remove this hack that connects the WebRTC channel
// to the capturer.
if (active_channel_ == channel1_.get()) {
engine_->OnSendChange1(channel1_.get(), send);
} else {
engine_->OnSendChange2(channel2_.get(), send);
}
// If succeeded, remember the state as is.
// If failed to open, sending_ should be false.
// If failed to stop, sending_ should also be false, as we disconnect the
// capture anyway.
// The failure on SetSend(false) is a known issue in webrtc.
sending_ = send;
}
return ret;
}
bool HybridVideoMediaChannel::SetCapturer(uint32 ssrc,
VideoCapturer* capturer) {
bool ret = true;
if (channel1_.get()) {
ret = channel1_->SetCapturer(ssrc, capturer);
}
if (channel2_.get() && ret) {
ret = channel2_->SetCapturer(ssrc, capturer);
}
return ret;
}
bool HybridVideoMediaChannel::AddSendStream(const StreamParams& sp) {
bool ret = true;
if (channel1_) {
ret = channel1_->AddSendStream(sp);
}
if (channel2_ && ret) {
ret = channel2_->AddSendStream(sp);
}
return ret;
}
bool HybridVideoMediaChannel::RemoveSendStream(uint32 ssrc) {
bool ret = true;
if (channel1_) {
ret = channel1_->RemoveSendStream(ssrc);
}
if (channel2_ && ret) {
ret = channel2_->RemoveSendStream(ssrc);
}
return ret;
}
bool HybridVideoMediaChannel::AddRecvStream(const StreamParams& sp) {
return active_channel_ &&
active_channel_->AddRecvStream(sp);
}
bool HybridVideoMediaChannel::RemoveRecvStream(uint32 ssrc) {
return active_channel_ &&
active_channel_->RemoveRecvStream(ssrc);
}
bool HybridVideoMediaChannel::SendIntraFrame() {
return active_channel_ &&
active_channel_->SendIntraFrame();
}
bool HybridVideoMediaChannel::RequestIntraFrame() {
return active_channel_ &&
active_channel_->RequestIntraFrame();
}
bool HybridVideoMediaChannel::GetStats(
const StatsOptions& options, VideoMediaInfo* info) {
// TODO(juberti): Ensure that returning no stats until SetSendCodecs is called is OK.
return active_channel_ &&
active_channel_->GetStats(options, info);
}
void HybridVideoMediaChannel::OnPacketReceived(
rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
// Eat packets until we have an active channel.
if (active_channel_) {
active_channel_->OnPacketReceived(packet, packet_time);
} else {
LOG(LS_INFO) << "HybridVideoChannel: Eating early RTP packet";
}
}
void HybridVideoMediaChannel::OnRtcpReceived(
rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
// Eat packets until we have an active channel.
if (active_channel_) {
active_channel_->OnRtcpReceived(packet, packet_time);
} else {
LOG(LS_INFO) << "HybridVideoChannel: Eating early RTCP packet";
}
}
void HybridVideoMediaChannel::OnReadyToSend(bool ready) {
if (channel1_) {
channel1_->OnReadyToSend(ready);
}
if (channel2_) {
channel2_->OnReadyToSend(ready);
}
}
void HybridVideoMediaChannel::UpdateAspectRatio(int ratio_w, int ratio_h) {
if (active_channel_) active_channel_->UpdateAspectRatio(ratio_w, ratio_h);
}
bool HybridVideoMediaChannel::SelectActiveChannel(
const std::vector<VideoCodec>& codecs) {
if (!active_channel_ && !codecs.empty()) {
if (engine_->HasCodec1(codecs[0])) {
channel2_.reset();
active_channel_ = channel1_.get();
} else if (engine_->HasCodec2(codecs[0])) {
channel1_.reset();
active_channel_ = channel2_.get();
}
}
if (NULL == active_channel_) {
return false;
}
// Connect signals from the active channel.
active_channel_->SignalMediaError.connect(
this,
&HybridVideoMediaChannel::OnMediaError);
return true;
}
void HybridVideoMediaChannel::SplitCodecs(
const std::vector<VideoCodec>& codecs,
std::vector<VideoCodec>* codecs1, std::vector<VideoCodec>* codecs2) {
codecs1->clear();
codecs2->clear();
for (size_t i = 0; i < codecs.size(); ++i) {
if (engine_->HasCodec1(codecs[i])) {
codecs1->push_back(codecs[i]);
}
if (engine_->HasCodec2(codecs[i])) {
codecs2->push_back(codecs[i]);
}
}
}
void HybridVideoMediaChannel::OnMediaError(uint32 ssrc, Error error) {
SignalMediaError(ssrc, error);
}
} // namespace cricket
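
For context on the policy being deleted: SetSendCodecs above splits the offered codec list by which engine recognizes each codec, lets the engine that owns the first offered codec claim the active channel, and discards the other channel. Below is a minimal, self-contained sketch of that split-and-select step; the Codec struct and the HasCodec1/HasCodec2 predicates are stand-ins for cricket::VideoCodec and the HybridVideoEngineInterface callbacks, not part of the removed API.

#include <iostream>
#include <string>
#include <vector>

// Stand-in for cricket::VideoCodec; only the name matters for this sketch.
struct Codec { std::string name; };

// Stand-ins for HybridVideoEngineInterface::HasCodec1/HasCodec2.
static bool HasCodec1(const Codec& c) { return c.name == "VP8"; }
static bool HasCodec2(const Codec& c) { return c.name == "Generic"; }

int main() {
  const std::vector<Codec> offered = {{"VP8"}, {"Generic"}};

  // SplitCodecs: each sub-channel only ever sees the codecs its engine knows.
  std::vector<Codec> codecs1, codecs2;
  for (const Codec& c : offered) {
    if (HasCodec1(c)) codecs1.push_back(c);
    if (HasCodec2(c)) codecs2.push_back(c);
  }

  // SelectActiveChannel: the engine owning the *first* offered codec wins,
  // and the losing channel is reset() and never used again.
  const char* active = HasCodec1(offered[0]) ? "channel1"
                     : HasCodec2(offered[0]) ? "channel2"
                                             : "none";
  std::cout << "active=" << active
            << " codecs1=" << codecs1.size()
            << " codecs2=" << codecs2.size() << std::endl;
  // Prints: active=channel1 codecs1=1 codecs2=1
  return 0;
}

With the generic codec listed first instead, the same logic picks channel2, which is exactly what the unit tests further down exercise.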

talk/media/base/hybridvideoengine.h

@@ -1,283 +0,0 @@
/*
* libjingle
* Copyright 2004 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_
#define TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_
#include <string>
#include <vector>
#include "talk/media/base/codec.h"
#include "talk/media/base/mediachannel.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/sigslotrepeater.h"
namespace cricket {
struct Device;
struct VideoFormat;
class HybridVideoEngineInterface;
class VideoCapturer;
class VideoFrame;
class VideoRenderer;
// HybridVideoMediaChannels work with a HybridVideoEngine to combine
// two unrelated VideoMediaChannel implementations into a single class.
class HybridVideoMediaChannel : public VideoMediaChannel {
public:
HybridVideoMediaChannel(HybridVideoEngineInterface* engine,
VideoMediaChannel* channel1,
VideoMediaChannel* channel2);
virtual ~HybridVideoMediaChannel();
// VideoMediaChannel methods
virtual void SetInterface(NetworkInterface* iface);
virtual bool SetOptions(const VideoOptions& options);
virtual bool GetOptions(VideoOptions* options) const;
virtual bool AddSendStream(const StreamParams& sp);
virtual bool RemoveSendStream(uint32 ssrc);
virtual bool SetRenderer(uint32 ssrc, VideoRenderer* renderer);
virtual bool SetRender(bool render);
virtual bool MuteStream(uint32 ssrc, bool muted);
virtual bool SetRecvCodecs(const std::vector<VideoCodec>& codecs);
virtual bool SetRecvRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions);
virtual bool SetSendCodecs(const std::vector<VideoCodec>& codecs);
virtual bool GetSendCodec(VideoCodec* codec);
virtual bool SetSendStreamFormat(uint32 ssrc, const VideoFormat& format);
virtual bool SetSendRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions);
virtual bool SetStartSendBandwidth(int bps);
virtual bool SetMaxSendBandwidth(int bps);
virtual bool SetSend(bool send);
virtual bool AddRecvStream(const StreamParams& sp);
virtual bool RemoveRecvStream(uint32 ssrc);
virtual bool SetCapturer(uint32 ssrc, VideoCapturer* capturer);
virtual bool SendIntraFrame();
virtual bool RequestIntraFrame();
virtual bool GetStats(const StatsOptions& options, VideoMediaInfo* info);
virtual void OnPacketReceived(rtc::Buffer* packet,
const rtc::PacketTime& packet_time);
virtual void OnRtcpReceived(rtc::Buffer* packet,
const rtc::PacketTime& packet_time);
virtual void OnReadyToSend(bool ready);
virtual void UpdateAspectRatio(int ratio_w, int ratio_h);
void OnLocalFrame(VideoCapturer*, const VideoFrame*);
void OnLocalFrameFormat(VideoCapturer*, const VideoFormat*);
bool sending() const { return sending_; }
private:
bool SelectActiveChannel(const std::vector<VideoCodec>& codecs);
void SplitCodecs(const std::vector<VideoCodec>& codecs,
std::vector<VideoCodec>* codecs1,
std::vector<VideoCodec>* codecs2);
void OnMediaError(uint32 ssrc, Error error);
HybridVideoEngineInterface* engine_;
rtc::scoped_ptr<VideoMediaChannel> channel1_;
rtc::scoped_ptr<VideoMediaChannel> channel2_;
VideoMediaChannel* active_channel_;
bool sending_;
};
// Interface class for HybridVideoMediaChannels to talk to the engine.
class HybridVideoEngineInterface {
public:
virtual ~HybridVideoEngineInterface() {}
virtual bool HasCodec1(const VideoCodec& codec) = 0;
virtual bool HasCodec2(const VideoCodec& codec) = 0;
virtual void OnSendChange1(VideoMediaChannel* channel1, bool send) = 0;
virtual void OnSendChange2(VideoMediaChannel* channel2, bool send) = 0;
virtual void OnNewSendResolution(int width, int height) = 0;
};
// The HybridVideoEngine class combines two unrelated VideoEngine impls
// into a single class. It creates HybridVideoMediaChannels that also contain
// a VideoMediaChannel implementation from each engine. Policy is then used
// during call setup to determine which VideoMediaChannel should be used.
// Currently, this policy is based on what codec the remote side wants to use.
template<class VIDEO1, class VIDEO2>
class HybridVideoEngine : public HybridVideoEngineInterface {
public:
HybridVideoEngine() {
// Unify the codec lists.
codecs_ = video1_.codecs();
codecs_.insert(codecs_.end(), video2_.codecs().begin(),
video2_.codecs().end());
rtp_header_extensions_ = video1_.rtp_header_extensions();
rtp_header_extensions_.insert(rtp_header_extensions_.end(),
video2_.rtp_header_extensions().begin(),
video2_.rtp_header_extensions().end());
SignalCaptureStateChange.repeat(video2_.SignalCaptureStateChange);
}
bool Init(rtc::Thread* worker_thread) {
if (!video1_.Init(worker_thread)) {
LOG(LS_ERROR) << "Failed to init VideoEngine1";
return false;
}
if (!video2_.Init(worker_thread)) {
LOG(LS_ERROR) << "Failed to init VideoEngine2";
video1_.Terminate();
return false;
}
return true;
}
void Terminate() {
video1_.Terminate();
video2_.Terminate();
}
int GetCapabilities() {
return (video1_.GetCapabilities() | video2_.GetCapabilities());
}
HybridVideoMediaChannel* CreateChannel(VoiceMediaChannel* channel) {
rtc::scoped_ptr<VideoMediaChannel> channel1(
video1_.CreateChannel(channel));
if (!channel1) {
LOG(LS_ERROR) << "Failed to create VideoMediaChannel1";
return NULL;
}
rtc::scoped_ptr<VideoMediaChannel> channel2(
video2_.CreateChannel(channel));
if (!channel2) {
LOG(LS_ERROR) << "Failed to create VideoMediaChannel2";
return NULL;
}
return new HybridVideoMediaChannel(this,
channel1.release(), channel2.release());
}
bool SetOptions(const VideoOptions& options) {
return video1_.SetOptions(options) && video2_.SetOptions(options);
}
bool SetDefaultEncoderConfig(const VideoEncoderConfig& config) {
VideoEncoderConfig conf = config;
if (video1_.codecs().size() > 0) {
conf.max_codec.name = video1_.codecs()[0].name;
if (!video1_.SetDefaultEncoderConfig(conf)) {
LOG(LS_ERROR) << "Failed to SetDefaultEncoderConfig for video1";
return false;
}
}
if (video2_.codecs().size() > 0) {
conf.max_codec.name = video2_.codecs()[0].name;
if (!video2_.SetDefaultEncoderConfig(conf)) {
LOG(LS_ERROR) << "Failed to SetDefaultEncoderConfig for video2";
return false;
}
}
return true;
}
VideoEncoderConfig GetDefaultEncoderConfig() const {
// This looks pretty strange, but, in practice, it'll do sane things if
// GetDefaultEncoderConfig is only called after SetDefaultEncoderConfig,
// since both engines should be essentially equivalent at that point. If it
// hasn't been called, though, we'll use the first meaningful encoder
// config, or the config from the second video engine if neither are
// meaningful.
VideoEncoderConfig config = video1_.GetDefaultEncoderConfig();
if (config.max_codec.width != 0) {
return config;
} else {
return video2_.GetDefaultEncoderConfig();
}
}
const std::vector<VideoCodec>& codecs() const {
return codecs_;
}
const std::vector<RtpHeaderExtension>& rtp_header_extensions() const {
return rtp_header_extensions_;
}
void SetLogging(int min_sev, const char* filter) {
video1_.SetLogging(min_sev, filter);
video2_.SetLogging(min_sev, filter);
}
VideoFormat GetStartCaptureFormat() const {
return video2_.GetStartCaptureFormat();
}
// TODO(juberti): Remove these functions after we do the capturer refactoring.
// For now they are set to always use the second engine for capturing, which
// is convenient given our intended use case.
bool SetCaptureDevice(const Device* device) {
return video2_.SetCaptureDevice(device);
}
VideoCapturer* GetVideoCapturer() const {
return video2_.GetVideoCapturer();
}
sigslot::repeater2<VideoCapturer*, CaptureState> SignalCaptureStateChange;
virtual bool HasCodec1(const VideoCodec& codec) {
return HasCodec(video1_, codec);
}
virtual bool HasCodec2(const VideoCodec& codec) {
return HasCodec(video2_, codec);
}
template<typename VIDEO>
bool HasCodec(const VIDEO& engine, const VideoCodec& codec) const {
for (std::vector<VideoCodec>::const_iterator i = engine.codecs().begin();
i != engine.codecs().end();
++i) {
if (i->Matches(codec)) {
return true;
}
}
return false;
}
virtual void OnSendChange1(VideoMediaChannel* channel1, bool send) {
}
virtual void OnSendChange2(VideoMediaChannel* channel2, bool send) {
}
virtual void OnNewSendResolution(int width, int height) {
}
protected:
VIDEO1 video1_;
VIDEO2 video2_;
std::vector<VideoCodec> codecs_;
std::vector<RtpHeaderExtension> rtp_header_extensions_;
};
} // namespace cricket
#endif // TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_
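
As the class comment in this header describes, the template was instantiated with two concrete engine types and then driven as if it were a single engine. A rough usage sketch follows; it will not compile on its own since it depends on this (now deleted) header, and EngineA/EngineB are hypothetical placeholders for any two classes providing the implicit VideoEngine interface (codecs(), Init(), CreateChannel(), Terminate(), ...).

// Hypothetical engine types; the unit tests below use FakeVideoEngine
// subclasses in exactly this position.
typedef cricket::HybridVideoEngine<EngineA, EngineB> MyHybridVideoEngine;

void UseHybridEngine(rtc::Thread* worker_thread) {
  MyHybridVideoEngine engine;               // merges both engines' codec lists
  if (!engine.Init(worker_thread)) return;  // initializes both sub-engines
  // Each channel wraps one sub-channel per engine; the active one is chosen
  // later, when SetSendCodecs() sees which codec the remote side prefers.
  rtc::scoped_ptr<cricket::HybridVideoMediaChannel> channel(
      engine.CreateChannel(NULL));  // NULL: no paired VoiceMediaChannel
  if (channel.get() != NULL) {
    channel->SetRecvCodecs(engine.codecs());
    // ... SetSendCodecs()/SetSend(true) once negotiation completes ...
  }
  engine.Terminate();
}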


@@ -1,486 +0,0 @@
/*
* libjingle
* Copyright 2004 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/base/fakenetworkinterface.h"
#include "talk/media/base/fakevideocapturer.h"
#include "talk/media/base/hybridvideoengine.h"
#include "talk/media/base/mediachannel.h"
#include "talk/media/base/testutils.h"
#include "webrtc/base/gunit.h"
static const cricket::VideoCodec kGenericCodec(97, "Generic", 640, 360, 30, 0);
static const cricket::VideoCodec kVp8Codec(100, "VP8", 640, 360, 30, 0);
static const cricket::VideoCodec kCodecsVp8Only[] = { kVp8Codec };
static const cricket::VideoCodec kCodecsGenericOnly[] = { kGenericCodec };
static const cricket::VideoCodec kCodecsVp8First[] = { kVp8Codec,
kGenericCodec };
static const cricket::VideoCodec kCodecsGenericFirst[] = { kGenericCodec,
kVp8Codec };
using cricket::StreamParams;
class FakeVp8VideoEngine : public cricket::FakeVideoEngine {
public:
FakeVp8VideoEngine() {
SetCodecs(MAKE_VECTOR(kCodecsVp8Only));
}
};
class FakeGenericVideoEngine : public cricket::FakeVideoEngine {
public:
FakeGenericVideoEngine() {
SetCodecs(MAKE_VECTOR(kCodecsGenericOnly));
}
// For testing purposes, mimic the behavior of a media engine that throws out
// resolutions that don't match the codec list. A width or height of 0
// trivially will never match the codec list, so this is sufficient for
// testing the case we want (0x0).
virtual bool FindCodec(const cricket::VideoCodec& codec) {
if (codec.width == 0 || codec.height == 0) {
return false;
} else {
return cricket::FakeVideoEngine::FindCodec(codec);
}
}
};
class HybridVideoEngineForTest : public cricket::HybridVideoEngine<
FakeVp8VideoEngine, FakeGenericVideoEngine> {
public:
HybridVideoEngineForTest()
:
num_ch1_send_on_(0),
num_ch1_send_off_(0),
send_width_(0),
send_height_(0) { }
cricket::FakeVideoEngine* sub_engine1() { return &video1_; }
cricket::FakeVideoEngine* sub_engine2() { return &video2_; }
// From base class HybridVideoEngine.
void OnSendChange1(cricket::VideoMediaChannel* channel1, bool send) {
if (send) {
++num_ch1_send_on_;
} else {
++num_ch1_send_off_;
}
}
// From base class HybridVideoEngine
void OnNewSendResolution(int width, int height) {
send_width_ = width;
send_height_ = height;
}
int num_ch1_send_on() const { return num_ch1_send_on_; }
int num_ch1_send_off() const { return num_ch1_send_off_; }
int send_width() const { return send_width_; }
int send_height() const { return send_height_; }
private:
int num_ch1_send_on_;
int num_ch1_send_off_;
int send_width_;
int send_height_;
};
class HybridVideoEngineTest : public testing::Test {
public:
HybridVideoEngineTest() : sub_channel1_(NULL), sub_channel2_(NULL) {
}
~HybridVideoEngineTest() {
engine_.Terminate();
}
bool SetupEngine() {
bool result = engine_.Init(rtc::Thread::Current());
if (result) {
channel_.reset(engine_.CreateChannel(NULL));
result = (channel_.get() != NULL);
sub_channel1_ = engine_.sub_engine1()->GetChannel(0);
sub_channel2_ = engine_.sub_engine2()->GetChannel(0);
}
return result;
}
bool SetupRenderAndAddStream(const StreamParams& sp) {
if (!SetupEngine())
return false;
channel_->SetInterface(transport_.get());
return channel_->SetRecvCodecs(engine_.codecs()) &&
channel_->AddSendStream(sp) &&
channel_->SetRender(true);
}
void DeliverPacket(const void* data, int len) {
rtc::Buffer packet(data, len);
channel_->OnPacketReceived(&packet, rtc::CreatePacketTime(0));
}
void DeliverRtcp(const void* data, int len) {
rtc::Buffer packet(data, len);
channel_->OnRtcpReceived(&packet, rtc::CreatePacketTime(0));
}
protected:
void TestSetSendCodecs(cricket::FakeVideoEngine* sub_engine,
const std::vector<cricket::VideoCodec>& codecs) {
EXPECT_TRUE(SetupRenderAndAddStream(StreamParams::CreateLegacy(1234)));
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
cricket::FakeVideoMediaChannel* sub_channel = sub_engine->GetChannel(0);
ASSERT_EQ(1U, sub_channel->send_codecs().size());
EXPECT_EQ(codecs[0], sub_channel->send_codecs()[0]);
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(sub_channel->sending());
}
void TestSetSendBandwidth(cricket::FakeVideoEngine* sub_engine,
const std::vector<cricket::VideoCodec>& codecs,
int start_bitrate,
int max_bitrate) {
EXPECT_TRUE(SetupRenderAndAddStream(StreamParams::CreateLegacy(1234)));
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
EXPECT_TRUE(channel_->SetStartSendBandwidth(start_bitrate));
EXPECT_TRUE(channel_->SetMaxSendBandwidth(max_bitrate));
cricket::FakeVideoMediaChannel* sub_channel = sub_engine->GetChannel(0);
EXPECT_EQ(start_bitrate, sub_channel->start_bps());
EXPECT_EQ(max_bitrate, sub_channel->max_bps());
}
HybridVideoEngineForTest engine_;
rtc::scoped_ptr<cricket::HybridVideoMediaChannel> channel_;
rtc::scoped_ptr<cricket::FakeNetworkInterface> transport_;
cricket::FakeVideoMediaChannel* sub_channel1_;
cricket::FakeVideoMediaChannel* sub_channel2_;
};
TEST_F(HybridVideoEngineTest, StartupShutdown) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
engine_.Terminate();
}
// Tests that SetDefaultVideoEncoderConfig passes down to both engines.
TEST_F(HybridVideoEngineTest, SetDefaultVideoEncoderConfig) {
cricket::VideoEncoderConfig config(
cricket::VideoCodec(105, "", 640, 400, 30, 0), 1, 2);
EXPECT_TRUE(engine_.SetDefaultEncoderConfig(config));
cricket::VideoEncoderConfig config_1 = config;
config_1.max_codec.name = kCodecsVp8Only[0].name;
EXPECT_EQ(config_1, engine_.sub_engine1()->default_encoder_config());
cricket::VideoEncoderConfig config_2 = config;
config_2.max_codec.name = kCodecsGenericOnly[0].name;
EXPECT_EQ(config_2, engine_.sub_engine2()->default_encoder_config());
}
// Tests that GetDefaultVideoEncoderConfig picks a meaningful encoder config
// based on the underlying engine configs, and then again after a call to
// SetDefaultEncoderConfig on the hybrid engine.
TEST_F(HybridVideoEngineTest, SetDefaultVideoEncoderConfigDefaultValue) {
cricket::VideoEncoderConfig blank_config;
cricket::VideoEncoderConfig meaningful_config1(
cricket::VideoCodec(111, "abcd", 320, 240, 30, 0), 1, 2);
cricket::VideoEncoderConfig meaningful_config2(
cricket::VideoCodec(111, "abcd", 1280, 720, 30, 0), 1, 2);
cricket::VideoEncoderConfig meaningful_config3(
cricket::VideoCodec(111, "abcd", 640, 360, 30, 0), 1, 2);
engine_.sub_engine1()->SetDefaultEncoderConfig(blank_config);
engine_.sub_engine2()->SetDefaultEncoderConfig(blank_config);
EXPECT_EQ(blank_config, engine_.GetDefaultEncoderConfig());
engine_.sub_engine2()->SetDefaultEncoderConfig(meaningful_config2);
EXPECT_EQ(meaningful_config2, engine_.GetDefaultEncoderConfig());
engine_.sub_engine1()->SetDefaultEncoderConfig(meaningful_config1);
EXPECT_EQ(meaningful_config1, engine_.GetDefaultEncoderConfig());
EXPECT_TRUE(engine_.SetDefaultEncoderConfig(meaningful_config3));
// The overall config should now match, though the codec name will have been
// rewritten for the first media engine.
meaningful_config3.max_codec.name = kCodecsVp8Only[0].name;
EXPECT_EQ(meaningful_config3, engine_.GetDefaultEncoderConfig());
}
// Tests that our engine has the right codecs in the right order.
TEST_F(HybridVideoEngineTest, CheckCodecs) {
const std::vector<cricket::VideoCodec>& c = engine_.codecs();
ASSERT_EQ(2U, c.size());
EXPECT_EQ(kVp8Codec, c[0]);
EXPECT_EQ(kGenericCodec, c[1]);
}
// Tests that our engine has the right caps.
TEST_F(HybridVideoEngineTest, CheckCaps) {
EXPECT_EQ(cricket::VIDEO_SEND | cricket::VIDEO_RECV,
engine_.GetCapabilities());
}
// Tests that we can create and destroy a channel.
TEST_F(HybridVideoEngineTest, CreateChannel) {
EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(sub_channel1_ != NULL);
EXPECT_TRUE(sub_channel2_ != NULL);
}
// Tests that we properly handle failures in CreateChannel.
TEST_F(HybridVideoEngineTest, CreateChannelFail) {
engine_.sub_engine1()->set_fail_create_channel(true);
EXPECT_FALSE(SetupEngine());
EXPECT_TRUE(channel_.get() == NULL);
EXPECT_TRUE(sub_channel1_ == NULL);
EXPECT_TRUE(sub_channel2_ == NULL);
engine_.sub_engine1()->set_fail_create_channel(false);
engine_.sub_engine2()->set_fail_create_channel(true);
EXPECT_FALSE(SetupEngine());
EXPECT_TRUE(channel_.get() == NULL);
EXPECT_TRUE(sub_channel1_ == NULL);
EXPECT_TRUE(sub_channel2_ == NULL);
}
// Test that we set our inbound codecs and settings properly.
TEST_F(HybridVideoEngineTest, SetLocalDescription) {
EXPECT_TRUE(SetupEngine());
channel_->SetInterface(transport_.get());
EXPECT_TRUE(channel_->SetRecvCodecs(engine_.codecs()));
ASSERT_EQ(1U, sub_channel1_->recv_codecs().size());
ASSERT_EQ(1U, sub_channel2_->recv_codecs().size());
EXPECT_EQ(kVp8Codec, sub_channel1_->recv_codecs()[0]);
EXPECT_EQ(kGenericCodec, sub_channel2_->recv_codecs()[0]);
StreamParams stream;
stream.id = "TestStream";
stream.ssrcs.push_back(1234);
stream.cname = "5678";
EXPECT_TRUE(channel_->AddSendStream(stream));
EXPECT_EQ(1234U, sub_channel1_->send_ssrc());
EXPECT_EQ(1234U, sub_channel2_->send_ssrc());
EXPECT_EQ("5678", sub_channel1_->rtcp_cname());
EXPECT_EQ("5678", sub_channel2_->rtcp_cname());
EXPECT_TRUE(channel_->SetRender(true));
// We've called SetRender, so we should be playing out, but not yet sending.
EXPECT_TRUE(sub_channel1_->playout());
EXPECT_TRUE(sub_channel2_->playout());
EXPECT_FALSE(sub_channel1_->sending());
EXPECT_FALSE(sub_channel2_->sending());
// We may get SetSend(false) calls during call setup.
// Since this causes no change in state, they should no-op and return true.
EXPECT_TRUE(channel_->SetSend(false));
EXPECT_FALSE(sub_channel1_->sending());
EXPECT_FALSE(sub_channel2_->sending());
}
TEST_F(HybridVideoEngineTest, OnNewSendResolution) {
EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsVp8First)));
EXPECT_EQ(640, engine_.send_width());
EXPECT_EQ(360, engine_.send_height());
}
// Test that we converge to the active channel for engine 1.
TEST_F(HybridVideoEngineTest, SetSendCodecs1) {
// This will nuke the object that sub_channel2_ points to.
TestSetSendCodecs(engine_.sub_engine1(), MAKE_VECTOR(kCodecsVp8First));
EXPECT_TRUE(engine_.sub_engine2()->GetChannel(0) == NULL);
}
// Test that we converge to the active channel for engine 2.
TEST_F(HybridVideoEngineTest, SetSendCodecs2) {
// This will nuke the object that sub_channel1_ points to.
TestSetSendCodecs(engine_.sub_engine2(), MAKE_VECTOR(kCodecsGenericFirst));
EXPECT_TRUE(engine_.sub_engine1()->GetChannel(0) == NULL);
}
// Test that we don't accidentally eat 0x0 in SetSendCodecs.
TEST_F(HybridVideoEngineTest, SetSendCodecs0x0) {
EXPECT_TRUE(SetupRenderAndAddStream(StreamParams::CreateLegacy(1234)));
// Send using generic codec, but with 0x0 resolution.
std::vector<cricket::VideoCodec> codecs(MAKE_VECTOR(kCodecsGenericFirst));
codecs.resize(1);
codecs[0].width = 0;
codecs[0].height = 0;
EXPECT_TRUE(channel_->SetSendCodecs(codecs));
}
// Test setting the send bandwidth for VP8.
TEST_F(HybridVideoEngineTest, SetSendBandwidth1) {
TestSetSendBandwidth(engine_.sub_engine1(),
MAKE_VECTOR(kCodecsVp8First),
100000,
384000);
}
// Test setting the send bandwidth for a generic codec.
TEST_F(HybridVideoEngineTest, SetSendBandwidth2) {
TestSetSendBandwidth(engine_.sub_engine2(),
MAKE_VECTOR(kCodecsGenericFirst),
100001,
384002);
}
// Test that we dump RTP packets that arrive early.
TEST_F(HybridVideoEngineTest, HandleEarlyRtp) {
static const uint8 kPacket[1024] = { 0 };
static const uint8 kRtcp[1024] = { 1 };
EXPECT_TRUE(SetupRenderAndAddStream(StreamParams::CreateLegacy(1234)));
DeliverPacket(kPacket, sizeof(kPacket));
DeliverRtcp(kRtcp, sizeof(kRtcp));
EXPECT_TRUE(sub_channel1_->CheckNoRtp());
EXPECT_TRUE(sub_channel2_->CheckNoRtp());
EXPECT_TRUE(sub_channel1_->CheckNoRtcp());
EXPECT_TRUE(sub_channel2_->CheckNoRtcp());
}
// Test that we properly pass on normal RTP packets.
TEST_F(HybridVideoEngineTest, HandleRtp) {
static const uint8 kPacket[1024] = { 0 };
static const uint8 kRtcp[1024] = { 1 };
EXPECT_TRUE(SetupRenderAndAddStream(StreamParams::CreateLegacy(1234)));
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsVp8First)));
EXPECT_TRUE(channel_->SetSend(true));
DeliverPacket(kPacket, sizeof(kPacket));
DeliverRtcp(kRtcp, sizeof(kRtcp));
EXPECT_TRUE(sub_channel1_->CheckRtp(kPacket, sizeof(kPacket)));
EXPECT_TRUE(sub_channel1_->CheckRtcp(kRtcp, sizeof(kRtcp)));
}
// Test that we properly connect the media error signal.
TEST_F(HybridVideoEngineTest, MediaErrorSignal) {
cricket::VideoMediaErrorCatcher catcher;
// Verify no signal from either channel before the active channel is set.
EXPECT_TRUE(SetupEngine());
channel_->SignalMediaError.connect(&catcher,
&cricket::VideoMediaErrorCatcher::OnError);
sub_channel1_->SignalMediaError(1, cricket::VideoMediaChannel::ERROR_OTHER);
EXPECT_EQ(0U, catcher.ssrc());
sub_channel2_->SignalMediaError(2,
cricket::VideoMediaChannel::ERROR_REC_DEVICE_OPEN_FAILED);
EXPECT_EQ(0U, catcher.ssrc());
// Set vp8 as active channel and verify that a signal comes from it.
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsVp8First)));
sub_channel1_->SignalMediaError(1, cricket::VideoMediaChannel::ERROR_OTHER);
EXPECT_EQ(cricket::VideoMediaChannel::ERROR_OTHER, catcher.error());
EXPECT_EQ(1U, catcher.ssrc());
// Set generic codec as active channel and verify that a signal comes from it.
EXPECT_TRUE(SetupEngine());
channel_->SignalMediaError.connect(&catcher,
&cricket::VideoMediaErrorCatcher::OnError);
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsGenericFirst)));
sub_channel2_->SignalMediaError(2,
cricket::VideoMediaChannel::ERROR_REC_DEVICE_OPEN_FAILED);
EXPECT_EQ(cricket::VideoMediaChannel::ERROR_REC_DEVICE_OPEN_FAILED,
catcher.error());
EXPECT_EQ(2U, catcher.ssrc());
}
// Test that SetSend doesn't re-enter.
TEST_F(HybridVideoEngineTest, RepeatSetSend) {
EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsVp8First)));
// Verify initial status.
EXPECT_FALSE(channel_->sending());
EXPECT_FALSE(sub_channel1_->sending());
EXPECT_EQ(0, engine_.num_ch1_send_on());
EXPECT_EQ(0, engine_.num_ch1_send_off());
// Verify SetSend(true) works correctly.
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(channel_->sending());
EXPECT_TRUE(sub_channel1_->sending());
EXPECT_EQ(1, engine_.num_ch1_send_on());
EXPECT_EQ(0, engine_.num_ch1_send_off());
// SetSend(true) again and verify nothing changes.
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(channel_->sending());
EXPECT_TRUE(sub_channel1_->sending());
EXPECT_EQ(1, engine_.num_ch1_send_on());
EXPECT_EQ(0, engine_.num_ch1_send_off());
// Verify SetSend(false) works correctly.
EXPECT_TRUE(channel_->SetSend(false));
EXPECT_FALSE(channel_->sending());
EXPECT_FALSE(sub_channel1_->sending());
EXPECT_EQ(1, engine_.num_ch1_send_on());
EXPECT_EQ(1, engine_.num_ch1_send_off());
// SetSend(false) again and verify nothing changes.
EXPECT_TRUE(channel_->SetSend(false));
EXPECT_FALSE(channel_->sending());
EXPECT_FALSE(sub_channel1_->sending());
EXPECT_EQ(1, engine_.num_ch1_send_on());
EXPECT_EQ(1, engine_.num_ch1_send_off());
}
// Test that SetOptions propagates to both sub-channels.
TEST_F(HybridVideoEngineTest, SetOptions) {
cricket::VideoOptions vmo;
vmo.video_high_bitrate.Set(true);
vmo.system_low_adaptation_threshhold.Set(0.10f);
EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(channel_->SetOptions(vmo));
bool high_bitrate;
float low;
EXPECT_TRUE(sub_channel1_->GetOptions(&vmo));
EXPECT_TRUE(vmo.video_high_bitrate.Get(&high_bitrate));
EXPECT_TRUE(high_bitrate);
EXPECT_TRUE(vmo.system_low_adaptation_threshhold.Get(&low));
EXPECT_EQ(0.10f, low);
EXPECT_TRUE(sub_channel2_->GetOptions(&vmo));
EXPECT_TRUE(vmo.video_high_bitrate.Get(&high_bitrate));
EXPECT_TRUE(high_bitrate);
EXPECT_TRUE(vmo.system_low_adaptation_threshhold.Get(&low));
EXPECT_EQ(0.10f, low);
vmo.video_high_bitrate.Set(false);
vmo.system_low_adaptation_threshhold.Set(0.50f);
EXPECT_TRUE(channel_->SetOptions(vmo));
EXPECT_TRUE(sub_channel1_->GetOptions(&vmo));
EXPECT_TRUE(vmo.video_high_bitrate.Get(&high_bitrate));
EXPECT_FALSE(high_bitrate);
EXPECT_TRUE(vmo.system_low_adaptation_threshhold.Get(&low));
EXPECT_EQ(0.50f, low);
EXPECT_TRUE(sub_channel2_->GetOptions(&vmo));
EXPECT_TRUE(vmo.video_high_bitrate.Get(&high_bitrate));
EXPECT_FALSE(high_bitrate);
EXPECT_TRUE(vmo.system_low_adaptation_threshhold.Get(&low));
EXPECT_EQ(0.50f, low);
}
TEST_F(HybridVideoEngineTest, SetCapturer) {
EXPECT_TRUE(SetupEngine());
// Set vp8 as active channel and verify that capturer can be set.
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsVp8First)));
cricket::FakeVideoCapturer fake_video_capturer;
EXPECT_TRUE(channel_->SetCapturer(0, &fake_video_capturer));
EXPECT_TRUE(channel_->SetCapturer(0, NULL));
// Set generic codec as active channel and verify that capturer can be set.
EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(channel_->SetSendCodecs(MAKE_VECTOR(kCodecsGenericFirst)));
EXPECT_TRUE(channel_->SetCapturer(0, &fake_video_capturer));
EXPECT_TRUE(channel_->SetCapturer(0, NULL));
}