Log Fixit for parts of video_engine folder.

BUG=3153
R=pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/11179004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5853 4adac7df-926f-26a2-2b94-8c16560cd09d
mflodman@webrtc.org
2014-04-07 10:56:31 +00:00
parent e8d1865408
commit 5574dacd1f
15 changed files with 267 additions and 1438 deletions
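The whole commit follows one mechanical pattern: printf-style WEBRTC_TRACE(...) calls, which take an explicit trace level, module, and ViEId plus a format string that must be kept in sync with the argument list, are replaced by the stream-based LOG(sev) and LOG_F(sev) macros, and the trace.h include is swapped for webrtc/system_wrappers/interface/logging.h where it is no longer needed. The snippet below is a minimal stand-alone sketch of that stream-logging idiom for readers unfamiliar with it; the LogMessage class, severity enum, and macro bodies are simplified stand-ins for illustration, not the actual WebRTC implementation.

#include <iostream>
#include <sstream>

// Simplified severity levels mirroring the names used in the diff.
enum LoggingSeverity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR };

// Collects one message in a string stream and flushes it when the temporary
// goes out of scope at the end of the logging statement.
class LogMessage {
 public:
  LogMessage(const char* file, int line, LoggingSeverity severity) {
    stream_ << file << ":" << line << " (severity " << severity << ") ";
  }
  ~LogMessage() { std::cerr << stream_.str() << std::endl; }
  std::ostringstream& stream() { return stream_; }

 private:
  std::ostringstream stream_;
};

// Stream-style macros: the message is built with operator<<, so there is no
// format string that can drift out of sync with its arguments.
#define LOG(sev) LogMessage(__FILE__, __LINE__, sev).stream()
#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": "

int main() {
  int video_channel = 1;
  // Old style removed by this commit (shown for comparison only):
  //   WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
  //                ViEId(instance_id, video_channel),
  //                "%s(channel: %d)", __FUNCTION__, video_channel);
  // New style added by this commit:
  LOG_F(LS_INFO) << "StartSend: " << video_channel;
  LOG(LS_INFO) << "Video channel created: " << video_channel;
  return 0;
}

Compiled as an ordinary C++ program this prints both messages to stderr; the real macros also filter messages below a configurable severity threshold, which this sketch omits.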

View File

@@ -15,7 +15,6 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {

View File

@@ -19,7 +19,7 @@
#include "webrtc/modules/video_coding/utility/include/exp_filter.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -438,19 +438,13 @@ int32_t OveruseFrameDetector::Process() {
observer_->NormalUsage();
}
WEBRTC_TRACE(
webrtc::kTraceInfo,
webrtc::kTraceVideo,
-1,
"Capture input stats: avg: %.2fms, std_dev: %.2fms (rampup delay: "
"%dms, overuse: >=%.2fms, "
"underuse: <%.2fms)",
capture_deltas_.Mean(),
capture_deltas_.StdDev(),
in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_,
options_.high_capture_jitter_threshold_ms,
options_.low_capture_jitter_threshold_ms);
int rampup_delay =
in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
LOG(LS_VERBOSE) << "Capture input stats: avg: " << capture_deltas_.Mean()
<< " std_dev " << capture_deltas_.StdDev()
<< " rampup delay " << rampup_delay
<< " overuse >= " << options_.high_capture_jitter_threshold_ms
<< " underuse < " << options_.low_capture_jitter_threshold_ms;
return 0;
}

View File

@@ -16,7 +16,7 @@
#include <algorithm>
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -96,18 +96,12 @@ bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
assert(total_audio_delay_target_ms && total_video_delay_target_ms);
int current_video_delay_ms = *total_video_delay_target_ms;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
"Audio delay is: %d for voice channel: %d",
current_audio_delay_ms, audio_channel_id_);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
"Network delay diff is: %d for voice channel: %d",
channel_delay_->network_delay, audio_channel_id_);
LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
<< ", network delay diff: " << channel_delay_->network_delay
<< " current diff: " << relative_delay_ms
<< " for channel " << audio_channel_id_;
// Calculate the difference between the lowest possible video delay and
// the current audio delay.
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
"Current diff is: %d for audio channel: %d",
relative_delay_ms, audio_channel_id_);
int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
relative_delay_ms;
@@ -198,11 +192,10 @@ bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
channel_delay_->last_video_delay_ms = new_video_delay_ms;
channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
"Sync video delay %d ms for video channel and audio delay %d for audio "
"channel %d",
new_video_delay_ms, channel_delay_->extra_audio_delay_ms,
audio_channel_id_);
LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
<< " and audio delay " << channel_delay_->extra_audio_delay_ms
<< " for video channel " << video_channel_id_
<< " for audio channel " << audio_channel_id_;
// Return values.
*total_video_delay_target_ms = new_video_delay_ms;

View File

@@ -157,10 +157,9 @@ void ViEAutoTest::ViEImageProcessAPITest()
tbCapture.captureId, effectFilter));
EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
tbCapture.captureId));
// Double deregister
EXPECT_NE(0, ViE.image_process->DeregisterCaptureEffectFilter(
EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
tbCapture.captureId));
// Non-existing capture device
EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
tbChannel.videoChannel, effectFilter));
@@ -174,7 +173,7 @@ void ViEAutoTest::ViEImageProcessAPITest()
tbChannel.videoChannel, effectFilter));
EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
tbChannel.videoChannel));
EXPECT_NE(0, ViE.image_process->DeregisterRenderEffectFilter(
EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
tbChannel.videoChannel));
// Non-existing channel id
@@ -190,7 +189,7 @@ void ViEAutoTest::ViEImageProcessAPITest()
tbChannel.videoChannel, effectFilter));
EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
tbChannel.videoChannel));
EXPECT_NE(0, ViE.image_process->DeregisterSendEffectFilter(
EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
tbChannel.videoChannel));
EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
tbCapture.captureId, effectFilter));

View File

@@ -20,7 +20,7 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -45,42 +45,27 @@ ViEBase* ViEBase::GetInterface(VideoEngine* video_engine) {
}
int ViEBaseImpl::Release() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
"ViEBase::Release()");
(*this)--; // Decrease ref count.
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
"ViEBase release too many times");
shared_data_.SetLastError(kViEAPIDoesNotExist);
LOG(LS_WARNING) << "ViEBase released too many times.";
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_.instance_id(),
"ViEBase reference count: %d", ref_count);
return ref_count;
}
ViEBaseImpl::ViEBaseImpl(const Config& config)
: shared_data_(config) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
"ViEBaseImpl::ViEBaseImpl() Ctor");
}
: shared_data_(config) {}
ViEBaseImpl::~ViEBaseImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
"ViEBaseImpl::ViEBaseImpl() Dtor");
}
ViEBaseImpl::~ViEBaseImpl() {}
int ViEBaseImpl::Init() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
"Init");
return 0;
}
int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s", __FUNCTION__);
LOG_F(LS_INFO) << "SetVoiceEngine";
if (shared_data_.channel_manager()->SetVoiceEngine(voice_engine) != 0) {
shared_data_.SetLastError(kViEBaseVoEFailure);
return -1;
@@ -90,15 +75,10 @@ int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel,
CpuOveruseObserver* observer) {
LOG_F(LS_INFO) << "RegisterCpuOveruseObserver on channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError,
kTraceVideo,
ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist",
__FUNCTION__,
video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -123,12 +103,6 @@ int ViEBaseImpl::SetCpuOveruseOptions(int video_channel,
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError,
kTraceVideo,
ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist",
__FUNCTION__,
video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -155,12 +129,6 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError,
kTraceVideo,
ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist",
__FUNCTION__,
video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -188,18 +156,13 @@ int ViEBaseImpl::CreateChannel(int& video_channel) { // NOLINT
int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
const Config* config) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s", __FUNCTION__);
if (shared_data_.channel_manager()->CreateChannel(&video_channel,
config) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: Could not create channel", __FUNCTION__);
video_channel = -1;
shared_data_.SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: channel created: %d", __FUNCTION__, video_channel);
LOG(LS_INFO) << "Video channel created: " << video_channel;
return 0;
}
@@ -214,15 +177,10 @@ int ViEBaseImpl::CreateReceiveChannel(int& video_channel, // NOLINT
}
int ViEBaseImpl::DeleteChannel(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s(%d)", __FUNCTION__, video_channel);
{
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -239,25 +197,19 @@ int ViEBaseImpl::DeleteChannel(const int video_channel) {
}
if (shared_data_.channel_manager()->DeleteChannel(video_channel) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: Could not delete channel %d", __FUNCTION__,
video_channel);
shared_data_.SetLastError(kViEBaseUnknownError);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: channel deleted: %d", __FUNCTION__, video_channel);
LOG(LS_INFO) << "Channel deleted " << video_channel;
return 0;
}
int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
const int audio_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s(%d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "ConnectAudioChannel, video channel " << video_channel
<< ", audio channel " << audio_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(video_channel)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -271,12 +223,9 @@ int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
}
int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s(%d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "DisconnectAudioChannel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(video_channel)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -290,16 +239,10 @@ int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
}
int ViEBaseImpl::StartSend(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "StartSend: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -307,9 +250,7 @@ int ViEBaseImpl::StartSend(const int video_channel) {
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
assert(vie_encoder != NULL);
if (vie_encoder->Owner() != video_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"Can't start ssend on a receive only channel.");
LOG_F(LS_ERROR) << "Can't start send on a receive only channel.";
shared_data_.SetLastError(kViEBaseReceiveOnlyChannel);
return -1;
}
@@ -319,13 +260,10 @@ int ViEBaseImpl::StartSend(const int video_channel) {
int32_t error = vie_channel->StartSend();
if (error != 0) {
vie_encoder->Restart();
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Could not start sending on channel %d", __FUNCTION__,
video_channel);
if (error == kViEBaseAlreadySending) {
shared_data_.SetLastError(kViEBaseAlreadySending);
}
LOG_F(LS_ERROR) << "Could not start sending " << video_channel;
shared_data_.SetLastError(kViEBaseUnknownError);
return -1;
}
@@ -335,29 +273,21 @@ int ViEBaseImpl::StartSend(const int video_channel) {
}
int ViEBaseImpl::StopSend(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "StopSend " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
int32_t error = vie_channel->StopSend();
if (error != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Could not stop sending on channel %d", __FUNCTION__,
video_channel);
if (error == kViEBaseNotSending) {
shared_data_.SetLastError(kViEBaseNotSending);
} else {
LOG_F(LS_ERROR) << "Could not stop sending " << video_channel;
shared_data_.SetLastError(kViEBaseUnknownError);
}
return -1;
@@ -366,16 +296,11 @@ int ViEBaseImpl::StopSend(const int video_channel) {
}
int ViEBaseImpl::StartReceive(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "StartReceive " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -387,16 +312,10 @@ int ViEBaseImpl::StartReceive(const int video_channel) {
}
int ViEBaseImpl::StopReceive(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "StopReceive " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),
"%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -408,8 +327,6 @@ int ViEBaseImpl::StopReceive(const int video_channel) {
}
int ViEBaseImpl::GetVersion(char version[1024]) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"GetVersion(version=?)");
assert(kViEVersionMaxMessageSize == 1024);
if (!version) {
shared_data_.SetLastError(kViEBaseInvalidArgument);
@@ -423,17 +340,10 @@ int ViEBaseImpl::GetVersion(char version[1024]) {
// Add build info.
version_stream << "Build: " << BUILDINFO << std::endl;
#ifdef WEBRTC_EXTERNAL_TRANSPORT
version_stream << "External transport build" << std::endl;
#endif
int version_length = version_stream.tellp();
assert(version_length < 1024);
memcpy(version, version_stream.str().c_str(), version_length);
version[version_length] = '\0';
WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
ViEId(shared_data_.instance_id()), "GetVersion() => %s",
version);
return 0;
}
@@ -445,9 +355,6 @@ int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
int original_channel, bool sender) {
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(original_channel)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s - original_channel does not exist.", __FUNCTION__,
shared_data_.instance_id());
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -455,14 +362,13 @@ int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
if (shared_data_.channel_manager()->CreateChannel(&video_channel,
original_channel,
sender) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: Could not create channel", __FUNCTION__);
video_channel = -1;
shared_data_.SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s: channel created: %d", __FUNCTION__, video_channel);
LOG_F(LS_INFO) << "VideoChannel created: " << video_channel
<< ", base channel " << original_channel
<< ", is send channel : " << sender;
return 0;
}

View File

@@ -12,7 +12,7 @@
#include <map>
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -43,37 +43,24 @@ ViECapture* ViECapture::GetInterface(VideoEngine* video_engine) {
}
int ViECaptureImpl::Release() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
"ViECapture::Release()");
// Decrease ref count
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViECapture release too many times");
LOG(LS_WARNING) << "ViECapture released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
"ViECapture reference count: %d", ref_count);
return ref_count;
}
ViECaptureImpl::ViECaptureImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViECaptureImpl::ViECaptureImpl() Ctor");
}
: shared_data_(shared_data) {}
ViECaptureImpl::~ViECaptureImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViECaptureImpl::~ViECaptureImpl() Dtor");
}
ViECaptureImpl::~ViECaptureImpl() {}
int ViECaptureImpl::NumberOfCaptureDevices() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
return shared_data_->input_manager()->NumberOfCaptureDevices();
}
@@ -83,8 +70,6 @@ int ViECaptureImpl::GetCaptureDevice(unsigned int list_number,
unsigned int device_nameUTF8Length,
char* unique_idUTF8,
unsigned int unique_idUTF8Length) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(list_number: %d)", __FUNCTION__, list_number);
return shared_data_->input_manager()->GetDeviceName(
list_number,
device_nameUTF8, device_nameUTF8Length,
@@ -95,8 +80,7 @@ int ViECaptureImpl::AllocateCaptureDevice(
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(unique_idUTF8: %s)", __FUNCTION__, unique_idUTF8);
LOG(LS_INFO) << "AllocateCaptureDevice " << unique_idUTF8;
const int32_t result =
shared_data_->input_manager()->CreateCaptureDevice(
unique_idUTF8,
@@ -111,8 +95,6 @@ int ViECaptureImpl::AllocateCaptureDevice(
int ViECaptureImpl::AllocateExternalCaptureDevice(
int& capture_id, ViEExternalCapture*& external_capture) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
const int32_t result =
shared_data_->input_manager()->CreateExternalCaptureDevice(
external_capture, capture_id);
@@ -121,56 +103,45 @@ int ViECaptureImpl::AllocateExternalCaptureDevice(
shared_data_->SetLastError(result);
return -1;
}
LOG(LS_INFO) << "External capture device allocated: " << capture_id;
return 0;
}
int ViECaptureImpl::AllocateCaptureDevice(
VideoCaptureModule& capture_module, int& capture_id) { // NOLINT
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
int32_t result = shared_data_->input_manager()->CreateCaptureDevice(
&capture_module, capture_id);
if (result != 0) {
shared_data_->SetLastError(result);
return -1;
}
LOG(LS_INFO) << "External capture device, by module, allocated: "
<< capture_id;
return 0;
}
int ViECaptureImpl::ReleaseCaptureDevice(const int capture_id) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
LOG(LS_INFO) << "ReleaseCaptureDevice " << capture_id;
{
ViEInputManagerScoped is((*(shared_data_->input_manager())));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
}
// Destroy the capture device.
return shared_data_->input_manager()->DestroyCaptureDevice(capture_id);
}
int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(capture_id: %d, video_channel: %d)", __FUNCTION__,
capture_id, video_channel);
LOG(LS_INFO) << "Connect capture id " << capture_id
<< " to channel " << video_channel;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -178,26 +149,18 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__,
video_channel);
LOG(LS_ERROR) << "Channel doesn't exist.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
if (vie_encoder->Owner() != video_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"Can't connect capture device to a receive only channel.");
LOG(LS_ERROR) << "Can't connect capture device to a receive device.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
// Check if the encoder already has a connected frame provider
if (is.FrameProvider(vie_encoder) != NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d already connected to a capture device.",
__FUNCTION__, video_channel);
LOG(LS_ERROR) << "Channel already connected to capture device.";
shared_data_->SetLastError(kViECaptureDeviceAlreadyConnected);
return -1;
}
@@ -215,17 +178,12 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
LOG(LS_INFO) << "DisconnectCaptureDevice " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id()),
"%s: Channel %d doesn't exist", __FUNCTION__,
video_channel);
LOG(LS_ERROR) << "Channel doesn't exist.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
@@ -233,19 +191,11 @@ int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
if (!frame_provider) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id()),
"%s: No capture device connected to channel %d",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
if (frame_provider->Id() < kViECaptureIdBase ||
frame_provider->Id() > kViECaptureIdMax) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id()),
"%s: No capture device connected to channel %d",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
@@ -263,16 +213,11 @@ int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
int ViECaptureImpl::StartCapture(const int capture_id,
const CaptureCapability& capture_capability) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
LOG(LS_ERROR) << "StartCapture " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -288,28 +233,22 @@ int ViECaptureImpl::StartCapture(const int capture_id,
}
int ViECaptureImpl::StopCapture(const int capture_id) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
LOG(LS_INFO) << "StopCapture " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
if (!vie_capture->Started()) {
shared_data_->SetLastError(kViECaptureDeviceNotStarted);
return -1;
return 0;
}
if (vie_capture->Stop() != 0) {
shared_data_->SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
@@ -331,16 +270,12 @@ int ViECaptureImpl::SetRotateCapturedFrames(
i_rotation = 270;
break;
}
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(rotation: %d)", __FUNCTION__, i_rotation);
LOG(LS_INFO) << "SetRotateCaptureFrames for " << capture_id
<< ", rotation " << i_rotation;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -353,17 +288,12 @@ int ViECaptureImpl::SetRotateCapturedFrames(
int ViECaptureImpl::SetCaptureDelay(const int capture_id,
const unsigned int capture_delay_ms) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_id: %d, capture_delay_ms %u)", __FUNCTION__,
capture_id, capture_delay_ms);
LOG(LS_INFO) << "SetCaptureDelay " << capture_delay_ms
<< ", for device " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -378,8 +308,6 @@ int ViECaptureImpl::SetCaptureDelay(const int capture_id,
int ViECaptureImpl::NumberOfCapabilities(
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
#if defined(WEBRTC_MAC)
// TODO(mflodman) Move to capture module!
@@ -387,9 +315,7 @@ int ViECaptureImpl::NumberOfCapabilities(
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s This API is not supported on Mac OS", __FUNCTION__,
shared_data_->instance_id());
LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
return -1;
#endif
return shared_data_->input_manager()->NumberOfCaptureCapabilities(
@@ -401,18 +327,14 @@ int ViECaptureImpl::GetCaptureCapability(const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
const unsigned int capability_number,
CaptureCapability& capability) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
#if defined(WEBRTC_MAC)
// TODO(mflodman) Move to capture module!
// QTKit framework handles all capabilities and capture settings
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s This API is not supported on Mac OS", __FUNCTION__,
shared_data_->instance_id());
return -1;
#endif
if (shared_data_->input_manager()->GetCaptureCapability(
@@ -436,15 +358,9 @@ int ViECaptureImpl::ShowCaptureSettingsDialogBox(
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s This API is not supported on Mac OS", __FUNCTION__,
shared_data_->instance_id());
LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
return -1;
#endif
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s capture_id (capture_device_name: %s)", __FUNCTION__,
unique_idUTF8);
return shared_data_->input_manager()->DisplayCaptureSettingsDialogBox(
unique_idUTF8, dialog_title,
parent_window, x, y);
@@ -452,8 +368,6 @@ int ViECaptureImpl::ShowCaptureSettingsDialogBox(
int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
RotateCapturedFrame& orientation) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s (capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
if (shared_data_->input_manager()->GetOrientation(
unique_idUTF8,
orientation) != 0) {
@@ -466,13 +380,11 @@ int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
const bool enable) {
LOG(LS_INFO) << "EnableBrightnessAlarm for device " << capture_id
<< ", status " << enable;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -485,20 +397,15 @@ int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
int ViECaptureImpl::RegisterObserver(const int capture_id,
ViECaptureObserver& observer) {
LOG(LS_INFO) << "Register capture observer " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
if (vie_capture->IsObserverRegistered()) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Observer already registered", __FUNCTION__);
LOG_F(LS_ERROR) << "Observer already registered.";
shared_data_->SetLastError(kViECaptureObserverAlreadyRegistered);
return -1;
}
@@ -513,10 +420,6 @@ int ViECaptureImpl::DeregisterObserver(const int capture_id) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s: Capture device %d doesn't exist", __FUNCTION__,
capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}

View File

@@ -19,8 +19,8 @@
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
@@ -59,23 +59,14 @@ ViECapturer::ViECapturer(int capture_id,
observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
observer_(NULL),
overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
"ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
capture_id, engine_id);
unsigned int t_id = 0;
if (capture_thread_.Start(t_id)) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
"%s: thread started: %u", __FUNCTION__, t_id);
} else {
if (!capture_thread_.Start(t_id)) {
assert(false);
}
module_process_thread_.RegisterModule(overuse_detector_.get());
}
ViECapturer::~ViECapturer() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
"ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
capture_id_, engine_id_);
module_process_thread_.DeRegisterModule(overuse_detector_.get());
// Stop the thread.
@@ -100,10 +91,6 @@ ViECapturer::~ViECapturer() {
delete &deliver_event_;
} else {
assert(false);
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
ViEId(engine_id_, capture_id_),
"%s: Not able to stop capture thread for device %d, leaking",
__FUNCTION__, capture_id_);
}
if (image_proc_module_) {
@@ -206,8 +193,6 @@ int ViECapturer::FrameCallbackChanged() {
}
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
__FUNCTION__);
int width;
int height;
int frame_rate;
@@ -244,15 +229,11 @@ int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
}
int32_t ViECapturer::Stop() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
__FUNCTION__);
requested_capability_ = CaptureCapability();
return capture_module_->StopCapture();
}
bool ViECapturer::Started() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
__FUNCTION__);
return capture_module_->CaptureStarted();
}
@@ -310,10 +291,6 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time) { // NOLINT
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"ExternalCapture::IncomingFrame width %d, height %d, "
"capture_time %u", width, height, capture_time);
if (!external_capture_module_) {
return -1;
}
@@ -328,11 +305,6 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time) { // NOLINT
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"ExternalCapture::IncomingFrame width %d, height %d, "
" capture_time %u", video_frame.width, video_frame.height,
capture_time);
if (!external_capture_module_) {
return -1;
}
@@ -354,10 +326,7 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
video_frame.v_pitch);
if (ret < 0) {
WEBRTC_TRACE(kTraceError,
kTraceVideo,
ViEId(engine_id_, capture_id_),
"Failed to create I420VideoFrame");
LOG_F(LS_ERROR) << "Could not create I420Frame.";
return -1;
}
@@ -372,8 +341,6 @@ void ViECapturer::SwapFrame(I420VideoFrame* frame) {
void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
I420VideoFrame& video_frame) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
CriticalSectionScoped cs(capture_cs_.get());
// Make sure we render this frame earlier since we know the render time set
// is slightly off since it's being set when the frame has been received from
@@ -392,9 +359,8 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
const int32_t delay) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
delay);
LOG(LS_INFO) << "Capture delayed change to " << delay
<< " for device " << id;
// Deliver the network delay to all registered callbacks.
ViEFrameProviderBase::SetFrameDelay(delay);
@@ -404,26 +370,9 @@ int32_t ViECapturer::RegisterEffectFilter(
ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(deliver_cs_.get());
if (!effect_filter) {
if (!effect_filter_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: no effect filter added for capture device %d",
__FUNCTION__, capture_id_);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: deregister effect filter for device %d", __FUNCTION__,
capture_id_);
} else {
if (effect_filter_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: effect filter already added for capture device %d",
__FUNCTION__, capture_id_);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: register effect filter for device %d", __FUNCTION__,
capture_id_);
if (effect_filter != NULL && effect_filter_ != NULL) {
LOG_F(LS_ERROR) << "Effect filter already registered.";
return -1;
}
effect_filter_ = effect_filter;
return 0;
@@ -435,9 +384,7 @@ int32_t ViECapturer::IncImageProcRefCount() {
image_proc_module_ = VideoProcessingModule::Create(
ViEModuleId(engine_id_, capture_id_));
if (!image_proc_module_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: could not create video processing module",
__FUNCTION__);
LOG_F(LS_ERROR) << "Could not create video processing module.";
return -1;
}
}
@@ -456,10 +403,6 @@ int32_t ViECapturer::DecImageProcRefCount() {
}
int32_t ViECapturer::EnableDenoising(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (denoising_enabled_) {
@@ -478,20 +421,13 @@ int32_t ViECapturer::EnableDenoising(bool enable) {
denoising_enabled_ = false;
DecImageProcRefCount();
}
return 0;
}
int32_t ViECapturer::EnableDeflickering(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (deflicker_frame_stats_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: deflickering already enabled", __FUNCTION__);
return -1;
}
if (IncImageProcRefCount() != 0) {
@@ -500,8 +436,6 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
} else {
if (deflicker_frame_stats_ == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: deflickering not enabled", __FUNCTION__);
return -1;
}
DecImageProcRefCount();
@@ -512,15 +446,9 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
}
int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (brightness_frame_stats_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: BrightnessAlarm already enabled", __FUNCTION__);
return -1;
}
if (IncImageProcRefCount() != 0) {
@@ -530,8 +458,6 @@ int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
} else {
DecImageProcRefCount();
if (brightness_frame_stats_ == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: deflickering not enabled", __FUNCTION__);
return -1;
}
delete brightness_frame_stats_;
@@ -578,9 +504,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
*video_frame) == 0) {
image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
} else {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: could not get frame stats for captured frame",
__FUNCTION__);
LOG_F(LS_ERROR) << "Could not get frame stats.";
}
}
if (denoising_enabled_) {
@@ -603,8 +527,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
current_brightness_level_ = Bright;
break;
default:
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: Brightness detection failed", __FUNCTION__);
break;
}
}
}
@@ -643,12 +566,7 @@ int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
{
CriticalSectionScoped cs(observer_cs_.get());
if (observer_) {
WEBRTC_TRACE(kTraceError,
kTraceVideo,
ViEId(engine_id_, capture_id_),
"%s Observer already registered",
__FUNCTION__,
capture_id_);
LOG_F(LS_ERROR) << "Observer already registered.";
return -1;
}
observer_ = observer;
@@ -676,17 +594,13 @@ bool ViECapturer::IsObserverRegistered() {
void ViECapturer::OnCaptureFrameRate(const int32_t id,
const uint32_t frame_rate) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"OnCaptureFrameRate %d", frame_rate);
CriticalSectionScoped cs(observer_cs_.get());
observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}
void ViECapturer::OnNoPictureAlarm(const int32_t id,
const VideoCaptureAlarm alarm) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"OnNoPictureAlarm %d", alarm);
LOG(LS_WARNING) << "OnNoPictureAlarm " << id;
CriticalSectionScoped cs(observer_cs_.get());
CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;

File diff suppressed because it is too large

View File

@@ -17,8 +17,8 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_engine/encoder_state_feedback.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -111,8 +111,8 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
if (header.extension.hasAbsoluteSendTime) {
// If we see AST in header, switch RBE strategy immediately.
if (!using_absolute_send_time_) {
WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_),
"WrappingBitrateEstimator: Switching to absolute send time RBE.");
LOG(LS_INFO) <<
"WrappingBitrateEstimator: Switching to absolute send time RBE.";
using_absolute_send_time_ = true;
PickEstimator();
}
@@ -122,9 +122,8 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
if (using_absolute_send_time_) {
++packets_since_absolute_send_time_;
if (packets_since_absolute_send_time_ >= kTimeOffsetSwitchThreshold) {
WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_),
"WrappingBitrateEstimator: Switching to transmission time offset "
"RBE.");
LOG(LS_INFO) << "WrappingBitrateEstimator: Switching to transmission "
<< "time offset RBE.";
using_absolute_send_time_ = false;
PickEstimator();
}

View File

@@ -15,7 +15,7 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_engine/encoder_state_feedback.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -39,18 +39,12 @@ ViEChannelManager::ViEChannelManager(
voice_engine_(NULL),
module_process_thread_(NULL),
engine_config_(config) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id),
"ViEChannelManager::ViEChannelManager(engine_id: %d)",
engine_id);
for (int idx = 0; idx < free_channel_ids_size_; idx++) {
free_channel_ids_[idx] = true;
}
}
ViEChannelManager::~ViEChannelManager() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_),
"ViEChannelManager Destructor, engine_id: %d", engine_id_);
while (channel_map_.size() > 0) {
ChannelMap::iterator it = channel_map_.begin();
// DeleteChannel will erase this channel from the map and invalidate |it|.
@@ -225,8 +219,6 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
ChannelMap::iterator c_it = channel_map_.find(channel_id);
if (c_it == channel_map_.end()) {
// No such channel.
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
return -1;
}
vie_channel = c_it->second;
@@ -276,22 +268,17 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
// deleted, which might take time.
// If statement just to show that this object is not always deleted.
if (vie_encoder) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s ViEEncoder deleted for channel %d", __FUNCTION__,
channel_id);
LOG(LS_VERBOSE) << "ViEEncoder deleted for channel " << channel_id;
delete vie_encoder;
}
// If statement just to show that this object is not always deleted.
if (group) {
// Delete the group if empty last since the encoder holds a pointer to the
// BitrateController object that the group owns.
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s ChannelGroup deleted for channel %d", __FUNCTION__,
channel_id);
LOG(LS_VERBOSE) << "Channel group deleted for channel " << channel_id;
delete group;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s Channel %d deleted", __FUNCTION__, channel_id);
LOG(LS_VERBOSE) << "Channel deleted " << channel_id;
return 0;
}
@@ -306,9 +293,6 @@ int ViEChannelManager::SetVoiceEngine(VoiceEngine* voice_engine) {
// Get new sync interface.
sync_interface = VoEVideoSync::GetInterface(voice_engine);
if (!sync_interface) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"%s Can't get audio sync interface from VoiceEngine.",
__FUNCTION__);
return -1;
}
}
@@ -329,8 +313,7 @@ int ViEChannelManager::ConnectVoiceChannel(int channel_id,
int audio_channel_id) {
CriticalSectionScoped cs(channel_id_critsect_);
if (!voice_sync_interface_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
"No VoE set");
LOG_F(LS_ERROR) << "No VoE set.";
return -1;
}
ViEChannel* channel = ViEChannelPtr(channel_id);
@@ -438,21 +421,15 @@ bool ViEChannelManager::CreateChannelObject(
send_rtp_rtcp_module,
sender);
if (vie_channel->Init() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"%s could not init channel", __FUNCTION__, channel_id);
delete vie_channel;
return false;
}
VideoCodec encoder;
if (vie_encoder->GetEncoder(&encoder) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
"%s: Could not GetEncoder.", __FUNCTION__);
delete vie_channel;
return false;
}
if (sender && vie_channel->SetSendCodec(encoder) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
"%s: Could not SetSendCodec.", __FUNCTION__);
delete vie_channel;
return false;
}
@@ -466,8 +443,7 @@ ViEChannel* ViEChannelManager::ViEChannelPtr(int channel_id) const {
CriticalSectionScoped cs(channel_id_critsect_);
ChannelMap::const_iterator it = channel_map_.find(channel_id);
if (it == channel_map_.end()) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
LOG(LS_ERROR) << "Channel doesn't exist " << channel_id;
return NULL;
}
return it->second;
@@ -492,8 +468,7 @@ int ViEChannelManager::FreeChannelId() {
}
idx++;
}
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"Max number of channels reached: %d", channel_map_.size());
LOG(LS_ERROR) << "Max number of channels reached.";
return -1;
}

View File

@@ -15,7 +15,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -28,6 +27,51 @@
namespace webrtc {
static void LogCodec(const VideoCodec& codec) {
LOG(LS_INFO) << "CodecType " << codec.codecType
<< ", pl_type " << codec.plType
<< ", resolution " << codec.width
<< " x " << codec.height
<< ", start br " << codec.startBitrate
<< ", min br " << codec.minBitrate
<< ", max br " << codec.maxBitrate
<< ", max fpr " << codec.maxFramerate
<< ", max qp " << codec.qpMax
<< ", number of streams "
<< codec.numberOfSimulcastStreams;
if (codec.codecType == kVideoCodecVP8) {
LOG(LS_INFO) << "VP8 specific settings";
LOG(LS_INFO) << "pivtureLossIndicationOn "
<< codec.codecSpecific.VP8.pictureLossIndicationOn
<< ", feedbackModeOn "
<< codec.codecSpecific.VP8.feedbackModeOn
<< ", complexity "
<< codec.codecSpecific.VP8.complexity
<< ", resilience "
<< codec.codecSpecific.VP8.resilience
<< ", numberOfTeporalLayers "
<< codec.codecSpecific.VP8.numberOfTemporalLayers
<< ", keyFrameinterval "
<< codec.codecSpecific.VP8.keyFrameInterval;
for (int idx = 0; idx < codec.numberOfSimulcastStreams; ++idx) {
LOG(LS_INFO) << "Stream " << codec.simulcastStream[idx].width
<< " x " << codec.simulcastStream[idx].height;
LOG(LS_INFO) << "Temporal layers "
<< codec.simulcastStream[idx].numberOfTemporalLayers
<< ", min br "
<< codec.simulcastStream[idx].minBitrate
<< ", target br "
<< codec.simulcastStream[idx].targetBitrate
<< ", max br "
<< codec.simulcastStream[idx].maxBitrate
<< ", qp max "
<< codec.simulcastStream[idx].qpMax;
}
}
}
ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
if (!video_engine) {
@@ -44,45 +88,33 @@ ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
}
int ViECodecImpl::Release() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
"ViECodecImpl::Release()");
LOG(LS_INFO) << "ViECodec::Release.";
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViECodec released too many times");
LOG(LS_WARNING) << "ViECodec released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
"ViECodec reference count: %d", ref_count);
return ref_count;
}
ViECodecImpl::ViECodecImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViECodecImpl::ViECodecImpl() Ctor");
}
ViECodecImpl::~ViECodecImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViECodecImpl::~ViECodecImpl() Dtor");
}
int ViECodecImpl::NumberOfCodecs() const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
// +2 because of FEC(RED and ULPFEC)
return static_cast<int>((VideoCodingModule::NumberOfCodecs() + 2));
}
int ViECodecImpl::GetCodec(const unsigned char list_number,
VideoCodec& video_codec) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(list_number: %d)", __FUNCTION__, list_number);
if (list_number == VideoCodingModule::NumberOfCodecs()) {
memset(&video_codec, 0, sizeof(VideoCodec));
strcpy(video_codec.plName, "red");
@@ -94,9 +126,6 @@ int ViECodecImpl::GetCodec(const unsigned char list_number,
video_codec.codecType = kVideoCodecULPFEC;
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
} else if (VideoCodingModule::Codec(list_number, &video_codec) != VCM_OK) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: Could not get codec for list_number: %u", __FUNCTION__,
list_number);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -105,33 +134,8 @@ int ViECodecImpl::GetCodec(const unsigned char list_number,
int ViECodecImpl::SetSendCodec(const int video_channel,
const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
video_channel, video_codec.codecType);
WEBRTC_TRACE(kTraceInfo, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d"
"maxBr: %d, min_br: %d, frame_rate: %d, qpMax: %u,"
"numberOfSimulcastStreams: %u )", __FUNCTION__,
video_codec.codecType, video_codec.plType, video_codec.width,
video_codec.height, video_codec.startBitrate,
video_codec.maxBitrate, video_codec.minBitrate,
video_codec.maxFramerate, video_codec.qpMax,
video_codec.numberOfSimulcastStreams);
if (video_codec.codecType == kVideoCodecVP8) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"pictureLossIndicationOn: %d, feedbackModeOn: %d, "
"complexity: %d, resilience: %d, numberOfTemporalLayers: %u"
"keyFrameInterval %d",
video_codec.codecSpecific.VP8.pictureLossIndicationOn,
video_codec.codecSpecific.VP8.feedbackModeOn,
video_codec.codecSpecific.VP8.complexity,
video_codec.codecSpecific.VP8.resilience,
video_codec.codecSpecific.VP8.numberOfTemporalLayers,
video_codec.codecSpecific.VP8.keyFrameInterval);
}
LOG(LS_INFO) << "SetSendCodec for channel " << video_channel;
LogCodec(video_codec);
if (!CodecValid(video_codec)) {
// Error logged.
shared_data_->SetLastError(kViECodecInvalidCodec);
@@ -141,9 +145,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -151,9 +152,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
assert(vie_encoder);
if (vie_encoder->Owner() != video_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Receive only channel %d", __FUNCTION__, video_channel);
LOG_F(LS_ERROR) << "Receive only channel.";
shared_data_->SetLastError(kViECodecReceiveOnlyChannel);
return -1;
}
@@ -166,10 +165,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
video_codec_internal.height *
video_codec_internal.maxFramerate)
/ 1000;
WEBRTC_TRACE(kTraceInfo, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: New max bitrate set to %d kbps", __FUNCTION__,
video_codec_internal.maxBitrate);
LOG(LS_INFO) << "New max bitrate set " << video_codec_internal.maxBitrate;
}
if (video_codec_internal.startBitrate > video_codec_internal.maxBitrate) {
@@ -192,10 +188,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
vie_encoder->Pause();
if (vie_encoder->SetEncoder(video_codec_internal) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not change encoder for channel %d", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -207,10 +199,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
++it) {
bool ret = true;
if ((*it)->SetSendCodec(video_codec_internal, new_rtp_stream) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not set send codec for channel %d", __FUNCTION__,
video_channel);
ret = false;
}
if (!ret) {
@@ -225,9 +213,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
if (video_codec_internal.numberOfSimulcastStreams == 0) {
unsigned int ssrc = 0;
if (vie_channel->GetLocalSSRC(0, &ssrc) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not get ssrc", __FUNCTION__);
LOG_F(LS_ERROR) << "Could not get ssrc.";
}
ssrcs.push_back(ssrc);
} else {
@@ -235,9 +221,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
++idx) {
unsigned int ssrc = 0;
if (vie_channel->GetLocalSSRC(idx, &ssrc) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not get ssrc for idx %d", __FUNCTION__, idx);
LOG_F(LS_ERROR) << "Could not get ssrc for stream " << idx;
}
ssrcs.push_back(ssrc);
}
@@ -264,16 +248,9 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
int ViECodecImpl::GetSendCodec(const int video_channel,
VideoCodec& video_codec) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -282,21 +259,11 @@ int ViECodecImpl::GetSendCodec(const int video_channel,
int ViECodecImpl::SetReceiveCodec(const int video_channel,
const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
video_channel, video_codec.codecType);
WEBRTC_TRACE(kTraceInfo, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d,"
"maxBr: %d, min_br: %d, frame_rate: %d", __FUNCTION__,
video_codec.codecType, video_codec.plType, video_codec.width,
video_codec.height, video_codec.startBitrate,
video_codec.maxBitrate, video_codec.minBitrate,
video_codec.maxFramerate);
LOG(LS_INFO) << "SetReceiveCodec for channel " << video_channel;
LOG(LS_INFO) << "Codec type " << video_codec.codecType
<< ", payload type " << video_codec.plType;
if (CodecValid(video_codec) == false) {
// Error logged.
shared_data_->SetLastError(kViECodecInvalidCodec);
return -1;
}
@@ -304,18 +271,11 @@ int ViECodecImpl::SetReceiveCodec(const int video_channel,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_channel->SetReceiveCodec(video_codec) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not set receive codec for channel %d",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -324,17 +284,9 @@ int ViECodecImpl::SetReceiveCodec(const int video_channel,
int ViECodecImpl::GetReceiveCodec(const int video_channel,
VideoCodec& video_codec) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
video_channel, video_codec.codecType);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -350,16 +302,11 @@ int ViECodecImpl::GetCodecConfigParameters(
const int video_channel,
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
LOG(LS_INFO) << "GetCodecConfigParameters " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -374,17 +321,12 @@ int ViECodecImpl::GetCodecConfigParameters(
int ViECodecImpl::SetImageScaleStatus(const int video_channel,
const bool enable) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
enable);
LOG(LS_INFO) << "SetImageScaleStates for channel " << video_channel
<< ", enable: " << enable;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -399,17 +341,9 @@ int ViECodecImpl::SetImageScaleStatus(const int video_channel,
int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
unsigned int& key_frames,
unsigned int& delta_frames) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No send codec for channel %d", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -424,17 +358,9 @@ int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
unsigned int& key_frames,
unsigned int& delta_frames) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__,
video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -447,20 +373,11 @@ int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
int ViECodecImpl::GetReceiveSideDelay(const int video_channel,
int* delay_ms) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
if (delay_ms == NULL) {
LOG_F(LS_ERROR) << "NULL pointer argument.";
return -1;
}
assert(delay_ms != NULL);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -474,18 +391,9 @@ int ViECodecImpl::GetReceiveSideDelay(const int video_channel,
int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
unsigned int* bitrate) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No send codec for channel %d", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -493,17 +401,9 @@ int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
}
unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -512,16 +412,12 @@ unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
const bool enable) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
LOG(LS_INFO) << "SetKeyFrameRequestCallbackStatus for " << video_channel
<< ", enacle " << enable;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -535,17 +431,13 @@ int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
const bool enable,
const bool only_key_frames) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d, enable: %d, only_key_frames: %d)",
__FUNCTION__, video_channel, enable);
LOG(LS_INFO) << "SetSignalKeyPacketLossStatus for " << video_channel
<< "enable, " << enable
<< ", only key frames " << only_key_frames;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -558,23 +450,15 @@ int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
ViEEncoderObserver& observer) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
LOG(LS_INFO) << "RegisterEncoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_encoder->RegisterCodecObserver(&observer) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not register codec observer at channel",
__FUNCTION__);
shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
@@ -582,15 +466,11 @@ int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
}
int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
LOG(LS_INFO) << "DeregisterEncoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -603,23 +483,15 @@ int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
ViEDecoderObserver& observer) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
LOG(LS_INFO) << "RegisterDecoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_channel->RegisterCodecObserver(&observer) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Could not register codec observer at channel",
__FUNCTION__);
shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
@@ -627,16 +499,11 @@ int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
}
int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id()), "%s",
__FUNCTION__);
LOG(LS_INFO) << "DeregisterDecodeObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -648,15 +515,11 @@ int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
}
int ViECodecImpl::SendKeyFrame(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
LOG(LS_INFO) << "SendKeyFrame on channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -669,17 +532,12 @@ int ViECodecImpl::SendKeyFrame(const int video_channel) {
int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
const bool wait) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id()),
"%s(video_channel: %d, wait: %d)", __FUNCTION__, video_channel,
wait);
LOG(LS_INFO) << "WaitForFirstKeyFrame for channel " << video_channel
<< ", wait " << wait;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -692,44 +550,35 @@ int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
int ViECodecImpl::StartDebugRecording(int video_channel,
const char* file_name_utf8) {
LOG(LS_INFO) << "StartDebugRecording for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StartDebugRecording(file_name_utf8);
}
int ViECodecImpl::StopDebugRecording(int video_channel) {
LOG(LS_INFO) << "StopDebugRecording for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StopDebugRecording();
}
void ViECodecImpl::SuspendBelowMinBitrate(int video_channel) {
LOG(LS_INFO) << "SuspendBelowMinBitrate for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return;
}
vie_encoder->SuspendBelowMinBitrate();
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
return;
}
// Must enable pacing when enabling SuspendBelowMinBitrate. Otherwise, no
@@ -743,9 +592,6 @@ bool ViECodecImpl::GetSendSideDelay(int video_channel, int* avg_delay_ms,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return false;
}
@@ -763,8 +609,7 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// We only care about the type and name for red.
return true;
}
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
"Codec type doesn't match pl_name", video_codec.plType);
LOG_F(LS_ERROR) << "Invalid RED configuration.";
return false;
} else if (video_codec.codecType == kVideoCodecULPFEC) {
#if defined(WIN32)
@@ -775,8 +620,7 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// We only care about the type and name for ULPFEC.
return true;
}
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
"Codec type doesn't match pl_name", video_codec.plType);
LOG_F(LS_ERROR) << "Invalid ULPFEC configuration.";
return false;
} else if ((video_codec.codecType == kVideoCodecVP8 &&
strncmp(video_codec.plName, "VP8", 4) == 0) ||
@@ -784,32 +628,28 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
strncmp(video_codec.plName, "I420", 4) == 0)) {
// OK.
} else if (video_codec.codecType != kVideoCodecGeneric) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
"Codec type doesn't match pl_name", video_codec.plType);
LOG(LS_ERROR) << "Codec type and name mismatch.";
return false;
}
if (video_codec.plType == 0 || video_codec.plType > 127) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
"Invalid codec payload type: %d", video_codec.plType);
LOG(LS_ERROR) << "Invalif payload type: " << video_codec.plType;
return false;
}
if (video_codec.width > kViEMaxCodecWidth ||
video_codec.height > kViEMaxCodecHeight) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid codec size: %u x %u",
video_codec.width, video_codec.height);
LOG(LS_ERROR) << "Invalid codec resolution " << video_codec.width
<< " x " << video_codec.height;
return false;
}
if (video_codec.startBitrate < kViEMinCodecBitrate) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid start_bitrate: %u",
video_codec.startBitrate);
LOG(LS_ERROR) << "Invalid start bitrate.";
return false;
}
if (video_codec.minBitrate < kViEMinCodecBitrate) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid min_bitrate: %u",
video_codec.minBitrate);
LOG(LS_ERROR) << "Invalid min bitrate.";
return false;
}
return true;
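For orientation, here is a minimal sketch of the streaming-style logging these hunks switch to. The header path, macro names, severities, and message strings are taken from the diff above; the wrapper function and channel value are made up purely for illustration:

#include "webrtc/system_wrappers/interface/logging.h"

// Illustration only: LOG(sev) streams a message at the given severity;
// LOG_F(sev) additionally prefixes the calling function's name, which is why
// it stands in for the __FUNCTION__-style traces removed above.
void LogUsageSketch() {
  int video_channel = 0;  // Hypothetical channel id.
  LOG(LS_INFO) << "SetSendCodec for channel " << video_channel;
  LOG(LS_WARNING) << "Failed to get the current encoder target bitrate.";
  LOG_F(LS_ERROR) << "Receive only channel.";
}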

View File

@@ -26,7 +26,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
@@ -166,11 +165,6 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
qm_callback_(NULL),
video_suspended_(false),
pre_encode_callback_(NULL) {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(engine_id, channel_id),
"%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
this);
RtpRtcp::Configuration configuration;
configuration.id = ViEModuleId(engine_id_, channel_id_);
configuration.audio = false; // Video.
@@ -184,9 +178,6 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
bool ViEEncoder::Init() {
if (vcm_.InitializeSender() != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s InitializeSender failure", __FUNCTION__);
return false;
}
vpm_.EnableTemporalDecimation(true);
@@ -197,9 +188,6 @@ bool ViEEncoder::Init() {
if (module_process_thread_.RegisterModule(&vcm_) != 0 ||
module_process_thread_.RegisterModule(default_rtp_rtcp_.get()) != 0 ||
module_process_thread_.RegisterModule(paced_sender_.get()) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterModule failure", __FUNCTION__);
return false;
}
if (qm_callback_) {
@@ -210,9 +198,6 @@ bool ViEEncoder::Init() {
#ifdef VIDEOCODEC_VP8
VideoCodec video_codec;
if (vcm_.Codec(webrtc::kVideoCodecVP8, &video_codec) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s Codec failure", __FUNCTION__);
return false;
}
{
@@ -221,15 +206,9 @@ bool ViEEncoder::Init() {
}
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
default_rtp_rtcp_->MaxDataPayloadLength()) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterSendCodec failure", __FUNCTION__);
return false;
}
if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterSendPayload failure", __FUNCTION__);
return false;
}
#else
@@ -248,30 +227,18 @@ bool ViEEncoder::Init() {
#endif
if (vcm_.RegisterTransportCallback(this) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"ViEEncoder: VCM::RegisterTransportCallback failure");
return false;
}
if (vcm_.RegisterSendStatisticsCallback(this) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"ViEEncoder: VCM::RegisterSendStatisticsCallback failure");
return false;
}
if (vcm_.RegisterVideoQMCallback(qm_callback_) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"VCM::RegisterQMCallback failure");
return false;
}
return true;
}
ViEEncoder::~ViEEncoder() {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"ViEEncoder Destructor 0x%p, engine_id: %d", this, engine_id_);
if (bitrate_controller_) {
bitrate_controller_->RemoveBitrateObserver(bitrate_observer_.get());
}
@@ -289,10 +256,6 @@ int ViEEncoder::Owner() const {
}
void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(%s)", __FUNCTION__,
is_transmitting ? "transmitting" : "not transmitting");
{
CriticalSectionScoped cs(data_cs_.get());
network_is_transmitting_ = is_transmitting;
@@ -305,17 +268,11 @@ void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
}
void ViEEncoder::Pause() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
encoder_paused_ = true;
}
void ViEEncoder::Restart() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
encoder_paused_ = false;
}
@@ -326,9 +283,6 @@ uint8_t ViEEncoder::NumberOfCodecs() {
int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
if (vcm_.Codec(list_index, video_codec) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: Could not get codec",
__FUNCTION__);
return -1;
}
return 0;
@@ -337,43 +291,27 @@ int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: pltype %u", __FUNCTION__,
pl_type);
if (encoder == NULL)
return -1;
if (vcm_.RegisterExternalEncoder(encoder, pl_type, internal_source) !=
VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not register external encoder");
VCM_OK) {
return -1;
}
return 0;
}
int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: pltype %u", __FUNCTION__, pl_type);
webrtc::VideoCodec current_send_codec;
if (vcm_.SendCodec(&current_send_codec) == VCM_OK) {
uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Failed to get the current encoder target bitrate.");
LOG(LS_WARNING) << "Failed to get the current encoder target bitrate.";
}
current_send_codec.startBitrate = (current_bitrate_bps + 500) / 1000;
}
if (vcm_.RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not deregister external encoder");
return -1;
}
@@ -393,9 +331,6 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
current_send_codec.extra_options = NULL;
if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not use internal encoder");
return -1;
}
}
@@ -403,23 +338,13 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
}
int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
video_codec.codecType, video_codec.width, video_codec.height);
// Setting target width and height for VPM.
if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate) != VPM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not set VPM target dimensions");
return -1;
}
if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could register RTP module video payload");
return -1;
}
// Convert from kbps to bps.
@@ -438,9 +363,6 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
}
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not register send codec");
return -1;
}
@@ -448,9 +370,6 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
// start and stop sending.
if (default_rtp_rtcp_->Sending() == false) {
if (default_rtp_rtcp_->SetSendingStatus(true) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could start RTP module sending");
return -1;
}
}
@@ -472,13 +391,7 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
}
int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
if (vcm_.SendCodec(video_codec) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not get VCM send codec");
return -1;
}
return 0;
@@ -487,16 +400,10 @@ int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
int32_t ViEEncoder::GetCodecConfigParameters(
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
int32_t num_parameters =
vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize);
if (num_parameters <= 0) {
config_parameters_size = 0;
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not get config parameters");
return -1;
}
config_parameters_size = static_cast<unsigned char>(num_parameters);
@@ -504,16 +411,11 @@ int32_t ViEEncoder::GetCodecConfigParameters(
}
int32_t ViEEncoder::ScaleInputImage(bool enable) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s(enable %d)", __FUNCTION__,
enable);
VideoFrameResampling resampling_mode = kFastRescaling;
if (enable == true) {
// TODO(mflodman) What?
if (enable) {
// kInterpolation is currently not supported.
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s not supported",
__FUNCTION__, enable);
LOG_F(LS_ERROR) << "Not supported.";
return -1;
}
vpm_.SetInputFrameResampleMode(resampling_mode);
@@ -560,9 +462,6 @@ bool ViEEncoder::EncoderPaused() const {
}
RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
return default_rtp_rtcp_.get();
}
@@ -570,11 +469,6 @@ void ViEEncoder::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const uint32_t CSRC[kRtpCsrcSize]) {
WEBRTC_TRACE(webrtc::kTraceStream,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: %llu", __FUNCTION__,
video_frame->timestamp());
if (default_rtp_rtcp_->SendingMedia() == false) {
// We've paused or we have no channels attached, don't encode.
return;
@@ -640,11 +534,6 @@ void ViEEncoder::DeliverFrame(int id,
return;
}
if (ret != VPM_OK) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error preprocessing frame %u", __FUNCTION__,
video_frame->timestamp());
return;
}
// Frame was not sampled => use original.
@@ -673,47 +562,24 @@ void ViEEncoder::DeliverFrame(int id,
has_received_sli_ = false;
has_received_rpsi_ = false;
if (vcm_.AddVideoFrame(*decimated_frame,
vpm_.ContentMetrics(),
&codec_specific_info) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error encoding frame %u", __FUNCTION__,
video_frame->timestamp());
}
vcm_.AddVideoFrame(*decimated_frame, vpm_.ContentMetrics(),
&codec_specific_info);
return;
}
#endif
if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error encoding frame %u", __FUNCTION__,
video_frame->timestamp());
}
vcm_.AddVideoFrame(*decimated_frame);
}
void ViEEncoder::DelayChanged(int id, int frame_delay) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: %u", __FUNCTION__,
frame_delay);
default_rtp_rtcp_->SetCameraDelay(frame_delay);
}
int ViEEncoder::GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
webrtc::VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
if (vcm_.SendCodec(&video_codec) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Could not get VCM send codec");
return -1;
}
@@ -724,21 +590,13 @@ int ViEEncoder::GetPreferedFrameSettings(int* width,
}
int ViEEncoder::SendKeyFrame() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
return vcm_.IntraFrameRequest(0);
}
int32_t ViEEncoder::SendCodecStatistics(
uint32_t* num_key_frames, uint32_t* num_delta_frames) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
webrtc::VCMFrameCount sent_frames;
if (vcm_.SentFrameCount(sent_frames) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Could not get sent frame information", __FUNCTION__);
return -1;
}
*num_key_frames = sent_frames.numKeyFrames;
@@ -752,9 +610,6 @@ int32_t ViEEncoder::PacerQueuingDelayMs() const {
int32_t ViEEncoder::EstimatedSendBandwidth(
uint32_t* available_bandwidth) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (!bitrate_controller_->AvailableBandwidth(available_bandwidth)) {
return -1;
}
@@ -762,8 +617,6 @@ int32_t ViEEncoder::EstimatedSendBandwidth(
}
int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (vcm_.Bitrate(bitrate) != 0)
return -1;
return 0;
@@ -799,9 +652,6 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
}
if (fec_enabled_ || nack_enabled_) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: FEC status ",
__FUNCTION__, fec_enabled);
vcm_.RegisterProtectionCallback(this);
// The send codec must be registered to set correct MTU.
webrtc::VideoCodec codec;
@@ -809,17 +659,12 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
uint16_t max_pay_load = default_rtp_rtcp_->MaxDataPayloadLength();
uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"Failed to get the current encoder target bitrate.");
LOG_F(LS_WARNING) <<
"Failed to get the current encoder target bitrate.";
}
// Convert to start bitrate in kbps.
codec.startBitrate = (current_bitrate_bps + 500) / 1000;
if (vcm_.RegisterSendCodec(&codec, number_of_cores_, max_pay_load) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Failed to update Sendcodec when enabling FEC",
__FUNCTION__, fec_enabled);
return -1;
}
}
@@ -878,30 +723,8 @@ int32_t ViEEncoder::ProtectionRequest(
uint32_t* sent_video_rate_bps,
uint32_t* sent_nack_rate_bps,
uint32_t* sent_fec_rate_bps) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s, deltaFECRate: %u, key_fecrate: %u, "
"delta_use_uep_protection: %d, key_use_uep_protection: %d, "
"delta_max_fec_frames: %d, key_max_fec_frames: %d, "
"delta_mask_type: %d, key_mask_type: %d, ",
__FUNCTION__,
delta_fec_params->fec_rate,
key_fec_params->fec_rate,
delta_fec_params->use_uep_protection,
key_fec_params->use_uep_protection,
delta_fec_params->max_fec_frames,
key_fec_params->max_fec_frames,
delta_fec_params->fec_mask_type,
key_fec_params->fec_mask_type);
if (default_rtp_rtcp_->SetFecParameters(delta_fec_params,
key_fec_params) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Could not update FEC parameters", __FUNCTION__);
}
default_rtp_rtcp_->BitrateSent(NULL,
sent_video_rate_bps,
sent_fec_rate_bps,
default_rtp_rtcp_->SetFecParameters(delta_fec_params, key_fec_params);
default_rtp_rtcp_->BitrateSent(NULL, sent_video_rate_bps, sent_fec_rate_bps,
sent_nack_rate_bps);
return 0;
}
@@ -910,9 +733,6 @@ int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: bitrate %u, framerate %u",
__FUNCTION__, bit_rate, frame_rate);
codec_observer_->OutgoingRate(channel_id_, frame_rate, bit_rate);
}
return 0;
@@ -920,29 +740,11 @@ int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
int32_t ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: observer added",
__FUNCTION__);
if (codec_observer_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: observer already set.",
__FUNCTION__);
return -1;
}
codec_observer_ = observer;
} else {
if (codec_observer_ == NULL) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: observer does not exist.", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: observer removed",
__FUNCTION__);
codec_observer_ = NULL;
if (observer && codec_observer_) {
LOG_F(LS_ERROR) << "Observer already set.";
return -1;
}
codec_observer_ = observer;
return 0;
}
@@ -960,8 +762,6 @@ void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
// Key frame request from remote side, signal to VCM.
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
int idx = 0;
@@ -981,9 +781,6 @@ void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
int64_t now = TickTime::MillisecondTimestamp();
if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Not encoding new intra due to timing", __FUNCTION__);
return;
}
time_last_intra_request_ms_[ssrc] = now;
@@ -1045,11 +842,9 @@ void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
const uint8_t fraction_lost,
const uint32_t round_trip_time_ms) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
__FUNCTION__, bitrate_bps, fraction_lost, round_trip_time_ms);
LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate" << bitrate_bps
<< " packet loss " << fraction_lost
<< " rtt " << round_trip_time_ms;
vcm_.SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
bool video_is_suspended = vcm_.VideoSuspended();
int bitrate_kbps = bitrate_bps / 1000;
@@ -1109,10 +904,8 @@ void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
// Video suspend-state changed, inform codec observer.
CriticalSectionScoped crit(callback_cs_.get());
if (codec_observer_) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: video_suspended_ changed to %i",
__FUNCTION__, video_is_suspended);
LOG(LS_INFO) << "Video suspended " << video_is_suspended
<< " for channel " << channel_id_;
codec_observer_->SuspendChange(channel_id_, video_is_suspended);
}
}
@@ -1123,26 +916,9 @@ PacedSender* ViEEncoder::GetPacedSender() {
int32_t ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
if (effect_filter == NULL) {
if (effect_filter_ == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: no effect filter added",
__FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: deregister effect filter",
__FUNCTION__);
} else {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: register effect",
__FUNCTION__);
if (effect_filter_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: effect filter already added ", __FUNCTION__);
return -1;
}
if (effect_filter != NULL && effect_filter_ != NULL) {
LOG_F(LS_ERROR) << "Filter already set.";
return -1;
}
effect_filter_ = effect_filter;
return 0;
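Read together, the observer and effect-filter hunks above collapse the separate register/deregister branches into a single guard. A consolidated sketch of how RegisterCodecObserver reads after the change, assembled from the hunks above rather than copied from the final file:

int32_t ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
  CriticalSectionScoped cs(callback_cs_.get());
  // Registering over an existing observer is rejected; passing NULL
  // simply clears the current observer.
  if (observer && codec_observer_) {
    LOG_F(LS_ERROR) << "Observer already set.";
    return -1;
  }
  codec_observer_ = observer;
  return 0;
}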

View File

@@ -11,7 +11,7 @@
#include "webrtc/video_engine/vie_external_codec_impl.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
@@ -37,62 +37,42 @@ ViEExternalCodec* ViEExternalCodec::GetInterface(VideoEngine* video_engine) {
}
int ViEExternalCodecImpl::Release() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodec::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodec release too many times");
LOG(LS_WARNING) << "ViEExternalCodec released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodec reference count: %d", ref_count);
return ref_count;
}
ViEExternalCodecImpl::ViEExternalCodecImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodecImpl::ViEExternalCodecImpl() Ctor");
}
ViEExternalCodecImpl::~ViEExternalCodecImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodecImpl::~ViEExternalCodecImpl() Dtor");
}
int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
const unsigned char pl_type,
VideoEncoder* encoder,
bool internal_source) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s channel %d pl_type %d encoder 0x%x", __FUNCTION__,
video_channel, pl_type, encoder);
assert(encoder != NULL);
LOG(LS_INFO) << "Register external encoder for channel " << video_channel
<< ", pl_type " << static_cast<int>(pl_type)
<< ", internal_source " << internal_source;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument video_channel %u. Does it exist?",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
if (!encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument Encoder 0x%x.", __FUNCTION__, encoder);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
if (vie_encoder->RegisterExternalEncoder(encoder, pl_type, internal_source)
!= 0) {
if (vie_encoder->RegisterExternalEncoder(encoder, pl_type,
internal_source) != 0) {
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -101,17 +81,11 @@ int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
int ViEExternalCodecImpl::DeRegisterExternalSendCodec(
const int video_channel, const unsigned char pl_type) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s channel %d pl_type %d", __FUNCTION__, video_channel,
pl_type);
LOG(LS_INFO) << "Deregister external encoder for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument video_channel %u. Does it exist?",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -129,25 +103,15 @@ int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
VideoDecoder* decoder,
bool decoder_render,
int render_delay) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s channel %d pl_type %d decoder 0x%x, decoder_render %d, "
"renderDelay %d", __FUNCTION__, video_channel, pl_type, decoder,
decoder_render, render_delay);
LOG(LS_INFO) << "Register exrernal decoder for channel " << video_channel
<< ", pl_type " << pl_type
<< ", decoder_render " << decoder_render
<< ", render_delay " << render_delay;
assert(decoder != NULL);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument video_channel %u. Does it exist?",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
if (!decoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument decoder 0x%x.", __FUNCTION__, decoder);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -161,18 +125,13 @@ int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
}
int ViEExternalCodecImpl::DeRegisterExternalReceiveCodec(
const int video_channel, const unsigned char pl_type) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s channel %d pl_type %u", __FUNCTION__, video_channel,
pl_type);
const int video_channel, const unsigned char pl_type) {
LOG(LS_INFO) << "DeRegisterExternalReceiveCodec for channel " << video_channel
<< ", pl_type " << pl_type;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Invalid argument video_channel %u. Does it exist?",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}

View File

@@ -14,8 +14,8 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -29,9 +29,8 @@ ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
ViEFrameProviderBase::~ViEFrameProviderBase() {
if (frame_callbacks_.size() > 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"FrameCallbacks still exist when Provider deleted %d",
frame_callbacks_.size());
LOG_F(LS_WARNING) << "FrameCallbacks still exist when Provider deleted: "
<< frame_callbacks_.size();
}
for (FrameCallbacks::iterator it = frame_callbacks_.begin();
@@ -76,8 +75,7 @@ void ViEFrameProviderBase::DeliverFrame(
static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
if (process_time > 25) {
// Warn if the delivery time is too long.
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s Too long time: %ums", __FUNCTION__, process_time);
LOG(LS_WARNING) << "Too long time delivering frame " << process_time;
}
#endif
}
@@ -131,16 +129,10 @@ int ViEFrameProviderBase::GetBestFormat(int* best_width,
int ViEFrameProviderBase::RegisterFrameCallback(
int observer_id, ViEFrameCallback* callback_object) {
assert(callback_object);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
{
CriticalSectionScoped cs(provider_cs_.get());
if (std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
callback_object) != frame_callbacks_.end()) {
// This object is already registered.
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p already registered", __FUNCTION__,
callback_object);
assert(false && "frameObserver already registered");
return -1;
}
@@ -157,21 +149,15 @@ int ViEFrameProviderBase::RegisterFrameCallback(
int ViEFrameProviderBase::DeregisterFrameCallback(
const ViEFrameCallback* callback_object) {
assert(callback_object);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
CriticalSectionScoped cs(provider_cs_.get());
FrameCallbacks::iterator it = std::find(frame_callbacks_.begin(),
frame_callbacks_.end(),
callback_object);
if (it == frame_callbacks_.end()) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p not found", __FUNCTION__, callback_object);
return -1;
}
frame_callbacks_.erase(it);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p deregistered", __FUNCTION__, callback_object);
// Notify implementer of this class that the callback list have changed.
FrameCallbackChanged();
@@ -181,8 +167,6 @@ int ViEFrameProviderBase::DeregisterFrameCallback(
bool ViEFrameProviderBase::IsFrameCallbackRegistered(
const ViEFrameCallback* callback_object) {
assert(callback_object);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s(0x%p)", __FUNCTION__, callback_object);
CriticalSectionScoped cs(provider_cs_.get());
return std::find(frame_callbacks_.begin(), frame_callbacks_.end(),

View File

@@ -17,6 +17,7 @@
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
@@ -420,8 +421,10 @@ ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(int provider_id) const {
ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
if (!(capture_id >= kViECaptureIdBase &&
capture_id <= kViECaptureIdBase + kViEMaxCaptureDevices))
capture_id <= kViECaptureIdBase + kViEMaxCaptureDevices)) {
LOG(LS_ERROR) << "Capture device doesn't exist " << capture_id << ".";
return NULL;
}
return static_cast<ViECapturer*>(ViEFrameProvider(capture_id));
}