Refactored ViEFrameProviderBase.

Only style changes; pointers/references and functions will come in a later CL.

vie_capturer.cc and vie_file_player.cc are only changed due to inheriting protected members from ViEFrameProviderBase.

Review URL: http://webrtc-codereview.appspot.com/324001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1148 4adac7df-926f-26a2-2b94-8c16560cd09d
mflodman@webrtc.org 2011-12-09 15:20:58 +00:00
parent 0744ee563d
commit d5651b98c5
5 changed files with 360 additions and 411 deletions
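The style change is essentially the Google C++ naming convention for data members: the old leading-underscore camelCase names become lower_case names with a trailing underscore. A minimal before/after sketch, using members that appear in the diff below (not the complete class):

// Old naming in ViEFrameProviderBase (before this CL):
int _id;
int _engineId;
MapWrapper _frameCallbackMap;
CriticalSectionWrapper& _providerCritSect;

// New naming (after this CL):
int id_;
int engine_id_;
MapWrapper frame_callbacks_;
CriticalSectionWrapper& provider_crit_sect_;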

vie_capturer.cc

@ -92,9 +92,9 @@ ViECapturer::ViECapturer(int captureId,
ViECapturer::~ViECapturer() ViECapturer::~ViECapturer()
{ {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), ViEId(engine_id_, _captureId),
"ViECapturer Destructor, captureId: %d, engineId: %d", "ViECapturer Destructor, captureId: %d, engineId: %d",
_captureId, _engineId); _captureId, engine_id_);
// Stop the thread // Stop the thread
_deliverCritsect.Enter(); _deliverCritsect.Enter();
@ -104,12 +104,12 @@ ViECapturer::~ViECapturer()
_captureCritsect.Leave(); _captureCritsect.Leave();
_deliverCritsect.Leave(); _deliverCritsect.Leave();
_providerCritSect.Enter(); provider_crit_sect_.Enter();
if (_vieEncoder) if (_vieEncoder)
{ {
_vieEncoder->DeRegisterExternalEncoder(_codec.plType); _vieEncoder->DeRegisterExternalEncoder(_codec.plType);
} }
_providerCritSect.Leave(); provider_crit_sect_.Leave();
// Stop the camera input // Stop the camera input
if (_captureModule) if (_captureModule)
@ -128,7 +128,7 @@ ViECapturer::~ViECapturer()
} else } else
{ {
assert(false); assert(false);
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoRenderer, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoRenderer, ViEId(engine_id_, _captureId),
"%s: Not able to stop capture thread for device %d, leaking", "%s: Not able to stop capture thread for device %d, leaking",
__FUNCTION__, _captureId); __FUNCTION__, _captureId);
// Not possible to stop the thread, leak it... // Not possible to stop the thread, leak it...
@ -207,11 +207,11 @@ WebRtc_Word32 ViECapturer::Init(const WebRtc_UWord8* deviceUniqueIdUTF8,
if (deviceUniqueIdUTF8 == NULL) if (deviceUniqueIdUTF8 == NULL)
{ {
_captureModule = VideoCaptureFactory::Create( _captureModule = VideoCaptureFactory::Create(
ViEModuleId(_engineId, _captureId), _externalCaptureModule); ViEModuleId(engine_id_, _captureId), _externalCaptureModule);
} else } else
{ {
_captureModule = VideoCaptureFactory::Create( _captureModule = VideoCaptureFactory::Create(
ViEModuleId(_engineId, _captureId), deviceUniqueIdUTF8); ViEModuleId(engine_id_, _captureId), deviceUniqueIdUTF8);
} }
if (!_captureModule) if (!_captureModule)
return -1; return -1;
@ -260,7 +260,7 @@ int ViECapturer::FrameCallbackChanged()
WebRtc_Word32 ViECapturer::Start(const CaptureCapability captureCapability) WebRtc_Word32 ViECapturer::Start(const CaptureCapability captureCapability)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__); "%s", __FUNCTION__);
int width; int width;
@ -316,7 +316,7 @@ WebRtc_Word32 ViECapturer::Start(const CaptureCapability captureCapability)
WebRtc_Word32 ViECapturer::Stop() WebRtc_Word32 ViECapturer::Stop()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__); "%s", __FUNCTION__);
_requestedCapability = CaptureCapability(); _requestedCapability = CaptureCapability();
return _captureModule->StopCapture(); return _captureModule->StopCapture();
@ -330,7 +330,7 @@ WebRtc_Word32 ViECapturer::Stop()
bool ViECapturer::Started() bool ViECapturer::Started()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__); "%s", __FUNCTION__);
return _captureModule->CaptureStarted(); return _captureModule->CaptureStarted();
} }
@ -392,7 +392,7 @@ int ViECapturer::IncomingFrame(unsigned char* videoFrame,
RawVideoType videoType, RawVideoType videoType,
unsigned long long captureTime) unsigned long long captureTime)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u", "%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u",
width, height, captureTime); width, height, captureTime);
@ -414,7 +414,7 @@ int ViECapturer::IncomingFrameI420(
const ViEVideoFrameI420& video_frame, const ViEVideoFrameI420& video_frame,
unsigned long long captureTime) { unsigned long long captureTime) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), ViEId(engine_id_, _captureId),
"%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u", "%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u",
video_frame.width, video_frame.height, captureTime); video_frame.width, video_frame.height, captureTime);
@ -446,7 +446,7 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 captureId,
VideoFrame& videoFrame, VideoFrame& videoFrame,
VideoCodecType codecType) VideoCodecType codecType)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureId: %d)", __FUNCTION__, captureId); "%s(captureId: %d)", __FUNCTION__, captureId);
CriticalSectionScoped cs(_captureCritsect); CriticalSectionScoped cs(_captureCritsect);
@ -455,7 +455,7 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 captureId,
if (_encodedFrame.Length() != 0) // The last encoded frame has not been sent yet. Need to wait if (_encodedFrame.Length() != 0) // The last encoded frame has not been sent yet. Need to wait
{ {
_vieDeliverEvent.Reset(); _vieDeliverEvent.Reset();
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureId: %d) Last encoded frame not yet delivered.", "%s(captureId: %d) Last encoded frame not yet delivered.",
__FUNCTION__, captureId); __FUNCTION__, captureId);
_captureCritsect.Leave(); _captureCritsect.Leave();
@ -476,7 +476,7 @@ void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay) const WebRtc_Word32 delay)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), ViEId(engine_id_, _captureId),
"%s(captureId: %d) delay %d", __FUNCTION__, _captureId, "%s(captureId: %d) delay %d", __FUNCTION__, _captureId,
delay); delay);
@ -497,24 +497,24 @@ WebRtc_Word32 ViECapturer::RegisterEffectFilter(ViEEffectFilter* effectFilter)
{ {
if (_effectFilter == NULL) if (_effectFilter == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: no effect filter added for capture device %d", "%s: no effect filter added for capture device %d",
__FUNCTION__, _captureId); __FUNCTION__, _captureId);
return -1; return -1;
} }
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId,_captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_,_captureId),
"%s: deregister effect filter for device %d", __FUNCTION__, "%s: deregister effect filter for device %d", __FUNCTION__,
_captureId); _captureId);
} else } else
{ {
if (_effectFilter) if (_effectFilter)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,_captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_,_captureId),
"%s: effect filter already added for capture device %d", "%s: effect filter already added for capture device %d",
__FUNCTION__, _captureId); __FUNCTION__, _captureId);
return -1; return -1;
} }
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: register effect filter for device %d", __FUNCTION__, "%s: register effect filter for device %d", __FUNCTION__,
_captureId); _captureId);
} }
@ -534,10 +534,10 @@ WebRtc_Word32 ViECapturer::IncImageProcRefCount()
if (!_imageProcModule) if (!_imageProcModule)
{ {
assert(_imageProcModuleRefCounter==0); assert(_imageProcModuleRefCounter==0);
_imageProcModule = VideoProcessingModule::Create(ViEModuleId(_engineId, _captureId)); _imageProcModule = VideoProcessingModule::Create(ViEModuleId(engine_id_, _captureId));
if (_imageProcModule == NULL) if (_imageProcModule == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: could not create video processing module", "%s: could not create video processing module",
__FUNCTION__); __FUNCTION__);
return -1; return -1;
@ -560,7 +560,7 @@ WebRtc_Word32 ViECapturer::DecImageProcRefCount()
WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__, "%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable); _captureId, enable);
@ -570,7 +570,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
// Sanity check // Sanity check
if (_denoisingEnabled) if (_denoisingEnabled)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: denoising already enabled", __FUNCTION__); "%s: denoising already enabled", __FUNCTION__);
return -1; return -1;
} }
@ -584,7 +584,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
// Sanity check // Sanity check
if (_denoisingEnabled == false) if (_denoisingEnabled == false)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: denoising not enabled", __FUNCTION__); "%s: denoising not enabled", __FUNCTION__);
return -1; return -1;
} }
@ -597,7 +597,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__, "%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable); _captureId, enable);
@ -607,7 +607,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
// Sanity check // Sanity check
if (_deflickerFrameStats) if (_deflickerFrameStats)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering already enabled", __FUNCTION__); "%s: deflickering already enabled", __FUNCTION__);
return -1; return -1;
} }
@ -622,7 +622,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
// Sanity check // Sanity check
if (_deflickerFrameStats == NULL) if (_deflickerFrameStats == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering not enabled", __FUNCTION__); "%s: deflickering not enabled", __FUNCTION__);
return -1; return -1;
} }
@ -635,7 +635,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
} }
WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__, "%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable); _captureId, enable);
@ -645,7 +645,7 @@ WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
// Sanity check // Sanity check
if (_brightnessFrameStats) if (_brightnessFrameStats)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: BrightnessAlarm already enabled", __FUNCTION__); "%s: BrightnessAlarm already enabled", __FUNCTION__);
return -1; return -1;
} }
@ -660,7 +660,7 @@ WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
// Sanity check // Sanity check
if (_brightnessFrameStats == NULL) if (_brightnessFrameStats == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering not enabled", __FUNCTION__); "%s: deflickering not enabled", __FUNCTION__);
return -1; return -1;
} }
@ -703,7 +703,7 @@ bool ViECapturer::ViECaptureProcess()
CriticalSectionScoped cs(_observerCritsect); CriticalSectionScoped cs(_observerCritsect);
if (_observer) if (_observer)
{ {
_observer->BrightnessAlarm(_id, _currentBrightnessLevel); _observer->BrightnessAlarm(id_, _currentBrightnessLevel);
_reportedBrightnessLevel = _currentBrightnessLevel; _reportedBrightnessLevel = _currentBrightnessLevel;
} }
} }
@ -722,7 +722,7 @@ void ViECapturer::DeliverI420Frame(VideoFrame& videoFrame)
_imageProcModule->Deflickering(videoFrame, *_deflickerFrameStats); _imageProcModule->Deflickering(videoFrame, *_deflickerFrameStats);
} else } else
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: could not get frame stats for captured frame", __FUNCTION__); "%s: could not get frame stats for captured frame", __FUNCTION__);
} }
} }
@ -750,7 +750,7 @@ void ViECapturer::DeliverI420Frame(VideoFrame& videoFrame)
break; break;
default: default:
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), ViEId(engine_id_, _captureId),
"%s: Brightness detection failed", __FUNCTION__); "%s: Brightness detection failed", __FUNCTION__);
} }
} }
@ -776,7 +776,7 @@ void ViECapturer::DeliverCodedFrame(VideoFrame& videoFrame)
_encodeCompleteCallback->Encoded(encodedImage); _encodeCompleteCallback->Encoded(encodedImage);
} }
if (NumberOfRegistersFrameCallbacks() > 0 && _decoderInitialized) if (NumberOfRegisteredFrameCallbacks() > 0 && _decoderInitialized)
{ {
videoFrame.Swap(_decodeBuffer.payloadData, _decodeBuffer.bufferSize, videoFrame.Swap(_decodeBuffer.payloadData, _decodeBuffer.bufferSize,
_decodeBuffer.payloadSize); _decodeBuffer.payloadSize);
@ -795,19 +795,19 @@ void ViECapturer::DeliverCodedFrame(VideoFrame& videoFrame)
int ViECapturer::DeregisterFrameCallback(const ViEFrameCallback* callbackObject) int ViECapturer::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
{ {
_providerCritSect.Enter(); provider_crit_sect_.Enter();
if (callbackObject == _vieEncoder) //Don't use this camera as encoder anymore. Need to tell the ViEEncoder. if (callbackObject == _vieEncoder) //Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
{ {
ViEEncoder* vieEncoder = NULL; ViEEncoder* vieEncoder = NULL;
vieEncoder = _vieEncoder; vieEncoder = _vieEncoder;
_vieEncoder = NULL; _vieEncoder = NULL;
_providerCritSect.Leave(); provider_crit_sect_.Leave();
_deliverCritsect.Enter(); //Need to take this here in order to avoid deadlock with VCM. The reason is that VCM will call ::Release and a deadlock can occure. _deliverCritsect.Enter(); //Need to take this here in order to avoid deadlock with VCM. The reason is that VCM will call ::Release and a deadlock can occure.
vieEncoder->DeRegisterExternalEncoder(_codec.plType); vieEncoder->DeRegisterExternalEncoder(_codec.plType);
_deliverCritsect.Leave(); _deliverCritsect.Leave();
return 0; return 0;
} }
_providerCritSect.Leave(); provider_crit_sect_.Leave();
return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject); return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
} }
@ -816,7 +816,7 @@ int ViECapturer::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
*/ */
bool ViECapturer::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject) bool ViECapturer::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject)
{ {
CriticalSectionScoped cs(_providerCritSect); CriticalSectionScoped cs(provider_crit_sect_);
if (callbackObject == _vieEncoder) if (callbackObject == _vieEncoder)
{ {
return true; return true;
@ -834,12 +834,12 @@ WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vieEncoder, ViEEncoder& vieEncoder,
WebRtc_Word32 vieEncoderId) WebRtc_Word32 vieEncoderId)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
{ {
if (_vieEncoder && &vieEncoder != _vieEncoder) if (_vieEncoder && &vieEncoder != _vieEncoder)
{ {
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d Capture device already encoding)", "%s(captureDeviceId: %d Capture device already encoding)",
__FUNCTION__, _captureId); __FUNCTION__, _captureId);
return -1; return -1;
@ -903,7 +903,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores, WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize) WebRtc_UWord32 maxPayloadSize)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect); CriticalSectionScoped cs(_encodingCritsect);
@ -920,7 +920,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codecSettings,
false) == 0) false) == 0)
{ {
_decoderInitialized = true; _decoderInitialized = true;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d) VCM Decoder initialized", "%s(captureDeviceId: %d) VCM Decoder initialized",
__FUNCTION__, _captureId); __FUNCTION__, _captureId);
} }
@ -956,7 +956,7 @@ ViECapturer::Encode(const RawImage& inputImage,
WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback* callback)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_deliverCritsect); CriticalSectionScoped cs(_deliverCritsect);
@ -969,7 +969,7 @@ WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback*
} }
WebRtc_Word32 ViECapturer::Release() WebRtc_Word32 ViECapturer::Release()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
{ {
@ -1002,7 +1002,7 @@ WebRtc_Word32 ViECapturer::Release()
WebRtc_Word32 ViECapturer::Reset() WebRtc_Word32 ViECapturer::Reset()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
return 0; return 0;
@ -1010,7 +1010,7 @@ WebRtc_Word32 ViECapturer::Reset()
WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss, WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt) int rtt)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect); CriticalSectionScoped cs(_encodingCritsect);
@ -1023,7 +1023,7 @@ WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss,
WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 newBitRate, WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate) WebRtc_UWord32 frameRate)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId); "%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect); CriticalSectionScoped cs(_encodingCritsect);
@ -1055,7 +1055,7 @@ WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver& observer)
{ {
if (_observer) if (_observer)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_,
_captureId), _captureId),
"%s Observer already registered", __FUNCTION__, "%s Observer already registered", __FUNCTION__,
_captureId); _captureId);
@ -1076,7 +1076,7 @@ WebRtc_Word32 ViECapturer::DeRegisterObserver()
CriticalSectionScoped cs(_observerCritsect); CriticalSectionScoped cs(_observerCritsect);
if (!_observer) if (!_observer)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s No observer registered", __FUNCTION__, _captureId); "%s No observer registered", __FUNCTION__, _captureId);
return -1; return -1;
} }
@ -1100,18 +1100,18 @@ void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate) const WebRtc_UWord32 frameRate)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), "OnCaptureFrameRate %d", ViEId(engine_id_, _captureId), "OnCaptureFrameRate %d",
frameRate); frameRate);
CriticalSectionScoped cs(_observerCritsect); CriticalSectionScoped cs(_observerCritsect);
_observer->CapturedFrameRate(_id, (WebRtc_UWord8) frameRate); _observer->CapturedFrameRate(id_, (WebRtc_UWord8) frameRate);
} }
void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id, void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm alarm) const VideoCaptureAlarm alarm)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), "OnNoPictureAlarm %d", alarm); ViEId(engine_id_, _captureId), "OnNoPictureAlarm %d", alarm);
CriticalSectionScoped cs(_observerCritsect); CriticalSectionScoped cs(_observerCritsect);
CaptureAlarm vieAlarm = (alarm == Raised) ? AlarmRaised : AlarmCleared; CaptureAlarm vieAlarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;

vie_file_player.cc

@ -66,7 +66,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrFeedBackCritSect = CriticalSectionWrapper::CreateCriticalSection(); _ptrFeedBackCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrFeedBackCritSect) if (!_ptrFeedBackCritSect)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate critsect"); "ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1; return -1;
} }
@ -74,7 +74,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrAudioCritSect = CriticalSectionWrapper::CreateCriticalSection(); _ptrAudioCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrAudioCritSect) if (!_ptrAudioCritSect)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate critsect"); "ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1; return -1;
} }
@ -82,30 +82,30 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrDecodeEvent = EventWrapper::Create(); _ptrDecodeEvent = EventWrapper::Create();
if (!_ptrDecodeEvent) if (!_ptrDecodeEvent)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate event"); "ViEFilePlayer::StartPlay() failed to allocate event");
return -1; return -1;
} }
if (strlen(fileNameUTF8) > FileWrapper::kMaxFileNameSize) if (strlen(fileNameUTF8) > FileWrapper::kMaxFileNameSize)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() To long filename"); "ViEFilePlayer::StartPlay() To long filename");
return -1; return -1;
} }
strncpy(_fileName, fileNameUTF8, strlen(fileNameUTF8) + 1); strncpy(_fileName, fileNameUTF8, strlen(fileNameUTF8) + 1);
_filePlayer = FilePlayer::CreateFilePlayer(ViEId(_engineId, _id), _filePlayer = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
fileFormat); fileFormat);
if (!_filePlayer) if (!_filePlayer)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to create file player"); "ViEFilePlayer::StartPlay() failed to create file player");
return -1; return -1;
} }
if (_filePlayer->RegisterModuleFileCallback(this) == -1) if (_filePlayer->RegisterModuleFileCallback(this) == -1)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to RegisterModuleFileCallback"); "ViEFilePlayer::StartPlay() failed to RegisterModuleFileCallback");
_filePlayer = NULL; _filePlayer = NULL;
return -1; return -1;
@ -115,7 +115,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
"ViEFilePlayThread"); "ViEFilePlayThread");
if (!_ptrDecodeThread) if (!_ptrDecodeThread)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to start decode thread."); "ViEFilePlayer::StartPlay() failed to start decode thread.");
_filePlayer = NULL; _filePlayer = NULL;
return -1; return -1;
@ -131,7 +131,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_audioStream = false; _audioStream = false;
if (error) if (error)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to Start play video file"); "ViEFilePlayer::StartPlay() failed to Start play video file");
return -1; return -1;
} }
@ -149,7 +149,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
if (!_veFileInterface) if (!_veFileInterface)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id), ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to get VEFile interface"); "ViEFilePlayer::StartPlay() failed to get VEFile interface");
return -1; return -1;
} }
@ -157,7 +157,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
if (!_veVideoSync) if (!_veVideoSync)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id), ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to get " "ViEFilePlayer::StartPlay() failed to get "
"VoEVideoSync interface"); "VoEVideoSync interface");
return -1; return -1;
@ -175,7 +175,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
*/ */
int ViEFilePlayer::FrameCallbackChanged() int ViEFilePlayer::FrameCallbackChanged()
{ {
if (ViEFrameProviderBase::NumberOfRegistersFrameCallbacks() > _videoClients) if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() > _videoClients)
{ {
if (!_playBackStarted) if (!_playBackStarted)
{ {
@ -186,13 +186,13 @@ int ViEFilePlayer::FrameCallbackChanged()
WEBRTC_TRACE( WEBRTC_TRACE(
webrtc::kTraceStateInfo, webrtc::kTraceStateInfo,
webrtc::kTraceVideo, webrtc::kTraceVideo,
ViEId(_engineId, _id), ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged() Started filedecode thread %u", "ViEFilePlayer::FrameCallbackChanged() Started filedecode thread %u",
threadId); threadId);
} else } else
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id), ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged() Failed to start file decode thread."); "ViEFilePlayer::FrameCallbackChanged() Failed to start file decode thread.");
} }
} else if (!_filePlayer->IsPlayingFile()) } else if (!_filePlayer->IsPlayingFile())
@ -201,14 +201,14 @@ int ViEFilePlayer::FrameCallbackChanged()
!_audioStream) != 0) !_audioStream) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id), ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged(), Failed to restart the file player."); "ViEFilePlayer::FrameCallbackChanged(), Failed to restart the file player.");
} }
} }
} }
_videoClients = ViEFrameProviderBase::NumberOfRegistersFrameCallbacks(); _videoClients = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
return 0; return 0;
} }
@ -265,7 +265,7 @@ int ViEFilePlayer::StopPlay() //Only called from destructor.
} else } else
{ {
assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread"); assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() Failed to stop file decode thread."); "ViEFilePlayer::StartPlay() Failed to stop file decode thread.");
} }
} }
@ -372,15 +372,15 @@ bool ViEFilePlayer::NeedsAudioFromFile(void* buf)
// From FileCallback // From FileCallback
void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id) void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, id), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, id),
"%s: fileId %d", __FUNCTION__, _id); "%s: fileId %d", __FUNCTION__, id_);
_filePlayer->StopPlayingFile(); _filePlayer->StopPlayingFile();
CriticalSectionScoped lock(*_ptrFeedBackCritSect); CriticalSectionScoped lock(*_ptrFeedBackCritSect);
if (_observer) if (_observer)
{ {
_observer->PlayFileEnded(_id); _observer->PlayFileEnded(id_);
} }
} }
@ -415,7 +415,7 @@ int ViEFilePlayer::SendAudioOnChannel(const int audioChannel,
if (!_veFileInterface) if (!_veFileInterface)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__); "%s No VEFile interface.", __FUNCTION__);
return -1; return -1;
} }
@ -425,7 +425,7 @@ int ViEFilePlayer::SendAudioOnChannel(const int audioChannel,
kFileFormatPcm16kHzFile, kFileFormatPcm16kHzFile,
volumeScaling) != 0) volumeScaling) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::SendAudioOnChannel() VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f", "ViEFilePlayer::SendAudioOnChannel() VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
audioChannel, mixMicrophone, volumeScaling); audioChannel, mixMicrophone, volumeScaling);
return -1; return -1;
@ -448,14 +448,14 @@ int ViEFilePlayer::StopSendAudioOnChannel(const int audioChannel)
MapItem* audioItem = _audioChannelsSending.Find(audioChannel); MapItem* audioItem = _audioChannelsSending.Find(audioChannel);
if (!audioItem) if (!audioItem)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"_s AudioChannel %d not sending", __FUNCTION__, audioChannel); "_s AudioChannel %d not sending", __FUNCTION__, audioChannel);
return -1; return -1;
} }
result = _veFileInterface->StopPlayingFileAsMicrophone(audioChannel); result = _veFileInterface->StopPlayingFileAsMicrophone(audioChannel);
if (result != 0) if (result != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StopSendAudioOnChannel() VE_StopPlayingFileAsMicrophone failed. audioChannel %d", "ViEFilePlayer::StopSendAudioOnChannel() VE_StopPlayingFileAsMicrophone failed. audioChannel %d",
audioChannel); audioChannel);
} }
@ -470,7 +470,7 @@ int ViEFilePlayer::PlayAudioLocally(const int audioChannel, float volumeScaling)
{ {
if (!_veFileInterface) if (!_veFileInterface)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__); "%s No VEFile interface.", __FUNCTION__);
return -1; return -1;
} }
@ -480,7 +480,7 @@ int ViEFilePlayer::PlayAudioLocally(const int audioChannel, float volumeScaling)
kFileFormatPcm16kHzFile, kFileFormatPcm16kHzFile,
volumeScaling) != 0) volumeScaling) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f", "%s VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
__FUNCTION__, audioChannel, volumeScaling); __FUNCTION__, audioChannel, volumeScaling);
return -1; return -1;
@ -498,13 +498,13 @@ int ViEFilePlayer::StopPlayAudioLocally(const int audioChannel)
{ {
if (!_veFileInterface) if (!_veFileInterface)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__); "%s No VEFile interface.", __FUNCTION__);
return -1; return -1;
} }
if (_veFileInterface->StopPlayingFileLocally(audioChannel) != 0) if (_veFileInterface->StopPlayingFileLocally(audioChannel) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s VE_StopPlayingFileLocally failed. audioChannel %d.", "%s VE_StopPlayingFileLocally failed. audioChannel %d.",
__FUNCTION__, audioChannel); __FUNCTION__, audioChannel);
return -1; return -1;

vie_frame_provider_base.cc

@ -8,307 +8,260 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "vie_frame_provider_base.h" #include "video_engine/vie_frame_provider_base.h"
#include "critical_section_wrapper.h"
#include "tick_util.h" #include "system_wrappers/interface/critical_section_wrapper.h"
#include "trace.h" #include "system_wrappers/interface/tick_util.h"
#include "vie_defines.h" #include "system_wrappers/interface/trace.h"
#include "video_engine/vie_defines.h"
namespace webrtc { namespace webrtc {
ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engineId): ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
_id(Id), : id_(Id),
_engineId(engineId), engine_id_(engine_id),
_frameCallbackMap(), frame_callbacks_(),
_providerCritSect(*CriticalSectionWrapper::CreateCriticalSection()), provider_crit_sect_(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrExtraFrame(NULL), extra_frame_(NULL),
_frameDelay(0) frame_delay_(0) {
{
} }
ViEFrameProviderBase::~ViEFrameProviderBase() ViEFrameProviderBase::~ViEFrameProviderBase() {
{ if (frame_callbacks_.Size() > 0) {
if(_frameCallbackMap.Size()>0) WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
{ "FrameCallbacks still exist when Provider deleted %d",
WEBRTC_TRACE(webrtc::kTraceWarning, frame_callbacks_.Size());
webrtc::kTraceVideo, }
ViEId(_engineId,_id), for (MapItem* item = frame_callbacks_.First(); item != NULL;
"FrameCallbacks still exist when Provider deleted %d", item = frame_callbacks_.Next(item)) {
_frameCallbackMap.Size()); static_cast<ViEFrameCallback*>(item->GetItem())->ProviderDestroyed(id_);
} }
for(MapItem* item=_frameCallbackMap.First();item!=NULL;item=_frameCallbackMap.Next(item))
{
static_cast<ViEFrameCallback*>(item->GetItem())->ProviderDestroyed(_id);
}
while(_frameCallbackMap.Erase(_frameCallbackMap.First()) == 0) while (frame_callbacks_.Erase(frame_callbacks_.First()) == 0) {
; }
delete &_providerCritSect; delete &provider_crit_sect_;
delete _ptrExtraFrame; delete extra_frame_;
} }
int ViEFrameProviderBase::Id() int ViEFrameProviderBase::Id() {
{ return id_;
return _id;
} }
void ViEFrameProviderBase::DeliverFrame(webrtc::VideoFrame& videoFrame,int numCSRCs, void ViEFrameProviderBase::DeliverFrame(
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) VideoFrame& video_frame,
{ int num_csrcs,
#ifdef _DEBUG const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
const TickTime startProcessTime=TickTime::Now(); #ifdef DEBUG_
const TickTime start_process_time = TickTime::Now();
#endif #endif
CriticalSectionScoped cs(_providerCritSect); CriticalSectionScoped cs(provider_crit_sect_);
// Deliver the frame to all registered callbacks // Deliver the frame to all registered callbacks.
if (_frameCallbackMap.Size() > 0) if (frame_callbacks_.Size() > 0) {
{ if (frame_callbacks_.Size() == 1) {
if(_frameCallbackMap.Size()==1) // We don't have to copy the frame.
{ ViEFrameCallback* frame_observer =
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(_frameCallbackMap.First()->GetItem()); static_cast<ViEFrameCallback*>(frame_callbacks_.First()->GetItem());
frameObserver->DeliverFrame(_id,videoFrame,numCSRCs,CSRC); frame_observer->DeliverFrame(id_, video_frame, num_csrcs, CSRC);
} else {
// Make a copy of the frame for all callbacks.
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
if (extra_frame_ == NULL) {
extra_frame_ = new VideoFrame();
} }
else if (map_item != NULL) {
{ ViEFrameCallback* frame_observer =
// Make a copy of the frame for all callbacks static_cast<ViEFrameCallback*>(map_item->GetItem());
for (MapItem* mapItem = _frameCallbackMap.First(); if (frame_observer != NULL) {
mapItem != NULL; // We must copy the frame each time since the previous receiver
mapItem = _frameCallbackMap.Next(mapItem)) // might swap it to avoid a copy.
{ extra_frame_->CopyFrame(video_frame);
if (_ptrExtraFrame == NULL) frame_observer->DeliverFrame(id_, *extra_frame_, num_csrcs, CSRC);
{ }
_ptrExtraFrame = new webrtc::VideoFrame();
}
if (mapItem != NULL)
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem());
if (frameObserver != NULL)
{
// We must copy the frame each time since the previous receiver might swap it...
_ptrExtraFrame->CopyFrame(videoFrame);
frameObserver->DeliverFrame(_id, *_ptrExtraFrame,numCSRCs,CSRC);
}
}
}
} }
}
} }
}
#ifdef _DEBUG #ifdef DEBUG_
const int processTime=(int) (TickTime::Now()-startProcessTime).Milliseconds(); const int process_time =
if(processTime>25) // Warn If the delivery time is too long. static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
{ if (process_time > 25) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId,_id), "%s Too long time: %ums",__FUNCTION__,processTime); // Warn if the delivery time is too long.
} WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s Too long time: %ums", __FUNCTION__, process_time);
}
#endif #endif
} }
void ViEFrameProviderBase::SetFrameDelay(int frameDelay) void ViEFrameProviderBase::SetFrameDelay(int frame_delay) {
{ CriticalSectionScoped cs(provider_crit_sect_);
frame_delay_ = frame_delay;
CriticalSectionScoped cs(_providerCritSect); for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
_frameDelay=frameDelay; map_item = frame_callbacks_.Next(map_item)) {
ViEFrameCallback* frame_observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
assert(frame_observer);
frame_observer->DelayChanged(id_, frame_delay);
}
}
for (MapItem* mapItem = _frameCallbackMap.First(); int ViEFrameProviderBase::FrameDelay() {
mapItem != NULL; return frame_delay_;
mapItem = _frameCallbackMap.Next(mapItem)) }
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem()); int ViEFrameProviderBase::GetBestFormat(int& best_width,
assert(frameObserver); int& best_height,
frameObserver->DelayChanged(_id,frameDelay); int& best_frame_rate) {
int largest_width = 0;
int largest_height = 0;
int highest_frame_rate = 0;
CriticalSectionScoped cs(provider_crit_sect_);
// Check if this one already exists.
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
int prefered_width = 0;
int prefered_height = 0;
int prefered_frame_rate = 0;
ViEFrameCallback* callback_object =
static_cast<ViEFrameCallback*>(map_item->GetItem());
assert(callback_object);
if (callback_object->GetPreferedFrameSettings(prefered_width,
prefered_height,
prefered_frame_rate) == 0) {
if (prefered_width > largest_width) {
largest_width = prefered_width;
}
if (prefered_height > largest_height) {
largest_height = prefered_height;
}
if (prefered_frame_rate > highest_frame_rate) {
highest_frame_rate = prefered_frame_rate;
}
} }
}
best_width = largest_width;
best_height = largest_height;
best_frame_rate = highest_frame_rate;
return 0;
} }
int ViEFrameProviderBase::FrameDelay() int ViEFrameProviderBase::RegisterFrameCallback(
{ int observer_id, ViEFrameCallback* callback_object) {
return _frameDelay; if (callback_object == NULL) {
} WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
int ViEFrameProviderBase::GetBestFormat(int& bestWidth, {
int& bestHeight, CriticalSectionScoped cs(provider_crit_sect_);
int& bestFrameRate)
{
int largestWidth = 0; // Check if the callback already exists.
int largestHeight = 0; for (MapItem* map_item = frame_callbacks_.First();
int highestFrameRate = 0; map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
CriticalSectionScoped cs(_providerCritSect); const ViEFrameCallback* observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
// Check if this one already exists... if (observer == callback_object) {
for (MapItem* mapItem = _frameCallbackMap.First(); // This callback is already registered.
mapItem != NULL; WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
mapItem = _frameCallbackMap.Next(mapItem)) "%s 0x%p already registered", __FUNCTION__,
{ callback_object);
assert("!frameObserver already registered");
int preferedWidth=0;
int preferedHeight=0;
int preferedFrameRate=0;
ViEFrameCallback* callbackObject = static_cast<ViEFrameCallback*>(mapItem->GetItem());
assert(callbackObject);
if(callbackObject->GetPreferedFrameSettings(preferedWidth,preferedHeight,preferedFrameRate)==0)
{
if (preferedWidth > largestWidth)
{
largestWidth = preferedWidth;
}
if (preferedHeight > largestHeight)
{
largestHeight = preferedHeight;
}
if (preferedFrameRate > highestFrameRate)
{
highestFrameRate = preferedFrameRate;
}
}
}
bestWidth = largestWidth;
bestHeight = largestHeight;
bestFrameRate = highestFrameRate;
return 0;
}
int ViEFrameProviderBase::RegisterFrameCallback(int observerId,ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return -1; return -1;
}
} }
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", __FUNCTION__, callbackObject);
{ if (frame_callbacks_.Insert(observer_id, callback_object) != 0) {
CriticalSectionScoped cs(_providerCritSect); WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: Could not add 0x%p to list", __FUNCTION__,
// Check if this one already exists... callback_object);
for (MapItem* mapItem = _frameCallbackMap.First(); return -1;
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// This callback is already registered
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p already registered", __FUNCTION__, callbackObject);
assert("!frameObserver already registered");
return -1;
}
}
if (_frameCallbackMap.Insert(observerId,callbackObject) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: Could not add 0x%p to list", __FUNCTION__, callbackObject);
return -1;
}
} }
// Report current capture delay }
callbackObject->DelayChanged(_id,_frameDelay); // Report current capture delay
callback_object->DelayChanged(id_, frame_delay_);
FrameCallbackChanged(); // Notify implementer of this class that the callback list have changed
return 0;
// Notify implementer of this class that the callback list have changed.
FrameCallbackChanged();
return 0;
} }
int ViEFrameProviderBase::DeregisterFrameCallback(
const ViEFrameCallback* callback_object) {
if (!callback_object) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
// ---------------------------------------------------------------------------- {
// DeregisterFrameCallback CriticalSectionScoped cs(provider_crit_sect_);
// ---------------------------------------------------------------------------- bool item_found = false;
int ViEFrameProviderBase::DeregisterFrameCallback(const ViEFrameCallback* callbackObject) // Try to find the callback in our list.
{ for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
if (callbackObject == NULL) map_item = frame_callbacks_.Next(map_item)) {
{ const ViEFrameCallback* observer =
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id), static_cast<ViEFrameCallback*>(map_item->GetItem());
"%s: No argument", __FUNCTION__); if (observer == callback_object) {
return -1; // We found it, remove it!
frame_callbacks_.Erase(map_item);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p deregistered", __FUNCTION__, callback_object);
item_found = true;
break;
}
} }
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id), if (!item_found) {
"%s(0x%p)", __FUNCTION__, callbackObject); WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p not found", __FUNCTION__, callback_object);
return -1;
{
CriticalSectionScoped cs(_providerCritSect);
bool itemFound=false;
// Try to find the callback in our list
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// We found it, remove it!
_frameCallbackMap.Erase(mapItem);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p deregistered", __FUNCTION__, callbackObject);
itemFound=true;
break;
}
}
if(!itemFound)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not found", __FUNCTION__, callbackObject);
return -1;
}
} }
}
FrameCallbackChanged(); // Notify implementer of this class that the callback list have changed // Notify implementer of this class that the callback list have changed.
return 0; FrameCallbackChanged();
return 0;
} }
// ---------------------------------------------------------------------------- bool ViEFrameProviderBase::IsFrameCallbackRegistered(
// IsFrameCallbackRegistered const ViEFrameCallback* callback_object) {
// ---------------------------------------------------------------------------- if (!callback_object) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
bool ViEFrameProviderBase::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject) "%s: No argument", __FUNCTION__);
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return false;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", __FUNCTION__, callbackObject);
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
if (callbackObject == mapItem->GetItem())
{
// We found the callback
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p is registered", __FUNCTION__, callbackObject);
return true;
}
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not registered", __FUNCTION__, callbackObject);
return false; return false;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s(0x%p)", __FUNCTION__, callback_object);
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
if (callback_object == map_item->GetItem()) {
// We found the callback.
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p is registered", __FUNCTION__, callback_object);
return true;
}
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p not registered", __FUNCTION__, callback_object);
return false;
} }
// ---------------------------------------------------------------------------- int ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() {
// NumberOfRegistersFrameCallbacks CriticalSectionScoped cs(provider_crit_sect_);
// ---------------------------------------------------------------------------- return frame_callbacks_.Size();
int ViEFrameProviderBase::NumberOfRegistersFrameCallbacks()
{
CriticalSectionScoped cs(_providerCritSect);
return _frameCallbackMap.Size();
} }
} // namespac webrtc } // namespac webrtc

vie_frame_provider_base.h

@ -8,95 +8,91 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
/* #ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
* vie_frame_provider_base.h #define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_ #include "modules/interface/module_common_types.h"
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_ #include "system_wrappers/interface/map_wrapper.h"
// Defines
#include "typedefs.h" #include "typedefs.h"
#include "module_common_types.h"
#include "map_wrapper.h"
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper; class CriticalSectionWrapper;
class VideoEncoder; class VideoEncoder;
class ViEFrameCallback // ViEFrameCallback shall be implemented by all classes receiving frames from a
{ // frame provider.
public: class ViEFrameCallback {
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0, public:
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0; virtual void DeliverFrame(int id,
/* VideoFrame& video_frame,
* Delay has changed from the provider. int num_csrcs = 0,
* frameDelay new capture delay in Ms. const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
*/
virtual void DelayChanged(int id, int frameDelay)=0;
/* // The capture delay has changed from the provider. |frame_delay| is given in
Fetch the width, height and frame rate preferred by this observer. // ms.
return 0 on success, -1 otherwise. virtual void DelayChanged(int id, int frame_delay) = 0;
*/
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate)=0;
virtual void ProviderDestroyed(int id) = 0; // Get the width, height and frame rate preferred by this observer.
virtual int GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate) = 0;
protected: // ProviderDestroyed is called when the frame is about to be destroyed. There
virtual ~ViEFrameCallback() // must not be any more calls to the frame provider after this.
{ virtual void ProviderDestroyed(int id) = 0;
}
; virtual ~ViEFrameCallback() {}
}; };
class ViEFrameProviderBase // ViEFrameProviderBase is a base class that will deliver frames to all
{ // registered ViEFrameCallbacks.
public: class ViEFrameProviderBase {
ViEFrameProviderBase(int Id, int engineId); public:
virtual ~ViEFrameProviderBase(); ViEFrameProviderBase(int Id, int engine_id);
int Id(); virtual ~ViEFrameProviderBase();
// Register frame callbacks, i.e. a receiver of the captured frame. // Returns the frame provider id.
virtual int RegisterFrameCallback(int observerId, int Id();
ViEFrameCallback* callbackObject);
virtual int
DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
virtual bool
IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
int NumberOfRegistersFrameCallbacks(); // Register frame callbacks, i.e. a receiver of the captured frame.
virtual int RegisterFrameCallback(int observer_id,
ViEFrameCallback* callback_object);
// FrameCallbackChanged virtual int DeregisterFrameCallback(const ViEFrameCallback* callback_object);
// Inherited classes should check for new frameSettings and reconfigure output if possible.
// Return 0 on success, -1 otherwise.
virtual int FrameCallbackChanged() = 0;
protected: virtual bool IsFrameCallbackRegistered(
void DeliverFrame(VideoFrame& videoFrame, int numCSRCs = 0, const ViEFrameCallback* callback_object);
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frameDelay);
int FrameDelay();
int GetBestFormat(int& bestWidth,
int& bestHeight,
int& bestFrameRate);
int _id; int NumberOfRegisteredFrameCallbacks();
int _engineId;
protected: // FrameCallbackChanged
// Frame callbacks // Inherited classes should check for new frame_settings and reconfigure
MapWrapper _frameCallbackMap; // output if possible.
CriticalSectionWrapper& _providerCritSect; virtual int FrameCallbackChanged() = 0;
private:
VideoFrame* _ptrExtraFrame; protected:
void DeliverFrame(VideoFrame& video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frame_delay);
int FrameDelay();
int GetBestFormat(int& best_width,
int& best_height,
int& best_frame_rate);
//Members int id_;
int _frameDelay; int engine_id_;
// Frame callbacks.
MapWrapper frame_callbacks_;
CriticalSectionWrapper& provider_crit_sect_;
private:
VideoFrame* extra_frame_;
int frame_delay_;
}; };
} //namespace webrtc } // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_

vie_input_manager.cc

@ -405,7 +405,7 @@ int ViEInputManager::DestroyCaptureDevice(const int captureId)
__FUNCTION__, captureId); __FUNCTION__, captureId);
return -1; return -1;
} }
WebRtc_UWord32 numCallbacks = vieCapture->NumberOfRegistersFrameCallbacks(); WebRtc_UWord32 numCallbacks = vieCapture->NumberOfRegisteredFrameCallbacks();
if (numCallbacks > 0) if (numCallbacks > 0)
{ {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId), WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),
@ -537,7 +537,7 @@ int ViEInputManager::DestroyFilePlayer(int fileId)
return -1; return -1;
} }
int numCallbacks = int numCallbacks =
vieFilePlayer->NumberOfRegistersFrameCallbacks(); vieFilePlayer->NumberOfRegisteredFrameCallbacks();
if (numCallbacks > 0) if (numCallbacks > 0)
{ {
WEBRTC_TRACE( webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId), WEBRTC_TRACE( webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),