Refactored ViEFrameProviderBase.

Only style changes; pointers/references and functions will come in a later CL.

vie_capturer.cc and vie_file_player.cc are only changed due to inheriting protected members from ViEFrameProviderBase.

Review URL: http://webrtc-codereview.appspot.com/324001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1148 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2011-12-09 15:20:58 +00:00
parent 0744ee563d
commit d5651b98c5
5 changed files with 360 additions and 411 deletions

View File

@ -92,9 +92,9 @@ ViECapturer::ViECapturer(int captureId,
ViECapturer::~ViECapturer()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(_engineId, _captureId),
ViEId(engine_id_, _captureId),
"ViECapturer Destructor, captureId: %d, engineId: %d",
_captureId, _engineId);
_captureId, engine_id_);
// Stop the thread
_deliverCritsect.Enter();
@ -104,12 +104,12 @@ ViECapturer::~ViECapturer()
_captureCritsect.Leave();
_deliverCritsect.Leave();
_providerCritSect.Enter();
provider_crit_sect_.Enter();
if (_vieEncoder)
{
_vieEncoder->DeRegisterExternalEncoder(_codec.plType);
}
_providerCritSect.Leave();
provider_crit_sect_.Leave();
// Stop the camera input
if (_captureModule)
@ -128,7 +128,7 @@ ViECapturer::~ViECapturer()
} else
{
assert(false);
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoRenderer, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoRenderer, ViEId(engine_id_, _captureId),
"%s: Not able to stop capture thread for device %d, leaking",
__FUNCTION__, _captureId);
// Not possible to stop the thread, leak it...
@ -207,11 +207,11 @@ WebRtc_Word32 ViECapturer::Init(const WebRtc_UWord8* deviceUniqueIdUTF8,
if (deviceUniqueIdUTF8 == NULL)
{
_captureModule = VideoCaptureFactory::Create(
ViEModuleId(_engineId, _captureId), _externalCaptureModule);
ViEModuleId(engine_id_, _captureId), _externalCaptureModule);
} else
{
_captureModule = VideoCaptureFactory::Create(
ViEModuleId(_engineId, _captureId), deviceUniqueIdUTF8);
ViEModuleId(engine_id_, _captureId), deviceUniqueIdUTF8);
}
if (!_captureModule)
return -1;
@ -260,7 +260,7 @@ int ViECapturer::FrameCallbackChanged()
WebRtc_Word32 ViECapturer::Start(const CaptureCapability captureCapability)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__);
int width;
@ -316,7 +316,7 @@ WebRtc_Word32 ViECapturer::Start(const CaptureCapability captureCapability)
WebRtc_Word32 ViECapturer::Stop()
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__);
_requestedCapability = CaptureCapability();
return _captureModule->StopCapture();
@ -330,7 +330,7 @@ WebRtc_Word32 ViECapturer::Stop()
bool ViECapturer::Started()
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s", __FUNCTION__);
return _captureModule->CaptureStarted();
}
@ -392,7 +392,7 @@ int ViECapturer::IncomingFrame(unsigned char* videoFrame,
RawVideoType videoType,
unsigned long long captureTime)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u",
width, height, captureTime);
@ -414,7 +414,7 @@ int ViECapturer::IncomingFrameI420(
const ViEVideoFrameI420& video_frame,
unsigned long long captureTime) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_engineId, _captureId),
ViEId(engine_id_, _captureId),
"%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u",
video_frame.width, video_frame.height, captureTime);
@ -446,7 +446,7 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 captureId,
VideoFrame& videoFrame,
VideoCodecType codecType)
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureId: %d)", __FUNCTION__, captureId);
CriticalSectionScoped cs(_captureCritsect);
@ -455,7 +455,7 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 captureId,
if (_encodedFrame.Length() != 0) // The last encoded frame has not been sent yet. Need to wait
{
_vieDeliverEvent.Reset();
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureId: %d) Last encoded frame not yet delivered.",
__FUNCTION__, captureId);
_captureCritsect.Leave();
@ -476,7 +476,7 @@ void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay)
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId),
ViEId(engine_id_, _captureId),
"%s(captureId: %d) delay %d", __FUNCTION__, _captureId,
delay);
@ -497,24 +497,24 @@ WebRtc_Word32 ViECapturer::RegisterEffectFilter(ViEEffectFilter* effectFilter)
{
if (_effectFilter == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: no effect filter added for capture device %d",
__FUNCTION__, _captureId);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId,_captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_,_captureId),
"%s: deregister effect filter for device %d", __FUNCTION__,
_captureId);
} else
{
if (_effectFilter)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,_captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_,_captureId),
"%s: effect filter already added for capture device %d",
__FUNCTION__, _captureId);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: register effect filter for device %d", __FUNCTION__,
_captureId);
}
@ -534,10 +534,10 @@ WebRtc_Word32 ViECapturer::IncImageProcRefCount()
if (!_imageProcModule)
{
assert(_imageProcModuleRefCounter==0);
_imageProcModule = VideoProcessingModule::Create(ViEModuleId(_engineId, _captureId));
_imageProcModule = VideoProcessingModule::Create(ViEModuleId(engine_id_, _captureId));
if (_imageProcModule == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: could not create video processing module",
__FUNCTION__);
return -1;
@ -560,7 +560,7 @@ WebRtc_Word32 ViECapturer::DecImageProcRefCount()
WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable);
@ -570,7 +570,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
// Sanity check
if (_denoisingEnabled)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: denoising already enabled", __FUNCTION__);
return -1;
}
@ -584,7 +584,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
// Sanity check
if (_denoisingEnabled == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: denoising not enabled", __FUNCTION__);
return -1;
}
@ -597,7 +597,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable)
WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable);
@ -607,7 +607,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
// Sanity check
if (_deflickerFrameStats)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering already enabled", __FUNCTION__);
return -1;
}
@ -622,7 +622,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
// Sanity check
if (_deflickerFrameStats == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering not enabled", __FUNCTION__);
return -1;
}
@ -635,7 +635,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable)
}
WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d, enable: %d)", __FUNCTION__,
_captureId, enable);
@ -645,7 +645,7 @@ WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
// Sanity check
if (_brightnessFrameStats)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: BrightnessAlarm already enabled", __FUNCTION__);
return -1;
}
@ -660,7 +660,7 @@ WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable)
// Sanity check
if (_brightnessFrameStats == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: deflickering not enabled", __FUNCTION__);
return -1;
}
@ -703,7 +703,7 @@ bool ViECapturer::ViECaptureProcess()
CriticalSectionScoped cs(_observerCritsect);
if (_observer)
{
_observer->BrightnessAlarm(_id, _currentBrightnessLevel);
_observer->BrightnessAlarm(id_, _currentBrightnessLevel);
_reportedBrightnessLevel = _currentBrightnessLevel;
}
}
@ -722,7 +722,7 @@ void ViECapturer::DeliverI420Frame(VideoFrame& videoFrame)
_imageProcModule->Deflickering(videoFrame, *_deflickerFrameStats);
} else
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s: could not get frame stats for captured frame", __FUNCTION__);
}
}
@ -750,7 +750,7 @@ void ViECapturer::DeliverI420Frame(VideoFrame& videoFrame)
break;
default:
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_engineId, _captureId),
ViEId(engine_id_, _captureId),
"%s: Brightness detection failed", __FUNCTION__);
}
}
@ -776,7 +776,7 @@ void ViECapturer::DeliverCodedFrame(VideoFrame& videoFrame)
_encodeCompleteCallback->Encoded(encodedImage);
}
if (NumberOfRegistersFrameCallbacks() > 0 && _decoderInitialized)
if (NumberOfRegisteredFrameCallbacks() > 0 && _decoderInitialized)
{
videoFrame.Swap(_decodeBuffer.payloadData, _decodeBuffer.bufferSize,
_decodeBuffer.payloadSize);
@ -795,19 +795,19 @@ void ViECapturer::DeliverCodedFrame(VideoFrame& videoFrame)
int ViECapturer::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
{
_providerCritSect.Enter();
provider_crit_sect_.Enter();
if (callbackObject == _vieEncoder) //Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
{
ViEEncoder* vieEncoder = NULL;
vieEncoder = _vieEncoder;
_vieEncoder = NULL;
_providerCritSect.Leave();
provider_crit_sect_.Leave();
_deliverCritsect.Enter(); //Need to take this here in order to avoid deadlock with VCM. The reason is that VCM will call ::Release and a deadlock can occure.
vieEncoder->DeRegisterExternalEncoder(_codec.plType);
_deliverCritsect.Leave();
return 0;
}
_providerCritSect.Leave();
provider_crit_sect_.Leave();
return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
}
@ -816,7 +816,7 @@ int ViECapturer::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
*/
bool ViECapturer::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject)
{
CriticalSectionScoped cs(_providerCritSect);
CriticalSectionScoped cs(provider_crit_sect_);
if (callbackObject == _vieEncoder)
{
return true;
@ -834,12 +834,12 @@ WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vieEncoder,
WebRtc_Word32 vieEncoderId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
{
if (_vieEncoder && &vieEncoder != _vieEncoder)
{
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d Capture device already encoding)",
__FUNCTION__, _captureId);
return -1;
@ -903,7 +903,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect);
@ -920,7 +920,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codecSettings,
false) == 0)
{
_decoderInitialized = true;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d) VCM Decoder initialized",
__FUNCTION__, _captureId);
}
@ -956,7 +956,7 @@ ViECapturer::Encode(const RawImage& inputImage,
WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback* callback)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_deliverCritsect);
@ -969,7 +969,7 @@ WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback*
}
WebRtc_Word32 ViECapturer::Release()
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
{
@ -1002,7 +1002,7 @@ WebRtc_Word32 ViECapturer::Release()
WebRtc_Word32 ViECapturer::Reset()
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
return 0;
@ -1010,7 +1010,7 @@ WebRtc_Word32 ViECapturer::Reset()
WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect);
@ -1023,7 +1023,7 @@ WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss,
WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
CriticalSectionScoped cs(_encodingCritsect);
@ -1055,7 +1055,7 @@ WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver& observer)
{
if (_observer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_,
_captureId),
"%s Observer already registered", __FUNCTION__,
_captureId);
@ -1076,7 +1076,7 @@ WebRtc_Word32 ViECapturer::DeRegisterObserver()
CriticalSectionScoped cs(_observerCritsect);
if (!_observer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, _captureId),
"%s No observer registered", __FUNCTION__, _captureId);
return -1;
}
@ -1100,18 +1100,18 @@ void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate)
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), "OnCaptureFrameRate %d",
ViEId(engine_id_, _captureId), "OnCaptureFrameRate %d",
frameRate);
CriticalSectionScoped cs(_observerCritsect);
_observer->CapturedFrameRate(_id, (WebRtc_UWord8) frameRate);
_observer->CapturedFrameRate(id_, (WebRtc_UWord8) frameRate);
}
void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm alarm)
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(_engineId, _captureId), "OnNoPictureAlarm %d", alarm);
ViEId(engine_id_, _captureId), "OnNoPictureAlarm %d", alarm);
CriticalSectionScoped cs(_observerCritsect);
CaptureAlarm vieAlarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;

View File

@ -66,7 +66,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrFeedBackCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrFeedBackCritSect)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1;
}
@ -74,7 +74,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrAudioCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrAudioCritSect)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1;
}
@ -82,30 +82,30 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_ptrDecodeEvent = EventWrapper::Create();
if (!_ptrDecodeEvent)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to allocate event");
return -1;
}
if (strlen(fileNameUTF8) > FileWrapper::kMaxFileNameSize)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() To long filename");
return -1;
}
strncpy(_fileName, fileNameUTF8, strlen(fileNameUTF8) + 1);
_filePlayer = FilePlayer::CreateFilePlayer(ViEId(_engineId, _id),
_filePlayer = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
fileFormat);
if (!_filePlayer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to create file player");
return -1;
}
if (_filePlayer->RegisterModuleFileCallback(this) == -1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to RegisterModuleFileCallback");
_filePlayer = NULL;
return -1;
@ -115,7 +115,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
"ViEFilePlayThread");
if (!_ptrDecodeThread)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to start decode thread.");
_filePlayer = NULL;
return -1;
@ -131,7 +131,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
_audioStream = false;
if (error)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to Start play video file");
return -1;
}
@ -149,7 +149,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to get VEFile interface");
return -1;
}
@ -157,7 +157,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
if (!_veVideoSync)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() failed to get "
"VoEVideoSync interface");
return -1;
@ -175,7 +175,7 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
*/
int ViEFilePlayer::FrameCallbackChanged()
{
if (ViEFrameProviderBase::NumberOfRegistersFrameCallbacks() > _videoClients)
if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() > _videoClients)
{
if (!_playBackStarted)
{
@ -186,13 +186,13 @@ int ViEFilePlayer::FrameCallbackChanged()
WEBRTC_TRACE(
webrtc::kTraceStateInfo,
webrtc::kTraceVideo,
ViEId(_engineId, _id),
ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged() Started filedecode thread %u",
threadId);
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged() Failed to start file decode thread.");
}
} else if (!_filePlayer->IsPlayingFile())
@ -201,14 +201,14 @@ int ViEFilePlayer::FrameCallbackChanged()
!_audioStream) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
ViEId(engine_id_, id_),
"ViEFilePlayer::FrameCallbackChanged(), Failed to restart the file player.");
}
}
}
_videoClients = ViEFrameProviderBase::NumberOfRegistersFrameCallbacks();
_videoClients = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
return 0;
}
@ -265,7 +265,7 @@ int ViEFilePlayer::StopPlay() //Only called from destructor.
} else
{
assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StartPlay() Failed to stop file decode thread.");
}
}
@ -372,15 +372,15 @@ bool ViEFilePlayer::NeedsAudioFromFile(void* buf)
// From FileCallback
void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, id),
"%s: fileId %d", __FUNCTION__, _id);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_, id),
"%s: fileId %d", __FUNCTION__, id_);
_filePlayer->StopPlayingFile();
CriticalSectionScoped lock(*_ptrFeedBackCritSect);
if (_observer)
{
_observer->PlayFileEnded(_id);
_observer->PlayFileEnded(id_);
}
}
@ -415,7 +415,7 @@ int ViEFilePlayer::SendAudioOnChannel(const int audioChannel,
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
@ -425,7 +425,7 @@ int ViEFilePlayer::SendAudioOnChannel(const int audioChannel,
kFileFormatPcm16kHzFile,
volumeScaling) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::SendAudioOnChannel() VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
audioChannel, mixMicrophone, volumeScaling);
return -1;
@ -448,14 +448,14 @@ int ViEFilePlayer::StopSendAudioOnChannel(const int audioChannel)
MapItem* audioItem = _audioChannelsSending.Find(audioChannel);
if (!audioItem)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"_s AudioChannel %d not sending", __FUNCTION__, audioChannel);
return -1;
}
result = _veFileInterface->StopPlayingFileAsMicrophone(audioChannel);
if (result != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"ViEFilePlayer::StopSendAudioOnChannel() VE_StopPlayingFileAsMicrophone failed. audioChannel %d",
audioChannel);
}
@ -470,7 +470,7 @@ int ViEFilePlayer::PlayAudioLocally(const int audioChannel, float volumeScaling)
{
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
@ -480,7 +480,7 @@ int ViEFilePlayer::PlayAudioLocally(const int audioChannel, float volumeScaling)
kFileFormatPcm16kHzFile,
volumeScaling) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
__FUNCTION__, audioChannel, volumeScaling);
return -1;
@ -498,13 +498,13 @@ int ViEFilePlayer::StopPlayAudioLocally(const int audioChannel)
{
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
if (_veFileInterface->StopPlayingFileLocally(audioChannel) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, id_),
"%s VE_StopPlayingFileLocally failed. audioChannel %d.",
__FUNCTION__, audioChannel);
return -1;

View File

@ -8,307 +8,260 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "vie_frame_provider_base.h"
#include "critical_section_wrapper.h"
#include "tick_util.h"
#include "trace.h"
#include "vie_defines.h"
#include "video_engine/vie_frame_provider_base.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
#include "video_engine/vie_defines.h"
namespace webrtc {
ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engineId):
_id(Id),
_engineId(engineId),
_frameCallbackMap(),
_providerCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrExtraFrame(NULL),
_frameDelay(0)
{
ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
: id_(Id),
engine_id_(engine_id),
frame_callbacks_(),
provider_crit_sect_(*CriticalSectionWrapper::CreateCriticalSection()),
extra_frame_(NULL),
frame_delay_(0) {
}
ViEFrameProviderBase::~ViEFrameProviderBase()
{
if(_frameCallbackMap.Size()>0)
{
WEBRTC_TRACE(webrtc::kTraceWarning,
webrtc::kTraceVideo,
ViEId(_engineId,_id),
"FrameCallbacks still exist when Provider deleted %d",
_frameCallbackMap.Size());
}
for(MapItem* item=_frameCallbackMap.First();item!=NULL;item=_frameCallbackMap.Next(item))
{
static_cast<ViEFrameCallback*>(item->GetItem())->ProviderDestroyed(_id);
}
ViEFrameProviderBase::~ViEFrameProviderBase() {
if (frame_callbacks_.Size() > 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"FrameCallbacks still exist when Provider deleted %d",
frame_callbacks_.Size());
}
for (MapItem* item = frame_callbacks_.First(); item != NULL;
item = frame_callbacks_.Next(item)) {
static_cast<ViEFrameCallback*>(item->GetItem())->ProviderDestroyed(id_);
}
while(_frameCallbackMap.Erase(_frameCallbackMap.First()) == 0)
;
while (frame_callbacks_.Erase(frame_callbacks_.First()) == 0) {
}
delete &_providerCritSect;
delete _ptrExtraFrame;
delete &provider_crit_sect_;
delete extra_frame_;
}
int ViEFrameProviderBase::Id()
{
return _id;
int ViEFrameProviderBase::Id() {
return id_;
}
void ViEFrameProviderBase::DeliverFrame(webrtc::VideoFrame& videoFrame,int numCSRCs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize])
{
#ifdef _DEBUG
const TickTime startProcessTime=TickTime::Now();
void ViEFrameProviderBase::DeliverFrame(
VideoFrame& video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
#ifdef DEBUG_
const TickTime start_process_time = TickTime::Now();
#endif
CriticalSectionScoped cs(_providerCritSect);
CriticalSectionScoped cs(provider_crit_sect_);
// Deliver the frame to all registered callbacks
if (_frameCallbackMap.Size() > 0)
{
if(_frameCallbackMap.Size()==1)
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(_frameCallbackMap.First()->GetItem());
frameObserver->DeliverFrame(_id,videoFrame,numCSRCs,CSRC);
// Deliver the frame to all registered callbacks.
if (frame_callbacks_.Size() > 0) {
if (frame_callbacks_.Size() == 1) {
// We don't have to copy the frame.
ViEFrameCallback* frame_observer =
static_cast<ViEFrameCallback*>(frame_callbacks_.First()->GetItem());
frame_observer->DeliverFrame(id_, video_frame, num_csrcs, CSRC);
} else {
// Make a copy of the frame for all callbacks.
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
if (extra_frame_ == NULL) {
extra_frame_ = new VideoFrame();
}
else
{
// Make a copy of the frame for all callbacks
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
if (_ptrExtraFrame == NULL)
{
_ptrExtraFrame = new webrtc::VideoFrame();
}
if (mapItem != NULL)
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem());
if (frameObserver != NULL)
{
// We must copy the frame each time since the previous receiver might swap it...
_ptrExtraFrame->CopyFrame(videoFrame);
frameObserver->DeliverFrame(_id, *_ptrExtraFrame,numCSRCs,CSRC);
}
}
}
if (map_item != NULL) {
ViEFrameCallback* frame_observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
if (frame_observer != NULL) {
// We must copy the frame each time since the previous receiver
// might swap it to avoid a copy.
extra_frame_->CopyFrame(video_frame);
frame_observer->DeliverFrame(id_, *extra_frame_, num_csrcs, CSRC);
}
}
}
}
#ifdef _DEBUG
const int processTime=(int) (TickTime::Now()-startProcessTime).Milliseconds();
if(processTime>25) // Warn If the delivery time is too long.
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId,_id), "%s Too long time: %ums",__FUNCTION__,processTime);
}
}
#ifdef DEBUG_
const int process_time =
static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
if (process_time > 25) {
// Warn if the delivery time is too long.
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s Too long time: %ums", __FUNCTION__, process_time);
}
#endif
}
void ViEFrameProviderBase::SetFrameDelay(int frameDelay)
{
void ViEFrameProviderBase::SetFrameDelay(int frame_delay) {
CriticalSectionScoped cs(provider_crit_sect_);
frame_delay_ = frame_delay;
CriticalSectionScoped cs(_providerCritSect);
_frameDelay=frameDelay;
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
ViEFrameCallback* frame_observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
assert(frame_observer);
frame_observer->DelayChanged(id_, frame_delay);
}
}
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem());
assert(frameObserver);
frameObserver->DelayChanged(_id,frameDelay);
int ViEFrameProviderBase::FrameDelay() {
return frame_delay_;
}
int ViEFrameProviderBase::GetBestFormat(int& best_width,
int& best_height,
int& best_frame_rate) {
int largest_width = 0;
int largest_height = 0;
int highest_frame_rate = 0;
CriticalSectionScoped cs(provider_crit_sect_);
// Check if this one already exists.
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
int prefered_width = 0;
int prefered_height = 0;
int prefered_frame_rate = 0;
ViEFrameCallback* callback_object =
static_cast<ViEFrameCallback*>(map_item->GetItem());
assert(callback_object);
if (callback_object->GetPreferedFrameSettings(prefered_width,
prefered_height,
prefered_frame_rate) == 0) {
if (prefered_width > largest_width) {
largest_width = prefered_width;
}
if (prefered_height > largest_height) {
largest_height = prefered_height;
}
if (prefered_frame_rate > highest_frame_rate) {
highest_frame_rate = prefered_frame_rate;
}
}
}
best_width = largest_width;
best_height = largest_height;
best_frame_rate = highest_frame_rate;
return 0;
}
int ViEFrameProviderBase::FrameDelay()
{
return _frameDelay;
}
int ViEFrameProviderBase::RegisterFrameCallback(
int observer_id, ViEFrameCallback* callback_object) {
if (callback_object == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
int ViEFrameProviderBase::GetBestFormat(int& bestWidth,
int& bestHeight,
int& bestFrameRate)
{
{
CriticalSectionScoped cs(provider_crit_sect_);
int largestWidth = 0;
int largestHeight = 0;
int highestFrameRate = 0;
CriticalSectionScoped cs(_providerCritSect);
// Check if this one already exists...
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
int preferedWidth=0;
int preferedHeight=0;
int preferedFrameRate=0;
ViEFrameCallback* callbackObject = static_cast<ViEFrameCallback*>(mapItem->GetItem());
assert(callbackObject);
if(callbackObject->GetPreferedFrameSettings(preferedWidth,preferedHeight,preferedFrameRate)==0)
{
if (preferedWidth > largestWidth)
{
largestWidth = preferedWidth;
}
if (preferedHeight > largestHeight)
{
largestHeight = preferedHeight;
}
if (preferedFrameRate > highestFrameRate)
{
highestFrameRate = preferedFrameRate;
}
}
}
bestWidth = largestWidth;
bestHeight = largestHeight;
bestFrameRate = highestFrameRate;
return 0;
}
int ViEFrameProviderBase::RegisterFrameCallback(int observerId,ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
// Check if the callback already exists.
for (MapItem* map_item = frame_callbacks_.First();
map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
const ViEFrameCallback* observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
if (observer == callback_object) {
// This callback is already registered.
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p already registered", __FUNCTION__,
callback_object);
assert("!frameObserver already registered");
return -1;
}
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", __FUNCTION__, callbackObject);
{
CriticalSectionScoped cs(_providerCritSect);
// Check if this one already exists...
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// This callback is already registered
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p already registered", __FUNCTION__, callbackObject);
assert("!frameObserver already registered");
return -1;
}
}
if (_frameCallbackMap.Insert(observerId,callbackObject) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: Could not add 0x%p to list", __FUNCTION__, callbackObject);
return -1;
}
if (frame_callbacks_.Insert(observer_id, callback_object) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: Could not add 0x%p to list", __FUNCTION__,
callback_object);
return -1;
}
// Report current capture delay
callbackObject->DelayChanged(_id,_frameDelay);
FrameCallbackChanged(); // Notify implementer of this class that the callback list have changed
return 0;
}
// Report current capture delay
callback_object->DelayChanged(id_, frame_delay_);
// Notify implementer of this class that the callback list have changed.
FrameCallbackChanged();
return 0;
}
int ViEFrameProviderBase::DeregisterFrameCallback(
const ViEFrameCallback* callback_object) {
if (!callback_object) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
__FUNCTION__, callback_object);
// ----------------------------------------------------------------------------
// DeregisterFrameCallback
// ----------------------------------------------------------------------------
{
CriticalSectionScoped cs(provider_crit_sect_);
bool item_found = false;
int ViEFrameProviderBase::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return -1;
// Try to find the callback in our list.
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
const ViEFrameCallback* observer =
static_cast<ViEFrameCallback*>(map_item->GetItem());
if (observer == callback_object) {
// We found it, remove it!
frame_callbacks_.Erase(map_item);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p deregistered", __FUNCTION__, callback_object);
item_found = true;
break;
}
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", __FUNCTION__, callbackObject);
{
CriticalSectionScoped cs(_providerCritSect);
bool itemFound=false;
// Try to find the callback in our list
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// We found it, remove it!
_frameCallbackMap.Erase(mapItem);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p deregistered", __FUNCTION__, callbackObject);
itemFound=true;
break;
}
}
if(!itemFound)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not found", __FUNCTION__, callbackObject);
return -1;
}
if (!item_found) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p not found", __FUNCTION__, callback_object);
return -1;
}
FrameCallbackChanged(); // Notify implementer of this class that the callback list have changed
return 0;
}
// Notify implementer of this class that the callback list have changed.
FrameCallbackChanged();
return 0;
}
// ----------------------------------------------------------------------------
// IsFrameCallbackRegistered
// ----------------------------------------------------------------------------
bool ViEFrameProviderBase::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return false;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", __FUNCTION__, callbackObject);
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
if (callbackObject == mapItem->GetItem())
{
// We found the callback
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p is registered", __FUNCTION__, callbackObject);
return true;
}
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not registered", __FUNCTION__, callbackObject);
bool ViEFrameProviderBase::IsFrameCallbackRegistered(
const ViEFrameCallback* callback_object) {
if (!callback_object) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s: No argument", __FUNCTION__);
return false;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s(0x%p)", __FUNCTION__, callback_object);
for (MapItem* map_item = frame_callbacks_.First(); map_item != NULL;
map_item = frame_callbacks_.Next(map_item)) {
if (callback_object == map_item->GetItem()) {
// We found the callback.
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p is registered", __FUNCTION__, callback_object);
return true;
}
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
"%s 0x%p not registered", __FUNCTION__, callback_object);
return false;
}
// ----------------------------------------------------------------------------
// NumberOfRegistersFrameCallbacks
// ----------------------------------------------------------------------------
int ViEFrameProviderBase::NumberOfRegistersFrameCallbacks()
{
CriticalSectionScoped cs(_providerCritSect);
return _frameCallbackMap.Size();
int ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() {
CriticalSectionScoped cs(provider_crit_sect_);
return frame_callbacks_.Size();
}
} // namespace webrtc
} // namespace webrtc

View File

@ -8,95 +8,91 @@
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_frame_provider_base.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
// Defines
#include "modules/interface/module_common_types.h"
#include "system_wrappers/interface/map_wrapper.h"
#include "typedefs.h"
#include "module_common_types.h"
#include "map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoEncoder;
class ViEFrameCallback
{
public:
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
/*
* Delay has changed from the provider.
* frameDelay new capture delay in Ms.
*/
virtual void DelayChanged(int id, int frameDelay)=0;
// ViEFrameCallback shall be implemented by all classes receiving frames from a
// frame provider.
class ViEFrameCallback {
public:
virtual void DeliverFrame(int id,
VideoFrame& video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
/*
Fetch the width, height and frame rate preferred by this observer.
return 0 on success, -1 otherwise.
*/
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate)=0;
// The capture delay has changed from the provider. |frame_delay| is given in
// ms.
virtual void DelayChanged(int id, int frame_delay) = 0;
virtual void ProviderDestroyed(int id) = 0;
// Get the width, height and frame rate preferred by this observer.
virtual int GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate) = 0;
protected:
virtual ~ViEFrameCallback()
{
}
;
// ProviderDestroyed is called when the frame is about to be destroyed. There
// must not be any more calls to the frame provider after this.
virtual void ProviderDestroyed(int id) = 0;
virtual ~ViEFrameCallback() {}
};
class ViEFrameProviderBase
{
public:
ViEFrameProviderBase(int Id, int engineId);
virtual ~ViEFrameProviderBase();
int Id();
// ViEFrameProviderBase is a base class that will deliver frames to all
// registered ViEFrameCallbacks.
class ViEFrameProviderBase {
public:
ViEFrameProviderBase(int Id, int engine_id);
virtual ~ViEFrameProviderBase();
// Register frame callbacks, i.e. a receiver of the captured frame.
virtual int RegisterFrameCallback(int observerId,
ViEFrameCallback* callbackObject);
virtual int
DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
virtual bool
IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
// Returns the frame provider id.
int Id();
int NumberOfRegistersFrameCallbacks();
// Register frame callbacks, i.e. a receiver of the captured frame.
virtual int RegisterFrameCallback(int observer_id,
ViEFrameCallback* callback_object);
// FrameCallbackChanged
// Inherited classes should check for new frameSettings and reconfigure output if possible.
// Return 0 on success, -1 otherwise.
virtual int FrameCallbackChanged() = 0;
virtual int DeregisterFrameCallback(const ViEFrameCallback* callback_object);
protected:
void DeliverFrame(VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frameDelay);
int FrameDelay();
int GetBestFormat(int& bestWidth,
int& bestHeight,
int& bestFrameRate);
virtual bool IsFrameCallbackRegistered(
const ViEFrameCallback* callback_object);
int _id;
int _engineId;
int NumberOfRegisteredFrameCallbacks();
protected:
// Frame callbacks
MapWrapper _frameCallbackMap;
CriticalSectionWrapper& _providerCritSect;
private:
// FrameCallbackChanged
// Inherited classes should check for new frame_settings and reconfigure
// output if possible.
virtual int FrameCallbackChanged() = 0;
VideoFrame* _ptrExtraFrame;
protected:
void DeliverFrame(VideoFrame& video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frame_delay);
int FrameDelay();
int GetBestFormat(int& best_width,
int& best_height,
int& best_frame_rate);
//Members
int _frameDelay;
int id_;
int engine_id_;
// Frame callbacks.
MapWrapper frame_callbacks_;
CriticalSectionWrapper& provider_crit_sect_;
private:
VideoFrame* extra_frame_;
int frame_delay_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_

View File

@ -405,7 +405,7 @@ int ViEInputManager::DestroyCaptureDevice(const int captureId)
__FUNCTION__, captureId);
return -1;
}
WebRtc_UWord32 numCallbacks = vieCapture->NumberOfRegistersFrameCallbacks();
WebRtc_UWord32 numCallbacks = vieCapture->NumberOfRegisteredFrameCallbacks();
if (numCallbacks > 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),
@ -537,7 +537,7 @@ int ViEInputManager::DestroyFilePlayer(int fileId)
return -1;
}
int numCallbacks =
vieFilePlayer->NumberOfRegistersFrameCallbacks();
vieFilePlayer->NumberOfRegisteredFrameCallbacks();
if (numCallbacks > 0)
{
WEBRTC_TRACE( webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),