Refactor ViEFilePlayer.

Types and arguments will be done in a later CL.

Review URL: http://webrtc-codereview.appspot.com/324002

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1180 4adac7df-926f-26a2-2b94-8c16560cd09d
mflodman@webrtc.org 2011-12-14 08:38:37 +00:00
parent e6f64835a0
commit 7991c0501f
2 changed files with 555 additions and 610 deletions

video_engine/vie_file_player.cc

@@ -8,155 +8,157 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "video_engine/vie_file_player.h"
#include "modules/utility/interface/file_player.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/event_wrapper.h"
#include "system_wrappers/interface/thread_wrapper.h"
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
#include "video_engine/main/interface/vie_file.h"
#include "video_engine/vie_input_manager.h"
#include "voice_engine/main/interface/voe_base.h"
#include "voice_engine/main/interface/voe_file.h"
#include "voice_engine/main/interface/voe_video_sync.h"

namespace webrtc {

const int kThreadWaitTimeMs = 100;

ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
    int file_id,
    int engine_id,
    const char* file_nameUTF8,
    const bool loop,
    const FileFormats file_format,
    ViEInputManager& input_manager,
    VoiceEngine* voe_ptr) {
  ViEFilePlayer* self = new ViEFilePlayer(file_id, engine_id, input_manager);
  if (!self || self->Init(file_nameUTF8, loop, file_format, voe_ptr) != 0) {
    delete self;
    self = NULL;
  }
  return self;
}

ViEFilePlayer::ViEFilePlayer(int Id,
                             int engine_id,
                             ViEInputManager& input_manager)
    : ViEFrameProviderBase(Id, engine_id),
      play_back_started_(false),
      input_manager_(input_manager),
      feedback_cs_(NULL),
      audio_cs_(NULL),
      file_player_(NULL),
      audio_stream_(false),
      video_clients_(0),
      audio_clients_(0),
      local_audio_channel_(-1),
      observer_(NULL),
      voe_file_interface_(NULL),
      voe_video_sync_(NULL),
      decode_thread_(NULL),
      decode_event_(NULL),
      decoded_audio_length_(0) {
}

ViEFilePlayer::~ViEFilePlayer() {
  // StopPlay deletes decode_thread_.
  StopPlay();
  delete decode_event_;
  delete audio_cs_;
  delete feedback_cs_;
}

int ViEFilePlayer::Init(const char* file_nameUTF8,
                        const bool loop,
                        const FileFormats file_format,
                        VoiceEngine* voice_engine) {
  feedback_cs_ = CriticalSectionWrapper::CreateCriticalSection();
  if (!feedback_cs_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
    return -1;
  }
  audio_cs_ = CriticalSectionWrapper::CreateCriticalSection();
  if (!audio_cs_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
    return -1;
  }
  decode_event_ = EventWrapper::Create();
  if (!decode_event_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate event");
    return -1;
  }
  if (strlen(file_nameUTF8) > FileWrapper::kMaxFileNameSize) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() Too long filename");
    return -1;
  }
  strncpy(file_name_, file_nameUTF8, strlen(file_nameUTF8) + 1);

  file_player_ = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
                                              file_format);
  if (!file_player_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to create file player");
    return -1;
  }
  if (file_player_->RegisterModuleFileCallback(this) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to "
                 "RegisterModuleFileCallback");
    file_player_ = NULL;
    return -1;
  }
  decode_thread_ = ThreadWrapper::CreateThread(FilePlayDecodeThreadFunction,
                                               this, kHighestPriority,
                                               "ViEFilePlayThread");
  if (!decode_thread_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to start decode thread.");
    file_player_ = NULL;
    return -1;
  }

  // Always try to open with audio, since we don't know which channels the
  // audio should be played on.
  WebRtc_Word32 error = file_player_->StartPlayingVideoFile(file_name_, loop,
                                                            false);
  if (error) {
    // Failed to open the file with audio, try without.
    error = file_player_->StartPlayingVideoFile(file_name_, loop, true);
    audio_stream_ = false;
    if (error) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                   "ViEFilePlayer::StartPlay() failed to start playing video "
                   "file");
      return -1;
    }
  } else {
    audio_stream_ = true;
  }

  if (audio_stream_) {
    if (voice_engine) {
      // A VoiceEngine has been provided and we want to play audio on a local
      // channel.
      voe_file_interface_ = VoEFile::GetInterface(voice_engine);
      if (!voe_file_interface_) {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::StartPlay() failed to get VEFile "
                     "interface");
        return -1;
      }
      voe_video_sync_ = VoEVideoSync::GetInterface(voice_engine);
      if (!voe_video_sync_) {
        WEBRTC_TRACE(kTraceError, kTraceVideo,
                     ViEId(engine_id_, id_),
                     "ViEFilePlayer::StartPlay() failed to get "
                     "VoEVideoSync interface");
@@ -165,405 +167,334 @@ int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
    }
  }
  // Read audio (or just video) every 10 ms.
  decode_event_->StartTimer(true, 10);
  return 0;
}

int ViEFilePlayer::FrameCallbackChanged() {
  // Starts the decode thread when someone cares.
  if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() >
      video_clients_) {
    if (!play_back_started_) {
      play_back_started_ = true;
      unsigned int thread_id;
      if (decode_thread_->Start(thread_id)) {
        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged() Started file decode"
                     " thread %u", thread_id);
      } else {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged() Failed to start "
                     "file decode thread.");
      }
    } else if (!file_player_->IsPlayingFile()) {
      if (file_player_->StartPlayingVideoFile(file_name_, false,
                                              !audio_stream_) != 0) {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged(), Failed to restart "
                     "the file player.");
      }
    }
  }
  video_clients_ = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
  return 0;
}

// File play decode function.
bool ViEFilePlayer::FilePlayDecodeThreadFunction(void* obj) {
  return static_cast<ViEFilePlayer*>(obj)->FilePlayDecodeProcess();
}

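// Runs on the decode thread every time decode_event_ fires: reads 10 ms of
// audio when no audio channel is pulling from the file, fetches the next
// video frame when it is (almost) due, compensates its render time for the
// local audio playout delay, and delivers it to registered frame callbacks.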
bool ViEFilePlayer::FilePlayDecodeProcess() {
  if (decode_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
    if (audio_stream_ && audio_clients_ == 0) {
      // There is audio but no one cares, read the audio here.
      Read(NULL, 0);
    }
    if (file_player_->TimeUntilNextVideoFrame() < 10) {
      // Less than 10 ms to the next video frame.
      if (file_player_->GetVideoFromFile(decoded_video_) != 0) {
      }
    }
    if (decoded_video_.Length() > 0) {
      if (local_audio_channel_ != -1 && voe_video_sync_) {
        // We are playing audio locally.
        int audio_delay = 0;
        if (voe_video_sync_->GetPlayoutBufferSize(audio_delay) == 0) {
          decoded_video_.SetRenderTime(decoded_video_.RenderTimeMs() +
                                       audio_delay);
        }
      }
      DeliverFrame(decoded_video_);
      decoded_video_.SetLength(0);
    }
  }
  return true;
}

int ViEFilePlayer::StopPlay() {
  // Only called from destructor.
  if (decode_thread_) {
    decode_thread_->SetNotAlive();
    if (decode_thread_->Stop()) {
      delete decode_thread_;
    } else {
      assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread");
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                   "ViEFilePlayer::StopPlay() Failed to stop file decode "
                   "thread.");
    }
  }
  decode_thread_ = NULL;
  if (decode_event_) {
    decode_event_->StopTimer();
  }
  StopPlayAudio();

  if (voe_file_interface_) {
    voe_file_interface_->Release();
    voe_file_interface_ = NULL;
  }
  if (voe_video_sync_) {
    voe_video_sync_->Release();
    voe_video_sync_ = NULL;
  }
  if (file_player_) {
    file_player_->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(file_player_);
    file_player_ = NULL;
  }
  return 0;
}

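// Stops all audio use of the file: channels sending it as microphone input,
// local playback, and the bookkeeping of per-channel audio buffers.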
int ViEFilePlayer::StopPlayAudio() {
  // Stop sending audio.
  while (MapItem* audio_item = audio_channels_sending_.First()) {
    StopSendAudioOnChannel(audio_item->GetId());
  }

  // Stop local audio playback.
  if (local_audio_channel_ != -1) {
    StopPlayAudioLocally(local_audio_channel_);
  }
  local_audio_channel_ = -1;
  while (audio_channel_buffers_.PopFront() != -1) {
  }
  while (audio_channels_sending_.Erase(audio_channels_sending_.First()) != -1) {
  }
  audio_clients_ = 0;
  return 0;
}

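// webrtc::InStream implementation, called by VoiceEngine for every channel
// that plays or sends this file's audio. The same decoded 10 ms block is
// handed out to all channels; NeedsAudioFromFile() decides when a new block
// has to be read from the file.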
int ViEFilePlayer::Read(void* buf, int len) {
  // Protect from simultaneous reading from multiple channels.
  CriticalSectionScoped lock(*audio_cs_);
  if (NeedsAudioFromFile(buf)) {
    // We will run the VoE in 16 kHz.
    if (file_player_->Get10msAudioFromFile(decoded_audio_,
                                           decoded_audio_length_, 16000) != 0) {
      // No data.
      decoded_audio_length_ = 0;
      return 0;
    }
    // 2 bytes per sample.
    decoded_audio_length_ *= 2;
    if (buf != 0) {
      audio_channel_buffers_.PushBack(buf);
    }
  } else {
    // No need for a new audio buffer from the file, i.e. the block read from
    // the file has not been played on this channel yet.
  }
  if (buf) {
    memcpy(buf, decoded_audio_, decoded_audio_length_);
  }
  return decoded_audio_length_;
}

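// Returns true if a new audio block must be read from the file for |buf|,
// i.e. when |buf| has already been given the current block (or no block has
// been read yet).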
bool ViEFilePlayer::NeedsAudioFromFile(void* buf) {
  bool needs_new_audio = false;
  if (audio_channel_buffers_.GetSize() == 0) {
    return true;
  }

  // Check if this buffer has already read the current audio block.
  for (ListItem* item = audio_channel_buffers_.First(); item != NULL;
       item = audio_channel_buffers_.Next(item)) {
    if (item->GetItem() == buf) {
      needs_new_audio = true;
      audio_channel_buffers_.Erase(item);
      break;
    }
  }
  return needs_new_audio;
}

void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id),
               "%s: file_id %d", __FUNCTION__, id_);
  file_player_->StopPlayingFile();

  CriticalSectionScoped lock(*feedback_cs_);
  if (observer_) {
    observer_->PlayFileEnded(id_);
  }
}

bool ViEFilePlayer::IsObserverRegistered() {
  CriticalSectionScoped lock(*feedback_cs_);
  return observer_ != NULL;
}

int ViEFilePlayer::RegisterObserver(ViEFileObserver& observer) {
  CriticalSectionScoped lock(*feedback_cs_);
  if (observer_) {
    return -1;
  }
  observer_ = &observer;
  return 0;
}

int ViEFilePlayer::DeRegisterObserver() {
  CriticalSectionScoped lock(*feedback_cs_);
  observer_ = NULL;
  return 0;
}

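// Tells the VoiceEngine to stream this file as microphone input on
// |audio_channel|, optionally mixed with the real microphone.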
int ViEFilePlayer::SendAudioOnChannel(const int audio_channel,
                                      bool mix_microphone,
                                      float volume_scaling) {
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s No VEFile interface.", __FUNCTION__);
    return -1;
  }
  if (voe_file_interface_->StartPlayingFileAsMicrophone(audio_channel, this,
                                                        mix_microphone,
                                                        kFileFormatPcm16kHzFile,
                                                        volume_scaling) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::SendAudioOnChannel() "
                 "VE_StartPlayingFileAsMicrophone failed. audio_channel %d, "
                 "mix_microphone %d, volume_scaling %.2f",
                 audio_channel, mix_microphone, volume_scaling);
    return -1;
  }
  audio_channels_sending_.Insert(audio_channel, NULL);

  CriticalSectionScoped lock(*audio_cs_);
  audio_clients_++;
  return 0;
}

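// Tells the VoiceEngine to stop streaming this file as microphone input on
// |audio_channel|.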
int ViEFilePlayer::StopSendAudioOnChannel(const int audio_channel) {
  int result = 0;
  MapItem* audio_item = audio_channels_sending_.Find(audio_channel);
  if (!audio_item) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StopSendAudioOnChannel AudioChannel %d not "
                 "sending", audio_channel);
    return -1;
  }
  result = voe_file_interface_->StopPlayingFileAsMicrophone(audio_channel);
  if (result != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StopSendAudioOnChannel() "
                 "VE_StopPlayingFileAsMicrophone failed. audio_channel %d",
                 audio_channel);
  }
  audio_channels_sending_.Erase(audio_item);

  CriticalSectionScoped lock(*audio_cs_);
  audio_clients_--;
  assert(audio_clients_ >= 0);
  return 0;
}

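// Starts local playout of the file's audio on |audio_channel|; video frames
// delivered by this player are then delayed by the audio playout buffer to
// keep them in sync.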
int ViEFilePlayer::PlayAudioLocally(const int audio_channel,
                                    float volume_scaling) {
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s No VEFile interface.", __FUNCTION__);
    return -1;
  }
  if (voe_file_interface_->StartPlayingFileLocally(audio_channel, this,
                                                   kFileFormatPcm16kHzFile,
                                                   volume_scaling) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s VE_StartPlayingFileLocally failed. audio_channel %d, "
                 "volume_scaling %.2f",
                 __FUNCTION__, audio_channel, volume_scaling);
    return -1;
  }

  CriticalSectionScoped lock(*audio_cs_);
  local_audio_channel_ = audio_channel;
  audio_clients_++;
  return 0;
}

int ViEFilePlayer::StopPlayAudioLocally(const int audio_channel) {
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s No VEFile interface.", __FUNCTION__);
    return -1;
  }
  if (voe_file_interface_->StopPlayingFileLocally(audio_channel) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s VE_StopPlayingFileLocally failed. audio_channel %d.",
                 __FUNCTION__, audio_channel);
    return -1;
  }

  CriticalSectionScoped lock(*audio_cs_);
  local_audio_channel_ = -1;
  audio_clients_--;
  return 0;
}

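// Opens |file_name| just long enough to read out its video and (if present)
// audio codec settings.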
// static
int ViEFilePlayer::GetFileInformation(int engine_id,
                                      const char* file_name,
                                      VideoCodec& video_codec,
                                      CodecInst& audio_codec,
                                      const FileFormats file_format) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, engine_id, "%s ", __FUNCTION__);

  FilePlayer* file_player = FilePlayer::CreateFilePlayer(engine_id,
                                                         file_format);
  if (!file_player) {
    return -1;
  }

  bool video_only = false;
  memset(&video_codec, 0, sizeof(video_codec));
  memset(&audio_codec, 0, sizeof(audio_codec));

  if (file_player->StartPlayingVideoFile(file_name, false, false) != 0) {
    video_only = true;
    if (file_player->StartPlayingVideoFile(file_name, false, true) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                   "%s Failed to open file.", __FUNCTION__);
      FilePlayer::DestroyFilePlayer(file_player);
      return -1;
    }
  }
  if (!video_only && file_player->AudioCodec(audio_codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                 "%s Failed to get audio codec.", __FUNCTION__);
    FilePlayer::DestroyFilePlayer(file_player);
    return -1;
  }
  if (file_player->video_codec_info(video_codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                 "%s Failed to get video codec.", __FUNCTION__);
    FilePlayer::DestroyFilePlayer(file_player);
    return -1;
  }
  FilePlayer::DestroyFilePlayer(file_player);
  return 0;
}

}  // namespace webrtc

video_engine/vie_file_player.h

@@ -8,64 +8,66 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_

#include "common_types.h"
#include "modules/media_file/interface/media_file_defines.h"
#include "system_wrappers/interface/file_wrapper.h"
#include "system_wrappers/interface/list_wrapper.h"
#include "typedefs.h"
#include "video_engine/vie_frame_provider_base.h"

namespace webrtc {

class EventWrapper;
class FilePlayer;
class ThreadWrapper;
class ViEFileObserver;
class ViEInputManager;
class VoEFile;
class VoEVideoSync;
class VoiceEngine;

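// Decodes a media file and delivers its video frames to the frame callbacks
// registered through ViEFrameProviderBase. The file's audio is exposed via
// webrtc::InStream so that VoiceEngine channels can send it or play it out
// locally.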
class ViEFilePlayer
    : public ViEFrameProviderBase,
      protected FileCallback,
      protected InStream {
 public:
  static ViEFilePlayer* CreateViEFilePlayer(int file_id,
                                            int engine_id,
                                            const char* file_nameUTF8,
                                            const bool loop,
                                            const FileFormats file_format,
                                            ViEInputManager& input_manager,
                                            VoiceEngine* voe_ptr);

  static int GetFileInformation(const int engine_id,
                                const char* file_name,
                                VideoCodec& video_codec,
                                CodecInst& audio_codec,
                                const FileFormats file_format);

  ~ViEFilePlayer();

  bool IsObserverRegistered();
  int RegisterObserver(ViEFileObserver& observer);
  int DeRegisterObserver();

  int SendAudioOnChannel(const int audio_channel,
                         bool mix_microphone,
                         float volume_scaling);
  int StopSendAudioOnChannel(const int audio_channel);
  int PlayAudioLocally(const int audio_channel, float volume_scaling);
  int StopPlayAudioLocally(const int audio_channel);

  // Implements ViEFrameProviderBase.
  virtual int FrameCallbackChanged();

 protected:
  ViEFilePlayer(int Id, int engine_id, ViEInputManager& input_manager);
  int Init(const WebRtc_Word8* file_nameUTF8,
           const bool loop,
           const FileFormats file_format,
           VoiceEngine* voe_ptr);
  int StopPlay();
  int StopPlayAudio();
@@ -74,51 +76,63 @@ protected:
  bool FilePlayDecodeProcess();
  bool NeedsAudioFromFile(void* buf);

  // Implements webrtc::InStream.
  virtual int Read(void* buf, int len);
  virtual int Rewind() {
    return 0;
  }

  // Implements FileCallback.
  virtual void PlayNotification(const WebRtc_Word32 /*id*/,
                                const WebRtc_UWord32 /*notification_ms*/) {}
  virtual void RecordNotification(const WebRtc_Word32 /*id*/,
                                  const WebRtc_UWord32 /*notification_ms*/) {}
  virtual void PlayFileEnded(const WebRtc_Word32 id);
  virtual void RecordFileEnded(const WebRtc_Word32 /*id*/) {}

 private:
  bool play_back_started_;
  ViEInputManager& input_manager_;

  CriticalSectionWrapper* feedback_cs_;
  CriticalSectionWrapper* audio_cs_;

  FilePlayer* file_player_;
  bool audio_stream_;

  // Number of active video clients.
  int video_clients_;
  // Number of audio channels sending this audio.
  int audio_clients_;
  // Local audio channel playing this video. Sync video against this.
  int local_audio_channel_;

  ViEFileObserver* observer_;
  WebRtc_Word8 file_name_[FileWrapper::kMaxFileNameSize];

  // VoE interface.
  VoEFile* voe_file_interface_;
  VoEVideoSync* voe_video_sync_;

  // Thread for decoding video (and audio if no audio clients connected).
  ThreadWrapper* decode_thread_;
  EventWrapper* decode_event_;
  WebRtc_Word16 decoded_audio_[320];
  WebRtc_UWord32 decoded_audio_length_;

  // Trick - list containing the VoE buffers reading this file. Used if
  // multiple audio channels are sending.
  ListWrapper audio_channel_buffers_;
  // Audio channels sending audio from this file.
  MapWrapper audio_channels_sending_;
  // Frame receiving decoded video from file.
  VideoFrame decoded_video_;
};

} // namespace webrtc
#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_