Removed ViE file API.

R=asapersson@webrtc.org, niklas.enbom@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1723004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4267 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2013-06-26 09:12:49 +00:00
parent a5fd2f1348
commit 1c986e7c89
29 changed files with 52 additions and 3007 deletions

View File

@ -171,6 +171,12 @@ class WEBRTC_DLLEXPORT ViECodec {
virtual int WaitForFirstKeyFrame(const int video_channel,
const bool wait) = 0;
// Enables recording of debugging information.
virtual int StartDebugRecording(int video_channel,
const char* file_name_utf8) = 0;
// Disables recording of debugging information.
virtual int StopDebugRecording(int video_channel) = 0;
protected:
ViECodec() {}
virtual ~ViECodec() {}

View File

@ -1,216 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - File recording and playing.
// - Snapshots.
// - Background images.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
#include "webrtc/common_types.h"
namespace webrtc {
class VideoEngine;
struct VideoCodec;
// This structure contains picture data and describes the picture type.
struct ViEPicture {
unsigned char* data;
unsigned int size;
unsigned int width;
unsigned int height;
RawVideoType type;
ViEPicture() {
data = NULL;
size = 0;
width = 0;
height = 0;
type = kVideoI420;
}
// Call FreePicture to free data.
~ViEPicture() {
data = NULL;
size = 0;
width = 0;
height = 0;
type = kVideoUnknown;
}
};
// This enumerator tells which audio source to use for media files.
// This enumerator tells which audio source to use for media files.
// Passed to StartRecordOutgoingVideo()/StartRecordIncomingVideo() to select
// which audio, if any, is recorded alongside the video.
enum AudioSource {
  NO_AUDIO,    // Record or play video only, with no audio track.
  MICROPHONE,  // Presumably local microphone input — confirm against impl.
  PLAYOUT,     // Presumably the received (playout) audio — confirm.
  VOICECALL    // Presumably the mixed voice-call audio — confirm.
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViEFileObserver {
 public:
  // Called when playback of a file started with StartPlayFile() reaches the
  // end of the file. |file_id| is the id assigned by StartPlayFile().
  virtual void PlayFileEnded(const int32_t file_id) = 0;

 protected:
  // Non-virtual deletion through this interface is not intended; lifetime is
  // owned by the VideoEngine user.
  virtual ~ViEFileObserver() {}
};
// Sub-API for file recording/playback, snapshots and background images.
// All methods return 0 on success and -1 on failure unless noted otherwise.
class WEBRTC_DLLEXPORT ViEFile {
 public:
  // Factory for the ViEFile subAPI and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViEFile* GetInterface(VideoEngine* video_engine);

  // Releases the ViEFile sub-API and decreases an internal reference counter.
  // Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;

  // Starts playing a video file. On success |file_id| is set to an id
  // identifying this playback in subsequent calls.
  virtual int StartPlayFile(
      const char* file_name_utf8,
      int& file_id,
      const bool loop = false,
      const FileFormats file_format = kFileFormatAviFile) = 0;

  // Stops a file from being played.
  virtual int StopPlayFile(const int file_id) = 0;

  // Registers an instance of a user implementation of the ViEFileObserver.
  virtual int RegisterObserver(int file_id, ViEFileObserver& observer) = 0;

  // Removes an already registered instance of ViEFileObserver.
  virtual int DeregisterObserver(int file_id, ViEFileObserver& observer) = 0;

  // This function tells which channel, if any, the file should be sent on.
  virtual int SendFileOnChannel(const int file_id, const int video_channel) = 0;

  // Stops a file from being sent on a channel.
  virtual int StopSendFileOnChannel(const int video_channel) = 0;

  // Starts playing the file audio as microphone input for the specified voice
  // channel.
  virtual int StartPlayFileAsMicrophone(const int file_id,
                                        const int audio_channel,
                                        bool mix_microphone = false,
                                        float volume_scaling = 1) = 0;

  // Stops the file audio from being played on a VoiceEngine channel.
  virtual int StopPlayFileAsMicrophone(const int file_id,
                                       const int audio_channel) = 0;

  // Plays and mixes the file audio with the local speaker signal for playout.
  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
                                    float volume_scaling = 1) = 0;

  // Stops the audio from a file from being played locally.
  virtual int StopPlayAudioLocally(const int file_id,
                                   const int audio_channel) = 0;

  // This function starts recording the video transmitted to another endpoint.
  virtual int StartRecordOutgoingVideo(
      const int video_channel,
      const char* file_name_utf8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile) = 0;

  // This function starts recording the incoming video stream on a channel.
  virtual int StartRecordIncomingVideo(
      const int video_channel,
      const char* file_name_utf8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile) = 0;

  // Stops the file recording of the outgoing stream.
  virtual int StopRecordOutgoingVideo(const int video_channel) = 0;

  // Stops the file recording of the incoming stream.
  virtual int StopRecordIncomingVideo(const int video_channel) = 0;

  // Gets the audio codec, video codec and file format of a recorded file.
  virtual int GetFileInformation(
      const char* file_name,
      VideoCodec& video_codec,
      CodecInst& audio_codec,
      const FileFormats file_format = kFileFormatAviFile) = 0;

  // Takes a snapshot of the last rendered image for a video channel and
  // writes it to |file_name_utf8|.
  virtual int GetRenderSnapshot(const int video_channel,
                                const char* file_name_utf8) = 0;

  // Takes a snapshot of the last rendered image for a video channel and
  // returns it in |picture|. Free with FreePicture().
  virtual int GetRenderSnapshot(const int video_channel,
                                ViEPicture& picture) = 0;

  // Takes a snapshot of the last captured image by a specified capture device
  // and writes it to |file_name_utf8|.
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       const char* file_name_utf8) = 0;

  // Takes a snapshot of the last captured image by a specified capture device
  // and returns it in |picture|. Free with FreePicture().
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       ViEPicture& picture) = 0;

  // Frees the data held by a ViEPicture filled in by the snapshot methods
  // above (see the ViEPicture struct).
  virtual int FreePicture(ViEPicture& picture) = 0;

  // This function sets a jpg image to render before the first received video
  // frame is decoded for a specified channel.
  virtual int SetRenderStartImage(const int video_channel,
                                  const char* file_name_utf8) = 0;

  // This function sets an image to render before the first received video
  // frame is decoded for a specified channel.
  virtual int SetRenderStartImage(const int video_channel,
                                  const ViEPicture& picture) = 0;

  // This function sets a jpg image to render if no frame is decoded for a
  // specified time interval.
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const char* file_name_utf8,
                                    const unsigned int timeout_ms = 1000) = 0;

  // This function sets an image to render if no frame is decoded for a
  // specified time interval.
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const ViEPicture& picture,
                                    const unsigned int timeout_ms) = 0;

  // Enables recording of debugging information.
  virtual int StartDebugRecording(int video_channel,
                                  const char* file_name_utf8) = 0;

  // Disables recording of debugging information.
  virtual int StopDebugRecording(int video_channel) = 0;

 protected:
  ViEFile() {}
  virtual ~ViEFile() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_

View File

@ -39,10 +39,6 @@ TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest),
tests_->ViEEncryptionAPITest();
}
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest), RunsFileTestWithoutErrors) {
tests_->ViEFileAPITest();
}
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest),
RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessAPITest();

View File

@ -47,11 +47,6 @@ TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
tests_->ViEEncryptionExtendedTest();
}
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
RunsFileTestWithoutErrors) {
tests_->ViEFileExtendedTest();
}
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessExtendedTest();

View File

@ -47,10 +47,6 @@ TEST_F(ViEStandardIntegrationTest, RunsEncryptionTestWithoutErrors) {
tests_->ViEEncryptionStandardTest();
}
TEST_F(ViEStandardIntegrationTest, RunsFileTestWithoutErrors) {
tests_->ViEFileStandardTest();
}
TEST_F(ViEStandardIntegrationTest, RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessStandardTest();
}

View File

@ -22,7 +22,6 @@
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/include/vie_file.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
@ -89,11 +88,6 @@ public:
void ViEEncryptionExtendedTest();
void ViEEncryptionAPITest();
// vie_autotest_file.ccs
void ViEFileStandardTest();
void ViEFileExtendedTest();
void ViEFileAPITest();
// vie_autotest_image_process.cc
void ViEImageProcessStandardTest();
void ViEImageProcessExtendedTest();

View File

@ -65,7 +65,6 @@ void ViEAutoTest::ViEStandardTest()
ViECaptureStandardTest();
ViECodecStandardTest();
ViEEncryptionStandardTest();
ViEFileStandardTest();
ViEImageProcessStandardTest();
ViERenderStandardTest();
ViERtpRtcpStandardTest();
@ -77,7 +76,6 @@ void ViEAutoTest::ViEExtendedTest()
ViECaptureExtendedTest();
ViECodecExtendedTest();
ViEEncryptionExtendedTest();
ViEFileExtendedTest();
ViEImageProcessExtendedTest();
ViERenderExtendedTest();
ViERtpRtcpExtendedTest();
@ -89,7 +87,6 @@ void ViEAutoTest::ViEAPITest()
ViECaptureAPITest();
ViECodecAPITest();
ViEEncryptionAPITest();
ViEFileAPITest();
ViEImageProcessAPITest();
ViERenderAPITest();
ViERtpRtcpAPITest();

View File

@ -66,23 +66,19 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
vieAutoTest.ViEEncryptionStandardTest();
break;
case 6: // file
vieAutoTest.ViEFileStandardTest();
break;
case 7: // image process
case 6: // image process
vieAutoTest.ViEImageProcessStandardTest();
break;
case 8: // network
case 7: // network
vieAutoTest.ViENetworkStandardTest();
break;
case 9: // Render
case 8: // Render
vieAutoTest.ViERenderStandardTest();
break;
case 10: // RTP/RTCP
case 9: // RTP/RTCP
vieAutoTest.ViERtpRtcpStandardTest();
break;
@ -109,26 +105,22 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
vieAutoTest.ViEEncryptionAPITest();
break;
case 6: // file
vieAutoTest.ViEFileAPITest();
break;
case 7: // image process
case 6: // image process
vieAutoTest.ViEImageProcessAPITest();
break;
case 8: // network
case 7: // network
vieAutoTest.ViENetworkAPITest();
break;
case 9: // Render
case 8: // Render
vieAutoTest.ViERenderAPITest();
break;
case 10: // RTP/RTCP
case 9: // RTP/RTCP
vieAutoTest.ViERtpRtcpAPITest();
break;
case 11:
case 10:
break;
default:
@ -154,19 +146,15 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
vieAutoTest.ViEEncryptionExtendedTest();
break;
case 6: // file
vieAutoTest.ViEFileExtendedTest();
break;
case 7: // image process
case 6: // image process
vieAutoTest.ViEImageProcessExtendedTest();
break;
case 8: // Render
case 7: // Render
vieAutoTest.ViERenderExtendedTest();
break;
case 9: // RTP/RTCP
case 8: // RTP/RTCP
vieAutoTest.ViERtpRtcpExtendedTest();
break;

View File

@ -37,8 +37,6 @@
#define DEFAULT_VIDEO_CODEC_MAX_BITRATE "1000"
#define DEFAULT_AUDIO_PORT "11113"
#define DEFAULT_AUDIO_CODEC "ISAC"
#define DEFAULT_INCOMING_FILE_NAME "IncomingFile.avi"
#define DEFAULT_OUTGOING_FILE_NAME "OutgoingFile.avi"
#define DEFAULT_VIDEO_CODEC_MAX_FRAMERATE "30"
#define DEFAULT_VIDEO_PROTECTION_METHOD "None"
#define DEFAULT_TEMPORAL_LAYER "0"
@ -63,16 +61,6 @@ enum VideoProtectionMethod {
using webrtc::FromChoices;
using webrtc::TypedInput;
class ViEAutotestFileObserver : public webrtc::ViEFileObserver {
public:
ViEAutotestFileObserver() {}
~ViEAutotestFileObserver() {}
void PlayFileEnded(const int32_t file_id) {
ViETest::Log("PlayFile ended");
}
};
class ViEAutotestEncoderObserver : public webrtc::ViEEncoderObserver {
public:
ViEAutotestEncoderObserver() {}
@ -558,7 +546,6 @@ int ViEAutoTest::ViECustomCall() {
number_of_errors += ViETest::TestError(error == 0,
"ERROR: %s at line %d",
__FUNCTION__, __LINE__);
ViEAutotestFileObserver file_observer;
ViEAutotestEncoderObserver* codec_encoder_observer = NULL;
ViEAutotestDecoderObserver* codec_decoder_observer = NULL;

View File

@ -1,483 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
class ViEAutotestFileObserver: public webrtc::ViEFileObserver
{
public:
ViEAutotestFileObserver() {};
~ViEAutotestFileObserver() {};
void PlayFileEnded(const int32_t fileId)
{
ViETest::Log("PlayFile ended");
}
};
// End-to-end exercise of the ViEFile API over a local loopback call:
// records incoming/outgoing video to .avi, plays a recorded file back,
// takes render/capture snapshots (file and ViEPicture variants) and tests
// render start/timeout images. Strictly sequential; each section depends on
// the state left by the previous one.
void ViEAutoTest::ViEFileStandardTest()
{
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
    // Timing dependent; skipped on slow execution environments.
    if (!FLAGS_include_timing_dependent_tests) {
        ViETest::Log("Running in slow execution environment: skipping test...\n");
        return;
    }
    //***************************************************************
    // Begin create/initialize WebRTC Video Engine for testing
    //***************************************************************
    {
        ViETest::Log("Starting a loopback call...");
        TbInterfaces interfaces("ViEFileStandardTest");
        webrtc::VideoEngine* ptrViE = interfaces.video_engine;
        webrtc::ViEBase* ptrViEBase = interfaces.base;
        webrtc::ViECapture* ptrViECapture = interfaces.capture;
        webrtc::ViERender* ptrViERender = interfaces.render;
        webrtc::ViECodec* ptrViECodec = interfaces.codec;
        webrtc::ViERTP_RTCP* ptrViERtpRtcp = interfaces.rtp_rtcp;
        webrtc::ViENetwork* ptrViENetwork = interfaces.network;
        TbCaptureDevice captureDevice = TbCaptureDevice(interfaces);
        int captureId = captureDevice.captureId;
        int videoChannel = -1;
        EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
        EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
            captureId, videoChannel));
        EXPECT_EQ(0, ptrViERtpRtcp->SetRTCPStatus(
            videoChannel, webrtc::kRtcpCompound_RFC4585));
        EXPECT_EQ(0, ptrViERtpRtcp->SetKeyFrameRequestMethod(
            videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
        EXPECT_EQ(0, ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true));
        // Local preview in window 1, loopback-received video in window 2.
        EXPECT_EQ(0, ptrViERender->AddRenderer(
            captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
        EXPECT_EQ(0, ptrViERender->AddRenderer(
            videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
        EXPECT_EQ(0, ptrViERender->StartRender(captureId));
        EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
        // Accept every available codec on the receive side.
        webrtc::VideoCodec videoCodec;
        memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
        {
            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
            EXPECT_EQ(0, ptrViECodec->SetReceiveCodec(videoChannel,
                                                      videoCodec));
        }
        // Find the codec used for encoding the channel
        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
        {
            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
            if (videoCodec.codecType == webrtc::kVideoCodecVP8)
            {
                EXPECT_EQ(0, ptrViECodec->SetSendCodec(videoChannel, videoCodec));
                break;
            }
        }
        // Find the codec used for recording. Note: leaves |videoCodec| set to
        // I420 (or the last codec if I420 is absent) for the recording calls.
        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
        {
            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
            if (videoCodec.codecType == webrtc::kVideoCodecI420)
            {
                break;
            }
        }
        // Loopback transport on localhost.
        const char* ipAddress = "127.0.0.1";
        const unsigned short rtpPort = 6000;
        webrtc::scoped_ptr<webrtc::test::VideoChannelTransport>
            video_channel_transport(
                new webrtc::test::VideoChannelTransport(ptrViENetwork,
                                                        videoChannel));
        EXPECT_EQ(0, video_channel_transport->SetSendDestination(ipAddress,
                                                                 rtpPort));
        EXPECT_EQ(0, video_channel_transport->SetLocalReceiver(rtpPort));
        EXPECT_EQ(0, ptrViEBase->StartReceive(videoChannel));
        EXPECT_EQ(0, ptrViEBase->StartSend(videoChannel));
        webrtc::ViEFile* ptrViEFile = webrtc::ViEFile::GetInterface(ptrViE);
        EXPECT_TRUE(ptrViEFile != NULL);
        // Bring up a voice engine and connect an audio channel so recordings
        // can reference an audio codec.
        webrtc::VoiceEngine* ptrVEEngine = webrtc::VoiceEngine::Create();
        webrtc::VoEBase* ptrVEBase = webrtc::VoEBase::GetInterface(ptrVEEngine);
        ptrVEBase->Init();
        int audioChannel = ptrVEBase->CreateChannel();
        ptrViEBase->SetVoiceEngine(ptrVEEngine);
        ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel);
        webrtc::CodecInst audioCodec;
        webrtc::VoECodec* ptrVECodec =
            webrtc::VoECodec::GetInterface(ptrVEEngine);
        for (int index = 0; index < ptrVECodec->NumOfCodecs(); index++)
        {
            ptrVECodec->GetCodec(index, audioCodec);
            if (0 == strcmp(audioCodec.plname, "PCMU") || 0
                == strcmp(audioCodec.plname, "PCMA"))
            {
                break; // these two types are allowed as avi recording formats
            }
        }
        // NOTE(review): |audioCodec2| is passed to the recording calls below
        // while still uninitialized; |audioCodec| (PCMU/PCMA) is never used
        // for recording — confirm this is intentional.
        webrtc::CodecInst audioCodec2;
        //***************************************************************
        // Engine ready. Begin testing class
        //***************************************************************
        // Call started
        ViETest::Log("Call started\nYou should see local preview from camera\n"
                     "in window 1 and the remote video in window 2.");
        AutoTestSleep(2000);
        const int RENDER_TIMEOUT = 1000;
        const int TEST_SPACING = 1000;
        const int VIDEO_LENGTH = 5000;
        const std::string renderStartImage = webrtc::test::ResourcePath(
            "video_engine/renderStartImage", "jpg");
        const std::string renderTimeoutFile = webrtc::test::ResourcePath(
            "video_engine/renderTimeoutImage", "jpg");
        const std::string output = webrtc::test::OutputPath();
        const std::string snapshotCaptureDeviceFileName =
            output + "snapshotCaptureDevice.jpg";
        const std::string incomingVideo = output + "incomingVideo.avi";
        const std::string outgoingVideo = output + "outgoingVideo.avi";
        const std::string snapshotRenderFileName =
            output + "snapshotRenderer.jpg";
        webrtc::ViEPicture capturePicture;
        webrtc::ViEPicture renderPicture;
        ViEAutotestFileObserver fileObserver;
        int fileId;
        AutoTestSleep(TEST_SPACING);
        // Test debug information recording.
        EXPECT_EQ(0, ptrViEFile->StartDebugRecording(videoChannel,
            (webrtc::test::OutputPath() + "vie_autotest_debug.yuv").c_str()));
        // testing StartRecordIncomingVideo and StopRecordIncomingVideo
        {
            ViETest::Log("Recording incoming video (currently no audio) for %d "
                         "seconds", VIDEO_LENGTH);
            EXPECT_EQ(0, ptrViEFile->StartRecordIncomingVideo(
                videoChannel, incomingVideo.c_str(), webrtc::NO_AUDIO,
                audioCodec2, videoCodec));
            AutoTestSleep(VIDEO_LENGTH);
            ViETest::Log("Stop recording incoming video");
            EXPECT_EQ(0, ptrViEFile->StopRecordIncomingVideo(videoChannel));
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // testing GetFileInformation
        {
            webrtc::VideoCodec fileVideoCodec;
            webrtc::CodecInst fileAudioCodec;
            ViETest::Log("Reading video file information");
            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
                incomingVideo.c_str(), fileVideoCodec, fileAudioCodec));
            PrintAudioCodec(fileAudioCodec);
            PrintVideoCodec(fileVideoCodec);
        }
        // testing StartPlayFile and RegisterObserver
        {
            ViETest::Log("Start playing file: %s with observer",
                         incomingVideo.c_str());
            EXPECT_EQ(0, ptrViEFile->StartPlayFile(incomingVideo.c_str(),
                                                   fileId));
            ViETest::Log("Registering file observer");
            EXPECT_EQ(0, ptrViEFile->RegisterObserver(fileId, fileObserver));
            ViETest::Log("Done\n");
        }
        // testing SendFileOnChannel and StopSendFileOnChannel
        {
            ViETest::Log("Sending video on channel");
            // should fail since we are sending the capture device.
            EXPECT_NE(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
            // Disconnect the camera
            EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
            // And try playing the file again.
            EXPECT_EQ(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
            AutoTestSleep(VIDEO_LENGTH);
            ViETest::Log("Stopped sending video on channel");
            EXPECT_EQ(0, ptrViEFile->StopSendFileOnChannel(videoChannel));
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // stop playing the file
        {
            ViETest::Log("Stop playing the file.");
            EXPECT_EQ(0, ptrViEFile->StopPlayFile(fileId));
            ViETest::Log("Done\n");
        }
        // testing StartRecordOutgoingVideo and StopRecordOutgoingVideo
        {
            // connect the camera to the output.
            EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
                captureId, videoChannel));
            ViETest::Log("Recording outgoing video (currently no audio) for %d "
                         "seconds", VIDEO_LENGTH);
            EXPECT_EQ(0, ptrViEFile->StartRecordOutgoingVideo(
                videoChannel, outgoingVideo.c_str(), webrtc::NO_AUDIO,
                audioCodec2, videoCodec));
            AutoTestSleep(VIDEO_LENGTH);
            ViETest::Log("Stop recording outgoing video");
            EXPECT_EQ(0, ptrViEFile->StopRecordOutgoingVideo(videoChannel));
            ViETest::Log("Done\n");
        }
        // again testing GetFileInformation
        {
            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
                incomingVideo.c_str(), videoCodec, audioCodec2));
            PrintAudioCodec(audioCodec2);
            PrintVideoCodec(videoCodec);
        }
        AutoTestSleep(TEST_SPACING);
        // GetCaptureDeviceSnapshot — manual step: takes the reference picture
        // reused by the ViEPicture-based tests below.
        {
            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, ViEPicture)");
            ViETest::Log("Taking a picture to use for displaying ViEPictures "
                         "for the rest of file test");
            ViETest::Log("Hold an object to the camera. Ready?...");
            AutoTestSleep(1000);
            ViETest::Log("3");
            AutoTestSleep(1000);
            ViETest::Log("...2");
            AutoTestSleep(1000);
            ViETest::Log("...1");
            AutoTestSleep(1000);
            ViETest::Log("...Taking picture!");
            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
                captureId, capturePicture));
            ViETest::Log("Picture has been taken.");
            AutoTestSleep(TEST_SPACING);
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // GetRenderSnapshot (to file).
        // NOTE(review): the log message says videoChannel but |captureId| is
        // what is actually snapshotted here.
        {
            ViETest::Log("Testing GetRenderSnapshot(int, char*)");
            ViETest::Log("Taking snapshot of videoChannel %d", captureId);
            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
                captureId, snapshotRenderFileName.c_str()));
            ViETest::Log("Wrote image to file %s",
                         snapshotRenderFileName.c_str());
            ViETest::Log("Done\n");
            AutoTestSleep(TEST_SPACING);
        }
        // GetRenderSnapshot (to ViEPicture).
        {
            ViETest::Log("Testing GetRenderSnapshot(int, ViEPicture)");
            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
                captureId, renderPicture));
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // GetCaptureDeviceSnapshot (to file).
        {
            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, char*)");
            ViETest::Log("Taking snapshot from capture device %d", captureId);
            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
                captureId, snapshotCaptureDeviceFileName.c_str()));
            ViETest::Log("Wrote image to file %s",
                         snapshotCaptureDeviceFileName.c_str());
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // testing SetRenderStartImage(videoChannel, renderStartImage);
        {
            ViETest::Log("Testing SetRenderStartImage(int, char*)");
            // set render image, then stop capture and stop render to display it
            ViETest::Log("Stoping renderer, setting start image, then "
                         "restarting");
            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
                videoChannel, renderStartImage.c_str()));
            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
            ViETest::Log("Render start image should be displayed.");
            AutoTestSleep(RENDER_TIMEOUT);
            // restarting capture and render
            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // testing SetRenderStartImage(videoChannel, renderStartImage);
        {
            ViETest::Log("Testing SetRenderStartImage(int, ViEPicture)");
            // set render image, then stop capture and stop render to display it
            ViETest::Log("Stoping renderer, setting start image, then "
                         "restarting");
            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
                captureId, capturePicture));
            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
                videoChannel, capturePicture));
            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
            ViETest::Log("Render start image should be displayed.");
            AutoTestSleep(RENDER_TIMEOUT);
            // restarting capture and render
            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // testing SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
        // RENDER_TIMEOUT);
        {
            ViETest::Log("Testing SetRenderTimeoutImage(int, char*)");
            ViETest::Log("Stopping capture device to induce timeout of %d ms",
                         RENDER_TIMEOUT);
            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
                videoChannel, renderTimeoutFile.c_str(), RENDER_TIMEOUT));
            // now stop sending frames to the remote renderer and wait for
            // timeout
            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
            AutoTestSleep(RENDER_TIMEOUT);
            ViETest::Log("Timeout image should be displayed now for %d ms",
                         RENDER_TIMEOUT * 2);
            AutoTestSleep(RENDER_TIMEOUT * 2);
            // restart the capture device to undo the timeout
            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
            ViETest::Log("Restarting capture device");
            AutoTestSleep(RENDER_TIMEOUT);
            ViETest::Log("Done\n");
        }
        AutoTestSleep(TEST_SPACING);
        // Need to create a ViEPicture object to pass into this function.
        // SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
        // RENDER_TIMEOUT);
        {
            ViETest::Log("Testing SetRenderTimeoutImage(int, ViEPicture)");
            ViETest::Log("Stopping capture device to induce timeout of %d",
                         RENDER_TIMEOUT);
            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
                videoChannel, capturePicture, RENDER_TIMEOUT));
            // now stop sending frames to the remote renderer and wait for
            // timeout
            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
            AutoTestSleep(RENDER_TIMEOUT);
            ViETest::Log("Timeout image should be displayed now for %d",
                         RENDER_TIMEOUT * 2);
            AutoTestSleep(RENDER_TIMEOUT * 2);
            // restart the capture device to undo the timeout
            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
            ViETest::Log("Restarting capture device");
            ViETest::Log("Done\n");
        }
        // testing DeregisterObserver
        {
            ViETest::Log("Deregistering file observer");
            // Should fail since we don't observe this file.
            EXPECT_NE(0, ptrViEFile->DeregisterObserver(fileId, fileObserver));
        }
        // Stop debug record.
        EXPECT_EQ(0, ptrViEFile->StopDebugRecording(videoChannel));
        //***************************************************************
        // Testing finished. Tear down Video Engine
        //***************************************************************
        EXPECT_EQ(0, ptrViEBase->DisconnectAudioChannel(videoChannel));
        EXPECT_EQ(0, ptrViEBase->SetVoiceEngine(NULL));
        EXPECT_EQ(0, ptrVEBase->DeleteChannel(audioChannel));
        // VoE reference counting is per-object, so we use EXPECT_NE
        EXPECT_NE(0, ptrVEBase->Release());
        EXPECT_NE(0, ptrVECodec->Release());
        EXPECT_TRUE(webrtc::VoiceEngine::Delete(ptrVEEngine));
        EXPECT_EQ(0, ptrViEBase->StopReceive(videoChannel));
        EXPECT_EQ(0, ptrViEBase->StopSend(videoChannel));
        EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
        EXPECT_EQ(0, ptrViERender->RemoveRenderer(captureId));
        EXPECT_EQ(0, ptrViERender->RemoveRenderer(videoChannel));
        EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
        EXPECT_EQ(0, ptrViEFile->FreePicture(capturePicture));
        EXPECT_EQ(0, ptrViEFile->FreePicture(renderPicture));
        EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel));
        EXPECT_EQ(0, ptrViEFile->Release());
    }
#endif
}
// No extended file-API tests are implemented; the body is empty.
void ViEAutoTest::ViEFileExtendedTest()
{
}
// No file-API argument/contract tests are implemented; the body is empty.
void ViEAutoTest::ViEFileAPITest()
{
}

View File

@ -30,11 +30,10 @@ ViEAutoTestMain::ViEAutoTestMain() {
index_to_test_method_map_[2] = "RunsCaptureTestWithoutErrors";
index_to_test_method_map_[3] = "RunsCodecTestWithoutErrors";
index_to_test_method_map_[4] = "RunsEncryptionTestWithoutErrors";
index_to_test_method_map_[5] = "RunsFileTestWithoutErrors";
index_to_test_method_map_[6] = "RunsImageProcessTestWithoutErrors";
index_to_test_method_map_[7] = "RunsNetworkTestWithoutErrors";
index_to_test_method_map_[8] = "RunsRenderTestWithoutErrors";
index_to_test_method_map_[9] = "RunsRtpRtcpTestWithoutErrors";
index_to_test_method_map_[5] = "RunsImageProcessTestWithoutErrors";
index_to_test_method_map_[6] = "RunsNetworkTestWithoutErrors";
index_to_test_method_map_[7] = "RunsRenderTestWithoutErrors";
index_to_test_method_map_[8] = "RunsRtpRtcpTestWithoutErrors";
}
int ViEAutoTestMain::RunTests(int argc, char** argv) {

View File

@ -23,7 +23,6 @@
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_file.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
@ -428,12 +427,10 @@ int VideoEngineSampleRecordCode(void* window1, void* window2) {
return -1;
}
// Get file interface (video recording)
webrtc::ViEFile* vie_file = webrtc::ViEFile::GetInterface(ptrViE);
// Engine started
voe_apm->StartDebugRecording(audio_filename.c_str());
vie_file->StartDebugRecording(videoChannel, video_filename.c_str());
ptrViECodec->StartDebugRecording(videoChannel, video_filename.c_str());
ptrViERtpRtcp->StartRTPDump(videoChannel,
video_rtp_filename.c_str(), webrtc::kRtpOutgoing);
ptrVoERtpRtcp->StartRTPDump(audio_channel,
@ -461,7 +458,7 @@ int VideoEngineSampleRecordCode(void* window1, void* window2) {
ptrViERtpRtcp->StopRTPDump(videoChannel, webrtc::kRtpOutgoing);
ptrVoERtpRtcp->StopRTPDump(audio_channel, webrtc::kRtpOutgoing);
voe_apm->StopDebugRecording();
vie_file->StopDebugRecording(videoChannel);
ptrViECodec->StopDebugRecording(videoChannel);
if (enable_labeling == 1)
timing.close();

View File

@ -69,7 +69,6 @@
'source/vie_autotest_capture.cc',
'source/vie_autotest_codec.cc',
'source/vie_autotest_encryption.cc',
'source/vie_autotest_file.cc',
'source/vie_autotest_image_process.cc',
'source/vie_autotest_loopback.cc',
'source/vie_autotest_main.cc',

View File

@ -42,7 +42,6 @@
'include/vie_encryption.h',
'include/vie_errors.h',
'include/vie_external_codec.h',
'include/vie_file.h',
'include/vie_image_process.h',
'include/vie_network.h',
'include/vie_render.h',
@ -58,7 +57,6 @@
'vie_defines.h',
'vie_encryption_impl.h',
'vie_external_codec_impl.h',
'vie_file_impl.h',
'vie_image_process_impl.h',
'vie_impl.h',
'vie_network_impl.h',
@ -73,8 +71,6 @@
'vie_channel_manager.h',
'vie_encoder.h',
'vie_file_image.h',
'vie_file_player.h',
'vie_file_recorder.h',
'vie_frame_provider_base.h',
'vie_input_manager.h',
'vie_manager_base.h',
@ -93,7 +89,6 @@
'vie_codec_impl.cc',
'vie_encryption_impl.cc',
'vie_external_codec_impl.cc',
'vie_file_impl.cc',
'vie_image_process_impl.cc',
'vie_impl.cc',
'vie_network_impl.cc',
@ -107,8 +102,6 @@
'vie_channel_manager.cc',
'vie_encoder.cc',
'vie_file_image.cc',
'vie_file_player.cc',
'vie_file_recorder.cc',
'vie_frame_provider_base.cc',
'vie_input_manager.cc',
'vie_manager_base.cc',

View File

@ -100,7 +100,6 @@ ViEChannel::ViEChannel(int32_t channel_id,
external_encryption_(NULL),
effect_filter_(NULL),
color_enhancement_(false),
file_recorder_(channel_id),
mtu_(0),
sender_(sender),
nack_history_size_sender_(kSendSidePacketHistorySize),
@ -1699,9 +1698,6 @@ int32_t ViEChannel::FrameToRender(
VideoProcessingModule::ColorEnhancement(&video_frame);
}
// Record videoframe.
file_recorder_.RecordVideoFrame(video_frame);
uint32_t arr_ofCSRC[kRtpCsrcSize];
int32_t no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
if (no_of_csrcs <= 0) {
@ -1913,17 +1909,6 @@ int32_t ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
return 0;
}
// Returns the channel's incoming-video file recorder and registers this
// channel as the VCM frame-storage callback so frames are delivered before
// decoding.
ViEFileRecorder& ViEChannel::GetIncomingFileRecorder() {
// Start getting callback of all frames before they are decoded.
vcm_.RegisterFrameStorageCallback(this);
return file_recorder_;
}
// Unregisters the VCM frame-storage callback installed by
// GetIncomingFileRecorder().
void ViEChannel::ReleaseIncomingFileRecorder() {
// Stop getting callback of all frames before they are decoded.
vcm_.RegisterFrameStorageCallback(NULL);
}
void ViEChannel::OnApplicationDataReceived(const int32_t id,
const uint8_t sub_type,
const uint32_t name,

View File

@ -22,7 +22,6 @@
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_file_recorder.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
#include "webrtc/video_engine/vie_receiver.h"
#include "webrtc/video_engine/vie_sender.h"
@ -322,9 +321,6 @@ class ViEChannel
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
ViEFileRecorder& GetIncomingFileRecorder();
void ReleaseIncomingFileRecorder();
protected:
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();
@ -395,8 +391,6 @@ class ViEChannel
ViEEffectFilter* effect_filter_;
bool color_enhancement_;
ViEFileRecorder file_recorder_;
// User set MTU, -1 if not set.
uint16_t mtu_;
const bool sender_;

View File

@ -691,6 +691,31 @@ int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
return 0;
}
// Enables debug recording for |video_channel| by forwarding the request to
// the channel's encoder. Returns 0 on success, -1 if the channel has no
// encoder.
int ViECodecImpl::StartDebugRecording(int video_channel,
const char* file_name_utf8) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StartDebugRecording(file_name_utf8);
}
// Disables debug recording for |video_channel|. Returns 0 on success, -1 if
// the channel has no encoder.
int ViECodecImpl::StopDebugRecording(int video_channel) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StopDebugRecording();
}
bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// Check pl_name matches codec_type.
if (video_codec.codecType == kVideoCodecRED) {

View File

@ -67,6 +67,9 @@ class ViECodecImpl
virtual int DeregisterDecoderObserver(const int video_channel);
virtual int SendKeyFrame(const int video_channel);
virtual int WaitForFirstKeyFrame(const int video_channel, const bool wait);
virtual int StartDebugRecording(int video_channel,
const char* file_name_utf8);
virtual int StopDebugRecording(int video_channel);
protected:
explicit ViECodecImpl(ViESharedData* shared_data);

View File

@ -59,9 +59,6 @@ enum { kViEMaxSrtpAuthSh1Length = 20 };
enum { kViEMaxSrtpTagAuthNullLength = 12 };
enum { kViEMaxSrtpKeyAuthNullLength = 256 };
// ViEFile
enum { kViEMaxFilePlayers = 3 };
// ViENetwork
enum { kViEMaxMtu = 1500 };
enum { kViESocketThreads = 1 };
@ -87,8 +84,6 @@ enum {
kViEChannelIdMax = 0xFF,
kViECaptureIdBase = 0x1001,
kViECaptureIdMax = 0x10FF,
kViEFileIdBase = 0x2000,
kViEFileIdMax = 0x200F,
kViEDummyChannelId = 0xFFFF
};

View File

@ -132,7 +132,6 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
picture_id_sli_(0),
has_received_rpsi_(false),
picture_id_rpsi_(0),
file_recorder_(channel_id),
qm_callback_(NULL) {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
ViEId(engine_id, channel_id),
@ -587,8 +586,6 @@ void ViEEncoder::DeliverFrame(int id,
video_frame->height());
}
}
// Record raw frame.
file_recorder_.RecordVideoFrame(*video_frame);
// Make sure the CSRC list is correct.
if (num_csrcs > 0) {
@ -663,7 +660,6 @@ void ViEEncoder::DelayChanged(int id, int frame_delay) {
frame_delay);
default_rtp_rtcp_->SetCameraDelay(frame_delay);
file_recorder_.SetFrameDelay(frame_delay);
}
int ViEEncoder::GetPreferedFrameSettings(int* width,
@ -1055,10 +1051,6 @@ int32_t ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
return 0;
}
// Accessor for the encoder's outgoing-video file recorder.
ViEFileRecorder& ViEEncoder::GetOutgoingFileRecorder() {
return file_recorder_;
}
// Enables recording of debugging information by forwarding to the VCM.
int ViEEncoder::StartDebugRecording(const char* fileNameUTF8) {
return vcm_.StartDebugRecording(fileNameUTF8);
}

View File

@ -22,7 +22,6 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_file_recorder.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
namespace webrtc {
@ -156,9 +155,6 @@ class ViEEncoder
// Effect filter.
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
// Recording.
ViEFileRecorder& GetOutgoingFileRecorder();
// Enables recording of debugging information.
virtual int StartDebugRecording(const char* fileNameUTF8);
@ -217,8 +213,6 @@ class ViEEncoder
uint64_t picture_id_rpsi_;
std::map<unsigned int, int> ssrc_streams_;
ViEFileRecorder file_recorder_;
// Quality modes callback
QMVideoSettingsCallback* qm_callback_;
};

View File

@ -1,958 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/vie_file_impl.h"
#include "webrtc/engine_configurations.h"
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
#include "webrtc/common_video/jpeg/include/jpeg.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/condition_variable_wrapper.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_encoder.h"
#include "webrtc/video_engine/vie_file_image.h"
#include "webrtc/video_engine/vie_file_player.h"
#include "webrtc/video_engine/vie_file_recorder.h"
#include "webrtc/video_engine/vie_impl.h"
#include "webrtc/video_engine/vie_input_manager.h"
#include "webrtc/video_engine/vie_render_manager.h"
#include "webrtc/video_engine/vie_renderer.h"
#endif
namespace webrtc {
// Factory accessor for the ViEFile sub-API. Returns NULL when the file API
// is compiled out (WEBRTC_VIDEO_ENGINE_FILE_API undefined) or when
// |video_engine| is NULL; otherwise increases the interface reference count
// and returns the implementation. Callers must balance with Release().
ViEFile* ViEFile::GetInterface(VideoEngine* video_engine) {
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
if (!video_engine) {
return NULL;
}
VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
ViEFileImpl* vie_file_impl = vie_impl;
// Increase ref count.
(*vie_file_impl)++;
return vie_file_impl;
#else
return NULL;
#endif
}
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
// Decreases the interface reference count taken in ViEFile::GetInterface().
// Returns the remaining reference count, or -1 (with last error set) if
// Release() was called more times than GetInterface().
int ViEFileImpl::Release() {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
"ViEFile::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEFile release too many times");
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
"ViEFile reference count: %d", ref_count);
return ref_count;
}
// Constructor: stores the engine-wide shared data; no other state is owned.
ViEFileImpl::ViEFileImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViEFileImpl::ViEFileImpl() Ctor");
}
// Destructor: only traces; |shared_data_| is not owned by this class.
ViEFileImpl::~ViEFileImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
"ViEFileImpl::~ViEFileImpl() Dtor");
}
// Creates a file player for |file_nameUTF8| via the input manager and
// returns its id in |file_id| (out-parameter). |loop| selects repeated
// playback; |file_format| selects the container format. Requires the ViE
// instance to be initialized. Returns 0 on success, -1 on failure with the
// last error set.
int ViEFileImpl::StartPlayFile(const char* file_nameUTF8,
int& file_id,
const bool loop,
const FileFormats file_format) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s", __FUNCTION__);
if (!shared_data_->Initialized()) {
shared_data_->SetLastError(kViENotInitialized);
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s - ViE instance %d not initialized", __FUNCTION__,
shared_data_->instance_id());
return -1;
}
// The voice engine (may be NULL) is passed so the player can route audio.
VoiceEngine* voice = shared_data_->channel_manager()->GetVoiceEngine();
const int32_t result = shared_data_->input_manager()->CreateFilePlayer(
file_nameUTF8, loop, file_format, voice, file_id);
if (result != 0) {
shared_data_->SetLastError(result);
return -1;
}
return 0;
}
// Stops and destroys the file player |file_id|. Returns -1 with last error
// set if the file is not currently playing; otherwise returns the result of
// DestroyFilePlayer().
int ViEFileImpl::StopPlayFile(const int file_id) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s(file_id: %d)", __FUNCTION__, file_id);
// Inner scope: the scoped input-manager guard must be released before
// DestroyFilePlayer() is called below.
{
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
if (!vie_file_player) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: File with id %d is not playing.", __FUNCTION__,
file_id);
shared_data_->SetLastError(kViEFileNotPlaying);
return -1;
}
}
// Destroy the capture device.
return shared_data_->input_manager()->DestroyFilePlayer(file_id);
}
// Registers |observer| to receive callbacks from the file player |file_id|.
// Fails (returns -1, last error set) when the file is not playing, an
// observer is already registered, or the player rejects the registration.
// Returns 0 on success.
int ViEFileImpl::RegisterObserver(int file_id,
                                  ViEFileObserver& observer) {  // NOLINT
  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
               "%s(file_id: %d)", __FUNCTION__, file_id);
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
  if (!vie_file_player) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
                 "%s: File with id %d is not playing.", __FUNCTION__,
                 file_id);
    shared_data_->SetLastError(kViEFileNotPlaying);
    return -1;
  }
  // Only one observer per player is supported.
  if (vie_file_player->IsObserverRegistered()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), file_id),
                 "%s: Observer already registered", __FUNCTION__);
    shared_data_->SetLastError(kViEFileObserverAlreadyRegistered);
    return -1;
  }
  if (vie_file_player->RegisterObserver(&observer) != 0) {
    // Fix: the trace previously passed a stray |file_id| argument with no
    // matching conversion specifier in the format string.
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), file_id),
                 "%s: Failed to register observer", __FUNCTION__);
    shared_data_->SetLastError(kViEFileUnknownError);
    return -1;
  }
  return 0;
}
// Removes the observer registered on file player |file_id|. The |observer|
// argument is unused beyond the API signature; deregistration clears
// whichever observer is installed. Fails (returns -1, last error set) when
// the file is not playing, no observer is registered, or the player rejects
// the deregistration. Returns 0 on success.
int ViEFileImpl::DeregisterObserver(int file_id,
                                    ViEFileObserver& observer) {  // NOLINT
  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
               "%s(file_id: %d)", __FUNCTION__, file_id);
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
  if (!vie_file_player) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
                 "%s: File with id %d is not playing.", __FUNCTION__,
                 file_id);
    shared_data_->SetLastError(kViEFileNotPlaying);
    return -1;
  }
  if (!vie_file_player->IsObserverRegistered()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), file_id),
                 "%s: No Observer registered", __FUNCTION__);
    shared_data_->SetLastError(kViEFileObserverNotRegistered);
    return -1;
  }
  if (vie_file_player->DeRegisterObserver() != 0) {
    // Fix: the trace previously passed a stray |file_id| argument with no
    // matching conversion specifier in the format string.
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), file_id),
                 "%s: Failed to deregister observer", __FUNCTION__);
    shared_data_->SetLastError(kViEFileUnknownError);
    return -1;
  }
  return 0;
}
// Connects the file player |file_id| as the frame source for the encoder of
// |video_channel| so the file's video is sent on that channel. Fails
// (returns -1, last error set) when the channel doesn't exist, the channel
// already has a frame provider (capture device or file), the file is not
// playing, or the frame-callback registration fails. Returns 0 on success.
int ViEFileImpl::SendFileOnChannel(const int file_id, const int video_channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
               "%s(file_id: %d)", __FUNCTION__, file_id);
  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
  if (!vie_encoder) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
    shared_data_->SetLastError(kViEFileInvalidChannelId);
    return -1;
  }
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  // A channel can only have one frame provider at a time.
  if (is.FrameProvider(vie_encoder) != NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Channel %d already connected to a capture device or "
                 "file.", __FUNCTION__, video_channel);
    shared_data_->SetLastError(kViEFileInputAlreadyConnected);
    return -1;
  }
  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
  if (!vie_file_player) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
                 "%s: File with id %d is not playing.", __FUNCTION__,
                 file_id);
    shared_data_->SetLastError(kViEFileNotPlaying);
    return -1;
  }
  if (vie_file_player->RegisterFrameCallback(video_channel, vie_encoder)
      != 0) {
    // Fix: the trace previously passed a stray |file_id| argument with no
    // matching conversion specifier in the format string.
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
                 "%s: Failed to register frame callback.", __FUNCTION__);
    shared_data_->SetLastError(kViEFileUnknownError);
    return -1;
  }
  return 0;
}
// Disconnects a file player from the encoder of |video_channel|. Fails
// (returns -1, last error set) when the channel doesn't exist, when no file
// player is connected (the provider id is checked against the file-id
// range), or when deregistration fails. Returns 0 on success.
int ViEFileImpl::StopSendFileOnChannel(const int video_channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
               "%s(video_channel: %d)", __FUNCTION__, video_channel);
  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
  if (!vie_encoder) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
    shared_data_->SetLastError(kViEFileInvalidChannelId);
    return -1;
  }
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
  // Ids in [kViEFileIdBase, kViEFileIdMax] identify file players; any other
  // provider (e.g. a capture device) means no file is connected.
  if (!frame_provider ||
      frame_provider->Id() < kViEFileIdBase ||
      frame_provider->Id() > kViEFileIdMax) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: No file connected to Channel %d", __FUNCTION__,
                 video_channel);
    shared_data_->SetLastError(kViEFileNotConnected);
    return -1;
  }
  if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Failed to deregister file from channel %d",
                 __FUNCTION__, video_channel);
    shared_data_->SetLastError(kViEFileUnknownError);
    // Fix: previously this path set the last error but still returned 0;
    // report the failure like every other error path in this API.
    return -1;
  }
  return 0;
}
// Routes the audio of playing file |file_id| into voice channel
// |audio_channel| as microphone input. |mix_microphone| mixes with the real
// microphone; |volume_scaling| scales the file audio. Returns 0 on success,
// -1 with last error set on failure.
int ViEFileImpl::StartPlayFileAsMicrophone(const int file_id,
const int audio_channel,
bool mix_microphone,
float volume_scaling) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
if (!vie_file_player) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: File with id %d is not playing.", __FUNCTION__,
file_id);
shared_data_->SetLastError(kViEFileNotPlaying);
return -1;
}
if (vie_file_player->SendAudioOnChannel(audio_channel, mix_microphone,
volume_scaling) != 0) {
shared_data_->SetLastError(kViEFileVoEFailure);
return -1;
}
return 0;
}
// Stops routing the audio of file |file_id| into voice channel
// |audio_channel|. Returns 0 on success, -1 with last error set on failure.
int ViEFileImpl::StopPlayFileAsMicrophone(const int file_id,
const int audio_channel) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
if (!vie_file_player) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: File with id %d is not playing.", __FUNCTION__,
file_id);
shared_data_->SetLastError(kViEFileNotPlaying);
return -1;
}
if (vie_file_player->StopSendAudioOnChannel(audio_channel) != 0) {
shared_data_->SetLastError(kViEFileVoEFailure);
return -1;
}
return 0;
}
// Plays the audio of file |file_id| locally on voice channel
// |audio_channel|, scaled by |volume_scaling|. Returns 0 on success, -1
// with last error set on failure.
int ViEFileImpl::StartPlayAudioLocally(const int file_id,
const int audio_channel,
float volume_scaling) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
if (!vie_file_player) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: File with id %d is not playing.", __FUNCTION__,
file_id);
shared_data_->SetLastError(kViEFileNotPlaying);
return -1;
}
if (vie_file_player->PlayAudioLocally(audio_channel, volume_scaling) != 0) {
shared_data_->SetLastError(kViEFileVoEFailure);
return -1;
}
return 0;
}
// Stops local playout of file |file_id|'s audio on voice channel
// |audio_channel|. Returns 0 on success, -1 with last error set on failure.
int ViEFileImpl::StopPlayAudioLocally(const int file_id,
const int audio_channel) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
if (!vie_file_player) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s: File with id %d is not playing.", __FUNCTION__,
file_id);
shared_data_->SetLastError(kViEFileNotPlaying);
return -1;
}
if (vie_file_player->StopPlayAudioLocally(audio_channel) != 0) {
shared_data_->SetLastError(kViEFileVoEFailure);
return -1;
}
return 0;
}
// Starts recording the outgoing (sent) video of |video_channel| to
// |file_nameUTF8|, optionally with audio from |audio_source| encoded as
// |audio_codec|. Video is encoded as |video_codec| in container
// |file_format|. Returns 0 on success, -1 with last error set on failure.
int ViEFileImpl::StartRecordOutgoingVideo(const int video_channel,
const char* file_nameUTF8,
AudioSource audio_source,
const CodecInst& audio_codec,
const VideoCodec& video_codec,
const FileFormats file_format) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s video_channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidChannelId);
return -1;
}
// Only one outgoing recording per channel at a time.
ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
if (file_recorder.RecordingStarted()) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Already recording outgoing video on channel %d",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileAlreadyRecording);
return -1;
}
// Resolve the voice channel and VoiceEngine only when audio is requested.
int32_t ve_channel_id = -1;
VoiceEngine* ve_ptr = NULL;
if (audio_source != NO_AUDIO) {
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
// Channel should exists since we have a ViEEncoder above.
assert(false);
return -1;
}
ve_channel_id = vie_channel->VoiceChannel();
ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
if (!ve_ptr) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Can't access voice engine. Have SetVoiceEngine "
"been called?", __FUNCTION__);
shared_data_->SetLastError(kViEFileVoENotSet);
return -1;
}
}
if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
ve_channel_id, audio_codec, ve_ptr,
file_format) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Failed to start recording. Check arguments.",
__FUNCTION__);
shared_data_->SetLastError(kViEFileUnknownError);
return -1;
}
return 0;
}
// Stops an ongoing outgoing-video recording on |video_channel|. Returns 0
// on success, -1 with last error set when the channel doesn't exist, no
// recording is active, or stopping fails.
int ViEFileImpl::StopRecordOutgoingVideo(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s video_channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidChannelId);
return -1;
}
ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
if (!file_recorder.RecordingStarted()) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d is not recording.", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViEFileNotRecording);
return -1;
}
if (file_recorder.StopRecording() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Failed to stop recording of channel %d.", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViEFileUnknownError);
return -1;
}
return 0;
}
// Stops an ongoing incoming-video recording on |video_channel|. Note that
// ReleaseIncomingFileRecorder() is called on every exit path after the
// recorder has been acquired, so the channel always stops delivering
// pre-decode frames to the recorder.
int ViEFileImpl::StopRecordIncomingVideo(const int video_channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s video_channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidChannelId);
return -1;
}
ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
if (!file_recorder.RecordingStarted()) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d is not recording.", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViEFileNotRecording);
vie_channel->ReleaseIncomingFileRecorder();
return -1;
}
if (file_recorder.StopRecording() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Failed to stop recording of channel %d.",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileUnknownError);
vie_channel->ReleaseIncomingFileRecorder();
return -1;
}
// Let the channel know we are no longer recording.
vie_channel->ReleaseIncomingFileRecorder();
return 0;
}
// Starts recording the incoming (received) video of |video_channel| to
// |file_nameUTF8|, optionally with audio from |audio_source| encoded as
// |audio_codec|. Video is encoded as |video_codec| in container
// |file_format|. Returns 0 on success, -1 with last error set on failure.
int ViEFileImpl::StartRecordIncomingVideo(const int video_channel,
                                          const char* file_nameUTF8,
                                          AudioSource audio_source,
                                          const CodecInst& audio_codec,
                                          const VideoCodec& video_codec,
                                          const FileFormats file_format) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
               ViEId(shared_data_->instance_id(), video_channel),
               "%s video_channel: %d)", __FUNCTION__, video_channel);
  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
  ViEChannel* vie_channel = cs.Channel(video_channel);
  if (!vie_channel) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Channel %d doesn't exist", __FUNCTION__,
                 video_channel);
    shared_data_->SetLastError(kViEFileInvalidChannelId);
    return -1;
  }
  // Only one incoming recording per channel at a time.
  ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
  if (file_recorder.RecordingStarted()) {
    // Fix: the message previously said "outgoing" in this incoming-video
    // path (copy-paste from StartRecordOutgoingVideo).
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Already recording incoming video on channel %d",
                 __FUNCTION__, video_channel);
    shared_data_->SetLastError(kViEFileAlreadyRecording);
    return -1;
  }
  // Resolve the voice channel and VoiceEngine only when audio is requested.
  int32_t ve_channel_id = -1;
  VoiceEngine* ve_ptr = NULL;
  if (audio_source != NO_AUDIO) {
    ve_channel_id = vie_channel->VoiceChannel();
    ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
    if (!ve_ptr) {
      WEBRTC_TRACE(kTraceError, kTraceVideo,
                   ViEId(shared_data_->instance_id(), video_channel),
                   "%s: Can't access voice engine. Have SetVoiceEngine "
                   "been called?", __FUNCTION__);
      shared_data_->SetLastError(kViEFileVoENotSet);
      return -1;
    }
  }
  if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
                                   ve_channel_id, audio_codec, ve_ptr,
                                   file_format) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo,
                 ViEId(shared_data_->instance_id(), video_channel),
                 "%s: Failed to start recording. Check arguments.",
                 __FUNCTION__);
    shared_data_->SetLastError(kViEFileUnknownError);
    return -1;
  }
  return 0;
}
// Retrieves the video and audio codec settings stored in |file_name|
// (out-parameters |video_codec| and |audio_codec|) by delegating to the
// static ViEFilePlayer helper.
int ViEFileImpl::GetFileInformation(const char* file_name,
VideoCodec& video_codec,
CodecInst& audio_codec,
const FileFormats file_format) {
return ViEFilePlayer::GetFileInformation(
shared_data_->instance_id(),
file_name, video_codec, audio_codec, file_format);
}
// Writes the last rendered frame of |video_channel| to |file_nameUTF8| as a
// JPEG image. Returns 0 on success, -1 on failure. Note: unlike most
// methods in this file, failure paths here do not set the last error.
int ViEFileImpl::GetRenderSnapshot(const int video_channel,
const char* file_nameUTF8) {
// Gain access to the renderer for the specified channel and get it's
// current frame.
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(video_channel);
if (!renderer) {
return -1;
}
I420VideoFrame video_frame;
if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
return -1;
}
// JPEGEncoder writes the jpeg file for you (no control over it) and does
// not return you the buffer. Thus, we are not going to be writing to the
// disk here.
JpegEncoder jpeg_encoder;
if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
"\tCould not open output file '%s' for writing!",
file_nameUTF8);
return -1;
}
if (jpeg_encoder.Encode(video_frame) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
"\tCould not encode i420 -> jpeg file '%s' for writing!",
file_nameUTF8);
return -1;
}
return 0;
}
// Copies the last rendered frame of |video_channel| into |picture| as a raw
// I420 buffer. The buffer is malloc'ed here and owned by the caller, who
// must release it with FreePicture(). Returns 0 on success, -1 on failure.
int ViEFileImpl::GetRenderSnapshot(const int video_channel,
                                   ViEPicture& picture) {
  // Gain access to the renderer for the specified channel and get its
  // current frame.
  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
  ViERenderer* renderer = rs.Renderer(video_channel);
  if (!renderer) {
    return -1;
  }
  I420VideoFrame video_frame;
  if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
    return -1;
  }
  // Copy from VideoFrame class to ViEPicture struct.
  int buffer_length = CalcBufferSize(kI420, video_frame.width(),
                                     video_frame.height());
  // Fix: validate the computed size and the allocation instead of writing
  // through an unchecked malloc result.
  if (buffer_length <= 0) {
    return -1;
  }
  picture.data = static_cast<uint8_t*>(malloc(buffer_length * sizeof(uint8_t)));
  if (!picture.data) {
    return -1;
  }
  if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) {
    // Fix: don't leak the buffer (or hand the caller a dangling pointer)
    // when extraction fails.
    free(picture.data);
    picture.data = NULL;
    return -1;
  }
  picture.size = buffer_length;
  picture.width = video_frame.width();
  picture.height = video_frame.height();
  picture.type = kVideoI420;
  return 0;
}
// Grabs the next frame from capture device |capture_id| and writes it to
// |file_nameUTF8| as a JPEG image. Returns 0 on success, -1 on failure.
// Failure paths trace but do not set the last error.
int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
                                          const char* file_nameUTF8) {
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViECapturer* capturer = is.Capture(capture_id);
  if (!capturer) {
    return -1;
  }
  I420VideoFrame video_frame;
  if (GetNextCapturedFrame(capture_id, &video_frame) == -1) {
    // Fix: the "%s:%d" in the format string was only given __FUNCTION__;
    // supply __LINE__ for the %d as intended.
    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
                 "Could not gain acces to capture device %d video frame "
                 "%s:%d", capture_id, __FUNCTION__, __LINE__);
    return -1;
  }
  // JPEGEncoder writes the jpeg file for you (no control over it) and does
  // not return you the buffer Thusly, we are not going to be writing to the
  // disk here.
  JpegEncoder jpeg_encoder;
  if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
                 "\tCould not open output file '%s' for writing!",
                 file_nameUTF8);
    return -1;
  }
  if (jpeg_encoder.Encode(video_frame) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
                 "\tCould not encode i420 -> jpeg file '%s' for "
                 "writing!", file_nameUTF8);
    return -1;
  }
  return 0;
}
// Grabs the next frame from capture device |capture_id| and copies it into
// |picture| as a raw I420 buffer. The buffer is malloc'ed here and owned by
// the caller, who must release it with FreePicture(). Returns 0 on success,
// -1 on failure.
int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
                                          ViEPicture& picture) {
  I420VideoFrame video_frame;
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViECapturer* capturer = is.Capture(capture_id);
  if (!capturer) {
    return -1;
  }
  if (GetNextCapturedFrame(capture_id, &video_frame) == -1) {
    // Fix: the "%s:%d" in the format string was only given __FUNCTION__;
    // supply __LINE__ for the %d as intended.
    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
                 "Could not gain acces to capture device %d video frame "
                 "%s:%d", capture_id, __FUNCTION__, __LINE__);
    return -1;
  }
  // Copy from VideoFrame class to ViEPicture struct.
  int buffer_length = CalcBufferSize(kI420, video_frame.width(),
                                     video_frame.height());
  // Fix: validate the computed size and the allocation instead of writing
  // through an unchecked malloc result.
  if (buffer_length <= 0) {
    return -1;
  }
  picture.data = static_cast<uint8_t*>(malloc(buffer_length * sizeof(uint8_t)));
  if (!picture.data) {
    return -1;
  }
  if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) {
    // Fix: don't leak the buffer (or hand the caller a dangling pointer)
    // when extraction fails.
    free(picture.data);
    picture.data = NULL;
    return -1;
  }
  picture.size = buffer_length;
  picture.width = video_frame.width();
  picture.height = video_frame.height();
  picture.type = kVideoI420;
  return 0;
}
// Releases the pixel buffer owned by |picture| (allocated by the snapshot
// methods) and resets every field to its empty state. Always returns 0.
int ViEFileImpl::FreePicture(ViEPicture& picture) {  // NOLINT
  // free(NULL) is a well-defined no-op, so no guard is required.
  free(picture.data);
  picture.data = NULL;
  picture.size = 0;
  picture.width = 0;
  picture.height = 0;
  picture.type = kVideoUnknown;
  return 0;
}
// Loads the JPEG at |file_nameUTF8| and sets it as the image the renderer
// of |video_channel| shows before the first frame arrives. Returns 0 on
// success, -1 with last error set on failure.
int ViEFileImpl::SetRenderStartImage(const int video_channel,
const char* file_nameUTF8) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(video_channel);
if (!renderer) {
shared_data_->SetLastError(kViEFileInvalidRenderId);
return -1;
}
// Decode the JPEG file into an I420 frame before handing it to the
// renderer.
I420VideoFrame start_image;
if (ViEFileImage::ConvertJPEGToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
&start_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to open file.", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViEFileInvalidFile);
return -1;
}
if (renderer->SetRenderStartImage(start_image) != 0) {
shared_data_->SetLastError(kViEFileSetStartImageError);
return -1;
}
return 0;
}
// Sets the caller-supplied I420 |picture| as the image the renderer of
// |video_channel| shows before the first frame arrives. Only kVideoI420
// pictures are accepted. Returns 0 on success, -1 with last error set on
// failure.
int ViEFileImpl::SetRenderStartImage(const int video_channel,
const ViEPicture& picture) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
if (picture.type != kVideoI420) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Not a valid picture type.",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidArgument);
return -1;
}
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(video_channel);
if (!renderer) {
shared_data_->SetLastError(kViEFileInvalidRenderId);
return -1;
}
// Convert the raw picture into an I420 frame before handing it to the
// renderer.
I420VideoFrame start_image;
if (ViEFileImage::ConvertPictureToI420VideoFrame(
ViEId(shared_data_->instance_id(), video_channel), picture,
&start_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to use picture.",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidCapture);
return -1;
}
if (renderer->SetRenderStartImage(start_image) != 0) {
shared_data_->SetLastError(kViEFileSetStartImageError);
return -1;
}
return 0;
}
// Loads the JPEG at |file_nameUTF8| and sets it as the image the renderer
// of |video_channel| shows when no frame has been rendered for
// |timeout_ms|. The timeout is clamped to
// [kViEMinRenderTimeoutTimeMs, kViEMaxRenderTimeoutTimeMs]. Returns 0 on
// success, -1 with last error set on failure.
int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
const char* file_nameUTF8,
const unsigned int timeout_ms) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(video_channel);
if (!renderer) {
shared_data_->SetLastError(kViEFileInvalidRenderId);
return -1;
}
I420VideoFrame timeout_image;
if (ViEFileImage::ConvertJPEGToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
&timeout_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to open file.", __FUNCTION__,
video_channel);
shared_data_->SetLastError(kViEFileInvalidFile);
return -1;
}
// Clamp the timeout into the supported range, warning on out-of-range
// input.
int32_t timeout_time = timeout_ms;
if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Invalid timeout_ms, using %d.",
__FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
timeout_time = kViEMinRenderTimeoutTimeMs;
}
if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Invalid timeout_ms, using %d.",
__FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
timeout_time = kViEMaxRenderTimeoutTimeMs;
}
if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
return -1;
}
return 0;
}
// Sets the caller-supplied I420 |picture| as the image the renderer of
// |video_channel| shows when no frame has been rendered for |timeout_ms|.
// Only kVideoI420 pictures are accepted; the timeout is clamped to
// [kViEMinRenderTimeoutTimeMs, kViEMaxRenderTimeoutTimeMs]. Returns 0 on
// success, -1 with last error set on failure.
int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
const ViEPicture& picture,
const unsigned int timeout_ms) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
if (picture.type != kVideoI420) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Not a valid picture type.",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidArgument);
return -1;
}
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(video_channel);
if (!renderer) {
// NOTE(review): this path sets kViEFileSetRenderTimeoutError, while the
// file-based overload sets kViEFileInvalidRenderId for the same
// condition — looks like a copy-paste inconsistency; confirm intent.
shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
return -1;
}
I420VideoFrame timeout_image;
if (ViEFileImage::ConvertPictureToI420VideoFrame(
ViEId(shared_data_->instance_id(), video_channel), picture,
&timeout_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to use picture.",
__FUNCTION__, video_channel);
shared_data_->SetLastError(kViEFileInvalidCapture);
return -1;
}
// Clamp the timeout into the supported range, warning on out-of-range
// input.
int32_t timeout_time = timeout_ms;
if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Invalid timeout_ms, using %d.",
__FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
timeout_time = kViEMinRenderTimeoutTimeMs;
}
if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Invalid timeout_ms, using %d.",
__FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
timeout_time = kViEMaxRenderTimeoutTimeMs;
}
if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
return -1;
}
return 0;
}
// Captures a single frame from |capture_id| by temporarily registering a
// ViECaptureSnapshot callback on the capturer and blocking up to
// kViECaptureMaxSnapshotWaitTimeMs for a frame delivery.
// Returns 0 and fills |video_frame| on success, -1 on failure.
int32_t ViEFileImpl::GetNextCapturedFrame(int32_t capture_id,
                                          I420VideoFrame* video_frame) {
  ViEInputManagerScoped is(*(shared_data_->input_manager()));
  ViECapturer* capturer = is.Capture(capture_id);
  if (!capturer) {
    return -1;
  }
  ViECaptureSnapshot* snap_shot = new ViECaptureSnapshot();
  capturer->RegisterFrameCallback(-1, snap_shot);
  bool snapshot_taken = snap_shot->GetSnapshot(kViECaptureMaxSnapshotWaitTimeMs,
                                               video_frame);
  // Deregister before deleting so the capturer can no longer deliver frames
  // into the snapshot object.
  capturer->DeregisterFrameCallback(snap_shot);
  delete snap_shot;
  return snapshot_taken ? 0 : -1;
}
int ViEFileImpl::StartDebugRecording(int video_channel,
const char* file_name_utf8) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StartDebugRecording(file_name_utf8);
}
int ViEFileImpl::StopDebugRecording(int video_channel) {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StopDebugRecording();
}
// Creates an idle snapshot helper; |video_frame_| stays NULL until a
// GetSnapshot() call is waiting for a frame.
ViECaptureSnapshot::ViECaptureSnapshot()
    : crit_(CriticalSectionWrapper::CreateCriticalSection()),
      condition_varaible_(ConditionVariableWrapper::CreateConditionVariable()),
      video_frame_(NULL) {
}
// Frees any frame left behind by a timed-out GetSnapshot() call.
// (delete on NULL is a no-op, so no guard is needed.)
ViECaptureSnapshot::~ViECaptureSnapshot() {
  delete video_frame_;
  video_frame_ = NULL;
}
// Blocks for at most |max_wait_time| ms until DeliverFrame() (called on the
// capture thread) provides a frame. On success the frame is swapped into
// |video_frame| and true is returned; on timeout false is returned and the
// allocated frame is freed later by the destructor.
bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time,
                                     I420VideoFrame* video_frame) {
  crit_->Enter();
  // Allocating |video_frame_| is the signal to DeliverFrame() that a
  // snapshot is wanted; it swaps the captured frame in and wakes us.
  video_frame_ = new I420VideoFrame();
  if (condition_varaible_->SleepCS(*(crit_.get()), max_wait_time)) {
    // Snapshot taken.
    video_frame->SwapFrame(video_frame_);
    delete video_frame_;
    video_frame_ = NULL;
    crit_->Leave();
    return true;
  }
  crit_->Leave();
  return false;
}
void ViECaptureSnapshot::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const uint32_t CSRC[kRtpCsrcSize]) {
CriticalSectionScoped cs(crit_.get());
if (!video_frame_) {
return;
}
video_frame_->SwapFrame(video_frame);
condition_varaible_->WakeAll();
return;
}
#endif
} // namespace webrtc

View File

@ -1,135 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/include/vie_file.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
#include "webrtc/video_engine/vie_ref_count.h"
#include "webrtc/video_engine/vie_shared_data.h"
namespace webrtc {
class ConditionVariableWrapper;
class CriticalSectionWrapper;
class ViESharedData;
// Helper that grabs one frame from a frame provider: a caller blocks in
// GetSnapshot() while DeliverFrame(), invoked on the capture thread,
// supplies the frame and signals the condition variable.
class ViECaptureSnapshot : public ViEFrameCallback {
 public:
  ViECaptureSnapshot();
  ~ViECaptureSnapshot();

  // Blocks up to |max_wait_time| ms for a frame; true on success.
  bool GetSnapshot(unsigned int max_wait_time, I420VideoFrame* video_frame);

  // Implements ViEFrameCallback.
  virtual void DeliverFrame(int id,
                            I420VideoFrame* video_frame,
                            int num_csrcs = 0,
                            const uint32_t CSRC[kRtpCsrcSize] = NULL);
  virtual void DelayChanged(int id, int frame_delay) {}
  // No preferred frame settings; let the provider decide.
  virtual int GetPreferedFrameSettings(int* width,
                                       int* height,
                                       int* frame_rate) {
    return -1;
  }
  virtual void ProviderDestroyed(int id) {}

 private:
  scoped_ptr<CriticalSectionWrapper> crit_;
  scoped_ptr<ConditionVariableWrapper> condition_varaible_;
  // Non-NULL only while a GetSnapshot() call is waiting; guarded by |crit_|.
  I420VideoFrame* video_frame_;
};
// Implementation of the public ViEFile sub-API: file playback/recording,
// snapshots and background/timeout images. Reference-counted via ViERefCount;
// created and owned by the VideoEngine implementation.
class ViEFileImpl
    : public ViEFile,
      public ViERefCount {
 public:
  // Implements ViEFile.
  virtual int Release();
  virtual int StartPlayFile(const char* file_nameUTF8, int& file_id,  // NOLINT
                            const bool loop = false,
                            const FileFormats file_format = kFileFormatAviFile);
  virtual int StopPlayFile(const int file_id);
  virtual int RegisterObserver(int file_id,
                               ViEFileObserver& observer);  // NOLINT
  virtual int DeregisterObserver(int file_id,
                                 ViEFileObserver& observer);  // NOLINT
  virtual int SendFileOnChannel(const int file_id, const int video_channel);
  virtual int StopSendFileOnChannel(const int video_channel);
  virtual int StartPlayFileAsMicrophone(const int file_id,
                                        const int audio_channel,
                                        bool mix_microphone = false,
                                        float volume_scaling = 1);
  virtual int StopPlayFileAsMicrophone(const int file_id,
                                       const int audio_channel);
  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
                                    float volume_scaling = 1);
  virtual int StopPlayAudioLocally(const int file_id, const int audio_channel);
  virtual int StartRecordOutgoingVideo(
      const int video_channel,
      const char* file_nameUTF8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile);
  virtual int StartRecordIncomingVideo(
      const int video_channel,
      const char* file_nameUTF8,
      AudioSource audio_source,
      const CodecInst& audio_codec,
      const VideoCodec& video_codec,
      const FileFormats file_format = kFileFormatAviFile);
  virtual int StopRecordOutgoingVideo(const int video_channel);
  virtual int StopRecordIncomingVideo(const int video_channel);
  virtual int GetFileInformation(
      const char* file_name,
      VideoCodec& video_codec,
      CodecInst& audio_codec,
      const FileFormats file_format = kFileFormatAviFile);
  // Snapshots of the rendered / captured stream, to file or raw picture.
  virtual int GetRenderSnapshot(const int video_channel,
                                const char* file_nameUTF8);
  virtual int GetRenderSnapshot(const int video_channel,
                                ViEPicture& picture);  // NOLINT
  virtual int FreePicture(ViEPicture& picture);  // NOLINT
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       const char* file_nameUTF8);
  virtual int GetCaptureDeviceSnapshot(const int capture_id,
                                       ViEPicture& picture);
  // Images shown before the first frame / after a render timeout.
  virtual int SetRenderStartImage(const int video_channel,
                                  const char* file_nameUTF8);
  virtual int SetRenderStartImage(const int video_channel,
                                  const ViEPicture& picture);
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const char* file_nameUTF8,
                                    const unsigned int timeout_ms);
  virtual int SetRenderTimeoutImage(const int video_channel,
                                    const ViEPicture& picture,
                                    const unsigned int timeout_ms);
  virtual int StartDebugRecording(int video_channel,
                                  const char* file_name_utf8);
  virtual int StopDebugRecording(int video_channel);

 protected:
  explicit ViEFileImpl(ViESharedData* shared_data);
  virtual ~ViEFileImpl();

 private:
  // Blocks until one frame has been captured from |capture_id|.
  int32_t GetNextCapturedFrame(int32_t capture_id, I420VideoFrame* video_frame);

  // Not owned.
  ViESharedData* shared_data_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_

View File

@ -1,504 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/vie_file_player.h"
#include "webrtc/modules/utility/interface/file_player.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_file.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
namespace webrtc {
const int kThreadWaitTimeMs = 100;
// Factory: allocates a player and runs Init(). Returns NULL when
// initialization fails; the partially constructed player is deleted.
ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
    int file_id,
    int engine_id,
    const char* file_nameUTF8,
    const bool loop,
    const FileFormats file_format,
    VoiceEngine* voe_ptr) {
  ViEFilePlayer* player = new ViEFilePlayer(file_id, engine_id);
  if (player && player->Init(file_nameUTF8, loop, file_format, voe_ptr) == 0)
    return player;
  delete player;
  return NULL;
}
// Initializes all members to the "not playing" state; real setup happens in
// Init().
ViEFilePlayer::ViEFilePlayer(int Id,
                             int engine_id)
    : ViEFrameProviderBase(Id, engine_id),
      play_back_started_(false),
      feedback_cs_(NULL),
      audio_cs_(NULL),
      file_player_(NULL),
      audio_stream_(false),
      video_clients_(0),
      audio_clients_(0),
      local_audio_channel_(-1),
      observer_(NULL),
      voe_file_interface_(NULL),
      voe_video_sync_(NULL),
      decode_thread_(NULL),
      decode_event_(NULL),
      decoded_audio_length_(0) {
  memset(file_name_, 0, FileWrapper::kMaxFileNameSize);
  // NOTE(review): |decoded_audio_| is int16_t[kMaxDecodedAudioLength], so
  // this zeroes only half the buffer (bytes vs. samples). Harmless since
  // Read() overwrites it, but sizeof(decoded_audio_) would be exact.
  memset(decoded_audio_, 0, kMaxDecodedAudioLength);
}
// Tears down playback. StopPlay() stops and deletes the decode thread first,
// so the synchronization primitives below are no longer in use when freed.
ViEFilePlayer::~ViEFilePlayer() {
  // StopPlay deletes decode_thread_.
  StopPlay();
  delete decode_event_;
  delete audio_cs_;
  delete feedback_cs_;
}
// Opens |file_nameUTF8| for playback. Tries to open the file with an audio
// stream first, falling back to video-only. When |voice_engine| is provided
// and the file has audio, the VoEFile and VoEVideoSync interfaces are
// acquired so the audio can be played and synced on voice channels.
// Returns 0 on success, -1 on failure (the caller deletes the player on
// failure, which releases acquired resources via StopPlay()).
int ViEFilePlayer::Init(const char* file_nameUTF8,
                        const bool loop,
                        const FileFormats file_format,
                        VoiceEngine* voice_engine) {
  feedback_cs_ = CriticalSectionWrapper::CreateCriticalSection();
  if (!feedback_cs_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
    return -1;
  }
  audio_cs_ = CriticalSectionWrapper::CreateCriticalSection();
  if (!audio_cs_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
    return -1;
  }
  decode_event_ = EventWrapper::Create();
  if (!decode_event_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to allocate event");
    return -1;
  }
  // |file_name_| holds kMaxFileNameSize bytes including the terminating NUL,
  // and the copy below writes strlen() + 1 bytes, so a name of exactly
  // kMaxFileNameSize characters must be rejected as well (was '>', which
  // allowed a one-byte buffer overflow).
  if (strlen(file_nameUTF8) >= FileWrapper::kMaxFileNameSize) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() Too long filename");
    return -1;
  }
  strncpy(file_name_, file_nameUTF8, strlen(file_nameUTF8) + 1);

  file_player_ = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
                                              file_format);
  if (!file_player_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to create file player");
    return -1;
  }
  if (file_player_->RegisterModuleFileCallback(this) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to "
                 "RegisterModuleFileCallback");
    file_player_ = NULL;
    return -1;
  }
  decode_thread_ = ThreadWrapper::CreateThread(FilePlayDecodeThreadFunction,
                                               this, kHighestPriority,
                                               "ViEFilePlayThread");
  if (!decode_thread_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StartPlay() failed to start decode thread.");
    file_player_ = NULL;
    return -1;
  }

  // Always try to open with Audio since we don't know on what channels the
  // audio should be played on.
  int32_t error = file_player_->StartPlayingVideoFile(file_name_, loop, false);
  if (error) {
    // Failed to open the file with audio, try without.
    error = file_player_->StartPlayingVideoFile(file_name_, loop, true);
    audio_stream_ = false;
    if (error) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                   "ViEFilePlayer::StartPlay() failed to Start play video "
                   "file");
      return -1;
    }
  } else {
    audio_stream_ = true;
  }

  if (audio_stream_) {
    if (voice_engine) {
      // A VoiceEngine has been provided and we want to play audio on local
      // a channel.
      voe_file_interface_ = VoEFile::GetInterface(voice_engine);
      if (!voe_file_interface_) {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::StartPlay() failed to get VEFile "
                     "interface");
        return -1;
      }
      voe_video_sync_ = VoEVideoSync::GetInterface(voice_engine);
      if (!voe_video_sync_) {
        WEBRTC_TRACE(kTraceError, kTraceVideo,
                     ViEId(engine_id_, id_),
                     "ViEFilePlayer::StartPlay() failed to get "
                     "VoEVideoSync interface");
        return -1;
      }
    }
  }

  // Read audio /(or just video) every 10ms.
  decode_event_->StartTimer(true, 10);
  return 0;
}
// ViEFrameProviderBase implementation. Called when the number of registered
// frame callbacks changes; lazily starts the decode thread the first time a
// client registers, and restarts playback if it has ended while clients are
// still registered.
int ViEFilePlayer::FrameCallbackChanged() {
  // Starts the decode thread when someone cares.
  if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() >
      video_clients_) {
    if (!play_back_started_) {
      play_back_started_ = true;
      unsigned int thread_id;
      if (decode_thread_->Start(thread_id)) {
        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged() Started file decode"
                     " thread %u", thread_id);
      } else {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged() Failed to start "
                     "file decode thread.");
      }
    } else if (!file_player_->IsPlayingFile()) {
      // Playback previously ended: restart without looping, opening the
      // audio stream only if the file has one.
      if (file_player_->StartPlayingVideoFile(file_name_, false,
                                              !audio_stream_) != 0) {
        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                     "ViEFilePlayer::FrameCallbackChanged(), Failed to restart "
                     "the file player.");
      }
    }
  }
  video_clients_ = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
  return 0;
}
bool ViEFilePlayer::FilePlayDecodeThreadFunction(void* obj) {
return static_cast<ViEFilePlayer*>(obj)->FilePlayDecodeProcess();
}
// Decode-thread body, driven by the 10 ms timer armed in Init(). Reads audio
// when no audio channel is consuming it, decodes the next video frame when
// it is (almost) due, adds the local audio playout delay for A/V sync, and
// delivers the frame to registered callbacks. Returns true to keep running.
bool ViEFilePlayer::FilePlayDecodeProcess() {
  if (decode_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
    if (audio_stream_ && audio_clients_ == 0) {
      // There is audio but no one cares, read the audio here.
      Read(NULL, 0);
    }
    if (file_player_->TimeUntilNextVideoFrame() < 10) {
      // Less than 10ms to next videoframe.
      if (file_player_->GetVideoFromFile(decoded_video_) != 0) {
        // Decode failed; |decoded_video_| stays empty so nothing is
        // delivered below.
      }
    }
    if (!decoded_video_.IsZeroSize()) {
      if (local_audio_channel_ != -1 && voe_video_sync_) {
        // We are playing audio locally.
        int audio_delay = 0;
        if (voe_video_sync_->GetPlayoutBufferSize(audio_delay) == 0) {
          decoded_video_.set_render_time_ms(decoded_video_.render_time_ms() +
                                            audio_delay);
        }
      }
      DeliverFrame(&decoded_video_);
      decoded_video_.ResetSize();
    }
  }
  return true;
}
// Stops the decode thread, all audio usage, and releases the VoE interfaces
// and the file player, in that order. Only called from the destructor.
int ViEFilePlayer::StopPlay() {
  // Only called from destructor.
  if (decode_thread_) {
    decode_thread_->SetNotAlive();
    if (decode_thread_->Stop()) {
      delete decode_thread_;
    } else {
      assert(false && "ViEFilePlayer::StopPlay() Failed to stop decode thread");
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                   "ViEFilePlayer::StartPlay() Failed to stop file decode "
                   "thread.");
    }
  }
  decode_thread_ = NULL;
  if (decode_event_) {
    // Stop the 10 ms tick that drives FilePlayDecodeProcess().
    decode_event_->StopTimer();
  }
  StopPlayAudio();
  if (voe_file_interface_) {
    voe_file_interface_->Release();
    voe_file_interface_ = NULL;
  }
  if (voe_video_sync_) {
    voe_video_sync_->Release();
    voe_video_sync_ = NULL;
  }
  if (file_player_) {
    file_player_->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(file_player_);
    file_player_ = NULL;
  }
  return 0;
}
// Stops every audio use of this file: all sending channels, then local
// playout, then the shared bookkeeping.
int ViEFilePlayer::StopPlayAudio() {
  // Stop sending audio.
  // Re-fetch begin() each iteration because StopSendAudioOnChannel()
  // mutates the set, invalidating the iterator.
  std::set<int>::iterator it = audio_channels_sending_.begin();
  while (it != audio_channels_sending_.end()) {
    StopSendAudioOnChannel(*it);
    // StopSendAudioOnChannel erases the item from the map.
    it = audio_channels_sending_.begin();
  }
  // Stop local audio playback.
  if (local_audio_channel_ != -1) {
    StopPlayAudioLocally(local_audio_channel_);
  }
  local_audio_channel_ = -1;
  audio_channel_buffers_.clear();
  audio_clients_ = 0;
  return 0;
}
// webrtc::InStream implementation, called by VoE for each audio channel that
// plays this file. All channels share one decoded 10 ms chunk: a new chunk
// is read from the file only when the calling channel (identified by its
// |buf| pointer) has already consumed the current one. Returns the number of
// bytes copied into |buf|.
int ViEFilePlayer::Read(void* buf, int len) {
  // Protect from simultaneous reading from multiple channels.
  CriticalSectionScoped lock(audio_cs_);
  if (NeedsAudioFromFile(buf)) {
    // We will run the VoE in 16KHz.
    if (file_player_->Get10msAudioFromFile(decoded_audio_,
                                           decoded_audio_length_, 16000) != 0) {
      // No data.
      decoded_audio_length_ = 0;
      return 0;
    }
    // 2 bytes per sample.
    decoded_audio_length_ *= 2;
    if (buf) {
      // Remember that this channel has consumed the current chunk.
      audio_channel_buffers_.push_back(buf);
    }
  } else {
    // No need for new audiobuffer from file, ie the buffer read from file has
    // not been played on this channel.
  }
  if (buf) {
    memcpy(buf, decoded_audio_, decoded_audio_length_);
  }
  return decoded_audio_length_;
}
// Returns true when a fresh 10 ms chunk must be read from the file for the
// channel identified by |buf|: either no channel has consumed the current
// chunk yet, or this particular channel already has.
bool ViEFilePlayer::NeedsAudioFromFile(void* buf) {
  if (audio_channel_buffers_.empty()) {
    return true;
  }
  // If |buf| appears in the list, that channel already played the current
  // chunk; drop the entry and request a new read.
  for (std::list<void*>::iterator it = audio_channel_buffers_.begin();
       it != audio_channel_buffers_.end(); ++it) {
    if (*it == buf) {
      audio_channel_buffers_.erase(it);
      return true;
    }
  }
  return false;
}
void ViEFilePlayer::PlayFileEnded(const int32_t id) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id),
"%s: file_id %d", __FUNCTION__, id_);
file_player_->StopPlayingFile();
CriticalSectionScoped lock(feedback_cs_);
if (observer_) {
observer_->PlayFileEnded(id_);
}
}
// Returns true if a ViEFileObserver is currently registered.
bool ViEFilePlayer::IsObserverRegistered() {
  CriticalSectionScoped lock(feedback_cs_);
  const bool registered = (observer_ != NULL);
  return registered;
}
// Registers |observer| for end-of-file notifications. Only one observer is
// supported; returns -1 if one is already registered.
int ViEFilePlayer::RegisterObserver(ViEFileObserver* observer) {
  CriticalSectionScoped lock(feedback_cs_);
  if (observer_ != NULL) {
    return -1;
  }
  observer_ = observer;
  return 0;
}
// Clears the registered observer. Always succeeds.
int ViEFilePlayer::DeRegisterObserver() {
  CriticalSectionScoped lock(feedback_cs_);
  observer_ = NULL;
  return 0;
}
// Starts playing this file's audio as the microphone signal on
// |audio_channel|; |this| is registered as the InStream VoE reads from.
// Returns 0 on success, -1 on failure.
int ViEFilePlayer::SendAudioOnChannel(const int audio_channel,
                                      bool mix_microphone,
                                      float volume_scaling) {
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s No VEFile interface.", __FUNCTION__);
    return -1;
  }
  if (voe_file_interface_->StartPlayingFileAsMicrophone(audio_channel, this,
                                                        mix_microphone,
                                                        kFileFormatPcm16kHzFile,
                                                        volume_scaling) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::SendAudioOnChannel() "
                 "VE_StartPlayingFileAsMicrophone failed. audio_channel %d, "
                 " mix_microphone %d, volume_scaling %.2f",
                 audio_channel, mix_microphone, volume_scaling);
    return -1;
  }
  // NOTE(review): |audio_channels_sending_| is modified outside |audio_cs_|
  // while |audio_clients_| below is guarded by it — confirm the set is only
  // touched from the API thread.
  audio_channels_sending_.insert(audio_channel);

  CriticalSectionScoped lock(audio_cs_);
  audio_clients_++;
  return 0;
}
// Stops using this file as microphone input on |audio_channel|. A failed VoE
// stop call is logged but the channel is still removed from the sending set
// (best effort). Returns -1 only when the channel was not sending or no VoE
// interface exists.
int ViEFilePlayer::StopSendAudioOnChannel(const int audio_channel) {
  int result = 0;
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StopSendAudioOnChannel() - no VoE interface");
    return -1;
  }
  std::set<int>::iterator it = audio_channels_sending_.find(audio_channel);
  if (it == audio_channels_sending_.end()) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StopSendAudioOnChannel AudioChannel %d not "
                 "sending", audio_channel);
    return -1;
  }
  result = voe_file_interface_->StopPlayingFileAsMicrophone(audio_channel);
  if (result != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "ViEFilePlayer::StopSendAudioOnChannel() "
                 "VE_StopPlayingFileAsMicrophone failed. audio_channel %d",
                 audio_channel);
  }
  audio_channels_sending_.erase(audio_channel);

  CriticalSectionScoped lock(audio_cs_);
  audio_clients_--;
  assert(audio_clients_ >= 0);
  return 0;
}
// Starts local playout of this file's audio on |audio_channel| via the
// VoEFile interface; |this| is the InStream VoE reads from.
// Returns 0 on success, -1 on failure.
int ViEFilePlayer::PlayAudioLocally(const int audio_channel,
                                    float volume_scaling) {
  if (!voe_file_interface_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s No VEFile interface.", __FUNCTION__);
    return -1;
  }
  if (voe_file_interface_->StartPlayingFileLocally(audio_channel, this,
                                                   kFileFormatPcm16kHzFile,
                                                   volume_scaling) != 0) {
    // Fixed: the old message had four conversion specifiers but only three
    // arguments (undefined behavior when formatted) and named the wrong VoE
    // call.
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
                 "%s VE_StartPlayingFileLocally failed. audio_channel %d,"
                 " volume_scaling %.2f",
                 __FUNCTION__, audio_channel, volume_scaling);
    return -1;
  }
  CriticalSectionScoped lock(audio_cs_);
  local_audio_channel_ = audio_channel;
  audio_clients_++;
  return 0;
}
int ViEFilePlayer::StopPlayAudioLocally(const int audio_channel) {
if (!voe_file_interface_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
if (voe_file_interface_->StopPlayingFileLocally(audio_channel) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
"%s VE_StopPlayingFileLocally failed. audio_channel %d.",
__FUNCTION__, audio_channel);
return -1;
}
CriticalSectionScoped lock(audio_cs_);
local_audio_channel_ = -1;
audio_clients_--;
return 0;
}
// Static helper: opens |file_name| just long enough to read out its video
// (and, if present, audio) codec settings into the out-parameters. Tries to
// open with an audio stream first, falling back to video-only.
// Returns 0 on success, -1 on failure.
int ViEFilePlayer::GetFileInformation(int engine_id,
                                      const char* file_name,
                                      VideoCodec& video_codec,
                                      CodecInst& audio_codec,
                                      const FileFormats file_format) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, engine_id, "%s ", __FUNCTION__);
  FilePlayer* file_player = FilePlayer::CreateFilePlayer(engine_id,
                                                         file_format);
  if (!file_player) {
    return -1;
  }
  bool video_only = false;
  memset(&video_codec, 0, sizeof(video_codec));
  memset(&audio_codec, 0, sizeof(audio_codec));
  // Third argument is "video only": try with audio first, then without.
  if (file_player->StartPlayingVideoFile(file_name, false, false) != 0) {
    video_only = true;
    if (file_player->StartPlayingVideoFile(file_name, false, true) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                   "%s Failed to open file.", __FUNCTION__);
      FilePlayer::DestroyFilePlayer(file_player);
      return -1;
    }
  }
  if (!video_only && file_player->AudioCodec(audio_codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                 "%s Failed to get audio codec.", __FUNCTION__);
    FilePlayer::DestroyFilePlayer(file_player);
    return -1;
  }
  if (file_player->video_codec_info(video_codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
                 "%s Failed to get video codec.", __FUNCTION__);
    FilePlayer::DestroyFilePlayer(file_player);
    return -1;
  }
  FilePlayer::DestroyFilePlayer(file_player);
  return 0;
}
} // namespace webrtc

View File

@ -1,139 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#include <list>
#include <set>
#include "webrtc/common_types.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/modules/media_file/interface/media_file_defines.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
namespace webrtc {
class EventWrapper;
class FilePlayer;
class ThreadWrapper;
class ViEFileObserver;
class VoEFile;
class VoEVideoSync;
class VoiceEngine;
// Plays a media file as a video frame provider, optionally feeding its audio
// into VoiceEngine channels (as microphone input and/or local playout).
// Video is decoded on an internal thread; audio is pulled by VoE through the
// InStream interface.
class ViEFilePlayer
    : public ViEFrameProviderBase,
      protected FileCallback,
      protected InStream {
 public:
  // Factory; returns NULL on failure.
  static ViEFilePlayer* CreateViEFilePlayer(int file_id,
                                            int engine_id,
                                            const char* file_nameUTF8,
                                            const bool loop,
                                            const FileFormats file_format,
                                            VoiceEngine* voe_ptr);

  // Reads codec information from a file without keeping it open.
  static int GetFileInformation(const int engine_id,
                                const char* file_name,
                                VideoCodec& video_codec,
                                CodecInst& audio_codec,
                                const FileFormats file_format);
  ~ViEFilePlayer();

  bool IsObserverRegistered();
  int RegisterObserver(ViEFileObserver* observer);
  int DeRegisterObserver();
  // Audio routing: send as microphone input / play locally on a channel.
  int SendAudioOnChannel(const int audio_channel,
                         bool mix_microphone,
                         float volume_scaling);
  int StopSendAudioOnChannel(const int audio_channel);
  int PlayAudioLocally(const int audio_channel, float volume_scaling);
  int StopPlayAudioLocally(const int audio_channel);

  // Implements ViEFrameProviderBase.
  virtual int FrameCallbackChanged();

 protected:
  ViEFilePlayer(int Id, int engine_id);
  int Init(const char* file_nameUTF8,
           const bool loop,
           const FileFormats file_format,
           VoiceEngine* voe_ptr);
  int StopPlay();
  int StopPlayAudio();

  // File play decode function.
  static bool FilePlayDecodeThreadFunction(void* obj);
  bool FilePlayDecodeProcess();
  bool NeedsAudioFromFile(void* buf);

  // Implements webrtc::InStream.
  virtual int Read(void* buf, int len);
  virtual int Rewind() {
    return 0;
  }

  // Implements FileCallback.
  virtual void PlayNotification(const int32_t /*id*/,
                                const uint32_t /*notification_ms*/) {}
  virtual void RecordNotification(const int32_t /*id*/,
                                  const uint32_t /*notification_ms*/) {}
  virtual void PlayFileEnded(const int32_t id);
  virtual void RecordFileEnded(const int32_t /*id*/) {}

 private:
  static const int kMaxDecodedAudioLength = 320;
  bool play_back_started_;
  CriticalSectionWrapper* feedback_cs_;
  CriticalSectionWrapper* audio_cs_;
  FilePlayer* file_player_;
  bool audio_stream_;
  // Number of active video clients.
  int video_clients_;
  // Number of audio channels sending this audio.
  int audio_clients_;
  // Local audio channel playing this video. Sync video against this.
  int local_audio_channel_;
  ViEFileObserver* observer_;
  char file_name_[FileWrapper::kMaxFileNameSize];
  // VoE Interface.
  VoEFile* voe_file_interface_;
  VoEVideoSync* voe_video_sync_;
  // Thread for decoding video (and audio if no audio clients connected).
  ThreadWrapper* decode_thread_;
  EventWrapper* decode_event_;
  int16_t decoded_audio_[kMaxDecodedAudioLength];
  int decoded_audio_length_;
  // Trick - list containing VoE buffer reading this file. Used if multiple
  // audio channels are sending.
  std::list<void*> audio_channel_buffers_;
  // AudioChannels sending audio from this file.
  std::set<int> audio_channels_sending_;
  // Frame receiving decoded video from file.
  I420VideoFrame decoded_video_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_

View File

@ -1,240 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/vie_file_recorder.h"
#include "webrtc/modules/utility/interface/file_player.h"
#include "webrtc/modules/utility/interface/file_recorder.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
// Creates an idle recorder for engine instance |instanceID|; recording does
// not begin until StartRecording() is called.
ViEFileRecorder::ViEFileRecorder(int instanceID)
    : recorder_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      file_recorder_(NULL),
      is_first_frame_recorded_(false),
      is_out_stream_started_(false),
      instance_id_(instanceID),
      frame_delay_(0),
      audio_channel_(-1),
      audio_source_(NO_AUDIO),
      voe_file_interface_(NULL) {
}
// Stops any active recording before the critical section it relies on is
// destroyed.
ViEFileRecorder::~ViEFileRecorder() {
  StopRecording();
  delete recorder_cs_;
}
// Starts recording video (and, unless |audio_source| is NO_AUDIO and a VoE
// pointer is given, audio) to |file_nameUTF8|. Audio is captured from the
// microphone or channel playout as L16 via the VoEFile interface, with this
// object as the OutStream. Returns 0 on success, -1 on failure; on failure
// all partially acquired resources are released again.
int ViEFileRecorder::StartRecording(const char* file_nameUTF8,
                                    const VideoCodec& codec_inst,
                                    AudioSource audio_source,
                                    int audio_channel,
                                    const CodecInst& audio_codec_inst,
                                    VoiceEngine* voe_ptr,
                                    const FileFormats file_format) {
  CriticalSectionScoped lock(recorder_cs_);
  if (file_recorder_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                 "ViEFileRecorder::StartRecording() - already recording.");
    return -1;
  }
  file_recorder_ = FileRecorder::CreateFileRecorder(instance_id_, file_format);
  if (!file_recorder_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                 "ViEFileRecorder::StartRecording() failed to create recoder.");
    return -1;
  }
  int error = file_recorder_->StartRecordingVideoFile(file_nameUTF8,
                                                      audio_codec_inst,
                                                      codec_inst,
                                                      AMRFileStorage,
                                                      audio_source == NO_AUDIO);
  if (error) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                 "ViEFileRecorder::StartRecording() failed to "
                 "StartRecordingVideoFile.");
    FileRecorder::DestroyFileRecorder(file_recorder_);
    file_recorder_ = NULL;
    return -1;
  }
  audio_source_ = audio_source;
  if (voe_ptr && audio_source != NO_AUDIO) {
    // VoE interface has been provided and we want to record audio.
    voe_file_interface_ = VoEFile::GetInterface(voe_ptr);
    if (!voe_file_interface_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                   "ViEFileRecorder::StartRecording() failed to get VEFile "
                   "interface");
      // Don't leave a video-only recording running when audio was requested
      // (the original code leaked the started recorder here).
      file_recorder_->StopRecording();
      FileRecorder::DestroyFileRecorder(file_recorder_);
      file_recorder_ = NULL;
      return -1;
    }
    // Always L16.
    CodecInst engine_audio_codec_inst = {96, "L16", audio_codec_inst.plfreq,
                                         audio_codec_inst.plfreq / 100, 1,
                                         audio_codec_inst.plfreq * 16 };
    switch (audio_source) {
      // case NO_AUDIO is checked above.
      case MICROPHONE:
        error = voe_file_interface_->StartRecordingMicrophone(
            this, &engine_audio_codec_inst);
        break;
      case PLAYOUT:
        error = voe_file_interface_->StartRecordingPlayout(
            audio_channel, this, &engine_audio_codec_inst);
        break;
      default:
        assert(false && "Unknown audio_source");
    }
    if (error != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                   "ViEFileRecorder::StartRecording() failed to start recording"
                   " audio");
      // Release the VoE interface acquired above so a failed start does not
      // leak a VoE reference.
      voe_file_interface_->Release();
      voe_file_interface_ = NULL;
      FileRecorder::DestroyFileRecorder(file_recorder_);
      file_recorder_ = NULL;
      return -1;
    }
    is_out_stream_started_ = true;
    audio_channel_ = audio_channel;
  }
  is_first_frame_recorded_ = false;
  return 0;
}
// Stops audio recording first — outside the critical section, to avoid
// deadlocking against Write() — then releases the VoE interface and stops
// and destroys the file recorder. Returns 0 on success, -1 if the recorder
// failed to stop.
int ViEFileRecorder::StopRecording() {
  int error = 0;
  // We can not hold the ptr_cs_ while accessing VoE functions. It might cause
  // deadlock in Write.
  if (voe_file_interface_) {
    switch (audio_source_) {
      case MICROPHONE:
        error = voe_file_interface_->StopRecordingMicrophone();
        break;
      case PLAYOUT:
        error = voe_file_interface_->StopRecordingPlayout(audio_channel_);
        break;
      case NO_AUDIO:
        break;
      default:
        assert(false && "Unknown audio_source");
    }
    if (error != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
                   "ViEFileRecorder::StopRecording() failed to stop recording "
                   "audio");
    }
  }
  CriticalSectionScoped lock(recorder_cs_);
  if (voe_file_interface_) {
    voe_file_interface_->Release();
    voe_file_interface_ = NULL;
  }
  if (file_recorder_) {
    if (file_recorder_->IsRecording()) {
      // Distinct name: the original shadowed |error| above here.
      int stop_error = file_recorder_->StopRecording();
      if (stop_error) {
        return -1;
      }
    }
    FileRecorder::DestroyFileRecorder(file_recorder_);
    file_recorder_ = NULL;
  }
  is_first_frame_recorded_ = false;
  is_out_stream_started_ = false;
  return 0;
}
// Stores the current render delay; RecordVideoFrame() uses it to time-shift
// recorded frames for audio/video sync.
void ViEFileRecorder::SetFrameDelay(int frame_delay) {
  CriticalSectionScoped lock(recorder_cs_);
  frame_delay_ = frame_delay;
}
bool ViEFileRecorder::RecordingStarted() {
CriticalSectionScoped lock(recorder_cs_);
return file_recorder_ && file_recorder_->IsRecording();
}
// Returns true once the first video frame has been written to file.
bool ViEFileRecorder::FirstFrameRecorded() {
  CriticalSectionScoped lock(recorder_cs_);
  const bool recorded = is_first_frame_recorded_;
  return recorded;
}
// Returns true if an active recording uses |file_format|. Guards against a
// NULL |file_recorder_| (the original dereferenced it unconditionally and
// would crash if called while not recording, unlike every sibling accessor).
bool ViEFileRecorder::IsRecordingFileFormat(const FileFormats file_format) {
  CriticalSectionScoped lock(recorder_cs_);
  return file_recorder_ && file_recorder_->RecordingFileFormat() == file_format;
}
// Forwards a decoded video frame to the file recorder. Frames are dropped
// unless the recording format is AVI. The frame's timestamps are temporarily
// shifted by |frame_delay_| (ms; 90 ticks/ms for the 90 kHz RTP timestamp)
// for audio/video sync, then restored before returning.
void ViEFileRecorder::RecordVideoFrame(const I420VideoFrame& video_frame) {
  CriticalSectionScoped lock(recorder_cs_);
  if (file_recorder_ && file_recorder_->IsRecording()) {
    // IsRecordingFileFormat() re-acquires recorder_cs_ while it is already
    // held here, so the critical section must be reentrant.
    if (!IsRecordingFileFormat(kFileFormatAviFile))
      return;
    // Compensate for frame delay in order to get audio/video sync when
    // recording local video.
    const uint32_t time_stamp = video_frame.timestamp();
    const int64_t render_time_stamp = video_frame.render_time_ms();
    // NOTE(review): the const_cast mutates the caller's frame in place and
    // restores it below; a concurrent reader of |video_frame| could observe
    // the shifted timestamps.
    I420VideoFrame& unconst_video_frame =
        const_cast<I420VideoFrame&>(video_frame);
    unconst_video_frame.set_timestamp(time_stamp - 90 * frame_delay_);
    unconst_video_frame.set_render_time_ms(render_time_stamp - frame_delay_);
    file_recorder_->RecordVideoToFile(unconst_video_frame);
    // Restore the original timestamps.
    unconst_video_frame.set_render_time_ms(render_time_stamp);
    unconst_video_frame.set_timestamp(time_stamp);
  }
}
bool ViEFileRecorder::Write(const void* buf, int len) {
if (!is_out_stream_started_)
return true;
// Always 10 ms L16 from VoE.
if (len % (2 * 80)) {
// Not 2 bytes 80 samples.
WEBRTC_TRACE(kTraceError, kTraceVideo, audio_channel_,
"Audio length not supported: %d.", len);
return true;
}
AudioFrame audio_frame;
uint16_t length_in_samples = len / 2;
audio_frame.UpdateFrame(audio_channel_, 0,
static_cast<const int16_t*>(buf),
length_in_samples, length_in_samples * 100,
AudioFrame::kUndefined,
AudioFrame::kVadUnknown);
CriticalSectionScoped lock(recorder_cs_);
if (file_recorder_ && file_recorder_->IsRecording()) {
TickTime tick_time = TickTime::Now();
file_recorder_->RecordAudioToFile(audio_frame, &tick_time);
}
// Always return true to continue recording.
return true;
}
// Rewinding the recorded output stream is intentionally unsupported.
int ViEFileRecorder::Rewind() {
  return -1;  // Not supported!
}
} // namespace webrtc

View File

@ -1,65 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
#include "webrtc/modules/utility/interface/file_recorder.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/include/vie_file.h"
#include "webrtc/voice_engine/include/voe_file.h"
namespace webrtc {
class CriticalSectionWrapper;
// Records local audio (pulled from VoE through the OutStream::Write()
// callback) and decoded video (pushed in via RecordVideoFrame()) to a single
// media file.
class ViEFileRecorder : protected OutStream {
 public:
  explicit ViEFileRecorder(int channel_id);
  ~ViEFileRecorder();
  // Starts recording to |file_nameUTF8| with the given video codec, audio
  // source/codec and file format. Returns 0 on success.
  int StartRecording(const char* file_nameUTF8,
                     const VideoCodec& codec_inst,
                     AudioSource audio_source, int audio_channel,
                     const CodecInst& audio_codec_inst,
                     VoiceEngine* voe_ptr,
                     const FileFormats file_format = kFileFormatAviFile);
  int StopRecording();
  // Sets the delay used to compensate recorded frame timestamps for A/V sync.
  void SetFrameDelay(int frame_delay);
  bool RecordingStarted();
  // Records incoming decoded video frame to file.
  void RecordVideoFrame(const I420VideoFrame& video_frame);

 protected:
  bool FirstFrameRecorded();
  bool IsRecordingFileFormat(const FileFormats file_format);
  // Implements OutStream.
  bool Write(const void* buf, int len);
  int Rewind();

 private:
  CriticalSectionWrapper* recorder_cs_;  // Guards the state below.
  FileRecorder* file_recorder_;
  bool is_first_frame_recorded_;
  bool is_out_stream_started_;
  int instance_id_;
  int frame_delay_;  // A/V sync compensation, applied in RecordVideoFrame().
  int audio_channel_;
  AudioSource audio_source_;
  VoEFile* voe_file_interface_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_

View File

@ -22,7 +22,6 @@
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_file_player.h"
namespace webrtc {
@ -40,10 +39,6 @@ ViEInputManager::ViEInputManager(const int engine_id, const Config& config)
for (int idx = 0; idx < kViEMaxCaptureDevices; idx++) {
free_capture_device_id_[idx] = true;
}
for (int idx = 0; idx < kViEMaxFilePlayers; idx++) {
free_file_id_[idx] = true;
}
}
ViEInputManager::~ViEInputManager() {
@ -394,83 +389,6 @@ int ViEInputManager::CreateExternalCaptureDevice(
return 0;
}
// Creates a ViEFilePlayer for |file_nameUTF8| and registers it in the frame
// provider map. On success writes the allocated player id to |file_id| and
// returns 0; otherwise returns a kViEFile* error code.
int ViEInputManager::CreateFilePlayer(const char* file_nameUTF8,
                                      const bool loop,
                                      const webrtc::FileFormats file_format,
                                      VoiceEngine* voe_ptr, int& file_id) {
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
               "%s(device_unique_id: %s)", __FUNCTION__, file_nameUTF8);
  CriticalSectionScoped cs(map_cs_.get());
  int new_file_id = 0;
  if (GetFreeFileId(&new_file_id) == false) {
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
                 "%s: Maximum supported number of file players already in use",
                 __FUNCTION__);
    return kViEFileMaxNoOfFilesOpened;
  }
  ViEFilePlayer* vie_file_player = ViEFilePlayer::CreateViEFilePlayer(
      new_file_id, engine_id_, file_nameUTF8, loop, file_format, voe_ptr);
  if (!vie_file_player) {
    ReturnFileId(new_file_id);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
                 "%s: Could not open file %s for playback", __FUNCTION__,
                 file_nameUTF8);
    return kViEFileUnknownError;
  }
  if (vie_frame_provider_map_.Insert(new_file_id, vie_file_player) != 0) {
    // Bug fix: |new_file_id| was allocated by GetFreeFileId(), so it must be
    // released with ReturnFileId(). The original code called
    // ReturnCaptureId() here, leaking the file player slot and touching the
    // capture id table instead.
    ReturnFileId(new_file_id);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
                 "%s: Could not insert file player for %s", __FUNCTION__,
                 file_nameUTF8);
    delete vie_file_player;
    return kViEFileUnknownError;
  }
  file_id = new_file_id;
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
               "%s(filename: %s, file_id: %d)", __FUNCTION__, file_nameUTF8,
               new_file_id);
  return 0;
}
// Unregisters and deletes the file player identified by |file_id|.
// Returns 0 on success, -1 if no such player exists. A player with frame
// callbacks still registered is only warned about and destroyed anyway.
int ViEInputManager::DestroyFilePlayer(int file_id) {
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
               "%s(file_id: %d)", __FUNCTION__, file_id);
  ViEFilePlayer* vie_file_player = NULL;
  {
    // We need exclusive access to the object to delete it.
    // Take this write lock first since the read lock is taken before map_cs_.
    ViEManagerWriteScoped wl(this);
    CriticalSectionScoped cs(map_cs_.get());
    vie_file_player = ViEFilePlayerPtr(file_id);
    if (!vie_file_player) {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
                   "%s(file_id: %d) - No such file player", __FUNCTION__,
                   file_id);
      return -1;
    }
    int num_callbacks = vie_file_player->NumberOfRegisteredFrameCallbacks();
    if (num_callbacks > 0) {
      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
                   ViEId(engine_id_), "%s(file_id: %d) - %u registered "
                   "callbacks when destroying file player", __FUNCTION__,
                   file_id, num_callbacks);
    }
    vie_frame_provider_map_.Erase(file_id);
    ReturnFileId(file_id);
    // Leave cs before deleting the file object. This is because deleting the
    // object might cause deletions of renderers so we prefer to not have a lock
    // at that time.
  }
  delete vie_file_player;
  return 0;
}
bool ViEInputManager::GetFreeCaptureId(int* freecapture_id) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
__FUNCTION__);
@ -498,35 +416,6 @@ void ViEInputManager::ReturnCaptureId(int capture_id) {
return;
}
// Finds and claims an unused file player id. On success writes the allocated
// id (offset by kViEFileIdBase) to |free_file_id| and returns true; returns
// false when all kViEMaxFilePlayers slots are in use. Per the header, the
// caller is assumed to hold map_cs_.
bool ViEInputManager::GetFreeFileId(int* free_file_id) {
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
               __FUNCTION__);
  for (int id = 0; id < kViEMaxFilePlayers; id++) {
    if (free_file_id_[id]) {
      // We found a free file player id (comment previously said "capture
      // device id" — copy-paste from GetFreeCaptureId).
      free_file_id_[id] = false;
      *free_file_id = id + kViEFileIdBase;
      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
                   "%s: new id: %d", __FUNCTION__, *free_file_id);
      return true;
    }
  }
  return false;
}
// Marks a file player id previously handed out by GetFreeFileId() as free
// again. Ids outside the valid file player range are ignored.
void ViEInputManager::ReturnFileId(int file_id) {
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
               "%s(%d)", __FUNCTION__, file_id);
  CriticalSectionScoped cs(map_cs_.get());
  const int index = file_id - kViEFileIdBase;
  if (index >= 0 && index < kViEMaxFilePlayers)
    free_file_id_[index] = true;
}
ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(
const ViEFrameCallback* capture_observer) const {
assert(capture_observer);
@ -572,20 +461,6 @@ ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
return vie_capture;
}
// Looks up the ViEFilePlayer registered under |file_id|. Returns NULL when
// the id is outside the file player range or no such player is registered.
ViEFilePlayer* ViEInputManager::ViEFilePlayerPtr(int file_id) const {
  if (file_id < kViEFileIdBase || file_id > kViEFileIdMax)
    return NULL;
  CriticalSectionScoped cs(map_cs_.get());
  MapItem* item = vie_frame_provider_map_.Find(file_id);
  return item ? static_cast<ViEFilePlayer*>(item->GetItem()) : NULL;
}
ViEInputManagerScoped::ViEInputManagerScoped(
const ViEInputManager& vie_input_manager)
: ViEManagerScopedBase(vie_input_manager) {
@ -608,9 +483,4 @@ ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
provider_id);
}
// Returns the file player for |file_id|, or NULL if none exists. Simply
// forwards to ViEInputManager::ViEFilePlayerPtr() on the scoped manager.
ViEFilePlayer* ViEInputManagerScoped::FilePlayer(int file_id) const {
  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFilePlayerPtr(
      file_id);
}
} // namespace webrtc

View File

@ -28,7 +28,6 @@ class ProcessThread;
class RWLockWrapper;
class ViECapturer;
class ViEExternalCapture;
class ViEFilePlayer;
class VoiceEngine;
class ViEInputManager : private ViEManagerBase {
@ -78,12 +77,6 @@ class ViEInputManager : private ViEManagerBase {
int& capture_id);
int DestroyCaptureDevice(int capture_id);
int CreateFilePlayer(const char* file_nameUTF8, const bool loop,
const FileFormats file_format,
VoiceEngine* voe_ptr,
int& file_id);
int DestroyFilePlayer(int file_id);
private:
// Gets and allocates a free capture device id. Assumed protected by caller.
bool GetFreeCaptureId(int* freecapture_id);
@ -91,12 +84,6 @@ class ViEInputManager : private ViEManagerBase {
// Frees a capture id assigned in GetFreeCaptureId.
void ReturnCaptureId(int capture_id);
// Gets and allocates a free file id. Assumed protected by caller.
bool GetFreeFileId(int* free_file_id);
// Frees a file id assigned in GetFreeFileId.
void ReturnFileId(int file_id);
// Gets the ViEFrameProvider for this capture observer.
ViEFrameProviderBase* ViEFrameProvider(
const ViEFrameCallback* capture_observer) const;
@ -107,9 +94,6 @@ class ViEInputManager : private ViEManagerBase {
// Gets the ViECapturer for the capture device id.
ViECapturer* ViECapturePtr(int capture_id) const;
// Gets the ViEFilePlayer for this file_id.
ViEFilePlayer* ViEFilePlayerPtr(int file_id) const;
const Config& config_;
int engine_id_;
scoped_ptr<CriticalSectionWrapper> map_cs_;
@ -120,9 +104,6 @@ class ViEInputManager : private ViEManagerBase {
VideoCaptureModule::DeviceInfo* capture_device_info_;
int free_capture_device_id_[kViEMaxCaptureDevices];
// File Players.
int free_file_id_[kViEMaxFilePlayers];
ProcessThread* module_process_thread_; // Weak.
};
@ -132,7 +113,6 @@ class ViEInputManagerScoped: private ViEManagerScopedBase {
explicit ViEInputManagerScoped(const ViEInputManager& vie_input_manager);
ViECapturer* Capture(int capture_id) const;
ViEFilePlayer* FilePlayer(int file_id) const;
ViEFrameProviderBase* FrameProvider(int provider_id) const;
ViEFrameProviderBase* FrameProvider(const ViEFrameCallback*
capture_observer) const;