git-svn-id: http://webrtc.googlecode.com/svn/trunk@158 4adac7df-926f-26a2-2b94-8c16560cd09d

niklase@google.com 2011-07-07 08:25:09 +00:00
parent cc63de44ae
commit a207f59704
183 changed files with 0 additions and 46192 deletions

@ -1,4 +0,0 @@
mflodman@google.com
perkj@google.com
ronghuawu@google.com
mallinath@google.com

@ -1,153 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
//
// - Creating and deleting VideoEngine instances.
// - Creating and deleting channels.
// - Connecting a video channel with a corresponding voice channel for audio/video synchronization.
// - Starting and stopping sending and receiving.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_BASE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_BASE_H_
#include "common_types.h"
// Forward declarations
namespace webrtc
{
class VoiceEngine;
// ----------------------------------------------------------------------------
// VideoEngine Callbacks
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEBaseObserver
{
public:
// This method will be called periodically if the average system CPU usage
// exceeds 75%.
virtual void PerformanceAlarm(const unsigned int cpuLoad) = 0;
protected:
virtual ~ViEBaseObserver() {};
};
// ----------------------------------------------------------------------------
// VideoEngine
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT VideoEngine
{
public:
// Creates a VideoEngine object, which can then be used to acquire sub-APIs.
static VideoEngine* Create();
// Deletes a VideoEngine instance.
static bool Delete(VideoEngine*& videoEngine);
// Specifies the amount and type of trace information, which will be created
// by the VideoEngine.
static int SetTraceFilter(const unsigned int filter);
// Sets the name of the trace file and enables nonencrypted trace messages.
static int SetTraceFile(const char* fileNameUTF8,
const bool addFileCounter = false);
// Installs the TraceCallback implementation to ensure that the VideoEngine
// user receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback);
// Android specific
// Provides VideoEngine with pointers to objects supplied by the Java
// application's JNI interface.
static int SetAndroidObjects(void* javaVM, void* javaContext);
protected:
VideoEngine() {};
virtual ~VideoEngine() {};
};
// ----------------------------------------------------------------------------
// ViEBase
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEBase
{
public:
// Factory method for the ViEBase sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViEBase* GetInterface(VideoEngine* videoEngine);
// Releases the ViEBase sub-API and decreases an internal reference counter.
// Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Initiates all common parts of the VideoEngine.
virtual int Init() = 0;
// Connects a VideoEngine instance to a VoiceEngine instance for audio video
// synchronization.
virtual int SetVoiceEngine(VoiceEngine* ptrVoiceEngine) = 0;
// Creates a new channel, either with a new encoder instance or by sharing
// the encoder instance of an already created channel.
virtual int CreateChannel(int& videoChannel) = 0;
virtual int CreateChannel(int& videoChannel, int originalChannel) = 0;
// Deletes an existing channel and releases the utilized resources.
virtual int DeleteChannel(const int videoChannel) = 0;
// Specifies the VoiceEngine and VideoEngine channel pair to use for
// audio/video synchronization.
virtual int ConnectAudioChannel(const int videoChannel,
const int audioChannel) = 0;
// Disconnects a previously paired VideoEngine and VoiceEngine channel pair.
virtual int DisconnectAudioChannel(const int videoChannel) = 0;
// Starts sending packets to an already specified IP address and port number
// for a specified channel.
virtual int StartSend(const int videoChannel) = 0;
// Stops packets from being sent for a specified channel.
virtual int StopSend(const int videoChannel) = 0;
// Prepares VideoEngine for receiving packets on the specified channel.
virtual int StartReceive(const int videoChannel) = 0;
// Stops receiving incoming RTP and RTCP packets on the specified channel.
virtual int StopReceive(const int videoChannel) = 0;
// Registers an instance of a user implementation of the ViEBase
// observer.
virtual int RegisterObserver(ViEBaseObserver& observer) = 0;
// Removes an already registered instance of ViEBaseObserver.
virtual int DeregisterObserver() = 0;
// Retrieves the version information for VideoEngine and its components.
virtual int GetVersion(char version[1024]) = 0;
// Returns the last VideoEngine error code.
virtual int LastError() = 0;
protected:
ViEBase() {};
virtual ~ViEBase(){};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_BASE_H_
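A minimal usage sketch of the ViEBase sub-API above, based only on the declarations in this header. Error checking is mostly omitted, the voice channel id 0 and the include path are placeholders, and voice_engine is assumed to be an already created webrtc::VoiceEngine instance.

#include "vie_base.h"

int RunBasicCall(webrtc::VoiceEngine* voice_engine) {
  webrtc::VideoEngine* video_engine = webrtc::VideoEngine::Create();
  if (video_engine == NULL)
    return -1;
  webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(video_engine);
  if (base == NULL)
    return -1;
  base->Init();
  base->SetVoiceEngine(voice_engine);
  int video_channel = -1;
  base->CreateChannel(video_channel);
  base->ConnectAudioChannel(video_channel, 0);  // Pair with voice channel 0.
  base->StartReceive(video_channel);
  base->StartSend(video_channel);
  // ... the call runs here ...
  base->StopSend(video_channel);
  base->StopReceive(video_channel);
  base->DeleteChannel(video_channel);
  base->Release();  // The reference count must reach zero before Delete().
  webrtc::VideoEngine::Delete(video_engine);
  return 0;
}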

@ -1,234 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
//
// - Allocating capture devices.
// - Connecting a capture device to one or more channels.
// - Starting and stopping capture devices.
// - Getting capture device capabilities.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CAPTURE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CAPTURE_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
class VideoCaptureModule;
// This structure describes one set of the supported capabilities for a capture
// device.
struct CaptureCapability
{
unsigned int width;
unsigned int height;
unsigned int maxFPS;
RawVideoType rawType;
VideoCodecType codecType;
unsigned int expectedCaptureDelay;
bool interlaced;
CaptureCapability()
{
width = 0;
height = 0;
maxFPS = 0;
rawType = kVideoI420;
codecType = kVideoCodecUnknown;
expectedCaptureDelay = 0;
interlaced = false;
}
};
// This enumerator tells the current brightness alarm mode.
enum Brightness
{
Normal = 0,
Bright = 1,
Dark = 2
};
// This enumerator describes the capture alarm mode.
enum CaptureAlarm
{
AlarmRaised = 0,
AlarmCleared = 1
};
enum RotateCapturedFrame
{
RotateCapturedFrame_0 = 0,
RotateCapturedFrame_90 = 90,
RotateCapturedFrame_180 = 180,
RotateCapturedFrame_270 = 270
};
// This class declares an abstract interface to be used when using an external
// capture device. The user implemented derived class is registered using
// AllocateExternalCaptureDevice and is released using ReleaseCaptureDevice.
class WEBRTC_DLLEXPORT ViEExternalCapture
{
public:
ViEExternalCapture() {}
virtual ~ViEExternalCapture() {}
// This method is called by the user to deliver a new captured frame to
// VideoEngine.
virtual int IncomingFrame(unsigned char* videoFrame,
unsigned int videoFrameLength,
unsigned short width, unsigned short height,
RawVideoType videoType,
unsigned long long captureTime = 0) = 0;
};
// ----------------------------------------------------------------------------
// ViECaptureObserver
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViECaptureObserver
{
public:
// This method is called if a bright or dark captured image is detected.
virtual void BrightnessAlarm(const int captureId,
const Brightness brightness) = 0;
// This method is called periodically, reporting the current capture device frame rate.
virtual void CapturedFrameRate(const int captureId,
const unsigned char frameRate) = 0;
// This method is called if the capture device stops delivering images to
// VideoEngine.
virtual void NoPictureAlarm(const int captureId,
const CaptureAlarm alarm) = 0;
protected:
virtual ~ViECaptureObserver()
{
}
};
// ----------------------------------------------------------------------------
// ViECapture
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViECapture
{
public:
// Factory method for the ViECapture sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViECapture* GetInterface(VideoEngine* videoEngine);
// Releases the ViECapture sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Gets the number of available capture devices.
virtual int NumberOfCaptureDevices() = 0;
// Gets the name and unique id of a capture device.
virtual int GetCaptureDevice(unsigned int listNumber, char* deviceNameUTF8,
const unsigned int deviceNameUTF8Length,
char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length) = 0;
// Allocates a capture device to be used in VideoEngine.
virtual int AllocateCaptureDevice(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
int& captureId) = 0;
// Registers an external capture device to be used in VideoEngine
virtual int AllocateExternalCaptureDevice(
int& captureId, ViEExternalCapture *&externalCapture) = 0;
// Allocates a capture device, using an externally created capture module.
virtual int AllocateCaptureDevice(VideoCaptureModule& captureModule,
int& captureId) = 0;
// Releases a capture device and makes it available for other applications.
virtual int ReleaseCaptureDevice(const int captureId) = 0;
// This function connects a capture device with a channel. Multiple channels
// can be connected to the same capture device.
virtual int ConnectCaptureDevice(const int captureId,
const int videoChannel) = 0;
// Disconnects a capture device as input for a specified channel.
virtual int DisconnectCaptureDevice(const int videoChannel) = 0;
// Makes a capture device start capturing video frames.
virtual int StartCapture(const int captureId,
const CaptureCapability captureCapability =
CaptureCapability()) = 0;
// Stops a started capture device from capturing video frames.
virtual int StopCapture(const int captureId) = 0;
// Rotates captured frames before encoding and sending.
// Used on mobile devices with rotated cameras.
virtual int SetRotateCapturedFrames(const int captureId,
const RotateCapturedFrame rotation) = 0;
// This function sets the expected delay from when a video frame is captured
// to when that frame is delivered to VideoEngine.
virtual int SetCaptureDelay(const int captureId,
const unsigned int captureDelayMs) = 0;
// Returns the number of sets of capture capabilities the capture device
// supports.
virtual int NumberOfCapabilities(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length) = 0;
// Gets a set of capture capabilities for a specified capture device.
virtual int GetCaptureCapability(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
const unsigned int capabilityNumber,
CaptureCapability& capability) = 0;
// Displays the capture device property dialog box for the specified capture
// device. Windows only.
virtual int ShowCaptureSettingsDialogBox(
const char* uniqueIdUTF8, const unsigned int uniqueIdUTF8Length,
const char* dialogTitle, void* parentWindow = NULL,
const unsigned int x = 200, const unsigned int y = 200) = 0;
// Gets the clockwise angle the frames from the camera must be rotated by in
// order to be displayed correctly on a display held in its natural
// orientation.
virtual int GetOrientation(const char* uniqueIdUTF8,
RotateCapturedFrame &orientation) = 0;
// Enables brightness alarm detection and the brightness alarm callback.
virtual int EnableBrightnessAlarm(const int captureId,
const bool enable) = 0;
// Registers an instance of a user implementation of the ViECaptureObserver.
virtual int RegisterObserver(const int captureId,
ViECaptureObserver& observer) = 0;
// Removes an already registered instance of ViECaptureObserver.
virtual int DeregisterObserver(const int captureId) = 0;
protected:
ViECapture() {};
virtual ~ViECapture() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CAPTURE_H_
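A sketch of how the capture sub-API above is typically driven: enumerate devices, allocate the first one, connect it to a channel and start it. The buffer sizes and the 640x480/30 fps capability are placeholder choices, and video_engine/video_channel are assumed to come from a ViEBase setup like the earlier sketch.

#include <string.h>

#include "vie_capture.h"

int StartFirstCamera(webrtc::VideoEngine* video_engine, int video_channel) {
  webrtc::ViECapture* capture = webrtc::ViECapture::GetInterface(video_engine);
  if (capture == NULL)
    return -1;
  char device_name[128];
  char unique_id[256];
  if (capture->NumberOfCaptureDevices() < 1 ||
      capture->GetCaptureDevice(0, device_name, sizeof(device_name),
                                unique_id, sizeof(unique_id)) != 0) {
    capture->Release();
    return -1;
  }
  int capture_id = -1;
  capture->AllocateCaptureDevice(unique_id, strlen(unique_id), capture_id);
  capture->ConnectCaptureDevice(capture_id, video_channel);
  webrtc::CaptureCapability capability;
  capability.width = 640;
  capability.height = 480;
  capability.maxFPS = 30;
  capture->StartCapture(capture_id, capability);
  // Releasing the sub-API only drops its reference count; capturing
  // continues on the connected channel.
  return capture->Release();
}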

@ -1,185 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Setting send and receive codecs.
// - Codec specific settings.
// - Key frame signaling.
// - Stream management settings.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CODEC_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
struct VideoCodec;
// ----------------------------------------------------------------------------
// ViEEncoderObserver
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterEncoderObserver()
// and deregistered using DeregisterEncoderObserver().
class WEBRTC_DLLEXPORT ViEEncoderObserver
{
public:
// This method is called once per second with the current encoded frame rate
// and bit rate.
virtual void OutgoingRate(const int videoChannel,
const unsigned int framerate,
const unsigned int bitrate) = 0;
protected:
virtual ~ViEEncoderObserver() {};
};
// ----------------------------------------------------------------------------
// ViEDecoderObserver
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterDecoderObserver()
// and deregistered using DeregisterDecoderObserver().
class WEBRTC_DLLEXPORT ViEDecoderObserver
{
public:
// This method is called when a new incoming stream is detected, normally
// triggered by a new incoming SSRC or payload type.
virtual void IncomingCodecChanged(const int videoChannel,
const VideoCodec& videoCodec) = 0;
// This method is called once per second with the frame rate and bit rate
// for the incoming stream.
virtual void IncomingRate(const int videoChannel,
const unsigned int framerate,
const unsigned int bitrate) = 0;
// This method is called when the decoder needs a new key frame from the
// encoder on the sending side.
virtual void RequestNewKeyFrame(const int videoChannel) = 0;
protected:
virtual ~ViEDecoderObserver() {};
};
// ----------------------------------------------------------------------------
// ViECodec
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViECodec
{
public:
// Factory method for the ViECodec sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViECodec* GetInterface(VideoEngine* videoEngine);
// Releases the ViECodec sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Gets the number of available codecs for the VideoEngine build.
virtual int NumberOfCodecs() const = 0;
// Gets a VideoCodec struct for a codec containing the default configuration
// for that codec type.
virtual int GetCodec(const unsigned char listNumber,
VideoCodec& videoCodec) const = 0;
// Sets the send codec to use for a specified channel.
virtual int SetSendCodec(const int videoChannel,
const VideoCodec& videoCodec) = 0;
// Gets the current send codec settings.
virtual int GetSendCodec(const int videoChannel,
VideoCodec& videoCodec) const = 0;
// Prepares VideoEngine to receive a certain codec type and setting for a
// specified payload type.
virtual int SetReceiveCodec(const int videoChannel,
const VideoCodec& videoCodec) = 0;
// Gets the current receive codec.
virtual int GetReceiveCodec(const int videoChannel,
VideoCodec& videoCodec) const = 0;
// This function is used to get codec configuration parameters to be
// signaled from the encoder to the decoder in the call setup.
virtual int GetCodecConfigParameters(
const int videoChannel,
unsigned char configParameters[kConfigParameterSize],
unsigned char& configParametersSize) const = 0;
// Enables advanced scaling of the captured video stream if the stream
// differs from the send codec settings.
virtual int SetImageScaleStatus(const int videoChannel,
const bool enable) = 0;
// Gets the number of sent key frames and number of sent delta frames.
virtual int GetSendCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const = 0;
// Gets the number of decoded key frames and number of decoded delta frames.
virtual int GetReceiveCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const = 0;
// Enables key frame request callback in ViEDecoderObserver.
virtual int SetKeyFrameRequestCallbackStatus(const int videoChannel,
const bool enable) = 0;
// Enables key frame requests for detected lost packets.
virtual int SetSignalKeyPacketLossStatus(
const int videoChannel, const bool enable,
const bool onlyKeyFrames = false) = 0;
// Registers an instance of a user implementation of the ViEEncoderObserver.
virtual int RegisterEncoderObserver(const int videoChannel,
ViEEncoderObserver& observer) = 0;
// Removes an already registered instance of ViEEncoderObserver.
virtual int DeregisterEncoderObserver(const int videoChannel) = 0;
// Registers an instance of a user implementation of the ViEDecoderObserver.
virtual int RegisterDecoderObserver(const int videoChannel,
ViEDecoderObserver& observer) = 0;
// Removes an already registered instance of ViEDecoderObserver.
virtual int DeregisterDecoderObserver(const int videoChannel) = 0;
// This function forces the next encoded frame to be a key frame. This is
// normally used when the remote endpoint only supports out-of-band key
// frame requests.
virtual int SendKeyFrame(const int videoChannel) = 0;
// This function makes the decoder wait for a key frame before starting to
// decode the incoming video stream.
virtual int WaitForFirstKeyFrame(const int videoChannel,
const bool wait) = 0;
// This function makes VideoEngine decode all incoming H.263 key frames as
// delta frames and all incoming delta frames as key frames.
virtual int SetInverseH263Logic(int videoChannel, bool enable) = 0;
protected:
ViECodec() {};
virtual ~ViECodec() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_CODEC_H_
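A sketch that walks the built-in codec list and applies the first VP8 entry as both send and receive codec. The kVideoCodecVP8 value and the startBitrate field are taken from common_types.h as assumptions; they are not declared in the header above.

#include "vie_codec.h"

int ApplyVp8(webrtc::VideoEngine* video_engine, int video_channel) {
  webrtc::ViECodec* codec = webrtc::ViECodec::GetInterface(video_engine);
  if (codec == NULL)
    return -1;
  webrtc::VideoCodec video_codec;
  for (int i = 0; i < codec->NumberOfCodecs(); ++i) {
    if (codec->GetCodec(static_cast<unsigned char>(i), video_codec) == 0 &&
        video_codec.codecType == webrtc::kVideoCodecVP8) {
      video_codec.startBitrate = 500;  // kbit/s; assumed field name.
      codec->SetSendCodec(video_channel, video_codec);
      codec->SetReceiveCodec(video_channel, video_codec);
      break;
    }
  }
  return codec->Release();
}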

@ -1,91 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Secure RTP (SRTP).
// - External encryption and decryption.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ENCRYPTION_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ENCRYPTION_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
// ----------------------------------------------------------------------------
// ViEEncryption
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEEncryption
{
public:
enum
{
kMaxSrtpKeyLength = 30
};
// Factory method for the ViEEncryption sub-API. Increases an internal
// reference counter if successful. Returns NULL if the API is not supported
// or if construction fails.
static ViEEncryption* GetInterface(VideoEngine* videoEngine);
// Releases the ViEEncryption sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// This function enables SRTP on send packets for a specific channel.
virtual int EnableSRTPSend(const int videoChannel,
const CipherTypes cipherType,
const unsigned int cipherKeyLength,
const AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SecurityLevels level,
const unsigned char key[kMaxSrtpKeyLength],
const bool useForRTCP = false) = 0;
// This function disables SRTP for the specified channel.
virtual int DisableSRTPSend(const int videoChannel) = 0;
// This function enables SRTP on the received packets for a specific
// channel.
virtual int EnableSRTPReceive(const int videoChannel,
const CipherTypes cipherType,
const unsigned int cipherKeyLength,
const AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SecurityLevels level,
const unsigned char key[kMaxSrtpKeyLength],
const bool useForRTCP = false) = 0;
// This function disables SRTP on received packets for a specific channel.
virtual int DisableSRTPReceive(const int videoChannel) = 0;
// This function registers an Encryption-derived instance and enables
// external encryption for the specified channel.
virtual int RegisterExternalEncryption(const int videoChannel,
Encryption& encryption) = 0;
// This function deregisters a previously registered Encryption-derived instance
// and disables external encryption.
virtual int DeregisterExternalEncryption(const int videoChannel) = 0;
protected:
ViEEncryption() {};
virtual ~ViEEncryption() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ENCRYPTION_H_
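A sketch of enabling SRTP in both directions with a 30-byte master key. The cipher, authentication and security-level enum values are assumed to come from common_types.h; the key and tag lengths are illustrative, not prescribed by the header above.

#include "vie_encryption.h"

int TurnOnSrtp(webrtc::VideoEngine* video_engine, int video_channel,
               const unsigned char key[webrtc::ViEEncryption::kMaxSrtpKeyLength]) {
  webrtc::ViEEncryption* encryption =
      webrtc::ViEEncryption::GetInterface(video_engine);
  if (encryption == NULL)
    return -1;
  // The enum values below are assumptions taken from common_types.h.
  encryption->EnableSRTPSend(video_channel,
                             webrtc::kCipherAes128CounterMode, 30,
                             webrtc::kAuthHmacSha1, 20, 4,
                             webrtc::kEncryptionAndAuthentication, key);
  encryption->EnableSRTPReceive(video_channel,
                                webrtc::kCipherAes128CounterMode, 30,
                                webrtc::kAuthHmacSha1, 20, 4,
                                webrtc::kEncryptionAndAuthentication, key);
  return encryption->Release();
}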

@ -1,121 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ERRORS_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ERRORS_H_
enum ViEErrors {
//ViEBase
kViENotInitialized = 12000, // Init has not been called successfully.
kViEBaseVoEFailure, // SetVoiceEngine: ViE failed to use the VoiceEngine instance; check the instance pointer. ConnectAudioChannel: failed to set the voice channel; has SetVoiceEngine been called and is the voice channel id correct?
kViEBaseChannelCreationFailed, // CreateChannel.
kViEBaseInvalidChannelId, // The channel does not exist.
kViEAPIDoesNotExist, // Release called on Interface that has not been created.
kViEBaseInvalidArgument,
kViEBaseAlreadySending, // StartSend called on channel that is already sending.
kViEBaseNotSending, // StopSend called on channel that is not sending.
kViEBaseAlreadyReceiving, // StartReceive called on channel that is already receiving.
kViEBaseObserverAlreadyRegistered, // RegisterObserver- an observer has already been set.
kViEBaseObserverNotRegistered, // DeregisterObserver - no observer has been registered.
kViEBaseUnknownError, // An unknown error has occurred. Check the log file.
//ViECodec
kViECodecInvalidArgument = 12100, // Wrong input parameter to function.
kViECodecObserverAlreadyRegistered, // RegisterEncoderObserver, RegisterDecoderObserver.
kViECodecObserverNotRegistered, // DeregisterEncoderObserver, DeregisterDecoderObserver.
kViECodecInvalidCodec, // SetSendCodec,SetReceiveCodec- The codec structure is invalid.
kViECodecInvalidChannelId, // The channel does not exist.
kViECodecInUse, // SetSendCodec- Can't change codec size or type when multiple channels use the same encoder.
kViECodecUnknownError, // An unknown error has occurred. Check the log file.
//ViERender
kViERenderInvalidRenderId = 12200, // No renderer with the ID exists. AddRenderer: the render ID is invalid; no capture device, channel or file is allocated with that id.
kViERenderAlreadyExists, // AddRenderer: the renderer already exists.
kViERenderInvalidFrameFormat, // AddRenderer (external renderer): the requested frame format is not supported.
kViERenderUnknownError, // An unknown error has occurred. Check the log file.
//ViECapture
kViECaptureDeviceAlreadyConnected = 12300, // ConnectCaptureDevice - A capture device has already been connected to this video channel.
kViECaptureDeviceDoesnNotExist, // No capture device exists with the provided capture id or unique name.
kViECaptureDeviceInvalidChannelId, // ConnectCaptureDevice, DisconnectCaptureDevice: no channel exists with the provided channel id.
kViECaptureDeviceNotConnected, // DisconnectCaptureDevice- No capture device is connected to the channel.
kViECaptureDeviceNotStarted, // Stop- The capture device is not started.
kViECaptureDeviceAlreadyStarted, // Start- The capture device is already started.
kViECaptureDeviceAlreadyAllocated, // AllocateCaptureDevice The device is already allocated.
kViECaptureDeviceMaxNoDevicesAllocated, // AllocateCaptureDevice Max number of devices already allocated.
kViECaptureObserverAlreadyRegistered, // RegisterObserver- An observer is already registered. Need to deregister first.
kViECaptureDeviceObserverNotRegistered, // DeregisterObserver- No observer is registered.
kViECaptureDeviceUnknownError, // An unknown error has occurred. Check the log file.
kViECaptureDeviceMacQtkitNotSupported, // QTKit handles the capture devices automatically. Thus querying capture capabilities is not supported.
//ViEFile
kViEFileInvalidChannelId = 12400, // No channel exists with the provided channel id.
kViEFileInvalidArgument, // Incorrect input argument
kViEFileAlreadyRecording, // StartRecordOutgoingVideo - already recording channel
kViEFileVoENotSet, // StartRecordOutgoingVideo. Failed to access voice engine. Has SetVoiceEngine been called?
kViEFileNotRecording, // StopRecordOutgoingVideo
kViEFileMaxNoOfFilesOpened, // StartPlayFile
kViEFileNotPlaying, // StopPlayFile. The file with the provided id is not playing.
kViEFileObserverAlreadyRegistered, // RegisterObserver
kViEFileObserverNotRegistered, // DeregisterObserver
kViEFileInputAlreadyConnected, // SendFileOnChannel- the video channel already has a connected input.
kViEFileNotConnected, // StopSendFileOnChannel- No file is being sent on the channel.
kViEFileVoEFailure, // SendFileOnChannel,StartPlayAudioLocally - failed to play audio stream
kViEFileInvalidRenderId, // SetRenderTimeoutImage and SetRenderStartImage: Renderer with the provided render id does not exist.
kViEFileInvalidFile, // Can't open the file with provided filename. Is the path and file format correct?
kViEFileInvalidCapture, // Can't use ViEPicture. Is the object correct?
kViEFileSetRenderTimeoutError, // SetRenderTimeoutImage- Please see log file.
kViEFileInvalidCaptureId, // SetCaptureDeviceImage capture id does not exist.
kViEFileSetCaptureImageError, // SetCaptureDeviceImage error. Please see log file.
kViEFileSetStartImageError, // SetRenderStartImage error. Please see log file.
kViEFileUnknownError, // An unknown error has occurred. Check the log file.
//ViENetwork
kViENetworkInvalidChannelId = 12500, // No channel exists with the provided channel id.
kViENetworkAlreadyReceiving, // SetLocalReceiver: Cannot change ports while receiving.
kViENetworkLocalReceiverNotSet, // GetLocalReceiver: SetLocalReceiver not called.
kViENetworkAlreadySending, // SetSendDestination
kViENetworkDestinationNotSet, // GetSendDestination
kViENetworkInvalidArgument, // GetLocalIP- Check function arguments.
kViENetworkSendCodecNotSet, // SetSendGQoS- Need to set the send codec first.
kViENetworkServiceTypeNotSupported, // SetSendGQoS
kViENetworkNotSupported, // SetSendGQoS Not supported on this OS.
kViENetworkObserverAlreadyRegistered, // RegisterObserver
kViENetworkObserverNotRegistered, // SetPeriodicDeadOrAliveStatus - RegisterObserver must be called first. DeregisterObserver - no observer is registered.
kViENetworkUnknownError, // An unknown error has occurred. Check the log file.
//ViERTP_RTCP
kViERtpRtcpInvalidChannelId = 12600, // No channel exists with the provided channel id.
kViERtpRtcpAlreadySending, // The channel is already sending. Need to stop send before calling this API.
kViERtpRtcpNotSending, // The channel needs to be sending in order for this function to work.
kViERtpRtcpRtcpDisabled, // Functions failed because RTCP is disabled.
kViERtpRtcpObserverAlreadyRegistered, // An observer is already registered. Need to deregister the old first.
kViERtpRtcpObserverNotRegistered, // No observer registered.
kViERtpRtcpUnknownError, // An unknown error has occurred. Check the log file.
//ViEEncryption
kViEEncryptionInvalidChannelId = 12700, // Channel id does not exist.
kViEEncryptionInvalidSrtpParameter, // EnableSRTPSend, EnableSRTPReceive- Check the SRTP parameters.
kViEEncryptionSrtpNotSupported, // This build does not support SRTP.
kViEEncryptionUnknownError, // An unknown error has occurred. Check the log file.
//ViEImageProcess
kViEImageProcessInvalidChannelId = 12800, // No channel exists with the provided channel id.
kViEImageProcessInvalidCaptureId, // No capture device exists with the provided capture id.
kViEImageProcessFilterExists, // RegisterCaptureEffectFilter,RegisterSendEffectFilter,RegisterRenderEffectFilter - Effect filter already registered.
kViEImageProcessFilterDoesNotExist, // DeRegisterCaptureEffectFilter,DeRegisterSendEffectFilter,DeRegisterRenderEffectFilter - Effect filter not registered.
kViEImageProcessAlreadyEnabled, // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already enabled.
kViEImageProcessAlreadyDisabled, // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already disabled.
kViEImageProcessUnknownError // An unknown error has occurred. Check the log file.
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_ERRORS_H_
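The codes above are consumed together with ViEBase::LastError(): calls return -1 on failure and the specific reason is then fetched separately. A small sketch of that pattern:

#include "vie_base.h"
#include "vie_errors.h"

void StartSendChecked(webrtc::ViEBase* base, int video_channel) {
  if (base->StartSend(video_channel) == -1) {
    switch (base->LastError()) {
      case kViEBaseAlreadySending:
        break;  // Benign: StartSend was called on an already sending channel.
      case kViEBaseInvalidChannelId:
        break;  // The channel id does not refer to an existing channel.
      default:
        break;  // Anything else: consult the trace file.
    }
  }
}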

@ -1,54 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_EXTERNAL_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_EXTERNAL_CODEC_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
class VideoEncoder;
class VideoDecoder;
// ----------------------------------------------------------------------------
// ViEExternalCodec
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEExternalCodec
{
public:
static ViEExternalCodec* GetInterface(VideoEngine* videoEngine);
virtual int Release() = 0;
virtual int RegisterExternalSendCodec(const int videoChannel,
const unsigned char plType,
VideoEncoder* encoder) = 0;
virtual int DeRegisterExternalSendCodec(const int videoChannel,
const unsigned char plType) = 0;
virtual int RegisterExternalReceiveCodec(const int videoChannel,
const unsigned int plType,
VideoDecoder* decoder,
bool decoderRender = false,
int renderDelay = 0) = 0;
virtual int DeRegisterExternalReceiveCodec(const int videoChannel,
const unsigned char plType) = 0;
protected:
ViEExternalCodec() {};
virtual ~ViEExternalCodec() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_EXTERNAL_CODEC_H_
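A sketch of plugging user-supplied encoder and decoder implementations into a channel. The VideoEncoder/VideoDecoder objects are assumed to be provided by the caller, and payload type 120 is a placeholder.

#include "vie_external_codec.h"

int RegisterUserCodec(webrtc::VideoEngine* video_engine, int video_channel,
                      webrtc::VideoEncoder* encoder,
                      webrtc::VideoDecoder* decoder) {
  webrtc::ViEExternalCodec* external =
      webrtc::ViEExternalCodec::GetInterface(video_engine);
  if (external == NULL)
    return -1;
  external->RegisterExternalSendCodec(video_channel, 120, encoder);
  external->RegisterExternalReceiveCodec(video_channel, 120, decoder,
                                         false /* decoderRender */);
  return external->Release();
}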

@ -1,229 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - File recording and playing.
// - Snapshots.
// - Background images.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_FILE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_FILE_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
struct VideoCodec;
// This structure contains picture data and describes the picture type.
struct ViEPicture
{
unsigned char* data;
unsigned int size;
unsigned int width;
unsigned int height;
RawVideoType type;
ViEPicture()
{
data = NULL;
size = 0;
width = 0;
height = 0;
type = kVideoI420;
}
// Call FreePicture to free the data.
~ViEPicture()
{
data = NULL;
size = 0;
width = 0;
height = 0;
type = kVideoUnknown;
}
};
// This enumerator tells which audio source to use for media files.
enum AudioSource
{
NO_AUDIO,
MICROPHONE,
PLAYOUT,
VOICECALL
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViEFileObserver
{
public:
// This method is called when the end of a played file is reached.
virtual void PlayFileEnded(const WebRtc_Word32 fileId) = 0;
protected:
virtual ~ViEFileObserver() {};
};
// ----------------------------------------------------------------------------
// ViEFile
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEFile
{
public:
// Factory method for the ViEFile sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViEFile* GetInterface(VideoEngine* videoEngine);
// Releases the ViEFile sub-API and decreases an internal reference counter.
// Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Starts playing a video file.
virtual int StartPlayFile(const char* fileNameUTF8, int& fileId,
const bool loop = false,
const FileFormats fileFormat =
kFileFormatAviFile) = 0;
// Stops a file from being played.
virtual int StopPlayFile(const int fileId) = 0;
// Registers an instance of a user implementation of the ViEFileObserver.
virtual int RegisterObserver(int fileId, ViEFileObserver& observer) = 0;
// Removes an already registered instance of ViEFileObserver.
virtual int DeregisterObserver(int fileId, ViEFileObserver& observer) = 0;
// This function specifies which channel, if any, the file should be sent on.
virtual int SendFileOnChannel(const int fileId, const int videoChannel) = 0;
// Stops a file from being sent on a channel.
virtual int StopSendFileOnChannel(const int videoChannel) = 0;
// Starts playing the file audio as microphone input for the specified voice
// channel.
virtual int StartPlayFileAsMicrophone(const int fileId,
const int audioChannel,
bool mixMicrophone = false,
float volumeScaling = 1) = 0;
// This function stops the file audio from being played on a VoiceEngine channel.
virtual int StopPlayFileAsMicrophone(const int fileId,
const int audioChannel) = 0;
// The function plays and mixes the file audio with the local speaker signal
// for playout.
virtual int StartPlayAudioLocally(const int fileId, const int audioChannel,
float volumeScaling = 1) = 0;
// Stops the audio from a file from being played locally.
virtual int StopPlayAudioLocally(const int fileId,
const int audioChannel) = 0;
// This function starts recording the video transmitted to another endpoint.
virtual int StartRecordOutgoingVideo(const int videoChannel,
const char* fileNameUTF8,
AudioSource audioSource,
const CodecInst& audioCodec,
const VideoCodec& videoCodec,
const FileFormats fileFormat =
kFileFormatAviFile) =0;
// This function starts recording the incoming video stream on a channel.
virtual int StartRecordIncomingVideo(const int videoChannel,
const char* fileNameUTF8,
AudioSource audioSource,
const CodecInst& audioCodec,
const VideoCodec& videoCodec,
const FileFormats fileFormat =
kFileFormatAviFile) = 0;
// Stops the file recording of the outgoing stream.
virtual int StopRecordOutgoingVideo(const int videoChannel) = 0;
// Stops the file recording of the incoming stream.
virtual int StopRecordIncomingVideo(const int videoChannel) = 0;
// Gets the audio codec, video codec and file format of a recorded file.
virtual int GetFileInformation(const char* fileName,
VideoCodec& videoCodec,
CodecInst& audioCodec,
const FileFormats fileFormat =
kFileFormatAviFile) = 0;
// The function takes a snapshot of the last rendered image for a video
// channel.
virtual int GetRenderSnapshot(const int videoChannel,
const char* fileNameUTF8) = 0;
// The function takes a snapshot of the last rendered image for a video
// channel
virtual int GetRenderSnapshot(const int videoChannel,
ViEPicture& picture) = 0;
// The function takes a snapshot of the last captured image by a specified
// capture device.
virtual int GetCaptureDeviceSnapshot(const int captureId,
const char* fileNameUTF8) = 0;
// The function takes a snapshot of the last captured image by a specified
// capture device.
virtual int GetCaptureDeviceSnapshot(const int captureId,
ViEPicture& picture) = 0;
// This function sets a jpg image to show before the first frame is captured
// by the capture device. This frame will be encoded and transmitted to a
// possible receiver.
virtual int SetCaptureDeviceImage(const int captureId,
const char* fileNameUTF8) = 0;
// This function sets an image to show before the first frame is captured by
// the capture device. This frame will be encoded and transmitted to a
// possible receiver.
virtual int SetCaptureDeviceImage(const int captureId,
const ViEPicture& picture) = 0;
virtual int FreePicture(ViEPicture& picture) = 0;
// This function sets a jpg image to render before the first received video
// frame is decoded for a specified channel.
virtual int SetRenderStartImage(const int videoChannel,
const char* fileNameUTF8) = 0;
// This function sets an image to render before the first received video
// frame is decoded for a specified channel.
virtual int SetRenderStartImage(const int videoChannel,
const ViEPicture& picture) = 0;
// This function sets a jpg image to render if no frame is decoded for a
// specified time interval.
virtual int SetRenderTimeoutImage(const int videoChannel,
const char* fileNameUTF8,
const unsigned int timeoutMs = 1000) = 0;
// This function sets an image to render if no frame is decoded for a
// specified time interval.
virtual int SetRenderTimeoutImage(const int videoChannel,
const ViEPicture& picture,
const unsigned int timeoutMs) = 0;
protected:
ViEFile() {};
virtual ~ViEFile() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_FILE_H_
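A sketch combining file playout and recording on one channel: an AVI file is looped into the channel while the incoming stream is recorded to disk. The file names are placeholders and the codec settings are assumed to be supplied by the caller.

#include "vie_file.h"

int PlayAndRecord(webrtc::VideoEngine* video_engine, int video_channel,
                  const webrtc::VideoCodec& video_codec,
                  const webrtc::CodecInst& audio_codec) {
  webrtc::ViEFile* file = webrtc::ViEFile::GetInterface(video_engine);
  if (file == NULL)
    return -1;
  int file_id = -1;
  file->StartPlayFile("background.avi", file_id, true /* loop */);
  file->SendFileOnChannel(file_id, video_channel);
  file->StartRecordIncomingVideo(video_channel, "incoming.avi",
                                 webrtc::NO_AUDIO, audio_codec, video_codec);
  // ... later ...
  file->StopRecordIncomingVideo(video_channel);
  file->StopSendFileOnChannel(video_channel);
  file->StopPlayFile(file_id);
  return file->Release();
}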

@ -1,108 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Effect filters
// - Deflickering
// - Denoising
// - Color enhancement
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_IMAGE_PROCESS_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_IMAGE_PROCESS_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
// ----------------------------------------------------------------------------
// ViEEffectFilter
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined effect filter.
// The effect filter is registered using RegisterCaptureEffectFilter(),
// RegisterSendEffectFilter() or RegisterRenderEffectFilter() and deregistered
// with the corresponding deregister function.
class WEBRTC_DLLEXPORT ViEEffectFilter
{
public:
// This method is called with an I420 video frame allowing the user to
// modify the video frame.
virtual int Transform(int size, unsigned char* frameBuffer,
unsigned int timeStamp90KHz, unsigned int width,
unsigned int height) = 0;
protected:
ViEEffectFilter() {}
virtual ~ViEEffectFilter(){}
};
// ----------------------------------------------------------------------------
// ViEImageProcess
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViEImageProcess
{
public:
// Factory method for the ViEImageProcess sub-API. Increases an internal
// reference counter if successful. Returns NULL if the API is not supported
// or if construction fails.
static ViEImageProcess* GetInterface(VideoEngine* videoEngine);
// Releases the ViEImageProcess sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// This function registers an EffectFilter to use for a specified capture
// device.
virtual int RegisterCaptureEffectFilter(const int captureId,
ViEEffectFilter& captureFilter) = 0;
// This function deregisters an EffectFilter for a specified capture device.
virtual int DeregisterCaptureEffectFilter(const int captureId) = 0;
// This function registers an EffectFilter to use for a specified channel.
virtual int RegisterSendEffectFilter(const int videoChannel,
ViEEffectFilter& sendFilter) = 0;
// This function deregisters a send effect filter for a specified channel.
virtual int DeregisterSendEffectFilter(const int videoChannel) = 0;
// This function registers an EffectFilter to use for the rendered video
// stream on an incoming channel.
virtual int RegisterRenderEffectFilter(const int videoChannel,
ViEEffectFilter& renderFilter) = 0;
// This function deregisters a render effect filter for a specified channel.
virtual int DeregisterRenderEffectFilter(const int videoChannel) = 0;
// All cameras run the risk of getting into almost perfect sync with
// fluorescent lamps, which results in very annoying flickering of the
// image. Most cameras have some type of filter to protect against this, but
// not all of them succeed. Enabling this function will remove the flicker.
virtual int EnableDeflickering(const int captureId, const bool enable) = 0;
// Some cameras produce very noisy captured images, especially in low-light
// conditions. This functionality will reduce the camera noise.
virtual int EnableDenoising(const int captureId, const bool enable) = 0;
// This function enhances the colors on the decoded video stream, enabled by
// default.
virtual int EnableColorEnhancement(const int videoChannel,
const bool enable) = 0;
protected:
ViEImageProcess() {};
virtual ~ViEImageProcess() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_IMAGE_PROCESS_H_
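A sketch of a user-defined effect filter: it clears the first luma row of every I420 frame, purely to show where frame data can be modified, and is installed on a capture device together with deflickering.

#include "vie_image_process.h"

class BlankTopRowFilter : public webrtc::ViEEffectFilter {
 public:
  virtual int Transform(int size, unsigned char* frameBuffer,
                        unsigned int timeStamp90KHz, unsigned int width,
                        unsigned int height) {
    // I420 stores the Y plane first; zero its first row as a visible marker.
    if (frameBuffer != NULL && size > 0 &&
        static_cast<unsigned int>(size) >= width) {
      for (unsigned int x = 0; x < width; ++x)
        frameBuffer[x] = 0;
    }
    return 0;
  }
};

int InstallCaptureFilter(webrtc::VideoEngine* video_engine, int capture_id,
                         BlankTopRowFilter* filter) {
  webrtc::ViEImageProcess* image_process =
      webrtc::ViEImageProcess::GetInterface(video_engine);
  if (image_process == NULL)
    return -1;
  image_process->RegisterCaptureEffectFilter(capture_id, *filter);
  image_process->EnableDeflickering(capture_id, true);
  return image_process->Release();
}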

@ -1,214 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_NETWORK_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_NETWORK_H_
// This sub-API supports the following functionalities:
// - Configuring send and receive addresses.
// - External transport support.
// - Port and address filters.
// - Windows GQoS functions and ToS functions.
// - Packet timeout notification.
// - Dead-or-alive connection observations.
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
class Transport;
// ----------------------------------------------------------------------------
// ViENetworkObserver
// ----------------------------------------------------------------------------
// This enumerator describes VideoEngine packet timeout states.
enum ViEPacketTimeout
{
NoPacket = 0,
PacketReceived = 1
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViENetworkObserver
{
public:
// This method will be called periodically, delivering a dead-or-alive
// decision for a specified channel.
virtual void OnPeriodicDeadOrAlive(const int videoChannel,
const bool alive) = 0;
// This method is called once if a packet timeout occurs.
virtual void PacketTimeout(const int videoChannel,
const ViEPacketTimeout timeout) = 0;
protected:
virtual ~ViENetworkObserver() {};
};
// ----------------------------------------------------------------------------
// ViENetwork
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViENetwork
{
public:
// Default values
enum
{
KDefaultSampleTimeSeconds = 2
};
// Factory method for the ViENetwork sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViENetwork* GetInterface(VideoEngine* videoEngine);
// Releases the ViENetwork sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Specifies the ports to receive RTP packets on. It is also possible to set
// the RTCP port and the local IP address.
virtual int SetLocalReceiver(const int videoChannel,
const unsigned short rtpPort,
const unsigned short rtcpPort = 0,
const char* ipAddress = NULL) = 0;
// Gets the local receiver ports and address for a specified channel.
virtual int GetLocalReceiver(const int videoChannel,
unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress) = 0;
// Specifies the destination port and IP address for a specified channel.
virtual int SetSendDestination(const int videoChannel,
const char* ipAddress,
const unsigned short rtpPort,
const unsigned short rtcpPort = 0,
const unsigned short sourceRtpPort = 0,
const unsigned short sourceRtcpPort = 0) = 0;
// Get the destination port and address for a specified channel.
virtual int GetSendDestination(const int videoChannel, char* ipAddress,
unsigned short& rtpPort,
unsigned short& rtcpPort,
unsigned short& sourceRtpPort,
unsigned short& sourceRtcpPort) = 0;
// This function registers a user implementation of Transport to use for
// sending RTP and RTCP packets on this channel.
virtual int RegisterSendTransport(const int videoChannel,
Transport& transport) = 0;
// This function deregisters a used Transport for a specified channel.
virtual int DeregisterSendTransport(const int videoChannel) = 0;
// When using external transport for a channel, received RTP packets should
// be passed to VideoEngine using this function. The input should contain
// the RTP header and payload.
virtual int ReceivedRTPPacket(const int videoChannel, const void* data,
const int length) = 0;
// When using external transport for a channel, received RTCP packets should
// be passed to VideoEngine using this function.
virtual int ReceivedRTCPPacket(const int videoChannel, const void* data,
const int length) = 0;
// Gets the source ports and IP address of the incoming stream for a
// specified channel.
virtual int GetSourceInfo(const int videoChannel, unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress,
unsigned int ipAddressLength) = 0;
// Gets the local IP address, in string format.
virtual int GetLocalIP(char ipAddress[64], bool ipv6 = false) = 0;
// Enables IPv6, instead of IPv4, for a specified channel.
virtual int EnableIPv6(int videoChannel) = 0;
// The function returns true if IPv6 is enabled, false otherwise.
virtual bool IsIPv6Enabled(int videoChannel) = 0;
// Enables port and IP address filtering for incoming packets on a
// specific channel.
virtual int SetSourceFilter(const int videoChannel,
const unsigned short rtpPort,
const unsigned short rtcpPort = 0,
const char* ipAddress = NULL) = 0;
// Gets current port and IP address filter for a specified channel.
virtual int GetSourceFilter(const int videoChannel, unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress) = 0;
// This function sets the six-bit Differentiated Services Code Point (DSCP)
// in the IP header of the outgoing stream for a specific channel.
// Windows and Linux only.
virtual int SetSendToS(const int videoChannel, const int DSCP,
const bool useSetSockOpt = false) = 0;
// Retrieves the six-bit Differentiated Services Code Point (DSCP) in the IP
// header of the outgoing stream for a specific channel.
virtual int GetSendToS(const int videoChannel, int& DSCP,
bool& useSetSockOpt) = 0;
// This function sets the Generic Quality of Service (GQoS) service level.
// The Windows operating system then maps to a Differentiated Services Code
// Point (DSCP) and to an 802.1p setting. Windows only.
virtual int SetSendGQoS(const int videoChannel, const bool enable,
const int serviceType,
const int overrideDSCP = 0) = 0;
// This function retrieves the currently set GQoS service level for a
// specific channel.
virtual int GetSendGQoS(const int videoChannel, bool& enabled,
int& serviceType, int& overrideDSCP) = 0;
// This function sets the Maximum Transmission Unit (MTU) for a channel.
// RTP packets will be packetized based on this MTU to optimize performance
// over the network.
virtual int SetMTU(int videoChannel, unsigned int mtu) = 0;
// This function enables or disables warning reports if packets have not
// been received for a specified time interval.
virtual int SetPacketTimeoutNotification(const int videoChannel,
bool enable,
int timeoutSeconds) = 0;
// Registers an instance of a user implementation of the ViENetwork
// observer.
virtual int RegisterObserver(const int videoChannel,
ViENetworkObserver& observer) = 0;
// Removes a registered instance of ViENetworkObserver.
virtual int DeregisterObserver(const int videoChannel) = 0;
// This function enables or disables the periodic dead-or-alive callback
// functionality for a specified channel.
virtual int SetPeriodicDeadOrAliveStatus(
const int videoChannel, const bool enable,
const unsigned int sampleTimeSeconds = KDefaultSampleTimeSeconds) = 0;
// This function handles sending a raw UDP data packet over an existing RTP
// or RTCP socket.
virtual int SendUDPPacket(const int videoChannel, const void* data,
const unsigned int length, int& transmittedBytes,
bool useRtcpSocket = false) = 0;
protected:
ViENetwork() {};
virtual ~ViENetwork() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_NETWORK_H_
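A sketch of the built-in UDP transport path: bind a local receive port and point the channel at the remote endpoint. The address and port are placeholders. An application using its own sockets would instead call RegisterSendTransport() and feed incoming packets back in through ReceivedRTPPacket()/ReceivedRTCPPacket().

#include "vie_network.h"

int SetupUdpTransport(webrtc::VideoEngine* video_engine, int video_channel) {
  webrtc::ViENetwork* network = webrtc::ViENetwork::GetInterface(video_engine);
  if (network == NULL)
    return -1;
  // Receive RTP on port 11111; the RTCP port and local IP arguments are left
  // at their defaults.
  network->SetLocalReceiver(video_channel, 11111);
  network->SetSendDestination(video_channel, "192.168.0.2", 11111);
  return network->Release();
}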

@ -1,106 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Specifying render destinations for incoming video streams, capture devices
// and files.
// - Configuring render streams.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RENDER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RENDER_H_
#include "common_types.h"
namespace webrtc
{
class VideoRender;
class VideoEngine;
// ----------------------------------------------------------------------------
// ExternalRenderer
// ----------------------------------------------------------------------------
// This class declares an abstract interface to be used for external renderers.
// The user implemented derived class is registered using AddRenderer().
class WEBRTC_DLLEXPORT ExternalRenderer
{
public:
// This method will be called when the stream to be rendered changes in
// resolution or number of streams mixed in the image.
virtual int FrameSizeChange(unsigned int width, unsigned int height,
unsigned int numberOfStreams) = 0;
// This method is called when a new frame should be rendered.
virtual int DeliverFrame(unsigned char* buffer, int bufferSize) = 0;
protected:
virtual ~ExternalRenderer() {}
};
// ----------------------------------------------------------------------------
// ViERender
// ----------------------------------------------------------------------------
class WEBRTC_DLLEXPORT ViERender
{
public:
// Factory method for the ViERender sub-API. Increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViERender* GetInterface(VideoEngine* videoEngine);
// Releases the ViERender sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-APIs before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Registers a render module.
virtual int RegisterVideoRenderModule(VideoRender& renderModule) = 0;
// Deregisters a render module.
virtual int DeRegisterVideoRenderModule(VideoRender& renderModule) = 0;
// Sets the render destination for a given render ID.
virtual int AddRenderer(const int renderId, void* window,
const unsigned int zOrder, const float left,
const float top, const float right,
const float bottom) = 0;
// Removes the renderer for a stream
virtual int RemoveRenderer(const int renderId) = 0;
// Starts rendering a render stream.
virtual int StartRender(const int renderId) = 0;
// Stops rendering a render stream.
virtual int StopRender(const int renderId) = 0;
// Configures an already added render stream.
virtual int ConfigureRender(int renderId, const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom) = 0;
// This function mirrors the rendered stream left and right or up and down.
virtual int MirrorRenderStream(const int renderId, const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis) = 0;
// External render
virtual int AddRenderer(const int renderId, RawVideoType videoInputFormat,
ExternalRenderer* renderer) = 0;
protected:
ViERender() {};
virtual ~ViERender() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RENDER_H_
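A sketch of adding render destinations for the local preview and the remote stream. The render id space is shared with capture devices and channels (see the AddRenderer comment in vie_errors.h), so the capture id and channel id are used directly; the window handles are platform specific and passed as opaque pointers.

#include "vie_render.h"

int SetupRendering(webrtc::VideoEngine* video_engine, int capture_id,
                   int video_channel, void* local_window,
                   void* remote_window) {
  webrtc::ViERender* render = webrtc::ViERender::GetInterface(video_engine);
  if (render == NULL)
    return -1;
  // Full-window rendering: z-order 0 and normalized coordinates 0..1.
  render->AddRenderer(capture_id, local_window, 0, 0.0f, 0.0f, 1.0f, 1.0f);
  render->StartRender(capture_id);
  render->AddRenderer(video_channel, remote_window, 0, 0.0f, 0.0f, 1.0f, 1.0f);
  render->StartRender(video_channel);
  return render->Release();
}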

@ -1,275 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
// - SSRC handling.
// - Transmission of RTCP reports.
// - Obtaining RTCP data from incoming RTCP sender reports.
// - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
// - Forward Error Correction (FEC).
// - RTP keepalive for maintaining the NAT mappings associated with RTP flows.
// - Writing RTP and RTCP packets to binary files for offline analysis of the
// call quality.
// - Inserting extra RTP packets into the active stream.
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RTP_RTCP_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RTP_RTCP_H_
#include "common_types.h"
namespace webrtc
{
class VideoEngine;
// This enumerator sets the RTCP mode.
enum ViERTCPMode
{
kRtcpNone = 0,
kRtcpCompound_RFC4585 = 1,
kRtcpNonCompound_RFC5506 = 2
};
// This enumerator describes the key frame request mode.
enum ViEKeyFrameRequestMethod
{
kViEKeyFrameRequestNone = 0,
kViEKeyFrameRequestPliRtcp = 1,
kViEKeyFrameRequestFirRtp = 2,
kViEKeyFrameRequestFirRtcp = 3
};
// ----------------------------------------------------------------------------
// ViERTPObserver
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterRTPObserver() and
// deregistered using DeregisterRTPObserver().
class WEBRTC_DLLEXPORT ViERTPObserver
{
public:
// This method is called if SSRC of the incoming stream is changed.
virtual void IncomingSSRCChanged(const int videoChannel,
const unsigned int SSRC) = 0;
// This method is called if a field in CSRC changes or if the number of
// CSRCs changes.
virtual void IncomingCSRCChanged(const int videoChannel,
const unsigned int CSRC,
const bool added) = 0;
protected:
virtual ~ViERTPObserver() {}
};
// ----------------------------------------------------------------------------
// ViERTCPObserver
// ----------------------------------------------------------------------------
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterRTCPObserver() and
// deregistered using DeregisterRTCPObserver().
class WEBRTC_DLLEXPORT ViERTCPObserver
{
public:
// This method is called if an application-defined RTCP packet has been
// received.
virtual void OnApplicationDataReceived(
const int videoChannel, const unsigned char subType,
const unsigned int name, const char* data,
const unsigned short dataLengthInBytes) = 0;
protected:
virtual ~ViERTCPObserver() {}
};
//
class WEBRTC_DLLEXPORT ViERTP_RTCP
{
public:
// Default values
enum
{
KDefaultDeltaTransmitTimeSeconds = 15
};
enum
{
KMaxRTCPCNameLength = 256
};
// Factory for the ViERTP_RTCP subAPI and increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViERTP_RTCP* GetInterface(VideoEngine* videoEngine);
// Releases the ViERTP_RTCP sub-API and decreases an internal reference
// counter. Returns the new reference count. This value should be zero
// for all sub-API:s before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// This function enables you to specify the RTP synchronization source
// identifier (SSRC) explicitly.
virtual int SetLocalSSRC(const int videoChannel,
const unsigned int SSRC) = 0;
// This function gets the SSRC for the outgoing RTP stream for the specified
// channel.
virtual int GetLocalSSRC(const int videoChannel,
unsigned int& SSRC) const = 0;
// This function gets the SSRC for the incoming RTP stream for the specified
// channel.
virtual int GetRemoteSSRC(const int videoChannel,
unsigned int& SSRC) const = 0;
// This function returns the CSRCs of the incoming RTP packets.
virtual int GetRemoteCSRCs(const int videoChannel,
unsigned int CSRCs[kRtpCsrcSize]) const = 0;
// This function enables manual initialization of the sequence number. The
// start sequence number is normally a random number.
virtual int SetStartSequenceNumber(const int videoChannel,
unsigned short sequenceNumber) = 0;
// This function sets the RTCP status for the specified channel.
// Default mode is kRtcpCompound_RFC4585.
virtual int SetRTCPStatus(const int videoChannel,
const ViERTCPMode rtcpMode) = 0;
// This function gets the RTCP status for the specified channel.
virtual int GetRTCPStatus(const int videoChannel,
ViERTCPMode& rtcpMode) = 0;
// This function sets the RTCP canonical name (CNAME) for the RTCP reports
// on a specific channel.
virtual int SetRTCPCName(const int videoChannel,
const char rtcpCName[KMaxRTCPCNameLength]) = 0;
// This function gets the RTCP canonical name (CNAME) for the RTCP reports
// sent on the specified channel.
virtual int GetRTCPCName(const int videoChannel,
char rtcpCName[KMaxRTCPCNameLength]) = 0;
// This function gets the RTCP canonical name (CNAME) for the RTCP reports
// received on the specified channel.
virtual int GetRemoteRTCPCName(
const int videoChannel, char rtcpCName[KMaxRTCPCNameLength]) const = 0;
// This function sends an RTCP APP packet on a specific channel.
virtual int SendApplicationDefinedRTCPPacket(
const int videoChannel, const unsigned char subType,
unsigned int name, const char* data,
unsigned short dataLengthInBytes) = 0;
// This function enables Negative Acknowledgment (NACK) using RTCP,
// implemented based on RFC 4585. NACK requests retransmission of RTP packets
// lost on the network, creating a lossless transport at the expense of added
// delay. If NACK is used, it should be enabled on both endpoints in a call.
virtual int SetNACKStatus(const int videoChannel, const bool enable) = 0;
// This function enables Forward Error Correction (FEC),
// implemented based on RFC 5109, to improve packet loss robustness. Extra
// FEC packets are sent together with the usual media packets, hence
// part of the bitrate will be used for FEC packets.
virtual int SetFECStatus(const int videoChannel, const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC) = 0;
// This function enables hybrid Negative Acknowledgment using RTCP
// and Forward Error Correction (FEC) implemented based on RFC 5109,
// to improve packet loss robustness. Extra
// FEC packets are sent together with the usual media packets, hence part
// of the bitrate will be used for FEC packets.
// The hybrid mode will choose between NACK only, FEC only and both, based on
// network conditions. When both are applied, only packets that were not
// recovered by the FEC will be NACKed.
virtual int SetHybridNACKFECStatus(const int videoChannel,
const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC) = 0;
// This function enables RTCP key frame requests.
virtual int SetKeyFrameRequestMethod(
const int videoChannel, const ViEKeyFrameRequestMethod method) = 0;
// This function enables signaling of temporary bitrate constraints using
// RTCP, implemented based on RFC4585.
virtual int SetTMMBRStatus(const int videoChannel, const bool enable) = 0;
// The function gets statistics from the received RTCP report.
virtual int GetReceivedRTCPStatistics(
const int videoChannel, unsigned short& fractionLost,
unsigned int& cumulativeLost, unsigned int& extendedMax,
unsigned int& jitter, int& rttMs) const = 0;
// The function gets statistics from the RTCP report sent to the receiver.
virtual int GetSentRTCPStatistics(const int videoChannel,
unsigned short& fractionLost,
unsigned int& cumulativeLost,
unsigned int& extendedMax,
unsigned int& jitter,
int& rttMs) const = 0;
// The function gets statistics from the sent and received RTP streams.
virtual int GetRTPStatistics(const int videoChannel,
unsigned int& bytesSent,
unsigned int& packetsSent,
unsigned int& bytesReceived,
unsigned int& packetsReceived) const = 0;
// This function enables or disables an RTP keep-alive mechanism which can
// be used to maintain an existing Network Address Translator (NAT) mapping
// while regular RTP is no longer transmitted.
virtual int SetRTPKeepAliveStatus(
const int videoChannel, bool enable, const char unknownPayloadType,
const unsigned int deltaTransmitTimeSeconds =
KDefaultDeltaTransmitTimeSeconds) = 0;
// This function gets the RTP keep-alive status.
virtual int GetRTPKeepAliveStatus(
const int videoChannel, bool& enabled, char& unknownPayloadType,
unsigned int& deltaTransmitTimeSeconds) = 0;
// This function enables capturing of RTP packets to a binary file on a
// specific channel and for a given direction. The file can later be
// replayed using e.g. RTP Tools rtpplay since the binary file format is
// compatible with the rtpdump format.
virtual int StartRTPDump(const int videoChannel,
const char fileNameUTF8[1024],
RTPDirections direction) = 0;
// This function disables capturing of RTP packets to a binary file on a
// specific channel and for a given direction.
virtual int StopRTPDump(const int videoChannel,
RTPDirections direction) = 0;
// Registers an instance of a user implementation of the ViERTPObserver.
virtual int RegisterRTPObserver(const int videoChannel,
ViERTPObserver& observer) = 0;
// Removes a registered instance of ViERTPObserver.
virtual int DeregisterRTPObserver(const int videoChannel) = 0;
// Registers an instance of a user implementation of the ViERTCPObserver.
virtual int RegisterRTCPObserver(const int videoChannel,
ViERTCPObserver& observer) = 0;
// Removes a registered instance of ViERTCPObserver.
virtual int DeregisterRTCPObserver(const int videoChannel) = 0;
protected:
ViERTP_RTCP() {};
virtual ~ViERTP_RTCP() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_RTP_RTCP_H_
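// Illustrative usage sketch (not part of the original header): configures
// RTCP, NACK and RTCP-based key frame requests for one channel and reads back
// the received RTCP statistics. Only the interface declared above is used;
// the VideoEngine pointer and the channel id are assumed to come from the
// ViEBase sub-API, and error handling is reduced to early returns.
int ConfigureRtpRtcpSketch(webrtc::VideoEngine* videoEngine, int videoChannel)
{
    webrtc::ViERTP_RTCP* rtpRtcp =
        webrtc::ViERTP_RTCP::GetInterface(videoEngine);
    if (rtpRtcp == NULL)
    {
        return -1;
    }
    // Compound RTCP (the default), NACK and PLI-based key frame requests.
    if (rtpRtcp->SetRTCPStatus(videoChannel,
                               webrtc::kRtcpCompound_RFC4585) != 0 ||
        rtpRtcp->SetNACKStatus(videoChannel, true) != 0 ||
        rtpRtcp->SetKeyFrameRequestMethod(
            videoChannel, webrtc::kViEKeyFrameRequestPliRtcp) != 0)
    {
        rtpRtcp->Release();
        return -1;
    }
    // Poll the statistics reported by the remote side.
    unsigned short fractionLost = 0;
    unsigned int cumulativeLost = 0;
    unsigned int extendedMax = 0;
    unsigned int jitter = 0;
    int rttMs = 0;
    rtpRtcp->GetReceivedRTCPStatistics(videoChannel, fractionLost,
                                       cumulativeLost, extendedMax, jitter,
                                       rttMs);
    rtpRtcp->Release();
    return 0;
}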

View File

@@ -1,101 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_ARM_MODE := arm
LOCAL_MODULE := libwebrtc_vie_core
LOCAL_MODULE_TAGS := optional
LOCAL_CPP_EXTENSION := .cc
LOCAL_GENERATED_SOURCES :=
LOCAL_SRC_FILES := vie_base_impl.cc \
vie_capture_impl.cc \
vie_codec_impl.cc \
vie_encryption_impl.cc \
vie_external_codec_impl.cc \
vie_file_impl.cc \
vie_image_process_impl.cc \
vie_impl.cc \
vie_network_impl.cc \
vie_ref_count.cc \
vie_render_impl.cc \
vie_rtp_rtcp_impl.cc \
vie_shared_data.cc \
vie_capturer.cc \
vie_channel.cc \
vie_channel_manager.cc \
vie_encoder.cc \
vie_file_image.cc \
vie_file_player.cc \
vie_file_recorder.cc \
vie_frame_provider_base.cc \
vie_input_manager.cc \
vie_manager_base.cc \
vie_performance_monitor.cc \
vie_receiver.cc \
vie_renderer.cc \
vie_render_manager.cc \
vie_sender.cc \
vie_sync_module.cc
# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
MY_DEFS := '-DNO_TCMALLOC' \
'-DNO_HEAPCHECKER' \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_LINUX' \
'-DWEBRTC_THREAD_RR' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)
# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../.. \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../../../modules/video_capture/main/interface \
$(LOCAL_PATH)/../../../modules/video_render/main/interface \
$(LOCAL_PATH)/../../../common_video/vplib/main/interface \
$(LOCAL_PATH)/../../../common_video/jpeg/main/interface \
$(LOCAL_PATH)/../../../modules/media_file/interface \
$(LOCAL_PATH)/../../../modules/interface \
$(LOCAL_PATH)/../../../modules/rtp_rtcp/interface \
$(LOCAL_PATH)/../../../modules/udp_transport/interface \
$(LOCAL_PATH)/../../../modules/utility/interface \
$(LOCAL_PATH)/../../../modules/audio_coding/main/interface \
$(LOCAL_PATH)/../../../modules/video_coding/main/interface \
$(LOCAL_PATH)/../../../modules/video_coding/codecs/interface \
$(LOCAL_PATH)/../../../modules/video_mixer/main/interface \
$(LOCAL_PATH)/../../../modules/video_processing/main/interface \
$(LOCAL_PATH)/../../../voice_engine/main/interface \
$(LOCAL_PATH)/../../../system_wrappers/interface
# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := libcutils \
libdl \
libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -ldl -lpthread
endif
ifneq ($(TARGET_SIMULATOR),true)
LOCAL_SHARED_LIBRARIES += libdl
endif
include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
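# Illustrative sketch (not part of the original makefile): a consumer module
# could link against the static library built above roughly like this; the
# consumer module name is hypothetical.
#
#   include $(CLEAR_VARS)
#   LOCAL_MODULE := libwebrtc_vie_example
#   LOCAL_STATIC_LIBRARIES += libwebrtc_vie_core
#   include $(BUILD_SHARED_LIBRARY)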

View File

@@ -1,133 +0,0 @@
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../../common_settings.gypi', # Common settings
],
'targets': [
{
'target_name': 'video_engine_core',
'type': '<(library)',
'dependencies': [
## common_video
'../../../common_video/vplib/main/source/vplib.gyp:webrtc_vplib',
'../../../common_video/jpeg/main/source/jpeg.gyp:webrtc_jpeg',
## ModulesShared
'../../../modules/media_file/source/media_file.gyp:media_file',
'../../../modules/rtp_rtcp/source/rtp_rtcp.gyp:rtp_rtcp',
'../../../modules/udp_transport/source/udp_transport.gyp:udp_transport',
'../../../modules/utility/source/utility.gyp:webrtc_utility',
## ModulesVideo
'../../../modules/video_coding/main/source/video_coding.gyp:webrtc_video_coding',
'../../../modules/video_processing/main/source/video_processing.gyp:video_processing',
'../../../modules/video_render/main/source/video_render.gyp:video_render_module',
## VoiceEngine
'../../../voice_engine/main/source/voice_engine_core.gyp:voice_engine_core',
## system_wrappers_2005
'../../../system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [
'../interface',
'../../../modules/video_capture/main/interface',
'../../../modules/video_render/main/interface',
],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
],
},
'sources': [
# interface
'../interface/vie_base.h',
'../interface/vie_capture.h',
'../interface/vie_codec.h',
'../interface/vie_encryption.h',
'../interface/vie_errors.h',
'../interface/vie_external_codec.h',
'../interface/vie_file.h',
'../interface/vie_image_process.h',
'../interface/vie_network.h',
'../interface/vie_render.h',
'../interface/vie_rtp_rtcp.h',
# headers
'vie_base_impl.h',
'vie_capture_impl.h',
'vie_codec_impl.h',
'vie_defines.h',
'vie_encryption_impl.h',
'vie_external_codec_impl.h',
'vie_file_impl.h',
'vie_image_process_impl.h',
'vie_impl.h',
'vie_network_impl.h',
'vie_ref_count.h',
'vie_render_impl.h',
'vie_rtp_rtcp_impl.h',
'vie_shared_data.h',
'vie_capturer.h',
'vie_channel.h',
'vie_channel_manager.h',
'vie_encoder.h',
'vie_file_image.h',
'vie_file_player.h',
'vie_file_recorder.h',
'vie_frame_provider_base.h',
'vie_input_manager.h',
'vie_manager_base.h',
'vie_performance_monitor.h',
'vie_receiver.h',
'vie_renderer.h',
'vie_render_manager.h',
'vie_sender.h',
'vie_sync_module.h',
# ViE
'vie_base_impl.cc',
'vie_capture_impl.cc',
'vie_codec_impl.cc',
'vie_encryption_impl.cc',
'vie_external_codec_impl.cc',
'vie_file_impl.cc',
'vie_image_process_impl.cc',
'vie_impl.cc',
'vie_network_impl.cc',
'vie_ref_count.cc',
'vie_render_impl.cc',
'vie_rtp_rtcp_impl.cc',
'vie_shared_data.cc',
# ViE
'vie_capturer.cc',
'vie_channel.cc',
'vie_channel_manager.cc',
'vie_encoder.cc',
'vie_file_image.cc',
'vie_file_player.cc',
'vie_file_recorder.cc',
'vie_frame_provider_base.cc',
'vie_input_manager.cc',
'vie_manager_base.cc',
'vie_performance_monitor.cc',
'vie_receiver.cc',
'vie_renderer.cc',
'vie_render_manager.cc',
'vie_sender.cc',
'vie_sync_module.cc',
], # source
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
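# Illustrative sketch (not part of the original gyp file): a target in another
# .gyp file would pick up this library and its exported '../interface' include
# path through a dependency entry roughly like the one below; the relative
# path is hypothetical.
#
#   'dependencies': [
#     'path/to/video_engine_core.gyp:video_engine_core',
#   ],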

View File

@@ -1,838 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_base_impl.cc
*/
#include "vie_base_impl.h"
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "critical_section_wrapper.h"
#include "trace.h"
#include "vie_errors.h"
#include "vie_impl.h"
#include "vie_shared_data.h"
#include "vie_channel.h"
#include "vie_channel_manager.h"
#include "vie_encoder.h"
#include "vie_input_manager.h"
#include "vie_performance_monitor.h"
#include "rtp_rtcp.h"
#include "video_render.h"
#include "video_coding.h"
#include "video_processing.h"
#include "stdio.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViEBase* ViEBase::GetInterface(VideoEngine* videoEngine)
{
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViEBaseImpl* vieBaseImpl = vieImpl;
(*vieBaseImpl)++; // Increase ref count
return vieBaseImpl;
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViEBaseImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViEBase::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViEBase release too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViEBase reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEBaseImpl::ViEBaseImpl() :
_viePerformanceMonitor(0)
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEBaseImpl::ViEBaseImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEBaseImpl::~ViEBaseImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEBaseImpl::ViEBaseImpl() Dtor");
_viePerformanceMonitor.Terminate();
}
// ----------------------------------------------------------------------------
// Init
//
// Must be called before any other API is called.
// This API should also reset the state of the engine to the original state.
// ----------------------------------------------------------------------------
int ViEBaseImpl::Init()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId, "Init");
if (IsInitialized())
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"Init called twice");
return 0;
}
SetInitialized();
_viePerformanceMonitor.Init();
return 0;
}
// ----------------------------------------------------------------------------
// SetVoiceEngine
//
// Connects ViE to a VoE instance.
// ----------------------------------------------------------------------------
int ViEBaseImpl::SetVoiceEngine(VoiceEngine* ptrVoiceEngine)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
if (_channelManager.SetVoiceEngine(ptrVoiceEngine) != 0)
{
SetLastError(kViEBaseVoEFailure);
return -1;
}
return 0;
}
// ============================================================================
// Channel functions
// ============================================================================
// ----------------------------------------------------------------------------
// CreateChannel
//
// Creates a new ViE channel
// ----------------------------------------------------------------------------
int ViEBaseImpl::CreateChannel(int& videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
if (_channelManager.CreateChannel(videoChannel) == -1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Could not create channel", __FUNCTION__);
videoChannel = -1;
SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel created: %d", __FUNCTION__, videoChannel);
return 0;
}
// ----------------------------------------------------------------------------
// CreateChannel
//
// Creates a new channel using the same capture device and encoder as
// the original channel.
// ----------------------------------------------------------------------------
int ViEBaseImpl::CreateChannel(int& videoChannel, int originalChannel)
{
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
if (!cs.Channel(originalChannel))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - originalChannel does not exist.", __FUNCTION__,
_instanceId);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
if (_channelManager.CreateChannel(videoChannel,
originalChannel) == -1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Could not create channel", __FUNCTION__);
videoChannel = -1;
SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel created: %d", __FUNCTION__, videoChannel);
return 0;
}
// ----------------------------------------------------------------------------
// DeleteChannel
//
// Deletes a ViE channel
// ----------------------------------------------------------------------------
int ViEBaseImpl::DeleteChannel(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s(%d)",
__FUNCTION__, videoChannel);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
{
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
// No such channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel %d doesn't exist", __FUNCTION__,
videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
// Deregister the ViEEncoder if no other channel is using it.
ViEEncoder* ptrViEEncoder = cs.Encoder(videoChannel);
if (cs.ChannelUsingViEEncoder(videoChannel) == false)
{
// No other channels using this ViEEncoder.
// Disconnect the channel encoder from possible input.
// capture or file.
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* provider = is.FrameProvider(ptrViEEncoder);
if (provider)
{
provider->DeregisterFrameCallback(ptrViEEncoder);
}
}
}
if (_channelManager.DeleteChannel(videoChannel) == -1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Could not delete channel %d", __FUNCTION__,
videoChannel);
SetLastError(kViEBaseUnknownError);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel deleted: %d", __FUNCTION__, videoChannel);
return 0;
}
// ----------------------------------------------------------------------------
// ConnectAudioChannel
//
// Connects a ViE channel with a VoE channel
// ----------------------------------------------------------------------------
int ViEBaseImpl::ConnectAudioChannel(const int videoChannel,
const int audioChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s(%d)",
__FUNCTION__, videoChannel);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
if (cs.Channel(videoChannel) == NULL)
{
// No such channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
if (_channelManager.ConnectVoiceChannel(videoChannel, audioChannel) != 0)
{
SetLastError(kViEBaseVoEFailure);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DisconnectAudioChannel
//
// Disconnects a previously connected ViE and VoE channel pair
// ----------------------------------------------------------------------------
int ViEBaseImpl::DisconnectAudioChannel(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s(%d)",
__FUNCTION__, videoChannel);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
if (cs.Channel(videoChannel) == NULL)
{
// No such channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
if (_channelManager.DisconnectVoiceChannel(videoChannel) != 0)
{
SetLastError(kViEBaseVoEFailure);
return -1;
}
return 0;
}
// ============================================================================
// Start and stop
// ============================================================================
// ----------------------------------------------------------------------------
// StartSend
//
// Starts sending on videoChannel and also starts the encoder.
// ----------------------------------------------------------------------------
int ViEBaseImpl::StartSend(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId,
videoChannel),
"%s(channel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
if (ptrViEChannel == NULL)
{
// The channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d does not exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
ViEEncoder* ptrViEEncoder = cs.Encoder(videoChannel);
if (ptrViEEncoder == NULL)
{
assert(false);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not find encoder for channel %d", __FUNCTION__,
videoChannel);
return -1;
}
// Make sure we start with a key frame...
ptrViEEncoder->Pause();
WebRtc_Word32 error = ptrViEChannel->StartSend();
if (error != 0)
{
// Restart the encoder, if it was stopped
ptrViEEncoder->Restart();
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not start sending on channel %d", __FUNCTION__,
videoChannel);
if (error == kViEBaseAlreadySending)
{
SetLastError(kViEBaseAlreadySending);
}
else
{
SetLastError(kViEBaseUnknownError);
}
return -1;
}
// Trigger the key frame and restart
ptrViEEncoder->SendKeyFrame();
ptrViEEncoder->Restart();
return 0;
}
// ----------------------------------------------------------------------------
// StopSend
//
// Stops sending on the channel. This will also stop the encoder for the
// channel, if not shared with still active channels.
// ----------------------------------------------------------------------------
int ViEBaseImpl::StopSend(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(channel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
if (ptrViEChannel == NULL)
{
// The channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d does not exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
WebRtc_Word32 error = ptrViEChannel->StopSend();
if (error != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not stop sending on channel %d", __FUNCTION__,
videoChannel);
if (error == kViEBaseNotSending)
{
SetLastError(kViEBaseNotSending);
}
else
{
SetLastError(kViEBaseUnknownError);
}
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StartReceive
//
// Starts receiving on the channel. This will also start the decoder.
// ----------------------------------------------------------------------------
int ViEBaseImpl::StartReceive(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(channel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
if (ptrViEChannel == NULL)
{
// The channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d does not exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
if (ptrViEChannel->Receiving())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d already receive.", __FUNCTION__,
videoChannel);
SetLastError(kViEBaseAlreadyReceiving);
return -1;
}
if (ptrViEChannel->StartReceive() != 0)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StopReceive
//
// Stops receiving on the channel. No decoding will be done.
// ----------------------------------------------------------------------------
int ViEBaseImpl::StopReceive(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(channel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
if (ptrViEChannel == NULL)
{
// The channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d does not exist", __FUNCTION__, videoChannel);
SetLastError(kViEBaseInvalidChannelId);
return -1;
}
if (ptrViEChannel->StopReceive() != 0)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// Channel functions
// ============================================================================
// ----------------------------------------------------------------------------
// RegisterObserver
//
// Registers a user-implemented ViE observer
// ----------------------------------------------------------------------------
int ViEBaseImpl::RegisterObserver(ViEBaseObserver& observer)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (_viePerformanceMonitor.ViEBaseObserverRegistered())
{
SetLastError(kViEBaseObserverAlreadyRegistered);
return -1;
}
return _viePerformanceMonitor.RegisterViEBaseObserver(&observer);
}
// ----------------------------------------------------------------------------
// DeregisterObserver
//
// Deregisters an observer
// ----------------------------------------------------------------------------
int ViEBaseImpl::DeregisterObserver()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!_viePerformanceMonitor.ViEBaseObserverRegistered())
{
SetLastError(kViEBaseObserverNotRegistered);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
"%s No observer registered.", __FUNCTION__);
return -1;
}
return _viePerformanceMonitor.RegisterViEBaseObserver(NULL);
}
// ============================================================================
// Info functions
// ============================================================================
// ----------------------------------------------------------------------------
// GetVersion
//
// Writes version information in 'version'
// ----------------------------------------------------------------------------
int ViEBaseImpl::GetVersion(char version[1024])
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"GetVersion(version=?)");
assert(kViEVersionMaxMessageSize == 1024);
if (version == NULL)
{
SetLastError(kViEBaseInvalidArgument);
return (-1);
}
char versionBuf[kViEVersionMaxMessageSize];
char* versionPtr = versionBuf;
WebRtc_Word32 len = 0; // does not include terminating NULL
WebRtc_Word32 accLen = 0;
len = AddViEVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
len = AddBuildInfo(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
#ifdef WEBRTC_EXTERNAL_TRANSPORT
len = AddExternalTransportBuild(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
#endif
len = AddVCMVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
#ifndef WEBRTC_EXTERNAL_TRANSPORT
len = AddSocketModuleVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
#endif
#ifdef WEBRTC_SRTP
len = AddSRTPModuleVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
#endif
len = AddRtpRtcpModuleVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
len = AddVideoCaptureVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
len = AddRenderVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
len = AddVideoProcessingVersion(versionPtr);
if (len == -1)
{
SetLastError(kViEBaseUnknownError);
return -1;
}
versionPtr += len;
accLen += len;
assert(accLen < kViEVersionMaxMessageSize);
memcpy(version, versionBuf, accLen);
version[accLen] = '\0';
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo, ViEId(_instanceId),
"GetVersion() => %s", version);
return 0;
}
WebRtc_Word32 ViEBaseImpl::AddBuildInfo(char* str) const
{
return sprintf(str, "Build: %s\n", BUILDINFO);
}
WebRtc_Word32 ViEBaseImpl::AddViEVersion(char* str) const
{
return sprintf(str, "VideoEngine 3.1.0\n");
}
#ifdef WEBRTC_EXTERNAL_TRANSPORT
WebRtc_Word32 ViEBaseImpl::AddExternalTransportBuild(char* str) const
{
return sprintf(str, "External transport build\n");
}
#endif
WebRtc_Word32 ViEBaseImpl::AddModuleVersion(webrtc::Module* module,
char* str) const
{
WebRtc_Word8 version[kViEMaxModuleVersionSize];
WebRtc_UWord32 remainingBufferInBytes(kViEMaxModuleVersionSize);
WebRtc_UWord32 position(0);
if (module && module->Version(version, remainingBufferInBytes, position)
== 0)
{
return sprintf(str, "%s\n", version);
}
return -1;
}
WebRtc_Word32 ViEBaseImpl::AddVCMVersion(char* str) const
{
webrtc::VideoCodingModule* vcmPtr =
webrtc::VideoCodingModule::Create(_instanceId);
int len = AddModuleVersion(vcmPtr, str);
webrtc::VideoCodingModule::Destroy(vcmPtr);
return len;
}
WebRtc_Word32 ViEBaseImpl::AddVideoCaptureVersion(char* str) const
{
return 0;
}
WebRtc_Word32 ViEBaseImpl::AddVideoProcessingVersion(char* str) const
{
webrtc::VideoProcessingModule* videoPtr =
webrtc::VideoProcessingModule::Create(_instanceId);
int len = AddModuleVersion(videoPtr, str);
webrtc::VideoProcessingModule::Destroy(videoPtr);
return len;
}
WebRtc_Word32 ViEBaseImpl::AddRenderVersion(char* str) const
{
return 0;
}
#ifndef WEBRTC_EXTERNAL_TRANSPORT
WebRtc_Word32 ViEBaseImpl::AddSocketModuleVersion(char* str) const
{
WebRtc_UWord8 numSockThreads(1);
UdpTransport* socketPtr =
UdpTransport::Create(
_instanceId, numSockThreads);
int len = AddModuleVersion(socketPtr, str);
UdpTransport::Destroy(socketPtr);
return len;
}
#endif
#ifdef WEBRTC_SRTP
WebRtc_Word32 ViEBaseImpl::AddSRTPModuleVersion(char* str) const
{
SrtpModule* srtpPtr = SrtpModule::CreateSrtpModule(-1);
int len = AddModuleVersion(srtpPtr, str);
SrtpModule::DestroySrtpModule(srtpPtr);
return len;
}
#endif
WebRtc_Word32 ViEBaseImpl::AddRtpRtcpModuleVersion(char* str) const
{
RtpRtcp* rtpRtcpPtr =
RtpRtcp::CreateRtpRtcp(-1, true);
int len = AddModuleVersion(rtpRtcpPtr, str);
RtpRtcp::DestroyRtpRtcp(rtpRtcpPtr);
return len;
}
// ----------------------------------------------------------------------------
// LastError
//
// Returns the last set error in this ViE instance
// ----------------------------------------------------------------------------
int ViEBaseImpl::LastError()
{
return LastErrorInternal();
}
} // namespace webrtc
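// Illustrative usage sketch (not part of the original file): the typical call
// sequence served by the implementation above. It acquires the ViEBase
// sub-API, initializes the engine, creates a channel, ties it to a
// VoiceEngine channel for audio/video synchronization and starts
// receiving/sending. The VoiceEngine pointer and the audio channel id are
// assumed to be set up by the caller; error handling is reduced to early
// returns.
int RunViEBaseSketch(webrtc::VideoEngine* videoEngine,
                     webrtc::VoiceEngine* voiceEngine,
                     int audioChannel)
{
    webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(videoEngine);
    if (base == NULL)
    {
        return -1;
    }
    int videoChannel = -1;
    if (base->Init() != 0 ||
        base->SetVoiceEngine(voiceEngine) != 0 ||
        base->CreateChannel(videoChannel) != 0 ||
        base->ConnectAudioChannel(videoChannel, audioChannel) != 0)
    {
        base->Release();
        return -1;
    }
    base->StartReceive(videoChannel);
    base->StartSend(videoChannel);
    // ... run the call, then tear down in reverse order:
    base->StopSend(videoChannel);
    base->StopReceive(videoChannel);
    base->DisconnectAudioChannel(videoChannel);
    base->DeleteChannel(videoChannel);
    base->Release();
    return 0;
}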

View File

@@ -1,101 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_base_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_BASE_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_BASE_IMPL_H_
#include "vie_defines.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
#include "vie_base.h"
// Forward declarations
namespace webrtc
{
class VoiceEngine;
class Module;
class ViEPerformanceMonitor;
class ViEBaseImpl: public virtual ViESharedData,
public ViEBase,
public ViERefCount
{
public:
virtual int Release();
virtual int Init();
virtual int SetVoiceEngine(VoiceEngine* ptrVoiceEngine);
// Channel functions
virtual int CreateChannel(int& videoChannel);
virtual int CreateChannel(int& videoChannel, int originalChannel);
virtual int DeleteChannel(const int videoChannel);
virtual int ConnectAudioChannel(const int videoChannel,
const int audioChannel);
virtual int DisconnectAudioChannel(const int videoChannel);
// Start and stop
virtual int StartSend(const int videoChannel);
virtual int StopSend(const int videoChannel);
virtual int StartReceive(const int videoChannel);
virtual int StopReceive(const int videoChannel);
// Callbacks
virtual int RegisterObserver(ViEBaseObserver& observer);
virtual int DeregisterObserver();
// Info functions
virtual int GetVersion(char version[1024]);
virtual int LastError();
protected:
ViEBaseImpl();
virtual ~ViEBaseImpl();
ViEPerformanceMonitor _viePerformanceMonitor;
private:
// Version functions
WebRtc_Word32 AddViEVersion(char* str) const;
WebRtc_Word32 AddBuildInfo(char* str) const;
#ifdef WEBRTC_EXTERNAL_TRANSPORT
WebRtc_Word32 AddExternalTransportBuild(char* str) const;
#else
WebRtc_Word32 AddSocketModuleVersion(char* str) const;
#endif
WebRtc_Word32 AddModuleVersion(webrtc::Module* module, char* str) const;
WebRtc_Word32 AddVCMVersion(char* str) const;
WebRtc_Word32 AddVideoCaptureVersion(char* str) const;
WebRtc_Word32 AddVideoProcessingVersion(char* str) const;
WebRtc_Word32 AddRenderVersion(char* str) const;
#ifdef WEBRTC_SRTP
WebRtc_Word32 AddSRTPModuleVersion(char* str) const;
#endif
WebRtc_Word32 AddRtpRtcpModuleVersion(char* str) const;
};
} // namespace webrtc
#endif // #define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_BASE_IMPL_H_

View File

@@ -1,792 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_capture_impl.cc
*/
#include "vie_capture_impl.h"
// Defines
#include "vie_defines.h"
#include "trace.h"
#include "vie_capturer.h"
#include "vie_channel.h"
#include "vie_channel_manager.h"
#include "vie_encoder.h"
#include "vie_impl.h"
#include "vie_input_manager.h"
#include "vie_errors.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViECapture* ViECapture::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViECaptureImpl* vieCaptureImpl = vieImpl;
(*vieCaptureImpl)++; // Increase ref count
return vieCaptureImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViECaptureImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViECapture::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViECapture release too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViECapture reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViECaptureImpl::ViECaptureImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViECaptureImpl::ViECaptureImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViECaptureImpl::~ViECaptureImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViECaptureImpl::~ViECaptureImpl() Dtor");
}
// ============================================================================
// Available devices
// ============================================================================
// ----------------------------------------------------------------------------
// NumberOfCaptureDevices
//
// Returns the number of available devices
// ----------------------------------------------------------------------------
int ViECaptureImpl::NumberOfCaptureDevices()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
return _inputManager.NumberOfCaptureDevices();
}
// ----------------------------------------------------------------------------
// GetCaptureDevice
//
// Gets the name and, if available, the unique id of capture device listNumber
// ----------------------------------------------------------------------------
int ViECaptureImpl::GetCaptureDevice(unsigned int listNumber,
char* deviceNameUTF8,
unsigned int deviceNameUTF8Length,
char* uniqueIdUTF8,
unsigned int uniqueIdUTF8Length)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(listNumber: %d)", __FUNCTION__, listNumber);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
return _inputManager.GetDeviceName(listNumber,
(WebRtc_UWord8*) deviceNameUTF8,
deviceNameUTF8Length,
(WebRtc_UWord8*) uniqueIdUTF8,
uniqueIdUTF8Length);
}
// ============================================================================
// Allocate capture device
// ============================================================================
// ----------------------------------------------------------------------------
// AllocateCaptureDevice
//
// Allocates the capture device
// ----------------------------------------------------------------------------
int ViECaptureImpl::AllocateCaptureDevice(
const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
int& captureId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(uniqueIdUTF8: %s)", __FUNCTION__, uniqueIdUTF8);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
const WebRtc_Word32
result =
_inputManager.CreateCaptureDevice(
(const WebRtc_UWord8*) uniqueIdUTF8,
(const WebRtc_UWord32) uniqueIdUTF8Length, captureId);
if (result != 0)
{
SetLastError(result);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// AllocateExternalCaptureDevice
//
// Registers a user-implemented capture device. The callback should be called
// for all new captured images once the capture device is started
// ----------------------------------------------------------------------------
int ViECaptureImpl::AllocateExternalCaptureDevice(
int& captureId, ViEExternalCapture*& externalCapture)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
const WebRtc_Word32 result =
_inputManager.CreateExternalCaptureDevice(externalCapture, captureId);
if (result != 0)
{
SetLastError(result);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// AllocateCaptureDevice
//
// Allocates the capture device; the capture module to attach must be
// associated with the unique ID.
// ----------------------------------------------------------------------------
int ViECaptureImpl::AllocateCaptureDevice(VideoCaptureModule& captureModule,
int& captureId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
const WebRtc_Word32 result =
_inputManager.CreateCaptureDevice(captureModule, captureId);
if (result != 0)
{
SetLastError(result);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// ReleaseCaptureDevice
//
// Releases an allocated capture device
// ----------------------------------------------------------------------------
int ViECaptureImpl::ReleaseCaptureDevice(const int captureId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d)", __FUNCTION__, captureId);
{
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
}
// Destroy the capture device
return _inputManager.DestroyCaptureDevice(captureId);
}
// ============================================================================
// Pair capture device and channel
// ============================================================================
// ----------------------------------------------------------------------------
// ConnectCaptureDevice
//
// Connects a capture device with a channel, i.e. the captured video from this
// device will be sent to that channel. Several channels can be connected to
// the same capture device.
// ----------------------------------------------------------------------------
int ViECaptureImpl::ConnectCaptureDevice(const int captureId,
const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(captureId: %d, videoChannel: %d)", __FUNCTION__, captureId,
videoChannel);
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* ptrViEEncoder = cs.Encoder(videoChannel);
if (ptrViEEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d doesn't exist", __FUNCTION__,
videoChannel);
SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
// Check if the encoder already has a connected frame provider
if (is.FrameProvider(ptrViEEncoder) != NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Channel %d already connected to a capture device.",
__FUNCTION__, videoChannel);
SetLastError(kViECaptureDeviceAlreadyConnected);
return -1;
}
VideoCodec codec;
bool useHardwareEncoder = false;
if (ptrViEEncoder->GetEncoder(codec) == 0)
{ // try to provide the encoder with preencoded frames if possible
if (ptrViECapture->PreEncodeToViEEncoder(codec, *ptrViEEncoder,
videoChannel) == 0)
{
useHardwareEncoder = true;
}
}
// If we don't use the camera as hardware encoder we register the vieEncoder
// for callbacks
if (!useHardwareEncoder
&& ptrViECapture->RegisterFrameCallback(videoChannel, ptrViEEncoder)
!= 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DisconnectCaptureDevice
//
// Disconnects a capture device from a connected channel.
// ----------------------------------------------------------------------------
int ViECaptureImpl::DisconnectCaptureDevice(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(videoChannel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* ptrViEEncoder = cs.Encoder(videoChannel);
if (ptrViEEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__,
videoChannel);
SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* frameProvider = is.FrameProvider(ptrViEEncoder);
if (!frameProvider)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: No capture device connected to channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
if (frameProvider->Id() < kViECaptureIdBase
|| frameProvider->Id() > kViECaptureIdMax)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: No capture device connected to channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
if (frameProvider->DeregisterFrameCallback(ptrViEEncoder) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// Start/stop
// ============================================================================
// ----------------------------------------------------------------------------
// StartCapture
//
// Starts an allocated capture device, i.e. it will start outputting captured frames
// ----------------------------------------------------------------------------
int ViECaptureImpl::StartCapture(const int captureId,
const CaptureCapability captureCapability)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d)", __FUNCTION__, captureId);
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (ptrViECapture->Started())
{
SetLastError(kViECaptureDeviceAlreadyStarted);
return -1;
}
if (ptrViECapture->Start(captureCapability) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StopCapture
//
// Stops a started capture device
// ----------------------------------------------------------------------------
int ViECaptureImpl::StopCapture(const int captureId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d)", __FUNCTION__, captureId);
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (!ptrViECapture->Started())
{
SetLastError(kViECaptureDeviceNotStarted);
return -1;
}
if (ptrViECapture->Stop() != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
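// ----------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file)
//
// The allocate -> connect -> start -> stop -> disconnect -> release sequence
// implemented above, applied to the first enumerated capture device. The
// 256-byte name buffers and the default-constructed CaptureCapability
// (taken here to mean "use a default capability") are assumptions of the
// sketch; error handling is reduced to early returns.
// ----------------------------------------------------------------------------
int RunCaptureSketch(VideoEngine* videoEngine, int videoChannel)
{
    ViECapture* capture = ViECapture::GetInterface(videoEngine);
    if (capture == NULL)
    {
        return -1;
    }
    char deviceName[256];
    char uniqueId[256];
    int captureId = -1;
    CaptureCapability capability;
    if (capture->NumberOfCaptureDevices() < 1 ||
        capture->GetCaptureDevice(0, deviceName, sizeof(deviceName),
                                  uniqueId, sizeof(uniqueId)) != 0 ||
        capture->AllocateCaptureDevice(uniqueId, sizeof(uniqueId),
                                       captureId) != 0 ||
        capture->ConnectCaptureDevice(captureId, videoChannel) != 0 ||
        capture->StartCapture(captureId, capability) != 0)
    {
        capture->Release();
        return -1;
    }
    // ... capture for a while, then tear down:
    capture->StopCapture(captureId);
    capture->DisconnectCaptureDevice(videoChannel);
    capture->ReleaseCaptureDevice(captureId);
    capture->Release();
    return 0;
}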
// ----------------------------------------------------------------------------
// SetRotateCapturedFrames
//
// Rotates a frame as soon as it is delivered from the capture device.
// This applies to mobile devices with accelerometers or other rotation
// detection capabilities.
// ----------------------------------------------------------------------------
int ViECaptureImpl::SetRotateCapturedFrames(const int captureId,
const RotateCapturedFrame rotation)
{
int iRotation = -1;
switch (rotation)
{
case RotateCapturedFrame_0:
iRotation = 0;
break;
case RotateCapturedFrame_90:
iRotation = 90;
break;
case RotateCapturedFrame_180:
iRotation = 180;
break;
case RotateCapturedFrame_270:
iRotation = 270;
break;
}
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(rotation: %d)", __FUNCTION__, iRotation);
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (ptrViECapture->SetRotateCapturedFrames(rotation) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// SetCaptureDelay
//
// Defines the capture delay for an external capture device.
// This call will also override the capture delay value for a capture
// device.
// ----------------------------------------------------------------------------
int ViECaptureImpl::SetCaptureDelay(const int captureId,
const unsigned int captureDelayMs)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d, captureDelayMs %u)", __FUNCTION__,
captureId, captureDelayMs);
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (ptrViECapture->SetCaptureDelay(captureDelayMs) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
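// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): reporting the age of frames that an
// external capture source pushes into the engine, so audio/video
// synchronization can compensate. The 120 ms figure and the externally
// allocated 'captureId' are assumptions.
// ----------------------------------------------------------------------------
static void SketchSetExternalCaptureDelay(ViECapture* capture,
const int captureId)
{
capture->SetCaptureDelay(captureId, 120);
}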
// ============================================================================
// Capture capabilities
// ============================================================================
// ----------------------------------------------------------------------------
// NumberOfCapabilities
//
// Returns the number of capabilities for the specified device
// ----------------------------------------------------------------------------
int ViECaptureImpl::NumberOfCapabilities(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureDeviceName: %s)", __FUNCTION__, uniqueIdUTF8);
#if defined(WEBRTC_MAC_INTEL)
// TODO: Move to capture module!
// QTKit framework handles all capabilities and capture settings
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s This API is not supported on Mac OS", __FUNCTION__,
_instanceId);
return -1;
#endif
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
return (int) _inputManager.NumberOfCaptureCapabilities(
(WebRtc_UWord8*) uniqueIdUTF8);
}
// ----------------------------------------------------------------------------
// GetCaptureCapability
//
// Gets a capture capability for the specified capture device
// ----------------------------------------------------------------------------
int ViECaptureImpl::GetCaptureCapability(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
const unsigned int capabilityNumber,
CaptureCapability& capability)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureDeviceName: %s)", __FUNCTION__, uniqueIdUTF8);
#if defined(WEBRTC_MAC_INTEL)
// TODO: Move to capture module!
// QTKit framework handles all capabilities and capture settings
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s This API is not supported on Mac OS", __FUNCTION__,
_instanceId);
return -1;
#endif
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
if (_inputManager.GetCaptureCapability((WebRtc_UWord8*) uniqueIdUTF8,
capabilityNumber, capability) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
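// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): enumerating the capabilities of a capture
// device and picking the first one that reaches VGA resolution. 'uniqueId' is
// the device id returned by GetCaptureDevice(); the capability field names are
// assumptions based on the CaptureCapability struct. Note that this path is
// unavailable on Mac, as traced above.
// ----------------------------------------------------------------------------
static bool SketchFindVgaCapability(ViECapture* capture,
const char* uniqueId,
const unsigned int uniqueIdLength,
CaptureCapability& result)
{
const int numberOfCapabilities =
capture->NumberOfCapabilities(uniqueId, uniqueIdLength);
for (int i = 0; i < numberOfCapabilities; i++)
{
CaptureCapability capability;
if (capture->GetCaptureCapability(uniqueId, uniqueIdLength, i,
capability) != 0)
{
continue;
}
if (capability.width >= 640 && capability.height >= 480)
{
result = capability;
return true;
}
}
return false;
}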
int ViECaptureImpl::ShowCaptureSettingsDialogBox(
const char* uniqueIdUTF8, const unsigned int uniqueIdUTF8Length,
const char* dialogTitle, void* parentWindow /*= NULL*/,
const unsigned int x/*=200*/, const unsigned int y/*=200*/)
{
#if defined(WEBRTC_MAC_INTEL)
// TODO: Move to capture module
// QTKit framework handles all capabilities and capture settings
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
SetLastError(kViECaptureDeviceMacQtkitNotSupported);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s This API is not supported on Mac OS", __FUNCTION__,
_instanceId);
return -1;
#endif
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s captureId (captureDeviceName: %s)", __FUNCTION__,
uniqueIdUTF8);
return _inputManager.DisplayCaptureSettingsDialogBox(
(WebRtc_UWord8*) uniqueIdUTF8, (WebRtc_UWord8*) dialogTitle,
parentWindow, x, y);
}
int ViECaptureImpl::GetOrientation(const char* uniqueIdUTF8,
RotateCapturedFrame &orientation)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s (captureDeviceName: %s)", __FUNCTION__, uniqueIdUTF8);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
if (_inputManager.GetOrientation((WebRtc_UWord8*) uniqueIdUTF8,
orientation) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
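// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): querying the orientation a camera reports
// and feeding it straight back as the rotation to apply to its frames. The
// device id and capture id are assumed to come from GetCaptureDevice() and
// AllocateCaptureDevice().
// ----------------------------------------------------------------------------
static void SketchMatchCameraOrientation(ViECapture* capture,
const char* uniqueId,
const int captureId)
{
RotateCapturedFrame orientation = RotateCapturedFrame_0;
if (capture->GetOrientation(uniqueId, orientation) == 0)
{
capture->SetRotateCapturedFrames(captureId, orientation);
}
}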
// ============================================================================
// Callbacks
// ============================================================================
// ----------------------------------------------------------------------------
// EnableBrightnessAlarm
//
// Enables brightness alarm callback for a specified capture device
// ----------------------------------------------------------------------------
int ViECaptureImpl::EnableBrightnessAlarm(const int captureId,
const bool enable)
{
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (ptrViECapture->EnableBrightnessAlarm(enable) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// RegisterObserver
//
// Registers the customer-implemented observer for capture callbacks
// ----------------------------------------------------------------------------
int ViECaptureImpl::RegisterObserver(const int captureId,
ViECaptureObserver& observer)
{
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (ptrViECapture->IsObserverRegistered())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Observer already registered", __FUNCTION__);
SetLastError(kViECaptureObserverAlreadyRegistered);
return -1;
}
if (ptrViECapture->RegisterObserver(observer) != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterObserver
//
// Removes the previously registered observer
// ----------------------------------------------------------------------------
int ViECaptureImpl::DeregisterObserver(const int captureId)
{
ViEInputManagerScoped is(_inputManager);
ViECapturer* ptrViECapture = is.Capture(captureId);
if (ptrViECapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, captureId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViECaptureDeviceDoesnNotExist);
return -1;
}
if (!ptrViECapture->IsObserverRegistered())
{
SetLastError(kViECaptureDeviceObserverNotRegistered);
return -1;
}
if (ptrViECapture->DeRegisterObserver() != 0)
{
SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
return 0;
}
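// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): wiring a customer-implemented observer to
// a capture device and enabling the brightness alarm so the observer starts
// receiving callbacks. The observer instance is assumed to be an application
// class derived from ViECaptureObserver.
// ----------------------------------------------------------------------------
static void SketchRegisterCaptureObserver(ViECapture* capture,
const int captureId,
ViECaptureObserver& observer)
{
if (capture->RegisterObserver(captureId, observer) != 0)
{
// Unknown capture id, or an observer is already registered.
return;
}
capture->EnableBrightnessAlarm(captureId, true);
// ... before releasing the capture device:
// capture->DeregisterObserver(captureId);
}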
} // namespace webrtc

View File

@ -1,110 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_capture_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURE_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURE_IMPL_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "vie_capture.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViECaptureImpl
// ----------------------------------------------------------------------------
class ViECaptureImpl: public virtual ViESharedData,
public ViECapture,
public ViERefCount
{
public:
virtual int Release();
// Available devices
virtual int NumberOfCaptureDevices();
virtual int GetCaptureDevice(unsigned int listNumber, char* deviceNameUTF8,
const unsigned int deviceNameUTF8Length,
char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length);
// Allocate capture device
virtual int AllocateCaptureDevice(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
int& captureId);
// Allocate capture device
virtual int AllocateCaptureDevice(VideoCaptureModule& captureModule,
int& captureId);
// Allocate external capture device
virtual int AllocateExternalCaptureDevice(
int& captureId, ViEExternalCapture *&externalCapture);
virtual int ReleaseCaptureDevice(const int captureId);
// Pair capture device and channel
virtual int ConnectCaptureDevice(const int captureId,
const int videoChannel);
virtual int DisconnectCaptureDevice(const int videoChannel);
// Start/stop
virtual int StartCapture(const int captureId,
const CaptureCapability captureCapability =
CaptureCapability());
virtual int StopCapture(const int captureId);
virtual int SetRotateCapturedFrames(const int captureId,
const RotateCapturedFrame rotation);
virtual int SetCaptureDelay(const int captureId,
const unsigned int captureDelayMs);
// Capture capabilities
virtual int NumberOfCapabilities(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length);
virtual int GetCaptureCapability(const char* uniqueIdUTF8,
const unsigned int uniqueIdUTF8Length,
const unsigned int capabilityNumber,
CaptureCapability& capability);
virtual int ShowCaptureSettingsDialogBox(
const char* uniqueIdUTF8, const unsigned int uniqueIdUTF8Length,
const char* dialogTitle, void* parentWindow = NULL,
const unsigned int x = 200, const unsigned int y = 200);
virtual int GetOrientation(const char* uniqueIdUTF8,
RotateCapturedFrame &orientation);
// Callbacks
virtual int EnableBrightnessAlarm(const int captureId, const bool enable);
virtual int RegisterObserver(const int captureId,
ViECaptureObserver& observer);
virtual int DeregisterObserver(const int captureId);
protected:
ViECaptureImpl();
virtual ~ViECaptureImpl();
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURE_IMPL_H_

File diff suppressed because it is too large

View File

@ -1,203 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_capturer.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURER_H_
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "typedefs.h"
#include "video_capture.h"
#include "video_processing.h"
#include "vie_frame_provider_base.h"
#include "video_codec_interface.h"
#include "video_coding.h"
#include "vie_capture.h"
#include "common_types.h"
// Forward declarations
struct ViEPicture;
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
class ViEEffectFilter;
class ViEEncoder;
class ProcessThread;
class ViECapturer: public ViEFrameProviderBase,
public ViEExternalCapture, // External capture
protected VideoCaptureDataCallback,
protected VideoEncoder,
protected VCMReceiveCallback,
protected VideoCaptureFeedBack
{
public:
static ViECapturer* CreateViECapture(int captureId, int engineId,
VideoCaptureModule& captureModule,
ProcessThread& moduleProcessThread);
static ViECapturer* CreateViECapture(int captureId, int engineId,
const WebRtc_UWord8* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length,
ProcessThread& moduleProcessThread);
~ViECapturer();
//Override ViEFrameProviderBase
int FrameCallbackChanged();
virtual int DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
bool IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
// Implements ExternalCapture
virtual int IncomingFrame(unsigned char* videoFrame,
unsigned int videoFrameLength,
unsigned short width, unsigned short height,
RawVideoType videoType,
unsigned long long captureTime = 0);
// Use this capture device as encoder. Returns 0 if the codec is supported by this capture device.
virtual WebRtc_Word32 PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vieEncoder,
WebRtc_Word32 vieEncoderId);
// Start/Stop
WebRtc_Word32 Start(const CaptureCapability captureCapability =
CaptureCapability());
WebRtc_Word32 Stop();
bool Started();
WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS);
WebRtc_Word32 SetRotateCapturedFrames(const RotateCapturedFrame rotation);
// Effect filter
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effectFilter);
WebRtc_Word32 EnableDenoising(bool enable);
WebRtc_Word32 EnableDeflickering(bool enable);
WebRtc_Word32 EnableBrightnessAlarm(bool enable);
// Statistic observer
WebRtc_Word32 RegisterObserver(ViECaptureObserver& observer);
WebRtc_Word32 DeRegisterObserver();
bool IsObserverRegistered();
//Information
const WebRtc_UWord8* CurrentDeviceName() const;
// set device images
WebRtc_Word32 SetCaptureDeviceImage(const VideoFrame& captureDeviceImage);
protected:
ViECapturer(int captureId, int engineId,
ProcessThread& moduleProcessThread);
WebRtc_Word32 Init(VideoCaptureModule& captureModule);
WebRtc_Word32 Init(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord32 deviceUniqueIdUTF8Length);
// Implements VideoCaptureDataCallback
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
VideoCodecType codecType);
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay);
bool EncoderActive();
bool CaptureCapabilityFixed(); // Returns true if the capture capability has been set in the StartCapture function and may not be changed.
WebRtc_Word32 IncImageProcRefCount();
WebRtc_Word32 DecImageProcRefCount();
// Implements VideoEncoder
virtual WebRtc_Word32 Version(WebRtc_Word8 *version, WebRtc_Word32 length) const;
virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize);
virtual WebRtc_Word32 Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo =
NULL,
VideoFrameType frameType = kDeltaFrame);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
EncodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate);
// Implements VCMReceiveCallback
virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
// Implements VideoCaptureFeedBack
virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate);
virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm alarm);
// Thread functions for delivering captured frames to receivers
static bool ViECaptureThreadFunction(void* obj);
bool ViECaptureProcess();
void DeliverI420Frame(VideoFrame& videoFrame);
void DeliverCodedFrame(VideoFrame& videoFrame);
private:
enum {kThreadWaitTimeMs = 100};
CriticalSectionWrapper& _captureCritsect; // Never take this one before deliverCritsect!
CriticalSectionWrapper& _deliverCritsect;
VideoCaptureModule* _captureModule;
bool _useExternalModule;
VideoCaptureExternal* _externalCaptureModule;
ProcessThread& _moduleProcessThread;
const int _captureId;
// Capture thread
ThreadWrapper& _vieCaptureThread;
EventWrapper& _vieCaptureEvent;
EventWrapper& _vieDeliverEvent;
VideoFrame _capturedFrame;
VideoFrame _deliverFrame;
VideoFrame _encodedFrame;
// Image processing
ViEEffectFilter* _effectFilter;
VideoProcessingModule* _imageProcModule;
int _imageProcModuleRefCounter;
VideoProcessingModule::FrameStats* _deflickerFrameStats;
VideoProcessingModule::FrameStats* _brightnessFrameStats;
Brightness _currentBrightnessLevel;
Brightness _reportedBrightnessLevel;
bool _denoisingEnabled;
//Statistic observer
CriticalSectionWrapper& _observerCritsect;
ViECaptureObserver* _observer;
// Encoding using encoding capable cameras
CriticalSectionWrapper& _encodingCritsect;
VideoCaptureModule::VideoCaptureEncodeInterface* _captureEncoder;
EncodedImageCallback* _encodeCompleteCallback;
VideoCodec _codec;
ViEEncoder* _vieEncoder; //ViEEncoder we are encoding for.
WebRtc_Word32 _vieEncoderId; //ViEEncoder id we are encoding for.
VideoCodingModule* _vcm; // Used for decoding preencoded frames
EncodedVideoData _decodeBuffer; // Used for decoding preencoded frames
bool _decoderInitialized;
CaptureCapability _requestedCapability;
VideoFrame _captureDeviceImage;
};
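// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): pushing one externally produced I420 frame
// into the engine through the ViEExternalCapture interface implemented by
// ViECapturer above. The allocation call and the kVideoI420 raw type are
// assumptions based on the ViECapture API in this commit.
// ----------------------------------------------------------------------------
static void SketchPushExternalFrame(ViECapture* capture,
unsigned char* i420Buffer,
unsigned int bufferLength,
unsigned short width,
unsigned short height)
{
int captureId = -1;
ViEExternalCapture* externalCapture = NULL;
if (capture->AllocateExternalCaptureDevice(captureId, externalCapture) != 0)
{
return;
}
// A real application would call IncomingFrame() for every produced frame.
externalCapture->IncomingFrame(i420Buffer, bufferLength, width, height,
kVideoI420, 0);
capture->ReleaseCaptureDevice(captureId);
}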
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CAPTURER_H_

File diff suppressed because it is too large

View File

@ -1,484 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_channel.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_
// Defines
#include "vie_defines.h"
#include "typedefs.h"
#include "vie_network.h"
#include "rtp_rtcp_defines.h"
#include "udp_transport.h"
#include "video_coding_defines.h"
#ifdef WEBRTC_SRTP
#include "SrtpModule.h"
#endif
#include "tick_util.h"
#include "vie_frame_provider_base.h"
#include "vie_file_recorder.h"
// Forward declarations
class SrtpModule;
class VideoRenderCallback;
namespace webrtc
{
class CriticalSectionWrapper;
class Encryption;
class ProcessThread;
class RtpRtcp;
class ThreadWrapper;
class VideoCodingModule;
class VideoDecoder;
class ViEDecoderObserver;
class ViEEffectFilter;
class ViENetworkObserver;
class ViEReceiver;
class ViERTCPObserver;
class ViERTPObserver;
class ViESender;
class ViESyncModule;
class VoEVideoSync;
class ViEChannel:
public VCMFrameTypeCallback, // VCM Module
public VCMReceiveCallback, // VCM Module
public VCMReceiveStatisticsCallback, // VCM Module
public VCMPacketRequestCallback, // VCM Module
public VCMFrameStorageCallback, // VCM Module
public RtcpFeedback, // RTP/RTCP Module
public RtpFeedback, // RTP/RTCP Module
public ViEFrameProviderBase
{
public:
ViEChannel(WebRtc_Word32 channelId, WebRtc_Word32 engineId,
WebRtc_UWord32 numberOfCores,
ProcessThread& moduleProcessThread);
~ViEChannel();
WebRtc_Word32 Init();
//-----------------------------------------------------------------
// Codecs
//-----------------------------------------------------------------
WebRtc_Word32 SetSendCodec(const VideoCodec& videoCodec,
bool newStream = true);
WebRtc_Word32 SetReceiveCodec(const VideoCodec& videoCodec);
WebRtc_Word32 GetReceiveCodec(VideoCodec& videoCodec);
WebRtc_Word32 RegisterCodecObserver(ViEDecoderObserver* observer);
WebRtc_Word32 RegisterExternalDecoder(const WebRtc_UWord8 plType,
VideoDecoder* decoder,
bool decoderRender,
WebRtc_Word32 renderDelay);
WebRtc_Word32 DeRegisterExternalDecoder(const WebRtc_UWord8 plType);
WebRtc_Word32 ReceiveCodecStatistics(WebRtc_UWord32& numKeyFrames,
WebRtc_UWord32& numDeltaFrames);
WebRtc_Word32 WaitForKeyFrame(bool wait);
WebRtc_Word32 SetSignalPacketLossStatus(bool enable, bool onlyKeyFrames);
//-----------------------------------------------------------------
// RTP/RTCP
//-----------------------------------------------------------------
WebRtc_Word32 SetRTCPMode(const RTCPMethod rtcpMode);
WebRtc_Word32 GetRTCPMode(RTCPMethod& rtcpMode);
WebRtc_Word32 SetNACKStatus(const bool enable);
WebRtc_Word32 SetFECStatus(const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC);
WebRtc_Word32 SetHybridNACKFECStatus(const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC);
WebRtc_Word32
SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
WebRtc_Word32 EnableTMMBR(const bool enable);
WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable);
WebRtc_Word32 SetSSRC(const WebRtc_UWord32 SSRC);
WebRtc_Word32 GetLocalSSRC(WebRtc_UWord32& SSRC);
WebRtc_Word32 GetRemoteSSRC(WebRtc_UWord32& SSRC);
WebRtc_Word32 GetRemoteCSRC(unsigned int CSRCs[kRtpCsrcSize]);
WebRtc_Word32 SetStartSequenceNumber(WebRtc_UWord16 sequenceNumber);
WebRtc_Word32 SetRTCPCName(const WebRtc_Word8 rtcpCName[]);
WebRtc_Word32 GetRTCPCName(WebRtc_Word8 rtcpCName[]);
WebRtc_Word32 GetRemoteRTCPCName(WebRtc_Word8 rtcpCName[]);
WebRtc_Word32 RegisterRtpObserver(ViERTPObserver* observer);
WebRtc_Word32 RegisterRtcpObserver(ViERTCPObserver* observer);
WebRtc_Word32 SendApplicationDefinedRTCPPacket(
const WebRtc_UWord8 subType,
WebRtc_UWord32 name,
const WebRtc_UWord8* data,
WebRtc_UWord16 dataLengthInBytes);
WebRtc_Word32 GetSendRtcpStatistics(WebRtc_UWord16& fractionLost,
WebRtc_UWord32& cumulativeLost,
WebRtc_UWord32& extendedMax,
WebRtc_UWord32& jitterSamples,
WebRtc_Word32& rttMs);
WebRtc_Word32 GetReceivedRtcpStatistics(WebRtc_UWord16& fractionLost,
WebRtc_UWord32& cumulativeLost,
WebRtc_UWord32& extendedMax,
WebRtc_UWord32& jitterSamples,
WebRtc_Word32& rttMs);
WebRtc_Word32 GetRtpStatistics(WebRtc_UWord32& bytesSent,
WebRtc_UWord32& packetsSent,
WebRtc_UWord32& bytesReceived,
WebRtc_UWord32& packetsReceived) const;
WebRtc_Word32 SetKeepAliveStatus(const bool enable,
const WebRtc_Word8 unknownPayloadType,
const WebRtc_UWord16 deltaTransmitTimeMS);
WebRtc_Word32 GetKeepAliveStatus(bool& enable,
WebRtc_Word8& unknownPayloadType,
WebRtc_UWord16& deltaTransmitTimeMS);
WebRtc_Word32 StartRTPDump(const char fileNameUTF8[1024],
RTPDirections direction);
WebRtc_Word32 StopRTPDump(RTPDirections direction);
// Implements RtcpFeedback
virtual void OnLipSyncUpdate(const WebRtc_Word32 id,
const WebRtc_Word32 audioVideoOffset);
virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 subType,
const WebRtc_UWord32 name,
const WebRtc_UWord16 length,
const WebRtc_UWord8* data);
// Implements RtpFeedback
virtual WebRtc_Word32 OnInitializeDecoder(
const WebRtc_Word32 id,
const WebRtc_Word8 payloadType,
const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels,
const WebRtc_UWord32 rate);
virtual void OnPacketTimeout(const WebRtc_Word32 id);
virtual void OnReceivedPacket(const WebRtc_Word32 id,
const RtpRtcpPacketType packetType);
virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
const RTPAliveType alive);
virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 SSRC);
virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 CSRC,
const bool added);
//-----------------------------------------------------------------
// Network
//-----------------------------------------------------------------
// Source and destination
WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtpPort,
const WebRtc_UWord16 rtcpPort,
const WebRtc_Word8* ipAddress);
WebRtc_Word32 GetLocalReceiver(WebRtc_UWord16& rtpPort,
WebRtc_UWord16& rtcpPort,
WebRtc_Word8* ipAddress) const;
WebRtc_Word32 SetSendDestination(const WebRtc_Word8* ipAddress,
const WebRtc_UWord16 rtpPort,
const WebRtc_UWord16 rtcpPort,
const WebRtc_UWord16 sourceRtpPort,
const WebRtc_UWord16 sourceRtcpPort);
WebRtc_Word32 GetSendDestination(WebRtc_Word8* ipAddress,
WebRtc_UWord16& rtpPort,
WebRtc_UWord16& rtcpPort,
WebRtc_UWord16& sourceRtpPort,
WebRtc_UWord16& sourceRtcpPort) const;
WebRtc_Word32 GetSourceInfo(WebRtc_UWord16& rtpPort,
WebRtc_UWord16& rtcpPort,
WebRtc_Word8* ipAddress,
WebRtc_UWord32 ipAddressLength);
// Start/Stop Send/Receive
WebRtc_Word32 StartSend();
WebRtc_Word32 StopSend();
bool Sending();
WebRtc_Word32 StartReceive();
WebRtc_Word32 StopReceive();
bool Receiving();
// External transport
WebRtc_Word32 RegisterSendTransport(Transport& transport);
WebRtc_Word32 DeregisterSendTransport();
WebRtc_Word32 ReceivedRTPPacket(const void* rtpPacket,
const WebRtc_Word32 rtpPacketLength);
WebRtc_Word32 ReceivedRTCPPacket(const void* rtcpPacket,
const WebRtc_Word32 rtcpPacketLength);
// IPv6
WebRtc_Word32 EnableIPv6();
bool IsIPv6Enabled();
// Source IP address and port filter
WebRtc_Word32 SetSourceFilter(const WebRtc_UWord16 rtpPort,
const WebRtc_UWord16 rtcpPort,
const WebRtc_Word8* ipAddress);
WebRtc_Word32 GetSourceFilter(WebRtc_UWord16& rtpPort,
WebRtc_UWord16& rtcpPort,
WebRtc_Word8* ipAddress) const;
// ToS
WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP, const bool useSetSockOpt);
WebRtc_Word32 GetToS(WebRtc_Word32& DSCP, bool& useSetSockOpt) const;
// GQoS
WebRtc_Word32 SetSendGQoS(const bool enable,
const WebRtc_Word32 serviceType,
const WebRtc_UWord32 maxBitrate,
const WebRtc_Word32 overrideDSCP);
WebRtc_Word32 GetSendGQoS(bool& enabled, WebRtc_Word32& serviceType,
WebRtc_Word32& overrideDSCP) const;
// Network settings
WebRtc_Word32 SetMTU(WebRtc_UWord16 mtu);
WebRtc_UWord16 MaxDataPayloadLength() const;
WebRtc_Word32 SetMaxPacketBurstSize(WebRtc_UWord16 maxNumberOfPackets);
WebRtc_Word32 SetPacketBurstSpreadState(bool enable,
const WebRtc_UWord16 framePeriodMS);
// Packet timeout notification
WebRtc_Word32 SetPacketTimeoutNotification(bool enable,
WebRtc_UWord32 timeoutSeconds);
// Periodic dead-or-alive reports
WebRtc_Word32 RegisterNetworkObserver(ViENetworkObserver* observer);
bool NetworkObserverRegistered();
WebRtc_Word32
SetPeriodicDeadOrAliveStatus(const bool enable,
const WebRtc_UWord32 sampleTimeSeconds);
WebRtc_Word32 SendUDPPacket(const WebRtc_Word8* data,
const WebRtc_UWord32 length,
WebRtc_Word32& transmittedBytes,
bool useRtcpSocket);
//-----------------------------------------------------------------
// Image processing
//-----------------------------------------------------------------
WebRtc_Word32 EnableColorEnhancement(bool enable);
//-----------------------------------------------------------------
// Register sender
//-----------------------------------------------------------------
WebRtc_Word32
RegisterSendRtpRtcpModule(RtpRtcp& sendRtpRtcpModule);
WebRtc_Word32 DeregisterSendRtpRtcpModule();
// Implements VCM::VCMReceiveCallback, getting decoded frames from
// VCM.
virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
// Implements VCM::VCMReceiveCallback, getting info about decoded
// frames from VCM.
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 pictureId);
//Implements VCM::VideoFrameStorageCallback
virtual WebRtc_Word32 StoreReceivedFrame(
const EncodedVideoData& frameToStore);
// Implements VCM::VideoReceiveStatisticsCallback
virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
const WebRtc_UWord32 frameRate);
// Implements VCM::VideoFrameTypeCallback
virtual WebRtc_Word32 FrameTypeRequest(const FrameType frameType);
// Implements VCM::VideoFrameTypeCallback
virtual WebRtc_Word32 SliceLossIndicationRequest(
const WebRtc_UWord64 pictureId);
// Implements VCM::VideoPacketRequestCallback
virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
WebRtc_UWord16 length);
#ifdef WEBRTC_SRTP
//SRTP
WebRtc_Word32 EnableSRTPSend(
const SrtpModule::CipherTypes cipherType,
const unsigned int cipherKeyLength,
const SrtpModule::AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SrtpModule::SecurityLevels level,
const WebRtc_UWord8* key,
const bool useForRTCP);
WebRtc_Word32 DisableSRTPSend();
WebRtc_Word32 EnableSRTPReceive(
const SrtpModule::CipherTypes cipherType,
const unsigned int cipherKeyLength,
const SrtpModule::AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SrtpModule::SecurityLevels level,
const WebRtc_UWord8* key,
const bool useForRTCP);
WebRtc_Word32 DisableSRTPReceive();
#endif
WebRtc_Word32 RegisterExternalEncryption(Encryption* encryption);
WebRtc_Word32 DeRegisterExternalEncryption();
//Voice Engine
WebRtc_Word32 SetVoiceChannel(WebRtc_Word32 veChannelId,
VoEVideoSync* veSyncInterface);
WebRtc_Word32 VoiceChannel();
//ViEFrameProviderBase
virtual int FrameCallbackChanged(){return -1;}
// Effect filter
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effectFilter);
WebRtc_Word32 SetInverseH263Logic(const bool enable);
// File recording
ViEFileRecorder& GetIncomingFileRecorder();
void ReleaseIncomingFileRecorder();
protected:
// Thread function according to ThreadWrapper
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();
private:
WebRtc_Word32 StartDecodeThread();
WebRtc_Word32 StopDecodeThread();
// Protection
WebRtc_Word32 ProcessNACKRequest(const bool enable);
WebRtc_Word32 ProcessFECRequest(const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC);
// General members
WebRtc_Word32 _channelId;
WebRtc_Word32 _engineId;
WebRtc_UWord32 _numberOfCores;
WebRtc_UWord8 _numSocketThreads;
// Critical sections
// Used for all registered callbacks except rendering.
CriticalSectionWrapper& _callbackCritsect;
// Use the same one as above instead of a separate one?
CriticalSectionWrapper& _dataCritsect;
// Owned modules/classes
RtpRtcp& _rtpRtcp;
#ifndef WEBRTC_EXTERNAL_TRANSPORT
UdpTransport& _socketTransport;
#endif
VideoCodingModule& _vcm;
ViEReceiver& _vieReceiver;
ViESender& _vieSender;
ViESyncModule& _vieSync; // Lip synchronization
//Uses
ProcessThread& _moduleProcessThread;
ViEDecoderObserver* _codecObserver;
bool _doKeyFrameCallbackRequest;
ViERTPObserver* _rtpObserver;
ViERTCPObserver* _rtcpObserver;
ViENetworkObserver* _networkObserver;
bool _rtpPacketTimeout;
bool _usingPacketSpread;
// Registered members
Transport* _ptrExternalTransport;
// Codec
bool _decoderReset;
bool _waitForKeyFrame;
// Decoder
ThreadWrapper* _ptrDecodeThread;
// SRTP - using separate pointers for encryption and decryption to support
// simultaneous operations.
SrtpModule* _ptrSrtpModuleEncryption;
SrtpModule* _ptrSrtpModuleDecryption;
Encryption* _ptrExternalEncryption;
// Effect filter and color enhancement
ViEEffectFilter* _effectFilter;
bool _colorEnhancement;
// Time when RTT time was last reported to VCM JB.
TickTime _vcmRTTReported;
//Recording
ViEFileRecorder _fileRecorder;
};
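// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): how the external-transport path declared
// above is typically driven. 'transport' stands in for an application object
// implementing webrtc::Transport, and the channel pointer would normally come
// from ViEChannelManager; both are assumptions for illustration.
// ----------------------------------------------------------------------------
static void SketchExternalTransport(ViEChannel* channel,
Transport& transport,
const void* rtpPacket,
const int rtpPacketLength)
{
// Outgoing RTP/RTCP is handed to 'transport' instead of the built-in
// UDP sockets.
channel->RegisterSendTransport(transport);
channel->StartReceive();
// Packets received by the application are injected back into the channel.
channel->ReceivedRTPPacket(rtpPacket, rtpPacketLength);
}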
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_

View File

@ -1,594 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_channel_manager.cc
*/
#include "vie_channel_manager.h"
#include "engine_configurations.h"
#include "vie_defines.h"
#include "critical_section_wrapper.h"
#include "trace.h"
#include "vie_channel.h"
#include "vie_encoder.h"
#include "process_thread.h"
// VoiceEngine
#include "voe_video_sync.h"
namespace webrtc
{
ViEChannelManagerScoped::ViEChannelManagerScoped(
const ViEChannelManager& vieChannelManager)
: ViEManagerScopedBase(vieChannelManager)
{
}
ViEChannel* ViEChannelManagerScoped::Channel(int vieChannelId) const
{
return static_cast<const ViEChannelManager*>
(_vieManager)->ViEChannelPtr(vieChannelId);
}
ViEEncoder* ViEChannelManagerScoped::Encoder(int vieChannelId) const
{
return static_cast<const ViEChannelManager*>
(_vieManager)->ViEEncoderPtr(vieChannelId);
}
bool ViEChannelManagerScoped::ChannelUsingViEEncoder(int channelId) const
{
return (static_cast<const ViEChannelManager*>
(_vieManager))->ChannelUsingViEEncoder( channelId);
}
// ============================================================================
// ViEChannelManager
// ============================================================================
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEChannelManager::ViEChannelManager(int engineId,
int numberOfCores,
ViEPerformanceMonitor& viePerformanceMonitor)
: _ptrChannelIdCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_engineId(engineId), _numberOfCores(numberOfCores),
_viePerformanceMonitor(viePerformanceMonitor), _channelMap(),
_freeChannelIds(new bool[kViEMaxNumberOfChannels]),
_freeChannelIdsSize(kViEMaxNumberOfChannels), _vieEncoderMap(),
_voiceSyncInterface(NULL), _voiceEngine(NULL),
_moduleProcessThread(NULL)
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engineId),
"ViEChannelManager::ViEChannelManager(engineId: %d) - Constructor",
engineId);
for (int idx = 0; idx < _freeChannelIdsSize; idx++)
{
_freeChannelIds[idx] = true;
}
}
// ----------------------------------------------------------------------------
// SetModuleProcessThread
// Initializes the thread context used by non-time-critical tasks in video channels.
// ----------------------------------------------------------------------------
void ViEChannelManager::SetModuleProcessThread( ProcessThread& moduleProcessThread)
{
assert(!_moduleProcessThread);
_moduleProcessThread = &moduleProcessThread;
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEChannelManager::~ViEChannelManager()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(_engineId),
"ViEChannelManager Destructor, engineId: %d", _engineId);
while (_channelMap.Size() != 0)
{
MapItem* item = _channelMap.First();
const int channelId = item->GetId();
item = NULL;
DeleteChannel(channelId);
}
if (_voiceSyncInterface)
_voiceSyncInterface->Release();
if (_ptrChannelIdCritsect)
{
delete _ptrChannelIdCritsect;
_ptrChannelIdCritsect = NULL;
}
if (_freeChannelIds)
{
delete[] _freeChannelIds;
_freeChannelIds = NULL;
_freeChannelIdsSize = 0;
}
}
// ----------------------------------------------------------------------------
// CreateChannel
//
// Creates a new channel. 'channelId' will be the id of the created channel
// ----------------------------------------------------------------------------
int ViEChannelManager::CreateChannel(int& channelId)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
// Get a free id for the new channel
if (GetFreeChannelId(channelId) == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"Max number of channels reached: %d", _channelMap.Size());
return -1;
}
ViEChannel* vieChannel = new ViEChannel(channelId, _engineId,
_numberOfCores,
*_moduleProcessThread);
if (vieChannel == NULL)
{
ReturnChannelId(channelId);
return -1;
}
if (vieChannel->Init() != 0)
{
// Could not init channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s could not init channel", __FUNCTION__, channelId);
ReturnChannelId(channelId);
delete vieChannel;
vieChannel = NULL;
return -1;
}
// There is no ViEEncoder for this channel, create one with default settings
ViEEncoder* vieEncoder = new ViEEncoder(_engineId, channelId,
_numberOfCores,
*_moduleProcessThread);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s(videoChannelId: %d) - Could not create a new encoder",
__FUNCTION__, channelId);
delete vieChannel;
return -1;
}
// Add to the map
if (_vieEncoderMap.Insert(channelId, vieEncoder) != 0)
{
// Could not add to the map
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not add new encoder for video channel %d",
__FUNCTION__, channelId);
delete vieChannel;
delete vieEncoder;
return -1;
}
_channelMap.Insert(channelId, vieChannel);
// Register the channel at the encoder
RtpRtcp* ptrSendRtpRtcpModule = vieEncoder->SendRtpRtcpModule();
if (vieChannel->RegisterSendRtpRtcpModule(*ptrSendRtpRtcpModule) != 0)
{
assert(false);
_vieEncoderMap.Erase(channelId);
_channelMap.Erase(channelId);
ReturnChannelId(channelId);
delete vieChannel;
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, channelId),
"%s: Could not register rtp module %d", __FUNCTION__,
channelId);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// CreateChannel
//
// Creates a channel and attaches to an already existing ViEEncoder
// ----------------------------------------------------------------------------
int ViEChannelManager::CreateChannel(int& channelId, int originalChannel)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
// Check that originalChannel already exists
ViEEncoder* vieEncoder = ViEEncoderPtr(originalChannel);
if (vieEncoder == NULL)
{
// The original channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Original channel doesn't exist", __FUNCTION__,
originalChannel);
return -1;
}
// Get a free id for the new channel
if (GetFreeChannelId(channelId) == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"Max number of channels reached: %d", _channelMap.Size());
return -1;
}
ViEChannel* vieChannel = new ViEChannel(channelId, _engineId,
_numberOfCores,
*_moduleProcessThread);
if (vieChannel == NULL)
{
ReturnChannelId(channelId);
return -1;
}
if (vieChannel->Init() != 0)
{
// Could not init channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s could not init channel", __FUNCTION__, channelId);
ReturnChannelId(channelId);
delete vieChannel;
vieChannel = NULL;
return -1;
}
if (_vieEncoderMap.Insert(channelId, vieEncoder) != 0)
{
// Could not add to the map
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not add new encoder for video channel %d",
__FUNCTION__, channelId);
ReturnChannelId(channelId);
delete vieChannel;
return -1;
}
// Set the same encoder settings for the channel as used by the master channel.
// Do this before attaching the RTP module to ensure all RTP children have the
// same codec type.
VideoCodec encoder;
if (vieEncoder->GetEncoder(encoder) == 0)
{
vieChannel->SetSendCodec(encoder);
}
_channelMap.Insert(channelId, vieChannel);
// Register the channel at the encoder
RtpRtcp* ptrSendRtpRtcpModule = vieEncoder->SendRtpRtcpModule();
if (vieChannel->RegisterSendRtpRtcpModule(*ptrSendRtpRtcpModule) != 0)
{
assert(false);
_vieEncoderMap.Erase(channelId);
_channelMap.Erase(channelId);
ReturnChannelId(channelId);
delete vieChannel;
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, channelId),
"%s: Could not register rtp module %d", __FUNCTION__,
channelId);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeleteChannel
// ----------------------------------------------------------------------------
int ViEChannelManager::DeleteChannel(int channelId)
{
ViEChannel* vieChannel = NULL;
ViEEncoder* vieEncoder = NULL;
{
// Write lock to make sure no one is using the channel
ViEManagerWriteScoped wl(*this);
// Protect the map
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
MapItem* mapItem = _channelMap.Find(channelId);
if (mapItem == NULL)
{
// No such channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s Channel doesn't exist: %d", __FUNCTION__, channelId);
return -1;
}
vieChannel = reinterpret_cast<ViEChannel*> (mapItem->GetItem());
_channelMap.Erase(mapItem);
// Deregister the channel from the ViEEncoder to stop the media flow
vieChannel->DeregisterSendRtpRtcpModule();
ReturnChannelId(channelId);
// Find the encoder object
mapItem = _vieEncoderMap.Find(channelId);
if (mapItem == NULL)
{
assert(false);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s ViEEncoder not found for channel %d", __FUNCTION__,
channelId);
return -1;
}
// Get the ViEEncoder item
vieEncoder = reinterpret_cast<ViEEncoder*> (mapItem->GetItem());
// Check if other channels are using the same encoder
if (ChannelUsingViEEncoder(channelId))
{
// Don't delete the ViEEncoder; at least one other channel is using it.
WEBRTC_TRACE(
webrtc::kTraceInfo,
webrtc::kTraceVideo,
ViEId(_engineId),
"%s ViEEncoder removed from map for channel %d, not deleted",
__FUNCTION__, channelId);
vieEncoder = NULL;
} else
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s ViEEncoder deleted for channel %d", __FUNCTION__,
channelId);
// Delete later when we've released the critsect
}
// We can't erase the item before we've checked for other channels using same ViEEncoder
_vieEncoderMap.Erase(mapItem);
}
// Leave the write critsect before deleting the objects.
// Deleting a channel can cause other objects, such as renderers, to be deleted and might take time
if (vieEncoder)
{
delete vieEncoder;
}
delete vieChannel;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s Channel %d deleted", __FUNCTION__, channelId);
return 0;
}
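// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): the create/delete cycle this manager
// exposes. 'manager' is assumed to be fully constructed with its module
// process thread already set via SetModuleProcessThread().
// ----------------------------------------------------------------------------
static void SketchChannelLifetime(ViEChannelManager& manager)
{
int channelId = -1;
if (manager.CreateChannel(channelId) != 0)
{
return;
}
// A second channel that should send the same encoded stream can share the
// first channel's ViEEncoder by passing it as the original channel.
int sharedChannelId = -1;
manager.CreateChannel(sharedChannelId, channelId);
manager.DeleteChannel(sharedChannelId);
manager.DeleteChannel(channelId);
}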
// ----------------------------------------------------------------------------
// Channel
//
// Returns a pointer to the channel with id 'channelId'
// ----------------------------------------------------------------------------
ViEChannel* ViEChannelManager::ViEChannelPtr(int channelId) const
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
MapItem* mapItem = _channelMap.Find(channelId);
if (mapItem == NULL)
{
// No such channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s Channel doesn't exist: %d", __FUNCTION__, channelId);
return NULL;
}
ViEChannel* vieChannel = reinterpret_cast<ViEChannel*> (mapItem->GetItem());
return vieChannel;
}
// ----------------------------------------------------------------------------
// GetChannels
//
// Adds all channels to channelMap
// ----------------------------------------------------------------------------
void ViEChannelManager::GetViEChannels(MapWrapper& channelMap)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
if (channelMap.Size() == 0)
{
// No channels
return;
}
// Add all items to 'channelMap'
for (MapItem* item = _channelMap.First(); item != NULL; item
= _channelMap.Next(item))
{
channelMap.Insert(item->GetId(), item->GetItem());
}
return;
}
// ----------------------------------------------------------------------------
// ViEEncoderPtr
//
// Gets the ViEEncoder used as input for videoChannelId
// ----------------------------------------------------------------------------
ViEEncoder* ViEChannelManager::ViEEncoderPtr(int videoChannelId) const
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
MapItem* mapItem = _vieEncoderMap.Find(videoChannelId);
if (mapItem == NULL)
{
// No ViEEncoder for this channel...
return NULL;
}
ViEEncoder* vieEncoder = static_cast<ViEEncoder*> (mapItem->GetItem());
return vieEncoder;
}
// ----------------------------------------------------------------------------
// GetFreeChannelId
//
// Returns true if we found a new channel id, freeChannelId, false otherwise
// ----------------------------------------------------------------------------
bool ViEChannelManager::GetFreeChannelId(int& freeChannelId)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
int idx = 0;
while (idx < _freeChannelIdsSize)
{
if (_freeChannelIds[idx] == true)
{
// We've found a free id, allocate it and return
_freeChannelIds[idx] = false;
freeChannelId = idx + kViEChannelIdBase;
return true;
}
idx++;
}
// No free channel id
freeChannelId = -1;
return false;
}
// ----------------------------------------------------------------------------
// ReturnChannelID
//
// Returns a previously allocated channel id
// ----------------------------------------------------------------------------
void ViEChannelManager::ReturnChannelId(int channelId)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
assert(channelId < kViEMaxNumberOfChannels+kViEChannelIdBase && channelId>=kViEChannelIdBase);
_freeChannelIds[channelId - kViEChannelIdBase] = true;
}
// ----------------------------------------------------------------------------
// ChannelUsingViEEncoder
//
// Returns true if at least one other channel is using the same encoder
// ----------------------------------------------------------------------------
bool ViEChannelManager::ChannelUsingViEEncoder(int channelId) const
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
MapItem* channelItem = _vieEncoderMap.Find(channelId);
if (channelItem == NULL)
{
// No ViEEncoder for this channel...
return false;
}
ViEEncoder* channelEncoder =
static_cast<ViEEncoder*> (channelItem->GetItem());
// Loop through all other channels to see if anyone points at the same ViEEncoder
MapItem* mapItem = _vieEncoderMap.First();
while (mapItem)
{
ViEEncoder* vieEncoder = static_cast<ViEEncoder*> (mapItem->GetItem());
if (mapItem->GetId() != channelId)
{
if (channelEncoder == static_cast<ViEEncoder*> (mapItem->GetItem()))
{
// We've found another channel using the same ViEEncoder
return true;
}
}
mapItem = _vieEncoderMap.Next(mapItem);
}
return false;
}
// ----------------------------------------------------------------------------
// SetVoiceEngine
//
// Set the voice engine instance to be used by all video channels. We are interested in the voice engine sync interfaces
// ----------------------------------------------------------------------------
int ViEChannelManager::SetVoiceEngine(VoiceEngine* voiceEngine)
{
// Write lock to make sure no one is using the channel
ViEManagerWriteScoped wl(*this);
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
VoEVideoSync* syncInterface = NULL;
if (voiceEngine)
{
// Get new sync interface;
syncInterface = VoEVideoSync::GetInterface(voiceEngine);
if (!syncInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s Can't get audio sync interface from VoiceEngine.",
__FUNCTION__);
return -1;
}
}
for (MapItem* item = _channelMap.First(); item != NULL; item
= _channelMap.Next(item))
{
ViEChannel* channel = static_cast<ViEChannel*> (item->GetItem());
assert(channel);
channel->SetVoiceChannel(-1, syncInterface);
}
if (_voiceSyncInterface)
{
_voiceSyncInterface->Release();
}
_voiceEngine = voiceEngine;
_voiceSyncInterface = syncInterface;
return 0;
}
VoiceEngine* ViEChannelManager::GetVoiceEngine()
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
return _voiceEngine;
}
// ----------------------------------------------------------------------------
// ConnectVoiceChannel
//
// Enables lip sync of the channel.
// ----------------------------------------------------------------------------
int ViEChannelManager::ConnectVoiceChannel(int channelId, int audioChannelId)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
if (_voiceSyncInterface == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, channelId),
"No VoE set");
return -1;
}
ViEChannel* channel = ViEChannelPtr(channelId);
if (!channel)
{
return -1;
}
return channel->SetVoiceChannel(audioChannelId, _voiceSyncInterface);
}
// ----------------------------------------------------------------------------
// DisconnectVoiceChannel
//
// Disables lip sync of the channel.
// ----------------------------------------------------------------------------
int ViEChannelManager::DisconnectVoiceChannel(int channelId)
{
CriticalSectionScoped cs(*_ptrChannelIdCritsect);
ViEChannel* channel = ViEChannelPtr(channelId);
if (channel)
{
channel->SetVoiceChannel(-1, NULL);
return 0;
} else
{
return -1;
}
}
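// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): enabling lip sync by handing the manager a
// VoiceEngine and pairing a video channel with an audio channel. The engine
// pointer and the two channel ids are assumptions.
// ----------------------------------------------------------------------------
static void SketchLipSync(ViEChannelManager& manager,
VoiceEngine* voiceEngine,
const int videoChannelId,
const int audioChannelId)
{
if (manager.SetVoiceEngine(voiceEngine) != 0)
{
return;
}
manager.ConnectVoiceChannel(videoChannelId, audioChannelId);
// ... when synchronization is no longer wanted:
manager.DisconnectVoiceChannel(videoChannelId);
}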
} // namespace webrtc

View File

@ -1,102 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_channel_manager.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_MANAGER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_MANAGER_H_
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "typedefs.h"
#include "map_wrapper.h"
#include "vie_manager_base.h"
namespace webrtc
{
class CriticalSectionWrapper;
//class VoiceEngine;
class ProcessThread;
class ViEChannel;
class VoEVideoSync;
class ViEPerformanceMonitor;
class ViEEncoder;
class VoiceEngine;
// ------------------------------------------------------------------
// ViEChannelManager
// ------------------------------------------------------------------
class ViEChannelManager: private ViEManagerBase
{
friend class ViEChannelManagerScoped;
public:
ViEChannelManager(int engineId, int numberOfCores,
ViEPerformanceMonitor& viePerformanceMonitor);
~ViEChannelManager();
void SetModuleProcessThread(ProcessThread& moduleProcessThread);
int CreateChannel(int& channelId);
int CreateChannel(int& channelId, int originalChannel);
int DeleteChannel(int channelId);
int SetVoiceEngine(VoiceEngine* voiceEngine);
int ConnectVoiceChannel(int channelId, int audioChannelId);
int DisconnectVoiceChannel(int channelId);
VoiceEngine* GetVoiceEngine();
private:
// Used by ViEChannelScoped, forcing a manager user to use scoped
ViEChannel* ViEChannelPtr(int channelId) const;
void GetViEChannels(MapWrapper& channelMap);
// Methods used by ViECaptureScoped and ViEEncoderScoped
ViEEncoder* ViEEncoderPtr(int videoChannelId) const;
bool GetFreeChannelId(int& freeChannelId);
void ReturnChannelId(int channelId);
// Returns true if at least one other channel uses the same ViEEncoder as channelId
bool ChannelUsingViEEncoder(int channelId) const;
// Members
CriticalSectionWrapper* _ptrChannelIdCritsect; // protecting _channelMap and _freeChannelIds
int _engineId;
int _numberOfCores;
ViEPerformanceMonitor& _viePerformanceMonitor;
MapWrapper _channelMap;
bool* _freeChannelIds;
int _freeChannelIdsSize;
// Encoder
MapWrapper _vieEncoderMap; // Channel id -> ViEEncoder
VoEVideoSync* _voiceSyncInterface;
VoiceEngine* _voiceEngine;
ProcessThread* _moduleProcessThread;
};
// ------------------------------------------------------------------
// ViEChannelManagerScoped
// ------------------------------------------------------------------
class ViEChannelManagerScoped: private ViEManagerScopedBase
{
public:
ViEChannelManagerScoped(const ViEChannelManager& vieChannelManager);
ViEChannel* Channel(int vieChannelId) const;
ViEEncoder* Encoder(int vieChannelId) const;
// Returns true if at least one other channel uses the same ViEEncoder as channelId
bool ChannelUsingViEEncoder(int channelId) const;
};
} //namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_MANAGER_H_

View File

@ -1,977 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_codec_impl.cc
*/
#include "vie_codec_impl.h"
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "video_coding.h"
#include "trace.h"
#include "vie_errors.h"
#include "vie_impl.h"
#include "vie_channel.h"
#include "vie_channel_manager.h"
#include "vie_encoder.h"
#include "vie_input_manager.h"
#include "vie_capturer.h"
#include <string.h>
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViECodec* ViECodec::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViECodecImpl* vieCodecImpl = vieImpl;
(*vieCodecImpl)++; // Increase ref count
return vieCodecImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViECodecImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViECodecImpl::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViECodec released too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViECodec reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViECodecImpl::ViECodecImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViECodecImpl::ViECodecImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViECodecImpl::~ViECodecImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViECodecImpl::~ViECodecImpl() Dtor");
}
// Available codecs
// ----------------------------------------------------------------------------
// NumberOfCodecs
//
// Returns the number of available codecs
// ----------------------------------------------------------------------------
int ViECodecImpl::NumberOfCodecs() const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
// +2 because of FEC(RED and ULPFEC)
return (int) (VideoCodingModule::NumberOfCodecs() + 2);
}
// ----------------------------------------------------------------------------
// GetCodec
//
// Return the video codec with listNumber
// ----------------------------------------------------------------------------
int ViECodecImpl::GetCodec(const unsigned char listNumber,
VideoCodec& videoCodec) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(listNumber: %d, codecType: %d)", __FUNCTION__, listNumber);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
if (listNumber == VideoCodingModule::NumberOfCodecs())
{
memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
strcpy(videoCodec.plName, "RED");
videoCodec.codecType = kVideoCodecRED;
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
}
else if (listNumber == VideoCodingModule::NumberOfCodecs() + 1)
{
memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
strcpy(videoCodec.plName, "ULPFEC");
videoCodec.codecType = kVideoCodecULPFEC;
videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
}
else if (VideoCodingModule::Codec(listNumber, &videoCodec)
!= VCM_OK)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Could not get codec for listNumber: %u", __FUNCTION__,
listNumber);
SetLastError(kViECodecInvalidArgument);
return -1;
}
return 0;
}
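// ----------------------------------------------------------------------------
// Usage sketch (illustrative only): listing every codec the engine reports,
// including the RED and ULPFEC entries appended after the VCM codecs.
// 'codec' is assumed to have been obtained through ViECodec::GetInterface()
// as shown above.
// ----------------------------------------------------------------------------
static void SketchListCodecs(ViECodec* codec)
{
const int numberOfCodecs = codec->NumberOfCodecs();
for (int i = 0; i < numberOfCodecs; i++)
{
VideoCodec videoCodec;
if (codec->GetCodec(static_cast<unsigned char>(i), videoCodec) == 0)
{
// videoCodec.plName now holds the codec name, e.g. "RED" or "ULPFEC".
}
}
}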
// Codec settings
// ----------------------------------------------------------------------------
// SetSendCodec
//
// Sets the send codec for videoChannel
// This call will affect all channels using the same encoder
// ----------------------------------------------------------------------------
int ViECodecImpl::SetSendCodec(const int videoChannel,
const VideoCodec& videoCodec)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId,videoChannel),
"%s(videoChannel: %d, codecType: %d)", __FUNCTION__,
videoChannel, videoCodec.codecType);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId, videoChannel),
"%s: codec: %d, plType: %d, width: %d, height: %d, bitrate: %d"
"maxBr: %d, minBr: %d, frameRate: %d)", __FUNCTION__,
videoCodec.codecType, videoCodec.plType, videoCodec.width,
videoCodec.height, videoCodec.startBitrate,
videoCodec.maxBitrate, videoCodec.minBitrate,
videoCodec.maxFramerate);
if (CodecValid(videoCodec) == false)
{
// Error logged
SetLastError(kViECodecInvalidCodec);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
// Set a maxBitrate if the user hasn't...
VideoCodec videoCodecInternal;
memcpy(&videoCodecInternal, &videoCodec, sizeof(webrtc::VideoCodec));
if (videoCodecInternal.maxBitrate == 0)
{
// Max is one bit per pixel ...
videoCodecInternal.maxBitrate = (videoCodecInternal.width
* videoCodecInternal.height * videoCodecInternal.maxFramerate)
/ 1000;
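        // Example (illustrative values): for 352x288 at 30 fps, one bit per
        // pixel per frame gives 352 * 288 * 30 / 1000 = 3041 kbps as the
        // derived maxBitrate.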
if (videoCodecInternal.startBitrate > videoCodecInternal.maxBitrate)
{
            // ... but shouldn't limit the set start bitrate.
videoCodecInternal.maxBitrate = videoCodecInternal.startBitrate;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId,
videoChannel),
"%s: New max bitrate set to %d kbps", __FUNCTION__,
videoCodecInternal.maxBitrate);
}
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
assert(false);
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
                     ViEId(_instanceId, videoChannel),
                     "%s: No encoder found for channel %d", __FUNCTION__,
                     videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
// We need to check if the codec settings changed,
// then we need a new SSRC
bool newRtpStream = false;
VideoCodec encoder;
vieEncoder->GetEncoder(encoder);
if (encoder.codecType != videoCodecInternal.codecType ||
encoder.width != videoCodecInternal.width ||
encoder.height != videoCodecInternal.height)
{
if (cs.ChannelUsingViEEncoder(videoChannel))
{
// We don't allow changing codec type or size when several
// channels share encoder.
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Settings differs from other channels using encoder",
__FUNCTION__);
SetLastError(kViECodecInUse);
return -1;
}
newRtpStream = true;
}
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* frameProvider = NULL;
// Stop the media flow while reconfiguring
vieEncoder->Pause();
// Check if we have a frame provider that is a camera and can provide this
// codec for us.
bool useCaptureDeviceAsEncoder = false;
frameProvider = is.FrameProvider(vieEncoder);
if (frameProvider)
{
ViECapturer* vieCapture = static_cast<ViECapturer *> (frameProvider);
// Try to get preencoded. Nothing to do if it is not supported.
if (vieCapture && vieCapture->PreEncodeToViEEncoder(videoCodecInternal,
*vieEncoder,
videoChannel) == 0)
{
useCaptureDeviceAsEncoder = true;
}
}
// Update the encoder settings if we are not using a capture device capable
// of this codec.
if (!useCaptureDeviceAsEncoder
&& vieEncoder->SetEncoder(videoCodecInternal) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not change encoder for channel %d", __FUNCTION__,
videoChannel);
SetLastError(kViECodecUnknownError);
return -1;
}
// Give the channel the new information
if (vieChannel->SetSendCodec(videoCodecInternal, newRtpStream) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not set send codec for channel %d", __FUNCTION__,
videoChannel);
SetLastError(kViECodecUnknownError);
return -1;
}
// Update the protection mode, we might be switching NACK/FEC
vieEncoder->UpdateProtectionMethod();
// Get new best format for frame provider
if (frameProvider)
{
frameProvider->FrameCallbackChanged();
}
// Restart the media flow
if (newRtpStream)
{
// Stream settings changed, make sure we get a key frame
vieEncoder->SendKeyFrame();
}
vieEncoder->Restart();
return 0;
}
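// Usage sketch (illustrative only): a typical way for an application to start
// from one of the enumerated codecs, adjust resolution and bitrates, and
// apply it to a channel. codecIface and videoChannel are assumed to exist
// already.
//
//   webrtc::VideoCodec videoCodec;
//   if (codecIface->GetCodec(0, videoCodec) == 0)
//   {
//       videoCodec.width = 352;
//       videoCodec.height = 288;
//       videoCodec.maxFramerate = 30;
//       videoCodec.startBitrate = 300;  // kbps
//       videoCodec.minBitrate = 30;     // kViEMinCodecBitrate
//       videoCodec.maxBitrate = 0;      // 0 lets SetSendCodec derive a max
//       if (codecIface->SetSendCodec(videoChannel, videoCodec) != 0)
//       {
//           // Check the ViE last-error code for the failure reason.
//       }
//   }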
// ----------------------------------------------------------------------------
// GetSendCodec
//
// Gets the current send codec
// ----------------------------------------------------------------------------
int ViECodecImpl::GetSendCodec(const int videoChannel,
VideoCodec& videoCodec) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(videoChannel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: No encoder for channel %d", __FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
return vieEncoder->GetEncoder(videoCodec);
}
// ----------------------------------------------------------------------------
// SetReceiveCodec
//
// Registers a possible receive codec
// ----------------------------------------------------------------------------
int ViECodecImpl::SetReceiveCodec(const int videoChannel,
const VideoCodec& videoCodec)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(videoChannel: %d, codecType: %d)", __FUNCTION__,
videoChannel, videoCodec.codecType);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_instanceId, videoChannel),
"%s: codec: %d, plType: %d, width: %d, height: %d, bitrate: %d,"
"maxBr: %d, minBr: %d, frameRate: %d", __FUNCTION__,
videoCodec.codecType, videoCodec.plType, videoCodec.width,
videoCodec.height, videoCodec.startBitrate,
videoCodec.maxBitrate, videoCodec.minBitrate,
videoCodec.maxFramerate);
if (CodecValid(videoCodec) == false)
{
// Error logged
SetLastError(kViECodecInvalidCodec);
return -1;
}
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->SetReceiveCodec(videoCodec) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId,
videoChannel),
"%s: Could not set receive codec for channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// GetReceiveCodec
//
// Gets the current receive codec
// ----------------------------------------------------------------------------
int ViECodecImpl::GetReceiveCodec(const int videoChannel,
VideoCodec& videoCodec) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(videoChannel: %d, codecType: %d)", __FUNCTION__,
videoChannel, videoCodec.codecType);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->GetReceiveCodec(videoCodec) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// GetCodecConfigParameters
//
// Gets the codec config parameters to be sent out-of-band.
// ----------------------------------------------------------------------------
int ViECodecImpl::GetCodecConfigParameters(
const int videoChannel,
unsigned char configParameters[kConfigParameterSize],
unsigned char& configParametersSize) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(videoChannel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: No encoder for channel %d", __FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->GetCodecConfigParameters(configParameters,
configParametersSize) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
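// Usage sketch (illustrative only): fetching the out-of-band codec
// configuration for a channel. kConfigParameterSize (declared with the
// ViECodec interface) bounds the buffer; codecIface and videoChannel are
// assumed to exist already.
//
//   unsigned char config[webrtc::kConfigParameterSize];
//   unsigned char configSize = 0;
//   if (codecIface->GetCodecConfigParameters(videoChannel, config,
//                                            configSize) == 0)
//   {
//       // The first configSize bytes of config are valid and can be
//       // signaled out-of-band to the remote side.
//   }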
// ----------------------------------------------------------------------------
// SetImageScaleStatus
//
// Enables scaling of the encoded image instead of padding black border or
// cropping
// ----------------------------------------------------------------------------
int ViECodecImpl::SetImageScaleStatus(const int videoChannel, const bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(videoChannel: %d, enable: %d)", __FUNCTION__, videoChannel,
enable);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->ScaleInputImage(enable) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// Codec statistics
// ----------------------------------------------------------------------------
// GetSendCodecStastistics
//
// Get codec statistics for outgoing stream
// ----------------------------------------------------------------------------
int ViECodecImpl::GetSendCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(videoChannel %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: No send codec for channel %d", __FUNCTION__,
videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->SendCodecStatistics((WebRtc_UWord32&) keyFrames,
(WebRtc_UWord32&) deltaFrames) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
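// Usage sketch (illustrative only): reading the outgoing frame statistics for
// a channel (codecIface and videoChannel assumed to exist already).
//
//   unsigned int keyFrames = 0;
//   unsigned int deltaFrames = 0;
//   if (codecIface->GetSendCodecStastistics(videoChannel, keyFrames,
//                                           deltaFrames) == 0)
//   {
//       // keyFrames and deltaFrames now hold the number of encoded frames
//       // of each type for this channel.
//   }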
// ----------------------------------------------------------------------------
// GetReceiveCodecStastistics
//
// Get codec statistics for incoming stream
// ----------------------------------------------------------------------------
int ViECodecImpl::GetReceiveCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(videoChannel: %d, codecType: %d)", __FUNCTION__,
videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->ReceiveCodecStatistics((WebRtc_UWord32&) keyFrames,
(WebRtc_UWord32&) deltaFrames) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// Callbacks
// ----------------------------------------------------------------------------
// SetKeyFrameRequestCallbackStatus
//
// Enables a callback for key frame requests instead of using RTCP
// ----------------------------------------------------------------------------
int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int videoChannel,
const bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s(videoChannel: %d)",
__FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->EnableKeyFrameRequestCallback(enable) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// SetSignalKeyPacketLossStatus
//
// Triggers a key frame request when there is packet loss in a received key
// frame
// ----------------------------------------------------------------------------
int ViECodecImpl::SetSignalKeyPacketLossStatus(const int videoChannel,
const bool enable,
const bool onlyKeyFrames)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(videoChannel: %d, enable: %d, onlyKeyFrames: %d)",
                 __FUNCTION__, videoChannel, enable, onlyKeyFrames);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->SetSignalPacketLossStatus(enable, onlyKeyFrames) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// RegisterEncoderObserver
// ----------------------------------------------------------------------------
int ViECodecImpl::RegisterEncoderObserver(const int videoChannel,
ViEEncoderObserver& observer)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: No encoder for channel %d", __FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->RegisterCodecObserver(&observer) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not register codec observer at channel",
__FUNCTION__);
SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterEncoderObserver
// ----------------------------------------------------------------------------
int ViECodecImpl::DeregisterEncoderObserver(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: No encoder for channel %d", __FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->RegisterCodecObserver(NULL) != 0)
{
SetLastError(kViECodecObserverNotRegistered);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// RegisterDecoderObserver
// ----------------------------------------------------------------------------
int ViECodecImpl::RegisterDecoderObserver(const int videoChannel,
ViEDecoderObserver& observer)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->RegisterCodecObserver(&observer) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Could not register codec observer at channel",
__FUNCTION__);
SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterDecoderObserver
// ----------------------------------------------------------------------------
int ViECodecImpl::DeregisterDecoderObserver(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId), "%s",
__FUNCTION__);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->RegisterCodecObserver(NULL) != 0)
{
SetLastError(kViECodecObserverNotRegistered);
return -1;
}
return 0;
}
// Force a key frame
// ----------------------------------------------------------------------------
// SendKeyFrame
//
// Force the next frame to be a key frame
// ----------------------------------------------------------------------------
int ViECodecImpl::SendKeyFrame(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieEncoder->SendKeyFrame() != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// WaitForFirstKeyFrame
//
// Configures whether decoding should wait for the first key frame
// ----------------------------------------------------------------------------
int ViECodecImpl::WaitForFirstKeyFrame(const int videoChannel, const bool wait)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d, wait: %d)", __FUNCTION__, videoChannel,
wait);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->WaitForKeyFrame(wait) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// H263 Specific
// ----------------------------------------------------------------------------
// SetInverseH263Logic
//
// Used to interoperate with old MS H.263 implementations where key frames are
// marked as delta frames and vice versa.
// ----------------------------------------------------------------------------
int ViECodecImpl::SetInverseH263Logic(int videoChannel, bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId,videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vieChannel->SetInverseH263Logic(enable) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// CodecValid
// ----------------------------------------------------------------------------
bool ViECodecImpl::CodecValid(const VideoCodec& videoCodec)
{
// Check plName matches codecType
if (videoCodec.codecType == kVideoCodecRED)
{
#if defined(WIN32)
if (_strnicmp(videoCodec.plName, "red", 3) == 0)
#elif defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
if (strncasecmp(videoCodec.plName, "red",3) == 0)
#endif
{
// We only care about the type and name for red
return true;
}
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Codec type doesn't match plName", videoCodec.plType);
return false;
}
else if (videoCodec.codecType == kVideoCodecULPFEC)
{
#if defined(WIN32)
if (_strnicmp(videoCodec.plName, "ULPFEC", 6) == 0)
#elif defined(WEBRTC_MAC_INTEL)|| defined(WEBRTC_LINUX)
if (strncasecmp(videoCodec.plName, "ULPFEC",6) == 0)
#endif
{
// We only care about the type and name for ULPFEC
return true;
}
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Codec type doesn't match plName", videoCodec.plType);
return false;
}
else if ((videoCodec.codecType == kVideoCodecH263 &&
strncmp(videoCodec.plName, "H263", 4) == 0)
|| (videoCodec.codecType == kVideoCodecH263
&& strncmp(videoCodec.plName, "H263-1998", 9) == 0)
|| (videoCodec.codecType == kVideoCodecVP8
&& strncmp(videoCodec.plName, "VP8", 4) == 0)
|| (videoCodec.codecType == kVideoCodecI420
&& strncmp(videoCodec.plName, "I420", 4) == 0)
|| (videoCodec.codecType == kVideoCodecH264
&& strncmp(videoCodec.plName, "H264", 4) == 0))
// || (videoCodec.codecType == kVideoCodecMPEG4
// && strncmp(videoCodec.plName, "MP4V-ES", 7) == 0)
{
// ok
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Codec type doesn't match plName", videoCodec.plType);
return false;
}
// pltype
    if (videoCodec.plType == 0 || videoCodec.plType > 127)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Invalid codec payload type: %d", videoCodec.plType);
return false;
}
// Size
if (videoCodec.width > kViEMaxCodecWidth || videoCodec.height
> kViEMaxCodecHeight)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Invalid codec size: %u x %u", videoCodec.width,
videoCodec.height);
return false;
}
if (videoCodec.startBitrate < kViEMinCodecBitrate)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Invalid startBitrate: %u", videoCodec.startBitrate);
return false;
}
if (videoCodec.minBitrate < kViEMinCodecBitrate)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Invalid minBitrate: %u", videoCodec.minBitrate);
return false;
}
if (videoCodec.codecType == kVideoCodecH263)
{
if ((videoCodec.width == 704 && videoCodec.height == 576)
|| (videoCodec.width == 352 && videoCodec.height == 288)
|| (videoCodec.width == 176 && videoCodec.height == 144)
|| (videoCodec.width == 128 && videoCodec.height == 96))
{
// ok
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Invalid size for H.263");
return false;
}
}
return true;
}
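// Example (illustrative only) of a VideoCodec that passes the checks above:
// plName matches the codecType, plType is in 1..127, the size is within
// kViEMaxCodecWidth/kViEMaxCodecHeight and the bitrates are at least
// kViEMinCodecBitrate.
//
//   webrtc::VideoCodec vp8;
//   memset(&vp8, 0, sizeof(vp8));
//   strcpy(vp8.plName, "VP8");
//   vp8.codecType = webrtc::kVideoCodecVP8;
//   vp8.plType = 100;
//   vp8.width = 640;
//   vp8.height = 480;
//   vp8.maxFramerate = 30;
//   vp8.startBitrate = 300;
//   vp8.minBitrate = 30;
//   vp8.maxBitrate = 1000;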
} // namespace webrtc

View File

@@ -1,110 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_codec_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CODEC_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CODEC_IMPL_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
#include "vie_codec.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViECodecImpl
// ----------------------------------------------------------------------------
class ViECodecImpl : public virtual ViESharedData,
public ViECodec,
public ViERefCount
{
public:
virtual int Release();
// Available codecs
virtual int NumberOfCodecs() const;
virtual int GetCodec(const unsigned char listNumber,
VideoCodec& videoCodec) const;
// Codec settings
virtual int SetSendCodec(const int videoChannel,
const VideoCodec& videoCodec);
virtual int GetSendCodec(const int videoChannel,
VideoCodec& videoCodec) const;
virtual int SetReceiveCodec(const int videoChannel,
const VideoCodec& videoCodec);
virtual int GetReceiveCodec(const int videoChannel,
VideoCodec& videoCodec) const;
virtual int GetCodecConfigParameters(
const int videoChannel,
unsigned char configParameters[kConfigParameterSize],
unsigned char& configParametersSize) const;
// Input image scaling
virtual int SetImageScaleStatus(const int videoChannel, const bool enable);
// Codec statistics
virtual int GetSendCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const;
virtual int GetReceiveCodecStastistics(const int videoChannel,
unsigned int& keyFrames,
unsigned int& deltaFrames) const;
// Callbacks
virtual int SetKeyFrameRequestCallbackStatus(const int videoChannel,
const bool enable);
virtual int SetSignalKeyPacketLossStatus(const int videoChannel,
const bool enable,
const bool onlyKeyFrames = false);
virtual int RegisterEncoderObserver(const int videoChannel,
ViEEncoderObserver& observer);
virtual int DeregisterEncoderObserver(const int videoChannel);
virtual int RegisterDecoderObserver(const int videoChannel,
ViEDecoderObserver& observer);
virtual int DeregisterDecoderObserver(const int videoChannel);
// Key frame settings
virtual int SendKeyFrame(const int videoChannel);
virtual int WaitForFirstKeyFrame(const int videoChannel, const bool wait);
// H263 Specific
virtual int SetInverseH263Logic(int videoChannel, bool enable);
protected:
ViECodecImpl();
virtual ~ViECodecImpl();
private:
bool CodecValid(const VideoCodec& videoCodec);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CODEC_IMPL_H_

View File

@@ -1,266 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_defines.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
#include "engine_configurations.h"
#ifdef WEBRTC_MAC_INTEL
#include <stdio.h>
#include <unistd.h>
#endif
#ifdef ANDROID
#include <pthread.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <linux/net.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <sys/time.h>
#endif
namespace webrtc
{
// ===================================================
// ViE Defines
// ===================================================
// General
enum { kViEMinKeyRequestIntervalMs = 300};
// ViEBase
enum { kViEMaxNumberOfChannels = 4};
enum { kViEVersionMaxMessageSize = 1024 };
enum { kViEMaxModuleVersionSize = 960 };
// ViECapture
enum { kViEMaxCaptureDevices=10};
// Width used if no send codec has been set when a capture device is started
enum { kViECaptureDefaultWidth = 352};
// Height used if no send codec has been set when a capture device is started
enum { kViECaptureDefaultHeight = 288};
enum { kViECaptureDefaultFramerate = 30};
enum { kViECaptureMaxSnapshotWaitTimeMs = 500 };
// ViECodec
enum { kViEMaxCodecWidth = 1920};
enum { kViEMaxCodecHeight = 1200};
enum { kViEMaxCodecFramerate = 60};
enum { kViEMinCodecBitrate = 30};
// ViEEncryption
enum { kViEMaxSrtpKeyLength = 30};
enum { kViEMinSrtpEncryptLength = 16};
enum { kViEMaxSrtpEncryptLength = 256};
enum { kViEMaxSrtpAuthSh1Length = 20};
enum { kViEMaxSrtpTagAuthNullLength = 12};
enum { kViEMaxSrtpKeyAuthNullLength = 256};
// ViEExternalCodec
// ViEFile
enum { kViEMaxFilePlayers = 3};
// ViEImageProcess
// ViENetwork
enum { kViEMaxMtu = 1500};
enum { kViESocketThreads = 1};
enum { kViENumReceiveSocketBuffers = 500};
// ViERender
// Max valid time set in SetRenderTimeoutImage
enum { kViEMaxRenderTimeoutTimeMs = 10000};
// Min valid time set in SetRenderTimeoutImage
enum { kViEMinRenderTimeoutTimeMs = 33};
enum { kViEDefaultRenderDelayMs = 10};
// ViERTP_RTCP
enum { kNackHistorySize = 400};
// Id definitions
enum {
kViEChannelIdBase=0x0,
kViEChannelIdMax=0xFF,
kViECaptureIdBase=0x1001,
kViECaptureIdMax=0x10FF,
kViEFileIdBase=0x2000,
kViEFileIdMax=0x200F,
kViEDummyChannelId=0xFFFF
};
// Module id
// Create a unique id based on the ViE instance id and the
// channel id. ViE id > 0 and 0 <= channel id <= 255
inline int ViEId(const int vieId, const int channelId = -1)
{
if (channelId == -1)
{
return (int) ((vieId << 16) + kViEDummyChannelId);
}
return (int) ((vieId << 16) + channelId);
}
inline int ViEModuleId(const int vieId, const int channelId = -1)
{
if (channelId == -1)
{
return (int) ((vieId << 16) + kViEDummyChannelId);
}
return (int) ((vieId << 16) + channelId);
}
inline int ChannelId(const int moduleId)
{
return (int) (moduleId & 0xffff);
}
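// Example (illustrative): ViEId(5, 2) packs instance 5 and channel 2 into
// (5 << 16) + 2 = 0x50002, and ChannelId(0x50002) recovers 2. With no channel
// given, ViEId(5) yields (5 << 16) + kViEDummyChannelId = 0x5FFFF.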
// ============================================================================
// Platform specifics
// ============================================================================
//-------------------------------------
// Windows
//-------------------------------------
#if defined(_WIN32)
// Build information macros
#if defined(_DEBUG)
#define BUILDMODE TEXT("d")
#elif defined(DEBUG)
#define BUILDMODE TEXT("d")
#elif defined(NDEBUG)
#define BUILDMODE TEXT("r")
#else
#define BUILDMODE TEXT("?")
#endif
#define BUILDTIME TEXT(__TIME__)
#define BUILDDATE TEXT(__DATE__)
// example: "Oct 10 2002 12:05:30 r"
#define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
#define RENDER_MODULE_TYPE kRenderWindows
// Warning pragmas
// new behavior: elements of array 'XXX' will be default initialized
#pragma warning(disable: 4351)
// 'this' : used in base member initializer list
#pragma warning(disable: 4355)
// frame pointer register 'ebp' modified by inline assembly code
#pragma warning(disable: 4731)
// Include libraries
#pragma comment( lib, "winmm.lib" )
#ifndef WEBRTC_EXTERNAL_TRANSPORT
#pragma comment( lib, "ws2_32.lib" )
#pragma comment( lib, "Iphlpapi.lib" ) // _GetAdaptersAddresses
#endif
#endif
//-------------------------------------
// Mac
//-------------------------------------
#ifdef WEBRTC_MAC_INTEL
#define SLEEP(x) usleep(x * 1000)
// Build information macros
#define TEXT(x) x
#if defined(_DEBUG)
#define BUILDMODE TEXT("d")
#elif defined(DEBUG)
#define BUILDMODE TEXT("d")
#elif defined(NDEBUG)
#define BUILDMODE TEXT("r")
#else
#define BUILDMODE TEXT("?")
#endif
#define BUILDTIME TEXT(__TIME__)
#define BUILDDATE TEXT(__DATE__)
// example: "Oct 10 2002 12:05:30 r"
#define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
#define RENDER_MODULE_TYPE kRenderWindows
#endif
//#define webrtc::kFileFormatAviFile 3
//#define __LINUX__ // needed for InterObjects
//-------------------------------------
// Linux
//-------------------------------------
#ifndef WEBRTC_ANDROID
#ifdef WEBRTC_LINUX
// Build information macros
#if defined(_DEBUG)
#define BUILDMODE "d"
#elif defined(DEBUG)
#define BUILDMODE "d"
#elif defined(NDEBUG)
#define BUILDMODE "r"
#else
#define BUILDMODE "?"
#endif
#define BUILDTIME __TIME__
#define BUILDDATE __DATE__
// example: "Oct 10 2002 12:05:30 r"
#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
#endif // ifdef WEBRTC_LINUX
#endif  // ifndef WEBRTC_ANDROID
#ifdef WEBRTC_ANDROID
#define FAR
#define __cdecl
#if defined(_DEBUG)
#define BUILDMODE "d"
#elif defined(DEBUG)
#define BUILDMODE "d"
#elif defined(NDEBUG)
#define BUILDMODE "r"
#else
#define BUILDMODE "?"
#endif
#define BUILDTIME __TIME__
#define BUILDDATE __DATE__
// example: "Oct 10 2002 12:05:30 r"
#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
#endif  // ifdef WEBRTC_ANDROID
} //namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_

File diff suppressed because it is too large

View File

@@ -1,186 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_encoder.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCODER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCODER_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "vie_frame_provider_base.h"
#include "vie_file_recorder.h"
#include "rtp_rtcp_defines.h"
#include "video_coding_defines.h"
#include "video_processing.h"
#include "common_types.h"
namespace webrtc {
class CriticalSectionWrapper;
class ProcessThread;
class RtpRtcp;
class ViEEffectFilter;
class VideoCodingModule;
class ViEEncoderObserver;
class ViEEncoder: public ViEFrameCallback, // New frame delivery
public RtpVideoFeedback, // Feedback from RTP module
public RtcpFeedback, // RTP/RTCP Module
public VCMPacketizationCallback, // Callback from VCM
public VCMProtectionCallback, // Callback from VCM
public VCMSendStatisticsCallback // Callback from VCM
{
public:
ViEEncoder(WebRtc_Word32 engineId, WebRtc_Word32 channelId,
WebRtc_UWord32 numberOfCores,
ProcessThread& moduleProcessThread);
~ViEEncoder();
// Drops incoming packets
void Pause();
void Restart();
WebRtc_Word32 DropDeltaAfterKey(bool enable);
// Codec settings
WebRtc_UWord8 NumberOfCodecs();
WebRtc_Word32 GetCodec(WebRtc_UWord8 listIndex, VideoCodec& videoCodec);
WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* encoder,
WebRtc_UWord8 plType);
WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 plType);
WebRtc_Word32 SetEncoder(const VideoCodec& videoCodec);
WebRtc_Word32 GetEncoder(VideoCodec& videoCodec);
WebRtc_Word32 GetCodecConfigParameters(
unsigned char configParameters[kConfigParameterSize],
unsigned char& configParametersSize);
// Scale or crop/pad image
WebRtc_Word32 ScaleInputImage(bool enable);
// RTP settings
RtpRtcp* SendRtpRtcpModule();
// Implementing ViEFrameCallback
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frameDelay);
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate);
virtual void ProviderDestroyed(int id) { return; }
WebRtc_Word32 EncodeFrame(VideoFrame& videoFrame);
WebRtc_Word32 SendKeyFrame();
WebRtc_Word32 SendCodecStatistics(WebRtc_UWord32& numKeyFrames,
WebRtc_UWord32& numDeltaFrames);
// Loss protection
WebRtc_Word32 UpdateProtectionMethod();
// Implements VCMPacketizationCallback
virtual WebRtc_Word32
SendData(const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* rtpTypeHdr);
    // Implements VCMProtectionCallback
virtual WebRtc_Word32 ProtectionRequest(const WebRtc_UWord8 deltaFECRate,
const WebRtc_UWord8 keyFECRate,
const bool nack);
    // Implements VCMSendStatisticsCallback
virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
const WebRtc_UWord32 frameRate);
WebRtc_Word32 RegisterCodecObserver(ViEEncoderObserver* observer);
// Implements RtcpFeedback
virtual void OnSLIReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 pictureId);
virtual void OnRPSIReceived(const WebRtc_Word32 id,
const WebRtc_UWord64 pictureId);
// Implements RtpVideoFeedback
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message = 0);
virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs,
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax);
// Effect filter
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effectFilter);
//Recording
ViEFileRecorder& GetOutgoingFileRecorder();
private:
WebRtc_Word32 _engineId;
class QMTestVideoSettingsCallback : public VCMQMSettingsCallback
{
public:
QMTestVideoSettingsCallback();
// update VPM with QM (quality modes: frame size & frame rate) settings
WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height);
// register VPM and VCM
void RegisterVPM(VideoProcessingModule* vpm);
void RegisterVCM(VideoCodingModule* vcm);
void SetNumOfCores(WebRtc_Word32 numOfCores)
{_numOfCores = numOfCores;};
void SetMaxPayloadLength(WebRtc_Word32 maxPayloadLength)
{_maxPayloadLength = maxPayloadLength;};
private:
VideoProcessingModule* _vpm;
VideoCodingModule* _vcm;
WebRtc_Word32 _numOfCores;
WebRtc_Word32 _maxPayloadLength;
};
WebRtc_Word32 _channelId;
const WebRtc_UWord32 _numberOfCores;
VideoCodingModule& _vcm;
VideoProcessingModule& _vpm;
RtpRtcp& _rtpRtcp;
CriticalSectionWrapper& _callbackCritsect;
CriticalSectionWrapper& _dataCritsect;
VideoCodec _sendCodec;
bool _paused;
WebRtc_Word64 _timeLastIntraRequestMs;
WebRtc_Word32 _channelsDroppingDeltaFrames;
bool _dropNextFrame;
//Loss protection
bool _fecEnabled;
bool _nackEnabled;
// Uses
ViEEncoderObserver* _codecObserver;
ViEEffectFilter* _effectFilter;
ProcessThread& _moduleProcessThread;
bool _hasReceivedSLI;
WebRtc_UWord8 _pictureIdSLI;
bool _hasReceivedRPSI;
WebRtc_UWord64 _pictureIdRPSI;
//Recording
ViEFileRecorder _fileRecorder;
// Quality modes callback
QMTestVideoSettingsCallback* _qmCallback;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCODER_H_

View File

@@ -1,543 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_encryption_impl.cc
*/
#include "vie_encryption_impl.h"
// Defines
#include "vie_defines.h"
#include "vie_errors.h"
#include "vie_channel.h"
#include "vie_channel_manager.h"
#include "trace.h"
#include "vie_impl.h"
#ifdef WEBRTC_SRTP
#include "SrtpModule.h"
#endif
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViEEncryption* ViEEncryption::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViEEncryptionImpl* vieEncryptionImpl = vieImpl;
(*vieEncryptionImpl)++; // Increase ref count
return vieEncryptionImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViEEncryptionImpl::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViEEncryptionImpl release too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViEEncryptionImpl reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEEncryptionImpl::ViEEncryptionImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEEncryptionImpl::ViEEncryptionImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEEncryptionImpl::~ViEEncryptionImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEEncryptionImpl::~ViEEncryptionImpl() Dtor");
}
// ============================================================================
// SRTP
// ============================================================================
// ----------------------------------------------------------------------------
// EnableSRTPSend
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::EnableSRTPSend(
const int videoChannel, const CipherTypes cipherType,
const unsigned int cipherKeyLength, const AuthenticationTypes authType,
const unsigned int authKeyLength, const unsigned int authTagLength,
const SecurityLevels level, const unsigned char key[kViEMaxSrtpKeyLength],
const bool useForRTCP)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"EnableSRTPSend(channel=%d, cipherType=%d, cipherKeyLength=%d, "
"authType=%d, authKeyLength=%d, authTagLength=%d, level=%d, "
"key=?, RTCP=%s",
videoChannel, cipherType, cipherKeyLength, authType,
authKeyLength, authTagLength, level,
useForRTCP ? "true" : "false");
#ifdef WEBRTC_SRTP
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
bool cipherAllZero = (kCipherNull == cipherType) && (0 == cipherKeyLength);
bool authAllZero = (kAuthNull == authType) &&
(0 == authKeyLength) &&
(0 == authTagLength);
// 1. For no protection all cipher and auth must be zero
// 2. For encryption only all auth must be zero
// 3. For authentication only all cipher must be zero
if (((kNoProtection == level) && (!cipherAllZero || !authAllZero))
|| ((kEncryption == level) && !authAllZero)
|| ((kAuthentication == level) && !cipherAllZero))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
" Invalid input argument");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// 16 <= cipherKeyLength <= 256
if (((kEncryption == level) || (kEncryptionAndAuthentication == level))
&& ((cipherKeyLength < kViEMinSrtpEncryptLength)
|| (cipherKeyLength > kViEMaxSrtpEncryptLength)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
" Invalid cipher key length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// For HMAC_SHA1 auth:
// authKeyLength <= 20, authTagLength <= 20
if (((kAuthentication == level) || (kEncryptionAndAuthentication == level))
&& (kAuthHmacSha1 == authType)
&& ((authKeyLength > kViEMaxSrtpAuthSh1Length)
|| (authTagLength > kViEMaxSrtpAuthSh1Length)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
" Invalid auth key or tag length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// For NULL auth:
// authKeyLength <= 256, authTagLength <= 12
if (((kAuthentication == level) || (kEncryptionAndAuthentication == level))
&& (kAuthNull == authType)
&& ((authKeyLength > kViEMaxSrtpKeyAuthNullLength)
|| (authTagLength > kViEMaxSrtpTagAuthNullLength)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
" Invalid auth key or tag length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
if (!key)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
" key NULL pointer");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
const SrtpModule::CipherTypes cipher_type =
static_cast<const SrtpModule::CipherTypes> (cipherType);
const SrtpModule::AuthenticationTypes auth_type =
static_cast<const SrtpModule::AuthenticationTypes> (authType);
const SrtpModule::SecurityLevels security_level =
static_cast<const SrtpModule::SecurityLevels> (level);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (0 != vieChannel->EnableSRTPSend(cipher_type, cipherKeyLength, auth_type,
authKeyLength, authTagLength,
security_level, key, useForRTCP))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceId,
"Failed to configure SRTP Encryption for sending");
SetLastError(kViEEncryptionUnknownError);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo, _instanceId,
"SRTP Enabled for sending");
return 0;
#else
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" _SRTP is undefined => _lastError = %d",
LastErrorInternal());
SetLastError(kViEEncryptionSrtpNotSupported);
return -1;
#endif
}
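// Usage sketch (illustrative only): enabling SRTP protection for sending on a
// channel. encryptionIface is assumed to have been obtained through
// ViEEncryption::GetInterface(). The cipher enumerator name
// kCipherAes128CounterMode and the 30-byte master key length are assumptions
// and should be checked against vie_encryption.h; the key contents here are
// placeholders only.
//
//   unsigned char key[webrtc::kViEMaxSrtpKeyLength] = {0};  // 30 bytes
//   int result = encryptionIface->EnableSRTPSend(
//       videoChannel,
//       webrtc::kCipherAes128CounterMode, 30,  // cipher type, key length
//       webrtc::kAuthHmacSha1, 20, 10,         // auth type, key, tag length
//       webrtc::kEncryptionAndAuthentication,
//       key,
//       false);                                // do not protect RTCP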
// ----------------------------------------------------------------------------
// DisableSRTPSend
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::DisableSRTPSend(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"DisableSRTPSend(videoChannel=%d)", videoChannel);
#ifdef WEBRTC_SRTP
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (0 != vieChannel->DisableSRTPSend())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"Failed to Disable SRTP Encryption for sending");
SetLastError(kViEEncryptionUnknownError);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"SRTP Disabled for sending");
return 0;
#else
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" _SRTP is undefined => _lastError = %d",
LastErrorInternal());
SetLastError(kViEEncryptionSrtpNotSupported);
return -1;
#endif
}
// ----------------------------------------------------------------------------
// EnableSRTPReceive
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::EnableSRTPReceive(
const int videoChannel, const CipherTypes cipherType,
const unsigned int cipherKeyLength, const AuthenticationTypes authType,
const unsigned int authKeyLength, const unsigned int authTagLength,
const SecurityLevels level,
const unsigned char key[kViEMaxSrtpKeyLength], const bool useForRTCP)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"EnableSRTPReceive(channel=%d, cipherType=%d, "
"cipherKeyLength=%d, authType=%d, authKeyLength=%d, "
"authTagLength=%d, level=%d, key=?, RTCP=%s)",
videoChannel, cipherType, cipherKeyLength, authType,
authKeyLength, authTagLength, level,
useForRTCP ? "true" : "false");
#ifdef WEBRTC_SRTP
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
bool cipherAllZero = (kCipherNull == cipherType) && (0 == cipherKeyLength);
bool authAllZero = (kAuthNull == authType)
&& (0 == authKeyLength)
&& (0 == authTagLength);
// 1. For no protection all cipher and auth must be zero
// 2. For encryption only all auth must be zero
// 3. For authentication only all cipher must be zero
if (((kNoProtection == level) && (!cipherAllZero || !authAllZero)) ||
((kEncryption == level) && !authAllZero) ||
((kAuthentication == level) && !cipherAllZero))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" Invalid input argument");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// 16 <= cipherKeyLength <= 256
if (((kEncryption == level) || (kEncryptionAndAuthentication == level))
&& ((cipherKeyLength < kViEMinSrtpEncryptLength)
|| (cipherKeyLength > kViEMaxSrtpEncryptLength)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" Invalid cipher key length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// For HMAC_SHA1 auth:
// authKeyLength <= 20, authTagLength <= 20
if (((kAuthentication == level) || (kEncryptionAndAuthentication == level))
&& (kAuthHmacSha1 == authType)
&& ((authKeyLength > kViEMaxSrtpAuthSh1Length)
|| (authTagLength > kViEMaxSrtpAuthSh1Length)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" Invalid auth key or tag length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
// For NULL auth:
// authKeyLength <= 256, authTagLength <= 12
if (((kAuthentication == level) || (kEncryptionAndAuthentication == level))
&& (kAuthNull == authType)
&& ((authKeyLength > kViEMaxSrtpKeyAuthNullLength)
|| (authTagLength > kViEMaxSrtpTagAuthNullLength)))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" Invalid auth key or tag length");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
if (!key)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), " key NULL pointer");
SetLastError(kViEEncryptionInvalidSrtpParameter);
return -1;
}
const SrtpModule::CipherTypes cipher_type =
static_cast<const SrtpModule::CipherTypes> (cipherType);
const SrtpModule::AuthenticationTypes auth_type =
static_cast<const SrtpModule::AuthenticationTypes> (authType);
const SrtpModule::SecurityLevels security_level =
static_cast<const SrtpModule::SecurityLevels> (level);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (0 != vieChannel->EnableSRTPReceive(cipher_type, cipherKeyLength,
auth_type, authKeyLength,
authTagLength, security_level, key,
useForRTCP))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"Failed to configure SRTP Encryption for receiving");
SetLastError(kViEEncryptionUnknownError);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"SRTP Enabled for receiving");
return 0;
#else
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" _SRTP is undefined => _lastError = %d",
LastErrorInternal());
SetLastError(kViEEncryptionSrtpNotSupported);
return -1;
#endif
}
// ----------------------------------------------------------------------------
// DisableSRTPReceive
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::DisableSRTPReceive(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"DisableSRTPReceive(videoChannel=%d)", videoChannel);
#ifdef WEBRTC_SRTP
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (0 != vieChannel->DisableSRTPReceive())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"Failed to Disable SRTP Encryption for receiving");
SetLastError(kViEEncryptionUnknownError);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "SRTP Disabled for receiving");
return 0;
#else
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
" _SRTP is undefined => _lastError = %d",
LastErrorInternal());
SetLastError(kViEEncryptionSrtpNotSupported);
return -1;
#endif
}
// ============================================================================
// External encryption
// ============================================================================
// ----------------------------------------------------------------------------
// RegisterExternalEncryption
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::RegisterExternalEncryption(const int videoChannel,
Encryption& encryption)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"RegisterExternalEncryption(videoChannel=%d)", videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (vieChannel->RegisterExternalEncryption(&encryption) != 0)
{
SetLastError(kViEEncryptionUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterExternalEncryption
//
// ----------------------------------------------------------------------------
int ViEEncryptionImpl::DeregisterExternalEncryption(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"RegisterExternalEncryption(videoChannel=%d)", videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), "%s: No channel %d",
__FUNCTION__, videoChannel);
SetLastError(kViEEncryptionInvalidChannelId);
return -1;
}
if (vieChannel->DeRegisterExternalEncryption() != 0)
{
SetLastError(kViEEncryptionUnknownError);
return -1;
}
return 0;
}
} // namespace webrtc

View File

@@ -1,75 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_encryption_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCRYPTION_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCRYPTION_IMPL_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "vie_ref_count.h"
#include "vie_encryption.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViEEncryptionImpl
// ----------------------------------------------------------------------------
class ViEEncryptionImpl : public virtual ViESharedData,
public ViEEncryption,
public ViERefCount
{
public:
virtual int Release();
// SRTP calls
virtual int EnableSRTPSend(const int videoChannel,
const CipherTypes cipherType,
const unsigned int cipherKeyLength,
const AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SecurityLevels level,
const unsigned char key[kViEMaxSrtpKeyLength],
const bool useForRTCP);
virtual int DisableSRTPSend(const int videoChannel);
virtual int EnableSRTPReceive(const int videoChannel,
const CipherTypes cipherType,
const unsigned int cipherKeyLength,
const AuthenticationTypes authType,
const unsigned int authKeyLength,
const unsigned int authTagLength,
const SecurityLevels level,
const unsigned char key[kViEMaxSrtpKeyLength],
const bool useForRTCP);
virtual int DisableSRTPReceive(const int videoChannel);
// External encryption
virtual int RegisterExternalEncryption(const int videoChannel,
Encryption& encryption);
virtual int DeregisterExternalEncryption(const int videoChannel);
protected:
ViEEncryptionImpl();
virtual ~ViEEncryptionImpl();
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_ENCRYPTION_IMPL_H_


@ -1,203 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_external_codec_impl.cc
*/
#include "engine_configurations.h"
#include "vie_external_codec_impl.h"
#include "vie_errors.h"
#include "trace.h"
#include "vie_impl.h"
#include "vie_channel.h"
#include "vie_encoder.h"
#include "vie_channel_manager.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViEExternalCodec* ViEExternalCodec::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViEExternalCodecImpl* vieExternalCodecImpl = vieImpl;
(*vieExternalCodecImpl)++; // Increase ref count
return vieExternalCodecImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViEExternalCodecImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViEExternalCodec::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViEExternalCodec release too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViEExternalCodec reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// RegisterExternalSendCodec
// ----------------------------------------------------------------------------
int ViEExternalCodecImpl::RegisterExternalSendCodec(const int videoChannel,
const unsigned char plType,
VideoEncoder* encoder)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s channel %d plType %d encoder 0x%x", __FUNCTION__,
videoChannel, plType, encoder);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (!vieEncoder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument videoChannel %u. Does it exist?",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (!encoder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument Encoder 0x%x.", __FUNCTION__, encoder);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (vieEncoder->RegisterExternalEncoder(encoder, plType) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
int ViEExternalCodecImpl::DeRegisterExternalSendCodec(
const int videoChannel, const unsigned char plType)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s channel %d plType %d", __FUNCTION__, videoChannel, plType);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (!vieEncoder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument videoChannel %u. Does it exist?",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (vieEncoder->DeRegisterExternalEncoder(plType) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
const int videoChannel, const unsigned int plType, VideoDecoder* decoder,
bool decoderRender /*= false*/, int renderDelay /*= 0*/)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s channel %d plType %d decoder 0x%x, decoderRender %d, "
"renderDelay %d", __FUNCTION__, videoChannel, plType, decoder,
decoderRender, renderDelay);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (!vieChannel)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument videoChannel %u. Does it exist?",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (!decoder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument decoder 0x%x.", __FUNCTION__, decoder);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (vieChannel->RegisterExternalDecoder(plType, decoder, decoderRender,
renderDelay) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
int ViEExternalCodecImpl::DeRegisterExternalReceiveCodec(
const int videoChannel, const unsigned char plType)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s channel %d plType %u", __FUNCTION__, videoChannel, plType);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (!vieChannel)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Invalid argument videoChannel %u. Does it exist?",
__FUNCTION__, videoChannel);
SetLastError(kViECodecInvalidArgument);
return -1;
}
if (vieChannel->DeRegisterExternalDecoder(plType) != 0)
{
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
} // namespace webrtc
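For reference, a minimal sketch of how the external-codec sub-API above would be used; videoEngine, videoChannel, myEncoder and myDecoder are placeholders, and the payload type is arbitrary.
// Sketch only: myEncoder/myDecoder are user-supplied webrtc::VideoEncoder /
// webrtc::VideoDecoder implementations created elsewhere.
webrtc::ViEExternalCodec* extCodec =
    webrtc::ViEExternalCodec::GetInterface(videoEngine);
if (extCodec)
{
    const unsigned char kPlType = 120; // arbitrary dynamic payload type
    extCodec->RegisterExternalSendCodec(videoChannel, kPlType, myEncoder);
    extCodec->RegisterExternalReceiveCodec(videoChannel, kPlType, myDecoder,
                                           false, 0); // no decoder rendering, no extra render delay
    // ... send/receive ...
    extCodec->DeRegisterExternalSendCodec(videoChannel, kPlType);
    extCodec->DeRegisterExternalReceiveCodec(videoChannel, kPlType);
    extCodec->Release();
}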


@ -1,54 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_external_codec_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_EXTERNAL_CODEC_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_EXTERNAL_CODEC_IMPL_H_
#include "vie_external_codec.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViEExternalCodec
// ----------------------------------------------------------------------------
class ViEExternalCodecImpl : public virtual ViESharedData,
public ViEExternalCodec,
public ViERefCount
{
public:
virtual int Release();
virtual int RegisterExternalSendCodec(const int videoChannel,
const unsigned char plType,
VideoEncoder* encoder);
virtual int DeRegisterExternalSendCodec(const int videoChannel,
const unsigned char plType);
virtual int RegisterExternalReceiveCodec(const int videoChannel,
const unsigned int plType,
VideoDecoder* decoder,
bool decoderRender = false,
int renderDelay = 0);
virtual int DeRegisterExternalReceiveCodec(const int videoChannel,
const unsigned char plType);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_EXTERNAL_CODEC_IMPL_H_


@ -1,113 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_image.cc
*/
#include <stdio.h>
#include "vie_file_image.h"
#include "video_image.h"
#include "jpeg.h"
#include "trace.h"
namespace webrtc {
int ViEFileImage::ConvertJPEGToVideoFrame(int engineId,
const char* fileNameUTF8,
VideoFrame& videoFrame)
{
// read jpeg file into temporary buffer
EncodedImage imageBuffer;
FILE* imageFile = fopen(fileNameUTF8, "rb");
if (NULL == imageFile)
{
// error reading file
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s could not open file %s", __FUNCTION__, fileNameUTF8);
return -1;
}
fseek(imageFile, 0, SEEK_END);
imageBuffer._size = ftell(imageFile);
fseek(imageFile, 0, SEEK_SET);
imageBuffer._buffer = new WebRtc_UWord8[ imageBuffer._size + 1];
if ( imageBuffer._size != fread(imageBuffer._buffer, sizeof(WebRtc_UWord8),
imageBuffer._size, imageFile))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s could not read file %s", __FUNCTION__, fileNameUTF8);
delete [] imageBuffer._buffer;
return -1;
}
fclose(imageFile);
// if this is a jpeg file, decode it
JpegDecoder decoder;
int ret = 0;
RawImage decodedImage;
ret = decoder.Decode(imageBuffer, decodedImage);
// done with this.
delete [] imageBuffer._buffer;
imageBuffer._buffer = NULL;
if (-1 == ret)
{
// error decoding the file
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s could decode file %s from jpeg format", __FUNCTION__,
fileNameUTF8);
return -1;
} else if (-3 == ret)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s could not convert jpeg's data to i420 format",
__FUNCTION__, fileNameUTF8);
}
WebRtc_UWord32 imageLength = (WebRtc_UWord32)(decodedImage._width *
decodedImage._height * 1.5);
if (-1 == videoFrame.Swap(decodedImage._buffer, imageLength, imageLength))
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo,
engineId,
"%s could not copy frame imageDecodedBuffer to videoFrame ",
__FUNCTION__, fileNameUTF8);
return -1;
}
if (decodedImage._buffer)
{
delete [] decodedImage._buffer;
decodedImage._buffer = NULL;
}
videoFrame.SetWidth(decodedImage._width);
videoFrame.SetHeight(decodedImage._height);
return 0;
}
int ViEFileImage::ConvertPictureToVideoFrame(int engineId,
const ViEPicture& picture,
VideoFrame& videoFrame)
{
WebRtc_UWord32 pictureLength = (WebRtc_UWord32)(picture.width
* picture.height * 1.5);
videoFrame.CopyFrame(pictureLength, picture.data);
videoFrame.SetWidth(picture.width);
videoFrame.SetHeight(picture.height);
videoFrame.SetLength(pictureLength);
return 0;
}
} // namespace webrtc
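For reference, a minimal sketch of how the helpers above are called from inside the engine; engineId and the file name are placeholders.
// Sketch only: decode a JPEG file into an I420 VideoFrame using the static helper.
webrtc::VideoFrame startImage;
if (webrtc::ViEFileImage::ConvertJPEGToVideoFrame(engineId,
                                                  "start_image.jpg",
                                                  startImage) != 0)
{
    // Could not open or decode the file; startImage is left unchanged.
}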


@ -1,34 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_image.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMAGE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMAGE_H_
#include "typedefs.h"
#include "vie_file.h"
#include "module_common_types.h"
namespace webrtc {
class ViEFileImage
{
public:
static int ConvertJPEGToVideoFrame(int engineId,
const char* fileNameUTF8,
VideoFrame& videoFrame);
static int ConvertPictureToVideoFrame(int engineId,
const ViEPicture& picture,
VideoFrame& videoFrame);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMAGE_H_

File diff suppressed because it is too large.


@ -1,175 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMPL_H_
#include "typedefs.h"
#include "vie_defines.h"
#include "vie_file.h"
#include "vie_frame_provider_base.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
namespace webrtc
{
class ConditionVariableWrapper;
// ----------------------------------------------------------------------------
// ViECaptureSnapshot
// ----------------------------------------------------------------------------
class ViECaptureSnapshot: public ViEFrameCallback
{
public:
ViECaptureSnapshot();
~ViECaptureSnapshot();
bool GetSnapshot(VideoFrame& videoFrame, unsigned int maxWaitTime);
// From ViEFrameCallback
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frameDelay) {}
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate)
{
return -1;
}
virtual void ProviderDestroyed(int id) {}
private:
CriticalSectionWrapper& _crit;
ConditionVariableWrapper& _conditionVaraible;
VideoFrame* _ptrVideoFrame;
};
// ----------------------------------------------------------------------------
// VideoFileImpl
// ----------------------------------------------------------------------------
class ViEFileImpl: public virtual ViESharedData,
public ViEFile,
public ViERefCount
{
public:
virtual int Release();
// Play file
virtual int StartPlayFile(const char* fileNameUTF8, int& fileId,
const bool loop = false,
const webrtc::FileFormats fileFormat =
webrtc::kFileFormatAviFile);
virtual int StopPlayFile(const int fileId);
virtual int RegisterObserver(int fileId, ViEFileObserver& observer);
virtual int DeregisterObserver(int fileId, ViEFileObserver& observer);
virtual int SendFileOnChannel(const int fileId, const int videoChannel);
virtual int StopSendFileOnChannel(const int videoChannel);
virtual int StartPlayFileAsMicrophone(const int fileId,
const int audioChannel,
bool mixMicrophone = false,
float volumeScaling = 1);
virtual int StopPlayFileAsMicrophone(const int fileId,
const int audioChannel);
virtual int StartPlayAudioLocally(const int fileId, const int audioChannel,
float volumeScaling = 1);
virtual int StopPlayAudioLocally(const int fileId, const int audioChannel);
virtual int StartRecordOutgoingVideo(const int videoChannel,
const char* fileNameUTF8,
AudioSource audioSource,
const webrtc::CodecInst& audioCodec,
const VideoCodec& videoCodec,
const webrtc::FileFormats fileFormat =
webrtc::kFileFormatAviFile);
virtual int StartRecordIncomingVideo(const int videoChannel,
const char* fileNameUTF8,
AudioSource audioSource,
const webrtc::CodecInst& audioCodec,
const VideoCodec& videoCodec,
const webrtc::FileFormats fileFormat =
webrtc::kFileFormatAviFile);
virtual int StopRecordOutgoingVideo(const int videoChannel);
virtual int StopRecordIncomingVideo(const int videoChannel);
// File information
virtual int GetFileInformation(const char* fileName,
VideoCodec& videoCodec,
webrtc::CodecInst& audioCodec,
const webrtc::FileFormats fileFormat =
webrtc::kFileFormatAviFile);
// Snapshot
virtual int GetRenderSnapshot(const int videoChannel,
const char* fileNameUTF8);
virtual int GetRenderSnapshot(const int videoChannel, ViEPicture& picture);
virtual int FreePicture(ViEPicture& picture);
virtual int GetCaptureDeviceSnapshot(const int captureId,
const char* fileNameUTF8);
virtual int GetCaptureDeviceSnapshot(const int captureId,
ViEPicture& picture);
// Capture device images
virtual int SetCaptureDeviceImage(const int captureId,
const char* fileNameUTF8);
virtual int SetCaptureDeviceImage(const int captureId,
const ViEPicture& picture);
// Render images
virtual int SetRenderStartImage(const int videoChannel,
const char* fileNameUTF8);
virtual int SetRenderStartImage(const int videoChannel,
const ViEPicture& picture);
// Timeout image
virtual int SetRenderTimeoutImage(const int videoChannel,
const char* fileNameUTF8,
const unsigned int timeoutMs);
virtual int SetRenderTimeoutImage(const int videoChannel,
const ViEPicture& picture,
const unsigned int timeoutMs);
protected:
ViEFileImpl();
virtual ~ViEFileImpl();
private:
WebRtc_Word32 GetNextCapturedFrame(WebRtc_Word32 captureId,
VideoFrame& videoFrame);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_IMPL_H_


@ -1,572 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_player.cc
*
*/
#include "critical_section_wrapper.h"
#include "trace.h"
#include "vie_file_player.h"
#include "tick_util.h"
#include "thread_wrapper.h"
#include "event_wrapper.h"
#include "vie_input_manager.h"
namespace webrtc {
ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(int fileId,
int engineId,
const char* fileNameUTF8,
const bool loop,
const webrtc::FileFormats fileFormat,
ViEInputManager& inputManager,
VoiceEngine* vePtr)
{
ViEFilePlayer* self = new ViEFilePlayer(fileId, engineId, inputManager);
if (!self || self->Init(fileNameUTF8, loop, fileFormat, vePtr) != 0)
{
delete self;
self = NULL;
}
return self;
}
ViEFilePlayer::ViEFilePlayer(int Id, int engineId,
ViEInputManager& inputManager)
: ViEFrameProviderBase(Id, engineId), _playBackStarted(false),
_inputManager(inputManager), _ptrFeedBackCritSect(NULL),
_ptrAudioCritSect(NULL), _filePlayer(NULL), _audioStream(false),
_videoClients(0), _audioClients(0), _localAudioChannel(-1), _observer(NULL),
_veFileInterface(NULL), _veVideoSync(NULL), _ptrDecodeThread(NULL),
_ptrDecodeEvent(NULL), _decodedAudioLength(0), _audioChannelBuffers(),
_decodedVideo()
{
}
ViEFilePlayer::~ViEFilePlayer()
{
StopPlay();
delete _ptrDecodeEvent;
delete _ptrAudioCritSect;
delete _ptrFeedBackCritSect;
}
int ViEFilePlayer::Init(const char* fileNameUTF8, const bool loop,
const webrtc::FileFormats fileFormat,
VoiceEngine* vePtr)
{
_ptrFeedBackCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrFeedBackCritSect)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1;
}
_ptrAudioCritSect = CriticalSectionWrapper::CreateCriticalSection();
if (!_ptrAudioCritSect)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to allocate critsect");
return -1;
}
_ptrDecodeEvent = EventWrapper::Create();
if (!_ptrDecodeEvent)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to allocate event");
return -1;
}
if (strlen(fileNameUTF8) > FileWrapper::kMaxFileNameSize)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() To long filename");
return -1;
}
strncpy(_fileName, fileNameUTF8, strlen(fileNameUTF8) + 1);
_filePlayer = FilePlayer::CreateFilePlayer(ViEId(_engineId, _id),
fileFormat);
if (!_filePlayer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to create file player");
return -1;
}
if (_filePlayer->RegisterModuleFileCallback(this) == -1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to RegisterModuleFileCallback");
_filePlayer = NULL;
return -1;
}
_ptrDecodeThread = ThreadWrapper::CreateThread(FilePlayDecodeThreadFunction,
this, kHighestPriority,
"ViEFilePlayThread");
if (!_ptrDecodeThread)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to start decode thread.");
_filePlayer = NULL;
return -1;
}
    // Always try to open with audio since we don't know which channels the audio should be played on.
WebRtc_Word32 error = _filePlayer->StartPlayingVideoFile(_fileName, loop,
false);
if (error) // Failed to open the file with audio. Try without
{
error = _filePlayer->StartPlayingVideoFile(_fileName, loop, true);
_audioStream = false;
if (error)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to Start play video file");
return -1;
}
} else
{
_audioStream = true;
}
    if (_audioStream) // The file contains an audio stream
    {
        if (vePtr) // A VoiceEngine interface has been provided and we want to play audio on a local channel.
{
_veFileInterface = VoEFile::GetInterface(vePtr);
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to get VEFile interface");
return -1;
}
_veVideoSync = VoEVideoSync::GetInterface(vePtr);
if (!_veVideoSync)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() failed to get "
"VoEVideoSync interface");
return -1;
}
}
}
    _ptrDecodeEvent->StartTimer(true, 10); // Read audio (or just video) every 10 ms.
return 0;
}
// Implements ViEFrameProviderBase.
// Starts the decode thread when the first frame callback is registered.
int ViEFilePlayer::FrameCallbackChanged()
{
if (ViEFrameProviderBase::NumberOfRegistersFrameCallbacks() > _videoClients)
{
if (!_playBackStarted)
{
_playBackStarted = true;
unsigned int threadId;
if (_ptrDecodeThread->Start(threadId))
{
WEBRTC_TRACE(
webrtc::kTraceStateInfo,
webrtc::kTraceVideo,
ViEId(_engineId, _id),
"ViEFilePlayer::FrameCallbackChanged() Started filedecode thread %u",
threadId);
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
"ViEFilePlayer::FrameCallbackChanged() Failed to start file decode thread.");
}
} else if (!_filePlayer->IsPlayingFile())
{
if (_filePlayer->StartPlayingVideoFile(_fileName, false,
!_audioStream) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _id),
"ViEFilePlayer::FrameCallbackChanged(), Failed to restart the file player.");
}
}
}
_videoClients = ViEFrameProviderBase::NumberOfRegistersFrameCallbacks();
return 0;
}
// File play decode function.
bool ViEFilePlayer::FilePlayDecodeThreadFunction(void* obj)
{
return static_cast<ViEFilePlayer*> (obj)->FilePlayDecodeProcess();
}
bool ViEFilePlayer::FilePlayDecodeProcess()
{
if (_ptrDecodeEvent->Wait(kThreadWaitTimeMs) == kEventSignaled)
{
        if (_audioStream && _audioClients == 0) // Audio present but no audio clients; read the audio ourselves
{
Read(NULL, 0);
}
if (_filePlayer->TimeUntilNextVideoFrame() < 10) // Less than 10ms to next videoframe
{
if (_filePlayer->GetVideoFromFile(_decodedVideo) != 0)
{
}
}
if (_decodedVideo.Length() > 0)
{
if (_localAudioChannel != -1 && _veVideoSync) // We are playing audio locally
{
int audioDelay = 0;
if (_veVideoSync->GetPlayoutBufferSize(audioDelay) == 0)
{
_decodedVideo.SetRenderTime(_decodedVideo.RenderTimeMs()
+ audioDelay);
}
}
DeliverFrame(_decodedVideo);
_decodedVideo.SetLength(0);
}
}
return true;
}
int ViEFilePlayer::StopPlay() //Only called from destructor.
{
bool threadStoped = false;
if (_ptrDecodeThread)
{
_ptrDecodeThread->SetNotAlive();
if (_ptrDecodeThread->Stop())
{
delete _ptrDecodeThread;
} else
{
assert(!"ViEFilePlayer::StopPlay() Failed to stop decode thread");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StartPlay() Failed to stop file decode thread.");
}
}
_ptrDecodeThread = NULL;
if (_ptrDecodeEvent)
{
_ptrDecodeEvent->StopTimer();
}
StopPlayAudio();
if (_veFileInterface)
{
_veFileInterface->Release();
_veFileInterface = NULL;
}
if (_veVideoSync)
{
_veVideoSync->Release();
_veVideoSync = NULL;
}
if (_filePlayer)
{
_filePlayer->StopPlayingFile();
FilePlayer::DestroyFilePlayer(_filePlayer);
_filePlayer = NULL;
}
return 0;
}
int ViEFilePlayer::StopPlayAudio()
{
// Stop sending audio
while (MapItem* audioItem = _audioChannelsSending.First())
{
StopSendAudioOnChannel(audioItem->GetId());
}
// Stop local audio playback
if (_localAudioChannel != -1)
{
StopPlayAudioLocally(_localAudioChannel);
}
_localAudioChannel = -1;
while (_audioChannelBuffers.PopFront() != -1);
while (_audioChannelsSending.Erase(_audioChannelsSending.First()) != -1);
_audioClients = 0;
return 0;
}
// From webrtc::InStream
int ViEFilePlayer::Read(void *buf, int len)
{
    CriticalSectionScoped lock(*_ptrAudioCritSect); // Protect from simultaneous reading from multiple channels
if (NeedsAudioFromFile(buf))
{
if (_filePlayer->Get10msAudioFromFile(_decodedAudio,
_decodedAudioLength, 16000) != 0) // we will run the VE in 16KHz
{
// No data
_decodedAudioLength = 0;
return 0;
}
_decodedAudioLength *= 2; // 2 bytes per sample
if (buf != 0)
{
_audioChannelBuffers.PushBack(buf);
}
} else
{
        // No need to read a new audio buffer from the file, i.e. this channel has not yet consumed the buffer last read from the file.
}
if (buf)
{
memcpy(buf, _decodedAudio, _decodedAudioLength);
}
return _decodedAudioLength;
}
bool ViEFilePlayer::NeedsAudioFromFile(void* buf)
{
bool needsNewAudio = false;
if (_audioChannelBuffers.GetSize() == 0)
{
return true;
}
    // Check if this buffer has already read the current audio.
for (ListItem* item = _audioChannelBuffers.First(); item != NULL; item
= _audioChannelBuffers.Next(item))
{
if (item->GetItem() == buf)
{
needsNewAudio = true;
_audioChannelBuffers.Erase(item);
break;
}
}
return needsNewAudio;
}
// From FileCallback
void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, id),
"%s: fileId %d", __FUNCTION__, _id);
_filePlayer->StopPlayingFile();
CriticalSectionScoped lock(*_ptrFeedBackCritSect);
if (_observer)
{
_observer->PlayFileEnded(_id);
}
}
bool ViEFilePlayer::IsObserverRegistered()
{
CriticalSectionScoped lock(*_ptrFeedBackCritSect);
return _observer != NULL;
}
int ViEFilePlayer::RegisterObserver(ViEFileObserver& observer)
{
CriticalSectionScoped lock(*_ptrFeedBackCritSect);
if (_observer)
return -1;
_observer = &observer;
return 0;
}
int ViEFilePlayer::DeRegisterObserver()
{
CriticalSectionScoped lock(*_ptrFeedBackCritSect);
_observer = NULL;
return 0;
}
// ----------------------------------------------------------------------------
// SendAudioOnChannel
// Order the voice engine to send the audio on a channel
// ----------------------------------------------------------------------------
int ViEFilePlayer::SendAudioOnChannel(const int audioChannel,
bool mixMicrophone, float volumeScaling)
{
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
if (_veFileInterface->StartPlayingFileAsMicrophone(audioChannel,
this,
mixMicrophone,
kFileFormatPcm16kHzFile,
volumeScaling) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::SendAudioOnChannel() VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
audioChannel, mixMicrophone, volumeScaling);
return -1;
}
_audioChannelsSending.Insert(audioChannel, NULL);
CriticalSectionScoped lock(*_ptrAudioCritSect);
_audioClients++; // Increase the number of audioClients;
return 0;
}
// ----------------------------------------------------------------------------
// StopSendAudioOnChannel
// Order the voice engine to stop send the audio on a channel
// ----------------------------------------------------------------------------
int ViEFilePlayer::StopSendAudioOnChannel(const int audioChannel)
{
int result = 0;
MapItem* audioItem = _audioChannelsSending.Find(audioChannel);
if (!audioItem)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"_s AudioChannel %d not sending", __FUNCTION__, audioChannel);
return -1;
}
result = _veFileInterface->StopPlayingFileAsMicrophone(audioChannel);
if (result != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"ViEFilePlayer::StopSendAudioOnChannel() VE_StopPlayingFileAsMicrophone failed. audioChannel %d",
audioChannel);
}
_audioChannelsSending.Erase(audioItem);
CriticalSectionScoped lock(*_ptrAudioCritSect);
_audioClients--; // Decrease the number of audioClients;
assert(_audioClients>=0);
return 0;
}
int ViEFilePlayer::PlayAudioLocally(const int audioChannel, float volumeScaling)
{
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
if (_veFileInterface->StartPlayingFileLocally(
audioChannel,
this,
kFileFormatPcm16kHzFile,
volumeScaling) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s VE_StartPlayingFileAsMicrophone failed. audioChannel %d, mixMicrophone %d, volumeScaling %.2f",
__FUNCTION__, audioChannel, volumeScaling);
return -1;
}
CriticalSectionScoped lock(*_ptrAudioCritSect);
_localAudioChannel = audioChannel;
_audioClients++; // Increase the number of audioClients;
return 0;
}
int ViEFilePlayer::StopPlayAudioLocally(const int audioChannel)
{
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s No VEFile interface.", __FUNCTION__);
return -1;
}
if (_veFileInterface->StopPlayingFileLocally(audioChannel) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s VE_StopPlayingFileLocally failed. audioChannel %d.",
__FUNCTION__, audioChannel);
return -1;
}
CriticalSectionScoped lock(*_ptrAudioCritSect);
_localAudioChannel = -1;
_audioClients--; // Decrease the number of audioClients;
return 0;
}
//static
int ViEFilePlayer::GetFileInformation(int engineId, const char* fileName,
VideoCodec& videoCodec,
webrtc::CodecInst& audioCodec,
const webrtc::FileFormats fileFormat)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, engineId, "%s ", __FUNCTION__);
FilePlayer* filePlayer = FilePlayer::CreateFilePlayer(engineId, fileFormat);
if (!filePlayer)
{
return -1;
}
int result = 0;
bool videoOnly = false;
memset(&videoCodec, 0, sizeof(videoCodec));
memset(&audioCodec, 0, sizeof(audioCodec));
if (filePlayer->StartPlayingVideoFile(fileName, false, false) != 0)
{
videoOnly = true;
if (filePlayer->StartPlayingVideoFile(fileName, false, true) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s Failed to open file.", __FUNCTION__);
FilePlayer::DestroyFilePlayer(filePlayer);
return -1;
}
}
if (!videoOnly && filePlayer->AudioCodec(audioCodec) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s Failed to get audio codec.", __FUNCTION__);
FilePlayer::DestroyFilePlayer(filePlayer);
return -1;
}
if (filePlayer->video_codec_info(videoCodec) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, engineId,
"%s Failed to get video codec.", __FUNCTION__);
FilePlayer::DestroyFilePlayer(filePlayer);
return -1;
}
FilePlayer::DestroyFilePlayer(filePlayer);
return 0;
}
} // namespace webrtc


@ -1,124 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_player.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_PLAYER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_PLAYER_H_
#include "typedefs.h"
#include "common_types.h" // webrtc::OutStream
#include "file_player.h"
#include "media_file_defines.h"
#include "vie_file.h"
#include "voe_file.h"
#include "voe_video_sync.h"
#include "list_wrapper.h"
#include "vie_frame_provider_base.h"
#include "file_wrapper.h"
namespace webrtc
{
class EventWrapper;
class ThreadWrapper;
class ViEInputManager;
class ViEFilePlayer: public ViEFrameProviderBase,
protected webrtc::FileCallback,
protected webrtc::InStream // for audio
{
public:
static ViEFilePlayer *CreateViEFilePlayer(int fileId, int engineId,
const char* fileNameUTF8,
const bool loop,
const webrtc::FileFormats fileFormat,
ViEInputManager& inputManager,
VoiceEngine* vePtr);
static int GetFileInformation(const int engineId,
const char* fileName,
webrtc::VideoCodec& videoCodec,
webrtc::CodecInst& audioCodec,
const webrtc::FileFormats fileFormat);
~ViEFilePlayer();
bool IsObserverRegistered();
int RegisterObserver(ViEFileObserver& observer);
int DeRegisterObserver();
int SendAudioOnChannel(const int audioChannel, bool mixMicrophone,
float volumeScaling);
int StopSendAudioOnChannel(const int audioChannel);
int PlayAudioLocally(const int audioChannel, float volumeScaling);
int StopPlayAudioLocally(const int audioChannel);
//Implement ViEFrameProviderBase
virtual int FrameCallbackChanged();
protected:
ViEFilePlayer(int Id, int engineId, ViEInputManager& inputManager);
int Init(const WebRtc_Word8* fileNameUTF8, const bool loop,
const webrtc::FileFormats fileFormat, VoiceEngine* vePtr);
int StopPlay();
int StopPlayAudio();
// File play decode function.
static bool FilePlayDecodeThreadFunction(void* obj);
bool FilePlayDecodeProcess();
bool NeedsAudioFromFile(void* buf);
// From webrtc::InStream
virtual int Read(void *buf, int len);
virtual int Rewind() { return 0;}
// From FileCallback
virtual void PlayNotification(const WebRtc_Word32 /*id*/,
const WebRtc_UWord32 /*notificationMs*/){}
virtual void RecordNotification(const WebRtc_Word32 id,
const WebRtc_UWord32 notificationMs){}
virtual void PlayFileEnded(const WebRtc_Word32 id);
virtual void RecordFileEnded(const WebRtc_Word32 id) { }
private:
enum { kThreadWaitTimeMs = 100 };
bool _playBackStarted;
ViEInputManager& _inputManager;
CriticalSectionWrapper* _ptrFeedBackCritSect;
CriticalSectionWrapper* _ptrAudioCritSect;
webrtc::FilePlayer* _filePlayer;
bool _audioStream;
int _videoClients; // Number of active video clients
    int _audioClients; // Number of audio channels sending this audio.
    int _localAudioChannel; // Local audio channel playing this file's audio. Video is synced against this channel.
ViEFileObserver* _observer;
WebRtc_Word8 _fileName[FileWrapper::kMaxFileNameSize];
// VE Interface
VoEFile* _veFileInterface;
VoEVideoSync* _veVideoSync;
// Thread for decoding video (and audio if no audio clients connected)
ThreadWrapper* _ptrDecodeThread;
EventWrapper* _ptrDecodeEvent;
WebRtc_Word16 _decodedAudio[320];
WebRtc_UWord32 _decodedAudioLength;
    ListWrapper _audioChannelBuffers; // List of VoE buffers reading from this file. Used when multiple audio channels are sending.
MapWrapper _audioChannelsSending; // AudioChannels sending audio from this file
VideoFrame _decodedVideo; // Frame receiving decoded video from file.
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_PLAYER_H_


@ -1,281 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_recorder.cc
*
*/
#include "vie_file_recorder.h"
#include "critical_section_wrapper.h"
#include "trace.h"
#include "tick_util.h"
#include "file_player.h"
#include "file_recorder.h"
#include "vie_defines.h"
namespace webrtc {
ViEFileRecorder::ViEFileRecorder(int instanceID)
: _ptrCritSec(CriticalSectionWrapper::CreateCriticalSection()),
_fileRecorder(NULL), _isFirstFrameRecorded(false),
_isOutStreamStarted(false), _instanceID(instanceID), _frameDelay(0),
_audioChannel(-1), _audioSource(NO_AUDIO),
_veFileInterface(NULL)
{
}
ViEFileRecorder::~ViEFileRecorder()
{
StopRecording();
delete _ptrCritSec;
}
int ViEFileRecorder::StartRecording(const char* fileNameUTF8,
const VideoCodec& codecInst,
AudioSource audioSource,
int audioChannel,
const webrtc::CodecInst audioCodecInst,
VoiceEngine* vePtr,
const webrtc::FileFormats fileFormat)
{
CriticalSectionScoped lock(*_ptrCritSec);
if (_fileRecorder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StartRecording() failed, already recording.");
return -1;
}
_fileRecorder = FileRecorder::CreateFileRecorder(_instanceID, fileFormat);
if (!_fileRecorder)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StartRecording() failed to create file recoder.");
return -1;
}
int error = _fileRecorder->StartRecordingVideoFile(fileNameUTF8,
audioCodecInst,
codecInst,
AMRFileStorage,
audioSource == NO_AUDIO);
if (error)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StartRecording() failed to StartRecordingVideoFile.");
FileRecorder::DestroyFileRecorder(_fileRecorder);
_fileRecorder = NULL;
return -1;
}
_audioSource = audioSource;
    if (vePtr && audioSource != NO_AUDIO) // A VoiceEngine interface has been provided and we want to record audio
{
_veFileInterface = VoEFile::GetInterface(vePtr);
if (!_veFileInterface)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StartRecording() failed to get VEFile interface");
return -1;
}
// always drive VoE in L16
CodecInst engineAudioCodecInst = { 96, // .pltype
"L16", // .plname
audioCodecInst.plfreq, // .plfreq
audioCodecInst.plfreq / 100, // .pacsize (10ms)
1, // .channels
audioCodecInst.plfreq * 16 // .rate
};
switch (audioSource)
{
case MICROPHONE:
error
= _veFileInterface->StartRecordingMicrophone(
this,
&engineAudioCodecInst);
break;
case PLAYOUT:
error
= _veFileInterface->StartRecordingPlayout(
audioChannel,
this,
&engineAudioCodecInst);
break;
case NO_AUDIO:
break;
default:
assert(!"Unknown audioSource");
}
if (error != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StartRecording() failed to start recording audio");
FileRecorder::DestroyFileRecorder(_fileRecorder);
_fileRecorder = NULL;
return -1;
}
_isOutStreamStarted = true;
_audioChannel = audioChannel;
}
_isFirstFrameRecorded = false;
return 0;
}
int ViEFileRecorder::StopRecording()
{
    int error = 0;
// Stop recording audio
    // Note - we cannot hold _ptrCritSec while accessing VoE functions. It might cause a deadlock in Write().
if (_veFileInterface)
{
switch (_audioSource)
{
case MICROPHONE:
error = _veFileInterface->StopRecordingMicrophone();
break;
case PLAYOUT:
error = _veFileInterface->StopRecordingPlayout(_audioChannel);
break;
case NO_AUDIO:
break;
default:
assert(!"Unknown audioSource");
}
if (error != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _instanceID,
"ViEFileRecorder::StopRecording() failed to stop recording audio");
}
}
CriticalSectionScoped lock(*_ptrCritSec);
if (_veFileInterface)
{
_veFileInterface->Release();
_veFileInterface = NULL;
}
if (_fileRecorder)
{
if (_fileRecorder->IsRecording())
{
int error = _fileRecorder->StopRecording();
if (error)
{
return -1;
}
}
FileRecorder::DestroyFileRecorder(_fileRecorder);
_fileRecorder = NULL;
}
_isFirstFrameRecorded = false;
_isOutStreamStarted = false;
return 0;
}
void ViEFileRecorder::SetFrameDelay(int frameDelay)
{
CriticalSectionScoped lock(*_ptrCritSec);
_frameDelay = frameDelay;
}
bool ViEFileRecorder::RecordingStarted()
{
CriticalSectionScoped lock(*_ptrCritSec);
return _fileRecorder && _fileRecorder->IsRecording();
}
bool ViEFileRecorder::FirstFrameRecorded()
{
CriticalSectionScoped lock(*_ptrCritSec);
return _isFirstFrameRecorded;
}
bool ViEFileRecorder::IsRecordingFileFormat(const webrtc::FileFormats fileFormat)
{
CriticalSectionScoped lock(*_ptrCritSec);
return (_fileRecorder->RecordingFileFormat() == fileFormat) ? true : false;
}
/*******************************************************************************
* void RecordVideoFrame()
*
* Records incoming decoded video frame to AVI-file.
*
*/
void ViEFileRecorder::RecordVideoFrame(const VideoFrame& videoFrame)
{
CriticalSectionScoped lock(*_ptrCritSec);
if (_fileRecorder && _fileRecorder->IsRecording())
{
if (!IsRecordingFileFormat(webrtc::kFileFormatAviFile))
{
return;
}
        // Compensate for the frame delay in order to get audio sync when recording local video.
const WebRtc_UWord32 timeStamp = videoFrame.TimeStamp();
const WebRtc_Word64 renderTimeStamp = videoFrame.RenderTimeMs();
VideoFrame& unconstVideoFrame =
const_cast<VideoFrame&> (videoFrame);
unconstVideoFrame.SetTimeStamp(timeStamp - 90 * _frameDelay);
unconstVideoFrame.SetRenderTime(renderTimeStamp - _frameDelay);
_fileRecorder->RecordVideoToFile(unconstVideoFrame);
unconstVideoFrame.SetRenderTime(renderTimeStamp);
unconstVideoFrame.SetTimeStamp(timeStamp);
}
}
// ---------------------
// From OutStream
// ---------------------
// 10 ms block of PCM 16
bool ViEFileRecorder::Write(const void* buf, int len)
{
if (!_isOutStreamStarted)
return true;
    // Always L16 from VoE
if (len % (2 * 80)) // 2 bytes 80 samples
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, _audioChannel,
"Audio length not supported: %d.", len);
return true;
}
AudioFrame audioFrame;
WebRtc_UWord16 lengthInSamples = len / 2;
audioFrame.UpdateFrame(_audioChannel, 0, (const WebRtc_Word16*) buf,
lengthInSamples, lengthInSamples * 100,
AudioFrame::kUndefined,
AudioFrame::kVadUnknown);
CriticalSectionScoped lock(*_ptrCritSec);
if (_fileRecorder && _fileRecorder->IsRecording())
{
TickTime tickTime = TickTime::Now();
_fileRecorder->RecordAudioToFile(audioFrame, &tickTime);
}
return true; // Always return true!
}
int ViEFileRecorder::Rewind()
{
// Not supported!
return -1;
}
} // namespace webrtc


@ -1,64 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_file_recorder.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_RECORDER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_RECORDER_H_
#include "typedefs.h"
#include "file_recorder.h"
#include "vie_file.h"
#include "voe_file.h"
namespace webrtc {
class CriticalSectionWrapper;
class ViEFileRecorder: protected webrtc::OutStream // for audio
{
public:
ViEFileRecorder(int channelId);
~ViEFileRecorder();
int StartRecording(const char* fileNameUTF8,
const webrtc::VideoCodec& codecInst,
AudioSource audioSource, int audioChannel,
const webrtc::CodecInst audioCodecInst,
VoiceEngine* vePtr,
const webrtc::FileFormats fileFormat = webrtc::kFileFormatAviFile);
int StopRecording();
void SetFrameDelay(int frameDelay);
bool RecordingStarted();
void RecordVideoFrame(const VideoFrame& videoFrame);
protected:
bool FirstFrameRecorded();
bool IsRecordingFileFormat(const webrtc::FileFormats fileFormat);
// From webrtc::OutStream
bool Write(const void* buf, int len);
int Rewind();
private:
CriticalSectionWrapper* _ptrCritSec;
FileRecorder* _fileRecorder;
bool _isFirstFrameRecorded;
bool _isOutStreamStarted;
int _instanceID;
int _frameDelay;
int _audioChannel;
AudioSource _audioSource;
VoEFile* _veFileInterface;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FILE_RECORDER_H_
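For reference, a rough sketch of how ViEFileRecorder is driven by its owner (it is an internal helper used by the file sub-API rather than a public interface); channelId, videoCodec, audioCodecInst, audioChannel, voiceEngine and currentFrameDelayMs are placeholders, and the AudioSource enumerator is assumed to live in the webrtc namespace.
// Sketch only: record outgoing video plus microphone audio to an AVI file.
webrtc::ViEFileRecorder recorder(channelId);
if (recorder.StartRecording("outgoing.avi", videoCodec,
                            webrtc::MICROPHONE, audioChannel, audioCodecInst,
                            voiceEngine, webrtc::kFileFormatAviFile) == 0)
{
    recorder.SetFrameDelay(currentFrameDelayMs); // keep audio/video in sync
    // For every frame to be recorded:
    //   recorder.RecordVideoFrame(videoFrame);
    recorder.StopRecording();
}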


@ -1,310 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "vie_frame_provider_base.h"
#include "critical_section_wrapper.h"
#include "tick_util.h"
#include "trace.h"
#include "vie_defines.h"
namespace webrtc {
ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engineId):
_id(Id),
_engineId(engineId),
_frameCallbackMap(),
_providerCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrExtraFrame(NULL),
_frameDelay(0)
{
}
ViEFrameProviderBase::~ViEFrameProviderBase()
{
if(_frameCallbackMap.Size()>0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId,_id), "FramCallbacks still exist when Provider deleted %d",_frameCallbackMap.Size());
}
for(MapItem* item=_frameCallbackMap.First();item!=NULL;item=_frameCallbackMap.Next(item))
{
static_cast<ViEFrameCallback*>(item->GetItem())->ProviderDestroyed(_id);
}
while(_frameCallbackMap.Erase(_frameCallbackMap.First()) == 0)
;
delete &_providerCritSect;
delete _ptrExtraFrame;
}
int ViEFrameProviderBase::Id()
{
return _id;
}
void ViEFrameProviderBase::DeliverFrame(webrtc::VideoFrame& videoFrame,int numCSRCs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize])
{
#ifdef _DEBUG
const TickTime startProcessTime=TickTime::Now();
#endif
CriticalSectionScoped cs(_providerCritSect);
// Deliver the frame to all registered callbacks
if (_frameCallbackMap.Size() > 0)
{
if(_frameCallbackMap.Size()==1)
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(_frameCallbackMap.First()->GetItem());
frameObserver->DeliverFrame(_id,videoFrame,numCSRCs,CSRC);
}
else
{
// Make a copy of the frame for all callbacks
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
if (_ptrExtraFrame == NULL)
{
_ptrExtraFrame = new webrtc::VideoFrame();
}
if (mapItem != NULL)
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem());
if (frameObserver != NULL)
{
// We must copy the frame each time since the previous receiver might swap it...
_ptrExtraFrame->CopyFrame(videoFrame);
frameObserver->DeliverFrame(_id, *_ptrExtraFrame,numCSRCs,CSRC);
}
}
}
}
}
#ifdef _DEBUG
const int processTime=(int) (TickTime::Now()-startProcessTime).Milliseconds();
if(processTime>25) // Warn If the delivery time is too long.
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId,_id), "%s Too long time: %ums",__FUNCTION__,processTime);
}
#endif
}
void ViEFrameProviderBase::SetFrameDelay(int frameDelay)
{
CriticalSectionScoped cs(_providerCritSect);
_frameDelay=frameDelay;
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
ViEFrameCallback* frameObserver = static_cast<ViEFrameCallback*>(mapItem->GetItem());
assert(frameObserver);
frameObserver->DelayChanged(_id,frameDelay);
}
}
int ViEFrameProviderBase::FrameDelay()
{
return _frameDelay;
}
int ViEFrameProviderBase::GetBestFormat(int& bestWidth,
int& bestHeight,
int& bestFrameRate)
{
int largestWidth = 0;
int largestHeight = 0;
int highestFrameRate = 0;
CriticalSectionScoped cs(_providerCritSect);
// Check if this one already exists...
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
int preferedWidth=0;
int preferedHeight=0;
int preferedFrameRate=0;
ViEFrameCallback* callbackObject = static_cast<ViEFrameCallback*>(mapItem->GetItem());
assert(callbackObject);
if(callbackObject->GetPreferedFrameSettings(preferedWidth,preferedHeight,preferedFrameRate)==0)
{
if (preferedWidth > largestWidth)
{
largestWidth = preferedWidth;
}
if (preferedHeight > largestHeight)
{
largestHeight = preferedHeight;
}
if (preferedFrameRate > highestFrameRate)
{
highestFrameRate = preferedFrameRate;
}
}
}
bestWidth = largestWidth;
bestHeight = largestHeight;
bestFrameRate = highestFrameRate;
return 0;
}
int ViEFrameProviderBase::RegisterFrameCallback(int observerId,ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", callbackObject);
{
CriticalSectionScoped cs(_providerCritSect);
// Check if this one already exists...
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// This callback is already registered
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p already registered", __FUNCTION__, callbackObject);
assert("!frameObserver already registered");
return -1;
}
}
if (_frameCallbackMap.Insert(observerId,callbackObject) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: Could not add 0x%p to list", __FUNCTION__, callbackObject);
return -1;
}
}
// Report current capture delay
callbackObject->DelayChanged(_id,_frameDelay);
    FrameCallbackChanged(); // Notify the implementer of this class that the callback list has changed
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterFrameCallback
// ----------------------------------------------------------------------------
int ViEFrameProviderBase::DeregisterFrameCallback(const ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", callbackObject);
{
CriticalSectionScoped cs(_providerCritSect);
bool itemFound=false;
// Try to find the callback in our list
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
const ViEFrameCallback* observer=static_cast<ViEFrameCallback*> (mapItem->GetItem());
if (observer == callbackObject)
{
// We found it, remove it!
_frameCallbackMap.Erase(mapItem);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p deregistered", __FUNCTION__, callbackObject);
itemFound=true;
break;
}
}
if(!itemFound)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not found", __FUNCTION__, callbackObject);
return -1;
}
}
    FrameCallbackChanged(); // Notify the implementer of this class that the callback list has changed
return 0;
}
// ----------------------------------------------------------------------------
// IsFrameCallbackRegistered
// ----------------------------------------------------------------------------
bool ViEFrameProviderBase::IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject)
{
if (callbackObject == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s: No argument", __FUNCTION__);
return false;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s(0x%p)", callbackObject);
for (MapItem* mapItem = _frameCallbackMap.First();
mapItem != NULL;
mapItem = _frameCallbackMap.Next(mapItem))
{
if (callbackObject == mapItem->GetItem())
{
// We found the callback
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p is registered", __FUNCTION__, callbackObject);
return true;
}
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _id),
"%s 0x%p not registered", __FUNCTION__, callbackObject);
return false;
}
// ----------------------------------------------------------------------------
// NumberOfRegistersFrameCallbacks
// ----------------------------------------------------------------------------
int ViEFrameProviderBase::NumberOfRegistersFrameCallbacks()
{
CriticalSectionScoped cs(_providerCritSect);
return _frameCallbackMap.Size();
}
} // namespace webrtc


@ -1,100 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_frame_provider_base.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
// Defines
#include "typedefs.h"
#include "module_common_types.h"
#include "map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoEncoder;
class ViEFrameCallback
{
public:
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
/*
* Delay has changed from the provider.
* frameDelay new capture delay in Ms.
*/
virtual void DelayChanged(int id, int frameDelay)=0;
/*
Fetch the width, height and frame rate preferred by this observer.
return 0 on success, -1 otherwise.
*/
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate)=0;
virtual void ProviderDestroyed(int id) = 0;
protected:
virtual ~ViEFrameCallback()
{
}
;
};
class ViEFrameProviderBase
{
public:
ViEFrameProviderBase(int Id, int engineId);
virtual ~ViEFrameProviderBase();
int Id();
// Register frame callbacks, i.e. a receiver of the captured frame.
virtual int RegisterFrameCallback(int observerId,
ViEFrameCallback* callbackObject);
virtual int
DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
virtual bool
IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
int NumberOfRegistersFrameCallbacks();
// FrameCallbackChanged
// Inherited classes should check for new frameSettings and reconfigure output if possible.
// Return 0 on success, -1 otherwise.
virtual int FrameCallbackChanged() = 0;
protected:
void DeliverFrame(VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frameDelay);
int FrameDelay();
int GetBestFormat(int& bestWidth, int& bestHeight, int& bestFrameRate);
int _id;
int _engineId;
protected:
// Frame callbacks
MapWrapper _frameCallbackMap;
CriticalSectionWrapper& _providerCritSect;
private:
VideoFrame* _ptrExtraFrame;
//Members
int _frameDelay;
};
} //namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_FRAME_PROVIDER_BASE_H_
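For reference, a minimal sketch of a callback implementing the ViEFrameCallback interface above and registering with a frame provider; FrameSink, provider and observerId are placeholders, and webrtc::kRtpCsrcSize is assumed to be defined in vie_defines.h.
// Sketch only: a frame sink that just consumes delivered frames.
class FrameSink : public webrtc::ViEFrameCallback
{
public:
    virtual void DeliverFrame(int id, webrtc::VideoFrame& videoFrame,
                              int numCSRCs = 0,
                              const WebRtc_UWord32 CSRC[webrtc::kRtpCsrcSize] = NULL)
    {
        // Consume videoFrame here; copy it if it must outlive this call,
        // since the provider may reuse or swap the underlying buffer.
    }
    virtual void DelayChanged(int id, int frameDelay) {}
    virtual int GetPreferedFrameSettings(int& width, int& height, int& frameRate)
    {
        width = 640; height = 480; frameRate = 30; // arbitrary preference
        return 0;
    }
    virtual void ProviderDestroyed(int id) {}
};
// Registration against any ViEFrameProviderBase-derived provider:
//   FrameSink sink;
//   provider->RegisterFrameCallback(observerId, &sink);
//   ...
//   provider->DeregisterFrameCallback(&sink);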


@ -1,396 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_image_process_impl.cpp
*/
#include "vie_image_process_impl.h"
// Defines
#include "vie_defines.h"
#include "trace.h"
#include "vie_errors.h"
#include "vie_impl.h"
#include "vie_channel.h"
#include "vie_channel_manager.h"
#include "vie_encoder.h"
#include "vie_input_manager.h"
#include "vie_capturer.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViEImageProcess* ViEImageProcess::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViEImageProcessImpl* vieImageProcessImpl = vieImpl;
(*vieImageProcessImpl)++; // Increase ref count
return vieImageProcessImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViEImageProcess::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViEImageProcess release too many times");
SetLastError(kViEAPIDoesNotExist);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViEImageProcess reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEImageProcessImpl::ViEImageProcessImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEImageProcessImpl::ViEImageProcessImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEImageProcessImpl::~ViEImageProcessImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViEImageProcessImpl::~ViEImageProcessImpl() Dtor");
}
// ============================================================================
// Effect filter
// ============================================================================
// ----------------------------------------------------------------------------
// RegisterCaptureEffectFilter
//
// Registers an effect filter for a capture device
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::RegisterCaptureEffectFilter(
const int captureId, ViEEffectFilter& captureFilter)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d)", __FUNCTION__, captureId);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
ViEInputManagerScoped is(_inputManager);
ViECapturer* vieCapture = is.Capture(captureId);
if (vieCapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
if (vieCapture->RegisterEffectFilter(&captureFilter) != 0)
{
SetLastError(kViEImageProcessFilterExists);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterCaptureEffectFilter
//
// Deregisters a previously set effect filter
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::DeregisterCaptureEffectFilter(const int captureId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d)", __FUNCTION__, captureId);
ViEInputManagerScoped is(_inputManager);
ViECapturer* vieCapture = is.Capture(captureId);
if (vieCapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
if (vieCapture->RegisterEffectFilter(NULL) != 0)
{
SetLastError(kViEImageProcessFilterDoesNotExist);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// RegisterSendEffectFilter
//
// Registers an effect filter for a channel
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::RegisterSendEffectFilter(const int videoChannel,
ViEEffectFilter& sendFilter)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
if (vieEncoder->RegisterEffectFilter(&sendFilter) != 0)
{
SetLastError(kViEImageProcessFilterExists);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSendEffectFilter
//
// Deregisters a previously set effect filter
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::DeregisterSendEffectFilter(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEEncoder* vieEncoder = cs.Encoder(videoChannel);
if (vieEncoder == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
if (vieEncoder->RegisterEffectFilter(NULL) != 0)
{
SetLastError(kViEImageProcessFilterDoesNotExist);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// RegisterRenderEffectFilter
//
// Registers an effect filter for an incoming decoded stream
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::RegisterRenderEffectFilter(
const int videoChannel, ViEEffectFilter& renderFilter)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
if (vieChannel->RegisterEffectFilter(&renderFilter) != 0)
{
SetLastError(kViEImageProcessFilterExists);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterRenderEffectFilter
//
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::DeregisterRenderEffectFilter(const int videoChannel)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d)", __FUNCTION__, videoChannel);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
if (vieChannel->RegisterEffectFilter(NULL) != 0)
{
SetLastError(kViEImageProcessFilterDoesNotExist);
return -1;
}
return 0;
}
// ============================================================================
// Image enhancement
// ============================================================================
// ----------------------------------------------------------------------------
// EnableDeflickering
//
// Enables/disables deflickering of the captured image.
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::EnableDeflickering(const int captureId,
const bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d, enable: %d)", __FUNCTION__, captureId, enable);
ViEInputManagerScoped is(_inputManager);
ViECapturer* vieCapture = is.Capture(captureId);
if (vieCapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
if (vieCapture->EnableDeflickering(enable) != 0)
{
if (enable)
{
SetLastError(kViEImageProcessAlreadyEnabled);
}
else
{
SetLastError(kViEImageProcessAlreadyDisabled);
}
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// EnableDenoising
//
// Enables/disables denoising of the captured image.
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::EnableDenoising(const int captureId, const bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(captureId: %d, enable: %d)", __FUNCTION__, captureId, enable);
ViEInputManagerScoped is(_inputManager);
ViECapturer* vieCapture = is.Capture(captureId);
if (vieCapture == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Capture device %d doesn't exist", __FUNCTION__,
captureId);
SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
if (vieCapture->EnableDenoising(enable) != 0)
{
if (enable)
{
SetLastError(kViEImageProcessAlreadyEnabled);
}
else
{
SetLastError(kViEImageProcessAlreadyDisabled);
}
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// EnableColorEnhancement
//
// Enables color enhancement for decoded images
// ----------------------------------------------------------------------------
int ViEImageProcessImpl::EnableColorEnhancement(const int videoChannel,
const bool enable)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(videoChannel: %d, enable: %d)", __FUNCTION__, videoChannel,
enable);
ViEChannelManagerScoped cs(_channelManager);
ViEChannel* vieChannel = cs.Channel(videoChannel);
if (vieChannel == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: Channel %d doesn't exist", __FUNCTION__, videoChannel);
SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
if (vieChannel->EnableColorEnhancement(enable) != 0)
{
if (enable)
{
SetLastError(kViEImageProcessAlreadyEnabled);
}
else
{
SetLastError(kViEImageProcessAlreadyDisabled);
}
return -1;
}
return 0;
}
} // namespace webrtc
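For reference, a minimal usage sketch of this sub-API (illustrative only, not part of the deleted file): the engine pointer, capture id and channel id are placeholders obtained elsewhere, and error handling is reduced to early returns.

// Illustrative sketch of driving ViEImageProcess, assuming the sub-API was
// compiled in and the ids refer to an existing capture device and channel.
int EnableImageProcessingSketch(webrtc::VideoEngine* videoEngine,
                                int captureId, int videoChannel)
{
    webrtc::ViEImageProcess* imageProcess =
        webrtc::ViEImageProcess::GetInterface(videoEngine);
    if (imageProcess == NULL)
    {
        return -1;  // Sub-API not available or engine pointer invalid.
    }
    // Clean up the captured image before it is encoded.
    imageProcess->EnableDeflickering(captureId, true);
    imageProcess->EnableDenoising(captureId, true);
    // Enhance colors on the decoded, incoming stream.
    imageProcess->EnableColorEnhancement(videoChannel, true);
    // Give back the reference taken by GetInterface().
    imageProcess->Release();
    return 0;
}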

View File

@ -1,66 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_image_process_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMAGE_PROCESS_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMAGE_PROCESS_IMPL_H_
#include "typedefs.h"
#include "vie_ref_count.h"
#include "vie_image_process.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViEImageProcessImpl
// ----------------------------------------------------------------------------
class ViEImageProcessImpl: public virtual ViESharedData,
public ViEImageProcess,
public ViERefCount
{
public:
virtual int Release();
// Effect filter
virtual int RegisterCaptureEffectFilter(const int captureId,
ViEEffectFilter& captureFilter);
virtual int DeregisterCaptureEffectFilter(const int captureId);
virtual int RegisterSendEffectFilter(const int videoChannel,
ViEEffectFilter& sendFilter);
virtual int DeregisterSendEffectFilter(const int videoChannel);
virtual int RegisterRenderEffectFilter(const int videoChannel,
ViEEffectFilter& renderFilter);
virtual int DeregisterRenderEffectFilter(const int videoChannel);
// Image enhancement
virtual int EnableDeflickering(const int captureId, const bool enable);
virtual int EnableDenoising(const int captureId, const bool enable);
virtual int EnableColorEnhancement(const int videoChannel,
const bool enable);
protected:
ViEImageProcessImpl();
virtual ~ViEImageProcessImpl();
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMAGE_PROCESS_IMPL_H_

View File

@ -1,301 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_impl.cc
*/
#include "vie_impl.h"
#include "trace.h"
#if (defined(_WIN32) || defined(_WIN64))
#include <Windows.h> // For LoadLibrary
#include <tchar.h> // For _T
#endif
#ifdef ANDROID
#include "video_capture.h"
#include "video_render.h"
#endif
// Global counter to get an id for each new ViE instance
static WebRtc_Word32 gViEActiveInstanceCounter = 0;
namespace webrtc
{
// -------------------------------------------------------------------------
// GetVideoEngine (C-function)
//
// extern "C" ensures that GetProcAddress() can find the function address
// -------------------------------------------------------------------------
extern "C"
{
VideoEngine* GetVideoEngine();
VideoEngine* GetVideoEngine()
{
VideoEngineImpl* self = new VideoEngineImpl();
if (self == NULL)
{
return NULL;
}
gViEActiveInstanceCounter++;
VideoEngine* vie = reinterpret_cast<VideoEngine*> (self);
return vie;
}
}
// -------------------------------------------------------------------------
// Create
// -------------------------------------------------------------------------
VideoEngine* VideoEngine::Create()
{
#if (defined(_WIN32) || defined(_WIN64))
// Load a debug dll, if there is one...
HMODULE hmod_ = LoadLibrary(TEXT("VideoEngineTestingDLL.dll"));
if (hmod_)
{
typedef VideoEngine* (*PFNGetVideoEngineLib)(void);
PFNGetVideoEngineLib pfn =
(PFNGetVideoEngineLib)GetProcAddress(hmod_,"GetVideoEngine");
if (pfn)
{
VideoEngine* self = pfn();
return self;
}
else
{
assert(!"Failed to open test dll VideoEngineTestingDLL.dll");
return NULL;
}
}
#endif
return GetVideoEngine();
}
// -------------------------------------------------------------------------
// Delete
//
// Deletes the VideoEngineImpl instance if all reference counters are
// down to zero.
// -------------------------------------------------------------------------
bool VideoEngine::Delete(VideoEngine*& videoEngine)
{
if (videoEngine == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"VideoEngine::Delete - No argument");
return false;
}
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"VideoEngine::Delete( vie = 0x%p)", videoEngine);
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
// Check all reference counters
ViEBaseImpl* vieBase = vieImpl;
if (vieBase->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViEBase ref count: %d", vieBase->GetCount());
return false;
}
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
ViECaptureImpl* vieCapture = vieImpl;
if (vieCapture->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViECapture ref count: %d", vieCapture->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
ViECodecImpl* vieCodec = vieImpl;
if (vieCodec->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViECodec ref count: %d", vieCodec->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
ViEEncryptionImpl* vieEncryption = vieImpl;
if (vieEncryption->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViEEncryption ref count: %d", vieEncryption->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
ViEExternalCodecImpl* vieExternalCodec = vieImpl;
if (vieExternalCodec->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViEEncryption ref count: %d", vieEncryption->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
ViEFileImpl* vieFile = vieImpl;
if (vieFile->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViEFile ref count: %d", vieFile->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
ViEImageProcessImpl* vieImageProcess = vieImpl;
if (vieImageProcess->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViEImageProcess ref count: %d", vieImageProcess->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
ViENetworkImpl* vieNetwork = vieImpl;
if (vieNetwork->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViENetwork ref count: %d", vieNetwork->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
ViERenderImpl* vieRender = vieImpl;
if (vieRender->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViERender ref count: %d", vieRender->GetCount());
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
ViERTP_RTCPImpl* vieRtpRtcp = vieImpl;
if (vieRtpRtcp->GetCount() > 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"ViERTP_RTCP ref count: %d", vieRtpRtcp->GetCount());
return false;
}
#endif
// Delete VieImpl
delete vieImpl;
vieImpl = NULL;
videoEngine = NULL;
// Decrease the number of instances
gViEActiveInstanceCounter--;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"%s: instance deleted. Remaining instances: %d", __FUNCTION__,
gViEActiveInstanceCounter);
return true;
}
// -------------------------------------------------------------------------
// [static] SetTraceFile
// -------------------------------------------------------------------------
int VideoEngine::SetTraceFile(const char* fileNameUTF8,
const bool addFileCounter)
{
if (fileNameUTF8 == NULL)
{
return -1;
}
if (Trace::SetTraceFile(fileNameUTF8, addFileCounter) == -1)
{
return -1;
}
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetTraceFileName(fileNameUTF8 = %s, addFileCounter = %d",
fileNameUTF8, addFileCounter);
return 0;
}
// -------------------------------------------------------------------------
// [static] SetTraceFilter
// -------------------------------------------------------------------------
int VideoEngine::SetTraceFilter(const unsigned int filter)
{
WebRtc_UWord32 oldFilter = 0;
Trace::LevelFilter(oldFilter);
if (filter == webrtc::kTraceNone && oldFilter != webrtc::kTraceNone)
{
// Do the logging before turning it off
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetTraceFilter(filter = 0x%x)", filter);
}
WebRtc_Word32 error = Trace::SetLevelFilter(filter);
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetTraceFilter(filter = 0x%x)", filter);
if (error != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetTraceFilter error: %d", error);
return -1;
}
return 0;
}
// -------------------------------------------------------------------------
// [static] SetTraceCallback
// -------------------------------------------------------------------------
int VideoEngine::SetTraceCallback(webrtc::TraceCallback* callback)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetTraceCallback(webrtc::TraceCallback = 0x%p)", callback);
return Trace::SetTraceCallback(callback);
}
// -------------------------------------------------------------------------
// [static] SetAndroidObjects
// -------------------------------------------------------------------------
int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"SetAndroidObjects()");
#ifdef ANDROID
if (VideoCaptureModule::SetAndroidObjects(javaVM,javaContext) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"Could not set capture module Android objects");
return -1;
}
if (VideoRender::SetAndroidObjects(javaVM) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, gViEActiveInstanceCounter,
"Could not set render module Android objects");
return -1;
}
return 0;
#else
return -1;
#endif
}
} // namespace webrtc
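A condensed lifecycle sketch of the functions defined above (illustrative only; the trace file name and filter levels are arbitrary examples). It shows why Delete() can return false: every sub-API reference acquired via a GetInterface() call must be returned with Release() first.

// Illustrative engine lifecycle sketch.
int RunEngineLifecycleSketch()
{
    webrtc::VideoEngine* videoEngine = webrtc::VideoEngine::Create();
    if (videoEngine == NULL)
    {
        return -1;
    }
    webrtc::VideoEngine::SetTraceFilter(webrtc::kTraceWarning |
                                        webrtc::kTraceError);
    webrtc::VideoEngine::SetTraceFile("vie_trace.txt", false);

    // ... acquire sub-APIs via their GetInterface() factories, use them, and
    // Release() each one so every reference counter is back to zero ...

    // Delete() refuses to destroy the engine while any sub-API still holds a
    // reference; it returns false until all Release() calls have been made.
    if (webrtc::VideoEngine::Delete(videoEngine) == false)
    {
        return -1;
    }
    return 0;
}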

View File

@ -1,90 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMPL_H_
#include "engine_configurations.h"
#include "vie_defines.h"
// Include all sub-APIs
#include "vie_base_impl.h"
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
#include "vie_capture_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
#include "vie_codec_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
#include "vie_encryption_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
#include "vie_file_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
#include "vie_image_process_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
#include "vie_network_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
#include "vie_render_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
#include "vie_rtp_rtcp_impl.h"
#endif
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
#include "vie_external_codec_impl.h"
#endif
namespace webrtc
{
class VideoEngineImpl: public ViEBaseImpl
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
, public ViECodecImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
, public ViECaptureImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
, public ViEEncryptionImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
, public ViEFileImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
, public ViEImageProcessImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
, public ViENetworkImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
, public ViERenderImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
, public ViERTP_RTCPImpl
#endif
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
, public ViEExternalCodecImpl
#endif
{
public:
VideoEngineImpl() {};
virtual ~VideoEngineImpl() {};
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_IMPL_H_
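The multiple-inheritance layout above is what makes the sub-API factories work: a VideoEngine* handed out by Create() actually points to a VideoEngineImpl, so each GetInterface() can reinterpret_cast it back and then rely on an implicit derived-to-base conversion to the requested sub-API implementation. A condensed sketch of that pattern, mirroring ViEImageProcess::GetInterface() earlier in this diff (illustrative; it only compiles when the image-process sub-API is enabled):

// Sketch of the GetInterface() pattern enabled by VideoEngineImpl's
// multiple inheritance.
webrtc::ViEImageProcess* GetImageProcessSketch(webrtc::VideoEngine* videoEngine)
{
    if (videoEngine == NULL)
    {
        return NULL;
    }
    // The public handle is a VideoEngineImpl underneath.
    webrtc::VideoEngineImpl* vieImpl =
        reinterpret_cast<webrtc::VideoEngineImpl*>(videoEngine);
    // Implicit upcast to one of the sub-API implementation bases.
    webrtc::ViEImageProcessImpl* imageProcess = vieImpl;
    (*imageProcess)++;  // Increase the sub-API reference count.
    return imageProcess;
}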

View File

@ -1,817 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_input_manager.cc
*/
#include "vie_input_manager.h"
#include "vie_defines.h"
#include "common_types.h"
#include "critical_section_wrapper.h"
#include "video_capture.h"
#include "video_capture.h"
#include "video_coding.h"
#include "video_coding_defines.h"
#include "rw_lock_wrapper.h"
#include "trace.h"
#include "vie_capturer.h"
#include "vie_file_player.h"
#include "vie_errors.h"
#include <cassert>
namespace webrtc {
//=============================================================================
// ViEInputManager
//=============================================================================
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEInputManager::ViEInputManager(const int engineId)
: _engineId(engineId),
_mapCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrCaptureDeviceInfo(NULL), _vieFrameProviderMap(),
_freeCaptureDeviceId(), _moduleProcessThread(NULL)
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
for (int idx = 0; idx < kViEMaxCaptureDevices; idx++)
{
_freeCaptureDeviceId[idx] = true;
}
#ifdef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
_ptrCaptureDeviceInfo=NULL;
#else
_ptrCaptureDeviceInfo = VideoCaptureModule::CreateDeviceInfo(
ViEModuleId(_engineId));
#endif
for (int idx = 0; idx < kViEMaxFilePlayers; idx++)
{
_freeFileId[idx] = true;
}
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEInputManager::~ViEInputManager()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
while (_vieFrameProviderMap.Size() != 0)
{
MapItem* item = _vieFrameProviderMap.First();
assert(item);
ViEFrameProviderBase* frameProvider = static_cast<ViEFrameProviderBase*>
(item->GetItem());
_vieFrameProviderMap.Erase(item);
delete frameProvider;
}
delete &_mapCritsect;
if (_ptrCaptureDeviceInfo)
{
VideoCaptureModule::DestroyDeviceInfo( _ptrCaptureDeviceInfo);
_ptrCaptureDeviceInfo = NULL;
}
}
// ----------------------------------------------------------------------------
// SetModuleProcessThread
// Initializes the process thread used for non-time-critical tasks in capture modules.
// ----------------------------------------------------------------------------
void ViEInputManager::SetModuleProcessThread(ProcessThread& moduleProcessThread)
{
assert(!_moduleProcessThread);
_moduleProcessThread = &moduleProcessThread;
}
// ----------------------------------------------------------------------------
// NumberOfCaptureDevices
//
// Returns the number of available capture devices
// ----------------------------------------------------------------------------
// Capture device information
int ViEInputManager::NumberOfCaptureDevices()
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
assert(_ptrCaptureDeviceInfo);
return _ptrCaptureDeviceInfo->NumberOfDevices();
}
// ----------------------------------------------------------------------------
// GetDeviceName
// ----------------------------------------------------------------------------
int ViEInputManager::GetDeviceName(WebRtc_UWord32 deviceNumber,
WebRtc_UWord8* deviceNameUTF8,
WebRtc_UWord32 deviceNameLength,
WebRtc_UWord8* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceNumber: %d)", __FUNCTION__, deviceNumber);
assert(_ptrCaptureDeviceInfo);
return _ptrCaptureDeviceInfo->GetDeviceName(deviceNumber, deviceNameUTF8,
deviceNameLength,
deviceUniqueIdUTF8,
deviceUniqueIdUTF8Length);
}
// ----------------------------------------------------------------------------
// NumberOfCaptureCapabilities
//
// Returns the number of capture capabilities for the specified capture device
// ----------------------------------------------------------------------------
int ViEInputManager::NumberOfCaptureCapabilities(
const WebRtc_UWord8* deviceUniqueIdUTF8)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
assert(_ptrCaptureDeviceInfo);
return _ptrCaptureDeviceInfo->NumberOfCapabilities(deviceUniqueIdUTF8);
}
// ----------------------------------------------------------------------------
// GetCaptureCapability
// ----------------------------------------------------------------------------
int ViEInputManager::GetCaptureCapability(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord32 deviceCapabilityNumber,
CaptureCapability& capability)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueIdUTF8: %s, deviceCapabilityNumber: %d)",
__FUNCTION__, deviceUniqueIdUTF8, deviceCapabilityNumber);
assert(_ptrCaptureDeviceInfo);
VideoCaptureCapability moduleCapability;
int result = _ptrCaptureDeviceInfo->GetCapability(deviceUniqueIdUTF8,
deviceCapabilityNumber,
moduleCapability);
// Copy from module type to public type
capability.expectedCaptureDelay = moduleCapability.expectedCaptureDelay;
capability.height = moduleCapability.height;
capability.width = moduleCapability.width;
capability.interlaced = moduleCapability.interlaced;
capability.rawType = moduleCapability.rawType;
capability.codecType = moduleCapability.codecType;
capability.maxFPS = moduleCapability.maxFPS;
return result;
}
int ViEInputManager::GetOrientation(const WebRtc_UWord8* deviceUniqueIdUTF8,
RotateCapturedFrame &orientation)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueIdUTF8: %s,)", __FUNCTION__, deviceUniqueIdUTF8);
assert(_ptrCaptureDeviceInfo);
VideoCaptureRotation moduleOrientation;
int result = _ptrCaptureDeviceInfo->GetOrientation(deviceUniqueIdUTF8,
moduleOrientation);
// Copy from module type to public type
switch (moduleOrientation)
{
case kCameraRotate0:
orientation = RotateCapturedFrame_0;
break;
case kCameraRotate90:
orientation = RotateCapturedFrame_90;
break;
case kCameraRotate180:
orientation = RotateCapturedFrame_180;
break;
case kCameraRotate270:
orientation = RotateCapturedFrame_270;
break;
default:
assert(!"Unknown enum");
}
return result;
}
//------------------------------------------------------------------------------
//
// DisplayCaptureSettingsDialogBox
// Shows the OS-specific capture settings dialog.
// Return 0 on success.
//------------------------------------------------------------------------------
int ViEInputManager::DisplayCaptureSettingsDialogBox(
const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord8* dialogTitleUTF8,
void* parentWindow,
WebRtc_UWord32 positionX,
WebRtc_UWord32 positionY)
{
assert(_ptrCaptureDeviceInfo);
return _ptrCaptureDeviceInfo->DisplayCaptureSettingsDialogBox(
deviceUniqueIdUTF8,
dialogTitleUTF8,
parentWindow,
positionX,
positionY);
}
// ----------------------------------------------------------------------------
// CreateCaptureDevice
//
// Creates a capture module for the specified capture device and assigns
// a capture device id for the device
// ----------------------------------------------------------------------------
int ViEInputManager::CreateCaptureDevice(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord32 deviceUniqueIdUTF8Length,
int& captureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueId: %s)", __FUNCTION__, deviceUniqueIdUTF8);
#ifdef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueId: Only external capture modules can be used.) "
, __FUNCTION__);
return -1;
#endif
CriticalSectionScoped cs(_mapCritsect);
// Make sure the device is not already allocated
for (MapItem* item = _vieFrameProviderMap.First(); item != NULL;
item = _vieFrameProviderMap.Next(item))
{
if (item->GetId() >= kViECaptureIdBase &&
item->GetId() <= kViECaptureIdMax) // Make sure it is a capture device
{
ViECapturer* vieCapture = static_cast<ViECapturer*> (item->GetItem());
assert(vieCapture);
if (strncmp((char*) vieCapture->CurrentDeviceName(),
(char*) deviceUniqueIdUTF8,
strlen((char*) vieCapture->CurrentDeviceName())) == 0)
{
return kViECaptureDeviceAlreadyAllocated;
}
}
}
// Make sure the device name is valid
bool foundDevice = false;
for (WebRtc_UWord32 deviceIndex = 0;
deviceIndex < _ptrCaptureDeviceInfo->NumberOfDevices(); ++deviceIndex)
{
if (deviceUniqueIdUTF8Length >kVideoCaptureUniqueNameLength)
{
// user's string length is longer than the max
return -1;
}
WebRtc_UWord8 foundName[kVideoCaptureDeviceNameLength] = "";
WebRtc_UWord8 foundUniqueName[kVideoCaptureUniqueNameLength] = "";
_ptrCaptureDeviceInfo->GetDeviceName(deviceIndex, foundName,
kVideoCaptureDeviceNameLength,
foundUniqueName,
kVideoCaptureUniqueNameLength);
if (strncmp((char*) deviceUniqueIdUTF8, (char*) foundUniqueName,
strlen((char*) deviceUniqueIdUTF8)) == 0)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(_engineId),
"%s:%d Capture device was found by unique ID: %s. Returning",
__FUNCTION__, __LINE__, deviceUniqueIdUTF8);
foundDevice = true;
break;
}
}
if (!foundDevice)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(_engineId),
"%s:%d Capture device NOT found by unique ID: %s. Returning",
__FUNCTION__, __LINE__, deviceUniqueIdUTF8);
return kViECaptureDeviceDoesnNotExist;
}
int newcaptureId = 0;
if (GetFreeCaptureId(newcaptureId) == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Maximum supported number of capture devices already in use",
__FUNCTION__);
return kViECaptureDeviceMaxNoDevicesAllocated;
}
ViECapturer* vieCapture =ViECapturer::CreateViECapture(newcaptureId,
_engineId,
deviceUniqueIdUTF8,
deviceUniqueIdUTF8Length,
*_moduleProcessThread);
if (vieCapture == NULL)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not create capture module for %s", __FUNCTION__,
deviceUniqueIdUTF8);
return kViECaptureDeviceUnknownError;
}
if (_vieFrameProviderMap.Insert(newcaptureId, vieCapture) != 0)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not insert capture module for %s", __FUNCTION__,
deviceUniqueIdUTF8);
return kViECaptureDeviceUnknownError;
}
captureId = newcaptureId;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueId: %s, captureId: %d)", __FUNCTION__,
deviceUniqueIdUTF8, captureId);
return 0;
}
int ViEInputManager::CreateCaptureDevice(VideoCaptureModule& captureModule,
int& captureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s", __FUNCTION__);
CriticalSectionScoped cs(_mapCritsect);
int newcaptureId = 0;
if (GetFreeCaptureId(newcaptureId) == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Maximum supported number of capture devices already in use",
__FUNCTION__);
return kViECaptureDeviceMaxNoDevicesAllocated;
}
ViECapturer* vieCapture = ViECapturer::CreateViECapture(newcaptureId,
_engineId,
captureModule,
*_moduleProcessThread);
if (vieCapture == NULL)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could attach capture module.", __FUNCTION__);
return kViECaptureDeviceUnknownError;
}
if (_vieFrameProviderMap.Insert(newcaptureId, vieCapture) != 0)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not insert capture module", __FUNCTION__);
return kViECaptureDeviceUnknownError;
}
captureId = newcaptureId;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s, captureId: %d", __FUNCTION__, captureId);
return 0;
}
// ----------------------------------------------------------------------------
// DestroyCaptureDevice
//
// Releases the capture device with specified id
// ----------------------------------------------------------------------------
int ViEInputManager::DestroyCaptureDevice(const int captureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(captureId: %d)", __FUNCTION__, captureId);
ViECapturer* vieCapture = NULL;
{
// We need exclusive access to the object to delete it
ViEManagerWriteScoped wl(*this); // Take this write lock first since the read lock is taken before _mapCritsect
CriticalSectionScoped cs(_mapCritsect);
vieCapture = ViECapturePtr(captureId);
if (vieCapture == NULL)
{
// No capture device with that id
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s(captureId: %d) - No such capture device id",
__FUNCTION__, captureId);
return -1;
}
WebRtc_UWord32 numCallbacks = vieCapture->NumberOfRegistersFrameCallbacks();
if (numCallbacks > 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),
"%s(captureId: %d) - %u registered callbacks when destroying capture device",
__FUNCTION__, captureId, numCallbacks);
}
_vieFrameProviderMap.Erase(captureId);
ReturnCaptureId(captureId);
} // Leave cs before deleting the capture object. This is because deleting the object might cause deletions of renderers so we prefer to not have a lock at that time.
delete vieCapture;
return 0;
}
// ----------------------------------------------------------------------------
// CreateExternalCaptureDevice
//
// Creates a capture module to be used with external capturing.
// ----------------------------------------------------------------------------
int ViEInputManager::CreateExternalCaptureDevice(ViEExternalCapture*& externalCapture,
int& captureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
#ifdef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueId: Only external capture modules can be used.) "
, __FUNCTION__);
return -1;
#endif
CriticalSectionScoped cs(_mapCritsect);
int newcaptureId = 0;
if (GetFreeCaptureId(newcaptureId) == false)
{
WEBRTC_TRACE( webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Maximum supported number of capture devices already in use",
__FUNCTION__);
return kViECaptureDeviceMaxNoDevicesAllocated;
}
ViECapturer* vieCapture = ViECapturer::CreateViECapture(newcaptureId,
_engineId, NULL, 0,
*_moduleProcessThread);
if (vieCapture == NULL)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not create capture module for external capture.",
__FUNCTION__);
return kViECaptureDeviceUnknownError;
}
if (_vieFrameProviderMap.Insert(newcaptureId, vieCapture) != 0)
{
ReturnCaptureId(newcaptureId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not insert capture module for external capture.",
__FUNCTION__);
return kViECaptureDeviceUnknownError;
}
captureId = newcaptureId;
externalCapture = vieCapture;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s, captureId: %d)", __FUNCTION__, captureId);
return 0;
}
int ViEInputManager::CreateFilePlayer(const WebRtc_Word8* fileNameUTF8,
const bool loop,
const webrtc::FileFormats fileFormat,
VoiceEngine* vePtr, int& fileId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(deviceUniqueId: %s)", __FUNCTION__, fileNameUTF8);
CriticalSectionScoped cs(_mapCritsect);
int newFileId = 0;
if (GetFreeFileId(newFileId) == false)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Maximum supported number of file players already in use",
__FUNCTION__);
return kViEFileMaxNoOfFilesOpened;
}
ViEFilePlayer* vieFilePlayer = ViEFilePlayer::CreateViEFilePlayer(
newFileId, _engineId, fileNameUTF8,
loop, fileFormat, *this, vePtr);
if (vieFilePlayer == NULL)
{
ReturnFileId(newFileId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not open file %s for playback", __FUNCTION__,
fileNameUTF8);
return kViEFileUnknownError;
}
if (_vieFrameProviderMap.Insert(newFileId, vieFilePlayer) != 0)
{
ReturnFileId(newFileId);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not insert file player for %s", __FUNCTION__,
fileNameUTF8);
delete vieFilePlayer;
return kViEFileUnknownError;
}
fileId = newFileId;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(filename: %s, fileId: %d)", __FUNCTION__, fileNameUTF8,
newFileId);
return 0;
}
int ViEInputManager::DestroyFilePlayer(int fileId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s(fileId: %d)", __FUNCTION__, fileId);
ViEFilePlayer* vieFilePlayer = NULL;
{
// We need exclusive access to the object to delete it
ViEManagerWriteScoped wl(*this); // Take this write lock first since the read lock is taken before _mapCritsect
CriticalSectionScoped cs(_mapCritsect);
vieFilePlayer = ViEFilePlayerPtr(fileId);
if (vieFilePlayer == NULL)
{
// No file player with that id
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s(fileId: %d) - No such file player",
__FUNCTION__, fileId);
return -1;
}
int numCallbacks =
vieFilePlayer->NumberOfRegistersFrameCallbacks();
if (numCallbacks > 0)
{
WEBRTC_TRACE( webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId),
"%s(fileId: %d) - %u registered callbacks when destroying file player",
__FUNCTION__, fileId, numCallbacks);
}
_vieFrameProviderMap.Erase(fileId);
ReturnFileId(fileId);
} // Leave cs before deleting the file object. This is because deleting the object might cause deletions of renderers so we prefer to not have a lock at that time.
delete vieFilePlayer;
return 0;
}
// ============================================================================
// Private methods
// ============================================================================
// ----------------------------------------------------------------------------
// GetFreeCaptureId
//
// Gets and allocates a free capture device id. Assumed protected by caller
// ----------------------------------------------------------------------------
// Private, assumed protected by caller
bool ViEInputManager::GetFreeCaptureId(int& freecaptureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
for (int id = 0; id < kViEMaxCaptureDevices; id++)
{
if (_freeCaptureDeviceId[id])
{
// We found a free capture device id
_freeCaptureDeviceId[id] = false;
freecaptureId = id + kViECaptureIdBase;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s: new id: %d", __FUNCTION__, freecaptureId);
return true;
}
}
return false;
}
// ----------------------------------------------------------------------------
// ReturnCaptureId
//
// Frees a capture id assigned in GetFreeCaptureId
// ----------------------------------------------------------------------------
void ViEInputManager::ReturnCaptureId(int captureId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s(%d)",
__FUNCTION__, captureId);
CriticalSectionScoped cs(_mapCritsect);
if (captureId >= kViECaptureIdBase &&
captureId < kViEMaxCaptureDevices + kViECaptureIdBase)
{
_freeCaptureDeviceId[captureId - kViECaptureIdBase] = true;
}
return;
}
// ----------------------------------------------------------------------------
// GetFreeFileId
//
// Gets and allocates a free file id. Assumed protected by caller
// ----------------------------------------------------------------------------
// Private, assumed protected by caller
bool ViEInputManager::GetFreeFileId(int& freeFileId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
for (int id = 0; id < kViEMaxFilePlayers; id++)
{
if (_freeFileId[id])
{
// We found a free file id
_freeFileId[id] = false;
freeFileId = id + kViEFileIdBase;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s: new id: %d", __FUNCTION__, freeFileId);
return true;
}
}
return false;
}
// ----------------------------------------------------------------------------
// ReturnFileId
//
// Frees a file id assigned in GetFreeFileId
// ----------------------------------------------------------------------------
void ViEInputManager::ReturnFileId(int fileId)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s(%d)",
__FUNCTION__, fileId);
CriticalSectionScoped cs(_mapCritsect);
if (fileId >= kViEFileIdBase &&
fileId < kViEMaxFilePlayers + kViEFileIdBase)
{
_freeFileId[fileId - kViEFileIdBase] = true;
}
return;
}
// ============================================================================
// Methods used by ViEInputManagerScoped
// ============================================================================
// ----------------------------------------------------------------------------
// ViECapturePtr
//
// Gets the ViECapturer for the capture device id
// ----------------------------------------------------------------------------
ViECapturer* ViEInputManager::ViECapturePtr(int captureId) const
{
if (!(captureId >= kViECaptureIdBase &&
captureId <= kViECaptureIdBase + kViEMaxCaptureDevices))
return NULL;
CriticalSectionScoped cs(_mapCritsect);
MapItem* mapItem = _vieFrameProviderMap.Find(captureId);
if (mapItem == NULL)
{
// No ViECapturer for this capture id...
return NULL;
}
ViECapturer* vieCapture = static_cast<ViECapturer*> (mapItem->GetItem());
return vieCapture;
}
// ----------------------------------------------------------------------------
// ViEFrameProvider
//
// Gets the ViEFrameProvider for this capture observer.
// ----------------------------------------------------------------------------
ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(
const ViEFrameCallback* captureObserver) const
{
assert(captureObserver);
CriticalSectionScoped cs(_mapCritsect);
for (MapItem* providerItem = _vieFrameProviderMap.First(); providerItem
!= NULL; providerItem = _vieFrameProviderMap.Next(providerItem))
{
ViEFrameProviderBase* vieFrameProvider = static_cast<ViEFrameProviderBase*>
(providerItem->GetItem());
assert(vieFrameProvider != NULL);
if (vieFrameProvider->IsFrameCallbackRegistered(captureObserver))
{
// We found it
return vieFrameProvider;
}
}
// No capture device set for this channel
return NULL;
}
// ----------------------------------------------------------------------------
// ViEFrameProvider
//
// Gets the ViEFrameProvider for this capture observer.
// ----------------------------------------------------------------------------
ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(int providerId) const
{
CriticalSectionScoped cs(_mapCritsect);
MapItem* mapItem = _vieFrameProviderMap.Find(providerId);
if (mapItem == NULL)
{
return NULL;
}
ViEFrameProviderBase* vieFrameProvider = static_cast<ViEFrameProviderBase*>
(mapItem->GetItem());
return vieFrameProvider;
}
// ----------------------------------------------------------------------------
// GetViECaptures
//
// Copies all registered frame providers into vieCaptureMap
// ----------------------------------------------------------------------------
void ViEInputManager::GetViECaptures(MapWrapper& vieCaptureMap)
{
CriticalSectionScoped cs(_mapCritsect);
if (_vieFrameProviderMap.Size() == 0)
{
// No ViECaptures
return;
}
// Add all items to the map
for (MapItem* item = _vieFrameProviderMap.First();
item != NULL;
item = _vieFrameProviderMap.Next(item))
{
vieCaptureMap.Insert(item->GetId(), item->GetItem());
}
return;
}
// ----------------------------------------------------------------------------
// ViEFilePlayerPtr
//
// Gets the ViEFilePlayer for this fileId
// ----------------------------------------------------------------------------
ViEFilePlayer* ViEInputManager::ViEFilePlayerPtr(int fileId) const
{
if (fileId < kViEFileIdBase || fileId > kViEFileIdMax)
return NULL;
CriticalSectionScoped cs(_mapCritsect);
MapItem* mapItem = _vieFrameProviderMap.Find(fileId);
if (mapItem == NULL)
{
// No ViEFilePlayer for this fileId...
return NULL;
}
ViEFilePlayer* vieFilePlayer =
static_cast<ViEFilePlayer*> (mapItem->GetItem());
return vieFilePlayer;
}
// ----------------------------------------------------------------------------
// ViEInputManagerScoped
//
// Provides protected access to ViEInputManager
// ----------------------------------------------------------------------------
ViEInputManagerScoped::ViEInputManagerScoped(
const ViEInputManager& vieInputManager)
: ViEManagerScopedBase(vieInputManager)
{
}
ViECapturer* ViEInputManagerScoped::Capture(int captureId) const
{
return static_cast<const ViEInputManager*>
(_vieManager)->ViECapturePtr(captureId);
}
ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
const ViEFrameCallback* captureObserver) const
{
return static_cast<const ViEInputManager*>
(_vieManager)->ViEFrameProvider(captureObserver);
}
ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(int providerId) const
{
return static_cast<const ViEInputManager*>
(_vieManager)->ViEFrameProvider( providerId);
}
ViEFilePlayer* ViEInputManagerScoped::FilePlayer(int fileId) const
{
return static_cast<const ViEInputManager*>
(_vieManager)->ViEFilePlayerPtr(fileId);
}
} // namespace webrtc
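To illustrate how the manager and its scoped accessor are used together, here is a minimal sketch (not part of the deleted file). It assumes the ViEInputManager has already been given a process thread via SetModuleProcessThread(), and the device name arguments are obtained from GetDeviceName().

// Illustrative sketch: allocate a capture device, access it under the scoped
// read lock, then release it.
int AllocateCaptureSketch(webrtc::ViEInputManager& inputManager,
                          const WebRtc_UWord8* deviceUniqueIdUTF8,
                          WebRtc_UWord32 deviceUniqueIdUTF8Length)
{
    int captureId = -1;
    if (inputManager.CreateCaptureDevice(deviceUniqueIdUTF8,
                                         deviceUniqueIdUTF8Length,
                                         captureId) != 0)
    {
        return -1;
    }
    {
        // Read-scoped access keeps the capturer alive while it is used.
        webrtc::ViEInputManagerScoped is(inputManager);
        webrtc::ViECapturer* capturer = is.Capture(captureId);
        if (capturer == NULL)
        {
            return -1;
        }
        // ... start capture, register frame callbacks, etc. ...
    }  // The scoped read lock must be released before destruction below.
    return inputManager.DestroyCaptureDevice(captureId);
}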

View File

@ -1,121 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_input_manager.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_INPUT_MANAGER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_INPUT_MANAGER_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "map_wrapper.h"
#include "video_capture.h"
#include "vie_manager_base.h"
#include "vie_frame_provider_base.h"
#include "vie_capture.h"
class ViEExternalCapture;
namespace webrtc {
class CriticalSectionWrapper;
class ProcessThread;
class RWLockWrapper;
class ViECapturer;
class ViEFilePlayer;
class VoiceEngine;
class ViEInputManager: private ViEManagerBase
{
friend class ViEInputManagerScoped;
public:
ViEInputManager(int engineId);
~ViEInputManager();
void SetModuleProcessThread(ProcessThread& moduleProcessThread);
// Capture device information
int NumberOfCaptureDevices();
int GetDeviceName(WebRtc_UWord32 deviceNumber,
WebRtc_UWord8* deviceNameUTF8,
WebRtc_UWord32 deviceNameLength,
WebRtc_UWord8* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length);
int NumberOfCaptureCapabilities(const WebRtc_UWord8* deviceUniqueIdUTF8);
int GetCaptureCapability(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord32 deviceCapabilityNumber,
CaptureCapability& capability);
int DisplayCaptureSettingsDialogBox(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord8* dialogTitleUTF8,
void* parentWindow,
WebRtc_UWord32 positionX,
WebRtc_UWord32 positionY);
int GetOrientation(const WebRtc_UWord8* deviceUniqueIdUTF8,
RotateCapturedFrame &orientation);
// Create/delete Capture device settings
// Return zero on success. A ViEError on failure.
int CreateCaptureDevice(const WebRtc_UWord8* deviceUniqueIdUTF8,
const WebRtc_UWord32 deviceUniqueIdUTF8Length,
int& captureId);
int CreateCaptureDevice(VideoCaptureModule& captureModule,
int& captureId);
int CreateExternalCaptureDevice(ViEExternalCapture*& externalCapture,
int& captureId);
int DestroyCaptureDevice(int captureId);
int CreateFilePlayer(const WebRtc_Word8* fileNameUTF8, const bool loop,
const webrtc::FileFormats fileFormat, VoiceEngine* vePtr,
int& fileId);
int DestroyFilePlayer(int fileId);
private:
bool GetFreeCaptureId(int& freecaptureId);
void ReturnCaptureId(int captureId);
bool GetFreeFileId(int& freeFileId);
void ReturnFileId(int fileId);
ViEFrameProviderBase* ViEFrameProvider(const ViEFrameCallback*
captureObserver) const;
ViEFrameProviderBase* ViEFrameProvider(int providerId) const;
ViECapturer* ViECapturePtr(int captureId) const;
void GetViECaptures(MapWrapper& vieCaptureMap);
ViEFilePlayer* ViEFilePlayerPtr(int fileId) const;
// Members
int _engineId;
CriticalSectionWrapper& _mapCritsect;
MapWrapper _vieFrameProviderMap;
// Capture devices
VideoCaptureModule::DeviceInfo* _ptrCaptureDeviceInfo;
int _freeCaptureDeviceId[kViEMaxCaptureDevices];
// File players
int _freeFileId[kViEMaxFilePlayers];
// Uses
ProcessThread* _moduleProcessThread;
};
class ViEInputManagerScoped: private ViEManagerScopedBase
{
public:
ViEInputManagerScoped(const ViEInputManager& vieInputManager);
ViECapturer* Capture(int captureId) const;
ViEFilePlayer* FilePlayer(int fileId) const;
ViEFrameProviderBase* FrameProvider(int providerId) const;
ViEFrameProviderBase* FrameProvider(const ViEFrameCallback*
captureObserver) const;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_INPUT_MANAGER_H_

View File

@ -1,113 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "vie_manager_base.h"
#include "rw_lock_wrapper.h"
#include "assert.h"
namespace webrtc {
ViEManagerBase::ViEManagerBase() :
_instanceRWLock(*RWLockWrapper::CreateRWLock())
{
}
ViEManagerBase::~ViEManagerBase()
{
delete &_instanceRWLock;
}
// ----------------------------------------------------------------------------
// ReadLockManager
//
// Lock count increase. Used by ViEManagerScopedBase
// ----------------------------------------------------------------------------
void ViEManagerBase::ReadLockManager() const
{
_instanceRWLock.AcquireLockShared();
}
// ----------------------------------------------------------------------------
// ReleaseLockManager
//
// Releases the lock count.
// ----------------------------------------------------------------------------
void ViEManagerBase::ReleaseLockManager() const
{
_instanceRWLock.ReleaseLockShared();
}
// ----------------------------------------------------------------------------
// WriteLockManager
//
// Lock count increase. Used by ViEManagerWriteScoped
// ----------------------------------------------------------------------------
void ViEManagerBase::WriteLockManager()
{
_instanceRWLock.AcquireLockExclusive();
}
// ----------------------------------------------------------------------------
// ReleaseWriteLockManager
//
// Releases the lock count.
// ----------------------------------------------------------------------------
void ViEManagerBase::ReleaseWriteLockManager()
{
_instanceRWLock.ReleaseLockExclusive();
}
// ----------------------------------------------------------------------------
// ViEManagerScopedBase
//
// ----------------------------------------------------------------------------
ViEManagerScopedBase::ViEManagerScopedBase(const ViEManagerBase& ViEManagerBase) :
_vieManager(&ViEManagerBase), _refCount(0)
{
_vieManager->ReadLockManager();
}
ViEManagerScopedBase::~ViEManagerScopedBase()
{
assert(_refCount==0);
_vieManager->ReleaseLockManager();
}
// ----------------------------------------------------------------------------
//
// ViEManagerWriteScoped
//
// ----------------------------------------------------------------------------
ViEManagerWriteScoped::ViEManagerWriteScoped(ViEManagerBase& vieManager) :
_vieManager(&vieManager)
{
_vieManager->WriteLockManager();
}
ViEManagerWriteScoped::~ViEManagerWriteScoped()
{
_vieManager->ReleaseWriteLockManager();
}
// ----------------------------------------------------------------------------
// ViEManagedItemScopedBase
//
// ----------------------------------------------------------------------------
ViEManagedItemScopedBase::ViEManagedItemScopedBase(
ViEManagerScopedBase& vieScopedManager) :
_vieScopedManager(vieScopedManager)
{
_vieScopedManager._refCount++;
}
ViEManagedItemScopedBase::~ViEManagedItemScopedBase()
{
_vieScopedManager._refCount--;
}
} // namespace webrtc

View File

@ -1,63 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_MANAGER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_MANAGER_BASE_H_
namespace webrtc {
class RWLockWrapper;
class ViEManagerBase
{
friend class ViEManagerScopedBase;
friend class ViEManagedItemScopedBase;
friend class ViEManagerWriteScoped;
public:
ViEManagerBase(void);
~ViEManagerBase(void);
private:
void WriteLockManager();
void ReleaseWriteLockManager();
void ReadLockManager() const;
void ReleaseLockManager() const;
RWLockWrapper& _instanceRWLock;
};
class ViEManagerWriteScoped
{
public:
ViEManagerWriteScoped(ViEManagerBase& vieManager);
~ViEManagerWriteScoped();
private:
ViEManagerBase* _vieManager;
};
class ViEManagerScopedBase
{
friend class ViEManagedItemScopedBase;
public:
ViEManagerScopedBase(const ViEManagerBase& vieManager);
~ViEManagerScopedBase();
protected:
const ViEManagerBase* _vieManager;
private:
int _refCount;
};
class ViEManagedItemScopedBase
{
public:
ViEManagedItemScopedBase(ViEManagerScopedBase& vieScopedManager);
~ViEManagedItemScopedBase();
protected:
ViEManagerScopedBase& _vieScopedManager;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_MANAGER_BASE_H_
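The intended usage of these helpers, as seen in ViEInputManager earlier in this diff: derive the manager from ViEManagerBase, expose read access through a class derived from ViEManagerScopedBase, and take a ViEManagerWriteScoped before removing an item so no reader can still hold it. A condensed sketch with hypothetical types (not part of the deleted files):

// Hypothetical manager illustrating the locking protocol defined above.
class SketchManager : private webrtc::ViEManagerBase
{
    friend class SketchManagerScoped;
public:
    void RemoveItem(int itemId)
    {
        // Exclusive access: blocks until all scoped readers are gone.
        webrtc::ViEManagerWriteScoped writeLock(*this);
        // ... erase itemId from the internal map and delete the object ...
    }
};

// Read-scoped accessor, mirroring ViEInputManagerScoped.
class SketchManagerScoped : private webrtc::ViEManagerScopedBase
{
public:
    explicit SketchManagerScoped(const SketchManager& manager)
        : ViEManagerScopedBase(manager) {}
    // Lookups performed through this object run under the shared read lock.
};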

File diff suppressed because it is too large

View File

@ -1,137 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_network_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_NETWORK_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_NETWORK_IMPL_H_
#include "typedefs.h"
#include "vie_defines.h"
#include "vie_network.h"
#include "vie_ref_count.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViENetworkImpl
// ----------------------------------------------------------------------------
class ViENetworkImpl : public virtual ViESharedData,
public ViENetwork,
public ViERefCount
{
public:
virtual int Release();
// Receive functions
virtual int SetLocalReceiver(const int videoChannel,
const unsigned short rtpPort,
const unsigned short rtcpPort,
const char* ipAddress);
virtual int GetLocalReceiver(const int videoChannel,
unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress);
// Send functions
virtual int SetSendDestination(const int videoChannel,
const char* ipAddress,
const unsigned short rtpPort,
const unsigned short rtcpPort,
const unsigned short sourceRtpPort,
const unsigned short sourceRtcpPort);
virtual int GetSendDestination(const int videoChannel, char* ipAddress,
unsigned short& rtpPort,
unsigned short& rtcpPort,
unsigned short& sourceRtpPort,
unsigned short& sourceRtcpPort);
// External transport
virtual int RegisterSendTransport(const int videoChannel,
Transport& transport);
virtual int DeregisterSendTransport(const int videoChannel);
virtual int ReceivedRTPPacket(const int videoChannel, const void* data,
const int length);
virtual int ReceivedRTCPPacket(const int videoChannel, const void* data,
const int length);
// Get info
virtual int GetSourceInfo(const int videoChannel, unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress,
unsigned int ipAddressLength);
virtual int GetLocalIP(char ipAddress[64], bool ipv6);
// IPv6
virtual int EnableIPv6(int videoChannel);
virtual bool IsIPv6Enabled(int videoChannel);
// Source IP address and port filter
virtual int SetSourceFilter(const int videoChannel,
const unsigned short rtpPort,
const unsigned short rtcpPort,
const char* ipAddress);
virtual int GetSourceFilter(const int videoChannel,
unsigned short& rtpPort,
unsigned short& rtcpPort, char* ipAddress);
// ToS
virtual int SetSendToS(const int videoChannel, const int DSCP,
const bool useSetSockOpt);
virtual int GetSendToS(const int videoChannel, int& DSCP,
bool& useSetSockOpt);
// GQoS
virtual int SetSendGQoS(const int videoChannel, const bool enable,
const int serviceType, const int overrideDSCP);
virtual int GetSendGQoS(const int videoChannel, bool& enabled,
int& serviceType, int& overrideDSCP);
// Network settings
virtual int SetMTU(int videoChannel, unsigned int mtu);
// Packet timeout notification
virtual int SetPacketTimeoutNotification(const int videoChannel,
bool enable, int timeoutSeconds);
// Periodic dead-or-alive reports
virtual int RegisterObserver(const int videoChannel,
ViENetworkObserver& observer);
virtual int DeregisterObserver(const int videoChannel);
virtual int
SetPeriodicDeadOrAliveStatus(const int videoChannel, const bool enable,
const unsigned int sampleTimeSeconds);
// Send extra packet using the User Datagram Protocol (UDP)
virtual int SendUDPPacket(const int videoChannel, const void* data,
const unsigned int length, int& transmittedBytes,
bool useRtcpSocket);
protected:
ViENetworkImpl();
virtual ~ViENetworkImpl();
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_NETWORK_IMPL_H_
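A hedged sketch of how an application might hand externally received packets to this sub-API (not part of the original file; it assumes the public ViENetwork interface exposes a GetInterface() factory analogous to the other sub-APIs, and FeedExternalPackets is a hypothetical helper):

// Illustrative sketch only, not part of the original file.
#include "vie_network.h"

int FeedExternalPackets(webrtc::VideoEngine* engine, int videoChannel,
                        const void* rtpData, int rtpLength,
                        const void* rtcpData, int rtcpLength)
{
    // Assumption: ViENetwork::GetInterface() exists like the other sub-APIs.
    webrtc::ViENetwork* network = webrtc::ViENetwork::GetInterface(engine);
    if (network == NULL)
        return -1;
    int result = 0;
    // Hand packets received on an application-owned transport to the channel.
    if (network->ReceivedRTPPacket(videoChannel, rtpData, rtpLength) != 0)
        result = -1;
    if (network->ReceivedRTCPPacket(videoChannel, rtcpData, rtcpLength) != 0)
        result = -1;
    network->Release();  // decrease the sub-API reference count
    return result;
}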

View File

@ -1,194 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_performance_monitor.cc
*/
#include "vie_performance_monitor.h"
#include "cpu_wrapper.h"
#include "critical_section_wrapper.h"
#include "event_wrapper.h"
#include "thread_wrapper.h"
#include "tick_util.h"
#include "trace.h"
#include "vie_base.h"
#include "vie_defines.h"
namespace webrtc {
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEPerformanceMonitor::ViEPerformanceMonitor(int engineId)
: _engineId(engineId),
_pointerCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrViEMonitorThread(NULL),
_monitorkEvent(*EventWrapper::Create()),
_averageApplicationCPU(kViECpuStartValue),
_averageSystemCPU(kViECpuStartValue), _cpu(NULL), _vieBaseObserver(NULL)
{
_cpu = CpuWrapper::CreateCpu();
if (_cpu)
{
_cpu->CpuUsage(); // to initialize
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not create CpuWrapper", __FUNCTION__);
}
}
ViEPerformanceMonitor::~ViEPerformanceMonitor()
{
Terminate();
delete &_pointerCritsect;
delete &_monitorkEvent;
if (_cpu)
{
delete _cpu;
_cpu = NULL;
}
}
int ViEPerformanceMonitor::Init()
{
if (_cpu == NULL)
{
// Performance monitoring not supported
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Not supported", __FUNCTION__);
return 0;
}
CriticalSectionScoped cs(_pointerCritsect);
if (_ptrViEMonitorThread == NULL)
{
_monitorkEvent.StartTimer(true, kViEMonitorPeriodMs);
_ptrViEMonitorThread
= ThreadWrapper::CreateThread(ViEMonitorThreadFunction, this,
kNormalPriority,
"ViEPerformanceMonitor");
unsigned tId = 0;
if (_ptrViEMonitorThread->Start(tId))
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Performance monitor thread started %u",
__FUNCTION__, tId);
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Could not start performance monitor", __FUNCTION__);
_monitorkEvent.StopTimer();
return -1;
}
}
return 0;
}
int ViEPerformanceMonitor::Terminate()
{
{
_pointerCritsect.Enter();
_vieBaseObserver = NULL;
_pointerCritsect.Leave();
_monitorkEvent.StopTimer();
if (_ptrViEMonitorThread)
{
ThreadWrapper* tmpThread = _ptrViEMonitorThread;
_ptrViEMonitorThread = NULL;
_monitorkEvent.Set();
if (tmpThread->Stop())
{
delete tmpThread;
tmpThread = NULL;
}
}
}
return 0;
}
int ViEPerformanceMonitor::RegisterViEBaseObserver(
ViEBaseObserver* vieBaseObserver)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId), "%s",
__FUNCTION__);
CriticalSectionScoped cs(_pointerCritsect);
if (vieBaseObserver)
{
if (_vieBaseObserver)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"%s: Observer already started", __FUNCTION__);
return -1;
}
_vieBaseObserver = vieBaseObserver;
} else
{
_vieBaseObserver = NULL;
}
return 0;
}
bool ViEPerformanceMonitor::ViEBaseObserverRegistered()
{
CriticalSectionScoped cs(_pointerCritsect);
return _vieBaseObserver != NULL;
}
int ViEPerformanceMonitor::GetAverageApplicationCPU(int& applicationCPU)
{
// Not supported
return -1;
}
int ViEPerformanceMonitor::GetAverageSystemCPU(int& systemCPU)
{
if (_cpu)
{
return _cpu->CpuUsage();
}
return -1;
}
bool ViEPerformanceMonitor::ViEMonitorThreadFunction(void* obj)
{
return static_cast<ViEPerformanceMonitor*> (obj)->ViEMonitorProcess();
}
bool ViEPerformanceMonitor::ViEMonitorProcess()
{
// Periodically triggered with period kViEMonitorPeriodMs
_monitorkEvent.Wait(kViEMonitorPeriodMs);
{
if (_ptrViEMonitorThread == NULL)
{
// Thread removed, exit
return false;
}
if (_cpu)
{
int cpuLoad = _cpu->CpuUsage();
if (cpuLoad > 75)
{
_pointerCritsect.Enter();
if (_vieBaseObserver)
{
_vieBaseObserver->PerformanceAlarm(cpuLoad);
}
_pointerCritsect.Leave();
}
}
}
return true;
}
} // namespace webrtc

View File

@ -1,63 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_performance_monitor.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_PERFORMANCE_MONITOR_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_PERFORMANCE_MONITOR_H_
// Defines
#include "vie_defines.h"
#include "typedefs.h"
namespace webrtc
{
class CriticalSectionWrapper;
class CpuWrapper;
class EventWrapper;
class ThreadWrapper;
class ViEBaseObserver;
class ViEPerformanceMonitor
{
public:
ViEPerformanceMonitor(int engineId);
~ViEPerformanceMonitor();
int Init();
int Terminate();
int RegisterViEBaseObserver(ViEBaseObserver* vieBaseObserver);
bool ViEBaseObserverRegistered();
// ViEBase
int GetAverageApplicationCPU(int& applicationCPU);
int GetAverageSystemCPU(int& systemCPU);
protected:
static bool ViEMonitorThreadFunction(void* obj);
bool ViEMonitorProcess();
private:
enum { kViEMonitorPeriodMs = 975 };
enum { kViECpuStartValue = 75 };
const int _engineId;
CriticalSectionWrapper& _pointerCritsect;
ThreadWrapper* _ptrViEMonitorThread;
EventWrapper& _monitorkEvent;
int _averageApplicationCPU;
int _averageSystemCPU;
CpuWrapper* _cpu;
ViEBaseObserver* _vieBaseObserver;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_PERFORMANCE_MONITOR_H_
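A brief lifecycle sketch for this monitor (not part of the original file; RunMonitor is a hypothetical helper and engineId 0 is arbitrary):

// Illustrative sketch only, not part of the original file.
#include "vie_performance_monitor.h"

static int RunMonitor(webrtc::ViEBaseObserver* observer)
{
    webrtc::ViEPerformanceMonitor monitor(0);     // engineId 0, illustrative only
    if (monitor.Init() != 0)
        return -1;                                // monitor thread could not start
    monitor.RegisterViEBaseObserver(observer);    // observer receives PerformanceAlarm()
    // ... the monitor thread samples CPU load roughly every 975 ms and
    //     notifies the observer when the load exceeds 75% ...
    monitor.Terminate();                          // stops the thread, clears the observer
    return 0;
}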

View File

@ -1,531 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
 * vie_receiver.cc
*/
#include "vie_receiver.h"
#include "critical_section_wrapper.h"
#include "rtp_rtcp.h"
#ifdef WEBRTC_SRTP
#include "SrtpModule.h"
#endif
#include "video_coding.h"
#include "rtp_dump.h"
#include "trace.h"
namespace webrtc {
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViEReceiver::ViEReceiver(int engineId, int channelId,
RtpRtcp& moduleRtpRtcp,
VideoCodingModule& moduleVcm)
: _receiveCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_engineId(engineId), _channelId(channelId), _rtpRtcp(moduleRtpRtcp),
_vcm(moduleVcm),
#ifdef WEBRTC_SRTP
_ptrSrtp(NULL),
_ptrSrtcp(NULL),
_ptrSrtpBuffer(NULL),
_ptrSrtcpBuffer(NULL),
#endif
_ptrExternalDecryption(NULL), _ptrDecryptionBuffer(NULL),
_rtpDump(NULL), _receiving(false)
{
_rtpRtcp.RegisterIncomingVideoCallback(this);
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViEReceiver::~ViEReceiver()
{
delete &_receiveCritsect;
#ifdef WEBRTC_SRTP
if (_ptrSrtpBuffer)
{
delete [] _ptrSrtpBuffer;
_ptrSrtpBuffer = NULL;
}
if (_ptrSrtcpBuffer)
{
delete [] _ptrSrtcpBuffer;
_ptrSrtcpBuffer = NULL;
}
#endif
if (_ptrDecryptionBuffer)
{
delete[] _ptrDecryptionBuffer;
_ptrDecryptionBuffer = NULL;
}
if (_rtpDump)
{
_rtpDump->Stop();
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
}
}
// ============================================================================
// Decryption
// ============================================================================
// ----------------------------------------------------------------------------
// RegisterExternalDecryption
// ----------------------------------------------------------------------------
int ViEReceiver::RegisterExternalDecryption(Encryption* decryption)
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrExternalDecryption)
{
return -1;
}
_ptrDecryptionBuffer = new WebRtc_UWord8[kViEMaxMtu];
if (_ptrDecryptionBuffer == NULL)
{
return -1;
}
_ptrExternalDecryption = decryption;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterExternalDecryption
// ----------------------------------------------------------------------------
int ViEReceiver::DeregisterExternalDecryption()
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrExternalDecryption == NULL)
{
return -1;
}
_ptrExternalDecryption = NULL;
return 0;
}
#ifdef WEBRTC_SRTP
// ----------------------------------------------------------------------------
// RegisterSRTPModule
// ----------------------------------------------------------------------------
int ViEReceiver::RegisterSRTPModule(SrtpModule* srtpModule)
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrSrtp ||
srtpModule == NULL)
{
return -1;
}
_ptrSrtpBuffer = new WebRtc_UWord8[kViEMaxMtu];
if (_ptrSrtpBuffer == NULL)
{
return -1;
}
_ptrSrtp = srtpModule;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSRTPModule
// ----------------------------------------------------------------------------
int ViEReceiver::DeregisterSRTPModule()
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrSrtp == NULL)
{
return -1;
}
if (_ptrSrtpBuffer)
{
delete [] _ptrSrtpBuffer;
_ptrSrtpBuffer = NULL;
}
_ptrSrtp = NULL;
return 0;
}
// ----------------------------------------------------------------------------
// RegisterSRTCPModule
// ----------------------------------------------------------------------------
int ViEReceiver::RegisterSRTCPModule(SrtpModule* srtcpModule)
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrSrtcp ||
srtcpModule == NULL)
{
return -1;
}
_ptrSrtcpBuffer = new WebRtc_UWord8[kViEMaxMtu];
if (_ptrSrtcpBuffer == NULL)
{
return -1;
}
_ptrSrtcp = srtcpModule;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSRTPCModule
// ----------------------------------------------------------------------------
int ViEReceiver::DeregisterSRTCPModule()
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrSrtcp == NULL)
{
return -1;
}
if (_ptrSrtcpBuffer)
{
delete [] _ptrSrtcpBuffer;
_ptrSrtcpBuffer = NULL;
}
_ptrSrtcp = NULL;
return 0;
}
#endif
// ----------------------------------------------------------------------------
// IncomingRTPPacket
//
// Receives RTP packets from SocketTransport
// ----------------------------------------------------------------------------
void ViEReceiver::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
const WebRtc_Word32 incomingRtpPacketLength,
const WebRtc_Word8* fromIP,
const WebRtc_UWord16 fromPort)
{
InsertRTPPacket(incomingRtpPacket, incomingRtpPacketLength);
return;
}
// ----------------------------------------------------------------------------
// IncomingRTCPPacket
//
// Receives RTCP packets from SocketTransport
// ----------------------------------------------------------------------------
void ViEReceiver::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
const WebRtc_Word32 incomingRtcpPacketLength,
const WebRtc_Word8* fromIP,
const WebRtc_UWord16 fromPort)
{
InsertRTCPPacket(incomingRtcpPacket, incomingRtcpPacketLength);
return;
}
// ----------------------------------------------------------------------------
// ReceivedRTPPacket
//
// Receives RTP packets from external transport
// ----------------------------------------------------------------------------
int ViEReceiver::ReceivedRTPPacket(const void* rtpPacket, int rtpPacketLength)
{
if (!_receiving)
{
return -1;
}
return InsertRTPPacket((const WebRtc_Word8*) rtpPacket, rtpPacketLength);
}
// ----------------------------------------------------------------------------
// ReceivedRTCPPacket
//
// Receives RTCP packets from external transport
// ----------------------------------------------------------------------------
int ViEReceiver::ReceivedRTCPPacket(const void* rtcpPacket,
int rtcpPacketLength)
{
if (!_receiving)
{
return -1;
}
return InsertRTCPPacket((const WebRtc_Word8*) rtcpPacket, rtcpPacketLength);
}
// ----------------------------------------------------------------------------
// OnReceivedPayloadData
//
// From RtpData, callback for data from RTP module
// ----------------------------------------------------------------------------
WebRtc_Word32 ViEReceiver::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader)
{
if (rtpHeader == NULL)
{
return 0;
}
if (_vcm.IncomingPacket(payloadData, payloadSize, *rtpHeader) != 0)
{
// Check this...
return -1;
}
return 0;
}
// ============================================================================
// Private methods
// ============================================================================
// ----------------------------------------------------------------------------
// InsertRTPPacket
// ----------------------------------------------------------------------------
int ViEReceiver::InsertRTPPacket(const WebRtc_Word8* rtpPacket,
int rtpPacketLength)
{
WebRtc_UWord8* receivedPacket = (WebRtc_UWord8*) (rtpPacket);
int receivedPacketLength = rtpPacketLength;
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrExternalDecryption)
{
int decryptedLength = 0;
_ptrExternalDecryption->decrypt(_channelId, receivedPacket,
_ptrDecryptionBuffer,
(int) receivedPacketLength,
(int*) &decryptedLength);
if (decryptedLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"RTP decryption failed");
return -1;
} else if (decryptedLength > kViEMaxMtu)
{
WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
" %d bytes is allocated as RTP decrytption output => memory is now corrupted",
kViEMaxMtu);
return -1;
}
receivedPacket = _ptrDecryptionBuffer;
receivedPacketLength = decryptedLength;
}
#ifdef WEBRTC_SRTP
if (_ptrSrtp)
{
int decryptedLength = 0;
_ptrSrtp->decrypt(_channelId, receivedPacket, _ptrSrtpBuffer, receivedPacketLength, &decryptedLength);
if (decryptedLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _channelId), "RTP decryption failed");
return -1;
}
else if (decryptedLength > kViEMaxMtu)
{
WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, ViEId(_engineId, _channelId), " %d bytes are allocated as RTP decryption output => memory is now corrupted", kViEMaxMtu);
return -1;
}
receivedPacket = _ptrSrtpBuffer;
receivedPacketLength = decryptedLength;
}
#endif
if (_rtpDump)
{
_rtpDump->DumpPacket(receivedPacket,
(WebRtc_UWord16) receivedPacketLength);
}
}
return _rtpRtcp.IncomingPacket(receivedPacket, receivedPacketLength);
}
// ----------------------------------------------------------------------------
// InsertRTCPPacket
// ----------------------------------------------------------------------------
int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcpPacket,
int rtcpPacketLength)
{
WebRtc_UWord8* receivedPacket = (WebRtc_UWord8*) rtcpPacket;
int receivedPacketLength = rtcpPacketLength;
{
CriticalSectionScoped cs(_receiveCritsect);
if (_ptrExternalDecryption)
{
int decryptedLength = 0;
_ptrExternalDecryption->decrypt_rtcp(_channelId, receivedPacket,
_ptrDecryptionBuffer,
(int) receivedPacketLength,
(int*) &decryptedLength);
if (decryptedLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"RTP decryption failed");
return -1;
} else if (decryptedLength > kViEMaxMtu)
{
WEBRTC_TRACE(
webrtc::kTraceCritical,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
" %d bytes is allocated as RTP decrytption output => memory is now corrupted",
kViEMaxMtu);
return -1;
}
receivedPacket = _ptrDecryptionBuffer;
receivedPacketLength = decryptedLength;
}
#ifdef WEBRTC_SRTP
if (_ptrSrtcp)
{
int decryptedLength = 0;
_ptrSrtcp->decrypt_rtcp(_channelId, receivedPacket, _ptrSrtcpBuffer, (int) receivedPacketLength, (int*) &decryptedLength);
if (decryptedLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _channelId), "RTCP decryption failed");
return -1;
}
else if (decryptedLength > kViEMaxMtu)
{
WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, ViEId(_engineId, _channelId), " %d bytes are allocated as RTCP decryption output => memory is now corrupted", kViEMaxMtu);
return -1;
}
receivedPacket = _ptrSrtcpBuffer;
receivedPacketLength = decryptedLength;
}
#endif
if (_rtpDump)
{
_rtpDump->DumpPacket(receivedPacket,
(WebRtc_UWord16) receivedPacketLength);
}
}
return _rtpRtcp.IncomingPacket(receivedPacket, receivedPacketLength);
}
// ----------------------------------------------------------------------------
// StartReceive
//
// Only used for external transport
// ----------------------------------------------------------------------------
void ViEReceiver::StartReceive()
{
_receiving = true;
}
// ----------------------------------------------------------------------------
// StopReceive
//
// Only used for external transport
// ----------------------------------------------------------------------------
void ViEReceiver::StopReceive()
{
_receiving = false;
}
// ----------------------------------------------------------------------------
// StartRTPDump
// ----------------------------------------------------------------------------
int ViEReceiver::StartRTPDump(const char fileNameUTF8[1024])
{
CriticalSectionScoped cs(_receiveCritsect);
if (_rtpDump)
{
// Restart it if it already exists and is started
_rtpDump->Stop();
} else
{
_rtpDump = RtpDump::CreateRtpDump();
if (_rtpDump == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"%s: Failed to create RTP dump", __FUNCTION__);
return -1;
}
}
if (_rtpDump->Start(fileNameUTF8) != 0)
{
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: Failed to start RTP dump", __FUNCTION__);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StopRTPDump
// ----------------------------------------------------------------------------
int ViEReceiver::StopRTPDump()
{
CriticalSectionScoped cs(_receiveCritsect);
if (_rtpDump)
{
if (_rtpDump->IsActive())
{
_rtpDump->Stop();
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"%s: Dump not active", __FUNCTION__);
}
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), "%s: RTP dump not started",
__FUNCTION__);
return -1;
}
return 0;
}
// Implements RtpVideoFeedback
void ViEReceiver::OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message)
{
// Don't do anything, the action is triggered on the default module
return;
}
void ViEReceiver::OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs,
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax)
{
// Called for default module
return;
}
} // namespace webrtc

View File

@ -1,116 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_receiver.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "typedefs.h"
#include "udp_transport.h"
#include "rtp_rtcp_defines.h"
#ifdef WEBRTC_SRTP
class SrtpModule;
#endif
namespace webrtc
{
class CriticalSectionWrapper;
// Forward declarations
class RtpDump;
class RtpRtcp;
class VideoCodingModule;
class Encryption;
class ViEReceiver: public UdpTransportData,
public RtpData,
public RtpVideoFeedback
{
public:
ViEReceiver(int engineId, int channelId, RtpRtcp& moduleRtpRtcp,
webrtc::VideoCodingModule& moduleVcm);
~ViEReceiver();
int RegisterExternalDecryption(Encryption* decryption);
int DeregisterExternalDecryption();
#ifdef WEBRTC_SRTP
int RegisterSRTPModule(SrtpModule* srtpModule);
int DeregisterSRTPModule();
int RegisterSRTCPModule(SrtpModule* srtpModule);
int DeregisterSRTCPModule();
#endif
void StartReceive();
void StopReceive();
int StartRTPDump(const char fileNameUTF8[1024]);
int StopRTPDump();
// Implements UdpTransportData, receiving packets from the socket
virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
const WebRtc_Word32 incomingRtpPacketLength,
const WebRtc_Word8* fromIP,
const WebRtc_UWord16 fromPort);
virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
const WebRtc_Word32 incomingRtcpPacketLength,
const WebRtc_Word8* fromIP,
const WebRtc_UWord16 fromPort);
// Receives packets from external transport
int ReceivedRTPPacket(const void* rtpPacket, int rtpPacketLength);
int ReceivedRTCPPacket(const void* rtcpPacket, int rtcpPacketLength);
// From RtpData, callback for data from RTP module
virtual WebRtc_Word32
OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader);
// Implements RtpVideoFeedback
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message = 0);
virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs,
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax);
private:
int InsertRTPPacket(const WebRtc_Word8* rtpPacket, int rtpPacketLength);
int InsertRTCPPacket(const WebRtc_Word8* rtcpPacket, int rtcpPacketLength);
// Registered members
CriticalSectionWrapper& _receiveCritsect;
int _engineId;
int _channelId;
RtpRtcp& _rtpRtcp;
VideoCodingModule& _vcm;
#ifdef WEBRTC_SRTP
SrtpModule* _ptrSrtp;
SrtpModule* _ptrSrtcp;
WebRtc_UWord8* _ptrSrtpBuffer;
WebRtc_UWord8* _ptrSrtcpBuffer;
#endif
Encryption* _ptrExternalDecryption;
WebRtc_UWord8* _ptrDecryptionBuffer;
RtpDump* _rtpDump;
bool _receiving; // Only needed to protect external transport
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_
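A short sketch of the external-transport receive path through this class (not part of the original file; DeliverExternalPacket is a hypothetical helper):

// Illustrative sketch only, not part of the original file.
#include "vie_receiver.h"

void DeliverExternalPacket(webrtc::ViEReceiver& receiver,
                           const void* packet, int length, bool isRtcp)
{
    // StartReceive() must have been called once for the channel; otherwise
    // both calls below return -1 and the packet is dropped.
    const int ret = isRtcp ? receiver.ReceivedRTCPPacket(packet, length)
                           : receiver.ReceivedRTPPacket(packet, length);
    if (ret != 0)
    {
        // Dropped: not receiving yet, or decryption/RTP parsing failed.
    }
}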

View File

@ -1,58 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* ViERefCount.cpp
*/
#include "vie_ref_count.h"
#include "critical_section_wrapper.h"
ViERefCount::ViERefCount() :
_count(0),
_crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection())
{
}
ViERefCount::~ViERefCount()
{
delete &_crit;
}
ViERefCount&
ViERefCount::operator++(int)
{
webrtc::CriticalSectionScoped lock(_crit);
_count++;
return *this;
}
ViERefCount&
ViERefCount::operator--(int)
{
webrtc::CriticalSectionScoped lock(_crit);
_count--;
return *this;
}
void
ViERefCount::Reset()
{
webrtc::CriticalSectionScoped lock(_crit);
_count = 0;
}
int
ViERefCount::GetCount() const
{
return _count;
}

View File

@ -1,39 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_ref_count.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REF_COUNT_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REF_COUNT_H_
namespace webrtc {
class CriticalSectionWrapper;
}
class ViERefCount
{
public:
ViERefCount();
~ViERefCount();
ViERefCount& operator++(int);
ViERefCount& operator--(int);
void Reset();
int GetCount() const;
private:
volatile int _count;
webrtc::CriticalSectionWrapper& _crit;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REF_COUNT_H_
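A compressed sketch of how the sub-API implementations use this counter (not part of the original file; ExampleImpl is a placeholder — the real pattern is the GetInterface()/Release() pair as in ViERenderImpl below):

// Illustrative sketch only, not part of the original file.
#include "vie_ref_count.h"

class ExampleImpl : public ViERefCount
{
public:
    void AddRef() { (*this)++; }        // done by GetInterface() in the real impls
    int Release()
    {
        (*this)--;                      // done by the real Release() implementations
        const int refCount = GetCount();
        return refCount;                // < 0 means Release() was called too often
    }
};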

View File

@ -1,563 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_render_impl.cc
*/
#include "vie_render_impl.h"
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "trace.h"
#include "video_render.h"
#include "video_render_defines.h"
#include "vie_errors.h"
#include "vie_impl.h"
#include "vie_capturer.h"
#include "vie_channel.h"
#include "vie_frame_provider_base.h"
#include "vie_channel_manager.h"
#include "vie_input_manager.h"
#include "vie_render_manager.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// GetInterface
// ----------------------------------------------------------------------------
ViERender* ViERender::GetInterface(VideoEngine* videoEngine)
{
#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
if (videoEngine == NULL)
{
return NULL;
}
VideoEngineImpl* vieImpl = reinterpret_cast<VideoEngineImpl*> (videoEngine);
ViERenderImpl* vieRenderImpl = vieImpl;
(*vieRenderImpl)++; // Increase ref count
return vieRenderImpl;
#else
return NULL;
#endif
}
// ----------------------------------------------------------------------------
// Release
//
// Releases the interface, i.e. reduces the reference counter. The number of
// remaining references is returned, -1 if released too many times.
// ----------------------------------------------------------------------------
int ViERenderImpl::Release()
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, _instanceId,
"ViERender::Release()");
(*this)--; // Decrease ref count
WebRtc_Word32 refCount = GetCount();
if (refCount < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, _instanceId,
"ViERender release too many times");
// SetLastError()
return -1;
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _instanceId,
"ViERender reference count: %d", refCount);
return refCount;
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViERenderImpl::ViERenderImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViERenderImpl::ViERenderImpl() Ctor");
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViERenderImpl::~ViERenderImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, _instanceId,
"ViERenderImpl::~ViERenderImpl() Dtor");
}
// ============================================================================
// Registration of render module
// ============================================================================
// ----------------------------------------------------------------------------
// RegisterVideoRenderModule
//
// Registers a video render module, must be called before
// AddRenderer is called for an input stream associated
// with the same window as the module.
// ----------------------------------------------------------------------------
int ViERenderImpl::RegisterVideoRenderModule(
VideoRender& renderModule)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s (&renderModule: %p)", __FUNCTION__, &renderModule);
if (_renderManager.RegisterVideoRenderModule(renderModule) != 0)
{
// Error logging is done in RegisterVideoRenderModule
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// DeRegisterVideoRenderModule
//
// De-registers a video render module, must be called after
// RemoveRenderer has been called for all input streams associated
// with the same window as the module.
// ----------------------------------------------------------------------------
int ViERenderImpl::DeRegisterVideoRenderModule(
VideoRender& renderModule)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s (&renderModule: %p)", __FUNCTION__, &renderModule);
if (_renderManager.DeRegisterVideoRenderModule(renderModule) != 0)
{
// Error logging is done in DeRegisterVideoRenderModule
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// Add renderer
// ============================================================================
int ViERenderImpl::AddRenderer(const int renderId, void* window,
const unsigned int zOrder, const float left,
const float top, const float right,
const float bottom)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s (renderId: %d, window: 0x%p, zOrder: %u, left: %f, "
"top: %f, right: %f, bottom: %f)",
__FUNCTION__, renderId, window, zOrder, left, top, right,
bottom);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
{ // Check if the renderer already exists
ViERenderManagerScoped rs(_renderManager);
if (rs.Renderer(renderId) != NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - Renderer already exist %d.", __FUNCTION__,
renderId);
SetLastError(kViERenderAlreadyExists);
return -1;
}
}
if (renderId >= kViEChannelIdBase && renderId <= kViEChannelIdMax)
{
// This is a channel
ViEChannelManagerScoped cm(_channelManager);
ViEFrameProviderBase* frameProvider = cm.Channel(renderId);
if (frameProvider == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: FrameProvider id %d doesn't exist", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
ViERenderer* renderer = _renderManager.AddRenderStream(renderId,
window, zOrder,
left, top,
right, bottom);
if (renderer == NULL)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return frameProvider->RegisterFrameCallback(renderId, renderer);
}
else // camera or file
{
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* frameProvider = is.FrameProvider(renderId);
if (frameProvider == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: FrameProvider id %d doesn't exist", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
ViERenderer* renderer = _renderManager.AddRenderStream(renderId,
window, zOrder,
left, top,
right, bottom);
if (renderer == NULL)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return frameProvider->RegisterFrameCallback(renderId, renderer);
}
SetLastError(kViERenderInvalidRenderId);
return -1;
}
int ViERenderImpl::RemoveRenderer(const int renderId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId),
"%s(renderId: %d)", __FUNCTION__, renderId);
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
ViERenderer* renderer = NULL;
{
ViERenderManagerScoped rs(_renderManager);
renderer = rs.Renderer(renderId);
if (!renderer)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_instanceId),
"%s No render exist with renderId: %d", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
} // Leave the scoped lock since we don't want to lock two managers
// simultaneously.
if (renderId >= kViEChannelIdBase && renderId <= kViEChannelIdMax)
{
// This is a channel
ViEChannelManagerScoped cm(_channelManager);
ViEChannel* channel = cm.Channel(renderId);
if (!channel)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: no channel with id %d exists ", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
channel->DeregisterFrameCallback(renderer);
}
else // Provider owned by the input manager, i.e. a file or a capture device
{
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* provider = is.FrameProvider(renderId);
if (!provider)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: no provider with id %d exists ", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
provider->DeregisterFrameCallback(renderer);
}
if (_renderManager.RemoveRenderStream(renderId) != 0)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// Start/stop
// ============================================================================
// ----------------------------------------------------------------------------
// StartRender
//
// Starts rendering the stream from the channel
// ----------------------------------------------------------------------------
int ViERenderImpl::StartRender(const int renderId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, renderId), "%s(channel: %d)", __FUNCTION__,
renderId);
ViERenderManagerScoped rs(_renderManager);
ViERenderer* ptrRender = rs.Renderer(renderId);
if (ptrRender == NULL)
{
// No renderer for this channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, renderId),
"%s: No renderer with render Id %d exist.", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
if (ptrRender->StartRender() != 0)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StopRender
//
// Stop rendering a stream
// ----------------------------------------------------------------------------
int ViERenderImpl::StopRender(const int renderId)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
ViEId(_instanceId, renderId), "%s(channel: %d)", __FUNCTION__,
renderId);
ViERenderManagerScoped rs(_renderManager);
ViERenderer* ptrRender = rs.Renderer(renderId);
if (ptrRender == NULL)
{
// No renderer for this channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, renderId),
"%s: No renderer with renderId %d exist.", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
if (ptrRender->StopRender() != 0)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// Stream configurations
// ============================================================================
// ----------------------------------------------------------------------------
// ConfigureRender
//
// Reconfigures an already added render stream
// ----------------------------------------------------------------------------
int ViERenderImpl::ConfigureRender(int renderId, const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom)
{
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, ViEId(_instanceId, renderId),
"%s(channel: %d)", __FUNCTION__, renderId);
ViERenderManagerScoped rs(_renderManager);
ViERenderer* ptrRender = rs.Renderer(renderId);
if (ptrRender == NULL)
{
// No renderer for this channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, renderId),
"%s: No renderer with renderId %d exist.", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
if (ptrRender->ConfigureRenderer(zOrder, left, top, right, bottom) != 0)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// MirrorRenderStream
//
// Enables mirror rendering
// ----------------------------------------------------------------------------
int ViERenderImpl::MirrorRenderStream(const int renderId, const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis)
{
ViERenderManagerScoped rs(_renderManager);
ViERenderer* ptrRender = rs.Renderer(renderId);
if (ptrRender == NULL)
{
// No renderer for this channel
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, renderId),
"%s: No renderer with renderId %d exist.", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
if (ptrRender->EnableMirroring(renderId, enable, mirrorXAxis, mirrorYAxis)
!= 0)
{
SetLastError(kViERenderUnknownError);
return -1;
}
return 0;
}
// ============================================================================
// External render
// ============================================================================
// ----------------------------------------------------------------------------
// AddRenderer
// ----------------------------------------------------------------------------
int ViERenderImpl::AddRenderer(const int renderId,
webrtc::RawVideoType videoInputFormat,
ExternalRenderer* externalRenderer)
{
// check if the client requested a format that we can convert the frames to
if (videoInputFormat != webrtc::kVideoI420
&& videoInputFormat != webrtc::kVideoYV12
&& videoInputFormat != webrtc::kVideoYUY2
&& videoInputFormat != webrtc::kVideoUYVY
&& videoInputFormat != webrtc::kVideoARGB
&& videoInputFormat != webrtc::kVideoRGB24
&& videoInputFormat != webrtc::kVideoRGB565
&& videoInputFormat != webrtc::kVideoARGB4444
&& videoInputFormat != webrtc::kVideoARGB1555)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_instanceId, renderId),
"%s: Unsupported video frame format requested",
__FUNCTION__);
SetLastError(kViERenderInvalidFrameFormat);
return -1;
}
if (!IsInitialized())
{
SetLastError(kViENotInitialized);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - ViE instance %d not initialized", __FUNCTION__,
_instanceId);
return -1;
}
{ // Check if the renderer already exists
ViERenderManagerScoped rs(_renderManager);
if (rs.Renderer(renderId) != NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s - Renderer already exist %d.", __FUNCTION__,
renderId);
SetLastError(kViERenderAlreadyExists);
return -1;
}
}
if (renderId >= kViEChannelIdBase && renderId <= kViEChannelIdMax)
{
// This is a channel
ViEChannelManagerScoped cm(_channelManager);
ViEFrameProviderBase* frameProvider = cm.Channel(renderId);
if (frameProvider == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: FrameProvider id %d doesn't exist", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
ViERenderer* ptrRender = _renderManager.AddRenderStream(renderId, NULL,
0, 0.0f, 0.0f,
1.0f, 1.0f);
if (ptrRender == NULL)
{
SetLastError(kViERenderUnknownError);
return -1;
}
if (-1 == ptrRender->SetExternalRenderer(renderId, videoInputFormat,
externalRenderer))
{
SetLastError(kViERenderUnknownError);
return -1;
}
return frameProvider->RegisterFrameCallback(renderId, ptrRender);
}
else // camera or file
{
ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* frameProvider = is.FrameProvider(renderId);
if (frameProvider == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_instanceId),
"%s: FrameProvider id %d doesn't exist", __FUNCTION__,
renderId);
SetLastError(kViERenderInvalidRenderId);
return -1;
}
ViERenderer* ptrRender = _renderManager.AddRenderStream(renderId, NULL,
0, 0.0f, 0.0f,
1.0f, 1.0f);
if (ptrRender == NULL)
{
SetLastError(kViERenderUnknownError);
return -1;
}
if (-1 == ptrRender->SetExternalRenderer(renderId, videoInputFormat,
externalRenderer))
{
SetLastError(kViERenderUnknownError);
return -1;
}
return frameProvider->RegisterFrameCallback(renderId, ptrRender);
}
SetLastError(kViERenderInvalidRenderId);
return -1;
}
} // namespace webrtc

View File

@ -1,77 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_render_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_IMPL_H_
#include "vie_defines.h"
#include "typedefs.h"
#include "video_render_defines.h"
#include "vie_ref_count.h"
#include "vie_render.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViERenderImpl
// ----------------------------------------------------------------------------
class ViERenderImpl: public virtual ViESharedData,
public ViERender,
public ViERefCount
{
public:
virtual int Release();
// Registration of render module
virtual int RegisterVideoRenderModule(VideoRender& renderModule);
virtual int DeRegisterVideoRenderModule(
VideoRender& renderModule);
// Add/remove renderer
virtual int AddRenderer(const int renderId, void* window,
const unsigned int zOrder, const float left,
const float top, const float right,
const float bottom);
virtual int RemoveRenderer(const int renderId);
// Start/stop
virtual int StartRender(const int renderId);
virtual int StopRender(const int renderId);
virtual int ConfigureRender(int renderId, const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int MirrorRenderStream(const int renderId, const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis);
// External render
virtual int AddRenderer(const int renderId,
webrtc::RawVideoType videoInputFormat,
ExternalRenderer* renderer);
protected:
ViERenderImpl();
virtual ~ViERenderImpl();
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_IMPL_H_
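A hedged end-to-end sketch of driving this sub-API from an application (not part of the original file; ShowChannelOnce is a hypothetical helper, and it assumes the public vie_render.h declares the methods implemented above):

// Illustrative sketch only, not part of the original file.
#include "vie_render.h"

int ShowChannelOnce(webrtc::VideoEngine* engine, int videoChannel, void* window)
{
    webrtc::ViERender* render = webrtc::ViERender::GetInterface(engine);
    if (render == NULL)
        return -1;
    int result = -1;
    // zOrder 0, normalized full-window coordinates (left, top, right, bottom).
    if (render->AddRenderer(videoChannel, window, 0, 0.0f, 0.0f, 1.0f, 1.0f) == 0 &&
        render->StartRender(videoChannel) == 0)
    {
        // ... keep rendering for as long as the stream should be shown ...
        render->StopRender(videoChannel);
        result = 0;
    }
    render->RemoveRenderer(videoChannel);
    render->Release();  // drop the sub-API reference
    return result;
}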

View File

@ -1,300 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* ViERenderManager.cpp
*/
#include "vie_render_manager.h"
#include "engine_configurations.h"
#include "vie_defines.h"
#include "critical_section_wrapper.h"
#include "video_render.h"
#include "video_render_defines.h"
#include "rw_lock_wrapper.h"
#include "trace.h"
namespace webrtc {
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViERenderManagerScoped::ViERenderManagerScoped(const ViERenderManager& vieRenderManager)
:
ViEManagerScopedBase(vieRenderManager)
{
}
// ----------------------------------------------------------------------------
// Renderer()
//
// Returns a pointer to the ViERender object
// ----------------------------------------------------------------------------
ViERenderer* ViERenderManagerScoped::Renderer(WebRtc_Word32 renderId) const
{
return static_cast<const ViERenderManager*> (_vieManager)->ViERenderPtr(renderId);
}
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViERenderManager::ViERenderManager(WebRtc_Word32 engineId) :
_listCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_engineId(engineId),
_streamToViERenderer(),
_renderList(),
_useExternalRenderModule(false)
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engineId),
"ViERenderManager::ViERenderManager(engineId: %d) - Constructor", engineId);
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViERenderManager::~ViERenderManager()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(_engineId),
"ViERenderManager Destructor, engineId: %d", _engineId);
while(_streamToViERenderer.Size()!=0)
{
MapItem* item=_streamToViERenderer.First();
assert(item);
const WebRtc_Word32 renderId=item->GetId();
item = NULL; // Deleted by RemoveRenderStream.
RemoveRenderStream(renderId);
}
delete &_listCritsect;
}
// ----------------------------------------------------------------------------
// RegisterVideoRenderModule
// ----------------------------------------------------------------------------
WebRtc_Word32 ViERenderManager::RegisterVideoRenderModule(VideoRender& renderModule)
{
// See if there is already a render module registered for the window that
// the registrant render module is associated with
VideoRender* ptrCurrentModule = FindRenderModule(renderModule.Window());
if (ptrCurrentModule)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"A module is already registered for this window (window=%p, current module=%p, registrant module=%p",
renderModule.Window(), ptrCurrentModule, &renderModule);
return -1;
}
// Register module
_renderList.PushBack(static_cast<void*>(&renderModule));
_useExternalRenderModule=true;
return 0;
}
// ----------------------------------------------------------------------------
// DeRegisterVideoRenderModule
// ----------------------------------------------------------------------------
WebRtc_Word32 ViERenderManager::DeRegisterVideoRenderModule(VideoRender& renderModule)
{
// Check if there are streams in the module
WebRtc_UWord32 nStreams = renderModule.GetNumIncomingRenderStreams();
if (nStreams != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId),
"There are still %d streams in this module, cannot de-register", nStreams);
return -1;
}
// Erase the render module from the map
ListItem* listItem = NULL;
bool found = false;
for (listItem = _renderList.First(); listItem != NULL; listItem = _renderList.Next(listItem))
{
if (&renderModule == static_cast<VideoRender*>(listItem->GetItem()))
{
// We've found our renderer
_renderList.Erase(listItem);
found = true;
break;
}
}
if (!found)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId), "Module not registered");
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// AddRenderStream
// ----------------------------------------------------------------------------
ViERenderer* ViERenderManager::AddRenderStream(const WebRtc_Word32 renderId,
void* window,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_listCritsect);
if (_streamToViERenderer.Find(renderId) != NULL)
{
// This stream is already added to a renderer, not allowed!
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId), "Render stream already exists");
return NULL;
}
// Get the render module for this window
VideoRender* ptrRenderer = FindRenderModule(window);
if (ptrRenderer == NULL)
{
// No render module for this window, create a new one
#ifndef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
ptrRenderer = VideoRender::CreateVideoRender(ViEModuleId(_engineId, -1), window, false);
#else
ptrRenderer = VideoRender::CreateVideoRender(ViEModuleId(_engineId, -1), window, false, kRenderExternal);
#endif //WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
if (!ptrRenderer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId), "Could not create new render module");
return NULL;
}
_renderList.PushBack((void*) ptrRenderer);
}
ViERenderer* vieRenderer= ViERenderer::CreateViERenderer(renderId,_engineId,*ptrRenderer,*this,zOrder,left,top,right,bottom);
if(!vieRenderer)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,renderId), "Could not create new render stream");
return NULL;
}
_streamToViERenderer.Insert(renderId, vieRenderer);
return vieRenderer;
}
// ----------------------------------------------------------------------------
// RemoveRenderStream
// ----------------------------------------------------------------------------
WebRtc_Word32 ViERenderManager::RemoveRenderStream(const WebRtc_Word32 renderId)
{
// We need exclusive rights to the items in the render manager to delete a stream.
// Use a named local so the write lock is actually held for the whole scope.
ViEManagerWriteScoped writeLock(*this);
// Protect the list/map
CriticalSectionScoped cs(_listCritsect);
MapItem* mapItem = _streamToViERenderer.Find(renderId);
if (mapItem == NULL)
{
// No such stream
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(_engineId), "No renderer found for render stream %d", renderId);
return 0;
}
// Get the vieRender object.
ViERenderer* ptrViERenderer = static_cast<ViERenderer*>(mapItem->GetItem());
assert(ptrViERenderer);
// Get the render module pointer for this vieRender object
VideoRender& renderer=ptrViERenderer->RenderModule();
// Delete the vieRender.
// This deletes the stream in the render module.
delete ptrViERenderer;
// Remove from the stream map
_streamToViERenderer.Erase(mapItem);
// Check if there are other streams in the module
if (!_useExternalRenderModule && renderer.GetNumIncomingRenderStreams() == 0)
{
// Erase the render module from the map
ListItem* listItem = NULL;
for (listItem = _renderList.First(); listItem != NULL; listItem = _renderList.Next(listItem))
{
if (&renderer == static_cast<VideoRender*>(listItem->GetItem()))
{
// We've found our renderer
_renderList.Erase(listItem);
break;
}
}
// Destroy the module
VideoRender::DestroyVideoRender(&renderer);
}
return 0;
}
// ----------------------------------------------------------------------------
// FindRenderModule
//
// Returns a pointer to the render module if it exists in the render list.
// Assumed to be protected by the caller
// ----------------------------------------------------------------------------
VideoRender* ViERenderManager::FindRenderModule(void* window)
{
VideoRender* ptrRenderer = NULL;
ListItem* listItem = NULL;
for (listItem = _renderList.First(); listItem != NULL; listItem = _renderList.Next(listItem))
{
ptrRenderer = static_cast<VideoRender*>(listItem->GetItem());
if (ptrRenderer == NULL)
{
break;
}
if (ptrRenderer->Window() == window)
{
// We've found the render module
break;
}
ptrRenderer = NULL;
}
return ptrRenderer;
}
ViERenderer* ViERenderManager::ViERenderPtr(WebRtc_Word32 renderId) const
{
ViERenderer* ptrRenderer = NULL;
MapItem* mapItem = _streamToViERenderer.Find(renderId);
if (mapItem == NULL)
{
// No such stream in any renderer
return NULL;
}
ptrRenderer = static_cast<ViERenderer*>(mapItem->GetItem());
return ptrRenderer;
}
} // namespace webrtc

View File

@ -1,86 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_render_manager.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_MANAGER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_MANAGER_H_
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "typedefs.h"
#include "list_wrapper.h"
#include "map_wrapper.h"
#include "vie_manager_base.h"
#include "vie_renderer.h"
namespace webrtc {
class CriticalSectionWrapper;
class RWLockWrapper;
class VideoRender;
class VideoRenderCallback;
class ViERenderManager: private ViEManagerBase
{
friend class ViERenderManagerScoped;
public:
ViERenderManager(WebRtc_Word32 engineId);
~ViERenderManager();
WebRtc_Word32 RegisterVideoRenderModule(VideoRender& renderModule);
WebRtc_Word32 DeRegisterVideoRenderModule(VideoRender& renderModule);
ViERenderer* AddRenderStream(const WebRtc_Word32 renderId,
void* window,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
WebRtc_Word32 RemoveRenderStream(WebRtc_Word32 renderId);
VideoRender* FindRenderModule(void* window);
private:
// Methods used by ViERenderScoped
ViERenderer* ViERenderPtr(WebRtc_Word32 renderId) const;
// Members
CriticalSectionWrapper& _listCritsect;
WebRtc_Word32 _engineId;
MapWrapper _streamToViERenderer; // Protected by ViEManagerBase
ListWrapper _renderList;
bool _useExternalRenderModule;
};
// ------------------------------------------------------------------
// ViERenderManagerScoped
// ------------------------------------------------------------------
class ViERenderManagerScoped: private ViEManagerScopedBase
{
public:
ViERenderManagerScoped(const ViERenderManager& vieRenderManager);
ViERenderer* Renderer(WebRtc_Word32 renderId) const;
};
} //namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDER_MANAGER_H_
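
A minimal sketch of adding a render stream through the manager, assuming the left/top/right/bottom arguments are normalized [0.0, 1.0] window coordinates (not stated in this header) and that window is a platform-specific handle.

// Sketch only: full-window render stream for renderId.
ViERenderer* renderer = renderManager.AddRenderStream(renderId, window,
                                                      0,      // zOrder
                                                      0.0f, 0.0f,
                                                      1.0f, 1.0f);
if (renderer == NULL)
{
    // Creation failed, e.g. a stream already exists for renderId.
    return -1;
}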

View File

@ -1,248 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "vie_renderer.h"
#include "video_render.h"
#include "video_render_defines.h"
#include "vie_render_manager.h"
#include "vplib.h"
namespace webrtc {
ViERenderer* ViERenderer::CreateViERenderer(
const WebRtc_Word32 renderId,
const WebRtc_Word32 engineId,
VideoRender& renderModule,
ViERenderManager& renderManager,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
ViERenderer* self=new ViERenderer(renderId,engineId,renderModule,renderManager);
if(!self || self->Init(zOrder,left,top,right,bottom)!=0)
{
delete self;
self=NULL;
}
return self;
}
ViERenderer::~ViERenderer(void)
{
if(_ptrRenderCallback)
{
_renderModule.DeleteIncomingRenderStream( _renderId);
}
if(_ptrIncomingExternalCallback){
delete _ptrIncomingExternalCallback;
}
}
ViERenderer::ViERenderer(const WebRtc_Word32 renderId,const WebRtc_Word32 engineId,
VideoRender& renderModule,
ViERenderManager& renderManager)
:
_renderId(renderId),
_engineId(engineId),
_renderModule(renderModule),
_renderManager(renderManager),
_ptrRenderCallback(NULL),
_ptrIncomingExternalCallback(new ViEExternalRendererImpl())
{
}
WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
_ptrRenderCallback = (VideoRenderCallback*)_renderModule.AddIncomingRenderStream( _renderId, zOrder, left, top, right, bottom);
if (_ptrRenderCallback == NULL)
{
// Logging done
return -1;
}
return 0;
}
WebRtc_Word32 ViERenderer::GetLastRenderedFrame(const WebRtc_Word32 renderID, webrtc::VideoFrame& videoFrame)
{
return _renderModule.GetLastRenderedFrame(renderID, videoFrame);
}
WebRtc_Word32 ViERenderer::StartRender()
{
return _renderModule.StartRender(_renderId);
}
WebRtc_Word32 ViERenderer::StopRender()
{
return _renderModule.StopRender(_renderId);
}
// Implement ViEFrameCallback
void ViERenderer::DeliverFrame(int id,
webrtc::VideoFrame& videoFrame,
int numCSRCs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize])
{
_ptrRenderCallback->RenderFrame(_renderId,videoFrame);
}
// Implement ViEFrameCallback
void ViERenderer::ProviderDestroyed(int id)
{
_renderManager.RemoveRenderStream(_renderId); // Remove the render stream since the provider is destroyed.
}
VideoRender& ViERenderer::RenderModule()
{
return _renderModule;
}
WebRtc_Word32 ViERenderer::ConfigureRenderer(const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
return _renderModule.ConfigureRenderer(_renderId, zOrder, left, top, right, bottom);
}
WebRtc_Word32 ViERenderer::SetTimeoutImage(const webrtc::VideoFrame& timeoutImage,const WebRtc_Word32 timeoutValue)
{
return _renderModule.SetTimeoutImage(_renderId,timeoutImage,timeoutValue);
}
WebRtc_Word32 ViERenderer::SetRenderStartImage(const webrtc::VideoFrame& startImage)
{
return _renderModule.SetStartImage(_renderId,startImage);
}
WebRtc_Word32 ViERenderer::EnableMirroring(const WebRtc_Word32 renderId, const bool enable, const bool mirrorXAxis, const bool mirrorYAxis)
{
return _renderModule.MirrorRenderStream(renderId, enable, mirrorXAxis, mirrorYAxis);
}
WebRtc_Word32 ViERenderer::SetExternalRenderer(const WebRtc_Word32 renderId, webrtc::RawVideoType videoInputFormat, ExternalRenderer* externalRenderer)
{
if(NULL == _ptrIncomingExternalCallback){
return -1;
}
_ptrIncomingExternalCallback->SetViEExternalRenderer(externalRenderer, videoInputFormat);
return _renderModule.AddExternalRenderCallback(renderId, _ptrIncomingExternalCallback);
}
ViEExternalRendererImpl::ViEExternalRendererImpl() :
_externalRenderer(NULL),
_externalRendererFormat(),
_externalRendererWidth(0),
_externalRendererHeight(0)
{
}
int ViEExternalRendererImpl::SetViEExternalRenderer(ExternalRenderer* externalRenderer, webrtc::RawVideoType videoInputFormat)
{
_externalRenderer = externalRenderer;
_externalRendererFormat = videoInputFormat;
return 0;
}
// implements VideoRenderCallback
WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(const WebRtc_UWord32 streamId,
webrtc::VideoFrame& videoFrame)
{
webrtc::VideoFrame convertedFrame;
webrtc::VideoFrame* pConvertedFrame = &convertedFrame;
// convert to requested format
switch(_externalRendererFormat)
{
case webrtc::kVideoI420:
pConvertedFrame = &videoFrame;
break;
case webrtc::kVideoYV12:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kYV12,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToYV12(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
case webrtc::kVideoYUY2:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kYUY2,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToYUY2(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
case webrtc::kVideoUYVY:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kUYVY,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToUYVY(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
case webrtc::kVideoIYUV:
// No conversion available; an empty frame will be delivered.
break;
case webrtc::kVideoARGB:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kARGB,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToARGB(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
case webrtc::kVideoRGB24:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kRGB24,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToRGB24(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height());
break;
case webrtc::kVideoRGB565:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kRGB565,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToRGB565(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height());
break;
case webrtc::kVideoARGB4444:
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kARGB4444,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToARGB4444(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
case webrtc::kVideoARGB1555 :
convertedFrame.VerifyAndAllocate(webrtc::CalcBufferSize(webrtc::kARGB1555,videoFrame.Width(),videoFrame.Height()));
webrtc::ConvertI420ToARGB1555(videoFrame.Buffer(), convertedFrame.Buffer(), videoFrame.Width(), videoFrame.Height(), 0);
break;
default:
// Unsupported format. Should never reach this point.
assert(false);
pConvertedFrame = NULL;
break;
}
if(_externalRendererWidth!=videoFrame.Width() || _externalRendererHeight!=videoFrame.Height())
{
_externalRendererWidth = videoFrame.Width();
_externalRendererHeight = videoFrame.Height();
_externalRenderer->FrameSizeChange(_externalRendererWidth, _externalRendererHeight, streamId);
}
if(pConvertedFrame)
{
_externalRenderer->DeliverFrame(pConvertedFrame->Buffer(), pConvertedFrame->Length());
}
return 0;
}
} //namespace webrtc
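
A minimal sketch of hooking up an external renderer, assuming an ExternalRenderer interface whose FrameSizeChange and DeliverFrame signatures match the calls made in ViEExternalRendererImpl::RenderFrame above; the class name and return values are illustrative only.

// Sketch only: application-side sink for decoded frames
// (signatures inferred from the RenderFrame implementation above).
class FrameSink : public ExternalRenderer
{
public:
    virtual int FrameSizeChange(unsigned int width, unsigned int height,
                                unsigned int streamId)
    {
        // Re-size application buffers for the new resolution.
        return 0;
    }
    virtual int DeliverFrame(unsigned char* buffer, int bufferSize)
    {
        // Consume one frame in the format chosen at registration time.
        return 0;
    }
};

// Registration (vieRenderer and streamId assumed valid):
FrameSink sink;
vieRenderer->SetExternalRenderer(streamId, webrtc::kVideoI420, &sink);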

View File

@ -1,120 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_renderer.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDERER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDERER_H_
#include "vie_frame_provider_base.h"
#include "map_wrapper.h"
#include "vie_render.h"
#include "video_render_defines.h"
namespace webrtc {
class VideoRender;
class VideoRenderCallback;
class ViERenderManager;
class ViEExternalRendererImpl : public VideoRenderCallback
{
public:
ViEExternalRendererImpl();
int SetViEExternalRenderer(ExternalRenderer* externalRenderer, webrtc::RawVideoType videoInputFormat);
// implements VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
webrtc::VideoFrame& videoFrame);
virtual ~ViEExternalRendererImpl(){};
private:
ExternalRenderer* _externalRenderer;
webrtc::RawVideoType _externalRendererFormat;
WebRtc_UWord32 _externalRendererWidth;
WebRtc_UWord32 _externalRendererHeight;
};
class ViERenderer: public ViEFrameCallback
{
public:
static ViERenderer* CreateViERenderer(const WebRtc_Word32 renderId,
const WebRtc_Word32 engineId,
VideoRender& renderModule,
ViERenderManager& renderManager,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
~ViERenderer(void);
WebRtc_Word32 StartRender();
WebRtc_Word32 StopRender();
WebRtc_Word32 GetLastRenderedFrame(const WebRtc_Word32 renderID, webrtc::VideoFrame& videoFrame);
WebRtc_Word32 ConfigureRenderer(const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
VideoRender& RenderModule();
WebRtc_Word32 EnableMirroring(const WebRtc_Word32 renderId, const bool enable, const bool mirrorXAxis, const bool mirrorYAxis);
WebRtc_Word32 SetTimeoutImage(const webrtc::VideoFrame& timeoutImage,const WebRtc_Word32 timeoutValue);
WebRtc_Word32 SetRenderStartImage(const webrtc::VideoFrame& startImage);
WebRtc_Word32 SetExternalRenderer(const WebRtc_Word32 renderId, webrtc::RawVideoType videoInputFormat, ExternalRenderer* externalRenderer);
private:
WebRtc_Word32 Init(const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
ViERenderer(const WebRtc_Word32 renderId,const WebRtc_Word32 engineId,
VideoRender& renderModule,
ViERenderManager& renderManager);
// Implement ViEFrameCallback
virtual void DeliverFrame(int id, VideoFrame& videoFrame, int numCSRCs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frameDelay){return;}
virtual int GetPreferedFrameSettings(int &width, int &height,
int &frameRate){return -1;}
virtual void ProviderDestroyed(int id);
WebRtc_UWord32 _renderId;
WebRtc_Word32 _engineId;
VideoRender& _renderModule;
ViERenderManager& _renderManager;
VideoRenderCallback* _ptrRenderCallback;
ViEExternalRendererImpl* _ptrIncomingExternalCallback;
};
} //namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RENDERER_H_

File diff suppressed because it is too large

View File

@ -1,144 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_rtp_rtcp_impl.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RTP_RTCP_IMPL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RTP_RTCP_IMPL_H_
#include "vie_defines.h"
#include "rtp_rtcp_defines.h"
#include "typedefs.h"
#include "vie_ref_count.h"
#include "vie_rtp_rtcp.h"
#include "vie_shared_data.h"
namespace webrtc
{
// ----------------------------------------------------------------------------
// ViERTP_RTCPImpl
// ----------------------------------------------------------------------------
class ViERTP_RTCPImpl : public virtual ViESharedData,
public ViERTP_RTCP,
public ViERefCount
{
public:
virtual int Release();
// SSRC/CSRC
virtual int SetLocalSSRC(const int videoChannel, const unsigned int SSRC);
virtual int GetLocalSSRC(const int videoChannel, unsigned int& SSRC) const;
virtual int GetRemoteSSRC(const int videoChannel, unsigned int& SSRC) const;
virtual int GetRemoteCSRCs(const int videoChannel,
unsigned int CSRCs[kRtpCsrcSize]) const;
virtual int SetStartSequenceNumber(const int videoChannel,
unsigned short sequenceNumber);
// RTCP
virtual int SetRTCPStatus(const int videoChannel,
const ViERTCPMode rtcpMode);
virtual int GetRTCPStatus(const int videoChannel, ViERTCPMode& rtcpMode);
virtual int SetRTCPCName(const int videoChannel,
const char rtcpCName[KMaxRTCPCNameLength]);
virtual int GetRTCPCName(const int videoChannel,
char rtcpCName[KMaxRTCPCNameLength]);
virtual int GetRemoteRTCPCName(const int videoChannel,
char rtcpCName[KMaxRTCPCNameLength]) const;
virtual int
SendApplicationDefinedRTCPPacket(const int videoChannel,
const unsigned char subType,
unsigned int name, const char* data,
unsigned short dataLengthInBytes);
virtual int SetNACKStatus(const int videoChannel, const bool enable);
virtual int SetFECStatus(const int videoChannel, const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC);
virtual int SetHybridNACKFECStatus(const int videoChannel, const bool enable,
const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC);
virtual int SetKeyFrameRequestMethod(const int videoChannel,
const ViEKeyFrameRequestMethod method);
virtual int SetTMMBRStatus(const int videoChannel, const bool enable);
// Statistics
virtual int GetReceivedRTCPStatistics(
const int videoChannel, unsigned short& fractionLost,
unsigned int& cumulativeLost, unsigned int& extendedMax,
unsigned int& jitter, int& rttMs) const;
virtual int GetSentRTCPStatistics(const int videoChannel,
unsigned short& fractionLost,
unsigned int& cumulativeLost,
unsigned int& extendedMax,
unsigned int& jitter, int& rttMs) const;
virtual int GetRTPStatistics(const int videoChannel,
unsigned int& bytesSent,
unsigned int& packetsSent,
unsigned int& bytesReceived,
unsigned int& packetsReceived) const;
// Keep alive
virtual int SetRTPKeepAliveStatus(
const int videoChannel, bool enable, const char unknownPayloadType,
const unsigned int deltaTransmitTimeSeconds);
virtual int GetRTPKeepAliveStatus(const int videoChannel, bool& enabled,
char& unkownPayloadType,
unsigned int& deltaTransmitTimeSeconds);
// Dump RTP stream, for debug purpose
virtual int StartRTPDump(const int videoChannel,
const char fileNameUTF8[1024],
RTPDirections direction);
virtual int StopRTPDump(const int videoChannel, RTPDirections direction);
// Callbacks
virtual int RegisterRTPObserver(const int videoChannel,
ViERTPObserver& observer);
virtual int DeregisterRTPObserver(const int videoChannel);
virtual int RegisterRTCPObserver(const int videoChannel,
ViERTCPObserver& observer);
virtual int DeregisterRTCPObserver(const int videoChannel);
protected:
ViERTP_RTCPImpl();
virtual ~ViERTP_RTCPImpl();
private:
RTCPMethod ViERTCPModeToRTCPMethod(ViERTCPMode apiMode);
ViERTCPMode RTCPMethodToViERTCPMode(RTCPMethod moduleMethod);
KeyFrameRequestMethod
APIRequestToModuleRequest(const ViEKeyFrameRequestMethod apiMethod);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RTP_RTCP_IMPL_H_
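
A minimal usage sketch for this sub-API, assuming the interface pointer is obtained through a GetInterface factory in the public ViERTP_RTCP header (not shown here) and that the ViERTCPMode constant name is as assumed.

// Sketch only (ptrViERtpRtcp and videoChannel assumed valid).
ptrViERtpRtcp->SetRTCPStatus(videoChannel, kRtcpCompound_RFC4585);
ptrViERtpRtcp->SetNACKStatus(videoChannel, true);

unsigned short fractionLost = 0;
unsigned int cumulativeLost = 0;
unsigned int extendedMax = 0;
unsigned int jitter = 0;
int rttMs = 0;
if (ptrViERtpRtcp->GetReceivedRTCPStatistics(videoChannel, fractionLost,
                                             cumulativeLost, extendedMax,
                                             jitter, rttMs) != 0)
{
    // Inspect the last error on the base API.
}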

View File

@ -1,399 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_sender.cc
*/
#include "vie_sender.h"
#include "critical_section_wrapper.h"
#include "rtp_rtcp.h"
#ifdef WEBRTC_SRTP
#include "SrtpModule.h"
#endif
#include "rtp_dump.h"
#include "trace.h"
namespace webrtc {
// ----------------------------------------------------------------------------
// Constructor
// ----------------------------------------------------------------------------
ViESender::ViESender(int engineId, int channelId,
RtpRtcp& rtpRtcpModule)
: _engineId(engineId), _channelId(channelId),
_sendCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_rtpRtcp(rtpRtcpModule),
#ifdef WEBRTC_SRTP
_ptrSrtp(NULL),
_ptrSrtcp(NULL),
#endif
_ptrExternalEncryption(NULL), _ptrSrtpBuffer(NULL),
_ptrSrtcpBuffer(NULL), _ptrEncryptionBuffer(NULL), _ptrTransport(NULL),
_rtpDump(NULL)
{
}
// ----------------------------------------------------------------------------
// Destructor
// ----------------------------------------------------------------------------
ViESender::~ViESender()
{
delete &_sendCritsect;
if (_ptrSrtpBuffer)
{
delete[] _ptrSrtpBuffer;
_ptrSrtpBuffer = NULL;
}
if (_ptrSrtcpBuffer)
{
delete[] _ptrSrtcpBuffer;
_ptrSrtcpBuffer = NULL;
}
if (_ptrEncryptionBuffer)
{
delete[] _ptrEncryptionBuffer;
_ptrEncryptionBuffer = NULL;
}
if (_rtpDump)
{
_rtpDump->Stop();
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
}
}
// ----------------------------------------------------------------------------
// RegisterExternalEncryption
// ----------------------------------------------------------------------------
int ViESender::RegisterExternalEncryption(Encryption* encryption)
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrExternalEncryption)
{
return -1;
}
_ptrEncryptionBuffer = new WebRtc_UWord8[kViEMaxMtu];
if (_ptrEncryptionBuffer == NULL)
{
return -1;
}
_ptrExternalEncryption = encryption;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterExternalEncryption
// ----------------------------------------------------------------------------
int ViESender::DeregisterExternalEncryption()
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrExternalEncryption == NULL)
{
return -1;
}
if (_ptrEncryptionBuffer)
{
delete [] _ptrEncryptionBuffer;
_ptrEncryptionBuffer = NULL;
}
_ptrExternalEncryption = NULL;
return 0;
}
// ----------------------------------------------------------------------------
// RegisterSendTransport
// ----------------------------------------------------------------------------
int ViESender::RegisterSendTransport(Transport* transport)
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrTransport)
{
return -1;
}
_ptrTransport = transport;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSendTransport
// ----------------------------------------------------------------------------
int ViESender::DeregisterSendTransport()
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrTransport == NULL)
{
return -1;
}
_ptrTransport = NULL;
return 0;
}
#ifdef WEBRTC_SRTP
// ----------------------------------------------------------------------------
// RegisterSRTPModule
// ----------------------------------------------------------------------------
int ViESender::RegisterSRTPModule(SrtpModule* srtpModule)
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrSrtp ||
srtpModule == NULL)
{
return -1;
}
_ptrSrtpBuffer = new WebRtc_UWord8[KMaxPacketSize];
if (_ptrSrtpBuffer == NULL)
{
return -1;
}
_ptrSrtp = srtpModule;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSRTPModule
// ----------------------------------------------------------------------------
int ViESender::DeregisterSRTPModule()
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrSrtp == NULL)
{
return -1;
}
if (_ptrSrtpBuffer)
{
delete [] _ptrSrtpBuffer;
_ptrSrtpBuffer = NULL;
}
_ptrSrtp = NULL;
return 0;
}
// ----------------------------------------------------------------------------
// RegisterSRTCPModule
// ----------------------------------------------------------------------------
int ViESender::RegisterSRTCPModule(SrtpModule* srtcpModule)
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrSrtcp ||
srtcpModule == NULL)
{
return -1;
}
_ptrSrtcpBuffer = new WebRtc_UWord8[KMaxPacketSize];
if (_ptrSrtcpBuffer == NULL)
{
return -1;
}
_ptrSrtcp = srtcpModule;
return 0;
}
// ----------------------------------------------------------------------------
// DeregisterSRTCPModule
// ----------------------------------------------------------------------------
int ViESender::DeregisterSRTCPModule()
{
CriticalSectionScoped cs(_sendCritsect);
if (_ptrSrtcp == NULL)
{
return -1;
}
if (_ptrSrtcpBuffer)
{
delete [] _ptrSrtcpBuffer;
_ptrSrtcpBuffer = NULL;
}
_ptrSrtcp = NULL;
return 0;
}
#endif
// ----------------------------------------------------------------------------
// StartRTPDump
// ----------------------------------------------------------------------------
int ViESender::StartRTPDump(const char fileNameUTF8[1024])
{
CriticalSectionScoped cs(_sendCritsect);
if (_rtpDump)
{
// Restart it if it already exists and is started
_rtpDump->Stop();
} else
{
_rtpDump = RtpDump::CreateRtpDump();
if (_rtpDump == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"%s: Failed to create RTP dump", __FUNCTION__);
return -1;
}
}
if (_rtpDump->Start(fileNameUTF8) != 0)
{
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: Failed to start RTP dump", __FUNCTION__);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// StopRTPDump
// ----------------------------------------------------------------------------
int ViESender::StopRTPDump()
{
CriticalSectionScoped cs(_sendCritsect);
if (_rtpDump)
{
if (_rtpDump->IsActive())
{
_rtpDump->Stop();
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId),
"%s: Dump not active", __FUNCTION__);
}
RtpDump::DestroyRtpDump(_rtpDump);
_rtpDump = NULL;
} else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), "%s: RTP dump not started",
__FUNCTION__);
return -1;
}
return 0;
}
// ----------------------------------------------------------------------------
// SendPacket
// ----------------------------------------------------------------------------
int ViESender::SendPacket(int vieId, const void *data, int len)
{
CriticalSectionScoped cs(_sendCritsect);
if (!_ptrTransport)
{
// No transport
return -1;
}
int channelId = ChannelId(vieId);
assert(channelId == _channelId);
// Prepare for possible encryption and sending
WebRtc_UWord8* sendPacket = (WebRtc_UWord8*) data;
int sendPacketLength = len;
if (_rtpDump)
{
_rtpDump->DumpPacket(sendPacket, sendPacketLength);
}
#ifdef WEBRTC_SRTP
if (_ptrSrtp)
{
_ptrSrtp->encrypt(_channelId, sendPacket, _ptrSrtpBuffer, sendPacketLength, (int*) &sendPacketLength);
if (sendPacketLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _channelId), "RTP encryption failed for channel");
return -1;
}
else if (sendPacketLength > KMaxPacketSize)
{
WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, ViEId(_engineId, _channelId),
" %d bytes is allocated as RTP output => memory is now corrupted", KMaxPacketSize);
return -1;
}
sendPacket = _ptrSrtpBuffer;
}
#endif
if (_ptrExternalEncryption)
{
_ptrExternalEncryption->encrypt(_channelId, sendPacket,
_ptrEncryptionBuffer, sendPacketLength,
(int*) &sendPacketLength);
sendPacket = _ptrEncryptionBuffer;
}
return _ptrTransport->SendPacket(_channelId, sendPacket, sendPacketLength);
}
// ----------------------------------------------------------------------------
// SendRTCPPacket
// ----------------------------------------------------------------------------
int ViESender::SendRTCPPacket(int vieId, const void *data, int len)
{
CriticalSectionScoped cs(_sendCritsect);
if (!_ptrTransport)
{
// No transport
return -1;
}
int channelId = ChannelId(vieId);
assert(channelId == _channelId);
// Prepare for possible encryption and sending
WebRtc_UWord8* sendPacket = (WebRtc_UWord8*) data;
int sendPacketLength = len;
if (_rtpDump)
{
_rtpDump->DumpPacket(sendPacket, sendPacketLength);
}
#ifdef WEBRTC_SRTP
if (_ptrSrtcp)
{
_ptrSrtcp->encrypt_rtcp(_channelId, sendPacket, _ptrSrtcpBuffer, sendPacketLength, (int*) &sendPacketLength);
if (sendPacketLength <= 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _channelId), "RTCP encryption failed for channel");
return -1;
}
else if (sendPacketLength > KMaxPacketSize)
{
WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, ViEId(_engineId, _channelId), " %d bytes are allocated as RTCP output => memory is now corrupted", KMaxPacketSize);
return -1;
}
sendPacket = _ptrSrtcpBuffer;
}
#endif
if (_ptrExternalEncryption)
{
_ptrExternalEncryption->encrypt_rtcp(_channelId, sendPacket,
_ptrEncryptionBuffer,
sendPacketLength,
(int*) &sendPacketLength);
sendPacket = _ptrEncryptionBuffer;
}
return _ptrTransport->SendRTCPPacket(_channelId, sendPacket,
sendPacketLength);
}
} // namespace webrtc
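
A minimal sketch of an external transport that ViESender forwards to; the SendPacket and SendRTCPPacket signatures mirror the ones ViESender itself implements, and the network code is left to the application.

// Sketch only: hand outgoing RTP/RTCP packets to application sockets.
class AppTransport : public Transport
{
public:
    virtual int SendPacket(int channel, const void* data, int len)
    {
        // Send the RTP packet on the application's RTP socket.
        return len;
    }
    virtual int SendRTCPPacket(int channel, const void* data, int len)
    {
        // Send the RTCP packet on the application's RTCP socket.
        return len;
    }
};

// Registration (vieSender is the channel's ViESender instance):
AppTransport transport;
vieSender->RegisterSendTransport(&transport);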

View File

@ -1,84 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_sender.h
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SENDER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SENDER_H_
// Defines
#include "engine_configurations.h"
#include "vie_defines.h"
#include "typedefs.h"
#include "common_types.h"
// Forward declarations
#ifdef WEBRTC_SRTP
class SrtpModule;
#endif
namespace webrtc {
class CriticalSectionWrapper;
class RtpDump;
class RtpRtcp;
class Transport;
class VideoCodingModule;
class ViESender: public Transport
{
public:
ViESender(int engineId, int channelId, RtpRtcp& rtpRtcpModule);
~ViESender();
int RegisterExternalEncryption(Encryption* encryption);
int DeregisterExternalEncryption();
int RegisterSendTransport(Transport* transport);
int DeregisterSendTransport();
#ifdef WEBRTC_SRTP
int RegisterSRTPModule(SrtpModule* srtpModule);
int DeregisterSRTPModule();
int RegisterSRTCPModule(SrtpModule* srtpModule);
int DeregisterSRTCPModule();
#endif
int StartRTPDump(const char fileNameUTF8[1024]);
int StopRTPDump();
// Implements Transport
virtual int SendPacket(int vieId, const void *data, int len);
virtual int SendRTCPPacket(int vieId, const void *data, int len);
private:
int _engineId;
int _channelId;
CriticalSectionWrapper& _sendCritsect;
RtpRtcp& _rtpRtcp;
#ifdef WEBRTC_SRTP
SrtpModule* _ptrSrtp;
SrtpModule* _ptrSrtcp;
#endif
Encryption* _ptrExternalEncryption;
WebRtc_UWord8* _ptrSrtpBuffer;
WebRtc_UWord8* _ptrSrtcpBuffer;
WebRtc_UWord8* _ptrEncryptionBuffer;
Transport* _ptrTransport;
RtpDump* _rtpDump;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SENDER_H_

View File

@ -1,91 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// vie_shared_data.cc
#include "vie_shared_data.h"
#include "vie_defines.h"
#include "cpu_wrapper.h"
#include "critical_section_wrapper.h"
#include "process_thread.h"
#include "trace.h"
#include "vie_channel_manager.h"
#include "vie_input_manager.h"
#include "vie_render_manager.h"
namespace webrtc {
// Active instance counter
int ViESharedData::_instanceCounter = 0;
ViESharedData::ViESharedData()
: _instanceId(++_instanceCounter),
_apiCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_isInitialized(false), _numberOfCores(CpuWrapper::DetectNumberOfCores()),
_moduleProcessThreadPtr(ProcessThread::CreateProcessThread()),
_viePerformanceMonitor(ViEPerformanceMonitor(_instanceId)),
_channelManager(*new ViEChannelManager(_instanceId, _numberOfCores,
_viePerformanceMonitor)),
_inputManager(*new ViEInputManager(_instanceId)),
_renderManager(*new ViERenderManager(_instanceId)), _lastError(0)
{
Trace::CreateTrace();
_channelManager.SetModuleProcessThread(*_moduleProcessThreadPtr);
_inputManager.SetModuleProcessThread(*_moduleProcessThreadPtr);
_moduleProcessThreadPtr->Start();
}
ViESharedData::~ViESharedData()
{
delete &_inputManager;
delete &_channelManager;
delete &_renderManager;
_moduleProcessThreadPtr->Stop();
ProcessThread::DestroyProcessThread(_moduleProcessThreadPtr);
delete &_apiCritsect;
Trace::ReturnTrace();
}
bool ViESharedData::IsInitialized() const
{
return _isInitialized;
}
int ViESharedData::SetInitialized()
{
_isInitialized = true;
return 0;
}
int ViESharedData::SetUnInitialized()
{
_isInitialized = false;
return 0;
}
void ViESharedData::SetLastError(const int error) const
{
_lastError = error;
}
int ViESharedData::LastErrorInternal() const
{
int error = _lastError;
_lastError = 0;
return error;
}
int ViESharedData::NumberOfCores() const
{
return _numberOfCores;
}
} // namespace webrtc
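
A minimal sketch of the error-latch pattern above, with a hypothetical sub-API method and error value; the grounded behavior is only that SetLastError() stores a code and LastErrorInternal() returns it once and then resets it to zero.

// Sketch only: a hypothetical sub-API method records a failure...
int ViESomeImpl::SomeCall(const int videoChannel)
{
    if (videoChannel < 0)
    {
        SetLastError(-1);   // A real sub-API stores a specific ViE error code.
        return -1;
    }
    return 0;
}
// ...and a later query reads it exactly once:
int error = LastErrorInternal();   // Returns -1 here, then 0 on the next call.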

View File

@ -1,56 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// vie_shared_data.h
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SHARED_DATA_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SHARED_DATA_H_
#include "vie_defines.h"
#include "vie_performance_monitor.h"
namespace webrtc {
class CriticalSectionWrapper;
class ViERenderManager;
class ViEChannelManager;
class ViEInputManager;
class ProcessThread;
class ViESharedData
{
protected:
ViESharedData();
~ViESharedData();
bool IsInitialized() const;
int SetInitialized();
int SetUnInitialized();
void SetLastError(const int error) const;
int LastErrorInternal() const;
protected:
int NumberOfCores() const;
static int _instanceCounter;
const int _instanceId;
CriticalSectionWrapper& _apiCritsect;
bool _isInitialized;
const int _numberOfCores;
ViEPerformanceMonitor _viePerformanceMonitor;
ViEChannelManager& _channelManager;
ViEInputManager& _inputManager;
ViERenderManager& _renderManager;
ProcessThread* _moduleProcessThreadPtr;
private:
mutable int _lastError;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SHARED_DATA_H_

View File

@ -1,332 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "vie_sync_module.h"
#include "critical_section_wrapper.h"
#include "voe_video_sync.h"
#include "rtp_rtcp.h"
#include "trace.h"
#include "video_coding.h"
namespace webrtc {
ViESyncModule::ViESyncModule(int id, VideoCodingModule& vcm,
RtpRtcp& rtcpModule)
: _dataCritsect(*CriticalSectionWrapper::CreateCriticalSection()), _id(id),
_vcm(vcm), _rtcpModule(rtcpModule), _voiceChannelId(-1),
_voiceSyncInterface(NULL), _lastSyncTime(TickTime::Now())
{
}
ViESyncModule::~ViESyncModule()
{
delete &_dataCritsect;
}
int ViESyncModule::SetVoiceChannel(int voiceChannelId,
VoEVideoSync* veSyncInterface)
{
CriticalSectionScoped cs(_dataCritsect);
_voiceChannelId = voiceChannelId;
_voiceSyncInterface = veSyncInterface;
_rtcpModule.DeRegisterSyncModule();
if (!veSyncInterface)
{
_voiceChannelId = -1;
if (voiceChannelId >= 0) // trying to set a voice channel but no interface exists
{
return -1;
}
return 0;
}
RtpRtcp* voiceRTPRTCP = NULL;
veSyncInterface->GetRtpRtcp(_voiceChannelId, voiceRTPRTCP);
return _rtcpModule.RegisterSyncModule(voiceRTPRTCP);
}
int ViESyncModule::VoiceChannel()
{
return _voiceChannelId;
}
// ----------------------------------------------------------------------------
// SetNetworkDelay
//
// Sets how many milliseconds audio is ahead of video when received from the network.
// A positive value means audio is ahead of video.
// ----------------------------------------------------------------------------
void ViESyncModule::SetNetworkDelay(int networkDelay)
{
_channelDelay.networkDelay = networkDelay;
}
// Implements Module
WebRtc_Word32 ViESyncModule::Version(WebRtc_Word8* version,
WebRtc_UWord32& remainingBufferInBytes,
WebRtc_UWord32& position) const
{
if (version == NULL)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, -1,
"Invalid in argument to ViESyncModule Version()");
return -1;
}
WebRtc_Word8 ourVersion[] = "ViESyncModule 1.1.0";
WebRtc_UWord32 ourLength = (WebRtc_UWord32) strlen(ourVersion);
if (remainingBufferInBytes < ourLength + 1)
{
return -1;
}
memcpy(version, ourVersion, ourLength);
version[ourLength] = '\0'; // null termination
remainingBufferInBytes -= (ourLength + 1);
position += (ourLength + 1);
return 0;
}
WebRtc_Word32 ViESyncModule::ChangeUniqueId(const WebRtc_Word32 id)
{
_id = id;
return 0;
}
WebRtc_Word32 ViESyncModule::TimeUntilNextProcess()
{
return (WebRtc_Word32) (kSyncInterval - (TickTime::Now()
- _lastSyncTime).Milliseconds());
}
// Do the lip sync.
WebRtc_Word32 ViESyncModule::Process()
{
CriticalSectionScoped cs(_dataCritsect);
_lastSyncTime = TickTime::Now();
int totalVideoDelayTargetMS = _vcm.Delay();
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _id,
"Video delay (JB + decoder) is %d ms", totalVideoDelayTargetMS);
if (_voiceChannelId != -1)
{
// Get the current audio delay from VoiceEngine.
int currentAudioDelayMS = 0;
if (_voiceSyncInterface->GetDelayEstimate(_voiceChannelId,
currentAudioDelayMS) != 0)
{
// Could not get VoE delay value, probably not a valid channel Id.
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id,
"%s: VE_GetDelayEstimate error for voiceChannel %d",
__FUNCTION__, totalVideoDelayTargetMS, _voiceChannelId);
return 0;
}
int currentDiffMS = 0;
int videoDelayMS = 0; // Total video delay
if (currentAudioDelayMS > 40) // VoiceEngine reports delay estimates even when not started; ignore values below 40 ms
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _id,
"Audio delay is: %d for voice channel: %d",
currentAudioDelayMS, _voiceChannelId);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _id,
"Network delay diff is: %d for voice channel: %d",
_channelDelay.networkDelay, _voiceChannelId);
// Calculate the diff between the lowest possible
// video delay and the current audio delay
currentDiffMS = totalVideoDelayTargetMS - currentAudioDelayMS
+ _channelDelay.networkDelay;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _id,
"Current diff is: %d for audio channel: %d",
currentDiffMS, _voiceChannelId);
if (currentDiffMS > 0)
{
// The minimum video delay is longer than the current audio delay.
// We need to decrease extra video delay, if we have added extra delay
// earlier, or add extra audio delay.
if (_channelDelay.extraVideoDelayMS > 0)
{
// We have extra delay added to ViE.
// Reduce this delay before adding delay to VE.
// This is the desired delay, we can't reduce more than this.
videoDelayMS = totalVideoDelayTargetMS;
// Check we don't reduce the delay too much
if (videoDelayMS < _channelDelay.lastVideoDelayMS
- kMaxVideoDiffMS)
{
// Too large step...
videoDelayMS = _channelDelay.lastVideoDelayMS
- kMaxVideoDiffMS;
_channelDelay.extraVideoDelayMS = videoDelayMS
- totalVideoDelayTargetMS;
} else
{
_channelDelay.extraVideoDelayMS = 0;
}
_channelDelay.lastVideoDelayMS = videoDelayMS;
_channelDelay.lastSyncDelay = -1;
_channelDelay.extraAudioDelayMS = 0;
} else
{
// We have no extra video delay to remove.
// Increase the audio delay
if (_channelDelay.lastSyncDelay >= 0)
{
// We have increased the audio delay earlier,
// increase it even more.
int audioDiffMS = currentDiffMS / 2;
if (audioDiffMS > kMaxAudioDiffMS)
{
// We only allow a maximum change of kMaxAudioDiffMS for audio
// due to NetEQ maximum changes.
audioDiffMS = kMaxAudioDiffMS;
}
// Increase the audio delay
_channelDelay.extraAudioDelayMS += audioDiffMS;
// Don't set a too high delay.
if (_channelDelay.extraAudioDelayMS > kMaxDelay)
{
_channelDelay.extraAudioDelayMS = kMaxDelay;
}
// Don't add any extra video delay.
videoDelayMS = totalVideoDelayTargetMS;
_channelDelay.extraVideoDelayMS = 0;
_channelDelay.lastVideoDelayMS = videoDelayMS;
_channelDelay.lastSyncDelay = 1;
} else // lastSyncDelay < 0
{
// First time after a delay change, don't add any extra delay.
// This is to not toggle back and forth too much.
_channelDelay.extraAudioDelayMS = 0;
// Set minimum video delay
videoDelayMS = totalVideoDelayTargetMS;
_channelDelay.extraVideoDelayMS = 0;
_channelDelay.lastVideoDelayMS = videoDelayMS;
_channelDelay.lastSyncDelay = 0;
}
}
} else // if (currentDiffMS > 0)
{
// The minimum video delay is lower than the current audio delay.
// We need to decrease possible extra audio delay, or
// add extra video delay.
if (_channelDelay.extraAudioDelayMS > 0)
{
// We have extra delay in VoiceEngine
// Start with decreasing the voice delay
int audioDiffMS = currentDiffMS / 2; // This is a negative value
if (audioDiffMS < -1 * kMaxAudioDiffMS)
{
// Don't change the delay too much at once.
audioDiffMS = -1 * kMaxAudioDiffMS;
}
_channelDelay.extraAudioDelayMS += audioDiffMS; // Add the negative change...
if (_channelDelay.extraAudioDelayMS < 0)
{
// Negative values not allowed
_channelDelay.extraAudioDelayMS = 0;
_channelDelay.lastSyncDelay = 0;
} else
{
// There is more audio delay to use for the next round.
_channelDelay.lastSyncDelay = 1;
}
// Keep the video delay at the minimum values.
videoDelayMS = totalVideoDelayTargetMS;
_channelDelay.extraVideoDelayMS = 0;
_channelDelay.lastVideoDelayMS = videoDelayMS;
} else
{
// We have no extra delay in VoiceEngine
// Increase the video delay
_channelDelay.extraAudioDelayMS = 0;
// Make the diff positive
int videoDiffMS = -1 * currentDiffMS;
// This is the desired delay we want
videoDelayMS = totalVideoDelayTargetMS + videoDiffMS;
if (videoDelayMS > _channelDelay.lastVideoDelayMS)
{
if (videoDelayMS > _channelDelay.lastVideoDelayMS
+ kMaxVideoDiffMS)
{
// Don't increase the delay too much at once
videoDelayMS = _channelDelay.lastVideoDelayMS
+ kMaxVideoDiffMS;
}
// Verify we don't go above the maximum allowed delay
if (videoDelayMS > kMaxDelay)
{
videoDelayMS = kMaxDelay;
}
} else
{
if (videoDelayMS < _channelDelay.lastVideoDelayMS
- kMaxVideoDiffMS)
{
// Don't decrease the delay too much at once
videoDelayMS = _channelDelay.lastVideoDelayMS
- kMaxVideoDiffMS;
}
// Verify we don't go below the minimum delay
if (videoDelayMS < totalVideoDelayTargetMS)
{
videoDelayMS = totalVideoDelayTargetMS;
}
}
// Store the values
_channelDelay.extraVideoDelayMS = videoDelayMS
- totalVideoDelayTargetMS;
_channelDelay.lastVideoDelayMS = videoDelayMS;
_channelDelay.lastSyncDelay = -1;
}
}
}
WEBRTC_TRACE(
webrtc::kTraceInfo,
webrtc::kTraceVideo,
_id,
"Sync video delay %d ms for video channel and audio delay %d for audio channel %d",
videoDelayMS, _channelDelay.extraAudioDelayMS,
_voiceChannelId);
// Set the extra audio delay
if (_voiceSyncInterface->SetMinimumPlayoutDelay(_voiceChannelId,
_channelDelay.extraAudioDelayMS) == -1)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, _id,
"Error setting voice delay");
}
// Sanity check: a negative delay is not valid.
if (videoDelayMS < 0)
{
videoDelayMS = 0;
}
totalVideoDelayTargetMS = (totalVideoDelayTargetMS > videoDelayMS) ?
totalVideoDelayTargetMS : videoDelayMS;
_vcm.SetMinimumPlayoutDelay(totalVideoDelayTargetMS);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, _id,
"New Video delay target is: %d", totalVideoDelayTargetMS);
}
return 0;
}
} // namespace webrtc
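
A short worked example of the delay arithmetic in Process(), using assumed measurements; the constants and the 120 ms default network delay come from ViESyncModule and ViESyncDelay.

// Worked example (assumed values):
//   totalVideoDelayTargetMS = 150   (VCM jitter buffer + decode delay)
//   currentAudioDelayMS     = 230   (VoEVideoSync::GetDelayEstimate)
//   networkDelay            = 120   (ViESyncDelay default)
//
//   currentDiffMS = 150 - 230 + 120 = 40
//
// The diff is positive, i.e. video is delayed more than audio, and no
// extra video delay has been added earlier, so audioDiffMS = 40 / 2 =
// 20 ms (below kMaxAudioDiffMS) is added to extraAudioDelayMS and
// applied through SetMinimumPlayoutDelay(), while the video delay stays
// at its 150 ms target. A negative diff would instead first reduce any
// extra audio delay and then add extra video delay, limited per pass by
// kMaxVideoDiffMS and overall by kMaxDelay.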

View File

@ -1,84 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_sync_module.h
* Responsible for doing Audio/Video sync
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SYNC_MODULE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SYNC_MODULE_H_
#include "module.h"
#include "tick_util.h"
namespace webrtc
{
class CriticalSectionWrapper;
class RtpRtcp;
class VideoCodingModule;
class VoEVideoSync;
class ViESyncModule : public Module
{
public:
enum { kSyncInterval = 1000};
enum { kMaxVideoDiffMS = 80 }; // Video sync
enum { kMaxAudioDiffMS = 80 }; // Video sync
enum { kMaxDelay = 1500 }; // Video sync
ViESyncModule(int id, VideoCodingModule& vcm,
RtpRtcp& rtcpModule);
~ViESyncModule();
int SetVoiceChannel(int voiceChannelId, VoEVideoSync* voiceSyncInterface);
int VoiceChannel();
void SetNetworkDelay(int networkDelay);
// Implements Module
virtual WebRtc_Word32 Version(WebRtc_Word8* version,
WebRtc_UWord32& remainingBufferInBytes,
WebRtc_UWord32& position) const;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 TimeUntilNextProcess();
virtual WebRtc_Word32 Process();
private:
// Critical sections
CriticalSectionWrapper& _dataCritsect;
int _id;
VideoCodingModule& _vcm;
RtpRtcp& _rtcpModule;
int _voiceChannelId;
VoEVideoSync* _voiceSyncInterface;
TickTime _lastSyncTime;
struct ViESyncDelay
{
ViESyncDelay()
{
extraVideoDelayMS = 0;
lastVideoDelayMS = 0;
extraAudioDelayMS = 0;
lastSyncDelay = 0;
networkDelay = 120;
}
int extraVideoDelayMS;
int lastVideoDelayMS;
int extraAudioDelayMS;
int lastSyncDelay;
int networkDelay;
};
ViESyncDelay _channelDelay;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_SYNC_MODULE_H_

View File

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="lib" path="libs/VideoEngine_android_java.jar"/>
<classpathentry kind="lib" path="libs/VoiceEngine_android_java.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ViEAutotest</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@ -1,39 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
MY_CAPTURE_FOLDER := ../../../../../modules/video_capture/main/source
MY_CAPTURE_JAVA_FOLDER := Android/java/org/webrtc/videoengine
MY_CAPTURE_PATH := $(MY_CAPTURE_FOLDER)/$(MY_CAPTURE_JAVA_FOLDER)
MY_RENDER_FOLDER := ../../../../../modules/video_render/main/source
MY_RENDER_JAVA_FOLDER := Android/java/org/webrtc/videoengine
MY_RENDER_PATH := $(MY_RENDER_FOLDER)/$(MY_RENDER_JAVA_FOLDER)
LOCAL_MODULE_TAGS := tests
LOCAL_SRC_FILES := \
src/org/webrtc/vieautotest/ViEAutotest.java \
$(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
$(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
$(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
$(MY_RENDER_PATH)/ViEAndroidGLES20.java \
$(MY_RENDER_PATH)/ViERenderer.java \
$(MY_RENDER_PATH)/ViESurfaceRenderer.java
LOCAL_PACKAGE_NAME := webrtc-video-autotest
LOCAL_CERTIFICATE := platform
LOCAL_JNI_SHARED_LIBRARIES := libwebrtc-video-autotest-jni
include $(BUILD_PACKAGE)
include $(call all-makefiles-under,$(LOCAL_PATH))

View File

@ -1,26 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
android:versionCode="1"
android:versionName="1.0" package="org.webrtc.vieautotest">
<application android:label="@string/app_name"
android:debuggable="true"
android:icon="@drawable/logo">
<activity android:label="@string/app_name"
android:name="ViEAutotest">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="3" android:targetSdkVersion="8" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>

View File

@ -1,11 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-9

View File

@ -1,37 +0,0 @@
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package org.webrtc.vieautotest;
public final class R {
public static final class array {
public static final int subtest_array=0x7f050001;
public static final int test_array=0x7f050000;
}
public static final class attr {
}
public static final class drawable {
public static final int logo=0x7f020000;
}
public static final class id {
public static final int Button01=0x7f060004;
public static final int LocalView=0x7f060001;
public static final int RemoteView=0x7f060000;
public static final int subtestSpinner=0x7f060003;
public static final int testSpinner=0x7f060002;
}
public static final class layout {
public static final int main=0x7f030000;
}
public static final class string {
public static final int SpinnerSubtest=0x7f040004;
public static final int SpinnerTitle=0x7f040003;
public static final int TitleName=0x7f040001;
public static final int app_name=0x7f040000;
public static final int run_button=0x7f040002;
}
}

View File

@ -1,76 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
LOCAL_PATH := $(call my-dir)
# the follow two lines are for NDK build
INTERFACES_PATH := $(LOCAL_PATH)/../../../../../../../build/interface
LIBS_PATH := $(LOCAL_PATH)/../../../../../../../build/libraries
include $(CLEAR_VARS)
LOCAL_MODULE_TAGS := tests
LOCAL_MODULE := libwebrtc-video-autotest-jni
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := \
vie_autotest_jni.cc \
../../source/vie_autotest_android.cc \
../../source/vie_autotest.cc \
../../source/vie_autotest_base.cc \
../../source/vie_autotest_capture.cc \
../../source/vie_autotest_codec.cc \
../../source/vie_autotest_encryption.cc \
../../source/vie_autotest_file.cc \
../../source/vie_autotest_image_process.cc \
../../source/vie_autotest_loopback.cc \
../../source/vie_autotest_network.cc \
../../source/vie_autotest_render.cc \
../../source/vie_autotest_rtp_rtcp.cc \
../../source/tb_I420_codec.cc \
../../source/tb_capture_device.cc \
../../source/tb_external_transport.cc \
../../source/tb_interfaces.cc \
../../source/tb_video_channel.cc
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_CFLAGS += \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_C_INCLUDES := \
external/gtest/include \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../../interface \
$(LOCAL_PATH)/../../../../interface \
$(LOCAL_PATH)/../../../../../.. \
$(LOCAL_PATH)/../../../../source \
$(LOCAL_PATH)/../../../../../../modules/video_render/main/interface \
$(LOCAL_PATH)/../../../../../../modules/interface \
$(LOCAL_PATH)/../../../../../../modules/video_capture/main/interface \
$(LOCAL_PATH)/../../../../../../modules/video_coding/codecs/interface \
$(LOCAL_PATH)/../../../../../../voice_engine/main/interface \
$(LOCAL_PATH)/../../../../../../system_wrappers/interface
LOCAL_PRELINK_MODULE := false
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := \
libutils \
libstlport \
libandroid \
libwebrtc \
libGLESv2
# the following line is for NDK build
LOCAL_LDLIBS := $(LIBS_PATH)/VideoEngine_android_gcc.a -llog -lgcc
include external/stlport/libstlport.mk
include $(BUILD_SHARED_LIBRARY)

View File

@ -1,34 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_webrtc_vieautotest_ViEAutotest */
#ifndef _Included_org_webrtc_vieautotest_ViEAutotest
#define _Included_org_webrtc_vieautotest_ViEAutotest
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_webrtc_vieautotest_ViEAutotest
* Method: RunTest
* Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
*/
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2
(JNIEnv *, jobject, jint, jint, jobject, jobject);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,137 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <string.h>
#include <android/log.h>
#include <pthread.h>
#include <unistd.h>
#include "org_webrtc_vieautotest_vie_autotest.h"
#include "vie_autotest_android.h"
#define WEBRTC_LOG_TAG "*WEBRTCN*"
// VideoEngine data struct
typedef struct
{
JavaVM* jvm;
} VideoEngineData;
// Global variables
JavaVM* webrtcGlobalVM;
// Global variables visible in this file
static VideoEngineData vieData;
// "Local" functions (i.e. not Java accessible)
#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
static bool GetSubAPIs(VideoEngineData& vieData);
static bool ReleaseSubAPIs(VideoEngineData& vieData);
//
// General functions
//
// JNI_OnLoad
jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
if (!vm) {
__android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
"JNI_OnLoad did not receive a valid VM pointer");
return -1;
}
JNIEnv* env;
if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
JNI_VERSION_1_4)) {
__android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
"JNI_OnLoad could not get JNI env");
return -1;
}
// Init ViE data
vieData.jvm = vm;
return JNI_VERSION_1_4;
}
// Class: org_webrtc_vieautotest_ViEAutotest
// Method: RunTest
// Signature: (IILandroid/opengl/GLSurfaceView;Landroid/opengl/GLSurfaceView;)I
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_opengl_GLSurfaceView_2Landroid_opengl_GLSurfaceView_2(
JNIEnv* env,
jobject context,
jint testType,
jint subtestType,
jobject glView1,
jobject glView2)
{
int numErrors = -1;
numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType, glView1,
glView2, vieData.jvm, env,
context);
return numErrors;
}
// Class: org_webrtc_vieautotest_ViEAutotest
// Method: RunTest
// Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2(
JNIEnv* env,
jobject context,
jint testType,
jint subtestType,
jobject surfaceHolder1,
jobject surfaceHolder2)
{
int numErrors = -1;
numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType,
surfaceHolder1, surfaceHolder2,
vieData.jvm, env, context);
return numErrors;
}
//
// Local functions
//
bool GetSubAPIs(VideoEngineData& vieData) {
bool retVal = true;
//vieData.base = ViEBase::GetInterface(vieData.vie);
//if (vieData.base == NULL)
{
__android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
"Could not get Base API");
retVal = false;
}
return retVal;
}
bool ReleaseSubAPIs(VideoEngineData& vieData) {
bool releaseOk = true;
//if (vieData.base)
{
//if (vieData.base->Release() != 0)
if (false) {
__android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
"Release base sub-API failed");
releaseOk = false;
}
else {
//vieData.base = NULL;
}
}
return releaseOk;
}

Binary file not shown.

View File

@ -1,64 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<RelativeLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<LinearLayout
android:id="@+id/RemoteView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1">
<!-- log instead of video
<ImageView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:scaleType="fitXY"
android:src="@drawable/logo" /> -->
</LinearLayout>
<LinearLayout
android:id="@+id/LocalView"
android:layout_width="120dip"
android:layout_height="120dip"
android:layout_weight="1">
<!-- <ImageView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:scaleType="fitXY"
android:src="@drawable/logo" /> -->
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true">
<LinearLayout
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true">
<Spinner
android:id="@+id/testSpinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:prompt="@string/SpinnerTitle"
/>
<Spinner
android:id="@+id/subtestSpinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:prompt="@string/SpinnerSubtest"
/>
<Button
android:text="@string/run_button"
android:id="@+id/Button01"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</Button>
</LinearLayout>
</LinearLayout>
</RelativeLayout>
</FrameLayout>

View File

@ -1,31 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">ViEAutotest</string>
<string name="TitleName">ViEAutotest</string>
<string name="run_button">Run Test</string>
<string name="SpinnerTitle">Test type...</string>
<string-array name="test_array">
<item>Standard</item>
<item>API</item>
<item>Extended</item>
<item>Loopback</item>
<item>Custom</item>
</string-array>
<string name="SpinnerSubtest">Run...</string>
<string-array name="subtest_array">
<item>All</item>
<item>Base</item>
<item>Capture</item>
<item>Codec</item>
<item>Mix</item>
<item>Encryption</item>
<item>External Codec</item>
<item>File</item>
<item>Image Process</item>
<item>Network</item>
<item>Render</item>
<item>RTP/RTCP</item>
</string-array>
</resources>

View File

@ -1,162 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.vieautotest;
import org.webrtc.vieautotest.R;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.Button;
import android.view.SurfaceView;
import android.view.View;
import android.view.SurfaceHolder;
import android.widget.LinearLayout;
import android.opengl.GLSurfaceView;
import android.widget.Spinner;
import android.widget.ArrayAdapter;
import android.widget.AdapterView;
public class ViEAutotest extends Activity
implements
AdapterView.OnItemSelectedListener,
View.OnClickListener {
private Thread testThread;
private Spinner testSpinner;
private Spinner subtestSpinner;
private int testSelection;
private int subTestSelection;
// View for remote video
private LinearLayout remoteSurface = null;
private GLSurfaceView glSurfaceView = null;
private SurfaceView surfaceView = null;
private LinearLayout localSurface = null;
private GLSurfaceView glLocalSurfaceView = null;
private SurfaceView localSurfaceView = null;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.d("*WEBRTC*", "onCreate called");
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Set the Start button action
final Button buttonStart = (Button) findViewById(R.id.Button01);
buttonStart.setOnClickListener(this);
// Set test spinner
testSpinner = (Spinner) findViewById(R.id.testSpinner);
ArrayAdapter<CharSequence> adapter =
ArrayAdapter.createFromResource(this, R.array.test_array,
android.R.layout.simple_spinner_item);
int resource = android.R.layout.simple_spinner_dropdown_item;
adapter.setDropDownViewResource(resource);
testSpinner.setAdapter(adapter);
testSpinner.setOnItemSelectedListener(this);
// Set sub test spinner
subtestSpinner = (Spinner) findViewById(R.id.subtestSpinner);
ArrayAdapter<CharSequence> subtestAdapter =
ArrayAdapter.createFromResource(this, R.array.subtest_array,
android.R.layout.simple_spinner_item);
subtestAdapter.setDropDownViewResource(resource);
subtestSpinner.setAdapter(subtestAdapter);
subtestSpinner.setOnItemSelectedListener(this);
remoteSurface = (LinearLayout) findViewById(R.id.RemoteView);
surfaceView = new SurfaceView(this);
remoteSurface.addView(surfaceView);
localSurface = (LinearLayout) findViewById(R.id.LocalView);
localSurfaceView = new SurfaceView(this);
localSurfaceView.setZOrderMediaOverlay(true);
localSurface.addView(localSurfaceView);
// Set members
testSelection = 0;
subTestSelection = 0;
}
public void onClick(View v) {
Log.d("*WEBRTC*", "Button clicked...");
switch (v.getId()) {
case R.id.Button01:
new Thread(new Runnable() {
public void run() {
Log.d("*WEBRTC*", "Calling RunTest...");
RunTest(testSelection, subTestSelection,
localSurfaceView, surfaceView);
Log.d("*WEBRTC*", "RunTest done");
}
}).start();
}
}
public void onItemSelected(AdapterView<?> parent, View v,
int position, long id) {
if (parent == (Spinner) findViewById(R.id.testSpinner)) {
testSelection = position;
} else {
subTestSelection = position;
}
}
public void onNothingSelected(AdapterView<?> parent) {
}
@Override
protected void onStart() {
super.onStart();
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onStop() {
super.onStop();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
// C++ function performing the chosen test
// private native int RunTest(int testSelection, int subtestSelection,
// GLSurfaceView window1, GLSurfaceView window2);
private native int RunTest(int testSelection, int subtestSelection,
SurfaceView window1, SurfaceView window2);
// This block loads the native autotest library
// ("webrtc-video-autotest-jni") on application startup.
static {
Log.d("*WEBRTC*", "Loading ViEAutotest...");
System.loadLibrary("webrtc-video-autotest-jni");
}
}
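Under standard JNI name mangling, the native RunTest declaration above maps to a C++ entry point along the following lines. This is only a sketch of what such a wrapper looks like (the actual JNI implementation may differ); it assumes the vie_autotest_android.h interface that appears later in this diff.

#include <jni.h>
#include "vie_autotest_android.h"

// Sketch of the JNI entry point for
// org.webrtc.vieautotest.ViEAutotest.RunTest(int, int, SurfaceView, SurfaceView).
extern "C" JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest(JNIEnv* env,
                                                jobject context,
                                                jint testSelection,
                                                jint subtestSelection,
                                                jobject window1,
                                                jobject window2) {
  JavaVM* jvm = NULL;
  env->GetJavaVM(&jvm);  // the engine needs the VM pointer for later callbacks
  return ViEAutoTestAndroid::RunAutotest(testSelection, subtestSelection,
                                         window1, window2, jvm, env, context);
}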

View File

@ -1,132 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* tb_I420_codec.h
*
* This file contains the interface to I420 "codec"
* This is a dummy wrapper to allow VCM deal with raw I420 sequences
*
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
#include "video_codec_interface.h"
class tbI420Encoder: public webrtc::VideoEncoder
{
public:
tbI420Encoder();
virtual ~tbI420Encoder();
static WebRtc_Word32 VersionStatic(WebRtc_Word8* version,
WebRtc_Word32 length);
virtual WebRtc_Word32 Version(WebRtc_Word8 *version,
WebRtc_Word32 length) const;
virtual WebRtc_Word32 InitEncode(const webrtc::VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize);
virtual WebRtc_Word32 Encode(const webrtc::RawImage& inputImage,
const void* codecSpecificInfo = NULL,
webrtc::VideoFrameType frameType =
webrtc::kDeltaFrame);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate);
virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
WebRtc_Word32 /*size*/);
struct FunctionCalls
{
WebRtc_Word32 InitEncode;
WebRtc_Word32 Encode;
WebRtc_Word32 RegisterEncodeCompleteCallback;
WebRtc_Word32 Release;
WebRtc_Word32 Reset;
WebRtc_Word32 SetRates;
WebRtc_Word32 SetPacketLoss;
WebRtc_Word32 SetPeriodicKeyFrames;
WebRtc_Word32 CodecConfigParameters;
};
FunctionCalls GetFunctionCalls();
private:
bool _inited;
webrtc::EncodedImage _encodedImage;
FunctionCalls _functionCalls;
webrtc::EncodedImageCallback* _encodedCompleteCallback;
}; // end of tbI420Encoder class
/***************************/
/* tbI420Decoder class */
/***************************/
class tbI420Decoder: public webrtc::VideoDecoder
{
public:
tbI420Decoder();
virtual ~tbI420Decoder();
virtual WebRtc_Word32 InitDecode(const webrtc::VideoCodec* inst,
WebRtc_Word32 numberOfCores);
virtual WebRtc_Word32 Decode(const webrtc::EncodedImage& inputImage,
bool missingFrames,
const void* codecSpecificInfo = NULL,
WebRtc_Word64 renderTimeMs = -1);
virtual WebRtc_Word32
RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
struct FunctionCalls
{
WebRtc_Word32 InitDecode;
WebRtc_Word32 Decode;
WebRtc_Word32 RegisterDecodeCompleteCallback;
WebRtc_Word32 Release;
WebRtc_Word32 Reset;
};
FunctionCalls GetFunctionCalls();
private:
webrtc::RawImage _decodedImage;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
bool _inited;
FunctionCalls _functionCalls;
webrtc::DecodedImageCallback* _decodeCompleteCallback;
}; // end of tbI420Decoder class
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
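Since every tbI420Encoder/tbI420Decoder method only bumps a FunctionCalls counter, a test can assert exactly which calls the engine made. A hedged usage sketch: the surrounding test body, the numberOfErrors counter, and the registration step are hypothetical, and the ViETest helper is declared later in this diff.

// Hypothetical check after running a send test with the dummy I420 encoder
// registered as an external codec.
tbI420Encoder encoder;
// ... register `encoder` with the engine and stream for a while ...
tbI420Encoder::FunctionCalls calls = encoder.GetFunctionCalls();
numberOfErrors += ViETest::TestError(calls.InitEncode == 1,
                                     "InitEncode called %d times",
                                     calls.InitEncode);
numberOfErrors += ViETest::TestError(calls.Encode > 0,
                                     "no frames were passed to Encode");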

View File

@ -1,32 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
#include "tb_interfaces.h"
#include "video_capture.h"
class tbCaptureDevice
{
public:
tbCaptureDevice(tbInterfaces& Engine, int& nrOfErrors);
~tbCaptureDevice(void);
int captureId;
void ConnectTo(int videoChannel);
void Disconnect(int videoChannel);
private:
int& numberOfErrors;
tbInterfaces& ViE;
webrtc::VideoCaptureModule* vcpm_;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_

View File

@ -1,95 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// tb_external_transport.h
//
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
#include "common_types.h"
#include "list_wrapper.h"
namespace webrtc
{
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
class ViENetwork;
}
class tbExternalTransport: public webrtc::Transport
{
public:
tbExternalTransport(webrtc::ViENetwork& vieNetwork);
~tbExternalTransport(void);
virtual int SendPacket(int channel, const void *data, int len);
virtual int SendRTCPPacket(int channel, const void *data, int len);
WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate); // Rate in %
void SetNetworkDelay(WebRtc_Word64 delayMs);
void ClearStats();
void GetStats(WebRtc_Word32& numRtpPackets,
WebRtc_Word32& numDroppedPackets,
WebRtc_Word32& numRtcpPackets);
void EnableSSRCCheck();
unsigned int ReceivedSSRC();
void EnableSequenceNumberCheck();
unsigned short GetFirstSequenceNumber();
protected:
static bool ViEExternalTransportRun(void* object);
bool ViEExternalTransportProcess();
private:
WebRtc_Word64 NowMs();
enum
{
KMaxPacketSize = 1650
};
enum
{
KMaxWaitTimeMs = 100
};
typedef struct
{
WebRtc_Word8 packetBuffer[KMaxPacketSize];
WebRtc_Word32 length;
WebRtc_Word32 channel;
WebRtc_Word64 receiveTime;
} VideoPacket;
webrtc::ViENetwork& _vieNetwork;
webrtc::ThreadWrapper& _thread;
webrtc::EventWrapper& _event;
webrtc::CriticalSectionWrapper& _crit;
webrtc::CriticalSectionWrapper& _statCrit;
WebRtc_Word32 _lossRate;
WebRtc_Word64 _networkDelayMs;
WebRtc_Word32 _rtpCount;
WebRtc_Word32 _rtcpCount;
WebRtc_Word32 _dropCount;
webrtc::ListWrapper _rtpPackets;
webrtc::ListWrapper _rtcpPackets;
bool _checkSSRC;
WebRtc_UWord32 _lastSSRC;
bool _checkSequenceNumber;
WebRtc_UWord16 _firstSequenceNumber;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
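A hedged sketch of how a test drives this fake transport: registering it on the channel through the ViENetwork API is elided, and the ViE helper object is the tbInterfaces instance declared below.

// Simulate 10% packet loss and 100 ms one-way delay, then read the counters.
tbExternalTransport transport(*ViE.ptrViENetwork);
transport.SetPacketLoss(10);       // drop roughly 10% of the RTP packets
transport.SetNetworkDelay(100);    // hold every packet for 100 ms
// ... run the send/receive part of the test ...
WebRtc_Word32 rtpPackets = 0, droppedPackets = 0, rtcpPackets = 0;
transport.GetStats(rtpPackets, droppedPackets, rtcpPackets);
transport.ClearStats();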

View File

@ -1,54 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
#include "vie_autotest_defines.h"
#include "common_types.h"
#include "vie_base.h"
#include "vie_capture.h"
#include "vie_codec.h"
#include "vie_image_process.h"
#include "vie_network.h"
#include "vie_render.h"
#include "vie_rtp_rtcp.h"
#include "vie_encryption.h"
#include "vie_defines.h"
//using namespace webrtc;
class tbInterfaces
{
public:
tbInterfaces(const char* testName, int& nrOfErrors);
~tbInterfaces(void);
webrtc::VideoEngine* ptrViE;
webrtc::ViEBase* ptrViEBase;
webrtc::ViECapture* ptrViECapture;
webrtc::ViERender* ptrViERender;
webrtc::ViERTP_RTCP* ptrViERtpRtcp;
webrtc::ViECodec* ptrViECodec;
webrtc::ViENetwork* ptrViENetwork;
webrtc::ViEImageProcess* ptrViEImageProcess;
webrtc::ViEEncryption* ptrViEEncryption;
int LastError()
{
return ptrViEBase->LastError();
}
private:
int& numberOfErrors;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
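A typical usage pattern, sketched under the assumption that ViEBase exposes a CreateChannel(int&) call (the base API itself is not shown in this part of the diff):

// tbInterfaces acquires the engine plus all sub-APIs in its constructor and
// releases them in its destructor; LastError() is handy in failure messages.
int numErrors = 0;
tbInterfaces ViE("ViEBaseStandardTest", numErrors);
int videoChannel = -1;
int error = ViE.ptrViEBase->CreateChannel(videoChannel);  // assumed signature
numErrors += ViETest::TestError(error == 0,
                                "CreateChannel failed, LastError: %d",
                                ViE.LastError());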

View File

@ -1,42 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
#include "tb_interfaces.h"
class tbVideoChannel
{
public:
tbVideoChannel(tbInterfaces& Engine, int& nrOfErrors,
webrtc::VideoCodecType sendCodec = webrtc::kVideoCodecVP8,
int width = 352, int height = 288, int frameRate = 30,
int startBitrate = 300);
~tbVideoChannel(void);
void SetFrameSettings(int width, int height, int frameRate);
void StartSend(const unsigned short rtpPort = 11000,
const char* ipAddress = "127.0.0.1");
void StopSend();
void StartReceive(const unsigned short rtpPort = 11000);
void StopReceive();
int videoChannel;
private:
int& numberOfErrors;
tbInterfaces& ViE;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
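Together with tbInterfaces and tbCaptureDevice, this helper reduces a local loopback call to a few lines. A hedged sketch of the pattern; AutoTestSleep and KAutoTestSleepTimeMs come from vie_autotest_defines.h further down.

// Loopback sketch: defaults are VP8, 352x288 @ 30 fps, sent to 127.0.0.1:11000.
int numErrors = 0;
tbInterfaces ViE("HelperLoopback", numErrors);   // engine + all sub-APIs
tbVideoChannel channel(ViE, numErrors);          // default codec settings
tbCaptureDevice capture(ViE, numErrors);         // opens a capture device
capture.ConnectTo(channel.videoChannel);
channel.StartReceive();                          // default RTP port 11000
channel.StartSend();                             // default 127.0.0.1:11000
AutoTestSleep(KAutoTestSleepTimeMs);             // let media flow for a while
channel.StopSend();
channel.StopReceive();
capture.Disconnect(channel.videoChannel);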

View File

@ -1,143 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest.h
//
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
#include "common_types.h"
#include "voe_base.h"
#include "voe_codec.h"
#include "voe_hardware.h"
#include "voe_audio_processing.h"
#include "vie_base.h"
#include "vie_capture.h"
#include "vie_codec.h"
#include "vie_file.h"
#include "vie_network.h"
#include "vie_render.h"
#include "vie_rtp_rtcp.h"
#include "vie_defines.h"
#include "vie_errors.h"
#include "video_render_defines.h"
#ifndef ANDROID
#include <string>
#endif
using namespace webrtc;
class ViEAutoTest
{
public:
ViEAutoTest(void* window1, void* window2);
~ViEAutoTest();
int ViEStandardTest();
int ViEExtendedTest();
int ViEAPITest();
int ViELoopbackCall();
// custom call and helper functions
int ViECustomCall();
// general settings functions
bool GetVideoDevice(ViEBase* ptrViEBase, ViECapture* ptrViECapture,
char* captureDeviceName, char* captureDeviceUniqueId);
bool GetIPAddress(char* IP);
#ifndef ANDROID
bool ValidateIP(std::string iStr);
#endif
void PrintCallInformation(char* IP, char* videoCaptureDeviceName,
char* videoCaptureUniqueId,
webrtc::VideoCodec videoCodec, int videoTxPort,
int videoRxPort, char* audioCaptureDeviceName,
char* audioPlaybackDeviceName,
webrtc::CodecInst audioCodec, int audioTxPort,
int audioRxPort);
// video settings functions
bool GetVideoPorts(int* txPort, int* rxPort);
bool GetVideoCodec(ViECodec* ptrViECodec, webrtc::VideoCodec& videoCodec);
// audio settings functions
bool GetAudioDevices(VoEBase* ptrVEBase, VoEHardware* ptrVEHardware,
char* recordingDeviceName, int& recordingDeviceIndex,
char* playbackDeviceName, int& playbackDeviceIndex);
bool GetAudioDevices(VoEBase* ptrVEBase, VoEHardware* ptrVEHardware,
int& recordingDeviceIndex, int& playbackDeviceIndex);
bool GetAudioPorts(int* txPort, int* rxPort);
bool GetAudioCodec(VoECodec* ptrVeCodec, CodecInst& audioCodec);
// vie_autotest_base.cc
int ViEBaseStandardTest();
int ViEBaseExtendedTest();
int ViEBaseAPITest();
// vie_autotest_capture.cc
int ViECaptureStandardTest();
int ViECaptureExtendedTest();
int ViECaptureAPITest();
int ViECaptureExternalCaptureTest();
// vie_autotest_codec.cc
int ViECodecStandardTest();
int ViECodecExtendedTest();
int ViECodecExternalCodecTest();
int ViECodecAPITest();
// vie_autotest_encryption.cc
int ViEEncryptionStandardTest();
int ViEEncryptionExtendedTest();
int ViEEncryptionAPITest();
// vie_autotest_file.cc
int ViEFileStandardTest();
int ViEFileExtendedTest();
int ViEFileAPITest();
// vie_autotest_image_process.cc
int ViEImageProcessStandardTest();
int ViEImageProcessExtendedTest();
int ViEImageProcessAPITest();
// vie_autotest_network.cc
int ViENetworkStandardTest();
int ViENetworkExtendedTest();
int ViENetworkAPITest();
// vie_autotest_render.cc
int ViERenderStandardTest();
int ViERenderExtendedTest();
int ViERenderAPITest();
// vie_autotest_rtp_rtcp.cc
int ViERtpRtcpStandardTest();
int ViERtpRtcpExtendedTest();
int ViERtpRtcpAPITest();
private:
void PrintAudioCodec(const webrtc::CodecInst audioCodec);
void PrintVideoCodec(const webrtc::VideoCodec videoCodec);
void* _window1;
void* _window2;
VideoRenderType _renderType;
VideoRender* _vrm1;
VideoRender* _vrm2;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
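A hedged sketch of how a test binary wires the two render windows into ViEAutoTest. ViEAutoTestWindowManager is the per-platform implementation declared further down in this diff; window sizes and titles are illustrative.

ViEAutoTestWindowManager windowManager;
AutoTestRect window1Rect(0, 0, 352, 288);        // x, y, width, height
AutoTestRect window2Rect(352, 0, 352, 288);
windowManager.CreateWindows(window1Rect, window2Rect,
                            (void*) "Local video", (void*) "Remote video");
ViEAutoTest vieAutoTest(windowManager.GetWindow1(),
                        windowManager.GetWindow2());
int numErrors = vieAutoTest.ViEStandardTest();   // or ViEAPITest(), ViELoopbackCall(), ...
windowManager.TerminateWindows();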

View File

@ -1,26 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
class ViEAutoTestAndroid
{
public:
static int RunAutotest(int testSelection,
int subTestSelection,
void* window1,
void* window2,
void* javaVM,
void* env,
void* context);
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_

View File

@ -1,261 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_defines.h
//
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
#include <cassert>
#include <stdarg.h>
#include <stdio.h>
#include "engine_configurations.h"
#if defined(_WIN32)
#include <windows.h>
#elif defined (WEBRTC_ANDROID)
#include <android/log.h>
#include <string>
#elif defined(WEBRTC_LINUX)
#include <string.h>
#include <time.h>
#include <sys/time.h>
#include <stdlib.h>
#elif defined(WEBRTC_MAC_INTEL)
#import <Foundation/Foundation.h>
#endif
// Choose how to log
//#define VIE_LOG_TO_FILE
#define VIE_LOG_TO_STDOUT
// Choose one way to test error
#define VIE_ASSERT_ERROR
#define VIE_LOG_FILE_NAME "ViEAutotestLog.txt"
#undef RGB
#define RGB(r, g, b) ((r) | ((g) << 8) | ((b) << 16))
// Default values for custom call
#define DEFAULT_SEND_IP "127.0.0.1"
#define DEFAULT_VIDEO_PORT 9000
#define DEFAULT_VIDEO_CODEC "vp8"
#define DEFAULT_VIDEO_CODEC_WIDTH 352
#define DEFAULT_VIDEO_CODEC_HEIGHT 288
#define DEFAULT_AUDIO_PORT 8000
#define DEFAULT_AUDIO_CODEC "isac"
enum
{
KAutoTestSleepTimeMs = 5000
};
struct AutoTestSize
{
unsigned int width;
unsigned int height;
AutoTestSize() :
width(0),
height(0)
{}
AutoTestSize(unsigned int iWidth, unsigned int iHeight) :
width(iWidth),
height(iHeight)
{}
};
struct AutoTestOrigin
{
unsigned int x;
unsigned int y;
AutoTestOrigin() :
x(0),
y(0)
{}
AutoTestOrigin(unsigned int iX, unsigned int iY) :
x(iX),
y(iY)
{}
};
struct AutoTestRect
{
AutoTestSize size;
AutoTestOrigin origin;
AutoTestRect() :
size(),
origin()
{}
AutoTestRect(unsigned int iX, unsigned int iY, unsigned int iWidth,
unsigned int iHeight) :
size(iWidth, iHeight),
origin(iX, iY)
{}
void Copy(AutoTestRect iRect)
{
origin.x = iRect.origin.x;
origin.y = iRect.origin.y;
size.width = iRect.size.width;
size.height = iRect.size.height;
}
};
// ============================================
class ViETest
{
protected:
static FILE* _logFile;
enum
{
KMaxLogSize = 512
};
static char* _logStr;
public:
static int Init()
{
#ifdef VIE_LOG_TO_FILE
_logFile = fopen(VIE_LOG_FILE_NAME, "w+t");
#else
_logFile = NULL;
#endif
_logStr = new char[KMaxLogSize];
memset(_logStr, 0, KMaxLogSize);
return 0;
}
static int Terminate()
{
if (_logFile)
{
fclose(_logFile);
_logFile = NULL;
}
if (_logStr)
{
delete[] _logStr;
_logStr = NULL;
}
return 0;
}
static void Log(char* fmt, ...)
{
va_list va;
va_start(va, fmt);
memset(_logStr, 0, KMaxLogSize);
vsprintf(_logStr, fmt, va);
va_end(va);
#ifdef VIE_LOG_TO_FILE
if (_logFile)
{
fwrite(_logStr, 1, strlen(_logStr), _logFile);
fwrite("\n", 1, 1, _logFile);
fflush(_logFile);
}
#endif
#ifdef VIE_LOG_TO_STDOUT
#if ANDROID
__android_log_write(ANDROID_LOG_DEBUG, "*WebRTCN*", _logStr);
#else
printf("%s\n", _logStr);
#endif
#endif
}
static int TestError(bool expr)
{
if (!expr)
{
#ifdef VIE_ASSERT_ERROR
assert(expr);
#endif
return 1;
}
return 0;
}
static int TestError(bool expr, char* fmt, ...)
{
if (!expr)
{
va_list va;
va_start(va, fmt);
memset(_logStr, 0, KMaxLogSize);
vsprintf(_logStr, fmt, va);
#ifdef ANDROID
__android_log_write(ANDROID_LOG_ERROR, "*WebRTCN*", _logStr);
#endif
Log(_logStr);
va_end(va);
#ifdef VIE_ASSERT_ERROR
assert(false);
#endif
return 1;
}
return 0;
}
};
// milliseconds
#if defined(_WIN32)
#define AutoTestSleep ::Sleep
#elif defined(WEBRTC_MAC_INTEL)
#define AutoTestSleep(x) usleep((x) * 1000)
#elif defined(WEBRTC_LINUX)
namespace {
void Sleep(unsigned long x) {
timespec t;
t.tv_sec = x/1000;
t.tv_nsec = (x-(x/1000)*1000)*1000000;
nanosleep(&t,NULL);
}
}
#define AutoTestSleep ::Sleep
#endif
#ifdef WEBRTC_ANDROID
namespace {
void Sleep(unsigned long x) {
timespec t;
t.tv_sec = x/1000;
t.tv_nsec = (x-(x/1000)*1000)*1000000;
nanosleep(&t,NULL);
}
}
#define AutoTestSleep ::Sleep
#define VIE_TEST_FILES_ROOT "/sdcard/vie_auto_test/"
#else
#define VIE_TEST_FILES_ROOT "/tmp/"
#endif
namespace
{
FILE* OpenTestFile(const char* fileName)
{
    char filePath[256];
    sprintf(filePath, "%s%s", VIE_TEST_FILES_ROOT, fileName);
    return fopen(filePath, "rb");
}
}
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
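The macros and the ViETest helper above are used in roughly this pattern throughout the autotests. A sketch only: the file name passed to OpenTestFile is illustrative.

int numberOfErrors = 0;
ViETest::Init();                                  // opens the log (file and/or stdout)
ViETest::Log("Running for %d ms", KAutoTestSleepTimeMs);
FILE* testFile = OpenTestFile("testFile.yuv");    // resolved under VIE_TEST_FILES_ROOT
numberOfErrors += ViETest::TestError(testFile != NULL,
                                     "ERROR: %s at line %d",
                                     __FUNCTION__, __LINE__);
AutoTestSleep(KAutoTestSleepTimeMs);
if (testFile)
    fclose(testFile);
ViETest::Terminate();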

View File

@ -1,44 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
#include "vie_autotest_window_manager_interface.h"
#include <X11/Xlib.h>
#include <X11/Xutil.h>
// Forward declaration
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
public:
ViEAutoTestWindowManager();
~ViEAutoTestWindowManager();
virtual void* GetWindow1();
virtual void* GetWindow2();
virtual int TerminateWindows();
virtual int CreateWindows(AutoTestRect window1Size,
AutoTestRect window2Size, void* window1Title,
void* window2Title);
virtual bool SetTopmostWindow();
private:
int ViECreateWindow(Window *outWindow, Display **outDisplay, int xpos,
int ypos, int width, int height, char* title);
int ViEDestroyWindow(Window *window, Display *display);
Window _hwnd1;
Window _hwnd2;
Display* _hdsp1;
Display* _hdsp2;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_

View File

@ -1,63 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(CARBON_RENDERING)
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
#include "vie_autotest_window_manager_interface.h"
// #define HIVIEWREF_MODE 1
#include <Carbon/Carbon.h>
#import <Cocoa/Cocoa.h>
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
public:
ViEAutoTestWindowManager();
~ViEAutoTestWindowManager();
virtual void* GetWindow1();
virtual void* GetWindow2();
virtual int CreateWindows(AutoTestRect window1Size,
AutoTestRect window2Size, char* window1Title,
char* window2Title);
virtual int TerminateWindows();
virtual bool SetTopmostWindow();
// event handler static methods
static pascal OSStatus HandleWindowEvent (EventHandlerCallRef nextHandler,
EventRef theEvent, void* userData);
static pascal OSStatus HandleHIViewEvent (EventHandlerCallRef nextHandler,
EventRef theEvent, void* userData);
private:
WindowRef* _carbonWindow1;
WindowRef* _carbonWindow2;
HIViewRef* _hiView1;
HIViewRef* _hiView2;
EventHandlerRef _carbonWindow1EventHandlerRef;
EventHandlerRef _carbonWindow2EventHandlerRef;
EventHandlerRef _carbonHIView1EventHandlerRef;
EventHandlerRef _carbonHIView2EventHandlerRef;
};
@interface AutoTestClass : NSObject
{
}
-(void)autoTestWithArg:(NSString*)answerFile;
@end
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_CARBON_H_
#endif // CARBON_RENDERING

View File

@ -1,52 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#include "vie_autotest_window_manager_interface.h"
#define MAC_COCOA_USE_NSRUNLOOP 1
@class CocoaRenderView;
#import <Cocoa/Cocoa.h>
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
public:
ViEAutoTestWindowManager();
~ViEAutoTestWindowManager();
virtual void* GetWindow1();
virtual void* GetWindow2();
virtual int CreateWindows(AutoTestRect window1Size,
AutoTestRect window2Size, void* window1Title,
void* window2Title);
virtual int TerminateWindows();
virtual bool SetTopmostWindow();
private:
CocoaRenderView* _cocoaRenderView1;
CocoaRenderView* _cocoaRenderView2;
};
@interface AutoTestClass : NSObject
{
}
-(void)autoTestWithArg:(NSString*)answerFile;
@end
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#endif // COCOA_RENDERING

View File

@ -1,37 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
#include <string>
using namespace std;
class ViEAutoTestMain
{
public:
ViEAutoTestMain();
bool BeginOSIndependentTesting();
bool GetAnswer(int index, string& answer);
int GetClassTestSelection();
bool GetNextAnswer(string& answer);
bool IsUsingAnswerFile();
bool UseAnswerFile(const char* fileName);
private:
string _answers[1024];
int _answersCount;
int _answersIndex;
bool _useAnswerFile;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
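A hedged sketch of how an answer file can drive the otherwise interactive prompts; the main() wrapper and argv handling are illustrative.

int main(int argc, char** argv) {
  ViEAutoTestMain autoTest;
  if (argc > 1) {
    autoTest.UseAnswerFile(argv[1]);     // pre-recorded answers for the prompts
  }
  autoTest.BeginOSIndependentTesting();  // runs the test selection loop
  return 0;
}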

View File

@ -1,32 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_autotest_window_manager_interface.h
*/
#include "vie_autotest_defines.h"
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
class ViEAutoTestWindowManagerInterface
{
public:
virtual int CreateWindows(AutoTestRect window1Size,
AutoTestRect window2Size, void* window1Title,
void* window2Title) = 0;
virtual int TerminateWindows() = 0;
virtual void* GetWindow1() = 0;
virtual void* GetWindow2() = 0;
virtual bool SetTopmostWindow() = 0;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_

View File

@ -1,64 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
#include "vie_autotest_window_manager_interface.h"
#include "engine_configurations.h"
#include <windows.h>
#define TITLE_LENGTH 1024
// Forward declaration
namespace webrtc {
class ThreadWrapper;
class CriticalSectionWrapper;
}
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
public:
ViEAutoTestWindowManager();
~ViEAutoTestWindowManager();
virtual void* GetWindow1();
virtual void* GetWindow2();
virtual int CreateWindows(AutoTestRect window1Size,
AutoTestRect window2Size, void* window1Title,
void* window2Title);
virtual int TerminateWindows();
virtual bool SetTopmostWindow();
protected:
static bool EventProcess(void* obj);
bool EventLoop();
private:
int ViECreateWindow(HWND &hwndMain, int xPos, int yPos, int width,
int height, TCHAR* className);
int ViEDestroyWindow(HWND& hwnd);
void* _window1;
void* _window2;
bool _terminate;
webrtc::ThreadWrapper& _eventThread;
webrtc::CriticalSectionWrapper& _crit;
HWND _hwndMain;
HWND _hwnd1;
HWND _hwnd2;
AutoTestRect _hwnd1Size;
AutoTestRect _hwnd2Size;
TCHAR _hwnd1Title[TITLE_LENGTH];
TCHAR _hwnd2Title[TITLE_LENGTH];
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_

Some files were not shown because too many files have changed in this diff.