From c94abd313ed8e64b43777bc2e298afc602472ac8 Mon Sep 17 00:00:00 2001
From: "xians@webrtc.org" <xians@webrtc.org>
Date: Fri, 25 Oct 2013 18:15:09 +0000
Subject: [PATCH] Use clang-format -style=chromium to correct the format in
 webrtc/modules/interface/module_common_types.h

R=andrew@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2979004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5036 4adac7df-926f-26a2-2b94-8c16560cd09d
---
 .../modules/interface/module_common_types.h   | 1217 ++++++++---------
 1 file changed, 563 insertions(+), 654 deletions(-)

diff --git a/webrtc/modules/interface/module_common_types.h b/webrtc/modules/interface/module_common_types.h
index 0d5e44be8..67c6cb40d 100644
--- a/webrtc/modules/interface/module_common_types.h
+++ b/webrtc/modules/interface/module_common_types.h
@@ -12,7 +12,7 @@
 #define MODULE_COMMON_TYPES_H
 
 #include <assert.h>
-#include <string.h> // memcpy
+#include <string.h>  // memcpy
 
 #include <algorithm>
 
@@ -21,282 +21,264 @@
 #include "webrtc/typedefs.h"
 
 #ifdef _WIN32
- #pragma warning(disable:4351) // remove warning "new behavior: elements of array
-                               // 'array' will be default initialized"
+// Remove warning "new behavior: elements of array will be default initialized".
+#pragma warning(disable : 4351)
 #endif
 
 namespace webrtc {
 
-struct RTPHeaderExtension
-{
-    int32_t transmissionTimeOffset;
-    uint32_t absoluteSendTime;
+struct RTPHeaderExtension {
+  int32_t transmissionTimeOffset;
+  uint32_t absoluteSendTime;
 };
 
-struct RTPHeader
-{
-    bool markerBit;
-    uint8_t payloadType;
-    uint16_t sequenceNumber;
-    uint32_t timestamp;
-    uint32_t ssrc;
-    uint8_t numCSRCs;
-    uint32_t arrOfCSRCs[kRtpCsrcSize];
-    uint8_t paddingLength;
-    uint16_t headerLength;
-    int payload_type_frequency;
-    RTPHeaderExtension extension;
+struct RTPHeader {
+  bool markerBit;
+  uint8_t payloadType;
+  uint16_t sequenceNumber;
+  uint32_t timestamp;
+  uint32_t ssrc;
+  uint8_t numCSRCs;
+  uint32_t arrOfCSRCs[kRtpCsrcSize];
+  uint8_t paddingLength;
+  uint16_t headerLength;
+  int payload_type_frequency;
+  RTPHeaderExtension extension;
 };
 
-struct RTPAudioHeader
-{
-    uint8_t numEnergy;                  // number of valid entries in arrOfEnergy
-    uint8_t arrOfEnergy[kRtpCsrcSize];  // one energy byte (0-9) per channel
-    bool isCNG;                         // is this CNG
-    uint8_t channel;                    // number of channels 2 = stereo
+struct RTPAudioHeader {
+  uint8_t numEnergy;                  // number of valid entries in arrOfEnergy
+  uint8_t arrOfEnergy[kRtpCsrcSize];  // one energy byte (0-9) per channel
+  bool isCNG;                         // is this CNG
+  uint8_t channel;                    // number of channels 2 = stereo
 };
 
-enum {kNoPictureId = -1};
-enum {kNoTl0PicIdx = -1};
-enum {kNoTemporalIdx = -1};
-enum {kNoKeyIdx = -1};
-enum {kNoSimulcastIdx = 0};
-
-struct RTPVideoHeaderVP8
-{
-    void InitRTPVideoHeaderVP8()
-    {
-        nonReference = false;
-        pictureId = kNoPictureId;
-        tl0PicIdx = kNoTl0PicIdx;
-        temporalIdx = kNoTemporalIdx;
-        layerSync = false;
-        keyIdx = kNoKeyIdx;
-        partitionId = 0;
-        beginningOfPartition = false;
-    }
-
-    bool nonReference;          // Frame is discardable.
-    int16_t pictureId;          // Picture ID index, 15 bits;
-                                // kNoPictureId if PictureID does not exist.
-    int16_t tl0PicIdx;          // TL0PIC_IDX, 8 bits;
-                                // kNoTl0PicIdx means no value provided.
-    int8_t temporalIdx;         // Temporal layer index, or kNoTemporalIdx.
-    bool layerSync;             // This frame is a layer sync frame.
-                                // Disabled if temporalIdx == kNoTemporalIdx.
-    int keyIdx;                 // 5 bits; kNoKeyIdx means not used.
-    int partitionId;            // VP8 partition ID
-    bool beginningOfPartition;  // True if this packet is the first
-                                // in a VP8 partition. Otherwise false
+enum {
+  kNoPictureId = -1
 };
-union RTPVideoTypeHeader
-{
-    RTPVideoHeaderVP8 VP8;
+enum {
+  kNoTl0PicIdx = -1
+};
+enum {
+  kNoTemporalIdx = -1
+};
+enum {
+  kNoKeyIdx = -1
+};
+enum {
+  kNoSimulcastIdx = 0
 };
-enum RtpVideoCodecTypes
-{
-    kRtpVideoNone,
-    kRtpVideoGeneric,
-    kRtpVideoVp8
-};
-struct RTPVideoHeader
-{
-    uint16_t width;  // size
-    uint16_t height;
+struct RTPVideoHeaderVP8 {
+  void InitRTPVideoHeaderVP8() {
+    nonReference = false;
+    pictureId = kNoPictureId;
+    tl0PicIdx = kNoTl0PicIdx;
+    temporalIdx = kNoTemporalIdx;
+    layerSync = false;
+    keyIdx = kNoKeyIdx;
+    partitionId = 0;
+    beginningOfPartition = false;
+  }
 
-    bool isFirstPacket;    // first packet in frame
-    uint8_t simulcastIdx;  // Index if the simulcast encoder creating
-                           // this frame, 0 if not using simulcast.
-    RtpVideoCodecTypes codec;
-    RTPVideoTypeHeader codecHeader;
+  bool nonReference;          // Frame is discardable.
+  int16_t pictureId;          // Picture ID index, 15 bits;
+                              // kNoPictureId if PictureID does not exist.
+  int16_t tl0PicIdx;          // TL0PIC_IDX, 8 bits;
+                              // kNoTl0PicIdx means no value provided.
+  int8_t temporalIdx;         // Temporal layer index, or kNoTemporalIdx.
+  bool layerSync;             // This frame is a layer sync frame.
+                              // Disabled if temporalIdx == kNoTemporalIdx.
+  int keyIdx;                 // 5 bits; kNoKeyIdx means not used.
+  int partitionId;            // VP8 partition ID
+  bool beginningOfPartition;  // True if this packet is the first
+                              // in a VP8 partition. Otherwise false
 };
-union RTPTypeHeader
-{
-    RTPAudioHeader Audio;
-    RTPVideoHeader Video;
+union RTPVideoTypeHeader {
+  RTPVideoHeaderVP8 VP8;
 };
-struct WebRtcRTPHeader
-{
-    RTPHeader header;
-    FrameType frameType;
-    RTPTypeHeader type;
+enum RtpVideoCodecTypes {
+  kRtpVideoNone,
+  kRtpVideoGeneric,
+  kRtpVideoVp8
+};
+struct RTPVideoHeader {
+  uint16_t width;  // size
+  uint16_t height;
+
+  bool isFirstPacket;    // first packet in frame
+  uint8_t simulcastIdx;  // Index if the simulcast encoder creating
+                         // this frame, 0 if not using simulcast.
+ RtpVideoCodecTypes codec; + RTPVideoTypeHeader codecHeader; +}; +union RTPTypeHeader { + RTPAudioHeader Audio; + RTPVideoHeader Video; }; -class RTPFragmentationHeader -{ -public: - RTPFragmentationHeader() : - fragmentationVectorSize(0), +struct WebRtcRTPHeader { + RTPHeader header; + FrameType frameType; + RTPTypeHeader type; +}; + +class RTPFragmentationHeader { + public: + RTPFragmentationHeader() + : fragmentationVectorSize(0), fragmentationOffset(NULL), fragmentationLength(NULL), fragmentationTimeDiff(NULL), - fragmentationPlType(NULL) - {}; + fragmentationPlType(NULL) {}; - ~RTPFragmentationHeader() - { - delete [] fragmentationOffset; - delete [] fragmentationLength; - delete [] fragmentationTimeDiff; - delete [] fragmentationPlType; + ~RTPFragmentationHeader() { + delete[] fragmentationOffset; + delete[] fragmentationLength; + delete[] fragmentationTimeDiff; + delete[] fragmentationPlType; + } + + void CopyFrom(const RTPFragmentationHeader& src) { + if (this == &src) { + return; } - void CopyFrom(const RTPFragmentationHeader& src) - { - if(this == &src) - { - return; + if (src.fragmentationVectorSize != fragmentationVectorSize) { + // new size of vectors + + // delete old + delete[] fragmentationOffset; + fragmentationOffset = NULL; + delete[] fragmentationLength; + fragmentationLength = NULL; + delete[] fragmentationTimeDiff; + fragmentationTimeDiff = NULL; + delete[] fragmentationPlType; + fragmentationPlType = NULL; + + if (src.fragmentationVectorSize > 0) { + // allocate new + if (src.fragmentationOffset) { + fragmentationOffset = new uint32_t[src.fragmentationVectorSize]; } - - if(src.fragmentationVectorSize != fragmentationVectorSize) - { - // new size of vectors - - // delete old - delete [] fragmentationOffset; - fragmentationOffset = NULL; - delete [] fragmentationLength; - fragmentationLength = NULL; - delete [] fragmentationTimeDiff; - fragmentationTimeDiff = NULL; - delete [] fragmentationPlType; - fragmentationPlType = NULL; - - if(src.fragmentationVectorSize > 0) - { - // allocate new - if(src.fragmentationOffset) - { - fragmentationOffset = new uint32_t[src.fragmentationVectorSize]; - } - if(src.fragmentationLength) - { - fragmentationLength = new uint32_t[src.fragmentationVectorSize]; - } - if(src.fragmentationTimeDiff) - { - fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize]; - } - if(src.fragmentationPlType) - { - fragmentationPlType = new uint8_t[src.fragmentationVectorSize]; - } - } - // set new size - fragmentationVectorSize = src.fragmentationVectorSize; + if (src.fragmentationLength) { + fragmentationLength = new uint32_t[src.fragmentationVectorSize]; } - - if(src.fragmentationVectorSize > 0) - { - // copy values - if(src.fragmentationOffset) - { - memcpy(fragmentationOffset, src.fragmentationOffset, - src.fragmentationVectorSize * sizeof(uint32_t)); - } - if(src.fragmentationLength) - { - memcpy(fragmentationLength, src.fragmentationLength, - src.fragmentationVectorSize * sizeof(uint32_t)); - } - if(src.fragmentationTimeDiff) - { - memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff, - src.fragmentationVectorSize * sizeof(uint16_t)); - } - if(src.fragmentationPlType) - { - memcpy(fragmentationPlType, src.fragmentationPlType, - src.fragmentationVectorSize * sizeof(uint8_t)); - } + if (src.fragmentationTimeDiff) { + fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize]; } + if (src.fragmentationPlType) { + fragmentationPlType = new uint8_t[src.fragmentationVectorSize]; + } + } + // set new size + fragmentationVectorSize = 
src.fragmentationVectorSize; } - void VerifyAndAllocateFragmentationHeader(const uint16_t size) - { - if(fragmentationVectorSize < size) - { - uint16_t oldVectorSize = fragmentationVectorSize; - { - // offset - uint32_t* oldOffsets = fragmentationOffset; - fragmentationOffset = new uint32_t[size]; - memset(fragmentationOffset+oldVectorSize, 0, - sizeof(uint32_t)*(size-oldVectorSize)); - // copy old values - memcpy(fragmentationOffset,oldOffsets, sizeof(uint32_t) * oldVectorSize); - delete[] oldOffsets; - } - // length - { - uint32_t* oldLengths = fragmentationLength; - fragmentationLength = new uint32_t[size]; - memset(fragmentationLength+oldVectorSize, 0, - sizeof(uint32_t) * (size- oldVectorSize)); - memcpy(fragmentationLength, oldLengths, - sizeof(uint32_t) * oldVectorSize); - delete[] oldLengths; - } - // time diff - { - uint16_t* oldTimeDiffs = fragmentationTimeDiff; - fragmentationTimeDiff = new uint16_t[size]; - memset(fragmentationTimeDiff+oldVectorSize, 0, - sizeof(uint16_t) * (size- oldVectorSize)); - memcpy(fragmentationTimeDiff, oldTimeDiffs, - sizeof(uint16_t) * oldVectorSize); - delete[] oldTimeDiffs; - } - // payload type - { - uint8_t* oldTimePlTypes = fragmentationPlType; - fragmentationPlType = new uint8_t[size]; - memset(fragmentationPlType+oldVectorSize, 0, - sizeof(uint8_t) * (size- oldVectorSize)); - memcpy(fragmentationPlType, oldTimePlTypes, - sizeof(uint8_t) * oldVectorSize); - delete[] oldTimePlTypes; - } - fragmentationVectorSize = size; - } + if (src.fragmentationVectorSize > 0) { + // copy values + if (src.fragmentationOffset) { + memcpy(fragmentationOffset, src.fragmentationOffset, + src.fragmentationVectorSize * sizeof(uint32_t)); + } + if (src.fragmentationLength) { + memcpy(fragmentationLength, src.fragmentationLength, + src.fragmentationVectorSize * sizeof(uint32_t)); + } + if (src.fragmentationTimeDiff) { + memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff, + src.fragmentationVectorSize * sizeof(uint16_t)); + } + if (src.fragmentationPlType) { + memcpy(fragmentationPlType, src.fragmentationPlType, + src.fragmentationVectorSize * sizeof(uint8_t)); + } } + } - uint16_t fragmentationVectorSize; // Number of fragmentations - uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm. 
- uint32_t* fragmentationLength; // Data size for each fragmentation - uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for - // each fragmentation - uint8_t* fragmentationPlType; // Payload type of each fragmentation + void VerifyAndAllocateFragmentationHeader(const uint16_t size) { + if (fragmentationVectorSize < size) { + uint16_t oldVectorSize = fragmentationVectorSize; + { + // offset + uint32_t* oldOffsets = fragmentationOffset; + fragmentationOffset = new uint32_t[size]; + memset(fragmentationOffset + oldVectorSize, 0, + sizeof(uint32_t) * (size - oldVectorSize)); + // copy old values + memcpy(fragmentationOffset, oldOffsets, + sizeof(uint32_t) * oldVectorSize); + delete[] oldOffsets; + } + // length + { + uint32_t* oldLengths = fragmentationLength; + fragmentationLength = new uint32_t[size]; + memset(fragmentationLength + oldVectorSize, 0, + sizeof(uint32_t) * (size - oldVectorSize)); + memcpy(fragmentationLength, oldLengths, + sizeof(uint32_t) * oldVectorSize); + delete[] oldLengths; + } + // time diff + { + uint16_t* oldTimeDiffs = fragmentationTimeDiff; + fragmentationTimeDiff = new uint16_t[size]; + memset(fragmentationTimeDiff + oldVectorSize, 0, + sizeof(uint16_t) * (size - oldVectorSize)); + memcpy(fragmentationTimeDiff, oldTimeDiffs, + sizeof(uint16_t) * oldVectorSize); + delete[] oldTimeDiffs; + } + // payload type + { + uint8_t* oldTimePlTypes = fragmentationPlType; + fragmentationPlType = new uint8_t[size]; + memset(fragmentationPlType + oldVectorSize, 0, + sizeof(uint8_t) * (size - oldVectorSize)); + memcpy(fragmentationPlType, oldTimePlTypes, + sizeof(uint8_t) * oldVectorSize); + delete[] oldTimePlTypes; + } + fragmentationVectorSize = size; + } + } -private: - DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader); + uint16_t fragmentationVectorSize; // Number of fragmentations + uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm. + uint32_t* fragmentationLength; // Data size for each fragmentation + uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for + // each fragmentation + uint8_t* fragmentationPlType; // Payload type of each fragmentation + + private: + DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader); }; -struct RTCPVoIPMetric -{ - // RFC 3611 4.7 - uint8_t lossRate; - uint8_t discardRate; - uint8_t burstDensity; - uint8_t gapDensity; - uint16_t burstDuration; - uint16_t gapDuration; - uint16_t roundTripDelay; - uint16_t endSystemDelay; - uint8_t signalLevel; - uint8_t noiseLevel; - uint8_t RERL; - uint8_t Gmin; - uint8_t Rfactor; - uint8_t extRfactor; - uint8_t MOSLQ; - uint8_t MOSCQ; - uint8_t RXconfig; - uint16_t JBnominal; - uint16_t JBmax; - uint16_t JBabsMax; +struct RTCPVoIPMetric { + // RFC 3611 4.7 + uint8_t lossRate; + uint8_t discardRate; + uint8_t burstDensity; + uint8_t gapDensity; + uint16_t burstDuration; + uint16_t gapDuration; + uint16_t roundTripDelay; + uint16_t endSystemDelay; + uint8_t signalLevel; + uint8_t noiseLevel; + uint8_t RERL; + uint8_t Gmin; + uint8_t Rfactor; + uint8_t extRfactor; + uint8_t MOSLQ; + uint8_t MOSCQ; + uint8_t RXconfig; + uint16_t JBnominal; + uint16_t JBmax; + uint16_t JBabsMax; }; // Types for the FEC packet masks. The type |kFecMaskRandom| is based on a @@ -327,11 +309,10 @@ class CallStatsObserver { }; // class describing a complete, or parts of an encoded frame. 
-class EncodedVideoData -{ -public: - EncodedVideoData() : - payloadType(0), +class EncodedVideoData { + public: + EncodedVideoData() + : payloadType(0), timeStamp(0), renderTimeMs(0), encodedWidth(0), @@ -343,91 +324,79 @@ public: bufferSize(0), fragmentationHeader(), frameType(kVideoFrameDelta), - codec(kVideoCodecUnknown) - {}; + codec(kVideoCodecUnknown) {}; - EncodedVideoData(const EncodedVideoData& data) - { - payloadType = data.payloadType; - timeStamp = data.timeStamp; - renderTimeMs = data.renderTimeMs; - encodedWidth = data.encodedWidth; - encodedHeight = data.encodedHeight; - completeFrame = data.completeFrame; - missingFrame = data.missingFrame; - payloadSize = data.payloadSize; - fragmentationHeader.CopyFrom(data.fragmentationHeader); - frameType = data.frameType; - codec = data.codec; - if (data.payloadSize > 0) - { - payloadData = new uint8_t[data.payloadSize]; - memcpy(payloadData, data.payloadData, data.payloadSize); - } - else - { - payloadData = NULL; - } + EncodedVideoData(const EncodedVideoData& data) { + payloadType = data.payloadType; + timeStamp = data.timeStamp; + renderTimeMs = data.renderTimeMs; + encodedWidth = data.encodedWidth; + encodedHeight = data.encodedHeight; + completeFrame = data.completeFrame; + missingFrame = data.missingFrame; + payloadSize = data.payloadSize; + fragmentationHeader.CopyFrom(data.fragmentationHeader); + frameType = data.frameType; + codec = data.codec; + if (data.payloadSize > 0) { + payloadData = new uint8_t[data.payloadSize]; + memcpy(payloadData, data.payloadData, data.payloadSize); + } else { + payloadData = NULL; } + } + ~EncodedVideoData() { + delete[] payloadData; + }; - ~EncodedVideoData() - { - delete [] payloadData; - }; - - EncodedVideoData& operator=(const EncodedVideoData& data) - { - if (this == &data) - { - return *this; - } - payloadType = data.payloadType; - timeStamp = data.timeStamp; - renderTimeMs = data.renderTimeMs; - encodedWidth = data.encodedWidth; - encodedHeight = data.encodedHeight; - completeFrame = data.completeFrame; - missingFrame = data.missingFrame; - payloadSize = data.payloadSize; - fragmentationHeader.CopyFrom(data.fragmentationHeader); - frameType = data.frameType; - codec = data.codec; - if (data.payloadSize > 0) - { - delete [] payloadData; - payloadData = new uint8_t[data.payloadSize]; - memcpy(payloadData, data.payloadData, data.payloadSize); - bufferSize = data.payloadSize; - } - return *this; - }; - void VerifyAndAllocate( const uint32_t size) - { - if (bufferSize < size) - { - uint8_t* oldPayload = payloadData; - payloadData = new uint8_t[size]; - memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize); - - bufferSize = size; - delete[] oldPayload; - } + EncodedVideoData& operator=(const EncodedVideoData& data) { + if (this == &data) { + return *this; } + payloadType = data.payloadType; + timeStamp = data.timeStamp; + renderTimeMs = data.renderTimeMs; + encodedWidth = data.encodedWidth; + encodedHeight = data.encodedHeight; + completeFrame = data.completeFrame; + missingFrame = data.missingFrame; + payloadSize = data.payloadSize; + fragmentationHeader.CopyFrom(data.fragmentationHeader); + frameType = data.frameType; + codec = data.codec; + if (data.payloadSize > 0) { + delete[] payloadData; + payloadData = new uint8_t[data.payloadSize]; + memcpy(payloadData, data.payloadData, data.payloadSize); + bufferSize = data.payloadSize; + } + return *this; + }; + void VerifyAndAllocate(const uint32_t size) { + if (bufferSize < size) { + uint8_t* oldPayload = payloadData; + payloadData = new 
uint8_t[size]; + memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize); - uint8_t payloadType; - uint32_t timeStamp; - int64_t renderTimeMs; - uint32_t encodedWidth; - uint32_t encodedHeight; - bool completeFrame; - bool missingFrame; - uint8_t* payloadData; - uint32_t payloadSize; - uint32_t bufferSize; - RTPFragmentationHeader fragmentationHeader; - FrameType frameType; - VideoCodecType codec; + bufferSize = size; + delete[] oldPayload; + } + } + + uint8_t payloadType; + uint32_t timeStamp; + int64_t renderTimeMs; + uint32_t encodedWidth; + uint32_t encodedHeight; + bool completeFrame; + bool missingFrame; + uint8_t* payloadData; + uint32_t payloadSize; + uint32_t bufferSize; + RTPFragmentationHeader fragmentationHeader; + FrameType frameType; + VideoCodecType codec; }; struct VideoContentMetrics { @@ -435,8 +404,7 @@ struct VideoContentMetrics { : motion_magnitude(0.0f), spatial_pred_err(0.0f), spatial_pred_err_h(0.0f), - spatial_pred_err_v(0.0f) { - } + spatial_pred_err_v(0.0f) {} void Reset() { motion_magnitude = 0.0f; @@ -459,263 +427,229 @@ struct VideoContentMetrics { * * *************************************************/ -class VideoFrame -{ -public: - VideoFrame(); - ~VideoFrame(); - /** - * Verifies that current allocated buffer size is larger than or equal to the input size. - * If the current buffer size is smaller, a new allocation is made and the old buffer data - * is copied to the new buffer. - * Buffer size is updated to minimumSize. - */ - int32_t VerifyAndAllocate(const uint32_t minimumSize); - /** - * Update length of data buffer in frame. Function verifies that new length is less or - * equal to allocated size. - */ - int32_t SetLength(const uint32_t newLength); - /* - * Swap buffer and size data - */ - int32_t Swap(uint8_t*& newMemory, - uint32_t& newLength, - uint32_t& newSize); - /* - * Swap buffer and size data - */ - int32_t SwapFrame(VideoFrame& videoFrame); - /** - * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length - * is allocated. - */ - int32_t CopyFrame(const VideoFrame& videoFrame); - /** - * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length - * is allocated. - */ - int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer); - /** - * Delete VideoFrame and resets members to zero - */ - void Free(); - /** - * Set frame timestamp (90kHz) - */ - void SetTimeStamp(const uint32_t timeStamp) {_timeStamp = timeStamp;} - /** - * Get pointer to frame buffer - */ - uint8_t* Buffer() const {return _buffer;} +class VideoFrame { + public: + VideoFrame(); + ~VideoFrame(); + /** + * Verifies that current allocated buffer size is larger than or equal to the + * input size. + * If the current buffer size is smaller, a new allocation is made and the old + * buffer data + * is copied to the new buffer. + * Buffer size is updated to minimumSize. + */ + int32_t VerifyAndAllocate(const uint32_t minimumSize); + /** + * Update length of data buffer in frame. Function verifies that new length + * is less or + * equal to allocated size. + */ + int32_t SetLength(const uint32_t newLength); + /* + * Swap buffer and size data + */ + int32_t Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize); + /* + * Swap buffer and size data + */ + int32_t SwapFrame(VideoFrame& videoFrame); + /** + * Copy buffer: If newLength is bigger than allocated size, a new buffer of + * size length + * is allocated. 
+ */ + int32_t CopyFrame(const VideoFrame& videoFrame); + /** + * Copy buffer: If newLength is bigger than allocated size, a new buffer of + * size length + * is allocated. + */ + int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer); + /** + * Delete VideoFrame and resets members to zero + */ + void Free(); + /** + * Set frame timestamp (90kHz) + */ + void SetTimeStamp(const uint32_t timeStamp) { _timeStamp = timeStamp; } + /** + * Get pointer to frame buffer + */ + uint8_t* Buffer() const { return _buffer; } - uint8_t*& Buffer() {return _buffer;} + uint8_t*& Buffer() { return _buffer; } - /** - * Get allocated buffer size - */ - uint32_t Size() const {return _bufferSize;} - /** - * Get frame length - */ - uint32_t Length() const {return _bufferLength;} - /** - * Get frame timestamp (90kHz) - */ - uint32_t TimeStamp() const {return _timeStamp;} - /** - * Get frame width - */ - uint32_t Width() const {return _width;} - /** - * Get frame height - */ - uint32_t Height() const {return _height;} - /** - * Set frame width - */ - void SetWidth(const uint32_t width) {_width = width;} - /** - * Set frame height - */ - void SetHeight(const uint32_t height) {_height = height;} - /** - * Set render time in miliseconds - */ - void SetRenderTime(const int64_t renderTimeMs) {_renderTimeMs = renderTimeMs;} - /** - * Get render time in miliseconds - */ - int64_t RenderTimeMs() const {return _renderTimeMs;} + /** + * Get allocated buffer size + */ + uint32_t Size() const { return _bufferSize; } + /** + * Get frame length + */ + uint32_t Length() const { return _bufferLength; } + /** + * Get frame timestamp (90kHz) + */ + uint32_t TimeStamp() const { return _timeStamp; } + /** + * Get frame width + */ + uint32_t Width() const { return _width; } + /** + * Get frame height + */ + uint32_t Height() const { return _height; } + /** + * Set frame width + */ + void SetWidth(const uint32_t width) { _width = width; } + /** + * Set frame height + */ + void SetHeight(const uint32_t height) { _height = height; } + /** + * Set render time in miliseconds + */ + void SetRenderTime(const int64_t renderTimeMs) { + _renderTimeMs = renderTimeMs; + } + /** + * Get render time in miliseconds + */ + int64_t RenderTimeMs() const { return _renderTimeMs; } -private: - void Set(uint8_t* buffer, - uint32_t size, - uint32_t length, - uint32_t timeStamp); + private: + void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp); - uint8_t* _buffer; // Pointer to frame buffer - uint32_t _bufferSize; // Allocated buffer size - uint32_t _bufferLength; // Length (in bytes) of buffer - uint32_t _timeStamp; // Timestamp of frame (90kHz) - uint32_t _width; - uint32_t _height; - int64_t _renderTimeMs; -}; // end of VideoFrame class declaration + uint8_t* _buffer; // Pointer to frame buffer + uint32_t _bufferSize; // Allocated buffer size + uint32_t _bufferLength; // Length (in bytes) of buffer + uint32_t _timeStamp; // Timestamp of frame (90kHz) + uint32_t _width; + uint32_t _height; + int64_t _renderTimeMs; +}; // end of VideoFrame class declaration // inline implementation of VideoFrame class: -inline -VideoFrame::VideoFrame(): - _buffer(0), - _bufferSize(0), - _bufferLength(0), - _timeStamp(0), - _width(0), - _height(0), - _renderTimeMs(0) -{ - // +inline VideoFrame::VideoFrame() + : _buffer(0), + _bufferSize(0), + _bufferLength(0), + _timeStamp(0), + _width(0), + _height(0), + _renderTimeMs(0) { + // } -inline -VideoFrame::~VideoFrame() -{ - if(_buffer) - { - delete [] _buffer; - _buffer = NULL; +inline 
VideoFrame::~VideoFrame() { + if (_buffer) { + delete[] _buffer; + _buffer = NULL; + } +} + +inline int32_t VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) { + if (minimumSize < 1) { + return -1; + } + if (minimumSize > _bufferSize) { + // create buffer of sufficient size + uint8_t* newBufferBuffer = new uint8_t[minimumSize]; + if (_buffer) { + // copy old data + memcpy(newBufferBuffer, _buffer, _bufferSize); + delete[] _buffer; + } else { + memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t)); } + _buffer = newBufferBuffer; + _bufferSize = minimumSize; + } + return 0; } +inline int32_t VideoFrame::SetLength(const uint32_t newLength) { + if (newLength > _bufferSize) { // can't accomodate new value + return -1; + } + _bufferLength = newLength; + return 0; +} -inline -int32_t -VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) -{ - if (minimumSize < 1) - { - return -1; +inline int32_t VideoFrame::SwapFrame(VideoFrame& videoFrame) { + uint32_t tmpTimeStamp = _timeStamp; + uint32_t tmpWidth = _width; + uint32_t tmpHeight = _height; + int64_t tmpRenderTime = _renderTimeMs; + + _timeStamp = videoFrame._timeStamp; + _width = videoFrame._width; + _height = videoFrame._height; + _renderTimeMs = videoFrame._renderTimeMs; + + videoFrame._timeStamp = tmpTimeStamp; + videoFrame._width = tmpWidth; + videoFrame._height = tmpHeight; + videoFrame._renderTimeMs = tmpRenderTime; + + return Swap(videoFrame._buffer, videoFrame._bufferLength, + videoFrame._bufferSize); +} + +inline int32_t VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, + uint32_t& newSize) { + uint8_t* tmpBuffer = _buffer; + uint32_t tmpLength = _bufferLength; + uint32_t tmpSize = _bufferSize; + _buffer = newMemory; + _bufferLength = newLength; + _bufferSize = newSize; + newMemory = tmpBuffer; + newLength = tmpLength; + newSize = tmpSize; + return 0; +} + +inline int32_t VideoFrame::CopyFrame(uint32_t length, + const uint8_t* sourceBuffer) { + if (length > _bufferSize) { + int32_t ret = VerifyAndAllocate(length); + if (ret < 0) { + return ret; } - if(minimumSize > _bufferSize) - { - // create buffer of sufficient size - uint8_t* newBufferBuffer = new uint8_t[minimumSize]; - if(_buffer) - { - // copy old data - memcpy(newBufferBuffer, _buffer, _bufferSize); - delete [] _buffer; - } - else - { - memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t)); - } - _buffer = newBufferBuffer; - _bufferSize = minimumSize; - } - return 0; + } + memcpy(_buffer, sourceBuffer, length); + _bufferLength = length; + return 0; } -inline -int32_t -VideoFrame::SetLength(const uint32_t newLength) -{ - if (newLength >_bufferSize ) - { // can't accomodate new value - return -1; - } - _bufferLength = newLength; - return 0; +inline int32_t VideoFrame::CopyFrame(const VideoFrame& videoFrame) { + if (CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) { + return -1; + } + _timeStamp = videoFrame._timeStamp; + _width = videoFrame._width; + _height = videoFrame._height; + _renderTimeMs = videoFrame._renderTimeMs; + return 0; } -inline -int32_t -VideoFrame::SwapFrame(VideoFrame& videoFrame) -{ - uint32_t tmpTimeStamp = _timeStamp; - uint32_t tmpWidth = _width; - uint32_t tmpHeight = _height; - int64_t tmpRenderTime = _renderTimeMs; +inline void VideoFrame::Free() { + _timeStamp = 0; + _bufferLength = 0; + _bufferSize = 0; + _height = 0; + _width = 0; + _renderTimeMs = 0; - _timeStamp = videoFrame._timeStamp; - _width = videoFrame._width; - _height = videoFrame._height; - _renderTimeMs = videoFrame._renderTimeMs; - - 
videoFrame._timeStamp = tmpTimeStamp; - videoFrame._width = tmpWidth; - videoFrame._height = tmpHeight; - videoFrame._renderTimeMs = tmpRenderTime; - - return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize); + if (_buffer) { + delete[] _buffer; + _buffer = NULL; + } } -inline -int32_t -VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize) -{ - uint8_t* tmpBuffer = _buffer; - uint32_t tmpLength = _bufferLength; - uint32_t tmpSize = _bufferSize; - _buffer = newMemory; - _bufferLength = newLength; - _bufferSize = newSize; - newMemory = tmpBuffer; - newLength = tmpLength; - newSize = tmpSize; - return 0; -} - -inline -int32_t -VideoFrame::CopyFrame(uint32_t length, const uint8_t* sourceBuffer) -{ - if (length > _bufferSize) - { - int32_t ret = VerifyAndAllocate(length); - if (ret < 0) - { - return ret; - } - } - memcpy(_buffer, sourceBuffer, length); - _bufferLength = length; - return 0; -} - -inline -int32_t -VideoFrame::CopyFrame(const VideoFrame& videoFrame) -{ - if(CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) - { - return -1; - } - _timeStamp = videoFrame._timeStamp; - _width = videoFrame._width; - _height = videoFrame._height; - _renderTimeMs = videoFrame._renderTimeMs; - return 0; -} - -inline -void -VideoFrame::Free() -{ - _timeStamp = 0; - _bufferLength = 0; - _bufferSize = 0; - _height = 0; - _width = 0; - _renderTimeMs = 0; - - if(_buffer) - { - delete [] _buffer; - _buffer = NULL; - } -} - - /* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It * allows for adding and subtracting frames while keeping track of the resulting * states. @@ -730,37 +664,31 @@ VideoFrame::Free() * deciding the resulting state. To do this use the -operator. */ class AudioFrame { -public: + public: // Stereo, 32 kHz, 60 ms (2 * 32 * 60) static const int kMaxDataSizeSamples = 3840; enum VADActivity { - kVadActive = 0, + kVadActive = 0, kVadPassive = 1, kVadUnknown = 2 }; enum SpeechType { kNormalSpeech = 0, - kPLC = 1, - kCNG = 2, - kPLCCNG = 3, - kUndefined = 4 + kPLC = 1, + kCNG = 2, + kPLCCNG = 3, + kUndefined = 4 }; AudioFrame(); virtual ~AudioFrame() {} - // |Interleaved_| is assumed to be unchanged with this UpdateFrame() method. - void UpdateFrame( - int id, - uint32_t timestamp, - const int16_t* data, - int samples_per_channel, - int sample_rate_hz, - SpeechType speech_type, - VADActivity vad_activity, - int num_channels = 1, - uint32_t energy = -1); + // |interleaved_| is not changed by this method. 
+  void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
+                   int samples_per_channel, int sample_rate_hz,
+                   SpeechType speech_type, VADActivity vad_activity,
+                   int num_channels = 1, uint32_t energy = -1);
 
   AudioFrame& Append(const AudioFrame& rhs);
@@ -783,12 +711,11 @@ public:
   uint32_t energy_;
   bool interleaved_;
 
-private:
+ private:
   DISALLOW_COPY_AND_ASSIGN(AudioFrame);
 };
 
-inline
-AudioFrame::AudioFrame()
+inline AudioFrame::AudioFrame()
     : id_(-1),
      timestamp_(0),
      data_(),
@@ -800,17 +727,12 @@ AudioFrame::AudioFrame()
      energy_(0xffffffff),
      interleaved_(true) {}
 
-inline
-void AudioFrame::UpdateFrame(
-    int id,
-    uint32_t timestamp,
-    const int16_t* data,
-    int samples_per_channel,
-    int sample_rate_hz,
-    SpeechType speech_type,
-    VADActivity vad_activity,
-    int num_channels,
-    uint32_t energy) {
+inline void AudioFrame::UpdateFrame(int id, uint32_t timestamp,
+                                    const int16_t* data,
+                                    int samples_per_channel, int sample_rate_hz,
+                                    SpeechType speech_type,
+                                    VADActivity vad_activity, int num_channels,
+                                    uint32_t energy) {
   id_ = id;
   timestamp_ = timestamp;
   samples_per_channel_ = samples_per_channel;
@@ -822,7 +744,7 @@ void AudioFrame::UpdateFrame(
   const int length = samples_per_channel * num_channels;
   assert(length <= kMaxDataSizeSamples && length >= 0);
 
-  if(data != NULL) {
+  if (data != NULL) {
     memcpy(data_, data, sizeof(int16_t) * length);
   } else {
     memset(data_, 0, sizeof(int16_t) * length);
@@ -830,8 +752,7 @@ void AudioFrame::UpdateFrame(
 }
 
 inline void AudioFrame::CopyFrom(const AudioFrame& src) {
-  if(this == &src)
-    return;
+  if (this == &src) return;
 
   id_ = src.id_;
   timestamp_ = src.timestamp_;
@@ -848,63 +769,54 @@ inline void AudioFrame::CopyFrom(const AudioFrame& src) {
   memcpy(data_, src.data_, sizeof(int16_t) * length);
 }
 
-inline
-void AudioFrame::Mute() {
+inline void AudioFrame::Mute() {
   memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
 }
 
-inline
-AudioFrame& AudioFrame::operator>>=(const int rhs) {
+inline AudioFrame& AudioFrame::operator>>=(const int rhs) {
   assert((num_channels_ > 0) && (num_channels_ < 3));
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
 
-  for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
     data_[i] = static_cast<int16_t>(data_[i] >> rhs);
   }
   return *this;
 }
 
-inline
-AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
-  if(num_channels_ != rhs.num_channels_)
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
 
-  if((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
    vad_activity_ = kVadActive;
-  } else if(vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
    vad_activity_ = kVadUnknown;
  }
 
-  if(speech_type_ != rhs.speech_type_) {
+  if (speech_type_ != rhs.speech_type_) {
    speech_type_ = kUndefined;
  }
 
   int offset = samples_per_channel_ * num_channels_;
-  for(int i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
-    data_[offset+i] = rhs.data_[i];
+  for (int i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
+    data_[offset + i] = rhs.data_[i];
  }
   samples_per_channel_ += rhs.samples_per_channel_;
 
   return *this;
 }
 
-inline
-AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
-  if(num_channels_ != rhs.num_channels_)
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
 
   bool noPrevData = false;
-  if(samples_per_channel_ != rhs.samples_per_channel_) {
-    if(samples_per_channel_ == 0) {
+  if (samples_per_channel_ != rhs.samples_per_channel_) {
+    if (samples_per_channel_ == 0) {
      // special case we have no data to start with
      samples_per_channel_ = rhs.samples_per_channel_;
      noPrevData = true;
@@ -913,26 +825,25 @@ AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
    }
  }
 
-  if((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
    vad_activity_ = kVadActive;
-  } else if(vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
    vad_activity_ = kVadUnknown;
  }
 
-  if(speech_type_ != rhs.speech_type_)
-    speech_type_ = kUndefined;
+  if (speech_type_ != rhs.speech_type_) speech_type_ = kUndefined;
 
-  if(noPrevData) {
+  if (noPrevData) {
    memcpy(data_, rhs.data_,
           sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
  } else {
    // IMPROVEMENT this can be done very fast in assembly
-    for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
-      int32_t wrapGuard = static_cast<int32_t>(data_[i]) +
-          static_cast<int32_t>(rhs.data_[i]);
-      if(wrapGuard < -32768) {
+    for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+      int32_t wrapGuard =
+          static_cast<int32_t>(data_[i]) + static_cast<int32_t>(rhs.data_[i]);
+      if (wrapGuard < -32768) {
        data_[i] = -32768;
-      } else if(wrapGuard > 32767) {
+      } else if (wrapGuard > 32767) {
        data_[i] = 32767;
      } else {
        data_[i] = (int16_t)wrapGuard;
@@ -943,29 +854,27 @@ AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
   return *this;
 }
 
-inline
-AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2)|| (num_channels_ < 1))
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
 
-  if((samples_per_channel_ != rhs.samples_per_channel_) ||
+  if ((samples_per_channel_ != rhs.samples_per_channel_) ||
      (num_channels_ != rhs.num_channels_)) {
    return *this;
  }
-  if((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
+  if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
    vad_activity_ = kVadUnknown;
  }
 
   speech_type_ = kUndefined;
 
-  for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
-    int32_t wrapGuard = static_cast<int32_t>(data_[i]) -
-        static_cast<int32_t>(rhs.data_[i]);
-    if(wrapGuard < -32768) {
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+    int32_t wrapGuard =
+        static_cast<int32_t>(data_[i]) - static_cast<int32_t>(rhs.data_[i]);
+    if (wrapGuard < -32768) {
      data_[i] = -32768;
-    } else if(wrapGuard > 32767) {
+    } else if (wrapGuard > 32767) {
      data_[i] = 32767;
    } else {
      data_[i] = (int16_t)wrapGuard;
@@ -978,25 +887,25 @@ AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
 inline bool IsNewerSequenceNumber(uint16_t sequence_number,
                                   uint16_t prev_sequence_number) {
   return sequence_number != prev_sequence_number &&
-      static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
+         static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
 }
 
 inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
   return timestamp != prev_timestamp &&
-      static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
+         static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
 }
 
 inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
                                      uint16_t sequence_number2) {
-  return IsNewerSequenceNumber(sequence_number1, sequence_number2) ?
-      sequence_number1 : sequence_number2;
+  return IsNewerSequenceNumber(sequence_number1, sequence_number2)
+             ? sequence_number1
+             : sequence_number2;
 }
 
 inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
-  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 :
-      timestamp2;
+  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
 }
 
 }  // namespace webrtc
 
-#endif // MODULE_COMMON_TYPES_H
+#endif  // MODULE_COMMON_TYPES_H
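
Usage note (illustrative, not part of the patch): the change above is purely mechanical; per the subject line it is the output of clang-format with the Chromium style (for example, something like `clang-format -i -style=chromium webrtc/modules/interface/module_common_types.h` run from a checkout), so no behavior change is intended. As a quick sketch of the API the reformatted header declares, the snippet below fills two mono AudioFrame objects with UpdateFrame() and mixes them with operator+=. It assumes a WebRTC checkout on the include path; the 16 kHz / 10 ms figures and variable names are arbitrary example values, not anything taken from the patch.

// Minimal sketch: exercising the AudioFrame helpers declared in
// module_common_types.h. Assumes the WebRTC tree is on the include path.
#include <assert.h>
#include <stdint.h>

#include "webrtc/modules/interface/module_common_types.h"

int main() {
  const int kSampleRateHz = 16000;                     // example sample rate
  const int kSamplesPerChannel = kSampleRateHz / 100;  // 10 ms of mono audio
  int16_t speech[kSamplesPerChannel] = {0};            // stand-in PCM data
  int16_t noise[kSamplesPerChannel] = {0};

  webrtc::AudioFrame mixed;
  webrtc::AudioFrame background;
  // UpdateFrame() copies the PCM samples into the frame's internal buffer.
  mixed.UpdateFrame(0, 0u, speech, kSamplesPerChannel, kSampleRateHz,
                    webrtc::AudioFrame::kNormalSpeech,
                    webrtc::AudioFrame::kVadActive);
  background.UpdateFrame(0, 0u, noise, kSamplesPerChannel, kSampleRateHz,
                         webrtc::AudioFrame::kNormalSpeech,
                         webrtc::AudioFrame::kVadPassive);

  // operator+= adds the frames sample by sample, saturating to the
  // int16_t range, exactly as the inline definition above shows.
  mixed += background;
  assert(mixed.samples_per_channel_ == kSamplesPerChannel);
  return 0;
}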