WebRtc_Word32 -> int32_t in video_engine/

BUG=314

Review URL: https://webrtc-codereview.appspot.com/1302005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3801 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pbos@webrtc.org
Date: 2013-04-09 13:41:51 +00:00
Parent: 1ab45f6dd5
Commit: b238d1210b
51 changed files with 767 additions and 789 deletions
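
For reference, the renames applied throughout this commit map the legacy WebRtc_* fixed-width typedefs onto the standard <stdint.h> types. A minimal sketch of the mapping, assumed to mirror the aliases declared in webrtc/typedefs.h:

// Sketch of the type mapping applied by this commit (assumed equivalences).
#include <stdint.h>
typedef int8_t   WebRtc_Word8;    // -> int8_t
typedef int16_t  WebRtc_Word16;   // -> int16_t
typedef int32_t  WebRtc_Word32;   // -> int32_t
typedef int64_t  WebRtc_Word64;   // -> int64_t
typedef uint8_t  WebRtc_UWord8;   // -> uint8_t
typedef uint16_t WebRtc_UWord16;  // -> uint16_t
typedef uint32_t WebRtc_UWord32;  // -> uint32_t
typedef uint64_t WebRtc_UWord64;  // -> uint64_t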

View File

@ -27,10 +27,10 @@ class TestProcessThread : public ProcessThread {
public:
TestProcessThread() {}
~TestProcessThread() {}
virtual WebRtc_Word32 Start() { return 0; }
virtual WebRtc_Word32 Stop() { return 0; }
virtual WebRtc_Word32 RegisterModule(const Module* module) { return 0; }
virtual WebRtc_Word32 DeRegisterModule(const Module* module) { return 0; }
virtual int32_t Start() { return 0; }
virtual int32_t Stop() { return 0; }
virtual int32_t RegisterModule(const Module* module) { return 0; }
virtual int32_t DeRegisterModule(const Module* module) { return 0; }
};
class MockVieEncoder : public ViEEncoder {

View File

@ -64,7 +64,7 @@ enum AudioSource {
class WEBRTC_DLLEXPORT ViEFileObserver {
public:
// This method is called when the end of a played file is reached.
virtual void PlayFileEnded(const WebRtc_Word32 file_id) = 0;
virtual void PlayFileEnded(const int32_t file_id) = 0;
protected:
virtual ~ViEFileObserver() {}

View File

@ -29,8 +29,8 @@ AndroidMediaCodecDecoder::AndroidMediaCodecDecoder(
vm_attached_(false) {
}
WebRtc_Word32 AndroidMediaCodecDecoder::InitDecode(
const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores) {
int32_t AndroidMediaCodecDecoder::InitDecode(
const VideoCodec* codecSettings, int32_t numberOfCores) {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
// TODO(dwkang): Detach this thread from VM. => this leads to a crash on
@ -64,12 +64,12 @@ WebRtc_Word32 AndroidMediaCodecDecoder::InitDecode(
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 AndroidMediaCodecDecoder::Decode(
int32_t AndroidMediaCodecDecoder::Decode(
const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo,
WebRtc_Word64 renderTimeMs) {
int64_t renderTimeMs) {
if (!vm_attached_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@ -83,13 +83,13 @@ WebRtc_Word32 AndroidMediaCodecDecoder::Decode(
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
WebRtc_Word32 AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
int32_t AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 AndroidMediaCodecDecoder::Release() {
int32_t AndroidMediaCodecDecoder::Release() {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
env_->DeleteGlobalRef(mediaCodecDecoder_);
mediaCodecDecoder_ = NULL;
@ -97,7 +97,7 @@ WebRtc_Word32 AndroidMediaCodecDecoder::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 AndroidMediaCodecDecoder::Reset() {
int32_t AndroidMediaCodecDecoder::Reset() {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -27,8 +27,8 @@ class AndroidMediaCodecDecoder : public VideoDecoder {
// - numberOfCores : Number of cores available for the decoder
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 InitDecode(
const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores);
virtual int32_t InitDecode(
const VideoCodec* codecSettings, int32_t numberOfCores);
// Decode encoded image (as a part of a video stream). The decoded image
// will be returned to the user through the decode complete callback.
@ -47,12 +47,12 @@ class AndroidMediaCodecDecoder : public VideoDecoder {
// used by decoders with internal rendering.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32
virtual int32_t
Decode(const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo = NULL,
WebRtc_Word64 renderTimeMs = -1);
int64_t renderTimeMs = -1);
// Register a decode complete callback object.
//
@ -60,18 +60,18 @@ class AndroidMediaCodecDecoder : public VideoDecoder {
// - callback : Callback object which handles decoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 RegisterDecodeCompleteCallback(
virtual int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback* callback);
// Free decoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 Release();
virtual int32_t Release();
// Reset decoder state and prepare for a new call.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 Reset();
virtual int32_t Reset();
// Codec configuration data sent out-of-band, i.e. in SIP call setup
//
@ -81,8 +81,8 @@ class AndroidMediaCodecDecoder : public VideoDecoder {
// bytes
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 SetCodecConfigParameters(
const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) {
virtual int32_t SetCodecConfigParameters(
const uint8_t* /*buffer*/, int32_t /*size*/) {
return WEBRTC_VIDEO_CODEC_ERROR;
}

View File

@ -252,9 +252,9 @@ void TestFullStack(const TbInterfaces& interfaces,
EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
// Collect transport statistics.
WebRtc_Word32 num_rtp_packets = 0;
WebRtc_Word32 num_dropped_packets = 0;
WebRtc_Word32 num_rtcp_packets = 0;
int32_t num_rtp_packets = 0;
int32_t num_dropped_packets = 0;
int32_t num_rtcp_packets = 0;
external_transport.GetStats(num_rtp_packets, num_dropped_packets,
num_rtcp_packets);
ViETest::Log("RTP packets : %5d", num_rtp_packets);
@ -277,7 +277,7 @@ void FixOutputFileForComparison(const std::string& output_file,
"useful to fill that gap with and it is impossible to detect it without "
"any previous timestamps to compare with.";
WebRtc_UWord8* last_frame_data = new WebRtc_UWord8[frame_length_in_bytes];
uint8_t* last_frame_data = new uint8_t[frame_length_in_bytes];
// Process the file and write frame duplicates for all dropped frames.
for (std::vector<Frame*>::const_iterator it = frames.begin();

View File

@ -55,9 +55,9 @@ TEST_F(FrameDropPrimitivesTest, FixOutputFileForComparison) {
frames.push_back(&fourth_frame);
// Prepare data for the first and third frames:
WebRtc_UWord8 first_frame_data[kFrameLength];
uint8_t first_frame_data[kFrameLength];
memset(first_frame_data, 5, kFrameLength); // Fill it with 5's to identify.
WebRtc_UWord8 third_frame_data[kFrameLength];
uint8_t third_frame_data[kFrameLength];
memset(third_frame_data, 7, kFrameLength); // Fill it with 7's to identify.
// Write the first and third frames to the temporary file. This means the fix
@ -79,7 +79,7 @@ TEST_F(FrameDropPrimitivesTest, FixOutputFileForComparison) {
webrtc::test::FrameReaderImpl frame_reader(kOutputFilename, kFrameLength);
frame_reader.Init();
WebRtc_UWord8 read_buffer[kFrameLength];
uint8_t read_buffer[kFrameLength];
EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));

View File

@ -68,7 +68,7 @@ class ViEAutotestFileObserver : public webrtc::ViEFileObserver {
ViEAutotestFileObserver() {}
~ViEAutotestFileObserver() {}
void PlayFileEnded(const WebRtc_Word32 file_id) {
void PlayFileEnded(const int32_t file_id) {
ViETest::Log("PlayFile ended");
}
};

View File

@ -27,7 +27,7 @@ public:
ViEAutotestFileObserver() {};
~ViEAutotestFileObserver() {};
void PlayFileEnded(const WebRtc_Word32 fileId)
void PlayFileEnded(const int32_t fileId)
{
ViETest::Log("PlayFile ended");
}

View File

@ -24,49 +24,45 @@ public:
TbI420Encoder();
virtual ~TbI420Encoder();
static WebRtc_Word32 VersionStatic(char* version,
WebRtc_Word32 length);
virtual WebRtc_Word32 Version(char* version,
WebRtc_Word32 length) const;
static int32_t VersionStatic(char* version, int32_t length);
virtual int32_t Version(char* version, int32_t length) const;
virtual WebRtc_Word32 InitEncode(const webrtc::VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize);
virtual int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
int32_t numberOfCores,
uint32_t maxPayloadSize);
virtual WebRtc_Word32 Encode(
virtual int32_t Encode(
const webrtc::I420VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::VideoFrameType>* frameTypes);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
virtual int32_t RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual int32_t Release();
virtual WebRtc_Word32 Reset();
virtual int32_t Reset();
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt);
virtual int32_t SetChannelParameters(uint32_t packetLoss, int rtt);
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate);
virtual int32_t SetRates(uint32_t newBitRate, uint32_t frameRate);
virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
virtual int32_t SetPeriodicKeyFrames(bool enable);
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
WebRtc_Word32 /*size*/);
virtual int32_t CodecConfigParameters(uint8_t* /*buffer*/,
int32_t /*size*/);
struct FunctionCalls
{
WebRtc_Word32 InitEncode;
WebRtc_Word32 Encode;
WebRtc_Word32 RegisterEncodeCompleteCallback;
WebRtc_Word32 Release;
WebRtc_Word32 Reset;
WebRtc_Word32 SetChannelParameters;
WebRtc_Word32 SetRates;
WebRtc_Word32 SetPeriodicKeyFrames;
WebRtc_Word32 CodecConfigParameters;
int32_t InitEncode;
int32_t Encode;
int32_t RegisterEncodeCompleteCallback;
int32_t Release;
int32_t Reset;
int32_t SetChannelParameters;
int32_t SetRates;
int32_t SetPeriodicKeyFrames;
int32_t CodecConfigParameters;
};
@ -90,27 +86,27 @@ public:
TbI420Decoder();
virtual ~TbI420Decoder();
virtual WebRtc_Word32 InitDecode(const webrtc::VideoCodec* inst,
WebRtc_Word32 numberOfCores);
virtual WebRtc_Word32 Decode(
virtual int32_t InitDecode(const webrtc::VideoCodec* inst,
int32_t numberOfCores);
virtual int32_t Decode(
const webrtc::EncodedImage& inputImage,
bool missingFrames,
const webrtc::RTPFragmentationHeader* fragmentation,
const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
WebRtc_Word64 renderTimeMs = -1);
int64_t renderTimeMs = -1);
virtual WebRtc_Word32
virtual int32_t
RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
virtual int32_t Release();
virtual int32_t Reset();
struct FunctionCalls
{
WebRtc_Word32 InitDecode;
WebRtc_Word32 Decode;
WebRtc_Word32 RegisterDecodeCompleteCallback;
WebRtc_Word32 Release;
WebRtc_Word32 Reset;
int32_t InitDecode;
int32_t Decode;
int32_t RegisterDecodeCompleteCallback;
int32_t Release;
int32_t Reset;
};
FunctionCalls GetFunctionCalls();
@ -118,8 +114,8 @@ public:
private:
webrtc::I420VideoFrame _decodedImage;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
int32_t _width;
int32_t _height;
bool _inited;
FunctionCalls _functionCalls;
webrtc::DecodedImageCallback* _decodeCompleteCallback;

View File

@ -99,12 +99,12 @@ public:
// The network parameters of the link. Regarding packet losses, packets
// belonging to the first frame (same RTP timestamp) will never be dropped.
void SetNetworkParameters(const NetworkParameters& network_parameters);
void SetSSRCFilter(WebRtc_UWord32 SSRC);
void SetSSRCFilter(uint32_t SSRC);
void ClearStats();
void GetStats(WebRtc_Word32& numRtpPackets,
WebRtc_Word32& numDroppedPackets,
WebRtc_Word32& numRtcpPackets);
void GetStats(int32_t& numRtpPackets,
int32_t& numDroppedPackets,
int32_t& numRtcpPackets);
void SetTemporalToggle(unsigned char layers);
void EnableSSRCCheck();
@ -123,7 +123,7 @@ private:
static int GaussianRandom(int mean_ms, int standard_deviation_ms);
bool UniformLoss(int loss_rate);
bool GilbertElliotLoss(int loss_rate, int burst_length);
WebRtc_Word64 NowMs();
int64_t NowMs();
enum
{
@ -135,10 +135,10 @@ private:
};
typedef struct
{
WebRtc_Word8 packetBuffer[KMaxPacketSize];
WebRtc_Word32 length;
WebRtc_Word32 channel;
WebRtc_Word64 receiveTime;
int8_t packetBuffer[KMaxPacketSize];
int32_t length;
int32_t channel;
int64_t receiveTime;
} VideoPacket;
int sender_channel_;
@ -150,9 +150,9 @@ private:
webrtc::CriticalSectionWrapper& _statCrit;
NetworkParameters network_parameters_;
WebRtc_Word32 _rtpCount;
WebRtc_Word32 _rtcpCount;
WebRtc_Word32 _dropCount;
int32_t _rtpCount;
int32_t _rtcpCount;
int32_t _dropCount;
std::list<VideoPacket*> _rtpPackets;
std::list<VideoPacket*> _rtcpPackets;
@ -169,18 +169,18 @@ private:
unsigned int _lastTimeMs;
bool _checkSSRC;
WebRtc_UWord32 _lastSSRC;
uint32_t _lastSSRC;
bool _filterSSRC;
WebRtc_UWord32 _SSRC;
uint32_t _SSRC;
bool _checkSequenceNumber;
WebRtc_UWord16 _firstSequenceNumber;
uint16_t _firstSequenceNumber;
// Keep track of the first RTP timestamp so we don't do packet loss on
// the first frame.
WebRtc_UWord32 _firstRTPTimestamp;
uint32_t _firstRTPTimestamp;
// Track RTP timestamps so we invoke callbacks properly (if registered).
WebRtc_UWord32 _lastSendRTPTimestamp;
WebRtc_UWord32 _lastReceiveRTPTimestamp;
uint32_t _lastSendRTPTimestamp;
uint32_t _lastReceiveRTPTimestamp;
int64_t last_receive_time_;
bool previous_drop_;
};

View File

@ -49,9 +49,9 @@ class ViEFileCaptureDevice {
std::FILE* input_file_;
webrtc::CriticalSectionWrapper* mutex_;
WebRtc_UWord32 frame_length_;
WebRtc_UWord32 width_;
WebRtc_UWord32 height_;
uint32_t frame_length_;
uint32_t width_;
uint32_t height_;
};
#endif // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_

View File

@ -33,11 +33,10 @@ TbI420Encoder::~TbI420Encoder()
}
}
WebRtc_Word32 TbI420Encoder::VersionStatic(char* version,
WebRtc_Word32 length)
int32_t TbI420Encoder::VersionStatic(char* version, int32_t length)
{
const char* str = "I420 version 1.0.0\n";
WebRtc_Word32 verLen = (WebRtc_Word32) strlen(str);
int32_t verLen = (int32_t) strlen(str);
if (verLen > length)
{
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
@ -46,13 +45,12 @@ WebRtc_Word32 TbI420Encoder::VersionStatic(char* version,
return verLen;
}
WebRtc_Word32 TbI420Encoder::Version(char* version,
WebRtc_Word32 length) const
int32_t TbI420Encoder::Version(char* version, int32_t length) const
{
return VersionStatic(version, length);
}
WebRtc_Word32 TbI420Encoder::Release()
int32_t TbI420Encoder::Release()
{
_functionCalls.Release++;
// should allocate an encoded frame and then release it here, for that we
@ -66,7 +64,7 @@ WebRtc_Word32 TbI420Encoder::Release()
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::Reset()
int32_t TbI420Encoder::Reset()
{
_functionCalls.Reset++;
if (!_inited)
@ -77,15 +75,14 @@ WebRtc_Word32 TbI420Encoder::Reset()
}
WebRtc_Word32 TbI420Encoder::SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt) {
int32_t TbI420Encoder::SetChannelParameters(uint32_t packetLoss, int rtt) {
_functionCalls.SetChannelParameters++;
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
WebRtc_Word32 /*numberOfCores*/,
WebRtc_UWord32 /*maxPayloadSize */)
int32_t TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
int32_t /*numberOfCores*/,
uint32_t /*maxPayloadSize */)
{
_functionCalls.InitEncode++;
if (inst == NULL)
@ -104,8 +101,8 @@ WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
const WebRtc_UWord32 newSize = (3 * inst->width * inst->height) >> 1;
WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
const uint32_t newSize = (3 * inst->width * inst->height) >> 1;
uint8_t* newBuffer = new uint8_t[newSize];
if (newBuffer == NULL)
{
return WEBRTC_VIDEO_CODEC_MEMORY;
@ -118,7 +115,7 @@ WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::Encode(
int32_t TbI420Encoder::Encode(
const webrtc::I420VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<webrtc::VideoFrameType>* /*frameTypes*/)
@ -150,7 +147,7 @@ WebRtc_Word32 TbI420Encoder::Encode(
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
WebRtc_UWord8* newBuffer = new WebRtc_UWord8[reqSize];
uint8_t* newBuffer = new uint8_t[reqSize];
if (newBuffer == NULL)
{
return WEBRTC_VIDEO_CODEC_MEMORY;
@ -168,7 +165,7 @@ WebRtc_Word32 TbI420Encoder::Encode(
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::RegisterEncodeCompleteCallback(
int32_t TbI420Encoder::RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback)
{
_functionCalls.RegisterEncodeCompleteCallback++;
@ -176,21 +173,20 @@ WebRtc_Word32 TbI420Encoder::RegisterEncodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate)
int32_t TbI420Encoder::SetRates(uint32_t newBitRate, uint32_t frameRate)
{
_functionCalls.SetRates++;
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Encoder::SetPeriodicKeyFrames(bool enable)
int32_t TbI420Encoder::SetPeriodicKeyFrames(bool enable)
{
_functionCalls.SetPeriodicKeyFrames++;
return WEBRTC_VIDEO_CODEC_ERROR;
}
WebRtc_Word32 TbI420Encoder::CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
WebRtc_Word32 /*size*/)
int32_t TbI420Encoder::CodecConfigParameters(uint8_t* /*buffer*/,
int32_t /*size*/)
{
_functionCalls.CodecConfigParameters++;
return WEBRTC_VIDEO_CODEC_ERROR;
@ -212,14 +208,14 @@ TbI420Decoder::~TbI420Decoder()
Release();
}
WebRtc_Word32 TbI420Decoder::Reset()
int32_t TbI420Decoder::Reset()
{
_functionCalls.Reset++;
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
WebRtc_Word32 /*numberOfCores */)
int32_t TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
int32_t /*numberOfCores */)
{
_functionCalls.InitDecode++;
if (inst == NULL)
@ -239,12 +235,12 @@ WebRtc_Word32 TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Decoder::Decode(
int32_t TbI420Decoder::Decode(
const webrtc::EncodedImage& inputImage,
bool /*missingFrames*/,
const webrtc::RTPFragmentationHeader* /*fragmentation*/,
const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
WebRtc_Word64 /*renderTimeMs*/)
int64_t /*renderTimeMs*/)
{
_functionCalls.Decode++;
if (inputImage._buffer == NULL)
@ -276,7 +272,7 @@ WebRtc_Word32 TbI420Decoder::Decode(
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback(
int32_t TbI420Decoder::RegisterDecodeCompleteCallback(
webrtc::DecodedImageCallback* callback)
{
_functionCalls.RegisterDecodeCompleteCallback++;
@ -284,7 +280,7 @@ WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32 TbI420Decoder::Release()
int32_t TbI420Decoder::Release()
{
_functionCalls.Release++;
_inited = false;
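
As a side note on the buffer allocation in TbI420Encoder::InitEncode and Encode above: (3 * width * height) >> 1 is the usual I420 frame size, a full-resolution Y plane plus two quarter-resolution chroma planes. A small sketch of that arithmetic; the helper name is illustrative and not part of the commit:

#include <stdint.h>

static uint32_t I420BufferSize(uint32_t width, uint32_t height) {
  // Full-resolution Y plane plus two (width/2) x (height/2) chroma planes:
  // width*height + 2 * (width/2)*(height/2) = width*height*3/2 bytes.
  return (3 * width * height) >> 1;
}
// Example: a 640x480 frame needs 460800 bytes (450 KiB).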

View File

@ -108,8 +108,8 @@ TbExternalTransport::~TbExternalTransport()
int TbExternalTransport::SendPacket(int channel, const void *data, int len)
{
// Parse timestamp from RTP header according to RFC 3550, section 5.1.
WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
uint8_t* ptr = (uint8_t*)data;
uint32_t rtp_timestamp = ptr[4] << 24;
rtp_timestamp += ptr[5] << 16;
rtp_timestamp += ptr[6] << 8;
rtp_timestamp += ptr[7];
@ -126,8 +126,8 @@ int TbExternalTransport::SendPacket(int channel, const void *data, int len)
if (_filterSSRC)
{
WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
WebRtc_UWord32 ssrc = ptr[8] << 24;
uint8_t* ptr = (uint8_t*)data;
uint32_t ssrc = ptr[8] << 24;
ssrc += ptr[9] << 16;
ssrc += ptr[10] << 8;
ssrc += ptr[11];
@ -139,7 +139,7 @@ int TbExternalTransport::SendPacket(int channel, const void *data, int len)
if (_temporalLayers) {
// parse out vp8 temporal layers
// 12 bytes RTP
WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
uint8_t* ptr = (uint8_t*)data;
if (ptr[12] & 0x80 && // X-bit
ptr[13] & 0x20) // T-bit
@ -310,7 +310,7 @@ void TbExternalTransport::SetNetworkParameters(
network_parameters_ = network_parameters;
}
void TbExternalTransport::SetSSRCFilter(WebRtc_UWord32 ssrc)
void TbExternalTransport::SetSSRCFilter(uint32_t ssrc)
{
webrtc::CriticalSectionScoped cs(&_crit);
_filterSSRC = true;
@ -325,9 +325,9 @@ void TbExternalTransport::ClearStats()
_rtcpCount = 0;
}
void TbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets,
WebRtc_Word32& numDroppedPackets,
WebRtc_Word32& numRtcpPackets)
void TbExternalTransport::GetStats(int32_t& numRtpPackets,
int32_t& numDroppedPackets,
int32_t& numRtcpPackets)
{
webrtc::CriticalSectionScoped cs(&_statCrit);
numRtpPackets = _rtpCount;
@ -380,7 +380,7 @@ bool TbExternalTransport::ViEExternalTransportProcess()
{
// Take first packet in queue
packet = _rtpPackets.front();
WebRtc_Word64 timeToReceive = 0;
int64_t timeToReceive = 0;
if (packet)
{
timeToReceive = packet->receiveTime - NowMs();
@ -431,8 +431,8 @@ bool TbExternalTransport::ViEExternalTransportProcess()
}
}
// Signal received packet of frame
WebRtc_UWord8* ptr = (WebRtc_UWord8*)packet->packetBuffer;
WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
uint8_t* ptr = (uint8_t*)packet->packetBuffer;
uint32_t rtp_timestamp = ptr[4] << 24;
rtp_timestamp += ptr[5] << 16;
rtp_timestamp += ptr[6] << 8;
rtp_timestamp += ptr[7];
@ -463,7 +463,7 @@ bool TbExternalTransport::ViEExternalTransportProcess()
{
// Take first packet in queue
packet = _rtcpPackets.front();
WebRtc_Word64 timeToReceive = 0;
int64_t timeToReceive = 0;
if (packet)
{
timeToReceive = packet->receiveTime - NowMs();
@ -520,7 +520,7 @@ bool TbExternalTransport::ViEExternalTransportProcess()
return true;
}
WebRtc_Word64 TbExternalTransport::NowMs()
int64_t TbExternalTransport::NowMs()
{
return webrtc::TickTime::MillisecondTimestamp();
}
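
The parsing in TbExternalTransport::SendPacket above follows RFC 3550, section 5.1: the 32-bit timestamp sits in bytes 4-7 of the fixed RTP header and the SSRC in bytes 8-11, in network byte order. A standalone sketch of the same byte arithmetic with the new stdint types; the helper names are illustrative and not part of the commit:

#include <stdint.h>

// Reads a 32-bit value stored in network (big-endian) byte order.
static uint32_t ReadBigEndian32(const uint8_t* p) {
  return (static_cast<uint32_t>(p[0]) << 24) |
         (static_cast<uint32_t>(p[1]) << 16) |
         (static_cast<uint32_t>(p[2]) << 8) |
         static_cast<uint32_t>(p[3]);
}

// Timestamp occupies bytes 4-7 of the fixed RTP header (RFC 3550, 5.1).
static uint32_t RtpTimestamp(const uint8_t* rtp_packet) {
  return ReadBigEndian32(rtp_packet + 4);
}

// SSRC occupies bytes 8-11 of the fixed RTP header.
static uint32_t RtpSsrc(const uint8_t* rtp_packet) {
  return ReadBigEndian32(rtp_packet + 8);
}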

View File

@ -47,7 +47,7 @@ int ViEBaseImpl::Release() {
"ViEBase::Release()");
(*this)--; // Decrease ref count.
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
"ViEBase release too many times");
@ -262,7 +262,7 @@ int ViEBaseImpl::StartSend(const int video_channel) {
// Pause and trigger a key frame.
vie_encoder->Pause();
WebRtc_Word32 error = vie_channel->StartSend();
int32_t error = vie_channel->StartSend();
if (error != 0) {
vie_encoder->Restart();
WEBRTC_TRACE(kTraceError, kTraceVideo,
@ -295,7 +295,7 @@ int ViEBaseImpl::StopSend(const int video_channel) {
return -1;
}
WebRtc_Word32 error = vie_channel->StopSend();
int32_t error = vie_channel->StopSend();
if (error != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_.instance_id(), video_channel),

View File

@ -54,9 +54,9 @@ class ViEBaseImpl
private:
// Version functions.
WebRtc_Word32 AddViEVersion(char* str) const;
WebRtc_Word32 AddBuildInfo(char* str) const;
WebRtc_Word32 AddExternalTransportBuild(char* str) const;
int32_t AddViEVersion(char* str) const;
int32_t AddBuildInfo(char* str) const;
int32_t AddExternalTransportBuild(char* str) const;
int CreateChannel(int& video_channel, int original_channel, // NOLINT
bool sender);

View File

@ -44,7 +44,7 @@ int ViECaptureImpl::Release() {
// Decrease ref count
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViECapture release too many times");
@ -114,10 +114,10 @@ int ViECaptureImpl::AllocateCaptureDevice(
shared_data_->instance_id());
return -1;
}
const WebRtc_Word32 result =
const int32_t result =
shared_data_->input_manager()->CreateCaptureDevice(
unique_idUTF8,
static_cast<const WebRtc_UWord32>(unique_idUTF8Length),
static_cast<const uint32_t>(unique_idUTF8Length),
capture_id);
if (result != 0) {
shared_data_->SetLastError(result);
@ -138,7 +138,7 @@ int ViECaptureImpl::AllocateExternalCaptureDevice(
shared_data_->instance_id());
return -1;
}
const WebRtc_Word32 result =
const int32_t result =
shared_data_->input_manager()->CreateExternalCaptureDevice(
external_capture, capture_id);
@ -161,7 +161,7 @@ int ViECaptureImpl::AllocateCaptureDevice(
shared_data_->instance_id());
return -1;
}
const WebRtc_Word32 result =
const int32_t result =
shared_data_->input_manager()->CreateCaptureDevice(&capture_module,
capture_id);
if (result != 0) {

View File

@ -138,7 +138,7 @@ ViECapturer* ViECapturer::CreateViECapture(
return capture;
}
WebRtc_Word32 ViECapturer::Init(VideoCaptureModule* capture_module) {
int32_t ViECapturer::Init(VideoCaptureModule* capture_module) {
assert(capture_module_ == NULL);
capture_module_ = capture_module;
capture_module_->RegisterCaptureDataCallback(*this);
@ -154,7 +154,7 @@ ViECapturer* ViECapturer::CreateViECapture(
int capture_id,
int engine_id,
const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length,
const uint32_t device_unique_idUTF8Length,
ProcessThread& module_process_thread) {
ViECapturer* capture = new ViECapturer(capture_id, engine_id,
module_process_thread);
@ -166,9 +166,9 @@ ViECapturer* ViECapturer::CreateViECapture(
return capture;
}
WebRtc_Word32 ViECapturer::Init(
int32_t ViECapturer::Init(
const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length) {
const uint32_t device_unique_idUTF8Length) {
assert(capture_module_ == NULL);
if (device_unique_idUTF8 == NULL) {
capture_module_ = VideoCaptureFactory::Create(
@ -212,7 +212,7 @@ int ViECapturer::FrameCallbackChanged() {
return 0;
}
WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
__FUNCTION__);
int width;
@ -257,7 +257,7 @@ WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
return capture_module_->StartCapture(capability);
}
WebRtc_Word32 ViECapturer::Stop() {
int32_t ViECapturer::Stop() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
__FUNCTION__);
requested_capability_ = CaptureCapability();
@ -274,11 +274,11 @@ const char* ViECapturer::CurrentDeviceName() const {
return capture_module_->CurrentDeviceName();
}
WebRtc_Word32 ViECapturer::SetCaptureDelay(WebRtc_Word32 delay_ms) {
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
return capture_module_->SetCaptureDelay(delay_ms);
}
WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
int32_t ViECapturer::SetRotateCapturedFrames(
const RotateCapturedFrame rotation) {
VideoCaptureRotation converted_rotation = kCameraRotate0;
switch (rotation) {
@ -344,7 +344,7 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
return external_capture_module_->IncomingFrameI420(frame, capture_time);
}
void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
I420VideoFrame& video_frame) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
@ -358,7 +358,7 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
return;
}
void ViECapturer::OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id,
void ViECapturer::OnIncomingCapturedEncodedFrame(const int32_t capture_id,
VideoFrame& video_frame,
VideoCodecType codec_type) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
@ -388,8 +388,8 @@ void ViECapturer::OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id,
return;
}
void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay) {
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
const int32_t delay) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
delay);
@ -402,7 +402,7 @@ void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
}
}
WebRtc_Word32 ViECapturer::RegisterEffectFilter(
int32_t ViECapturer::RegisterEffectFilter(
ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(deliver_cs_.get());
@ -431,7 +431,7 @@ WebRtc_Word32 ViECapturer::RegisterEffectFilter(
return 0;
}
WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
int32_t ViECapturer::IncImageProcRefCount() {
if (!image_proc_module_) {
assert(image_proc_module_ref_counter_ == 0);
image_proc_module_ = VideoProcessingModule::Create(
@ -447,7 +447,7 @@ WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
return 0;
}
WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
int32_t ViECapturer::DecImageProcRefCount() {
image_proc_module_ref_counter_--;
if (image_proc_module_ref_counter_ == 0) {
// Destroy module.
@ -457,7 +457,7 @@ WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
return 0;
}
WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
int32_t ViECapturer::EnableDenoising(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
@ -484,7 +484,7 @@ WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
return 0;
}
WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
int32_t ViECapturer::EnableDeflickering(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
@ -513,7 +513,7 @@ WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
return 0;
}
WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) {
int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
capture_id_, enable);
@ -596,7 +596,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
if (brightness_frame_stats_) {
if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
*video_frame) == 0) {
WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
int32_t brightness = image_proc_module_->BrightnessDetection(
*video_frame, *brightness_frame_stats_);
switch (brightness) {
@ -634,7 +634,7 @@ void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
video_frame->Size());
encoded_image._timeStamp =
90 * static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
90 * static_cast<uint32_t>(video_frame->RenderTimeMs());
encode_complete_callback_->Encoded(encoded_image);
}
@ -646,7 +646,7 @@ void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
const int kMsToRtpTimestamp = 90;
decode_buffer_.timeStamp = kMsToRtpTimestamp *
static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
static_cast<uint32_t>(video_frame->RenderTimeMs());
decode_buffer_.payloadType = codec_.plType;
vcm_->DecodeFromStorage(decode_buffer_);
}
@ -682,9 +682,9 @@ bool ViECapturer::IsFrameCallbackRegistered(
return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
}
WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vie_encoder,
WebRtc_Word32 vie_encoder_id) {
int32_t ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vie_encoder,
int32_t vie_encoder_id) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
if (vie_encoder_ && &vie_encoder != vie_encoder_) {
@ -735,14 +735,13 @@ bool ViECapturer::CaptureCapabilityFixed() {
requested_capability_.maxFPS != 0;
}
WebRtc_Word32 ViECapturer::Version(char* version,
WebRtc_Word32 length) const {
int32_t ViECapturer::Version(char* version, int32_t length) const {
return 0;
}
WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
WebRtc_Word32 number_of_cores,
WebRtc_UWord32 max_payload_size) {
int32_t ViECapturer::InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
uint32_t max_payload_size) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
@ -768,7 +767,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
}
WebRtc_Word32 ViECapturer::Encode(
int32_t ViECapturer::Encode(
const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
@ -786,7 +785,7 @@ WebRtc_Word32 ViECapturer::Encode(
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
int32_t ViECapturer::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
@ -799,7 +798,7 @@ WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
return 0;
}
WebRtc_Word32 ViECapturer::Release() {
int32_t ViECapturer::Release() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
{
@ -827,14 +826,13 @@ WebRtc_Word32 ViECapturer::Release() {
// Should reset the capture device to the state it was in after the InitEncode
// function. The current implementation does nothing.
WebRtc_Word32 ViECapturer::Reset() {
int32_t ViECapturer::Reset() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
return 0;
}
WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
int rtt) {
int32_t ViECapturer::SetChannelParameters(uint32_t packet_loss, int rtt) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
@ -845,8 +843,7 @@ WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
return capture_encoder_->SetChannelParameters(packet_loss, rtt);
}
WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
WebRtc_UWord32 frame_rate) {
int32_t ViECapturer::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
@ -857,7 +854,7 @@ WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
return capture_encoder_->SetRates(new_bit_rate, frame_rate);
}
WebRtc_Word32 ViECapturer::FrameToRender(
int32_t ViECapturer::FrameToRender(
I420VideoFrame& video_frame) { //NOLINT
deliver_cs_->Enter();
DeliverI420Frame(&video_frame);
@ -865,7 +862,7 @@ WebRtc_Word32 ViECapturer::FrameToRender(
return 0;
}
WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
if (observer_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s Observer already registered", __FUNCTION__, capture_id_);
@ -880,7 +877,7 @@ WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
return 0;
}
WebRtc_Word32 ViECapturer::DeRegisterObserver() {
int32_t ViECapturer::DeRegisterObserver() {
CriticalSectionScoped cs(observer_cs_.get());
if (!observer_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
@ -899,16 +896,16 @@ bool ViECapturer::IsObserverRegistered() {
return observer_ != NULL;
}
void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frame_rate) {
void ViECapturer::OnCaptureFrameRate(const int32_t id,
const uint32_t frame_rate) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"OnCaptureFrameRate %d", frame_rate);
CriticalSectionScoped cs(observer_cs_.get());
observer_->CapturedFrameRate(id_, (WebRtc_UWord8) frame_rate);
observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}
void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
void ViECapturer::OnNoPictureAlarm(const int32_t id,
const VideoCaptureAlarm alarm) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"OnNoPictureAlarm %d", alarm);

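The kMsToRtpTimestamp factor of 90 in ViECapturer::DeliverCodedFrame above reflects video RTP's 90 kHz media clock: 90000 ticks per second is 90 ticks per millisecond. A tiny sketch of the conversion; the helper name is illustrative and not part of the commit:

#include <stdint.h>

static uint32_t MsToRtpTimestamp(int64_t render_time_ms) {
  // 90 kHz RTP clock: 90000 ticks/s / 1000 ms/s = 90 ticks per millisecond.
  return 90 * static_cast<uint32_t>(render_time_ms);
}
// Example: a render time of 1000 ms corresponds to 90000 RTP ticks.
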
View File

@ -52,7 +52,7 @@ class ViECapturer
int capture_id,
int engine_id,
const char* device_unique_idUTF8,
WebRtc_UWord32 device_unique_idUTF8Length,
uint32_t device_unique_idUTF8Length,
ProcessThread& module_process_thread);
~ViECapturer();
@ -75,31 +75,31 @@ class ViECapturer
// Use this capture device as encoder.
// Returns 0 if the codec is supported by this capture device.
virtual WebRtc_Word32 PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vie_encoder,
WebRtc_Word32 vie_encoder_id);
virtual int32_t PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vie_encoder,
int32_t vie_encoder_id);
// Start/Stop.
WebRtc_Word32 Start(
int32_t Start(
const CaptureCapability& capture_capability = CaptureCapability());
WebRtc_Word32 Stop();
int32_t Stop();
bool Started();
// Overrides the capture delay.
WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delay_ms);
int32_t SetCaptureDelay(int32_t delay_ms);
// Sets rotation of the incoming captured frame.
WebRtc_Word32 SetRotateCapturedFrames(const RotateCapturedFrame rotation);
int32_t SetRotateCapturedFrames(const RotateCapturedFrame rotation);
// Effect filter.
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
WebRtc_Word32 EnableDenoising(bool enable);
WebRtc_Word32 EnableDeflickering(bool enable);
WebRtc_Word32 EnableBrightnessAlarm(bool enable);
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
int32_t EnableDenoising(bool enable);
int32_t EnableDeflickering(bool enable);
int32_t EnableBrightnessAlarm(bool enable);
// Statistics observer.
WebRtc_Word32 RegisterObserver(ViECaptureObserver* observer);
WebRtc_Word32 DeRegisterObserver();
int32_t RegisterObserver(ViECaptureObserver* observer);
int32_t DeRegisterObserver();
bool IsObserverRegistered();
// Information.
@ -110,18 +110,18 @@ class ViECapturer
int engine_id,
ProcessThread& module_process_thread);
WebRtc_Word32 Init(VideoCaptureModule* capture_module);
WebRtc_Word32 Init(const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length);
int32_t Init(VideoCaptureModule* capture_module);
int32_t Init(const char* device_unique_idUTF8,
const uint32_t device_unique_idUTF8Length);
// Implements VideoCaptureDataCallback.
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
virtual void OnIncomingCapturedFrame(const int32_t id,
I420VideoFrame& video_frame);
virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id,
virtual void OnIncomingCapturedEncodedFrame(const int32_t capture_id,
VideoFrame& video_frame,
VideoCodecType codec_type);
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay);
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay);
bool EncoderActive();
@ -132,35 +132,32 @@ class ViECapturer
// Helper functions used for keeping track of the VideoProcessingModule.
// Creates the module if it is needed, returns 0 on success and guarantees
// that the image proc module exists.
WebRtc_Word32 IncImageProcRefCount();
WebRtc_Word32 DecImageProcRefCount();
int32_t IncImageProcRefCount();
int32_t DecImageProcRefCount();
// Implements VideoEncoder.
virtual WebRtc_Word32 Version(char* version,
WebRtc_Word32 length) const;
virtual WebRtc_Word32 InitEncode(const VideoCodec* codec_settings,
WebRtc_Word32 number_of_cores,
WebRtc_UWord32 max_payload_size);
virtual WebRtc_Word32 Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
virtual int32_t Version(char* version, int32_t length) const;
virtual int32_t InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
uint32_t max_payload_size);
virtual int32_t Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types);
virtual int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packet_loss,
int rtt);
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 new_bit_rate,
WebRtc_UWord32 frame_rate);
virtual int32_t Release();
virtual int32_t Reset();
virtual int32_t SetChannelParameters(uint32_t packet_loss, int rtt);
virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate);
// Implements VCMReceiveCallback.
// TODO(mflodman) Change input argument to pointer.
virtual WebRtc_Word32 FrameToRender(I420VideoFrame& video_frame); // NOLINT
virtual int32_t FrameToRender(I420VideoFrame& video_frame); // NOLINT
// Implements VideoCaptureFeedBack
virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frame_rate);
virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
virtual void OnCaptureFrameRate(const int32_t id,
const uint32_t frame_rate);
virtual void OnNoPictureAlarm(const int32_t id,
const VideoCaptureAlarm alarm);
// Thread functions for delivering captured frames to receivers.
@ -211,7 +208,7 @@ class ViECapturer
// The ViEEncoder we are encoding for.
ViEEncoder* vie_encoder_;
// ViEEncoder id we are encoding for.
WebRtc_Word32 vie_encoder_id_;
int32_t vie_encoder_id_;
// Used for decoding preencoded frames.
VideoCodingModule* vcm_;
EncodedVideoData decode_buffer_;

View File

@ -51,9 +51,9 @@ class ChannelStatsObserver : public CallStatsObserver {
ViEChannel* owner_;
};
ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
WebRtc_Word32 engine_id,
WebRtc_UWord32 number_of_cores,
ViEChannel::ViEChannel(int32_t channel_id,
int32_t engine_id,
uint32_t number_of_cores,
ProcessThread& module_process_thread,
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer,
@ -124,7 +124,7 @@ ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
vcm_.SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_);
}
WebRtc_Word32 ViEChannel::Init() {
int32_t ViEChannel::Init() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
engine_id_);
@ -229,8 +229,8 @@ ViEChannel::~ViEChannel() {
VideoCodingModule::Destroy(&vcm_);
}
WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
bool new_stream) {
int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
bool new_stream) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: codec_type: %d", __FUNCTION__, video_codec.codecType);
@ -265,8 +265,8 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
NACKMethod nack_method = rtp_rtcp_->NACK();
bool fec_enabled = false;
WebRtc_UWord8 payload_type_red;
WebRtc_UWord8 payload_type_fec;
uint8_t payload_type_red;
uint8_t payload_type_fec;
rtp_rtcp_->GenericFECStatus(fec_enabled, payload_type_red, payload_type_fec);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@ -333,7 +333,7 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
WebRtc_UWord8 idx = 0;
uint8_t idx = 0;
// Configure all simulcast modules.
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end();
@ -416,12 +416,12 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
return 0;
}
WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
// We will not receive simulcast streams, so no need to handle that use case.
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
WebRtc_Word8 old_pltype = -1;
int8_t old_pltype = -1;
if (rtp_rtcp_->ReceivePayloadType(video_codec, &old_pltype) != -1) {
rtp_rtcp_->DeRegisterReceivePayload(old_pltype);
}
@ -445,7 +445,7 @@ WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
return 0;
}
WebRtc_Word32 ViEChannel::GetReceiveCodec(VideoCodec* video_codec) {
int32_t ViEChannel::GetReceiveCodec(VideoCodec* video_codec) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -457,7 +457,7 @@ WebRtc_Word32 ViEChannel::GetReceiveCodec(VideoCodec* video_codec) {
return 0;
}
WebRtc_Word32 ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
int32_t ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (codec_observer_) {
@ -481,14 +481,14 @@ WebRtc_Word32 ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
return 0;
}
WebRtc_Word32 ViEChannel::RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
VideoDecoder* decoder,
bool buffered_rendering,
WebRtc_Word32 render_delay) {
int32_t ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
VideoDecoder* decoder,
bool buffered_rendering,
int32_t render_delay) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
WebRtc_Word32 result;
int32_t result;
result = vcm_.RegisterExternalDecoder(decoder, pl_type, buffered_rendering);
if (result != VCM_OK) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -499,13 +499,13 @@ WebRtc_Word32 ViEChannel::RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
return vcm_.SetRenderDelay(render_delay);
}
WebRtc_Word32 ViEChannel::DeRegisterExternalDecoder(
const WebRtc_UWord8 pl_type) {
int32_t ViEChannel::DeRegisterExternalDecoder(
const uint8_t pl_type) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s pl_type", __FUNCTION__, pl_type);
VideoCodec current_receive_codec;
WebRtc_Word32 result = 0;
int32_t result = 0;
result = vcm_.ReceiveCodec(&current_receive_codec);
if (vcm_.RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
return -1;
@ -518,8 +518,8 @@ WebRtc_Word32 ViEChannel::DeRegisterExternalDecoder(
return result;
}
WebRtc_Word32 ViEChannel::ReceiveCodecStatistics(
WebRtc_UWord32* num_key_frames, WebRtc_UWord32* num_delta_frames) {
int32_t ViEChannel::ReceiveCodecStatistics(
uint32_t* num_key_frames, uint32_t* num_delta_frames) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -534,7 +534,7 @@ WebRtc_Word32 ViEChannel::ReceiveCodecStatistics(
return 0;
}
WebRtc_UWord32 ViEChannel::DiscardedPackets() const {
uint32_t ViEChannel::DiscardedPackets() const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
return vcm_.DiscardedPackets();
@ -544,14 +544,14 @@ int ViEChannel::ReceiveDelay() const {
return vcm_.Delay();
}
WebRtc_Word32 ViEChannel::WaitForKeyFrame(bool wait) {
int32_t ViEChannel::WaitForKeyFrame(bool wait) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(wait: %d)", __FUNCTION__, wait);
wait_for_key_frame_ = wait;
return 0;
}
WebRtc_Word32 ViEChannel::SetSignalPacketLossStatus(bool enable,
int32_t ViEChannel::SetSignalPacketLossStatus(bool enable,
bool only_key_frames) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -578,7 +578,7 @@ WebRtc_Word32 ViEChannel::SetSignalPacketLossStatus(bool enable,
return 0;
}
WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
int32_t ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %d", __FUNCTION__, rtcp_mode);
@ -592,14 +592,14 @@ WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
return rtp_rtcp_->SetRTCPStatus(rtcp_mode);
}
WebRtc_Word32 ViEChannel::GetRTCPMode(RTCPMethod* rtcp_mode) {
int32_t ViEChannel::GetRTCPMode(RTCPMethod* rtcp_mode) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
*rtcp_mode = rtp_rtcp_->RTCP();
return 0;
}
WebRtc_Word32 ViEChannel::SetNACKStatus(const bool enable) {
int32_t ViEChannel::SetNACKStatus(const bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -624,7 +624,7 @@ WebRtc_Word32 ViEChannel::SetNACKStatus(const bool enable) {
return ProcessNACKRequest(enable);
}
WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -683,7 +683,7 @@ WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
return 0;
}
WebRtc_Word32 ViEChannel::SetFECStatus(const bool enable,
int32_t ViEChannel::SetFECStatus(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
// Disable possible NACK.
@ -694,7 +694,7 @@ WebRtc_Word32 ViEChannel::SetFECStatus(const bool enable,
return ProcessFECRequest(enable, payload_typeRED, payload_typeFEC);
}
WebRtc_Word32 ViEChannel::ProcessFECRequest(
int32_t ViEChannel::ProcessFECRequest(
const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
@ -719,7 +719,7 @@ WebRtc_Word32 ViEChannel::ProcessFECRequest(
return 0;
}
WebRtc_Word32 ViEChannel::SetHybridNACKFECStatus(
int32_t ViEChannel::SetHybridNACKFECStatus(
const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
@ -731,7 +731,7 @@ WebRtc_Word32 ViEChannel::SetHybridNACKFECStatus(
return -1;
}
WebRtc_Word32 ret_val = 0;
int32_t ret_val = 0;
ret_val = ProcessNACKRequest(enable);
if (ret_val < 0) {
return ret_val;
@ -801,7 +801,7 @@ int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
return target_delay_ms * 40 * 30 / 1000;
}
WebRtc_Word32 ViEChannel::SetKeyFrameRequestMethod(
int32_t ViEChannel::SetKeyFrameRequestMethod(
const KeyFrameRequestMethod method) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %d", __FUNCTION__, method);
@ -862,13 +862,13 @@ void ViEChannel::SetTransmissionSmoothingStatus(bool enable) {
paced_sender_->SetStatus(enable);
}
WebRtc_Word32 ViEChannel::EnableTMMBR(const bool enable) {
int32_t ViEChannel::EnableTMMBR(const bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %d", __FUNCTION__, enable);
return rtp_rtcp_->SetTMMBRStatus(enable);
}
WebRtc_Word32 ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
int32_t ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %d", __FUNCTION__, enable);
@ -882,7 +882,7 @@ WebRtc_Word32 ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
return 0;
}
WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC,
int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
const StreamType usage,
const uint8_t simulcast_idx) {
WEBRTC_TRACE(webrtc::kTraceInfo,
@ -910,7 +910,7 @@ WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC,
return rtp_rtcp->SetSSRC(SSRC);
}
WebRtc_Word32 ViEChannel::SetRemoteSSRCType(const StreamType usage,
int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
const uint32_t SSRC) const {
WEBRTC_TRACE(webrtc::kTraceInfo,
webrtc::kTraceVideo,
@ -922,7 +922,7 @@ WebRtc_Word32 ViEChannel::SetRemoteSSRCType(const StreamType usage,
}
// TODO(mflodman) Add kViEStreamTypeRtx.
WebRtc_Word32 ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -944,7 +944,7 @@ WebRtc_Word32 ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
return 0;
}
WebRtc_Word32 ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
int32_t ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -952,7 +952,7 @@ WebRtc_Word32 ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
return 0;
}
WebRtc_Word32 ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
int32_t ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -961,7 +961,7 @@ WebRtc_Word32 ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
int num_csrcs = rtp_rtcp_->RemoteCSRCs(arrayCSRC);
if (num_csrcs > 0) {
memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(WebRtc_UWord32));
memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(uint32_t));
for (int idx = 0; idx < num_csrcs; idx++) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"\tCSRC[%d] = %lu", idx, CSRCs[idx]);
@ -973,8 +973,8 @@ WebRtc_Word32 ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
return 0;
}
WebRtc_Word32 ViEChannel::SetStartSequenceNumber(
WebRtc_UWord16 sequence_number) {
int32_t ViEChannel::SetStartSequenceNumber(
uint16_t sequence_number) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -986,7 +986,7 @@ WebRtc_Word32 ViEChannel::SetStartSequenceNumber(
return rtp_rtcp_->SetSequenceNumber(sequence_number);
}
WebRtc_Word32 ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
int32_t ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
if (rtp_rtcp_->Sending()) {
@ -997,21 +997,21 @@ WebRtc_Word32 ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
return rtp_rtcp_->SetCNAME(rtcp_cname);
}
WebRtc_Word32 ViEChannel::GetRTCPCName(char rtcp_cname[]) {
int32_t ViEChannel::GetRTCPCName(char rtcp_cname[]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return rtp_rtcp_->CNAME(rtcp_cname);
}
WebRtc_Word32 ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
WebRtc_UWord32 remoteSSRC = rtp_rtcp_->RemoteSSRC();
uint32_t remoteSSRC = rtp_rtcp_->RemoteSSRC();
return rtp_rtcp_->RemoteCNAME(remoteSSRC, rtcp_cname);
}
WebRtc_Word32 ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
int32_t ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (rtp_observer_) {
@ -1035,7 +1035,7 @@ WebRtc_Word32 ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
return 0;
}
WebRtc_Word32 ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
int32_t ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (rtcp_observer_) {
@ -1059,11 +1059,11 @@ WebRtc_Word32 ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
return 0;
}
WebRtc_Word32 ViEChannel::SendApplicationDefinedRTCPPacket(
const WebRtc_UWord8 sub_type,
WebRtc_UWord32 name,
const WebRtc_UWord8* data,
WebRtc_UWord16 data_length_in_bytes) {
int32_t ViEChannel::SendApplicationDefinedRTCPPacket(
const uint8_t sub_type,
uint32_t name,
const uint8_t* data,
uint16_t data_length_in_bytes) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (!rtp_rtcp_->Sending()) {
@ -1097,7 +1097,7 @@ WebRtc_Word32 ViEChannel::SendApplicationDefinedRTCPPacket(
return 0;
}
WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
@ -1146,8 +1146,8 @@ WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
*extended_max = statistics->extendedHighSeqNum;
*jitter_samples = statistics->jitter;
WebRtc_UWord16 dummy;
WebRtc_UWord16 rtt = 0;
uint16_t dummy;
uint16_t rtt = 0;
if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not get RTT", __FUNCTION__);
@ -1157,7 +1157,7 @@ WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
return 0;
}
WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
@ -1165,7 +1165,7 @@ WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
WebRtc_UWord8 frac_lost = 0;
uint8_t frac_lost = 0;
if (rtp_rtcp_->StatisticsRTP(&frac_lost, cumulative_lost, extended_max,
jitter_samples) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1185,7 +1185,7 @@ WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
return 0;
}
WebRtc_Word32 ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
@ -1243,7 +1243,7 @@ void ViEChannel::GetEstimatedReceiveBandwidth(
vie_receiver_.EstimatedReceiveBandwidth(estimated_bandwidth);
}
WebRtc_Word32 ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
int32_t ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
RTPDirections direction) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1261,7 +1261,7 @@ WebRtc_Word32 ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
}
}
WebRtc_Word32 ViEChannel::StopRTPDump(RTPDirections direction) {
int32_t ViEChannel::StopRTPDump(RTPDirections direction) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -1278,7 +1278,7 @@ WebRtc_Word32 ViEChannel::StopRTPDump(RTPDirections direction) {
}
}
WebRtc_Word32 ViEChannel::StartSend() {
int32_t ViEChannel::StartSend() {
CriticalSectionScoped cs(callback_cs_.get());
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -1312,7 +1312,7 @@ WebRtc_Word32 ViEChannel::StartSend() {
return 0;
}
WebRtc_Word32 ViEChannel::StopSend() {
int32_t ViEChannel::StopSend() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1351,7 +1351,7 @@ bool ViEChannel::Sending() {
return rtp_rtcp_->Sending();
}
WebRtc_Word32 ViEChannel::StartReceive() {
int32_t ViEChannel::StartReceive() {
CriticalSectionScoped cs(callback_cs_.get());
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1367,7 +1367,7 @@ WebRtc_Word32 ViEChannel::StartReceive() {
return 0;
}
WebRtc_Word32 ViEChannel::StopReceive() {
int32_t ViEChannel::StopReceive() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1377,7 +1377,7 @@ WebRtc_Word32 ViEChannel::StopReceive() {
return 0;
}
WebRtc_Word32 ViEChannel::RegisterSendTransport(Transport* transport) {
int32_t ViEChannel::RegisterSendTransport(Transport* transport) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1402,7 +1402,7 @@ WebRtc_Word32 ViEChannel::RegisterSendTransport(Transport* transport) {
return 0;
}
WebRtc_Word32 ViEChannel::DeregisterSendTransport() {
int32_t ViEChannel::DeregisterSendTransport() {
CriticalSectionScoped cs(callback_cs_.get());
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1422,8 +1422,8 @@ WebRtc_Word32 ViEChannel::DeregisterSendTransport() {
return 0;
}
WebRtc_Word32 ViEChannel::ReceivedRTPPacket(
const void* rtp_packet, const WebRtc_Word32 rtp_packet_length) {
int32_t ViEChannel::ReceivedRTPPacket(
const void* rtp_packet, const int32_t rtp_packet_length) {
{
CriticalSectionScoped cs(callback_cs_.get());
if (!external_transport_) {
@ -1433,8 +1433,8 @@ WebRtc_Word32 ViEChannel::ReceivedRTPPacket(
return vie_receiver_.ReceivedRTPPacket(rtp_packet, rtp_packet_length);
}
WebRtc_Word32 ViEChannel::ReceivedRTCPPacket(
const void* rtcp_packet, const WebRtc_Word32 rtcp_packet_length) {
int32_t ViEChannel::ReceivedRTCPPacket(
const void* rtcp_packet, const int32_t rtcp_packet_length) {
{
CriticalSectionScoped cs(callback_cs_.get());
if (!external_transport_) {
@ -1444,7 +1444,7 @@ WebRtc_Word32 ViEChannel::ReceivedRTCPPacket(
return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
}
WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu) {
int32_t ViEChannel::SetMTU(uint16_t mtu) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (rtp_rtcp_->SetMaxTransferUnit(mtu) != 0) {
@ -1462,18 +1462,18 @@ WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu) {
return 0;
}
WebRtc_UWord16 ViEChannel::MaxDataPayloadLength() const {
uint16_t ViEChannel::MaxDataPayloadLength() const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
return rtp_rtcp_->MaxDataPayloadLength();
}
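As the header comments note, the MTU set via SetMTU() includes the IP, UDP and RTP headers, so MaxDataPayloadLength() is roughly the MTU minus those fixed headers. A back-of-the-envelope sketch, assuming IPv4, no RTP header extensions, no CSRC list and no SRTP tag (each of which would shrink the budget further):

#include <stdint.h>
// Rough RTP payload budget for a plain IPv4/UDP/RTP packet.
static uint16_t ApproxMaxRtpPayload(uint16_t mtu) {
  const uint16_t kIpv4Header = 20;
  const uint16_t kUdpHeader = 8;
  const uint16_t kRtpHeader = 12;
  return mtu - kIpv4Header - kUdpHeader - kRtpHeader;  // 1500 -> 1460 bytes
}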
WebRtc_Word32 ViEChannel::SetPacketTimeoutNotification(
bool enable, WebRtc_UWord32 timeout_seconds) {
int32_t ViEChannel::SetPacketTimeoutNotification(
bool enable, uint32_t timeout_seconds) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (enable) {
WebRtc_UWord32 timeout_ms = 1000 * timeout_seconds;
uint32_t timeout_ms = 1000 * timeout_seconds;
if (rtp_rtcp_->SetPacketTimeout(timeout_ms, 0) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
@ -1489,7 +1489,7 @@ WebRtc_Word32 ViEChannel::SetPacketTimeoutNotification(
return 0;
}
WebRtc_Word32 ViEChannel::RegisterNetworkObserver(
int32_t ViEChannel::RegisterNetworkObserver(
ViENetworkObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
@ -1519,8 +1519,8 @@ bool ViEChannel::NetworkObserverRegistered() {
return networkObserver_ != NULL;
}
WebRtc_Word32 ViEChannel::SetPeriodicDeadOrAliveStatus(
const bool enable, const WebRtc_UWord32 sample_time_seconds) {
int32_t ViEChannel::SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1532,13 +1532,13 @@ WebRtc_Word32 ViEChannel::SetPeriodicDeadOrAliveStatus(
}
bool enabled = false;
WebRtc_UWord8 current_sampletime_seconds = 0;
uint8_t current_sampletime_seconds = 0;
// Get old settings.
rtp_rtcp_->PeriodicDeadOrAliveStatus(enabled, current_sampletime_seconds);
// Set new settings.
if (rtp_rtcp_->SetPeriodicDeadOrAliveStatus(
enable, static_cast<WebRtc_UWord8>(sample_time_seconds)) != 0) {
enable, static_cast<uint8_t>(sample_time_seconds)) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not set periodic dead-or-alive status",
__FUNCTION__);
@ -1554,7 +1554,7 @@ WebRtc_Word32 ViEChannel::SetPeriodicDeadOrAliveStatus(
return 0;
}
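Note that SetPeriodicDeadOrAliveStatus() narrows the uint32_t sample time to uint8_t before handing it to the RTP module, so values above 255 seconds would wrap. A purely illustrative caller-side clamp (not part of this API):

#include <stdint.h>
// Clamp ahead of the narrowing cast; 255 is simply the largest value the
// uint8_t parameter of the RTP module can carry.
static uint8_t ClampSampleTimeSeconds(uint32_t sample_time_seconds) {
  return sample_time_seconds > 255u ? static_cast<uint8_t>(255u)
                                    : static_cast<uint8_t>(sample_time_seconds);
}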
WebRtc_Word32 ViEChannel::EnableColorEnhancement(bool enable) {
int32_t ViEChannel::EnableColorEnhancement(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(enable: %d)", __FUNCTION__, enable);
@ -1571,7 +1571,7 @@ CallStatsObserver* ViEChannel::GetStatsObserver() {
return stats_observer_.get();
}
WebRtc_Word32 ViEChannel::FrameToRender(
int32_t ViEChannel::FrameToRender(
I420VideoFrame& video_frame) { // NOLINT
CriticalSectionScoped cs(callback_cs_.get());
@ -1612,8 +1612,8 @@ WebRtc_Word32 ViEChannel::FrameToRender(
// Record videoframe.
file_recorder_.RecordVideoFrame(video_frame);
WebRtc_UWord32 arr_ofCSRC[kRtpCsrcSize];
WebRtc_Word32 no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
uint32_t arr_ofCSRC[kRtpCsrcSize];
int32_t no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
if (no_of_csrcs <= 0) {
arr_ofCSRC[0] = rtp_rtcp_->RemoteSSRC();
no_of_csrcs = 1;
@ -1624,18 +1624,18 @@ WebRtc_Word32 ViEChannel::FrameToRender(
return 0;
}
WebRtc_Word32 ViEChannel::ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 picture_id) {
int32_t ViEChannel::ReceivedDecodedReferenceFrame(
const uint64_t picture_id) {
return rtp_rtcp_->SendRTCPReferencePictureSelection(picture_id);
}
WebRtc_Word32 ViEChannel::StoreReceivedFrame(
int32_t ViEChannel::StoreReceivedFrame(
const EncodedVideoData& frame_to_store) {
return 0;
}
WebRtc_Word32 ViEChannel::ReceiveStatistics(const WebRtc_UWord32 bit_rate,
const WebRtc_UWord32 frame_rate) {
int32_t ViEChannel::ReceiveStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1646,7 +1646,7 @@ WebRtc_Word32 ViEChannel::ReceiveStatistics(const WebRtc_UWord32 bit_rate,
return 0;
}
WebRtc_Word32 ViEChannel::RequestKeyFrame() {
int32_t ViEChannel::RequestKeyFrame() {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
{
@ -1658,13 +1658,13 @@ WebRtc_Word32 ViEChannel::RequestKeyFrame() {
return rtp_rtcp_->RequestKeyFrame();
}
WebRtc_Word32 ViEChannel::SliceLossIndicationRequest(
const WebRtc_UWord64 picture_id) {
return rtp_rtcp_->SendRTCPSliceLossIndication((WebRtc_UWord8) picture_id);
int32_t ViEChannel::SliceLossIndicationRequest(
const uint64_t picture_id) {
return rtp_rtcp_->SendRTCPSliceLossIndication((uint8_t) picture_id);
}
WebRtc_Word32 ViEChannel::ResendPackets(const WebRtc_UWord16* sequence_numbers,
WebRtc_UWord16 length) {
int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
uint16_t length) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(length: %d)", __FUNCTION__, length);
return rtp_rtcp_->SendNACK(sequence_numbers, length);
@ -1685,7 +1685,7 @@ void ViEChannel::OnRttUpdate(uint32_t rtt) {
rtp_rtcp_->SetRtt(rtt);
}
WebRtc_Word32 ViEChannel::StartDecodeThread() {
int32_t ViEChannel::StartDecodeThread() {
// Start the decode thread
if (decode_thread_) {
// Already started.
@ -1714,7 +1714,7 @@ WebRtc_Word32 ViEChannel::StartDecodeThread() {
return 0;
}
WebRtc_Word32 ViEChannel::StopDecodeThread() {
int32_t ViEChannel::StopDecodeThread() {
if (!decode_thread_) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: decode thread not running", __FUNCTION__);
@ -1734,7 +1734,7 @@ WebRtc_Word32 ViEChannel::StopDecodeThread() {
return 0;
}
WebRtc_Word32 ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
int32_t ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1756,7 +1756,7 @@ WebRtc_Word32 ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
return 0;
}
WebRtc_Word32 ViEChannel::DeRegisterExternalEncryption() {
int32_t ViEChannel::DeRegisterExternalEncryption() {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1776,7 +1776,7 @@ WebRtc_Word32 ViEChannel::DeRegisterExternalEncryption() {
return 0;
}
WebRtc_Word32 ViEChannel::SetVoiceChannel(WebRtc_Word32 ve_channel_id,
int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
VoEVideoSync* ve_sync_interface) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s, audio channel %d, video channel %d", __FUNCTION__,
@ -1792,11 +1792,11 @@ WebRtc_Word32 ViEChannel::SetVoiceChannel(WebRtc_Word32 ve_channel_id,
rtp_rtcp_.get());
}
WebRtc_Word32 ViEChannel::VoiceChannel() {
int32_t ViEChannel::VoiceChannel() {
return vie_sync_.VoiceChannel();
}
WebRtc_Word32 ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
int32_t ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
if (!effect_filter) {
if (!effect_filter_) {
@ -1834,11 +1834,11 @@ void ViEChannel::ReleaseIncomingFileRecorder() {
vcm_.RegisterFrameStorageCallback(NULL);
}
void ViEChannel::OnApplicationDataReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 sub_type,
const WebRtc_UWord32 name,
const WebRtc_UWord16 length,
const WebRtc_UWord8* data) {
void ViEChannel::OnApplicationDataReceived(const int32_t id,
const uint8_t sub_type,
const uint32_t name,
const uint16_t length,
const uint8_t* data) {
if (channel_id_ != ChannelId(id)) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s, incorrect id", __FUNCTION__, id);
@ -1854,8 +1854,8 @@ void ViEChannel::OnApplicationDataReceived(const WebRtc_Word32 id,
}
}
void ViEChannel::OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC,
void ViEChannel::OnSendReportReceived(const int32_t id,
const uint32_t senderSSRC,
uint32_t ntp_secs,
uint32_t ntp_frac,
uint32_t timestamp) {
@ -1863,13 +1863,13 @@ void ViEChannel::OnSendReportReceived(const WebRtc_Word32 id,
timestamp);
}
WebRtc_Word32 ViEChannel::OnInitializeDecoder(
const WebRtc_Word32 id,
const WebRtc_Word8 payload_type,
int32_t ViEChannel::OnInitializeDecoder(
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int frequency,
const WebRtc_UWord8 channels,
const WebRtc_UWord32 rate) {
const uint8_t channels,
const uint32_t rate) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: payload_type %d, payload_name %s", __FUNCTION__,
payload_type, payload_name);
@ -1881,7 +1881,7 @@ WebRtc_Word32 ViEChannel::OnInitializeDecoder(
return 0;
}
void ViEChannel::OnPacketTimeout(const WebRtc_Word32 id) {
void ViEChannel::OnPacketTimeout(const int32_t id) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -1893,7 +1893,7 @@ void ViEChannel::OnPacketTimeout(const WebRtc_Word32 id) {
}
}
void ViEChannel::OnReceivedPacket(const WebRtc_Word32 id,
void ViEChannel::OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
@ -1909,7 +1909,7 @@ void ViEChannel::OnReceivedPacket(const WebRtc_Word32 id,
}
}
void ViEChannel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
void ViEChannel::OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive) {
assert(ChannelId(id) == channel_id_);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1927,8 +1927,8 @@ void ViEChannel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
return;
}
void ViEChannel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 SSRC) {
void ViEChannel::OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC) {
if (channel_id_ != ChannelId(id)) {
assert(false);
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -1947,8 +1947,8 @@ void ViEChannel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
}
}
void ViEChannel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 CSRC,
void ViEChannel::OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: %u added: %d", __FUNCTION__, CSRC, added);

View File

@ -61,9 +61,9 @@ class ViEChannel
public:
friend class ChannelStatsObserver;
ViEChannel(WebRtc_Word32 channel_id,
WebRtc_Word32 engine_id,
WebRtc_UWord32 number_of_cores,
ViEChannel(int32_t channel_id,
int32_t engine_id,
uint32_t number_of_cores,
ProcessThread& module_process_thread,
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer,
@ -74,211 +74,207 @@ class ViEChannel
bool sender);
~ViEChannel();
WebRtc_Word32 Init();
int32_t Init();
// Sets the encoder to use for the channel. |new_stream| indicates the encoder
// type has changed and we should start a new RTP stream.
WebRtc_Word32 SetSendCodec(const VideoCodec& video_codec,
bool new_stream = true);
WebRtc_Word32 SetReceiveCodec(const VideoCodec& video_codec);
WebRtc_Word32 GetReceiveCodec(VideoCodec* video_codec);
WebRtc_Word32 RegisterCodecObserver(ViEDecoderObserver* observer);
int32_t SetSendCodec(const VideoCodec& video_codec, bool new_stream = true);
int32_t SetReceiveCodec(const VideoCodec& video_codec);
int32_t GetReceiveCodec(VideoCodec* video_codec);
int32_t RegisterCodecObserver(ViEDecoderObserver* observer);
// Registers an external decoder. |buffered_rendering| means that the decoder
// will render frames after decoding according to the render timestamp
// provided by the video coding module. |render_delay| indicates the time
// needed to decode and render a frame.
WebRtc_Word32 RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
VideoDecoder* decoder,
bool buffered_rendering,
WebRtc_Word32 render_delay);
WebRtc_Word32 DeRegisterExternalDecoder(const WebRtc_UWord8 pl_type);
WebRtc_Word32 ReceiveCodecStatistics(WebRtc_UWord32* num_key_frames,
WebRtc_UWord32* num_delta_frames);
WebRtc_UWord32 DiscardedPackets() const;
int32_t RegisterExternalDecoder(const uint8_t pl_type,
VideoDecoder* decoder,
bool buffered_rendering,
int32_t render_delay);
int32_t DeRegisterExternalDecoder(const uint8_t pl_type);
int32_t ReceiveCodecStatistics(uint32_t* num_key_frames,
uint32_t* num_delta_frames);
uint32_t DiscardedPackets() const;
// Returns the estimated delay in milliseconds.
int ReceiveDelay() const;
// Only affects calls to SetReceiveCodec done after this call.
WebRtc_Word32 WaitForKeyFrame(bool wait);
int32_t WaitForKeyFrame(bool wait);
// If enabled, a key frame request will be sent as soon as there are lost
// packets. If |only_key_frames| is set, requests are only sent for loss in
// key frames.
WebRtc_Word32 SetSignalPacketLossStatus(bool enable, bool only_key_frames);
int32_t SetSignalPacketLossStatus(bool enable, bool only_key_frames);
WebRtc_Word32 SetRTCPMode(const RTCPMethod rtcp_mode);
WebRtc_Word32 GetRTCPMode(RTCPMethod* rtcp_mode);
WebRtc_Word32 SetNACKStatus(const bool enable);
WebRtc_Word32 SetFECStatus(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
WebRtc_Word32 SetHybridNACKFECStatus(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
int32_t SetRTCPMode(const RTCPMethod rtcp_mode);
int32_t GetRTCPMode(RTCPMethod* rtcp_mode);
int32_t SetNACKStatus(const bool enable);
int32_t SetFECStatus(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
int32_t SetHybridNACKFECStatus(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
int SetSenderBufferingMode(int target_delay_ms);
int SetReceiverBufferingMode(int target_delay_ms);
WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
int32_t SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
bool EnableRemb(bool enable);
int SetSendTimestampOffsetStatus(bool enable, int id);
int SetReceiveTimestampOffsetStatus(bool enable, int id);
void SetTransmissionSmoothingStatus(bool enable);
WebRtc_Word32 EnableTMMBR(const bool enable);
WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable);
int32_t EnableTMMBR(const bool enable);
int32_t EnableKeyFrameRequestCallback(const bool enable);
// Sets SSRC for outgoing stream.
WebRtc_Word32 SetSSRC(const uint32_t SSRC,
const StreamType usage,
const unsigned char simulcast_idx);
int32_t SetSSRC(const uint32_t SSRC,
const StreamType usage,
const unsigned char simulcast_idx);
// Gets SSRC for outgoing stream number |idx|.
WebRtc_Word32 GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
int32_t GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
// Gets SSRC for the incoming stream.
WebRtc_Word32 GetRemoteSSRC(uint32_t* ssrc);
int32_t GetRemoteSSRC(uint32_t* ssrc);
// Gets the CSRC for the incoming stream.
WebRtc_Word32 GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]);
int32_t GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]);
// Sets the starting sequence number; must be called before StartSend.
WebRtc_Word32 SetStartSequenceNumber(WebRtc_UWord16 sequence_number);
int32_t SetStartSequenceNumber(uint16_t sequence_number);
// Sets the CName for the outgoing stream on the channel.
WebRtc_Word32 SetRTCPCName(const char rtcp_cname[]);
int32_t SetRTCPCName(const char rtcp_cname[]);
// Gets the CName for the outgoing stream on the channel.
WebRtc_Word32 GetRTCPCName(char rtcp_cname[]);
int32_t GetRTCPCName(char rtcp_cname[]);
// Gets the CName of the incoming stream.
WebRtc_Word32 GetRemoteRTCPCName(char rtcp_cname[]);
WebRtc_Word32 RegisterRtpObserver(ViERTPObserver* observer);
WebRtc_Word32 RegisterRtcpObserver(ViERTCPObserver* observer);
WebRtc_Word32 SendApplicationDefinedRTCPPacket(
const WebRtc_UWord8 sub_type,
WebRtc_UWord32 name,
const WebRtc_UWord8* data,
WebRtc_UWord16 data_length_in_bytes);
int32_t GetRemoteRTCPCName(char rtcp_cname[]);
int32_t RegisterRtpObserver(ViERTPObserver* observer);
int32_t RegisterRtcpObserver(ViERTCPObserver* observer);
int32_t SendApplicationDefinedRTCPPacket(
const uint8_t sub_type,
uint32_t name,
const uint8_t* data,
uint16_t data_length_in_bytes);
// Returns statistics reported by the remote client in an RTCP packet.
WebRtc_Word32 GetSendRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms);
int32_t GetSendRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms);
// Returns our locally created statistics of the received RTP stream.
WebRtc_Word32 GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms);
int32_t GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* cumulative_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms);
// Gets sent/received packets statistics.
WebRtc_Word32 GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const;
int32_t GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const;
void GetBandwidthUsage(uint32_t* total_bitrate_sent,
uint32_t* video_bitrate_sent,
uint32_t* fec_bitrate_sent,
uint32_t* nackBitrateSent) const;
void GetEstimatedReceiveBandwidth(uint32_t* estimated_bandwidth) const;
WebRtc_Word32 StartRTPDump(const char file_nameUTF8[1024],
RTPDirections direction);
WebRtc_Word32 StopRTPDump(RTPDirections direction);
int32_t StartRTPDump(const char file_nameUTF8[1024],
RTPDirections direction);
int32_t StopRTPDump(RTPDirections direction);
// Implements RtcpFeedback.
// TODO(pwestin) Deprecate this functionality.
virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 sub_type,
const WebRtc_UWord32 name,
const WebRtc_UWord16 length,
const WebRtc_UWord8* data);
virtual void OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC,
virtual void OnApplicationDataReceived(const int32_t id,
const uint8_t sub_type,
const uint32_t name,
const uint16_t length,
const uint8_t* data);
virtual void OnSendReportReceived(const int32_t id,
const uint32_t senderSSRC,
uint32_t ntp_secs,
uint32_t ntp_frac,
uint32_t timestamp);
// Implements RtpFeedback.
virtual WebRtc_Word32 OnInitializeDecoder(
const WebRtc_Word32 id,
const WebRtc_Word8 payload_type,
virtual int32_t OnInitializeDecoder(
const int32_t id,
const int8_t payload_type,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int frequency,
const WebRtc_UWord8 channels,
const WebRtc_UWord32 rate);
virtual void OnPacketTimeout(const WebRtc_Word32 id);
virtual void OnReceivedPacket(const WebRtc_Word32 id,
const uint8_t channels,
const uint32_t rate);
virtual void OnPacketTimeout(const int32_t id);
virtual void OnReceivedPacket(const int32_t id,
const RtpRtcpPacketType packet_type);
virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
virtual void OnPeriodicDeadOrAlive(const int32_t id,
const RTPAliveType alive);
virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 SSRC);
virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 CSRC,
virtual void OnIncomingSSRCChanged(const int32_t id,
const uint32_t SSRC);
virtual void OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added);
WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtp_port,
const WebRtc_UWord16 rtcp_port,
const char* ip_address);
WebRtc_Word32 GetLocalReceiver(WebRtc_UWord16* rtp_port,
WebRtc_UWord16* rtcp_port,
char* ip_address) const;
WebRtc_Word32 SetSendDestination(const char* ip_address,
const WebRtc_UWord16 rtp_port,
const WebRtc_UWord16 rtcp_port,
const WebRtc_UWord16 source_rtp_port,
const WebRtc_UWord16 source_rtcp_port);
WebRtc_Word32 GetSendDestination(char* ip_address,
WebRtc_UWord16* rtp_port,
WebRtc_UWord16* rtcp_port,
WebRtc_UWord16* source_rtp_port,
WebRtc_UWord16* source_rtcp_port) const;
WebRtc_Word32 GetSourceInfo(WebRtc_UWord16* rtp_port,
WebRtc_UWord16* rtcp_port,
char* ip_address,
WebRtc_UWord32 ip_address_length);
int32_t SetLocalReceiver(const uint16_t rtp_port,
const uint16_t rtcp_port,
const char* ip_address);
int32_t GetLocalReceiver(uint16_t* rtp_port,
uint16_t* rtcp_port,
char* ip_address) const;
int32_t SetSendDestination(const char* ip_address,
const uint16_t rtp_port,
const uint16_t rtcp_port,
const uint16_t source_rtp_port,
const uint16_t source_rtcp_port);
int32_t GetSendDestination(char* ip_address,
uint16_t* rtp_port,
uint16_t* rtcp_port,
uint16_t* source_rtp_port,
uint16_t* source_rtcp_port) const;
int32_t GetSourceInfo(uint16_t* rtp_port,
uint16_t* rtcp_port,
char* ip_address,
uint32_t ip_address_length);
WebRtc_Word32 SetRemoteSSRCType(const StreamType usage,
const uint32_t SSRC) const;
int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC) const;
WebRtc_Word32 StartSend();
WebRtc_Word32 StopSend();
int32_t StartSend();
int32_t StopSend();
bool Sending();
WebRtc_Word32 StartReceive();
WebRtc_Word32 StopReceive();
int32_t StartReceive();
int32_t StopReceive();
WebRtc_Word32 RegisterSendTransport(Transport* transport);
WebRtc_Word32 DeregisterSendTransport();
int32_t RegisterSendTransport(Transport* transport);
int32_t DeregisterSendTransport();
// Incoming packet from external transport.
WebRtc_Word32 ReceivedRTPPacket(const void* rtp_packet,
const WebRtc_Word32 rtp_packet_length);
int32_t ReceivedRTPPacket(const void* rtp_packet,
const int32_t rtp_packet_length);
// Incoming packet from external transport.
WebRtc_Word32 ReceivedRTCPPacket(const void* rtcp_packet,
const WebRtc_Word32 rtcp_packet_length);
int32_t ReceivedRTCPPacket(const void* rtcp_packet,
const int32_t rtcp_packet_length);
// Sets the maximum transfer unit size for the network link, i.e. including
// IP, UDP and RTP headers.
WebRtc_Word32 SetMTU(WebRtc_UWord16 mtu);
int32_t SetMTU(uint16_t mtu);
// Returns maximum allowed payload size, i.e. the maximum allowed size of
// encoded data in each packet.
WebRtc_UWord16 MaxDataPayloadLength() const;
WebRtc_Word32 SetMaxPacketBurstSize(WebRtc_UWord16 max_number_of_packets);
WebRtc_Word32 SetPacketBurstSpreadState(bool enable,
const WebRtc_UWord16 frame_periodMS);
uint16_t MaxDataPayloadLength() const;
int32_t SetMaxPacketBurstSize(uint16_t max_number_of_packets);
int32_t SetPacketBurstSpreadState(bool enable, const uint16_t frame_periodMS);
WebRtc_Word32 SetPacketTimeoutNotification(bool enable,
WebRtc_UWord32 timeout_seconds);
WebRtc_Word32 RegisterNetworkObserver(ViENetworkObserver* observer);
int32_t SetPacketTimeoutNotification(bool enable, uint32_t timeout_seconds);
int32_t RegisterNetworkObserver(ViENetworkObserver* observer);
bool NetworkObserverRegistered();
WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
const bool enable, const WebRtc_UWord32 sample_time_seconds);
int32_t SetPeriodicDeadOrAliveStatus(
const bool enable, const uint32_t sample_time_seconds);
WebRtc_Word32 EnableColorEnhancement(bool enable);
int32_t EnableColorEnhancement(bool enable);
// Gets the modules used by the channel.
RtpRtcp* rtp_rtcp();
@ -286,42 +282,42 @@ class ViEChannel
CallStatsObserver* GetStatsObserver();
// Implements VCMReceiveCallback.
virtual WebRtc_Word32 FrameToRender(I420VideoFrame& video_frame); // NOLINT
virtual int32_t FrameToRender(I420VideoFrame& video_frame); // NOLINT
// Implements VCMReceiveCallback.
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 picture_id);
virtual int32_t ReceivedDecodedReferenceFrame(
const uint64_t picture_id);
// Implements VCM.
virtual WebRtc_Word32 StoreReceivedFrame(
virtual int32_t StoreReceivedFrame(
const EncodedVideoData& frame_to_store);
// Implements VideoReceiveStatisticsCallback.
virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bit_rate,
const WebRtc_UWord32 frame_rate);
virtual int32_t ReceiveStatistics(const uint32_t bit_rate,
const uint32_t frame_rate);
// Implements VideoFrameTypeCallback.
virtual WebRtc_Word32 RequestKeyFrame();
virtual int32_t RequestKeyFrame();
// Implements VideoFrameTypeCallback.
virtual WebRtc_Word32 SliceLossIndicationRequest(
const WebRtc_UWord64 picture_id);
virtual int32_t SliceLossIndicationRequest(
const uint64_t picture_id);
// Implements VideoPacketRequestCallback.
virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequence_numbers,
WebRtc_UWord16 length);
virtual int32_t ResendPackets(const uint16_t* sequence_numbers,
uint16_t length);
WebRtc_Word32 RegisterExternalEncryption(Encryption* encryption);
WebRtc_Word32 DeRegisterExternalEncryption();
int32_t RegisterExternalEncryption(Encryption* encryption);
int32_t DeRegisterExternalEncryption();
WebRtc_Word32 SetVoiceChannel(WebRtc_Word32 ve_channel_id,
VoEVideoSync* ve_sync_interface);
WebRtc_Word32 VoiceChannel();
int32_t SetVoiceChannel(int32_t ve_channel_id,
VoEVideoSync* ve_sync_interface);
int32_t VoiceChannel();
// Implements ViEFrameProviderBase.
virtual int FrameCallbackChanged() {return -1;}
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
ViEFileRecorder& GetIncomingFileRecorder();
void ReleaseIncomingFileRecorder();
@ -334,20 +330,20 @@ class ViEChannel
private:
// Assumed to be protected.
WebRtc_Word32 StartDecodeThread();
WebRtc_Word32 StopDecodeThread();
int32_t StartDecodeThread();
int32_t StopDecodeThread();
WebRtc_Word32 ProcessNACKRequest(const bool enable);
WebRtc_Word32 ProcessFECRequest(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
int32_t ProcessNACKRequest(const bool enable);
int32_t ProcessFECRequest(const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC);
// Compute NACK list parameters for the buffering mode.
int GetRequiredNackListSize(int target_delay_ms);
WebRtc_Word32 channel_id_;
WebRtc_Word32 engine_id_;
WebRtc_UWord32 number_of_cores_;
WebRtc_UWord8 num_socket_threads_;
int32_t channel_id_;
int32_t engine_id_;
uint32_t number_of_cores_;
uint8_t num_socket_threads_;
// Used for all registered callbacks except rendering.
scoped_ptr<CriticalSectionWrapper> callback_cs_;

View File

@ -49,7 +49,7 @@ int ViECodecImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViECodec released too many times");
@ -511,7 +511,7 @@ int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
return vie_encoder->CodecTargetBitrate(static_cast<WebRtc_UWord32*>(bitrate));
return vie_encoder->CodecTargetBitrate(static_cast<uint32_t*>(bitrate));
}
unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {

View File

@ -43,9 +43,9 @@ class QMVideoSettingsCallback : public VCMQMSettingsCallback {
~QMVideoSettingsCallback();
// Update VPM with QM (quality modes: frame size & frame rate) settings.
WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frame_rate,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height);
int32_t SetVideoQMSettings(const uint32_t frame_rate,
const uint32_t width,
const uint32_t height);
private:
VideoProcessingModule* vpm_;
@ -82,9 +82,9 @@ class ViEPacedSenderCallback : public PacedSender::Callback {
ViEEncoder* owner_;
};
ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
WebRtc_UWord32 number_of_cores,
ViEEncoder::ViEEncoder(int32_t engine_id,
int32_t channel_id,
uint32_t number_of_cores,
ProcessThread& module_process_thread,
BitrateController* bitrate_controller)
: engine_id_(engine_id),
@ -266,7 +266,7 @@ void ViEEncoder::Restart() {
encoder_paused_ = false;
}
WebRtc_Word32 ViEEncoder::DropDeltaAfterKey(bool enable) {
int32_t ViEEncoder::DropDeltaAfterKey(bool enable) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(%d)", __FUNCTION__, enable);
@ -287,12 +287,11 @@ WebRtc_Word32 ViEEncoder::DropDeltaAfterKey(bool enable) {
return 0;
}
WebRtc_UWord8 ViEEncoder::NumberOfCodecs() {
uint8_t ViEEncoder::NumberOfCodecs() {
return vcm_.NumberOfCodecs();
}
WebRtc_Word32 ViEEncoder::GetCodec(WebRtc_UWord8 list_index,
VideoCodec* video_codec) {
int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
if (vcm_.Codec(list_index, video_codec) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: Could not get codec",
@ -302,9 +301,9 @@ WebRtc_Word32 ViEEncoder::GetCodec(WebRtc_UWord8 list_index,
return 0;
}
WebRtc_Word32 ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
WebRtc_UWord8 pl_type,
bool internal_source) {
int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: pltype %u", __FUNCTION__,
pl_type);
@ -322,7 +321,7 @@ WebRtc_Word32 ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
return 0;
}
WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 pl_type) {
int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: pltype %u", __FUNCTION__, pl_type);
@ -348,7 +347,7 @@ WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 pl_type) {
// If the external encoder is the current send codec, use the vcm internal
// encoder.
if (current_send_codec.plType == pl_type) {
WebRtc_UWord16 max_data_payload_length =
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
@ -361,7 +360,7 @@ WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 pl_type) {
return 0;
}
WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
@ -385,7 +384,7 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
// Convert from kbps to bps.
default_rtp_rtcp_->SetTargetSendBitrate(video_codec.startBitrate * 1000);
WebRtc_UWord16 max_data_payload_length =
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
@ -415,7 +414,7 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
return 0;
}
WebRtc_Word32 ViEEncoder::GetEncoder(VideoCodec* video_codec) {
int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
@ -428,13 +427,13 @@ WebRtc_Word32 ViEEncoder::GetEncoder(VideoCodec* video_codec) {
return 0;
}
WebRtc_Word32 ViEEncoder::GetCodecConfigParameters(
int32_t ViEEncoder::GetCodecConfigParameters(
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
WebRtc_Word32 num_parameters =
int32_t num_parameters =
vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize);
if (num_parameters <= 0) {
config_parameters_size = 0;
@ -447,7 +446,7 @@ WebRtc_Word32 ViEEncoder::GetCodecConfigParameters(
return 0;
}
WebRtc_Word32 ViEEncoder::ScaleInputImage(bool enable) {
int32_t ViEEncoder::ScaleInputImage(bool enable) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s(enable %d)", __FUNCTION__,
enable);
@ -489,7 +488,7 @@ RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
void ViEEncoder::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
const uint32_t CSRC[kRtpCsrcSize]) {
WEBRTC_TRACE(webrtc::kTraceStream,
webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
@ -515,9 +514,9 @@ void ViEEncoder::DeliverFrame(int id,
// Convert render time, in ms, to RTP timestamp.
const int kMsToRtpTimestamp = 90;
const WebRtc_UWord32 time_stamp =
const uint32_t time_stamp =
kMsToRtpTimestamp *
static_cast<WebRtc_UWord32>(video_frame->render_time_ms());
static_cast<uint32_t>(video_frame->render_time_ms());
video_frame->set_timestamp(time_stamp);
{
CriticalSectionScoped cs(callback_cs_.get());
@ -539,7 +538,7 @@ void ViEEncoder::DeliverFrame(int id,
// Make sure the CSRC list is correct.
if (num_csrcs > 0) {
WebRtc_UWord32 tempCSRC[kRtpCsrcSize];
uint32_t tempCSRC[kRtpCsrcSize];
for (int i = 0; i < num_csrcs; i++) {
if (CSRC[i] == 1) {
tempCSRC[i] = default_rtp_rtcp_->SSRC();
@ -547,7 +546,7 @@ void ViEEncoder::DeliverFrame(int id,
tempCSRC[i] = CSRC[i];
}
}
default_rtp_rtcp_->SetCSRCs(tempCSRC, (WebRtc_UWord8) num_csrcs);
default_rtp_rtcp_->SetCSRCs(tempCSRC, (uint8_t) num_csrcs);
}
// Pass frame via preprocessor.
I420VideoFrame* decimated_frame = NULL;
@ -639,8 +638,8 @@ int ViEEncoder::SendKeyFrame() {
return vcm_.IntraFrameRequest(0);
}
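The kMsToRtpTimestamp constant in ViEEncoder::DeliverFrame() above reflects the 90 kHz RTP clock used for video, i.e. 90 timestamp ticks per millisecond. A minimal standalone restatement of that conversion:

#include <stdint.h>
// Millisecond render time to 90 kHz RTP timestamp units, mirroring
// DeliverFrame() above.
static uint32_t RenderTimeMsToRtpTimestamp(int64_t render_time_ms) {
  const int kMsToRtpTimestamp = 90;
  return kMsToRtpTimestamp * static_cast<uint32_t>(render_time_ms);
  // e.g. 40 ms -> 3600 ticks.
}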
WebRtc_Word32 ViEEncoder::SendCodecStatistics(
WebRtc_UWord32* num_key_frames, WebRtc_UWord32* num_delta_frames) {
int32_t ViEEncoder::SendCodecStatistics(
uint32_t* num_key_frames, uint32_t* num_delta_frames) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
@ -656,8 +655,8 @@ WebRtc_Word32 ViEEncoder::SendCodecStatistics(
return 0;
}
WebRtc_Word32 ViEEncoder::EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const {
int32_t ViEEncoder::EstimatedSendBandwidth(
uint32_t* available_bandwidth) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
@ -667,7 +666,7 @@ WebRtc_Word32 ViEEncoder::EstimatedSendBandwidth(
return 0;
}
int ViEEncoder::CodecTargetBitrate(WebRtc_UWord32* bitrate) const {
int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
if (vcm_.Bitrate(bitrate) != 0)
@ -675,16 +674,16 @@ int ViEEncoder::CodecTargetBitrate(WebRtc_UWord32* bitrate) const {
return 0;
}
WebRtc_Word32 ViEEncoder::UpdateProtectionMethod() {
int32_t ViEEncoder::UpdateProtectionMethod() {
bool fec_enabled = false;
WebRtc_UWord8 dummy_ptype_red = 0;
WebRtc_UWord8 dummy_ptypeFEC = 0;
uint8_t dummy_ptype_red = 0;
uint8_t dummy_ptypeFEC = 0;
// Update the protection method in VCM to get correct packetization sizes.
// FEC has larger overhead than NACK -> set FEC if used.
WebRtc_Word32 error = default_rtp_rtcp_->GenericFECStatus(fec_enabled,
dummy_ptype_red,
dummy_ptypeFEC);
int32_t error = default_rtp_rtcp_->GenericFECStatus(fec_enabled,
dummy_ptype_red,
dummy_ptypeFEC);
if (error) {
return -1;
}
@ -714,7 +713,7 @@ WebRtc_Word32 ViEEncoder::UpdateProtectionMethod() {
// The send codec must be registered to set correct MTU.
webrtc::VideoCodec codec;
if (vcm_.SendCodec(&codec) == 0) {
WebRtc_UWord16 max_pay_load = default_rtp_rtcp_->MaxDataPayloadLength();
uint16_t max_pay_load = default_rtp_rtcp_->MaxDataPayloadLength();
uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
@ -755,13 +754,13 @@ void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
}
}
WebRtc_Word32 ViEEncoder::SendData(
int32_t ViEEncoder::SendData(
const FrameType frame_type,
const WebRtc_UWord8 payload_type,
const WebRtc_UWord32 time_stamp,
const uint8_t payload_type,
const uint32_t time_stamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payload_data,
const WebRtc_UWord32 payload_size,
const uint8_t* payload_data,
const uint32_t payload_size,
const webrtc::RTPFragmentationHeader& fragmentation_header,
const RTPVideoHeader* rtp_video_hdr) {
{
@ -790,12 +789,12 @@ WebRtc_Word32 ViEEncoder::SendData(
rtp_video_hdr);
}
WebRtc_Word32 ViEEncoder::ProtectionRequest(
int32_t ViEEncoder::ProtectionRequest(
const FecProtectionParams* delta_fec_params,
const FecProtectionParams* key_fec_params,
WebRtc_UWord32* sent_video_rate_bps,
WebRtc_UWord32* sent_nack_rate_bps,
WebRtc_UWord32* sent_fec_rate_bps) {
uint32_t* sent_video_rate_bps,
uint32_t* sent_nack_rate_bps,
uint32_t* sent_fec_rate_bps) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s, deltaFECRate: %u, key_fecrate: %u, "
@ -824,8 +823,8 @@ WebRtc_Word32 ViEEncoder::ProtectionRequest(
return 0;
}
WebRtc_Word32 ViEEncoder::SendStatistics(const WebRtc_UWord32 bit_rate,
const WebRtc_UWord32 frame_rate) {
int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
@ -836,7 +835,7 @@ WebRtc_Word32 ViEEncoder::SendStatistics(const WebRtc_UWord32 bit_rate,
return 0;
}
WebRtc_Word32 ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
int32_t ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
@ -890,13 +889,13 @@ void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
<< ssrc_streams_.size();
return;
}
std::map<unsigned int, WebRtc_Word64>::iterator time_it =
std::map<unsigned int, int64_t>::iterator time_it =
time_last_intra_request_ms_.find(ssrc);
if (time_it == time_last_intra_request_ms_.end()) {
time_last_intra_request_ms_[ssrc] = 0;
}
WebRtc_Word64 now = TickTime::MillisecondTimestamp();
int64_t now = TickTime::MillisecondTimestamp();
if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
@ -971,7 +970,7 @@ PacedSender* ViEEncoder::GetPacedSender() {
return paced_sender_.get();
}
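OnReceivedIntraFrameRequest() above rate-limits key-frame requests per SSRC using a map of last-request times. A standalone sketch of the same pattern; here the minimum interval is a constructor argument, whereas the original uses kViEMinKeyRequestIntervalMs:

#include <stdint.h>
#include <map>
// Allows at most one key-frame request per SSRC within |min_interval_ms|.
class KeyFrameRequestThrottle {
 public:
  explicit KeyFrameRequestThrottle(int64_t min_interval_ms)
      : min_interval_ms_(min_interval_ms) {}
  bool AllowRequest(uint32_t ssrc, int64_t now_ms) {
    int64_t& last = last_request_ms_[ssrc];  // value-initialized to 0.
    if (last + min_interval_ms_ > now_ms)
      return false;  // Too soon since the last request for this SSRC.
    last = now_ms;
    return true;
  }
 private:
  const int64_t min_interval_ms_;
  std::map<uint32_t, int64_t> last_request_ms_;
};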
WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
int32_t ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
if (effect_filter == NULL) {
if (effect_filter_ == NULL) {
@ -1017,10 +1016,10 @@ QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
QMVideoSettingsCallback::~QMVideoSettingsCallback() {
}
WebRtc_Word32 QMVideoSettingsCallback::SetVideoQMSettings(
const WebRtc_UWord32 frame_rate,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height) {
int32_t QMVideoSettingsCallback::SetVideoQMSettings(
const uint32_t frame_rate,
const uint32_t width,
const uint32_t height) {
return vpm_->SetTargetResolution(width, height, frame_rate);
}

View File

@ -48,9 +48,9 @@ class ViEEncoder
friend class ViEBitrateObserver;
friend class ViEPacedSenderCallback;
ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
WebRtc_UWord32 number_of_cores,
ViEEncoder(int32_t engine_id,
int32_t channel_id,
uint32_t number_of_cores,
ProcessThread& module_process_thread,
BitrateController* bitrate_controller);
~ViEEncoder();
@ -66,26 +66,26 @@ class ViEEncoder
void Pause();
void Restart();
WebRtc_Word32 DropDeltaAfterKey(bool enable);
int32_t DropDeltaAfterKey(bool enable);
// Codec settings.
WebRtc_UWord8 NumberOfCodecs();
WebRtc_Word32 GetCodec(WebRtc_UWord8 list_index, VideoCodec* video_codec);
WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* encoder,
WebRtc_UWord8 pl_type,
bool internal_source);
WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 pl_type);
WebRtc_Word32 SetEncoder(const VideoCodec& video_codec);
WebRtc_Word32 GetEncoder(VideoCodec* video_codec);
uint8_t NumberOfCodecs();
int32_t GetCodec(uint8_t list_index, VideoCodec* video_codec);
int32_t RegisterExternalEncoder(VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source);
int32_t DeRegisterExternalEncoder(uint8_t pl_type);
int32_t SetEncoder(const VideoCodec& video_codec);
int32_t GetEncoder(VideoCodec* video_codec);
WebRtc_Word32 GetCodecConfigParameters(
int32_t GetCodecConfigParameters(
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size);
PacedSender* GetPacedSender();
// Scale or crop/pad image.
WebRtc_Word32 ScaleInputImage(bool enable);
int32_t ScaleInputImage(bool enable);
// RTP settings.
RtpRtcp* SendRtpRtcpModule();
@ -94,7 +94,7 @@ class ViEEncoder
virtual void DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
const uint32_t CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay);
virtual int GetPreferedFrameSettings(int* width,
int* height,
@ -104,28 +104,28 @@ class ViEEncoder
return;
}
WebRtc_Word32 SendKeyFrame();
WebRtc_Word32 SendCodecStatistics(WebRtc_UWord32* num_key_frames,
WebRtc_UWord32* num_delta_frames);
int32_t SendKeyFrame();
int32_t SendCodecStatistics(uint32_t* num_key_frames,
uint32_t* num_delta_frames);
WebRtc_Word32 EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const;
int32_t EstimatedSendBandwidth(
uint32_t* available_bandwidth) const;
int CodecTargetBitrate(WebRtc_UWord32* bitrate) const;
int CodecTargetBitrate(uint32_t* bitrate) const;
// Loss protection.
WebRtc_Word32 UpdateProtectionMethod();
int32_t UpdateProtectionMethod();
// Buffering mode.
void SetSenderBufferingMode(int target_delay_ms);
// Implements VCMPacketizationCallback.
virtual WebRtc_Word32 SendData(
virtual int32_t SendData(
FrameType frame_type,
WebRtc_UWord8 payload_type,
WebRtc_UWord32 time_stamp,
uint8_t payload_type,
uint32_t time_stamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payload_data,
WebRtc_UWord32 payload_size,
const uint8_t* payload_data,
uint32_t payload_size,
const RTPFragmentationHeader& fragmentation_header,
const RTPVideoHeader* rtp_video_hdr);
@ -133,14 +133,14 @@ class ViEEncoder
virtual int ProtectionRequest(
const FecProtectionParams* delta_fec_params,
const FecProtectionParams* key_fec_params,
WebRtc_UWord32* sent_video_rate_bps,
WebRtc_UWord32* sent_nack_rate_bps,
WebRtc_UWord32* sent_fec_rate_bps);
uint32_t* sent_video_rate_bps,
uint32_t* sent_nack_rate_bps,
uint32_t* sent_fec_rate_bps);
// Implements VideoSendStatisticsCallback.
virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bit_rate,
const WebRtc_UWord32 frame_rate);
WebRtc_Word32 RegisterCodecObserver(ViEEncoderObserver* observer);
virtual int32_t SendStatistics(const uint32_t bit_rate,
const uint32_t frame_rate);
int32_t RegisterCodecObserver(ViEEncoderObserver* observer);
// Implements RtcpIntraFrameObserver.
virtual void OnReceivedIntraFrameRequest(uint32_t ssrc);
@ -152,7 +152,7 @@ class ViEEncoder
bool SetSsrcs(const std::list<unsigned int>& ssrcs);
// Effect filter.
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
// Recording.
ViEFileRecorder& GetOutgoingFileRecorder();
@ -177,9 +177,9 @@ class ViEEncoder
private:
bool EncoderPaused() const;
WebRtc_Word32 engine_id_;
int32_t engine_id_;
const int channel_id_;
const WebRtc_UWord32 number_of_cores_;
const uint32_t number_of_cores_;
VideoCodingModule& vcm_;
VideoProcessingModule& vpm_;
@ -196,7 +196,7 @@ class ViEEncoder
bool network_is_transmitting_;
bool encoder_paused_;
std::map<unsigned int, int64_t> time_last_intra_request_ms_;
WebRtc_Word32 channels_dropping_delta_frames_;
int32_t channels_dropping_delta_frames_;
bool drop_next_frame_;
bool fec_enabled_;
@ -207,9 +207,9 @@ class ViEEncoder
ProcessThread& module_process_thread_;
bool has_received_sli_;
WebRtc_UWord8 picture_id_sli_;
uint8_t picture_id_sli_;
bool has_received_rpsi_;
WebRtc_UWord64 picture_id_rpsi_;
uint64_t picture_id_rpsi_;
std::map<unsigned int, int> ssrc_streams_;
ViEFileRecorder file_recorder_;

View File

@ -41,7 +41,7 @@ int ViEEncryptionImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEEncryptionImpl release too many times");

View File

@ -42,7 +42,7 @@ int ViEExternalCodecImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEExternalCodec release too many times");

View File

@ -59,8 +59,8 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
file_nameUTF8);
return -1;
}
image_buffer._buffer = new WebRtc_UWord8[ image_buffer._size + 1];
if (image_buffer._size != fread(image_buffer._buffer, sizeof(WebRtc_UWord8),
image_buffer._buffer = new uint8_t[ image_buffer._size + 1];
if (image_buffer._size != fread(image_buffer._buffer, sizeof(uint8_t),
image_buffer._size, image_file)) {
WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
"%s could not read file %s", __FUNCTION__, file_nameUTF8);

View File

@ -57,7 +57,7 @@ int ViEFileImpl::Release() {
"ViEFile::Release()");
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEFile release too many times");
@ -96,7 +96,7 @@ int ViEFileImpl::StartPlayFile(const char* file_nameUTF8,
}
VoiceEngine* voice = shared_data_->channel_manager()->GetVoiceEngine();
const WebRtc_Word32 result = shared_data_->input_manager()->CreateFilePlayer(
const int32_t result = shared_data_->input_manager()->CreateFilePlayer(
file_nameUTF8, loop, file_format, voice, file_id);
if (result != 0) {
shared_data_->SetLastError(result);
@ -375,7 +375,7 @@ int ViEFileImpl::StartRecordOutgoingVideo(const int video_channel,
return -1;
}
WebRtc_Word32 ve_channel_id = -1;
int32_t ve_channel_id = -1;
VoiceEngine* ve_ptr = NULL;
if (audio_source != NO_AUDIO) {
ViEChannel* vie_channel = cs.Channel(video_channel);
@ -511,7 +511,7 @@ int ViEFileImpl::StartRecordIncomingVideo(const int video_channel,
return -1;
}
WebRtc_Word32 ve_channel_id = -1;
int32_t ve_channel_id = -1;
VoiceEngine* ve_ptr = NULL;
if (audio_source != NO_AUDIO) {
ve_channel_id = vie_channel->VoiceChannel();
@ -601,8 +601,7 @@ int ViEFileImpl::GetRenderSnapshot(const int video_channel,
// Copy from VideoFrame class to ViEPicture struct.
int buffer_length = CalcBufferSize(kI420, video_frame.width(),
video_frame.height());
picture.data = static_cast<WebRtc_UWord8*>(malloc(
buffer_length * sizeof(WebRtc_UWord8)));
picture.data = static_cast<uint8_t*>(malloc(buffer_length * sizeof(uint8_t)));
if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) {
return -1;
}
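The snapshot paths in this file size the destination buffer with CalcBufferSize(kI420, width, height); for I420 that is roughly 1.5 bytes per pixel. A sketch of that arithmetic, assuming the usual rounding-up of the half-resolution chroma planes for odd dimensions:

#include <cstddef>
// I420: full-resolution Y plane plus two quarter-resolution chroma planes.
static size_t I420BufferSize(int width, int height) {
  const size_t y = static_cast<size_t>(width) * height;
  const size_t chroma =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  return y + 2 * chroma;  // 640x480 -> 460800 bytes
}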
@ -669,8 +668,7 @@ int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
// Copy from VideoFrame class to ViEPicture struct.
int buffer_length = CalcBufferSize(kI420, video_frame.width(),
video_frame.height());
picture.data = static_cast<WebRtc_UWord8*>(malloc(
buffer_length * sizeof(WebRtc_UWord8)));
picture.data = static_cast<uint8_t*>(malloc(buffer_length * sizeof(uint8_t)));
if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) {
return -1;
}
@ -787,7 +785,7 @@ int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
shared_data_->SetLastError(kViEFileInvalidFile);
return -1;
}
WebRtc_Word32 timeout_time = timeout_ms;
int32_t timeout_time = timeout_ms;
if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
@ -842,7 +840,7 @@ const unsigned int timeout_ms) {
shared_data_->SetLastError(kViEFileInvalidCapture);
return -1;
}
WebRtc_Word32 timeout_time = timeout_ms;
int32_t timeout_time = timeout_ms;
if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
@ -864,8 +862,8 @@ const unsigned int timeout_ms) {
return 0;
}
WebRtc_Word32 ViEFileImpl::GetNextCapturedFrame(WebRtc_Word32 capture_id,
I420VideoFrame* video_frame) {
int32_t ViEFileImpl::GetNextCapturedFrame(int32_t capture_id,
I420VideoFrame* video_frame) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* capturer = is.Capture(capture_id);
if (!capturer) {
@ -945,7 +943,7 @@ bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time,
void ViECaptureSnapshot::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
const uint32_t CSRC[kRtpCsrcSize]) {
CriticalSectionScoped cs(crit_.get());
if (!video_frame_) {
return;

View File

@ -36,7 +36,7 @@ class ViECaptureSnapshot : public ViEFrameCallback {
virtual void DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
const uint32_t CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay) {}
virtual int GetPreferedFrameSettings(int* width,
int* height,
@ -125,8 +125,7 @@ class ViEFileImpl
virtual ~ViEFileImpl();
private:
WebRtc_Word32 GetNextCapturedFrame(WebRtc_Word32 capture_id,
I420VideoFrame* video_frame);
int32_t GetNextCapturedFrame(int32_t capture_id, I420VideoFrame* video_frame);
ViESharedData* shared_data_;
};

View File

@ -127,8 +127,7 @@ int ViEFilePlayer::Init(const char* file_nameUTF8,
// Always try to open with audio since we don't know which channels the
// audio should be played on.
WebRtc_Word32 error = file_player_->StartPlayingVideoFile(file_name_, loop,
false);
int32_t error = file_player_->StartPlayingVideoFile(file_name_, loop, false);
if (error) {
// Failed to open the file with audio, try without.
error = file_player_->StartPlayingVideoFile(file_name_, loop, true);
@ -331,7 +330,7 @@ bool ViEFilePlayer::NeedsAudioFromFile(void* buf) {
return needs_new_audio;
}
void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id) {
void ViEFilePlayer::PlayFileEnded(const int32_t id) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id),
"%s: file_id %d", __FUNCTION__, id_);
file_player_->StopPlayingFile();

View File

@ -84,12 +84,12 @@ class ViEFilePlayer
}
// Implements FileCallback.
virtual void PlayNotification(const WebRtc_Word32 /*id*/,
const WebRtc_UWord32 /*notification_ms*/) {}
virtual void RecordNotification(const WebRtc_Word32 /*id*/,
const WebRtc_UWord32 /*notification_ms*/) {}
virtual void PlayFileEnded(const WebRtc_Word32 id);
virtual void RecordFileEnded(const WebRtc_Word32 /*id*/) {}
virtual void PlayNotification(const int32_t /*id*/,
const uint32_t /*notification_ms*/) {}
virtual void RecordNotification(const int32_t /*id*/,
const uint32_t /*notification_ms*/) {}
virtual void PlayFileEnded(const int32_t id);
virtual void RecordFileEnded(const int32_t /*id*/) {}
private:
static const int kMaxDecodedAudioLength = 320;
@ -120,7 +120,7 @@ class ViEFilePlayer
// Thread for decoding video (and audio if no audio clients connected).
ThreadWrapper* decode_thread_;
EventWrapper* decode_event_;
WebRtc_Word16 decoded_audio_[kMaxDecodedAudioLength];
int16_t decoded_audio_[kMaxDecodedAudioLength];
int decoded_audio_length_;
// Trick - list containing VoE buffer reading this file. Used if multiple

View File

@ -188,8 +188,8 @@ void ViEFileRecorder::RecordVideoFrame(const I420VideoFrame& video_frame) {
// Compensate for frame delay in order to get audio/video sync when
// recording local video.
const WebRtc_UWord32 time_stamp = video_frame.timestamp();
const WebRtc_Word64 render_time_stamp = video_frame.render_time_ms();
const uint32_t time_stamp = video_frame.timestamp();
const int64_t render_time_stamp = video_frame.render_time_ms();
I420VideoFrame& unconst_video_frame =
const_cast<I420VideoFrame&>(video_frame);
unconst_video_frame.set_timestamp(time_stamp - 90 * frame_delay_);
@ -215,9 +215,9 @@ bool ViEFileRecorder::Write(const void* buf, int len) {
}
AudioFrame audio_frame;
WebRtc_UWord16 length_in_samples = len / 2;
uint16_t length_in_samples = len / 2;
audio_frame.UpdateFrame(audio_channel_, 0,
static_cast<const WebRtc_Word16*>(buf),
static_cast<const int16_t*>(buf),
length_in_samples, length_in_samples * 100,
AudioFrame::kUndefined,
AudioFrame::kVadUnknown);
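In Write() above, |len| is a byte count while the audio is 16-bit PCM, hence the division by two; the `length_in_samples * 100` sample-rate argument suggests each buffer is assumed to hold 10 ms of audio, though that is an inference rather than something stated in this diff. The byte-to-sample conversion in isolation:

#include <stdint.h>
// Every 16-bit PCM sample occupies sizeof(int16_t) == 2 bytes.
static int BytesToPcm16Samples(int byte_length) {
  return byte_length / static_cast<int>(sizeof(int16_t));
}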

View File

@ -48,7 +48,7 @@ int ViEFrameProviderBase::Id() {
void ViEFrameProviderBase::DeliverFrame(
I420VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
const uint32_t CSRC[kRtpCsrcSize]) {
#ifdef DEBUG_
const TickTime start_process_time = TickTime::Now();
#endif

View File

@ -30,7 +30,7 @@ class ViEFrameCallback {
virtual void DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
const uint32_t CSRC[kRtpCsrcSize] = NULL) = 0;
// The capture delay has changed from the provider. |frame_delay| is given in
// ms.
@ -77,7 +77,7 @@ class ViEFrameProviderBase {
protected:
void DeliverFrame(I420VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
const uint32_t CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frame_delay);
int FrameDelay();
int GetBestFormat(int* best_width,

View File

@ -44,7 +44,7 @@ int ViEImageProcessImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViEImageProcess release too many times");

View File

@ -23,7 +23,7 @@
#endif
// Global counter to get an id for each new ViE instance.
static WebRtc_Word32 g_vie_active_instance_counter = 0;
static int32_t g_vie_active_instance_counter = 0;
namespace webrtc {
@ -178,7 +178,7 @@ int VideoEngine::SetTraceFile(const char* file_nameUTF8,
}
int VideoEngine::SetTraceFilter(const unsigned int filter) {
WebRtc_UWord32 old_filter = 0;
uint32_t old_filter = 0;
Trace::LevelFilter(old_filter);
if (filter == kTraceNone && old_filter != kTraceNone) {
@ -187,7 +187,7 @@ int VideoEngine::SetTraceFilter(const unsigned int filter) {
"SetTraceFilter(filter = 0x%x)", filter);
}
WebRtc_Word32 error = Trace::SetLevelFilter(filter);
int32_t error = Trace::SetLevelFilter(filter);
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
"SetTraceFilter(filter = 0x%x)", filter);
if (error != 0) {

View File

@ -79,11 +79,11 @@ int ViEInputManager::NumberOfCaptureDevices() {
return capture_device_info_->NumberOfDevices();
}
int ViEInputManager::GetDeviceName(WebRtc_UWord32 device_number,
int ViEInputManager::GetDeviceName(uint32_t device_number,
char* device_nameUTF8,
WebRtc_UWord32 device_name_length,
uint32_t device_name_length,
char* device_unique_idUTF8,
WebRtc_UWord32 device_unique_idUTF8Length) {
uint32_t device_unique_idUTF8Length) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
"%s(device_number: %d)", __FUNCTION__, device_number);
CriticalSectionScoped cs(device_info_cs_.get());
@ -111,7 +111,7 @@ int ViEInputManager::NumberOfCaptureCapabilities(
int ViEInputManager::GetCaptureCapability(
const char* device_unique_idUTF8,
const WebRtc_UWord32 device_capability_number,
const uint32_t device_capability_number,
CaptureCapability& capability) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
"%s(device_unique_idUTF8: %s, device_capability_number: %d)",
@ -174,8 +174,8 @@ int ViEInputManager::DisplayCaptureSettingsDialogBox(
const char* device_unique_idUTF8,
const char* dialog_titleUTF8,
void* parent_window,
WebRtc_UWord32 positionX,
WebRtc_UWord32 positionY) {
uint32_t positionX,
uint32_t positionY) {
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@ -188,7 +188,7 @@ int ViEInputManager::DisplayCaptureSettingsDialogBox(
int ViEInputManager::CreateCaptureDevice(
const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length,
const uint32_t device_unique_idUTF8Length,
int& capture_id) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
"%s(device_unique_id: %s)", __FUNCTION__, device_unique_idUTF8);
@ -220,7 +220,7 @@ int ViEInputManager::CreateCaptureDevice(
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
ViEModuleId(engine_id_));
assert(capture_device_info_);
for (WebRtc_UWord32 device_index = 0;
for (uint32_t device_index = 0;
device_index < capture_device_info_->NumberOfDevices(); ++device_index) {
if (device_unique_idUTF8Length > kVideoCaptureUniqueNameLength) {
// User's string length is longer than the max.
@ -335,7 +335,7 @@ int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
__FUNCTION__, capture_id);
return -1;
}
WebRtc_UWord32 num_callbacks =
uint32_t num_callbacks =
vie_capture->NumberOfRegisteredFrameCallbacks();
if (num_callbacks > 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
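
The ViEInputManager hunks above move the device-enumeration helpers to uint32_t indices and buffer lengths. For context, a minimal sketch of how such an enumeration is typically driven, written against a hypothetical reduction of the DeviceInfo interface (only the two calls used above, with assumed buffer sizes):

#include <stdint.h>
#include <stdio.h>

// Sketch only: hypothetical subset of a capture DeviceInfo interface,
// mirroring NumberOfDevices()/GetDeviceName() as they are used above.
class DeviceInfoLike {
 public:
  virtual ~DeviceInfoLike() {}
  virtual uint32_t NumberOfDevices() = 0;
  virtual int32_t GetDeviceName(uint32_t device_number,
                                char* name, uint32_t name_length,
                                char* unique_id, uint32_t id_length) = 0;
};

void ListCaptureDevices(DeviceInfoLike* info) {
  const uint32_t kNameLength = 256;      // assumed buffer sizes, not the
  const uint32_t kUniqueIdLength = 256;  // real kVideoCapture* constants
  char name[kNameLength];
  char unique_id[kUniqueIdLength];
  for (uint32_t i = 0; i < info->NumberOfDevices(); ++i) {
    if (info->GetDeviceName(i, name, kNameLength,
                            unique_id, kUniqueIdLength) == 0) {
      printf("device %u: %s (%s)\n", static_cast<unsigned>(i), name, unique_id);
    }
  }
}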

View File

@ -42,26 +42,26 @@ class ViEInputManager : private ViEManagerBase {
int NumberOfCaptureDevices();
// Gets name and id for a capture device.
int GetDeviceName(WebRtc_UWord32 device_number,
int GetDeviceName(uint32_t device_number,
char* device_nameUTF8,
WebRtc_UWord32 device_name_length,
uint32_t device_name_length,
char* device_unique_idUTF8,
WebRtc_UWord32 device_unique_idUTF8Length);
uint32_t device_unique_idUTF8Length);
// Returns the number of capture capabilities for a specified device.
int NumberOfCaptureCapabilities(const char* device_unique_idUTF8);
// Gets a specific capability for a capture device.
int GetCaptureCapability(const char* device_unique_idUTF8,
const WebRtc_UWord32 device_capability_number,
const uint32_t device_capability_number,
CaptureCapability& capability);
// Show OS specific Capture settings.
int DisplayCaptureSettingsDialogBox(const char* device_unique_idUTF8,
const char* dialog_titleUTF8,
void* parent_window,
WebRtc_UWord32 positionX,
WebRtc_UWord32 positionY);
uint32_t positionX,
uint32_t positionY);
int GetOrientation(const char* device_unique_idUTF8,
RotateCapturedFrame& orientation);
@ -69,7 +69,7 @@ class ViEInputManager : private ViEManagerBase {
// a capture device id for the device.
// Return zero on success, ViEError on failure.
int CreateCaptureDevice(const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length,
const uint32_t device_unique_idUTF8Length,
int& capture_id);
int CreateCaptureDevice(VideoCaptureModule* capture_module,
int& capture_id);

View File

@ -44,7 +44,7 @@ int ViENetworkImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViENetwork release too many times");

View File

@ -54,7 +54,7 @@ int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
if (external_decryption_) {
return -1;
}
decryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
decryption_buffer_ = new uint8_t[kViEMaxMtu];
if (decryption_buffer_ == NULL) {
return -1;
}
@ -92,7 +92,8 @@ int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
if (!receiving_) {
return -1;
}
return InsertRTPPacket((const WebRtc_Word8*) rtp_packet, rtp_packet_length);
return InsertRTPPacket(static_cast<const int8_t*>(rtp_packet),
rtp_packet_length);
}
int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
@ -100,12 +101,12 @@ int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
if (!receiving_) {
return -1;
}
return InsertRTCPPacket((const WebRtc_Word8*) rtcp_packet,
return InsertRTCPPacket(static_cast<const int8_t*>(rtcp_packet),
rtcp_packet_length);
}
WebRtc_Word32 ViEReceiver::OnReceivedPayloadData(
const WebRtc_UWord8* payload_data, const WebRtc_UWord16 payload_size,
int32_t ViEReceiver::OnReceivedPayloadData(
const uint8_t* payload_data, const uint16_t payload_size,
const WebRtcRTPHeader* rtp_header) {
if (rtp_header == NULL) {
return 0;
@ -126,8 +127,8 @@ WebRtc_Word32 ViEReceiver::OnReceivedPayloadData(
return 0;
}
void ViEReceiver::OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC,
void ViEReceiver::OnSendReportReceived(const int32_t id,
const uint32_t senderSSRC,
uint32_t ntp_secs,
uint32_t ntp_frac,
uint32_t timestamp) {
@ -135,10 +136,10 @@ void ViEReceiver::OnSendReportReceived(const WebRtc_Word32 id,
timestamp);
}
int ViEReceiver::InsertRTPPacket(const WebRtc_Word8* rtp_packet,
int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
int rtp_packet_length) {
// TODO(mflodman) Change decrypt to get rid of this cast.
WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtp_packet);
int8_t* tmp_ptr = const_cast<int8_t*>(rtp_packet);
unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
int received_packet_length = rtp_packet_length;
@ -167,17 +168,17 @@ int ViEReceiver::InsertRTPPacket(const WebRtc_Word8* rtp_packet,
if (rtp_dump_) {
rtp_dump_->DumpPacket(received_packet,
static_cast<WebRtc_UWord16>(received_packet_length));
static_cast<uint16_t>(received_packet_length));
}
}
assert(rtp_rtcp_); // Should be set by owner at construction time.
return rtp_rtcp_->IncomingPacket(received_packet, received_packet_length);
}
int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcp_packet,
int ViEReceiver::InsertRTCPPacket(const int8_t* rtcp_packet,
int rtcp_packet_length) {
// TODO(mflodman) Change decrypt to get rid of this cast.
WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtcp_packet);
int8_t* tmp_ptr = const_cast<int8_t*>(rtcp_packet);
unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
int received_packet_length = rtcp_packet_length;
{
@ -207,7 +208,7 @@ int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcp_packet,
if (rtp_dump_) {
rtp_dump_->DumpPacket(
received_packet, static_cast<WebRtc_UWord16>(received_packet_length));
received_packet, static_cast<uint16_t>(received_packet_length));
}
}
{
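
Beyond the type rename, the ViEReceiver hunks above replace the C-style (const WebRtc_Word8*) casts with static_cast<const int8_t*>; the remaining const_cast and reinterpret_cast exist only because the legacy decrypt and dump APIs are not const-correct, as the TODO above notes. A condensed, self-contained sketch of the same casts, with ByteSink as a hypothetical stand-in for the RTP module:

#include <stdint.h>
#include <cstddef>

// Sketch only: placeholder for rtp_rtcp_->IncomingPacket().
struct ByteSink {
  int IncomingPacket(const unsigned char* data, int length) {
    return (data != NULL && length > 0) ? 0 : -1;
  }
};

int ReceivedPacket(const void* packet, int packet_length, ByteSink* sink) {
  // static_cast is enough to leave void*; no C-style cast needed.
  const int8_t* bytes = static_cast<const int8_t*>(packet);
  // The non-const legacy APIs force the const_cast/reinterpret_cast pair,
  // exactly as in InsertRTPPacket()/InsertRTCPPacket() above.
  unsigned char* raw =
      reinterpret_cast<unsigned char*>(const_cast<int8_t*>(bytes));
  return sink->IncomingPacket(raw, packet_length);
}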

View File

@ -52,13 +52,13 @@ class ViEReceiver : public RtpData {
int ReceivedRTCPPacket(const void* rtcp_packet, int rtcp_packet_length);
// Implements RtpData.
virtual WebRtc_Word32 OnReceivedPayloadData(
const WebRtc_UWord8* payload_data,
const WebRtc_UWord16 payload_size,
virtual int32_t OnReceivedPayloadData(
const uint8_t* payload_data,
const uint16_t payload_size,
const WebRtcRTPHeader* rtp_header);
void OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC,
void OnSendReportReceived(const int32_t id,
const uint32_t senderSSRC,
uint32_t ntp_secs,
uint32_t ntp_frac,
uint32_t timestamp);
@ -66,8 +66,8 @@ class ViEReceiver : public RtpData {
void EstimatedReceiveBandwidth(unsigned int* available_bandwidth) const;
private:
int InsertRTPPacket(const WebRtc_Word8* rtp_packet, int rtp_packet_length);
int InsertRTCPPacket(const WebRtc_Word8* rtcp_packet, int rtcp_packet_length);
int InsertRTPPacket(const int8_t* rtp_packet, int rtp_packet_length);
int InsertRTCPPacket(const int8_t* rtcp_packet, int rtcp_packet_length);
scoped_ptr<CriticalSectionWrapper> receive_cs_;
const int32_t channel_id_;
@ -77,7 +77,7 @@ class ViEReceiver : public RtpData {
RemoteBitrateEstimator* remote_bitrate_estimator_;
Encryption* external_decryption_;
WebRtc_UWord8* decryption_buffer_;
uint8_t* decryption_buffer_;
RtpDump* rtp_dump_;
bool receiving_;
};

View File

@ -33,10 +33,10 @@ class TestProcessThread : public ProcessThread {
public:
explicit TestProcessThread() {}
~TestProcessThread() {}
virtual WebRtc_Word32 Start() { return 0; }
virtual WebRtc_Word32 Stop() { return 0; }
virtual WebRtc_Word32 RegisterModule(const Module* module) { return 0; }
virtual WebRtc_Word32 DeRegisterModule(const Module* module) { return 0; }
virtual int32_t Start() { return 0; }
virtual int32_t Stop() { return 0; }
virtual int32_t RegisterModule(const Module* module) { return 0; }
virtual int32_t DeRegisterModule(const Module* module) { return 0; }
};
class ViERembTest : public ::testing::Test {
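
TestProcessThread above is a do-nothing stand-in so the REMB tests can build their fixtures without starting a real process thread. A self-contained sketch of the same pattern, assuming gtest is on the include path (NullProcessThread and ModuleUser are hypothetical, not the real ViERemb classes):

#include <stdint.h>
#include <gtest/gtest.h>

// Sketch only: a no-op thread stub and a hypothetical class that needs one.
class NullProcessThread {
 public:
  int32_t Start() { return 0; }
  int32_t Stop() { return 0; }
};

class ModuleUser {
 public:
  explicit ModuleUser(NullProcessThread* thread) : thread_(thread) {}
  bool Start() { return thread_->Start() == 0; }
  bool Stop() { return thread_->Stop() == 0; }
 private:
  NullProcessThread* thread_;
};

class ModuleUserTest : public ::testing::Test {
 protected:
  ModuleUserTest() : user_(&process_thread_) {}
  NullProcessThread process_thread_;
  ModuleUser user_;
};

TEST_F(ModuleUserTest, StartAndStopSucceedWithStubThread) {
  EXPECT_TRUE(user_.Start());
  EXPECT_TRUE(user_.Stop());
}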

View File

@ -48,7 +48,7 @@ int ViERenderImpl::Release() {
"ViERender::Release()");
// Decrease ref count
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViERender release too many times");

View File

@ -26,12 +26,12 @@ ViERenderManagerScoped::ViERenderManagerScoped(
: ViEManagerScopedBase(vie_render_manager) {
}
ViERenderer* ViERenderManagerScoped::Renderer(WebRtc_Word32 render_id) const {
ViERenderer* ViERenderManagerScoped::Renderer(int32_t render_id) const {
return static_cast<const ViERenderManager*>(vie_manager_)->ViERenderPtr(
render_id);
}
ViERenderManager::ViERenderManager(WebRtc_Word32 engine_id)
ViERenderManager::ViERenderManager(int32_t engine_id)
: list_cs_(CriticalSectionWrapper::CreateCriticalSection()),
engine_id_(engine_id),
use_external_render_module_(false) {
@ -47,14 +47,14 @@ ViERenderManager::~ViERenderManager() {
while (stream_to_vie_renderer_.Size() != 0) {
MapItem* item = stream_to_vie_renderer_.First();
assert(item);
const WebRtc_Word32 render_id = item->GetId();
const int32_t render_id = item->GetId();
// The renderer is deleted in RemoveRenderStream.
item = NULL;
RemoveRenderStream(render_id);
}
}
WebRtc_Word32 ViERenderManager::RegisterVideoRenderModule(
int32_t ViERenderManager::RegisterVideoRenderModule(
VideoRender* render_module) {
// See if there is already a render module registered for the window that
// the registrant render module is associated with.
@ -73,10 +73,10 @@ WebRtc_Word32 ViERenderManager::RegisterVideoRenderModule(
return 0;
}
WebRtc_Word32 ViERenderManager::DeRegisterVideoRenderModule(
int32_t ViERenderManager::DeRegisterVideoRenderModule(
VideoRender* render_module) {
// Check if there are streams in the module.
WebRtc_UWord32 n_streams = render_module->GetNumIncomingRenderStreams();
uint32_t n_streams = render_module->GetNumIncomingRenderStreams();
if (n_streams != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
"There are still %d streams in this module, cannot "
@ -104,9 +104,9 @@ WebRtc_Word32 ViERenderManager::DeRegisterVideoRenderModule(
return 0;
}
ViERenderer* ViERenderManager::AddRenderStream(const WebRtc_Word32 render_id,
ViERenderer* ViERenderManager::AddRenderStream(const int32_t render_id,
void* window,
const WebRtc_UWord32 z_order,
const uint32_t z_order,
const float left,
const float top,
const float right,
@ -150,8 +150,8 @@ ViERenderer* ViERenderManager::AddRenderStream(const WebRtc_Word32 render_id,
return vie_renderer;
}
WebRtc_Word32 ViERenderManager::RemoveRenderStream(
const WebRtc_Word32 render_id) {
int32_t ViERenderManager::RemoveRenderStream(
const int32_t render_id) {
// We need exclusive right to the items in the render manager to delete a
// stream.
ViEManagerWriteScoped scope(this);
@ -215,7 +215,7 @@ VideoRender* ViERenderManager::FindRenderModule(void* window) {
return renderer;
}
ViERenderer* ViERenderManager::ViERenderPtr(WebRtc_Word32 render_id) const {
ViERenderer* ViERenderManager::ViERenderPtr(int32_t render_id) const {
ViERenderer* renderer = NULL;
MapItem* map_item = stream_to_vie_renderer_.Find(render_id);
if (!map_item) {

View File

@ -29,21 +29,21 @@ class ViERenderer;
class ViERenderManager : private ViEManagerBase {
friend class ViERenderManagerScoped;
public:
explicit ViERenderManager(WebRtc_Word32 engine_id);
explicit ViERenderManager(int32_t engine_id);
~ViERenderManager();
WebRtc_Word32 RegisterVideoRenderModule(VideoRender* render_module);
WebRtc_Word32 DeRegisterVideoRenderModule(VideoRender* render_module);
int32_t RegisterVideoRenderModule(VideoRender* render_module);
int32_t DeRegisterVideoRenderModule(VideoRender* render_module);
ViERenderer* AddRenderStream(const WebRtc_Word32 render_id,
ViERenderer* AddRenderStream(const int32_t render_id,
void* window,
const WebRtc_UWord32 z_order,
const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom);
WebRtc_Word32 RemoveRenderStream(WebRtc_Word32 render_id);
int32_t RemoveRenderStream(int32_t render_id);
private:
// Returns a pointer to the render module if it exists in the render list.
@ -51,10 +51,10 @@ class ViERenderManager : private ViEManagerBase {
VideoRender* FindRenderModule(void* window);
// Methods used by ViERenderScoped.
ViERenderer* ViERenderPtr(WebRtc_Word32 render_id) const;
ViERenderer* ViERenderPtr(int32_t render_id) const;
scoped_ptr<CriticalSectionWrapper> list_cs_;
WebRtc_Word32 engine_id_;
int32_t engine_id_;
MapWrapper stream_to_vie_renderer_; // Protected by ViEManagerBase.
ListWrapper render_list_;
bool use_external_render_module_;
@ -65,7 +65,7 @@ class ViERenderManagerScoped: private ViEManagerScopedBase {
explicit ViERenderManagerScoped(const ViERenderManager& vie_render_manager);
// Returns a pointer to the ViERender object.
ViERenderer* Renderer(WebRtc_Word32 render_id) const;
ViERenderer* Renderer(int32_t render_id) const;
};
} // namespace webrtc
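
ViERenderManager above keys each ViERenderer by its render id in a MapWrapper and tears the map down by repeatedly removing the first entry, as the destructor hunk shows. The same bookkeeping expressed with std::map, purely as an illustrative sketch (Renderer is a hypothetical placeholder and no locking is shown):

#include <stdint.h>
#include <cstddef>
#include <map>

// Sketch only: stand-in for the objects owned by the manager.
struct Renderer {
  explicit Renderer(int32_t render_id) : id(render_id) {}
  int32_t id;
};

class RendererRegistry {
 public:
  ~RendererRegistry() {
    // Remove entries one by one, mirroring the manager's destructor.
    while (!renderers_.empty())
      RemoveRenderStream(renderers_.begin()->first);
  }
  Renderer* AddRenderStream(int32_t render_id) {
    if (renderers_.count(render_id))
      return NULL;  // id already in use
    Renderer* renderer = new Renderer(render_id);
    renderers_[render_id] = renderer;
    return renderer;
  }
  int32_t RemoveRenderStream(int32_t render_id) {
    std::map<int32_t, Renderer*>::iterator it = renderers_.find(render_id);
    if (it == renderers_.end())
      return -1;
    delete it->second;
    renderers_.erase(it);
    return 0;
  }
 private:
  std::map<int32_t, Renderer*> renderers_;
};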

View File

@ -17,11 +17,11 @@
namespace webrtc {
ViERenderer* ViERenderer::CreateViERenderer(const WebRtc_Word32 render_id,
const WebRtc_Word32 engine_id,
ViERenderer* ViERenderer::CreateViERenderer(const int32_t render_id,
const int32_t engine_id,
VideoRender& render_module,
ViERenderManager& render_manager,
const WebRtc_UWord32 z_order,
const uint32_t z_order,
const float left,
const float top,
const float right,
@ -43,15 +43,15 @@ ViERenderer::~ViERenderer(void) {
delete incoming_external_callback_;
}
WebRtc_Word32 ViERenderer::StartRender() {
int32_t ViERenderer::StartRender() {
return render_module_.StartRender(render_id_);
}
WebRtc_Word32 ViERenderer::StopRender() {
int32_t ViERenderer::StopRender() {
return render_module_.StopRender(render_id_);
}
WebRtc_Word32 ViERenderer::GetLastRenderedFrame(const WebRtc_Word32 renderID,
I420VideoFrame& video_frame) {
int32_t ViERenderer::GetLastRenderedFrame(const int32_t renderID,
I420VideoFrame& video_frame) {
return render_module_.GetLastRenderedFrame(renderID, video_frame);
}
@ -59,11 +59,11 @@ int ViERenderer::SetExpectedRenderDelay(int render_delay) {
return render_module_.SetExpectedRenderDelay(render_id_, render_delay);
}
WebRtc_Word32 ViERenderer::ConfigureRenderer(const unsigned int z_order,
const float left,
const float top,
const float right,
const float bottom) {
int32_t ViERenderer::ConfigureRenderer(const unsigned int z_order,
const float left,
const float top,
const float right,
const float bottom) {
return render_module_.ConfigureRenderer(render_id_, z_order, left, top, right,
bottom);
}
@ -72,27 +72,27 @@ VideoRender& ViERenderer::RenderModule() {
return render_module_;
}
WebRtc_Word32 ViERenderer::EnableMirroring(const WebRtc_Word32 render_id,
const bool enable,
const bool mirror_xaxis,
const bool mirror_yaxis) {
int32_t ViERenderer::EnableMirroring(const int32_t render_id,
const bool enable,
const bool mirror_xaxis,
const bool mirror_yaxis) {
return render_module_.MirrorRenderStream(render_id, enable, mirror_xaxis,
mirror_yaxis);
}
WebRtc_Word32 ViERenderer::SetTimeoutImage(const I420VideoFrame& timeout_image,
const WebRtc_Word32 timeout_value) {
int32_t ViERenderer::SetTimeoutImage(const I420VideoFrame& timeout_image,
const int32_t timeout_value) {
return render_module_.SetTimeoutImage(render_id_, timeout_image,
timeout_value);
}
WebRtc_Word32 ViERenderer::SetRenderStartImage(
int32_t ViERenderer::SetRenderStartImage(
const I420VideoFrame& start_image) {
return render_module_.SetStartImage(render_id_, start_image);
}
WebRtc_Word32 ViERenderer::SetExternalRenderer(
const WebRtc_Word32 render_id,
int32_t ViERenderer::SetExternalRenderer(
const int32_t render_id,
RawVideoType video_input_format,
ExternalRenderer* external_renderer) {
if (!incoming_external_callback_)
@ -104,8 +104,8 @@ WebRtc_Word32 ViERenderer::SetExternalRenderer(
incoming_external_callback_);
}
ViERenderer::ViERenderer(const WebRtc_Word32 render_id,
const WebRtc_Word32 engine_id,
ViERenderer::ViERenderer(const int32_t render_id,
const int32_t engine_id,
VideoRender& render_module,
ViERenderManager& render_manager)
: render_id_(render_id),
@ -115,11 +115,11 @@ ViERenderer::ViERenderer(const WebRtc_Word32 render_id,
incoming_external_callback_(new ViEExternalRendererImpl()) {
}
WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 z_order,
const float left,
const float top,
const float right,
const float bottom) {
int32_t ViERenderer::Init(const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom) {
render_callback_ =
static_cast<VideoRenderCallback*>(render_module_.AddIncomingRenderStream(
render_id_, z_order, left, top, right, bottom));
@ -133,7 +133,7 @@ WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 z_order,
void ViERenderer::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
const uint32_t CSRC[kRtpCsrcSize]) {
render_callback_->RenderFrame(render_id_, *video_frame);
}
@ -166,8 +166,8 @@ int ViEExternalRendererImpl::SetViEExternalRenderer(
return 0;
}
WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(
const WebRtc_UWord32 stream_id,
int32_t ViEExternalRendererImpl::RenderFrame(
const uint32_t stream_id,
I420VideoFrame& video_frame) {
VideoFrame* out_frame = converted_frame_.get();
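
For readers unfamiliar with the render API used above: ConfigureRenderer() and AddRenderStream() take a z-order plus four float edges, which are conventionally fractions of the target window (an assumption stated here for context, not spelled out in the hunks). A small hypothetical sketch of a call that places a stream in the top-left quadrant:

#include <stdint.h>

// Sketch only: hypothetical renderer mirroring the ConfigureRenderer()
// signature above; edges are assumed to lie in [0.0, 1.0].
struct RendererLike {
  int32_t ConfigureRenderer(unsigned int z_order, float left, float top,
                            float right, float bottom) {
    (void)z_order;
    if (left < 0.0f || top < 0.0f || right > 1.0f || bottom > 1.0f ||
        left >= right || top >= bottom) {
      return -1;  // reject degenerate or out-of-window rectangles
    }
    return 0;
  }
};

int32_t PlaceTopLeft(RendererLike* renderer) {
  return renderer->ConfigureRenderer(0, 0.0f, 0.0f, 0.5f, 0.5f);
}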

View File

@ -32,8 +32,8 @@ class ViEExternalRendererImpl : public VideoRenderCallback {
RawVideoType video_input_format);
// Implements VideoRenderCallback.
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
I420VideoFrame& video_frame);
virtual int32_t RenderFrame(const uint32_t stream_id,
I420VideoFrame& video_frame);
private:
ExternalRenderer* external_renderer_;
@ -46,68 +46,68 @@ class ViEExternalRendererImpl : public VideoRenderCallback {
class ViERenderer: public ViEFrameCallback {
public:
static ViERenderer* CreateViERenderer(const WebRtc_Word32 render_id,
const WebRtc_Word32 engine_id,
static ViERenderer* CreateViERenderer(const int32_t render_id,
const int32_t engine_id,
VideoRender& render_module,
ViERenderManager& render_manager,
const WebRtc_UWord32 z_order,
const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom);
~ViERenderer(void);
WebRtc_Word32 StartRender();
WebRtc_Word32 StopRender();
int32_t StartRender();
int32_t StopRender();
WebRtc_Word32 GetLastRenderedFrame(const WebRtc_Word32 renderID,
I420VideoFrame& video_frame);
int32_t GetLastRenderedFrame(const int32_t renderID,
I420VideoFrame& video_frame);
int SetExpectedRenderDelay(int render_delay);
WebRtc_Word32 ConfigureRenderer(const unsigned int z_order,
const float left,
const float top,
const float right,
const float bottom);
int32_t ConfigureRenderer(const unsigned int z_order,
const float left,
const float top,
const float right,
const float bottom);
VideoRender& RenderModule();
WebRtc_Word32 EnableMirroring(const WebRtc_Word32 render_id,
const bool enable,
const bool mirror_xaxis,
const bool mirror_yaxis);
int32_t EnableMirroring(const int32_t render_id,
const bool enable,
const bool mirror_xaxis,
const bool mirror_yaxis);
WebRtc_Word32 SetTimeoutImage(const I420VideoFrame& timeout_image,
const WebRtc_Word32 timeout_value);
WebRtc_Word32 SetRenderStartImage(const I420VideoFrame& start_image);
WebRtc_Word32 SetExternalRenderer(const WebRtc_Word32 render_id,
RawVideoType video_input_format,
ExternalRenderer* external_renderer);
int32_t SetTimeoutImage(const I420VideoFrame& timeout_image,
const int32_t timeout_value);
int32_t SetRenderStartImage(const I420VideoFrame& start_image);
int32_t SetExternalRenderer(const int32_t render_id,
RawVideoType video_input_format,
ExternalRenderer* external_renderer);
private:
ViERenderer(const WebRtc_Word32 render_id, const WebRtc_Word32 engine_id,
ViERenderer(const int32_t render_id, const int32_t engine_id,
VideoRender& render_module,
ViERenderManager& render_manager);
WebRtc_Word32 Init(const WebRtc_UWord32 z_order,
const float left,
const float top,
const float right,
const float bottom);
int32_t Init(const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom);
// Implement ViEFrameCallback
virtual void DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
const uint32_t CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay);
virtual int GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate);
virtual void ProviderDestroyed(int id);
WebRtc_UWord32 render_id_;
uint32_t render_id_;
VideoRender& render_module_;
ViERenderManager& render_manager_;
VideoRenderCallback* render_callback_;

View File

@ -95,7 +95,7 @@ int ViERTP_RTCPImpl::Release() {
// Decrease ref count.
(*this)--;
WebRtc_Word32 ref_count = GetCount();
int32_t ref_count = GetCount();
if (ref_count < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
"ViERTP_RTCP release too many times");
@ -423,7 +423,7 @@ int ViERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
return -1;
}
if (vie_channel->SendApplicationDefinedRTCPPacket(
sub_type, name, reinterpret_cast<const WebRtc_UWord8*>(data),
sub_type, name, reinterpret_cast<const uint8_t*>(data),
data_length_in_bytes) != 0) {
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
@ -880,7 +880,7 @@ int ViERTP_RTCPImpl::GetEstimatedSendBandwidth(
return -1;
}
return vie_encoder->EstimatedSendBandwidth(
static_cast<WebRtc_UWord32*>(estimated_bandwidth));
static_cast<uint32_t*>(estimated_bandwidth));
}
int ViERTP_RTCPImpl::GetEstimatedReceiveBandwidth(
@ -900,7 +900,7 @@ int ViERTP_RTCPImpl::GetEstimatedReceiveBandwidth(
return -1;
}
vie_channel->GetEstimatedReceiveBandwidth(
static_cast<WebRtc_UWord32*>(estimated_bandwidth));
static_cast<uint32_t*>(estimated_bandwidth));
return 0;
}
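
One detail worth calling out in the two hunks above: the public ViERTP_RTCP API passes unsigned int*, while the internals now take uint32_t*, and the static_cast between those pointer types is only well-formed because uint32_t is a typedef for unsigned int on the platforms this code targets. A one-line sketch that makes the assumption explicit (C++11 static_assert, shown for illustration only):

#include <stdint.h>
#include <type_traits>

// Sketch only: if uint32_t were a distinct type (e.g. unsigned long), the
// static_cast between unsigned int* and uint32_t* above would not compile.
static_assert(std::is_same<uint32_t, unsigned int>::value,
              "public API uses unsigned int where internals expect uint32_t");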

View File

@ -45,7 +45,7 @@ int ViESender::RegisterExternalEncryption(Encryption* encryption) {
if (external_encryption_) {
return -1;
}
encryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
encryption_buffer_ = new uint8_t[kViEMaxMtu];
if (encryption_buffer_ == NULL) {
return -1;
}

View File

@ -54,7 +54,7 @@ class ViESender: public Transport {
scoped_ptr<CriticalSectionWrapper> critsect_;
Encryption* external_encryption_;
WebRtc_UWord8* encryption_buffer_;
uint8_t* encryption_buffer_;
Transport* transport_;
RtpDump* rtp_dump_;
};

View File

@ -94,12 +94,12 @@ int ViESyncModule::VoiceChannel() {
return voe_channel_id_;
}
WebRtc_Word32 ViESyncModule::TimeUntilNextProcess() {
return static_cast<WebRtc_Word32>(kSyncInterval -
(TickTime::Now() - last_sync_time_).Milliseconds());
int32_t ViESyncModule::TimeUntilNextProcess() {
return static_cast<int32_t>(kSyncInterval -
(TickTime::Now() - last_sync_time_).Milliseconds());
}
WebRtc_Word32 ViESyncModule::Process() {
int32_t ViESyncModule::Process() {
CriticalSectionScoped cs(data_cs_.get());
last_sync_time_ = TickTime::Now();
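
TimeUntilNextProcess() above follows the webrtc::Module contract: report how many milliseconds remain until Process() should run, and let the owning process thread call Process() once that reaches zero or below. A minimal sketch of the same arithmetic with a hypothetical module that takes the current time from its caller instead of TickTime:

#include <stdint.h>

// Sketch only: a hypothetical periodic module mirroring the pattern above.
class PeriodicTask {
 public:
  explicit PeriodicTask(int64_t interval_ms)
      : interval_ms_(interval_ms), last_run_ms_(0) {}

  // Milliseconds until the next Process() call; <= 0 means "run now".
  int32_t TimeUntilNextProcess(int64_t now_ms) const {
    return static_cast<int32_t>(interval_ms_ - (now_ms - last_run_ms_));
  }

  int32_t Process(int64_t now_ms) {
    last_run_ms_ = now_ms;
    // ... periodic work; for ViESyncModule this is audio/video sync.
    return 0;
  }

 private:
  const int64_t interval_ms_;
  int64_t last_run_ms_;
};

// Example: with a 1000 ms interval and the last run at t = 0, the module
// reports 750 ms remaining at t = 250 and -200 at t = 1200, at which point
// the process thread calls Process() again.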

View File

@ -44,8 +44,8 @@ class ViESyncModule : public Module {
int SetTargetBufferingDelay(int target_delay_ms);
// Implements Module.
virtual WebRtc_Word32 TimeUntilNextProcess();
virtual WebRtc_Word32 Process();
virtual int32_t TimeUntilNextProcess();
virtual int32_t Process();
private:
scoped_ptr<CriticalSectionWrapper> data_cs_;