Use size_t more consistently for packet/payload lengths.

See design doc at
https://docs.google.com/a/chromium.org/document/d/1I6nmE9D_BmCY-IoV6MDPY2V6WYpEI-dg2apWXTfZyUI/edit?usp=sharing
for more information.

This CL was reviewed and approved in pieces in the following CLs:
https://webrtc-codereview.appspot.com/24209004/
https://webrtc-codereview.appspot.com/24229004/
https://webrtc-codereview.appspot.com/24259004/
https://webrtc-codereview.appspot.com/25109004/
https://webrtc-codereview.appspot.com/26099004/
https://webrtc-codereview.appspot.com/27069004/
https://webrtc-codereview.appspot.com/27969004/
https://webrtc-codereview.appspot.com/27989004/
https://webrtc-codereview.appspot.com/29009004/
https://webrtc-codereview.appspot.com/30929004/
https://webrtc-codereview.appspot.com/30939004/
https://webrtc-codereview.appspot.com/31999004/
Committing as TBR to the original reviewers.

BUG=chromium:81439
TEST=none
TBR=pthatcher,henrik.lundin,tina.legrand,stefan,tkchin,glaznev,kjellander,perkj,mflodman,henrika,asapersson,niklas.enbom

Review URL: https://webrtc-codereview.appspot.com/23129004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7726 4adac7df-926f-26a2-2b94-8c16560cd09d
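As a hedged illustration only (the FakeTransport class and main() below are hypothetical and not part of this CL or the WebRTC API), the pattern applied throughout the diff is: carry packet/payload lengths as size_t end to end, and narrow back to int explicitly only at legacy boundaries, mirroring the static_cast<int>(len) and rtc::checked_cast<uint32>(length) call sites below.

// Minimal standalone sketch (not WebRTC code) of the size_t migration pattern.
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <limits>

class FakeTransport {
 public:
  // Length stays size_t all the way through; no implicit signed/unsigned mixing.
  int SendPacket(const void* data, size_t len) {
    if (data == nullptr || len == 0)
      return -1;
    // Legacy callers expect the byte count back as int, so narrow explicitly
    // (and check the range), analogous to static_cast<int>(len) in the CL.
    assert(len <= static_cast<size_t>(std::numeric_limits<int>::max()));
    return static_cast<int>(len);
  }
};

int main() {
  FakeTransport transport;
  const char payload[] = "rtp";
  std::printf("sent %d bytes\n", transport.SendPacket(payload, sizeof(payload)));
  return 0;
}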
@@ -1258,7 +1258,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // |codec_thread_| for execution.
   virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                              int32_t /* number_of_cores */,
-                             uint32_t /* max_payload_size */) OVERRIDE;
+                             size_t /* max_payload_size */) OVERRIDE;
   virtual int32_t Encode(
       const webrtc::I420VideoFrame& input_image,
       const webrtc::CodecSpecificInfo* /* codec_specific_info */,
@@ -1433,7 +1433,7 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
 int32_t MediaCodecVideoEncoder::InitEncode(
     const webrtc::VideoCodec* codec_settings,
     int32_t /* number_of_cores */,
-    uint32_t /* max_payload_size */) {
+    size_t /* max_payload_size */) {
   // Factory should guard against other codecs being used with us.
   CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";
 
@@ -152,7 +152,7 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
 
   virtual int32 InitEncode(const webrtc::VideoCodec* codecSettings,
                            int32 numberOfCores,
-                           uint32 maxPayloadSize) {
+                           size_t maxPayloadSize) {
     return WEBRTC_VIDEO_CODEC_OK;
   }
 
@@ -351,7 +351,7 @@ class FakeWebRtcVideoEngine
 
   // From ViEExternalCapture
   virtual int IncomingFrame(unsigned char* videoFrame,
-                            unsigned int videoFrameLength,
+                            size_t videoFrameLength,
                             unsigned short width,
                             unsigned short height,
                             webrtc::RawVideoType videoType,
@@ -890,7 +890,7 @@ class FakeWebRtcVideoEngine
 
   WEBRTC_FUNC(ReceivedRTPPacket, (const int channel,
                                   const void* packet,
-                                  const int length,
+                                  const size_t length,
                                   const webrtc::PacketTime& packet_time)) {
     WEBRTC_ASSERT_CHANNEL(channel);
     ASSERT(length > 1);
@@ -899,11 +899,11 @@ class FakeWebRtcVideoEngine
     return 0;
   }
 
-  WEBRTC_STUB(ReceivedRTCPPacket, (const int, const void*, const int));
+  WEBRTC_STUB(ReceivedRTCPPacket, (const int, const void*, const size_t));
   // Not using WEBRTC_STUB due to bool return value
   virtual bool IsIPv6Enabled(int channel) { return true; }
   WEBRTC_STUB(SetMTU, (int, unsigned int));
-  WEBRTC_STUB(ReceivedBWEPacket, (const int, int64_t, int,
+  WEBRTC_STUB(ReceivedBWEPacket, (const int, int64_t, size_t,
       const webrtc::RTPHeader&));
   virtual bool SetBandwidthEstimationConfig(int, const webrtc::Config&) {
     return true;
@@ -1140,8 +1140,8 @@ class FakeWebRtcVideoEngine
       unsigned int&, unsigned int&, unsigned int&, int&));
   WEBRTC_STUB_CONST(GetSentRTCPStatistics, (const int, unsigned short&,
       unsigned int&, unsigned int&, unsigned int&, int&));
-  WEBRTC_STUB_CONST(GetRTPStatistics, (const int, unsigned int&, unsigned int&,
-      unsigned int&, unsigned int&));
+  WEBRTC_STUB_CONST(GetRTPStatistics, (const int, size_t&, unsigned int&,
+      size_t&, unsigned int&));
   WEBRTC_STUB_CONST(GetReceiveChannelRtcpStatistics, (const int,
       webrtc::RtcpStatistics&, int&));
   WEBRTC_STUB_CONST(GetSendChannelRtcpStatistics, (const int,
@@ -858,7 +858,7 @@ class FakeWebRtcVoiceEngine
     return 0;
   }
   WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
-                                  unsigned int length)) {
+                                  size_t length)) {
     WEBRTC_CHECK_CHANNEL(channel);
     if (!channels_[channel]->external_transport) return -1;
     channels_[channel]->packets.push_back(
@@ -866,7 +866,7 @@ class FakeWebRtcVoiceEngine
     return 0;
   }
   WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
-                                  unsigned int length,
+                                  size_t length,
                                   const webrtc::PacketTime& packet_time)) {
     WEBRTC_CHECK_CHANNEL(channel);
     if (ReceivedRTPPacket(channel, data, length) == -1) {
@@ -877,7 +877,7 @@ class FakeWebRtcVoiceEngine
   }
 
   WEBRTC_STUB(ReceivedRTCPPacket, (int channel, const void* data,
-                                   unsigned int length));
+                                   size_t length));
 
   // webrtc::VoERTP_RTCP
   WEBRTC_STUB(RegisterRTPObserver, (int channel,
@@ -36,6 +36,7 @@
 #include "talk/media/webrtc/webrtcvideoframefactory.h"
 #include "webrtc/base/criticalsection.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/base/safe_conversions.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/base/timeutils.h"
 
@@ -351,8 +352,8 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
   // Signal down stream components on captured frame.
   // The CapturedFrame class doesn't support planes. We have to ExtractBuffer
   // to one block for it.
-  int length = webrtc::CalcBufferSize(webrtc::kI420,
-                                      sample.width(), sample.height());
+  size_t length =
+      webrtc::CalcBufferSize(webrtc::kI420, sample.width(), sample.height());
   capture_buffer_.resize(length);
   // TODO(ronghuawu): Refactor the WebRtcCapturedFrame to avoid memory copy.
   webrtc::ExtractBuffer(sample, length, &capture_buffer_[0]);
@@ -368,7 +369,7 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,
 // WebRtcCapturedFrame
 WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
                                          void* buffer,
-                                         int length) {
+                                         size_t length) {
   width = sample.width();
   height = sample.height();
   fourcc = FOURCC_I420;
@@ -378,7 +379,7 @@ WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
   // Convert units from VideoFrame RenderTimeMs to CapturedFrame (nanoseconds).
   elapsed_time = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
   time_stamp = elapsed_time;
-  data_size = length;
+  data_size = rtc::checked_cast<uint32>(length);
   data = buffer;
 }
 
@@ -98,7 +98,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
 struct WebRtcCapturedFrame : public CapturedFrame {
  public:
   WebRtcCapturedFrame(const webrtc::I420VideoFrame& frame,
-                      void* buffer, int length);
+                      void* buffer, size_t length);
 };
 
 }  // namespace cricket
@@ -305,7 +305,7 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   }
 
   virtual int DeliverFrame(unsigned char* buffer,
-                           int buffer_size,
+                           size_t buffer_size,
                            uint32_t rtp_time_stamp,
                            int64_t ntp_time_ms,
                            int64_t render_time,
@@ -347,14 +347,14 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
 
   virtual bool IsTextureSupported() { return true; }
 
-  int DeliverBufferFrame(unsigned char* buffer, int buffer_size,
+  int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
                          int64 time_stamp, int64 elapsed_time) {
     WebRtcVideoFrame video_frame;
     video_frame.Alias(buffer, buffer_size, width_, height_,
                       1, 1, elapsed_time, time_stamp, 0);
 
     // Sanity check on decoded frame size.
-    if (buffer_size != static_cast<int>(VideoFrame::SizeOf(width_, height_))) {
+    if (buffer_size != VideoFrame::SizeOf(width_, height_)) {
       LOG(LS_WARNING) << "WebRtcRenderAdapter (channel " << channel_id_
                       << ") received a strange frame size: "
                       << buffer_size;
@@ -2499,7 +2499,8 @@ bool WebRtcVideoMediaChannel::GetStats(const StatsOptions& options,
       ASSERT(channel_id == default_channel_id_);
       continue;
     }
-    unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv;
+    size_t bytes_sent, bytes_recv;
+    unsigned int packets_sent, packets_recv;
     if (engine_->vie()->rtp()->GetRTPStatistics(channel_id, bytes_sent,
                                                 packets_sent, bytes_recv,
                                                 packets_recv) != 0) {
@@ -2829,7 +2830,7 @@ void WebRtcVideoMediaChannel::OnPacketReceived(
   engine()->vie()->network()->ReceivedRTPPacket(
       processing_channel_id,
       packet->data(),
-      static_cast<int>(packet->length()),
+      packet->length(),
       webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
 }
 
@@ -2858,7 +2859,7 @@ void WebRtcVideoMediaChannel::OnRtcpReceived(
     engine_->vie()->network()->ReceivedRTCPPacket(
         recv_channel_id,
         packet->data(),
-        static_cast<int>(packet->length()));
+        packet->length());
     }
   }
   // SR may continue RR and any RR entry may correspond to any one of the send
@@ -2871,7 +2872,7 @@ void WebRtcVideoMediaChannel::OnRtcpReceived(
     engine_->vie()->network()->ReceivedRTCPPacket(
         channel_id,
         packet->data(),
-        static_cast<int>(packet->length()));
+        packet->length());
     }
   }
 
@@ -4022,16 +4023,16 @@ void WebRtcVideoMediaChannel::OnMessage(rtc::Message* msg) {
 }
 
 int WebRtcVideoMediaChannel::SendPacket(int channel, const void* data,
-                                        int len) {
+                                        size_t len) {
   rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-  return MediaChannel::SendPacket(&packet) ? len : -1;
+  return MediaChannel::SendPacket(&packet) ? static_cast<int>(len) : -1;
 }
 
 int WebRtcVideoMediaChannel::SendRTCPPacket(int channel,
                                             const void* data,
-                                            int len) {
+                                            size_t len) {
   rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-  return MediaChannel::SendRtcp(&packet) ? len : -1;
+  return MediaChannel::SendRtcp(&packet) ? static_cast<int>(len) : -1;
 }
 
 void WebRtcVideoMediaChannel::QueueBlackFrame(uint32 ssrc, int64 timestamp,
@@ -331,8 +331,10 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
   int GetLastEngineError() { return engine()->GetLastEngineError(); }
 
   // webrtc::Transport:
-  virtual int SendPacket(int channel, const void* data, int len) OVERRIDE;
-  virtual int SendRTCPPacket(int channel, const void* data, int len) OVERRIDE;
+  virtual int SendPacket(int channel, const void* data, size_t len) OVERRIDE;
+  virtual int SendRTCPPacket(int channel,
+                             const void* data,
+                             size_t len) OVERRIDE;
 
   bool ConferenceModeIsEnabled() const {
     return options_.conference_mode.GetWithDefaultIfUnset(false);
@@ -71,8 +71,8 @@ WebRtcVideoFrame::FrameBuffer::~FrameBuffer() {
   // Make sure that |video_frame_| doesn't delete the buffer, as |owned_data_|
   // will release the buffer if this FrameBuffer owns it.
   uint8_t* new_memory = NULL;
-  uint32_t new_length = 0;
-  uint32_t new_size = 0;
+  size_t new_length = 0;
+  size_t new_size = 0;
   video_frame_.Swap(new_memory, new_length, new_size);
 }
 
@@ -84,8 +84,8 @@ void WebRtcVideoFrame::FrameBuffer::Attach(uint8* data, size_t length) {
 void WebRtcVideoFrame::FrameBuffer::Alias(uint8* data, size_t length) {
   owned_data_.reset();
   uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
-  uint32_t new_length = static_cast<uint32_t>(length);
-  uint32_t new_size = static_cast<uint32_t>(length);
+  size_t new_length = length;
+  size_t new_size = length;
   video_frame_.Swap(new_memory, new_length, new_size);
 }
 
@@ -3154,7 +3154,7 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
   engine()->voe()->network()->ReceivedRTPPacket(
       which_channel,
       packet->data(),
-      static_cast<unsigned int>(packet->length()),
+      packet->length(),
      webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
 }
 
@@ -3179,7 +3179,7 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
     engine()->voe()->network()->ReceivedRTCPPacket(
         which_channel,
         packet->data(),
-        static_cast<unsigned int>(packet->length()));
+        packet->length());
 
     if (IsDefaultChannel(which_channel))
       has_sent_to_default_channel = true;
@@ -3199,7 +3199,7 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
     engine()->voe()->network()->ReceivedRTCPPacket(
         iter->second->channel(),
         packet->data(),
-        static_cast<unsigned int>(packet->length()));
+        packet->length());
   }
 }
 
@@ -3730,7 +3730,7 @@ bool WebRtcVoiceMediaChannel::SetupSharedBweOnChannel(int voe_channel) {
   return true;
 }
 
-int WebRtcSoundclipStream::Read(void *buf, int len) {
+int WebRtcSoundclipStream::Read(void *buf, size_t len) {
   size_t res = 0;
   mem_.Read(buf, len, &res, NULL);
   return static_cast<int>(res);
@@ -69,7 +69,7 @@ class WebRtcSoundclipStream : public webrtc::InStream {
   }
   void set_loop(bool loop) { loop_ = loop; }
 
-  virtual int Read(void* buf, int len) OVERRIDE;
+  virtual int Read(void* buf, size_t len) OVERRIDE;
   virtual int Rewind() OVERRIDE;
 
  private:
@@ -80,7 +80,7 @@ class WebRtcSoundclipStream : public webrtc::InStream {
 // WebRtcMonitorStream is used to monitor a stream coming from WebRtc.
 // For now we just dump the data.
 class WebRtcMonitorStream : public webrtc::OutStream {
-  virtual bool Write(const void *buf, int len) OVERRIDE {
+  virtual bool Write(const void *buf, size_t len) OVERRIDE {
     return true;
   }
 };
@@ -315,17 +315,16 @@ class WebRtcMediaChannel : public T, public webrtc::Transport {
 
  protected:
   // implements Transport interface
-  virtual int SendPacket(int channel, const void *data, int len) OVERRIDE {
+  virtual int SendPacket(int channel, const void *data, size_t len) OVERRIDE {
     rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-    if (!T::SendPacket(&packet)) {
-      return -1;
-    }
-    return len;
+    return T::SendPacket(&packet) ? static_cast<int>(len) : -1;
   }
 
-  virtual int SendRTCPPacket(int channel, const void *data, int len) OVERRIDE {
+  virtual int SendRTCPPacket(int channel,
+                             const void* data,
+                             size_t len) OVERRIDE {
     rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-    return T::SendRtcp(&packet) ? len : -1;
+    return T::SendRtcp(&packet) ? static_cast<int>(len) : -1;
   }
 
  private: