Use size_t more consistently for packet/payload lengths.
See the design doc at
https://docs.google.com/a/chromium.org/document/d/1I6nmE9D_BmCY-IoV6MDPY2V6WYpEI-dg2apWXTfZyUI/edit?usp=sharing
for more information.

This CL was reviewed and approved in pieces in the following CLs:
https://webrtc-codereview.appspot.com/24209004/
https://webrtc-codereview.appspot.com/24229004/
https://webrtc-codereview.appspot.com/24259004/
https://webrtc-codereview.appspot.com/25109004/
https://webrtc-codereview.appspot.com/26099004/
https://webrtc-codereview.appspot.com/27069004/
https://webrtc-codereview.appspot.com/27969004/
https://webrtc-codereview.appspot.com/27989004/
https://webrtc-codereview.appspot.com/29009004/
https://webrtc-codereview.appspot.com/30929004/
https://webrtc-codereview.appspot.com/30939004/
https://webrtc-codereview.appspot.com/31999004/

Committing as TBR to the original reviewers.

BUG=chromium:81439
TEST=none
TBR=pthatcher,henrik.lundin,tina.legrand,stefan,tkchin,glaznev,kjellander,perkj,mflodman,henrika,asapersson,niklas.enbom

Review URL: https://webrtc-codereview.appspot.com/23129004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7726 4adac7df-926f-26a2-2b94-8c16560cd09d
Parent: edc6e57a92
Commit: 4591fbd09f
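
Illustration (not from this CL; DemoTransport and DemoSend are made-up names):
the convention adopted here is that packet/payload lengths travel as size_t,
and narrowing happens only at legacy interfaces that still return int, using
rtc::checked_cast from webrtc/base/safe_conversions.h.

    // Minimal sketch of the size_t-for-lengths convention (hypothetical names).
    #include "webrtc/base/safe_conversions.h"  // rtc::checked_cast

    class DemoTransport {
     public:
      virtual ~DemoTransport() {}
      // Lengths are size_t end to end.
      virtual int SendPacket(int channel, const void* data, size_t len) = 0;
    };

    int DemoSend(DemoTransport* transport, const void* data, size_t len) {
      // Narrow only at the legacy int-returning boundary, with a checked cast.
      return transport->SendPacket(0, data, len) >= 0
                 ? rtc::checked_cast<int>(len)
                 : -1;
    }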
@@ -1258,7 +1258,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // |codec_thread_| for execution.
   virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                              int32_t /* number_of_cores */,
-                             uint32_t /* max_payload_size */) OVERRIDE;
+                             size_t /* max_payload_size */) OVERRIDE;
   virtual int32_t Encode(
       const webrtc::I420VideoFrame& input_image,
       const webrtc::CodecSpecificInfo* /* codec_specific_info */,
@@ -1433,7 +1433,7 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
 int32_t MediaCodecVideoEncoder::InitEncode(
     const webrtc::VideoCodec* codec_settings,
     int32_t /* number_of_cores */,
-    uint32_t /* max_payload_size */) {
+    size_t /* max_payload_size */) {
   // Factory should guard against other codecs being used with us.
   CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";

@@ -152,7 +152,7 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {

   virtual int32 InitEncode(const webrtc::VideoCodec* codecSettings,
                            int32 numberOfCores,
-                           uint32 maxPayloadSize) {
+                           size_t maxPayloadSize) {
     return WEBRTC_VIDEO_CODEC_OK;
   }

@@ -351,7 +351,7 @@ class FakeWebRtcVideoEngine

   // From ViEExternalCapture
   virtual int IncomingFrame(unsigned char* videoFrame,
-                            unsigned int videoFrameLength,
+                            size_t videoFrameLength,
                             unsigned short width,
                             unsigned short height,
                             webrtc::RawVideoType videoType,
@@ -890,7 +890,7 @@ class FakeWebRtcVideoEngine

   WEBRTC_FUNC(ReceivedRTPPacket, (const int channel,
                                   const void* packet,
-                                  const int length,
+                                  const size_t length,
                                   const webrtc::PacketTime& packet_time)) {
     WEBRTC_ASSERT_CHANNEL(channel);
     ASSERT(length > 1);
@@ -899,11 +899,11 @@ class FakeWebRtcVideoEngine
     return 0;
   }

-  WEBRTC_STUB(ReceivedRTCPPacket, (const int, const void*, const int));
+  WEBRTC_STUB(ReceivedRTCPPacket, (const int, const void*, const size_t));
   // Not using WEBRTC_STUB due to bool return value
   virtual bool IsIPv6Enabled(int channel) { return true; }
   WEBRTC_STUB(SetMTU, (int, unsigned int));
-  WEBRTC_STUB(ReceivedBWEPacket, (const int, int64_t, int,
+  WEBRTC_STUB(ReceivedBWEPacket, (const int, int64_t, size_t,
                                   const webrtc::RTPHeader&));
   virtual bool SetBandwidthEstimationConfig(int, const webrtc::Config&) {
     return true;
@@ -1140,8 +1140,8 @@ class FakeWebRtcVideoEngine
                     unsigned int&, unsigned int&, unsigned int&, int&));
   WEBRTC_STUB_CONST(GetSentRTCPStatistics, (const int, unsigned short&,
                     unsigned int&, unsigned int&, unsigned int&, int&));
-  WEBRTC_STUB_CONST(GetRTPStatistics, (const int, unsigned int&, unsigned int&,
-                    unsigned int&, unsigned int&));
+  WEBRTC_STUB_CONST(GetRTPStatistics, (const int, size_t&, unsigned int&,
+                    size_t&, unsigned int&));
   WEBRTC_STUB_CONST(GetReceiveChannelRtcpStatistics, (const int,
                     webrtc::RtcpStatistics&, int&));
   WEBRTC_STUB_CONST(GetSendChannelRtcpStatistics, (const int,
@@ -858,7 +858,7 @@ class FakeWebRtcVoiceEngine
     return 0;
   }
   WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
-                                  unsigned int length)) {
+                                  size_t length)) {
     WEBRTC_CHECK_CHANNEL(channel);
     if (!channels_[channel]->external_transport) return -1;
     channels_[channel]->packets.push_back(
@@ -866,7 +866,7 @@ class FakeWebRtcVoiceEngine
     return 0;
   }
   WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
-                                  unsigned int length,
+                                  size_t length,
                                   const webrtc::PacketTime& packet_time)) {
     WEBRTC_CHECK_CHANNEL(channel);
     if (ReceivedRTPPacket(channel, data, length) == -1) {
@@ -877,7 +877,7 @@ class FakeWebRtcVoiceEngine
   }

   WEBRTC_STUB(ReceivedRTCPPacket, (int channel, const void* data,
-                                   unsigned int length));
+                                   size_t length));

   // webrtc::VoERTP_RTCP
   WEBRTC_STUB(RegisterRTPObserver, (int channel,
@@ -36,6 +36,7 @@
 #include "talk/media/webrtc/webrtcvideoframefactory.h"
 #include "webrtc/base/criticalsection.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/base/safe_conversions.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/base/timeutils.h"

@@ -351,8 +352,8 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
   // Signal down stream components on captured frame.
   // The CapturedFrame class doesn't support planes. We have to ExtractBuffer
   // to one block for it.
-  int length = webrtc::CalcBufferSize(webrtc::kI420,
-                                      sample.width(), sample.height());
+  size_t length =
+      webrtc::CalcBufferSize(webrtc::kI420, sample.width(), sample.height());
   capture_buffer_.resize(length);
   // TODO(ronghuawu): Refactor the WebRtcCapturedFrame to avoid memory copy.
   webrtc::ExtractBuffer(sample, length, &capture_buffer_[0]);
@@ -368,7 +369,7 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,
 // WebRtcCapturedFrame
 WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
                                          void* buffer,
-                                         int length) {
+                                         size_t length) {
   width = sample.width();
   height = sample.height();
   fourcc = FOURCC_I420;
@@ -378,7 +379,7 @@ WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
   // Convert units from VideoFrame RenderTimeMs to CapturedFrame (nanoseconds).
   elapsed_time = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
   time_stamp = elapsed_time;
-  data_size = length;
+  data_size = rtc::checked_cast<uint32>(length);
   data = buffer;
 }

@@ -98,7 +98,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
 struct WebRtcCapturedFrame : public CapturedFrame {
  public:
   WebRtcCapturedFrame(const webrtc::I420VideoFrame& frame,
-                      void* buffer, int length);
+                      void* buffer, size_t length);
 };

 }  // namespace cricket
@@ -305,7 +305,7 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   }

   virtual int DeliverFrame(unsigned char* buffer,
-                           int buffer_size,
+                           size_t buffer_size,
                            uint32_t rtp_time_stamp,
                            int64_t ntp_time_ms,
                            int64_t render_time,
@@ -347,14 +347,14 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {

   virtual bool IsTextureSupported() { return true; }

-  int DeliverBufferFrame(unsigned char* buffer, int buffer_size,
+  int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
                          int64 time_stamp, int64 elapsed_time) {
     WebRtcVideoFrame video_frame;
     video_frame.Alias(buffer, buffer_size, width_, height_,
                       1, 1, elapsed_time, time_stamp, 0);

     // Sanity check on decoded frame size.
-    if (buffer_size != static_cast<int>(VideoFrame::SizeOf(width_, height_))) {
+    if (buffer_size != VideoFrame::SizeOf(width_, height_)) {
       LOG(LS_WARNING) << "WebRtcRenderAdapter (channel " << channel_id_
                       << ") received a strange frame size: "
                       << buffer_size;
@@ -2499,7 +2499,8 @@ bool WebRtcVideoMediaChannel::GetStats(const StatsOptions& options,
       ASSERT(channel_id == default_channel_id_);
       continue;
     }
-    unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv;
+    size_t bytes_sent, bytes_recv;
+    unsigned int packets_sent, packets_recv;
     if (engine_->vie()->rtp()->GetRTPStatistics(channel_id, bytes_sent,
                                                 packets_sent, bytes_recv,
                                                 packets_recv) != 0) {
@@ -2829,7 +2830,7 @@ void WebRtcVideoMediaChannel::OnPacketReceived(
   engine()->vie()->network()->ReceivedRTPPacket(
       processing_channel_id,
       packet->data(),
-      static_cast<int>(packet->length()),
+      packet->length(),
       webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
 }

@@ -2858,7 +2859,7 @@ void WebRtcVideoMediaChannel::OnRtcpReceived(
     engine_->vie()->network()->ReceivedRTCPPacket(
         recv_channel_id,
         packet->data(),
-        static_cast<int>(packet->length()));
+        packet->length());
   }
 }
 // SR may continue RR and any RR entry may correspond to any one of the send
@@ -2871,7 +2872,7 @@ void WebRtcVideoMediaChannel::OnRtcpReceived(
     engine_->vie()->network()->ReceivedRTCPPacket(
         channel_id,
         packet->data(),
-        static_cast<int>(packet->length()));
+        packet->length());
   }
 }

@@ -4022,16 +4023,16 @@ void WebRtcVideoMediaChannel::OnMessage(rtc::Message* msg) {
 }

 int WebRtcVideoMediaChannel::SendPacket(int channel, const void* data,
-                                        int len) {
+                                        size_t len) {
   rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-  return MediaChannel::SendPacket(&packet) ? len : -1;
+  return MediaChannel::SendPacket(&packet) ? static_cast<int>(len) : -1;
 }

 int WebRtcVideoMediaChannel::SendRTCPPacket(int channel,
                                             const void* data,
-                                            int len) {
+                                            size_t len) {
   rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-  return MediaChannel::SendRtcp(&packet) ? len : -1;
+  return MediaChannel::SendRtcp(&packet) ? static_cast<int>(len) : -1;
 }

 void WebRtcVideoMediaChannel::QueueBlackFrame(uint32 ssrc, int64 timestamp,
@@ -331,8 +331,10 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
   int GetLastEngineError() { return engine()->GetLastEngineError(); }

   // webrtc::Transport:
-  virtual int SendPacket(int channel, const void* data, int len) OVERRIDE;
-  virtual int SendRTCPPacket(int channel, const void* data, int len) OVERRIDE;
+  virtual int SendPacket(int channel, const void* data, size_t len) OVERRIDE;
+  virtual int SendRTCPPacket(int channel,
+                             const void* data,
+                             size_t len) OVERRIDE;

   bool ConferenceModeIsEnabled() const {
     return options_.conference_mode.GetWithDefaultIfUnset(false);
@@ -71,8 +71,8 @@ WebRtcVideoFrame::FrameBuffer::~FrameBuffer() {
   // Make sure that |video_frame_| doesn't delete the buffer, as |owned_data_|
   // will release the buffer if this FrameBuffer owns it.
   uint8_t* new_memory = NULL;
-  uint32_t new_length = 0;
-  uint32_t new_size = 0;
+  size_t new_length = 0;
+  size_t new_size = 0;
   video_frame_.Swap(new_memory, new_length, new_size);
 }

@@ -84,8 +84,8 @@ void WebRtcVideoFrame::FrameBuffer::Attach(uint8* data, size_t length) {
 void WebRtcVideoFrame::FrameBuffer::Alias(uint8* data, size_t length) {
   owned_data_.reset();
   uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
-  uint32_t new_length = static_cast<uint32_t>(length);
-  uint32_t new_size = static_cast<uint32_t>(length);
+  size_t new_length = length;
+  size_t new_size = length;
   video_frame_.Swap(new_memory, new_length, new_size);
 }

@@ -3154,7 +3154,7 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
   engine()->voe()->network()->ReceivedRTPPacket(
       which_channel,
       packet->data(),
-      static_cast<unsigned int>(packet->length()),
+      packet->length(),
       webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
 }

@@ -3179,7 +3179,7 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
   engine()->voe()->network()->ReceivedRTCPPacket(
       which_channel,
       packet->data(),
-      static_cast<unsigned int>(packet->length()));
+      packet->length());

   if (IsDefaultChannel(which_channel))
     has_sent_to_default_channel = true;
@@ -3199,7 +3199,7 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
     engine()->voe()->network()->ReceivedRTCPPacket(
         iter->second->channel(),
         packet->data(),
-        static_cast<unsigned int>(packet->length()));
+        packet->length());
   }
 }

@@ -3730,7 +3730,7 @@ bool WebRtcVoiceMediaChannel::SetupSharedBweOnChannel(int voe_channel) {
   return true;
 }

-int WebRtcSoundclipStream::Read(void *buf, int len) {
+int WebRtcSoundclipStream::Read(void *buf, size_t len) {
   size_t res = 0;
   mem_.Read(buf, len, &res, NULL);
   return static_cast<int>(res);
@@ -69,7 +69,7 @@ class WebRtcSoundclipStream : public webrtc::InStream {
   }
   void set_loop(bool loop) { loop_ = loop; }

-  virtual int Read(void* buf, int len) OVERRIDE;
+  virtual int Read(void* buf, size_t len) OVERRIDE;
   virtual int Rewind() OVERRIDE;

  private:
@@ -80,7 +80,7 @@ class WebRtcSoundclipStream : public webrtc::InStream {
 // WebRtcMonitorStream is used to monitor a stream coming from WebRtc.
 // For now we just dump the data.
 class WebRtcMonitorStream : public webrtc::OutStream {
-  virtual bool Write(const void *buf, int len) OVERRIDE {
+  virtual bool Write(const void *buf, size_t len) OVERRIDE {
     return true;
   }
 };
@@ -315,17 +315,16 @@ class WebRtcMediaChannel : public T, public webrtc::Transport {

  protected:
   // implements Transport interface
-  virtual int SendPacket(int channel, const void *data, int len) OVERRIDE {
+  virtual int SendPacket(int channel, const void *data, size_t len) OVERRIDE {
     rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-    if (!T::SendPacket(&packet)) {
-      return -1;
-    }
-    return len;
+    return T::SendPacket(&packet) ? static_cast<int>(len) : -1;
   }

-  virtual int SendRTCPPacket(int channel, const void *data, int len) OVERRIDE {
+  virtual int SendRTCPPacket(int channel,
+                             const void* data,
+                             size_t len) OVERRIDE {
     rtc::Buffer packet(data, len, kMaxRtpPacketLen);
-    return T::SendRtcp(&packet) ? len : -1;
+    return T::SendRtcp(&packet) ? static_cast<int>(len) : -1;
   }

  private:
@@ -184,6 +184,7 @@ static_library("webrtc_base") {
     "firewallsocketserver.h",
     "flags.cc",
     "flags.h",
+    "format_macros.h",
     "gunit_prod.h",
     "helpers.cc",
     "helpers.h",
@@ -127,6 +127,7 @@
     'firewallsocketserver.h',
     'flags.cc',
     'flags.h',
+    'format_macros.h',
     'gunit_prod.h',
     'helpers.cc',
     'helpers.h',
new file (94 lines): webrtc/base/format_macros.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FORMAT_MACROS_H_
+#define WEBRTC_BASE_FORMAT_MACROS_H_
+
+// This file defines the format macros for some integer types and is derived
+// from Chromium's base/format_macros.h.
+
+// To print a 64-bit value in a portable way:
+//   int64_t value;
+//   printf("xyz:%" PRId64, value);
+// The "d" in the macro corresponds to %d; you can also use PRIu64 etc.
+//
+// To print a size_t value in a portable way:
+//   size_t size;
+//   printf("xyz: %" PRIuS, size);
+// The "u" in the macro corresponds to %u, and S is for "size".
+
+#include "webrtc/typedefs.h"
+
+#if defined(WEBRTC_POSIX)
+
+#if (defined(_INTTYPES_H) || defined(_INTTYPES_H_)) && !defined(PRId64)
+#error "inttypes.h has already been included before this header file, but "
+#error "without __STDC_FORMAT_MACROS defined."
+#endif
+
+#if !defined(__STDC_FORMAT_MACROS)
+#define __STDC_FORMAT_MACROS
+#endif
+
+#include <inttypes.h>
+
+#if !defined(PRIuS)
+#define PRIuS "zu"
+#endif
+
+// The size of NSInteger and NSUInteger varies between 32-bit and 64-bit
+// architectures and Apple does not provides standard format macros and
+// recommends casting. This has many drawbacks, so instead define macros
+// for formatting those types.
+#if defined(WEBRTC_MAC)
+#if defined(WEBRTC_ARCH_64_BITS)
+#if !defined(PRIdNS)
+#define PRIdNS "ld"
+#endif
+#if !defined(PRIuNS)
+#define PRIuNS "lu"
+#endif
+#if !defined(PRIxNS)
+#define PRIxNS "lx"
+#endif
+#else  // defined(WEBRTC_ARCH_64_BITS)
+#if !defined(PRIdNS)
+#define PRIdNS "d"
+#endif
+#if !defined(PRIuNS)
+#define PRIuNS "u"
+#endif
+#if !defined(PRIxNS)
+#define PRIxNS "x"
+#endif
+#endif
+#endif  // defined(WEBRTC_MAC)
+
+#else  // WEBRTC_WIN
+
+#if !defined(PRId64)
+#define PRId64 "I64d"
+#endif
+
+#if !defined(PRIu64)
+#define PRIu64 "I64u"
+#endif
+
+#if !defined(PRIx64)
+#define PRIx64 "I64x"
+#endif
+
+#if !defined(PRIuS)
+#define PRIuS "Iu"
+#endif
+
+#endif
+
+#endif  // WEBRTC_BASE_FORMAT_MACROS_H_
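
A minimal usage sketch of the macros defined in the new header (illustrative
only, not part of this CL): PRIuS formats a size_t and PRId64 a 64-bit value
portably on both POSIX and Windows.

    #include <stdint.h>
    #include <stdio.h>
    #include "webrtc/base/format_macros.h"

    static void LogPayload(size_t payload_len, int64_t render_time_ms) {
      // PRIuS expands to "zu" on POSIX and "Iu" on Windows; PRId64 resolves
      // to the platform's 64-bit decimal specifier.
      printf("payload=%" PRIuS " bytes, render_time=%" PRId64 " ms\n",
             payload_len, render_time_ms);
    }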
@@ -56,7 +56,7 @@ class Config;
 class InStream
 {
 public:
-    virtual int Read(void *buf,int len) = 0;
+    virtual int Read(void *buf, size_t len) = 0;
     virtual int Rewind() {return -1;}
     virtual ~InStream() {}
 protected:
@@ -66,7 +66,7 @@ protected:
 class OutStream
 {
 public:
-    virtual bool Write(const void *buf,int len) = 0;
+    virtual bool Write(const void *buf, size_t len) = 0;
     virtual int Rewind() {return -1;}
     virtual ~OutStream() {}
 protected:
@@ -166,8 +166,8 @@ enum FrameType
 class Transport
 {
 public:
-    virtual int SendPacket(int channel, const void *data, int len) = 0;
-    virtual int SendRTCPPacket(int channel, const void *data, int len) = 0;
+    virtual int SendPacket(int channel, const void *data, size_t len) = 0;
+    virtual int SendRTCPPacket(int channel, const void *data, size_t len) = 0;

 protected:
     virtual ~Transport() {}
@@ -240,9 +240,9 @@ struct StreamDataCounters {
         fec_packets(0) {}

   // TODO(pbos): Rename bytes -> media_bytes.
-  uint32_t bytes;  // Payload bytes, excluding RTP headers and padding.
-  uint32_t header_bytes;  // Number of bytes used by RTP headers.
-  uint32_t padding_bytes;  // Number of padding bytes.
+  size_t bytes;  // Payload bytes, excluding RTP headers and padding.
+  size_t header_bytes;  // Number of bytes used by RTP headers.
+  size_t padding_bytes;  // Number of padding bytes.
   uint32_t packets;  // Number of packets.
   uint32_t retransmitted_packets;  // Number of retransmitted packets.
   uint32_t fec_packets;  // Number of redundancy packets.
@@ -828,8 +828,8 @@ struct RTPHeader {
   uint32_t ssrc;
   uint8_t numCSRCs;
   uint32_t arrOfCSRCs[kRtpCsrcSize];
-  uint8_t paddingLength;
-  uint16_t headerLength;
+  size_t paddingLength;
+  size_t headerLength;
   int payload_type_frequency;
   RTPHeaderExtension extension;
 };
@@ -81,8 +81,8 @@ void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv);
 //   - width        :frame width in pixels.
 //   - height       :frame height in pixels.
 // Return value:    :The required size in bytes to accommodate the specified
-//                   video frame or -1 in case of an error .
-int CalcBufferSize(VideoType type, int width, int height);
+//                   video frame.
+size_t CalcBufferSize(VideoType type, int width, int height);

 // TODO(mikhal): Add unit test for these two functions and determine location.
 // Print I420VideoFrame to file
@@ -101,7 +101,7 @@ int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file);
 //   - buffer       : Pointer to buffer
 // Return value: length of buffer if OK, < 0 otherwise.
 int ExtractBuffer(const I420VideoFrame& input_frame,
-                  int size, uint8_t* buffer);
+                  size_t size, uint8_t* buffer);
 // Convert To I420
 // Input:
 //   - src_video_type   : Type of input video.
@@ -119,7 +119,7 @@ int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
                   int crop_x, int crop_y,
                   int src_width, int src_height,
-                  int sample_size,
+                  size_t sample_size,
                   VideoRotationMode rotation,
                   I420VideoFrame* dst_frame);

@@ -89,7 +89,7 @@ class TestLibYuv : public ::testing::Test {
   const int height_;
   const int size_y_;
   const int size_uv_;
-  const int frame_length_;
+  const size_t frame_length_;
 };

 TestLibYuv::TestLibYuv()
@@ -110,8 +110,8 @@ void TestLibYuv::SetUp() {
   ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
                                        input_file_name << "\n";

-  EXPECT_EQ(fread(orig_buffer_.get(), 1, frame_length_, source_file_),
-            static_cast<unsigned int>(frame_length_));
+  EXPECT_EQ(frame_length_,
+            fread(orig_buffer_.get(), 1, frame_length_, source_file_));
   EXPECT_EQ(0, orig_frame_.CreateFrame(size_y_, orig_buffer_.get(),
                                        size_uv_, orig_buffer_.get() + size_y_,
                                        size_uv_, orig_buffer_.get() +
@@ -206,8 +206,8 @@ TEST_F(TestLibYuv, ConvertTest) {
                         width_, height_,
                         width_, (width_ + 1) / 2, (width_ + 1) / 2);
   EXPECT_EQ(0, ConvertFromYV12(yv12_frame, kI420, 0, res_i420_buffer.get()));
-  if (fwrite(res_i420_buffer.get(), 1, frame_length_,
-             output_file) != static_cast<unsigned int>(frame_length_)) {
+  if (fwrite(res_i420_buffer.get(), 1, frame_length_, output_file) !=
+      frame_length_) {
     return;
   }

@@ -44,7 +44,7 @@ class TestScaler : public ::testing::Test {
   const int half_height_;
   const int size_y_;
   const int size_uv_;
-  const int frame_length_;
+  const size_t frame_length_;
 };

 TestScaler::TestScaler()
@@ -392,7 +392,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
   rewind(input_file);
   rewind(output_file);

-  int required_size = CalcBufferSize(kI420, width, height);
+  size_t required_size = CalcBufferSize(kI420, width, height);
   uint8_t* input_buffer = new uint8_t[required_size];
   uint8_t* output_buffer = new uint8_t[required_size];

@@ -400,12 +400,10 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
   double avg_psnr = 0;
   I420VideoFrame in_frame, out_frame;
   while (feof(input_file) == 0) {
-    if ((size_t)required_size !=
-        fread(input_buffer, 1, required_size, input_file)) {
+    if (fread(input_buffer, 1, required_size, input_file) != required_size) {
      break;
     }
-    if ((size_t)required_size !=
-        fread(output_buffer, 1, required_size, output_file)) {
+    if (fread(output_buffer, 1, required_size, output_file) != required_size) {
      break;
     }
     frame_count++;
@@ -441,15 +439,15 @@ void TestScaler::ScaleSequence(ScaleMethod method,
   int64_t start_clock, total_clock;
   total_clock = 0;
   int frame_count = 0;
-  int src_required_size = CalcBufferSize(kI420, src_width, src_height);
+  size_t src_required_size = CalcBufferSize(kI420, src_width, src_height);
   scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[src_required_size]);
   int size_y = src_width * src_height;
   int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2);

   // Running through entire sequence.
   while (feof(source_file) == 0) {
-    if ((size_t)src_required_size !=
-        fread(frame_buffer.get(), 1, src_required_size, source_file))
+    if (fread(frame_buffer.get(), 1, src_required_size, source_file) !=
+        src_required_size)
       break;

     input_frame.CreateFrame(size_y, frame_buffer.get(),
@@ -66,8 +66,10 @@ void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
   *stride_uv = AlignInt((width + 1) / 2, k16ByteAlignment);
 }

-int CalcBufferSize(VideoType type, int width, int height) {
-  int buffer_size = 0;
+size_t CalcBufferSize(VideoType type, int width, int height) {
+  assert(width >= 0);
+  assert(height >= 0);
+  size_t buffer_size = 0;
   switch (type) {
     case kI420:
     case kNV12:
@@ -95,7 +97,7 @@ int CalcBufferSize(VideoType type, int width, int height) {
       break;
     default:
       assert(false);
-      return -1;
+      break;
   }
   return buffer_size;
 }
@@ -122,11 +124,12 @@ int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file) {
 }

 int ExtractBuffer(const I420VideoFrame& input_frame,
-                  int size, uint8_t* buffer) {
+                  size_t size, uint8_t* buffer) {
   assert(buffer);
   if (input_frame.IsZeroSize())
     return -1;
-  int length = CalcBufferSize(kI420, input_frame.width(), input_frame.height());
+  size_t length =
+      CalcBufferSize(kI420, input_frame.width(), input_frame.height());
   if (size < length) {
     return -1;
   }
@@ -147,7 +150,7 @@ int ExtractBuffer(const I420VideoFrame& input_frame,
       plane_ptr += input_frame.stride(static_cast<PlaneType>(plane));
     }
   }
-  return length;
+  return static_cast<int>(length);
 }


@@ -230,7 +233,7 @@ int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
                   int crop_x, int crop_y,
                   int src_width, int src_height,
-                  int sample_size,
+                  size_t sample_size,
                   VideoRotationMode rotation,
                   I420VideoFrame* dst_frame) {
   int dst_width = dst_frame->width();
@@ -12,6 +12,7 @@
 #ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
 #define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_

+#include <stddef.h>
 #include "webrtc/typedefs.h"

 #ifdef __cplusplus
@@ -120,7 +121,7 @@ int16_t WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
  *                             -1 - Error
  */
 int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
-                            int16_t length);
+                            size_t length);

 /****************************************************************************
  * WebRtcCng_Generate(...)
@@ -411,7 +411,7 @@ int16_t WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
  *                             -1 - Error
  */
 int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
-                            int16_t length) {
+                            size_t length) {

   WebRtcCngDecInst_t* inst = (WebRtcCngDecInst_t*) cng_inst;
   int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER];
@@ -427,7 +427,7 @@ int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
   if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
     length = WEBRTC_CNG_MAX_LPC_ORDER + 1;

-  inst->dec_order = length - 1;
+  inst->dec_order = (int16_t)length - 1;

   if (SID[0] > 93)
     SID[0] = 93;
@@ -113,10 +113,9 @@ void AcmReceiveTest::Run() {
     header.header = packet->header();
     header.frameType = kAudioFrameSpeech;
     memset(&header.type.Audio, 0, sizeof(RTPAudioHeader));
-    EXPECT_TRUE(
-        acm_->InsertPacket(packet->payload(),
-                           static_cast<int32_t>(packet->payload_length_bytes()),
-                           header))
+    EXPECT_TRUE(acm_->InsertPacket(packet->payload(),
+                                   packet->payload_length_bytes(),
+                                   header))
         << "Failure when inserting packet:" << std::endl
         << "  PT = " << static_cast<int>(header.header.payloadType) << std::endl
         << "  TS = " << header.header.timestamp << std::endl
@@ -261,7 +261,7 @@ AudioPlayoutMode AcmReceiver::PlayoutMode() const {

 int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
                               const uint8_t* incoming_payload,
-                              int length_payload) {
+                              size_t length_payload) {
   uint32_t receive_timestamp = 0;
   InitialDelayManager::PacketType packet_type =
       InitialDelayManager::kUndefinedPacket;
@@ -67,7 +67,7 @@ class AcmReceiver {
   //
   int InsertPacket(const WebRtcRTPHeader& rtp_header,
                    const uint8_t* incoming_payload,
-                   int length_payload);
+                   size_t length_payload);

   //
   // Asks NetEq for 10 milliseconds of decoded audio.
@@ -115,12 +115,12 @@ class AcmReceiverTest : public AudioPacketizationCallback,
     }
   }

-  virtual int SendData(
+  virtual int32_t SendData(
       FrameType frame_type,
       uint8_t payload_type,
       uint32_t timestamp,
       const uint8_t* payload_data,
-      uint16_t payload_len_bytes,
+      size_t payload_len_bytes,
       const RTPFragmentationHeader* fragmentation) OVERRIDE {
     if (frame_type == kFrameEmpty)
       return 0;
@@ -124,7 +124,7 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback,
       uint8_t payload_type,
       uint32_t timestamp,
       const uint8_t* payload_data,
-      uint16_t payload_len_bytes,
+      size_t payload_len_bytes,
       const RTPFragmentationHeader* fragmentation) OVERRIDE {
     if (frame_type == kFrameEmpty)
       return 0;
@@ -94,7 +94,7 @@ int32_t AcmSendTest::SendData(FrameType frame_type,
                               uint8_t payload_type,
                               uint32_t timestamp,
                               const uint8_t* payload_data,
-                              uint16_t payload_len_bytes,
+                              size_t payload_len_bytes,
                               const RTPFragmentationHeader* fragmentation) {
   // Store the packet locally.
   frame_type_ = frame_type;
@@ -49,7 +49,7 @@ class AcmSendTest : public AudioPacketizationCallback, public PacketSource {
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
-                           uint16_t payload_len_bytes,
+                           size_t payload_len_bytes,
                            const RTPFragmentationHeader* fragmentation) OVERRIDE;

  private:
@@ -98,7 +98,7 @@ int32_t AcmSendTestOldApi::SendData(
     uint8_t payload_type,
     uint32_t timestamp,
     const uint8_t* payload_data,
-    uint16_t payload_len_bytes,
+    size_t payload_len_bytes,
     const RTPFragmentationHeader* fragmentation) {
   // Store the packet locally.
   frame_type_ = frame_type;
@@ -51,7 +51,7 @@ class AcmSendTestOldApi : public AudioPacketizationCallback,
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
-                           uint16_t payload_len_bytes,
+                           size_t payload_len_bytes,
                            const RTPFragmentationHeader* fragmentation) OVERRIDE;

  private:
@@ -314,7 +314,7 @@ int AudioCodingModuleImpl::EncodeFragmentation(int fragmentation_index,
 int AudioCodingModuleImpl::ProcessDualStream() {
   uint8_t stream[kMaxNumFragmentationVectors * MAX_PAYLOAD_SIZE_BYTE];
   uint32_t current_timestamp;
-  int16_t length_bytes = 0;
+  size_t length_bytes = 0;
   RTPFragmentationHeader my_fragmentation;

   uint8_t my_red_payload_type;
@@ -336,8 +336,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
     // Nothing to send.
     return 0;
   }
-  int len_bytes_previous_secondary = static_cast<int>(
-      fragmentation_.fragmentationLength[2]);
+  size_t len_bytes_previous_secondary = fragmentation_.fragmentationLength[2];
   assert(len_bytes_previous_secondary <= MAX_PAYLOAD_SIZE_BYTE);
   bool has_previous_payload = len_bytes_previous_secondary > 0;

@@ -1689,13 +1688,8 @@ int AudioCodingModuleImpl::ReceiveCodec(CodecInst* current_codec) const {

 // Incoming packet from network parsed and ready for decode.
 int AudioCodingModuleImpl::IncomingPacket(const uint8_t* incoming_payload,
-                                          const int payload_length,
+                                          const size_t payload_length,
                                           const WebRtcRTPHeader& rtp_header) {
-  if (payload_length < 0) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
-                 "IncomingPacket() Error, payload-length cannot be negative");
-    return -1;
-  }
   int last_audio_pltype = receiver_.last_audio_payload_type();
   if (receiver_.InsertPacket(rtp_header, incoming_payload, payload_length) <
       0) {
@@ -1797,16 +1791,9 @@ int AudioCodingModuleImpl::RegisterVADCallback(ACMVADCallback* vad_callback) {

 // TODO(tlegrand): Modify this function to work for stereo, and add tests.
 int AudioCodingModuleImpl::IncomingPayload(const uint8_t* incoming_payload,
-                                           int payload_length,
+                                           size_t payload_length,
                                            uint8_t payload_type,
                                            uint32_t timestamp) {
-  if (payload_length < 0) {
-    // Log error in trace file.
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
-                 "IncomingPacket() Error, payload-length cannot be negative");
-    return -1;
-  }
-
   // We are not acquiring any lock when interacting with |aux_rtp_header_| no
   // other method uses this member variable.
   if (aux_rtp_header_ == NULL) {
@@ -1960,7 +1947,7 @@ int AudioCodingModuleImpl::REDPayloadISAC(int isac_rate,
 }

 void AudioCodingModuleImpl::ResetFragmentation(int vector_size) {
-  for (int n = 0; n < kMaxNumFragmentationVectors; n++) {
+  for (size_t n = 0; n < kMaxNumFragmentationVectors; n++) {
     fragmentation_.fragmentationOffset[n] = n * MAX_PAYLOAD_SIZE_BYTE;
   }
   memset(fragmentation_.fragmentationLength, 0, kMaxNumFragmentationVectors *
@@ -2116,14 +2103,14 @@ bool AudioCodingImpl::RegisterReceiveCodec(int decoder_type,
 }

 bool AudioCodingImpl::InsertPacket(const uint8_t* incoming_payload,
-                                   int32_t payload_len_bytes,
+                                   size_t payload_len_bytes,
                                    const WebRtcRTPHeader& rtp_info) {
   return acm_old_->IncomingPacket(
       incoming_payload, payload_len_bytes, rtp_info) == 0;
 }

 bool AudioCodingImpl::InsertPayload(const uint8_t* incoming_payload,
-                                    int32_t payload_len_byte,
+                                    size_t payload_len_byte,
                                     uint8_t payload_type,
                                     uint32_t timestamp) {
   FATAL() << "Not implemented yet.";
@@ -156,13 +156,13 @@ class AudioCodingModuleImpl : public AudioCodingModule {

   // Incoming packet from network parsed and ready for decode.
   virtual int IncomingPacket(const uint8_t* incoming_payload,
-                             int payload_length,
+                             const size_t payload_length,
                              const WebRtcRTPHeader& rtp_info) OVERRIDE;

   // Incoming payloads, without rtp-info, the rtp-info will be created in ACM.
   // One usage for this API is when pre-encoded files are pushed in ACM.
   virtual int IncomingPayload(const uint8_t* incoming_payload,
-                              int payload_length,
+                              const size_t payload_length,
                               uint8_t payload_type,
                               uint32_t timestamp) OVERRIDE;

@@ -423,11 +423,11 @@ class AudioCodingImpl : public AudioCoding {
                                    uint8_t payload_type) OVERRIDE;

   virtual bool InsertPacket(const uint8_t* incoming_payload,
-                            int32_t payload_len_bytes,
+                            size_t payload_len_bytes,
                             const WebRtcRTPHeader& rtp_info) OVERRIDE;

   virtual bool InsertPayload(const uint8_t* incoming_payload,
-                             int32_t payload_len_byte,
+                             size_t payload_len_byte,
                              uint8_t payload_type,
                              uint32_t timestamp) OVERRIDE;

@@ -42,7 +42,7 @@ const int kSampleRateHz = 16000;
 const int kNumSamples10ms = kSampleRateHz / 100;
 const int kFrameSizeMs = 10;  // Multiple of 10.
 const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
-const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
+const size_t kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
 const uint8_t kPayloadType = 111;

 class RtpUtility {
@@ -87,7 +87,7 @@ class PacketizationCallbackStub : public AudioPacketizationCallback {
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
-                           uint16_t payload_len_bytes,
+                           size_t payload_len_bytes,
                            const RTPFragmentationHeader* fragmentation) OVERRIDE {
     CriticalSectionScoped lock(crit_sect_.get());
     ++num_calls_;
@@ -87,7 +87,7 @@ class PacketizationCallbackStub : public AudioPacketizationCallback {
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
-                           uint16_t payload_len_bytes,
+                           size_t payload_len_bytes,
                            const RTPFragmentationHeader* fragmentation) OVERRIDE {
     CriticalSectionScoped lock(crit_sect_.get());
     ++num_calls_;
@@ -36,13 +36,12 @@ class AudioPacketizationCallback {
  public:
   virtual ~AudioPacketizationCallback() {}

-  virtual int32_t SendData(
-      FrameType frame_type,
-      uint8_t payload_type,
-      uint32_t timestamp,
-      const uint8_t* payload_data,
-      uint16_t payload_len_bytes,
-      const RTPFragmentationHeader* fragmentation) = 0;
+  virtual int32_t SendData(FrameType frame_type,
+                           uint8_t payload_type,
+                           uint32_t timestamp,
+                           const uint8_t* payload_data,
+                           size_t payload_len_bytes,
+                           const RTPFragmentationHeader* fragmentation) = 0;
 };

 // Callback class used for inband Dtmf detection
@@ -668,8 +667,8 @@ class AudioCodingModule: public Module {
   //      0 if payload is successfully pushed in.
   //
   virtual int32_t IncomingPacket(const uint8_t* incoming_payload,
-                                 const int32_t payload_len_bytes,
+                                 const size_t payload_len_bytes,
                                  const WebRtcRTPHeader& rtp_info) = 0;

   ///////////////////////////////////////////////////////////////////////////
   // int32_t IncomingPayload()
@@ -696,9 +695,9 @@ class AudioCodingModule: public Module {
   //      0 if payload is successfully pushed in.
   //
   virtual int32_t IncomingPayload(const uint8_t* incoming_payload,
-                                  const int32_t payload_len_byte,
+                                  const size_t payload_len_byte,
                                   const uint8_t payload_type,
                                   const uint32_t timestamp = 0) = 0;

   ///////////////////////////////////////////////////////////////////////////
   // int SetMinimumPlayoutDelay()
@@ -1090,12 +1089,12 @@ class AudioCoding {
   // |incoming_payload| contains the RTP payload after the RTP header. Return
   // true if successful, false if not.
   virtual bool InsertPacket(const uint8_t* incoming_payload,
-                            int32_t payload_len_bytes,
+                            size_t payload_len_bytes,
                             const WebRtcRTPHeader& rtp_info) = 0;

   // TODO(henrik.lundin): Remove this method?
   virtual bool InsertPayload(const uint8_t* incoming_payload,
-                             int32_t payload_len_byte,
+                             size_t payload_len_byte,
                              uint8_t payload_type,
                              uint32_t timestamp) = 0;

@@ -13,18 +13,21 @@
 #include <assert.h>
 #include <iostream>

+#include "webrtc/base/format_macros.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"

 namespace webrtc {

-int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
-                          const uint32_t timeStamp, const uint8_t* payloadData,
-                          const uint16_t payloadSize,
+int32_t Channel::SendData(FrameType frameType,
+                          uint8_t payloadType,
+                          uint32_t timeStamp,
+                          const uint8_t* payloadData,
+                          size_t payloadSize,
                           const RTPFragmentationHeader* fragmentation) {
   WebRtcRTPHeader rtpInfo;
   int32_t status;
-  uint16_t payloadDataSize = payloadSize;
+  size_t payloadDataSize = payloadSize;

   rtpInfo.header.markerBit = false;
   rtpInfo.header.ssrc = 0;
@@ -52,8 +55,8 @@ int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
         (fragmentation->fragmentationVectorSize == 2)) {
       // only 0x80 if we have multiple blocks
       _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
-      uint32_t REDheader = (((uint32_t) fragmentation->fragmentationTimeDiff[1])
-          << 10) + fragmentation->fragmentationLength[1];
+      size_t REDheader = (fragmentation->fragmentationTimeDiff[1] << 10) +
+          fragmentation->fragmentationLength[1];
       _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
       _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
       _payloadData[3] = uint8_t(REDheader & 0x000000FF);
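The bit manipulation above packs an RFC 2198 RED block header: one F bit, a 7-bit payload type, a 14-bit timestamp offset, and a 10-bit block length, written big-endian into four bytes. The same packing in isolation, as a hedged sketch (the function name and layout comments are the editor's, not part of the CL):

#include <cstddef>
#include <cstdint>

void PackRedBlockHeader(uint8_t payload_type,
                        uint16_t timestamp_offset,  // 14 bits used.
                        size_t block_length,        // 10 bits used.
                        uint8_t header[4]) {
  // 0x80 sets the F bit, meaning another block header follows this one.
  header[0] = 0x80 | (payload_type & 0x7F);
  const uint32_t rest = (static_cast<uint32_t>(timestamp_offset) << 10) |
                        (static_cast<uint32_t>(block_length) & 0x3FF);
  header[1] = static_cast<uint8_t>((rest >> 16) & 0xFF);
  header[2] = static_cast<uint8_t>((rest >> 8) & 0xFF);
  header[3] = static_cast<uint8_t>(rest & 0xFF);
}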
@@ -72,7 +75,7 @@ int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
       // single block (newest one)
       memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
              fragmentation->fragmentationLength[0]);
-      payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
+      payloadDataSize = fragmentation->fragmentationLength[0];
       rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
     }
   } else {
@@ -121,7 +124,7 @@ int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
 }

 // TODO(turajs): rewite this method.
-void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize) {
+void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize) {
   int n;
   if ((rtpInfo.header.payloadType != _lastPayloadType)
       && (_lastPayloadType != -1)) {
@@ -371,7 +374,7 @@ void Channel::PrintStats(CodecInst& codecInst) {
            payloadStats.frameSizeStats[k].frameSizeSample);
     printf("Average Rate.................. %.0f bits/sec\n",
            payloadStats.frameSizeStats[k].rateBitPerSec);
-    printf("Maximum Payload-Size.......... %d Bytes\n",
+    printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n",
            payloadStats.frameSizeStats[k].maxPayloadLen);
     printf(
         "Maximum Instantaneous Rate.... %.0f bits/sec\n",
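The %" PRIuS " conversion above is the portable way to print a size_t in this code base: PRIuS is defined in webrtc/base/format_macros.h (included in Channel.cc earlier in this diff) and expands to the platform's printf length modifier for size_t. A minimal usage sketch, with an illustrative function name:

#include <cstdio>
#include "webrtc/base/format_macros.h"

void PrintMaxPayloadSize(size_t max_payload_len) {
  // One format string that builds on both POSIX and Windows toolchains.
  printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n", max_payload_len);
}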
@@ -27,7 +27,7 @@ class CriticalSectionWrapper;
 // TODO(turajs): Write constructor for this structure.
 struct ACMTestFrameSizeStats {
   uint16_t frameSizeSample;
-  int16_t maxPayloadLen;
+  size_t maxPayloadLen;
   uint32_t numPackets;
   uint64_t totalPayloadLenByte;
   uint64_t totalEncodedSamples;
@@ -39,7 +39,7 @@ struct ACMTestFrameSizeStats {
 struct ACMTestPayloadStats {
   bool newPacket;
   int16_t payloadType;
-  int16_t lastPayloadLenByte;
+  size_t lastPayloadLenByte;
   uint32_t lastTimestamp;
   ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
 };
@@ -51,9 +51,11 @@ class Channel : public AudioPacketizationCallback {
   ~Channel();

   virtual int32_t SendData(
-      const FrameType frameType, const uint8_t payloadType,
-      const uint32_t timeStamp, const uint8_t* payloadData,
-      const uint16_t payloadSize,
+      FrameType frameType,
+      uint8_t payloadType,
+      uint32_t timeStamp,
+      const uint8_t* payloadData,
+      size_t payloadSize,
       const RTPFragmentationHeader* fragmentation) OVERRIDE;

   void RegisterReceiverACM(AudioCodingModule *acm);
@@ -93,7 +95,7 @@ class Channel : public AudioPacketizationCallback {
   }

  private:
-  void CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize);
+  void CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize);

   AudioCodingModule* _receiverACM;
   uint16_t _seqNo;
@@ -37,7 +37,7 @@ TestPacketization::~TestPacketization() {
 int32_t TestPacketization::SendData(
     const FrameType /* frameType */, const uint8_t payloadType,
     const uint32_t timeStamp, const uint8_t* payloadData,
-    const uint16_t payloadSize,
+    const size_t payloadSize,
     const RTPFragmentationHeader* /* fragmentation */) {
   _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
                     _frequency);
@@ -30,9 +30,11 @@ class TestPacketization : public AudioPacketizationCallback {
   TestPacketization(RTPStream *rtpStream, uint16_t frequency);
   ~TestPacketization();
   virtual int32_t SendData(
-      const FrameType frameType, const uint8_t payloadType,
-      const uint32_t timeStamp, const uint8_t* payloadData,
-      const uint16_t payloadSize,
+      const FrameType frameType,
+      const uint8_t payloadType,
+      const uint32_t timeStamp,
+      const uint8_t* payloadData,
+      const size_t payloadSize,
       const RTPFragmentationHeader* fragmentation) OVERRIDE;

  private:
@@ -92,8 +94,8 @@ class Receiver {
   uint8_t _incomingPayload[MAX_INCOMING_PAYLOAD];
   RTPStream* _rtpStream;
   WebRtcRTPHeader _rtpInfo;
-  uint16_t _realPayloadSizeBytes;
-  uint16_t _payloadSizeBytes;
+  size_t _realPayloadSizeBytes;
+  size_t _payloadSizeBytes;
   uint32_t _nextTime;
 };

@@ -11,6 +11,7 @@
 #include "RTPFile.h"

 #include <stdlib.h>
+#include <limits>

 #ifdef WIN32
 # include <Winsock2.h>
@@ -60,7 +61,7 @@ void RTPStream::MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType,
 }

 RTPPacket::RTPPacket(uint8_t payloadType, uint32_t timeStamp, int16_t seqNo,
-                     const uint8_t* payloadData, uint16_t payloadSize,
+                     const uint8_t* payloadData, size_t payloadSize,
                      uint32_t frequency)
     : payloadType(payloadType),
       timeStamp(timeStamp),
@@ -87,7 +88,7 @@ RTPBuffer::~RTPBuffer() {

 void RTPBuffer::Write(const uint8_t payloadType, const uint32_t timeStamp,
                       const int16_t seqNo, const uint8_t* payloadData,
-                      const uint16_t payloadSize, uint32_t frequency) {
+                      const size_t payloadSize, uint32_t frequency) {
   RTPPacket *packet = new RTPPacket(payloadType, timeStamp, seqNo, payloadData,
                                     payloadSize, frequency);
   _queueRWLock->AcquireLockExclusive();
@@ -95,8 +96,8 @@ void RTPBuffer::Write(const uint8_t payloadType, const uint32_t timeStamp,
   _queueRWLock->ReleaseLockExclusive();
 }

-uint16_t RTPBuffer::Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
-                         uint16_t payloadSize, uint32_t* offset) {
+size_t RTPBuffer::Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+                       size_t payloadSize, uint32_t* offset) {
   _queueRWLock->AcquireLockShared();
   RTPPacket *packet = _rtpQueue.front();
   _rtpQueue.pop();
@@ -143,21 +144,11 @@ void RTPFile::WriteHeader() {
   fprintf(_rtpFile, "#!RTPencode%s\n", "1.0");
   uint32_t dummy_variable = 0;
   // should be converted to network endian format, but does not matter when 0
-  if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&dummy_variable, 2, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&dummy_variable, 2, 1, _rtpFile) != 1) {
-    return;
-  }
+  EXPECT_EQ(1u, fwrite(&dummy_variable, 4, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&dummy_variable, 4, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&dummy_variable, 4, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&dummy_variable, 2, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&dummy_variable, 2, 1, _rtpFile));
   fflush(_rtpFile);
 }

@@ -180,35 +171,26 @@ void RTPFile::ReadHeader() {

 void RTPFile::Write(const uint8_t payloadType, const uint32_t timeStamp,
                     const int16_t seqNo, const uint8_t* payloadData,
-                    const uint16_t payloadSize, uint32_t frequency) {
+                    const size_t payloadSize, uint32_t frequency) {
   /* write RTP packet to file */
   uint8_t rtpHeader[12];
   MakeRTPheader(rtpHeader, payloadType, seqNo, timeStamp, 0);
-  uint16_t lengthBytes = htons(12 + payloadSize + 8);
-  uint16_t plen = htons(12 + payloadSize);
+  ASSERT_LE(12 + payloadSize + 8, std::numeric_limits<u_short>::max());
+  uint16_t lengthBytes = htons(static_cast<u_short>(12 + payloadSize + 8));
+  uint16_t plen = htons(static_cast<u_short>(12 + payloadSize));
   uint32_t offsetMs;

   offsetMs = (timeStamp / (frequency / 1000));
   offsetMs = htonl(offsetMs);
-  if (fwrite(&lengthBytes, 2, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&plen, 2, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(&offsetMs, 4, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(rtpHeader, 12, 1, _rtpFile) != 1) {
-    return;
-  }
-  if (fwrite(payloadData, 1, payloadSize, _rtpFile) != payloadSize) {
-    return;
-  }
+  EXPECT_EQ(1u, fwrite(&lengthBytes, 2, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&plen, 2, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&offsetMs, 4, 1, _rtpFile));
+  EXPECT_EQ(1u, fwrite(&rtpHeader, 12, 1, _rtpFile));
+  EXPECT_EQ(payloadSize, fwrite(payloadData, 1, payloadSize, _rtpFile));
 }

-uint16_t RTPFile::Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
-                       uint16_t payloadSize, uint32_t* offset) {
+size_t RTPFile::Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+                     size_t payloadSize, uint32_t* offset) {
   uint16_t lengthBytes;
   uint16_t plen;
   uint8_t rtpHeader[12];
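RTPFile::Write() above narrows a size_t payload size into the 16-bit, network-order length fields of the RTP dump format, and guards the cast with ASSERT_LE. The same pattern in isolation, as a hedged sketch; the helper name and the inlined byte swap are the editor's, and the swap matches htons only on little-endian hosts:

#include <cstddef>
#include <cstdint>
#include <limits>

// Returns false instead of silently truncating when the value does not fit
// in a 16-bit wire field.
bool ToWireLength16(size_t host_value, uint16_t* wire_value) {
  if (host_value > std::numeric_limits<uint16_t>::max())
    return false;
  const uint16_t v = static_cast<uint16_t>(host_value);
  *wire_value = static_cast<uint16_t>((v << 8) | (v >> 8));  // byte swap
  return true;
}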
@@ -237,7 +219,7 @@ uint16_t RTPFile::Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
   if (lengthBytes < 20) {
     return 0;
   }
-  if (payloadSize < (lengthBytes - 20)) {
+  if (payloadSize < static_cast<size_t>((lengthBytes - 20))) {
     return 0;
   }
   lengthBytes -= 20;
@@ -28,12 +28,12 @@ class RTPStream {

   virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
                      const int16_t seqNo, const uint8_t* payloadData,
-                     const uint16_t payloadSize, uint32_t frequency) = 0;
+                     const size_t payloadSize, uint32_t frequency) = 0;

   // Returns the packet's payload size. Zero should be treated as an
   // end-of-stream (in the case that EndOfFile() is true) or an error.
-  virtual uint16_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
-                        uint16_t payloadSize, uint32_t* offset) = 0;
+  virtual size_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+                      size_t payloadSize, uint32_t* offset) = 0;
   virtual bool EndOfFile() const = 0;

  protected:
@@ -46,7 +46,7 @@ class RTPStream {
 class RTPPacket {
  public:
   RTPPacket(uint8_t payloadType, uint32_t timeStamp, int16_t seqNo,
-            const uint8_t* payloadData, uint16_t payloadSize,
+            const uint8_t* payloadData, size_t payloadSize,
             uint32_t frequency);

   ~RTPPacket();
@@ -55,7 +55,7 @@ class RTPPacket {
   uint32_t timeStamp;
   int16_t seqNo;
   uint8_t* payloadData;
-  uint16_t payloadSize;
+  size_t payloadSize;
   uint32_t frequency;
 };

@@ -67,10 +67,10 @@ class RTPBuffer : public RTPStream {

   virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
                      const int16_t seqNo, const uint8_t* payloadData,
-                     const uint16_t payloadSize, uint32_t frequency) OVERRIDE;
+                     const size_t payloadSize, uint32_t frequency) OVERRIDE;

-  virtual uint16_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
-                        uint16_t payloadSize, uint32_t* offset) OVERRIDE;
+  virtual size_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+                      size_t payloadSize, uint32_t* offset) OVERRIDE;

   virtual bool EndOfFile() const OVERRIDE;

@@ -99,10 +99,10 @@ class RTPFile : public RTPStream {

   virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
                      const int16_t seqNo, const uint8_t* payloadData,
-                     const uint16_t payloadSize, uint32_t frequency) OVERRIDE;
+                     const size_t payloadSize, uint32_t frequency) OVERRIDE;

-  virtual uint16_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
-                        uint16_t payloadSize, uint32_t* offset) OVERRIDE;
+  virtual size_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+                      size_t payloadSize, uint32_t* offset) OVERRIDE;

   virtual bool EndOfFile() const OVERRIDE {
     return _rtpEOF;
@@ -10,7 +10,8 @@

 #include "webrtc/modules/audio_coding/main/test/TestAllCodecs.h"

-#include <stdio.h>
+#include <cstdio>
+#include <limits>
 #include <string>

 #include "testing/gtest/include/gtest/gtest.h"
@@ -32,6 +33,10 @@
 // The test loops through all available mono codecs, encode at "a" sends over
 // the channel, and decodes at "b".

+namespace {
+const size_t kVariableSize = std::numeric_limits<size_t>::max();
+}
+
 namespace webrtc {

 // Class for simulating packet handling.
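The anonymous-namespace constant added above is the unsigned replacement for the old "-1 means variable rate" convention: std::numeric_limits<size_t>::max() is a value no real packet size can take, so it works as a sentinel. A small sketch of how such a sentinel is meant to be used; the helper name is the editor's:

#include <cstddef>
#include <limits>

namespace {
const size_t kVariableSize = std::numeric_limits<size_t>::max();
}  // namespace

bool PacketSizeMatches(size_t received_bytes, size_t expected_bytes) {
  // Variable-rate codecs have no single expected size, so anything passes.
  if (expected_bytes == kVariableSize)
    return true;
  return received_bytes == expected_bytes;
}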
@@ -54,7 +59,7 @@ void TestPack::RegisterReceiverACM(AudioCodingModule* acm) {

 int32_t TestPack::SendData(FrameType frame_type, uint8_t payload_type,
                            uint32_t timestamp, const uint8_t* payload_data,
-                           uint16_t payload_size,
+                           size_t payload_size,
                            const RTPFragmentationHeader* fragmentation) {
   WebRtcRTPHeader rtp_info;
   int32_t status;
@@ -87,7 +92,7 @@ int32_t TestPack::SendData(FrameType frame_type, uint8_t payload_type,
   return status;
 }

-uint16_t TestPack::payload_size() {
+size_t TestPack::payload_size() {
   return payload_size_;
 }

@@ -459,13 +464,13 @@ void TestAllCodecs::Perform() {
   test_count_++;
   OpenOutFile(test_count_);
   char codec_isac[] = "ISAC";
-  RegisterSendCodec('A', codec_isac, 16000, -1, 480, -1);
+  RegisterSendCodec('A', codec_isac, 16000, -1, 480, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 16000, -1, 960, -1);
+  RegisterSendCodec('A', codec_isac, 16000, -1, 960, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 16000, 15000, 480, -1);
+  RegisterSendCodec('A', codec_isac, 16000, 15000, 480, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 16000, 32000, 960, -1);
+  RegisterSendCodec('A', codec_isac, 16000, 32000, 960, kVariableSize);
   Run(channel_a_to_b_);
   outfile_b_.Close();
 #endif
@@ -475,13 +480,13 @@ void TestAllCodecs::Perform() {
   }
   test_count_++;
   OpenOutFile(test_count_);
-  RegisterSendCodec('A', codec_isac, 32000, -1, 960, -1);
+  RegisterSendCodec('A', codec_isac, 32000, -1, 960, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 32000, 56000, 960, -1);
+  RegisterSendCodec('A', codec_isac, 32000, 56000, 960, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 32000, 37000, 960, -1);
+  RegisterSendCodec('A', codec_isac, 32000, 37000, 960, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_isac, 32000, 32000, 960, -1);
+  RegisterSendCodec('A', codec_isac, 32000, 32000, 960, kVariableSize);
   Run(channel_a_to_b_);
   outfile_b_.Close();
 #endif
@@ -611,19 +616,19 @@ void TestAllCodecs::Perform() {
   test_count_++;
   OpenOutFile(test_count_);
   char codec_opus[] = "OPUS";
-  RegisterSendCodec('A', codec_opus, 48000, 6000, 480, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 6000, 480, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 20000, 480*2, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 20000, 480*2, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 32000, 480*4, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 32000, 480*4, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 48000, 480, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 48000, 480, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 96000, 480*6, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 96000, 480*6, kVariableSize);
   Run(channel_a_to_b_);
-  RegisterSendCodec('A', codec_opus, 48000, 500000, 480*2, -1);
+  RegisterSendCodec('A', codec_opus, 48000, 500000, 480*2, kVariableSize);
   Run(channel_a_to_b_);
   outfile_b_.Close();
 #endif
@@ -686,10 +691,11 @@ void TestAllCodecs::Perform() {
 // packet_size - packet size in samples
 // extra_byte  - if extra bytes needed compared to the bitrate
 //               used when registering, can be an internal header
-//               set to -1 if the codec is a variable rate codec
+//               set to kVariableSize if the codec is a variable
+//               rate codec
 void TestAllCodecs::RegisterSendCodec(char side, char* codec_name,
                                       int32_t sampling_freq_hz, int rate,
-                                      int packet_size, int extra_byte) {
+                                      int packet_size, size_t extra_byte) {
   if (test_mode_ != 0) {
     // Print out codec and settings.
     printf("codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
@@ -711,14 +717,14 @@ void TestAllCodecs::RegisterSendCodec(char side, char* codec_name,

   // Store the expected packet size in bytes, used to validate the received
   // packet. If variable rate codec (extra_byte == -1), set to -1.
-  if (extra_byte != -1) {
+  if (extra_byte != kVariableSize) {
     // Add 0.875 to always round up to a whole byte
-    packet_size_bytes_ = static_cast<int>(static_cast<float>(packet_size
-        * rate) / static_cast<float>(sampling_freq_hz * 8) + 0.875)
-        + extra_byte;
+    packet_size_bytes_ = static_cast<size_t>(
+        static_cast<float>(packet_size * rate) /
+        static_cast<float>(sampling_freq_hz * 8) + 0.875) + extra_byte;
   } else {
     // Packets will have a variable size.
-    packet_size_bytes_ = -1;
+    packet_size_bytes_ = kVariableSize;
   }

   // Set pointer to the ACM where to register the codec.
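The expected-packet-size computation above is: samples per packet times bitrate, divided by (sample rate * 8 bits per byte), rounded up to a whole byte via the +0.875 term, plus any codec-specific header bytes. A standalone version of the same arithmetic, with a function name supplied by the editor:

#include <cstddef>

size_t ExpectedPacketSizeBytes(int packet_size_samples,
                               int rate_bps,
                               int sampling_freq_hz,
                               size_t extra_bytes) {
  return static_cast<size_t>(
             static_cast<float>(packet_size_samples * rate_bps) /
             static_cast<float>(sampling_freq_hz * 8) + 0.875) +
         extra_bytes;
}

// Example: 960 samples of 32 kHz iSAC at 32000 bit/s gives
// 960 * 32000 / (32000 * 8) = 120, and the cast truncates 120.875 to 120.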
@@ -751,7 +757,7 @@ void TestAllCodecs::Run(TestPack* channel) {
   AudioFrame audio_frame;

   int32_t out_freq_hz = outfile_b_.SamplingFrequency();
-  uint16_t receive_size;
+  size_t receive_size;
   uint32_t timestamp_diff;
   channel->reset_payload_size();
   int error_count = 0;
@@ -768,8 +774,8 @@ void TestAllCodecs::Run(TestPack* channel) {
     // Verify that the received packet size matches the settings.
     receive_size = channel->payload_size();
     if (receive_size) {
-      if ((static_cast<int>(receive_size) != packet_size_bytes_) &&
-          (packet_size_bytes_ > -1)) {
+      if ((receive_size != packet_size_bytes_) &&
+          (packet_size_bytes_ != kVariableSize)) {
         error_count++;
       }

@@ -29,12 +29,14 @@ class TestPack : public AudioPacketizationCallback {
   void RegisterReceiverACM(AudioCodingModule* acm);

   virtual int32_t SendData(
-      FrameType frame_type, uint8_t payload_type,
-      uint32_t timestamp, const uint8_t* payload_data,
-      uint16_t payload_size,
+      FrameType frame_type,
+      uint8_t payload_type,
+      uint32_t timestamp,
+      const uint8_t* payload_data,
+      size_t payload_size,
       const RTPFragmentationHeader* fragmentation) OVERRIDE;

-  uint16_t payload_size();
+  size_t payload_size();
   uint32_t timestamp_diff();
   void reset_payload_size();

@@ -45,7 +47,7 @@ class TestPack : public AudioPacketizationCallback {
   uint32_t timestamp_diff_;
   uint32_t last_in_timestamp_;
   uint64_t total_bytes_;
-  uint16_t payload_size_;
+  size_t payload_size_;
 };

 class TestAllCodecs : public ACMTest {
@@ -61,7 +63,7 @@ class TestAllCodecs : public ACMTest {
   // This is useful for codecs which support several sampling frequency.
   // Note! Only mono mode is tested in this test.
   void RegisterSendCodec(char side, char* codec_name, int32_t sampling_freq_hz,
-                         int rate, int packet_size, int extra_byte);
+                         int rate, int packet_size, size_t extra_byte);

   void Run(TestPack* channel);
   void OpenOutFile(int test_number);
@@ -75,7 +77,7 @@ class TestAllCodecs : public ACMTest {
   PCMFile outfile_b_;
   int test_count_;
   int packet_size_samples_;
-  int packet_size_bytes_;
+  size_t packet_size_bytes_;
 };

 } // namespace webrtc
@@ -48,7 +48,7 @@ int32_t TestPackStereo::SendData(const FrameType frame_type,
                                  const uint8_t payload_type,
                                  const uint32_t timestamp,
                                  const uint8_t* payload_data,
-                                 const uint16_t payload_size,
+                                 const size_t payload_size,
                                  const RTPFragmentationHeader* fragmentation) {
   WebRtcRTPHeader rtp_info;
   int32_t status = 0;
@@ -114,18 +114,26 @@ TestStereo::TestStereo(int test_mode)
       test_cntr_(0),
       pack_size_samp_(0),
       pack_size_bytes_(0),
-      counter_(0),
-      g722_pltype_(0),
-      l16_8khz_pltype_(-1),
-      l16_16khz_pltype_(-1),
-      l16_32khz_pltype_(-1),
-      pcma_pltype_(-1),
-      pcmu_pltype_(-1),
-      celt_pltype_(-1),
-      opus_pltype_(-1),
-      cn_8khz_pltype_(-1),
-      cn_16khz_pltype_(-1),
-      cn_32khz_pltype_(-1) {
+      counter_(0)
+#ifdef WEBRTC_CODEC_G722
+      , g722_pltype_(0)
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+      , l16_8khz_pltype_(-1)
+      , l16_16khz_pltype_(-1)
+      , l16_32khz_pltype_(-1)
+#endif
+#ifdef PCMA_AND_PCMU
+      , pcma_pltype_(-1)
+      , pcmu_pltype_(-1)
+#endif
+#ifdef WEBRTC_CODEC_CELT
+      , celt_pltype_(-1)
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+      , opus_pltype_(-1)
+#endif
+{
   // test_mode = 0 for silent test (auto test)
   test_mode_ = test_mode;
 }
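The constructor rewrite above follows from the header change later in this diff: members that only exist under a given #define can only be initialized under that same #define, and the leading-comma style keeps each conditional block self-contained. A stripped-down illustration, reduced to two of the macros that appear in the source (the class itself is the editor's):

class CodecPayloadTypes {
 public:
  CodecPayloadTypes()
      : counter_(0)
#ifdef WEBRTC_CODEC_G722
        , g722_pltype_(0)
#endif
#ifdef WEBRTC_CODEC_OPUS
        , opus_pltype_(-1)
#endif
  {
  }

 private:
  int counter_;
#ifdef WEBRTC_CODEC_G722
  int g722_pltype_;
#endif
#ifdef WEBRTC_CODEC_OPUS
  int opus_pltype_;
#endif
};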
@@ -302,7 +310,6 @@ void TestStereo::Perform() {
   Run(channel_a2b_, audio_channels, codec_channels);
   out_file_.Close();
 #endif
-#define PCMA_AND_PCMU
 #ifdef PCMA_AND_PCMU
   if (test_mode_ != 0) {
     printf("===========================================================\n");
@@ -18,6 +18,8 @@
 #include "webrtc/modules/audio_coding/main/test/Channel.h"
 #include "webrtc/modules/audio_coding/main/test/PCMFile.h"

+#define PCMA_AND_PCMU
+
 namespace webrtc {

 enum StereoMonoMode {
@@ -38,7 +40,7 @@ class TestPackStereo : public AudioPacketizationCallback {
                            const uint8_t payload_type,
                            const uint32_t timestamp,
                            const uint8_t* payload_data,
-                           const uint16_t payload_size,
+                           const size_t payload_size,
                            const RTPFragmentationHeader* fragmentation) OVERRIDE;

   uint16_t payload_size();
@@ -78,11 +80,6 @@ class TestStereo : public ACMTest {
   void OpenOutFile(int16_t test_number);
   void DisplaySendReceiveCodec();

-  int32_t SendData(const FrameType frame_type, const uint8_t payload_type,
-                   const uint32_t timestamp, const uint8_t* payload_data,
-                   const uint16_t payload_size,
-                   const RTPFragmentationHeader* fragmentation);
-
   int test_mode_;

   scoped_ptr<AudioCodingModule> acm_a_;
@@ -100,17 +97,24 @@ class TestStereo : public ACMTest {
   char* send_codec_name_;

   // Payload types for stereo codecs and CNG
+#ifdef WEBRTC_CODEC_G722
   int g722_pltype_;
+#endif
+#ifdef WEBRTC_CODEC_PCM16
   int l16_8khz_pltype_;
   int l16_16khz_pltype_;
   int l16_32khz_pltype_;
+#endif
+#ifdef PCMA_AND_PCMU
   int pcma_pltype_;
   int pcmu_pltype_;
+#endif
+#ifdef WEBRTC_CODEC_CELT
   int celt_pltype_;
+#endif
+#ifdef WEBRTC_CODEC_OPUS
   int opus_pltype_;
-  int cn_8khz_pltype_;
-  int cn_16khz_pltype_;
-  int cn_32khz_pltype_;
+#endif
 };

 } // namespace webrtc
@@ -36,9 +36,11 @@ class DualStreamTest : public AudioPacketizationCallback,
   void ApiTest();

   virtual int32_t SendData(
-      FrameType frameType, uint8_t payload_type,
-      uint32_t timestamp, const uint8_t* payload_data,
-      uint16_t payload_size,
+      FrameType frameType,
+      uint8_t payload_type,
+      uint32_t timestamp,
+      const uint8_t* payload_data,
+      size_t payload_size,
       const RTPFragmentationHeader* fragmentation) OVERRIDE;

   void Perform(bool start_in_sync, int num_channels_input);
@@ -49,9 +51,9 @@ class DualStreamTest : public AudioPacketizationCallback,
   void PopulateCodecInstances(int frame_size_primary_ms,
                               int num_channels_primary, int sampling_rate);

-  void Validate(bool start_in_sync, int tolerance);
+  void Validate(bool start_in_sync, size_t tolerance);
   bool EqualTimestamp(int stream, int position);
-  int EqualPayloadLength(int stream, int position);
+  size_t EqualPayloadLength(int stream, int position);
   bool EqualPayloadData(int stream, int position);

   static const int kMaxNumStoredPayloads = 2;
@@ -77,8 +79,8 @@ class DualStreamTest : public AudioPacketizationCallback,
   uint32_t timestamp_ref_[kMaxNumStreams][kMaxNumStoredPayloads];
   uint32_t timestamp_dual_[kMaxNumStreams][kMaxNumStoredPayloads];

-  int payload_len_ref_[kMaxNumStreams][kMaxNumStoredPayloads];
-  int payload_len_dual_[kMaxNumStreams][kMaxNumStoredPayloads];
+  size_t payload_len_ref_[kMaxNumStreams][kMaxNumStoredPayloads];
+  size_t payload_len_dual_[kMaxNumStreams][kMaxNumStoredPayloads];

   uint8_t payload_data_ref_[kMaxNumStreams][MAX_PAYLOAD_SIZE_BYTE
       * kMaxNumStoredPayloads];
@@ -174,7 +176,7 @@ void DualStreamTest::Perform(bool start_in_sync, int num_channels_input) {
   pcm_file.ReadStereo(num_channels_input == 2);
   AudioFrame audio_frame;

-  int tolerance = 0;
+  size_t tolerance = 0;
   if (num_channels_input == 2 && primary_encoder_.channels == 2
       && secondary_encoder_.channels == 1) {
     tolerance = 12;
@@ -253,10 +255,10 @@ bool DualStreamTest::EqualTimestamp(int stream_index, int position) {
   return true;
 }

-int DualStreamTest::EqualPayloadLength(int stream_index, int position) {
-  return abs(
-      payload_len_dual_[stream_index][position]
-      - payload_len_ref_[stream_index][position]);
+size_t DualStreamTest::EqualPayloadLength(int stream_index, int position) {
+  size_t dual = payload_len_dual_[stream_index][position];
+  size_t ref = payload_len_ref_[stream_index][position];
+  return (dual > ref) ? (dual - ref) : (ref - dual);
 }

 bool DualStreamTest::EqualPayloadData(int stream_index, int position) {
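EqualPayloadLength() above switches from abs() to an explicit comparison because its operands are now unsigned: with size_t, a - b wraps around to a huge value whenever a < b, so the order check has to come before the subtraction. The idiom in isolation (helper name is the editor's):

#include <cstddef>

size_t AbsDiff(size_t a, size_t b) {
  return (a > b) ? (a - b) : (b - a);
}

Validate() then compares this difference against a size_t tolerance, so nothing in the chain ever needs to go negative.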
@@ -264,7 +266,7 @@ bool DualStreamTest::EqualPayloadData(int stream_index, int position) {
          payload_len_dual_[stream_index][position]
          == payload_len_ref_[stream_index][position]);
   int offset = position * MAX_PAYLOAD_SIZE_BYTE;
-  for (int n = 0; n < payload_len_dual_[stream_index][position]; n++) {
+  for (size_t n = 0; n < payload_len_dual_[stream_index][position]; n++) {
     if (payload_data_dual_[stream_index][offset + n]
         != payload_data_ref_[stream_index][offset + n]) {
       return false;
@@ -273,9 +275,9 @@ bool DualStreamTest::EqualPayloadData(int stream_index, int position) {
   return true;
 }

-void DualStreamTest::Validate(bool start_in_sync, int tolerance) {
+void DualStreamTest::Validate(bool start_in_sync, size_t tolerance) {
   for (int stream_index = 0; stream_index < kMaxNumStreams; stream_index++) {
-    int my_tolerance = stream_index == kPrimary ? 0 : tolerance;
+    size_t my_tolerance = stream_index == kPrimary ? 0 : tolerance;
     for (int position = 0; position < kMaxNumStoredPayloads; position++) {
       if (payload_ref_is_stored_[stream_index][position] == 1
           && payload_dual_is_stored_[stream_index][position] == 1) {
@@ -296,7 +298,7 @@ void DualStreamTest::Validate(bool start_in_sync, int tolerance) {
 int32_t DualStreamTest::SendData(FrameType frameType, uint8_t payload_type,
                                  uint32_t timestamp,
                                  const uint8_t* payload_data,
-                                 uint16_t payload_size,
+                                 size_t payload_size,
                                  const RTPFragmentationHeader* fragmentation) {
   int position;
   int stream_index;
||||||
int16_t audio[kFrameSizeSamples];
|
int16_t audio[kFrameSizeSamples];
|
||||||
const int kRange = 0x7FF; // 2047, easy for masking.
|
const int kRange = 0x7FF; // 2047, easy for masking.
|
||||||
for (int n = 0; n < kFrameSizeSamples; ++n)
|
for (size_t n = 0; n < kFrameSizeSamples; ++n)
|
||||||
audio[n] = (rand() & kRange) - kRange / 2;
|
audio[n] = (rand() & kRange) - kRange / 2;
|
||||||
WebRtcPcm16b_Encode(audio, kFrameSizeSamples, payload_);
|
WebRtcPcm16b_Encode(audio, kFrameSizeSamples, payload_);
|
||||||
}
|
}
|
||||||
@ -133,7 +133,7 @@ class TargetDelayTest : public ::testing::Test {
|
|||||||
private:
|
private:
|
||||||
static const int kSampleRateHz = 16000;
|
static const int kSampleRateHz = 16000;
|
||||||
static const int kNum10msPerFrame = 2;
|
static const int kNum10msPerFrame = 2;
|
||||||
static const int kFrameSizeSamples = 320; // 20 ms @ 16 kHz.
|
static const size_t kFrameSizeSamples = 320; // 20 ms @ 16 kHz.
|
||||||
// payload-len = frame-samples * 2 bytes/sample.
|
// payload-len = frame-samples * 2 bytes/sample.
|
||||||
static const int kPayloadLenBytes = 320 * 2;
|
static const int kPayloadLenBytes = 320 * 2;
|
||||||
// Inter-arrival time in number of packets in a jittery channel. One is no
|
// Inter-arrival time in number of packets in a jittery channel. One is no
|
||||||
|
@ -55,7 +55,7 @@ namespace webrtc {
|
|||||||
//
|
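Making kFrameSizeSamples a size_t above keeps the array bound and the loop index in the same unsigned type, which avoids signed/unsigned comparison warnings and implicit conversions when indexing. A reduced sketch; the buffer and function names are the editor's:

#include <cstddef>
#include <cstdint>

static const size_t kFrameSizeSamples = 320;  // 20 ms @ 16 kHz.

void GenerateSilence(int16_t* audio) {
  // size_t index against a size_t bound: no signed/unsigned mismatch.
  for (size_t n = 0; n < kFrameSizeSamples; ++n)
    audio[n] = 0;
}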
//
|
||||||
int DtmfBuffer::ParseEvent(uint32_t rtp_timestamp,
|
int DtmfBuffer::ParseEvent(uint32_t rtp_timestamp,
|
||||||
const uint8_t* payload,
|
const uint8_t* payload,
|
||||||
int payload_length_bytes,
|
size_t payload_length_bytes,
|
||||||
DtmfEvent* event) {
|
DtmfEvent* event) {
|
||||||
if (!payload || !event) {
|
if (!payload || !event) {
|
||||||
return kInvalidPointer;
|
return kInvalidPointer;
|
||||||
|
@ -69,7 +69,7 @@ class DtmfBuffer {
|
|||||||
// |rtp_timestamp| is simply copied into the struct.
|
// |rtp_timestamp| is simply copied into the struct.
|
||||||
static int ParseEvent(uint32_t rtp_timestamp,
|
static int ParseEvent(uint32_t rtp_timestamp,
|
||||||
const uint8_t* payload,
|
const uint8_t* payload,
|
||||||
int payload_length_bytes,
|
size_t payload_length_bytes,
|
||||||
DtmfEvent* event);
|
DtmfEvent* event);
|
||||||
|
|
||||||
// Inserts |event| into the buffer. The method looks for a matching event and
|
// Inserts |event| into the buffer. The method looks for a matching event and
|
||||||
|
@@ -132,7 +132,7 @@ class NetEq {
   // Returns 0 on success, -1 on failure.
   virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
                            const uint8_t* payload,
-                           int length_bytes,
+                           size_t length_bytes,
                            uint32_t receive_timestamp) = 0;

   // Inserts a sync-packet into packet queue. Sync-packets are decoded to
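Usage sketch for the size_t-based NetEq::InsertPacket() declared above. The header-field values, helper name, and include path are illustrative; only the call shape (RTP header, payload pointer, size_t byte count, receive timestamp) comes from the interface.

#include <cstddef>
#include <cstdint>
#include <cstring>

#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"  // assumed path

namespace webrtc {

int InsertOnePacket(NetEq* neteq,
                    const uint8_t* payload,
                    size_t payload_length_bytes,
                    uint32_t receive_timestamp) {
  WebRtcRTPHeader rtp_header;
  std::memset(&rtp_header, 0, sizeof(rtp_header));
  rtp_header.header.payloadType = 0;
  rtp_header.header.sequenceNumber = 0x1234;
  rtp_header.header.timestamp = 0x12345678;
  rtp_header.header.ssrc = 0x87654321;
  // length_bytes is a size_t, so sizeof() or std::vector::size() results can
  // be passed through without casts.
  return neteq->InsertPacket(rtp_header, payload, payload_length_bytes,
                             receive_timestamp);
}

}  // namespace webrtc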
@@ -28,11 +28,11 @@ class MockPayloadSplitter : public PayloadSplitter {
   MOCK_METHOD2(SplitAudio,
       int(PacketList* packet_list, const DecoderDatabase& decoder_database));
   MOCK_METHOD4(SplitBySamples,
-      void(const Packet* packet, int bytes_per_ms, int timestamps_per_ms,
-          PacketList* new_packets));
+      void(const Packet* packet, size_t bytes_per_ms,
+          uint32_t timestamps_per_ms, PacketList* new_packets));
   MOCK_METHOD4(SplitByFrames,
-      int(const Packet* packet, int bytes_per_frame, int timestamps_per_frame,
-          PacketList* new_packets));
+      int(const Packet* packet, size_t bytes_per_frame,
+          uint32_t timestamps_per_frame, PacketList* new_packets));
 };

 } // namespace webrtc
@@ -203,7 +203,7 @@ class NetEqExternalDecoderTest : public ::testing::Test {
   int sample_rate_hz_;
   int samples_per_ms_;
   const int frame_size_ms_;
-  int frame_size_samples_;
+  size_t frame_size_samples_;
   int output_size_samples_;
   NetEq* neteq_external_;
   NetEq* neteq_;
@@ -214,7 +214,7 @@ class NetEqExternalDecoderTest : public ::testing::Test {
   int16_t output_[kMaxBlockSize];
   int16_t output_external_[kMaxBlockSize];
   WebRtcRTPHeader rtp_header_;
-  int payload_size_bytes_;
+  size_t payload_size_bytes_;
   int last_send_time_;
   int last_arrival_time_;
   scoped_ptr<test::InputAudioFile> input_file_;
@@ -117,7 +117,7 @@ NetEqImpl::~NetEqImpl() {

 int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
                             const uint8_t* payload,
-                            int length_bytes,
+                            size_t length_bytes,
                             uint32_t receive_timestamp) {
   CriticalSectionScoped lock(crit_sect_.get());
   LOG(LS_VERBOSE) << "InsertPacket: ts=" << rtp_header.header.timestamp <<
@@ -399,7 +399,7 @@ const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {

 int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
                                     const uint8_t* payload,
-                                    int length_bytes,
+                                    size_t length_bytes,
                                     uint32_t receive_timestamp,
                                     bool is_sync_packet) {
   if (!payload) {
@@ -1241,7 +1241,7 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation,
     assert(*operation == kNormal || *operation == kAccelerate ||
            *operation == kMerge || *operation == kPreemptiveExpand);
     packet_list->pop_front();
-    int payload_length = packet->payload_length;
+    size_t payload_length = packet->payload_length;
     int16_t decode_length;
     if (packet->sync_packet) {
       // Decode to silence with the same frame size as the last decode.
@@ -81,7 +81,7 @@ class NetEqImpl : public webrtc::NetEq {
   // Returns 0 on success, -1 on failure.
   virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
                            const uint8_t* payload,
-                           int length_bytes,
+                           size_t length_bytes,
                            uint32_t receive_timestamp) OVERRIDE;

   // Inserts a sync-packet into packet queue. Sync-packets are decoded to
@@ -210,7 +210,7 @@ class NetEqImpl : public webrtc::NetEq {
   // TODO(hlundin): Merge this with InsertPacket above?
   int InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
                            const uint8_t* payload,
-                           int length_bytes,
+                           size_t length_bytes,
                            uint32_t receive_timestamp,
                            bool is_sync_packet)
       EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
@@ -253,7 +253,7 @@ TEST_F(NetEqImplTest, RemovePayloadType) {

 TEST_F(NetEqImplTest, InsertPacket) {
   CreateInstance();
-  const int kPayloadLength = 100;
+  const size_t kPayloadLength = 100;
   const uint8_t kPayloadType = 0;
   const uint16_t kFirstSequenceNumber = 0x1234;
   const uint32_t kFirstTimestamp = 0x12345678;
@@ -192,7 +192,7 @@ class NetEqDecodingTest : public ::testing::Test {
   static const int kBlockSize8kHz = kTimeStepMs * 8;
   static const int kBlockSize16kHz = kTimeStepMs * 16;
   static const int kBlockSize32kHz = kTimeStepMs * 32;
-  static const int kMaxBlockSize = kBlockSize32kHz;
+  static const size_t kMaxBlockSize = kBlockSize32kHz;
   static const int kInitSampleRateHz = 8000;

   NetEqDecodingTest();
@@ -213,7 +213,7 @@ class NetEqDecodingTest : public ::testing::Test {
                    int timestamp,
                    WebRtcRTPHeader* rtp_info,
                    uint8_t* payload,
-                   int* payload_len);
+                   size_t* payload_len);

   void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp,
                 const std::set<uint16_t>& drop_seq_numbers,
@@ -244,7 +244,7 @@ const int NetEqDecodingTest::kTimeStepMs;
 const int NetEqDecodingTest::kBlockSize8kHz;
 const int NetEqDecodingTest::kBlockSize16kHz;
 const int NetEqDecodingTest::kBlockSize32kHz;
-const int NetEqDecodingTest::kMaxBlockSize;
+const size_t NetEqDecodingTest::kMaxBlockSize;
 const int NetEqDecodingTest::kInitSampleRateHz;

 NetEqDecodingTest::NetEqDecodingTest()
@@ -396,7 +396,7 @@ void NetEqDecodingTest::PopulateCng(int frame_index,
                                     int timestamp,
                                     WebRtcRTPHeader* rtp_info,
                                     uint8_t* payload,
-                                    int* payload_len) {
+                                    size_t* payload_len) {
   rtp_info->header.sequenceNumber = frame_index;
   rtp_info->header.timestamp = timestamp;
   rtp_info->header.ssrc = 0x1234;  // Just an arbitrary SSRC.
@@ -448,8 +448,8 @@ class NetEqDecodingTestFaxMode : public NetEqDecodingTest {
 TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) {
   // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
   size_t num_frames = 30;
-  const int kSamples = 10 * 16;
-  const int kPayloadBytes = kSamples * 2;
+  const size_t kSamples = 10 * 16;
+  const size_t kPayloadBytes = kSamples * 2;
   for (size_t i = 0; i < num_frames; ++i) {
     uint16_t payload[kSamples] = {0};
     WebRtcRTPHeader rtp_info;
@@ -518,8 +518,8 @@ TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) {
 TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
   const int kNumFrames = 3000;  // Needed for convergence.
   int frame_index = 0;
-  const int kSamples = 10 * 16;
-  const int kPayloadBytes = kSamples * 2;
+  const size_t kSamples = 10 * 16;
+  const size_t kPayloadBytes = kSamples * 2;
   while (frame_index < kNumFrames) {
     // Insert one packet each time, except every 10th time where we insert two
     // packets at once. This will create a negative clock-drift of approx. 10%.
@@ -549,8 +549,8 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
 TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
   const int kNumFrames = 5000;  // Needed for convergence.
   int frame_index = 0;
-  const int kSamples = 10 * 16;
-  const int kPayloadBytes = kSamples * 2;
+  const size_t kSamples = 10 * 16;
+  const size_t kPayloadBytes = kSamples * 2;
   for (int i = 0; i < kNumFrames; ++i) {
     // Insert one packet each time, except every 10th time where we don't insert
     // any packet. This will create a positive clock-drift of approx. 11%.
@ -585,8 +585,8 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
|
|||||||
uint16_t seq_no = 0;
|
uint16_t seq_no = 0;
|
||||||
uint32_t timestamp = 0;
|
uint32_t timestamp = 0;
|
||||||
const int kFrameSizeMs = 30;
|
const int kFrameSizeMs = 30;
|
||||||
const int kSamples = kFrameSizeMs * 16;
|
const size_t kSamples = kFrameSizeMs * 16;
|
||||||
const int kPayloadBytes = kSamples * 2;
|
const size_t kPayloadBytes = kSamples * 2;
|
||||||
double next_input_time_ms = 0.0;
|
double next_input_time_ms = 0.0;
|
||||||
double t_ms;
|
double t_ms;
|
||||||
int out_len;
|
int out_len;
|
||||||
@ -625,7 +625,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
|
|||||||
while (next_input_time_ms <= t_ms) {
|
while (next_input_time_ms <= t_ms) {
|
||||||
// Insert one CNG frame each 100 ms.
|
// Insert one CNG frame each 100 ms.
|
||||||
uint8_t payload[kPayloadBytes];
|
uint8_t payload[kPayloadBytes];
|
||||||
int payload_len;
|
size_t payload_len;
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
||||||
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
||||||
@ -672,7 +672,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
|
|||||||
}
|
}
|
||||||
// Insert one CNG frame each 100 ms.
|
// Insert one CNG frame each 100 ms.
|
||||||
uint8_t payload[kPayloadBytes];
|
uint8_t payload[kPayloadBytes];
|
||||||
int payload_len;
|
size_t payload_len;
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
||||||
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
||||||
@ -797,7 +797,7 @@ TEST_F(NetEqDecodingTest, LongCngWithoutClockDrift) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(NetEqDecodingTest, UnknownPayloadType) {
|
TEST_F(NetEqDecodingTest, UnknownPayloadType) {
|
||||||
const int kPayloadBytes = 100;
|
const size_t kPayloadBytes = 100;
|
||||||
uint8_t payload[kPayloadBytes] = {0};
|
uint8_t payload[kPayloadBytes] = {0};
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateRtpInfo(0, 0, &rtp_info);
|
PopulateRtpInfo(0, 0, &rtp_info);
|
||||||
@ -808,7 +808,7 @@ TEST_F(NetEqDecodingTest, UnknownPayloadType) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
|
TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
|
||||||
const int kPayloadBytes = 100;
|
const size_t kPayloadBytes = 100;
|
||||||
uint8_t payload[kPayloadBytes] = {0};
|
uint8_t payload[kPayloadBytes] = {0};
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateRtpInfo(0, 0, &rtp_info);
|
PopulateRtpInfo(0, 0, &rtp_info);
|
||||||
@ -817,7 +817,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
|
|||||||
NetEqOutputType type;
|
NetEqOutputType type;
|
||||||
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
|
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
|
||||||
// to GetAudio.
|
// to GetAudio.
|
||||||
for (int i = 0; i < kMaxBlockSize; ++i) {
|
for (size_t i = 0; i < kMaxBlockSize; ++i) {
|
||||||
out_data_[i] = 1;
|
out_data_[i] = 1;
|
||||||
}
|
}
|
||||||
int num_channels;
|
int num_channels;
|
||||||
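The loop index above is widened together with kMaxBlockSize: an int index against a size_t bound would trip -Wsign-compare. A small illustrative sketch of the matching pattern (names hypothetical):

#include <cstddef>
#include <cstdint>

void MarkBuffer(int16_t* out_data, size_t max_block_size) {
  // Index type matches the size_t bound, so the comparison is
  // unsigned-vs-unsigned and compiles cleanly under -Wsign-compare.
  for (size_t i = 0; i < max_block_size; ++i) {
    out_data[i] = 1;
  }
}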
@ -838,7 +838,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
|
|||||||
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
|
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
|
||||||
EXPECT_EQ(0, out_data_[i]);
|
EXPECT_EQ(0, out_data_[i]);
|
||||||
}
|
}
|
||||||
for (int i = kExpectedOutputLength; i < kMaxBlockSize; ++i) {
|
for (size_t i = kExpectedOutputLength; i < kMaxBlockSize; ++i) {
|
||||||
std::ostringstream ss;
|
std::ostringstream ss;
|
||||||
ss << "i = " << i;
|
ss << "i = " << i;
|
||||||
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
|
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
|
||||||
@ -850,7 +850,7 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
|
|||||||
NetEqOutputType type;
|
NetEqOutputType type;
|
||||||
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
|
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
|
||||||
// to GetAudio.
|
// to GetAudio.
|
||||||
for (int i = 0; i < kMaxBlockSize; ++i) {
|
for (size_t i = 0; i < kMaxBlockSize; ++i) {
|
||||||
out_data_[i] = 1;
|
out_data_[i] = 1;
|
||||||
}
|
}
|
||||||
int num_channels;
|
int num_channels;
|
||||||
@ -875,7 +875,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
|
|||||||
bool should_be_faded) = 0;
|
bool should_be_faded) = 0;
|
||||||
|
|
||||||
void CheckBgn(int sampling_rate_hz) {
|
void CheckBgn(int sampling_rate_hz) {
|
||||||
int expected_samples_per_channel = 0;
|
int16_t expected_samples_per_channel = 0;
|
||||||
uint8_t payload_type = 0xFF; // Invalid.
|
uint8_t payload_type = 0xFF; // Invalid.
|
||||||
if (sampling_rate_hz == 8000) {
|
if (sampling_rate_hz == 8000) {
|
||||||
expected_samples_per_channel = kBlockSize8kHz;
|
expected_samples_per_channel = kBlockSize8kHz;
|
||||||
@ -899,7 +899,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
|
|||||||
ASSERT_TRUE(input.Init(
|
ASSERT_TRUE(input.Init(
|
||||||
webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
|
webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
|
||||||
10 * sampling_rate_hz, // Max 10 seconds loop length.
|
10 * sampling_rate_hz, // Max 10 seconds loop length.
|
||||||
expected_samples_per_channel));
|
static_cast<size_t>(expected_samples_per_channel)));
|
||||||
|
|
||||||
// Payload of 10 ms of PCM16 32 kHz.
|
// Payload of 10 ms of PCM16 32 kHz.
|
||||||
uint8_t payload[kBlockSize32kHz * sizeof(int16_t)];
|
uint8_t payload[kBlockSize32kHz * sizeof(int16_t)];
|
||||||
@ -912,7 +912,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
|
|||||||
|
|
||||||
uint32_t receive_timestamp = 0;
|
uint32_t receive_timestamp = 0;
|
||||||
for (int n = 0; n < 10; ++n) { // Insert a few packets and get audio.
|
for (int n = 0; n < 10; ++n) { // Insert a few packets and get audio.
|
||||||
int enc_len_bytes =
|
int16_t enc_len_bytes =
|
||||||
WebRtcPcm16b_EncodeW16(input.GetNextBlock(),
|
WebRtcPcm16b_EncodeW16(input.GetNextBlock(),
|
||||||
expected_samples_per_channel,
|
expected_samples_per_channel,
|
||||||
reinterpret_cast<int16_t*>(payload));
|
reinterpret_cast<int16_t*>(payload));
|
||||||
@ -921,8 +921,9 @@ class NetEqBgnTest : public NetEqDecodingTest {
|
|||||||
number_channels = 0;
|
number_channels = 0;
|
||||||
samples_per_channel = 0;
|
samples_per_channel = 0;
|
||||||
ASSERT_EQ(0,
|
ASSERT_EQ(0,
|
||||||
neteq_->InsertPacket(
|
neteq_->InsertPacket(rtp_info, payload,
|
||||||
rtp_info, payload, enc_len_bytes, receive_timestamp));
|
static_cast<size_t>(enc_len_bytes),
|
||||||
|
receive_timestamp));
|
||||||
ASSERT_EQ(0,
|
ASSERT_EQ(0,
|
||||||
neteq_->GetAudio(kBlockSize32kHz,
|
neteq_->GetAudio(kBlockSize32kHz,
|
||||||
output,
|
output,
|
||||||
@ -1074,7 +1075,7 @@ TEST_F(NetEqDecodingTest, SyncPacketInsert) {
|
|||||||
EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
|
EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
|
||||||
|
|
||||||
// Payload length of 10 ms PCM16 16 kHz.
|
// Payload length of 10 ms PCM16 16 kHz.
|
||||||
const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
const size_t kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
||||||
uint8_t payload[kPayloadBytes] = {0};
|
uint8_t payload[kPayloadBytes] = {0};
|
||||||
ASSERT_EQ(0, neteq_->InsertPacket(
|
ASSERT_EQ(0, neteq_->InsertPacket(
|
||||||
rtp_info, payload, kPayloadBytes, receive_timestamp));
|
rtp_info, payload, kPayloadBytes, receive_timestamp));
|
||||||
@ -1125,11 +1126,11 @@ TEST_F(NetEqDecodingTest, SyncPacketInsert) {
|
|||||||
TEST_F(NetEqDecodingTest, SyncPacketDecode) {
|
TEST_F(NetEqDecodingTest, SyncPacketDecode) {
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateRtpInfo(0, 0, &rtp_info);
|
PopulateRtpInfo(0, 0, &rtp_info);
|
||||||
const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
const size_t kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
||||||
uint8_t payload[kPayloadBytes];
|
uint8_t payload[kPayloadBytes];
|
||||||
int16_t decoded[kBlockSize16kHz];
|
int16_t decoded[kBlockSize16kHz];
|
||||||
int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
|
int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
|
||||||
for (int n = 0; n < kPayloadBytes; ++n) {
|
for (size_t n = 0; n < kPayloadBytes; ++n) {
|
||||||
payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
|
payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
|
||||||
}
|
}
|
||||||
// Insert some packets which decode to noise. We are not interested in
|
// Insert some packets which decode to noise. We are not interested in
|
||||||
@ -1204,10 +1205,10 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
|
|||||||
TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
|
TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
|
||||||
WebRtcRTPHeader rtp_info;
|
WebRtcRTPHeader rtp_info;
|
||||||
PopulateRtpInfo(0, 0, &rtp_info);
|
PopulateRtpInfo(0, 0, &rtp_info);
|
||||||
const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
const size_t kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
|
||||||
uint8_t payload[kPayloadBytes];
|
uint8_t payload[kPayloadBytes];
|
||||||
int16_t decoded[kBlockSize16kHz];
|
int16_t decoded[kBlockSize16kHz];
|
||||||
for (int n = 0; n < kPayloadBytes; ++n) {
|
for (size_t n = 0; n < kPayloadBytes; ++n) {
|
||||||
payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
|
payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
|
||||||
}
|
}
|
||||||
// Insert some packets which decode to noise. We are not interested in
|
// Insert some packets which decode to noise. We are not interested in
|
||||||
@ -1279,7 +1280,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
|
|||||||
const int kBlocksPerFrame = 3; // Number of 10 ms blocks per frame.
|
const int kBlocksPerFrame = 3; // Number of 10 ms blocks per frame.
|
||||||
const int kFrameSizeMs = kBlocksPerFrame * kTimeStepMs;
|
const int kFrameSizeMs = kBlocksPerFrame * kTimeStepMs;
|
||||||
const int kSamples = kBlockSize16kHz * kBlocksPerFrame;
|
const int kSamples = kBlockSize16kHz * kBlocksPerFrame;
|
||||||
const int kPayloadBytes = kSamples * sizeof(int16_t);
|
const size_t kPayloadBytes = kSamples * sizeof(int16_t);
|
||||||
double next_input_time_ms = 0.0;
|
double next_input_time_ms = 0.0;
|
||||||
int16_t decoded[kBlockSize16kHz];
|
int16_t decoded[kBlockSize16kHz];
|
||||||
int num_channels;
|
int num_channels;
|
||||||
@ -1380,7 +1381,7 @@ void NetEqDecodingTest::DuplicateCng() {
|
|||||||
const int kFrameSizeMs = 10;
|
const int kFrameSizeMs = 10;
|
||||||
const int kSampleRateKhz = 16;
|
const int kSampleRateKhz = 16;
|
||||||
const int kSamples = kFrameSizeMs * kSampleRateKhz;
|
const int kSamples = kFrameSizeMs * kSampleRateKhz;
|
||||||
const int kPayloadBytes = kSamples * 2;
|
const size_t kPayloadBytes = kSamples * 2;
|
||||||
|
|
||||||
const int algorithmic_delay_samples = std::max(
|
const int algorithmic_delay_samples = std::max(
|
||||||
algorithmic_delay_ms_ * kSampleRateKhz, 5 * kSampleRateKhz / 8);
|
algorithmic_delay_ms_ * kSampleRateKhz, 5 * kSampleRateKhz / 8);
|
||||||
@ -1409,7 +1410,7 @@ void NetEqDecodingTest::DuplicateCng() {
|
|||||||
// Insert same CNG packet twice.
|
// Insert same CNG packet twice.
|
||||||
const int kCngPeriodMs = 100;
|
const int kCngPeriodMs = 100;
|
||||||
const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
|
const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
|
||||||
int payload_len;
|
size_t payload_len;
|
||||||
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
|
||||||
// This is the first time this CNG packet is inserted.
|
// This is the first time this CNG packet is inserted.
|
||||||
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
|
||||||
|
@ -22,7 +22,7 @@ namespace webrtc {
|
|||||||
struct Packet {
|
struct Packet {
|
||||||
RTPHeader header;
|
RTPHeader header;
|
||||||
uint8_t* payload; // Datagram excluding RTP header and header extension.
|
uint8_t* payload; // Datagram excluding RTP header and header extension.
|
||||||
int payload_length;
|
size_t payload_length;
|
||||||
bool primary; // Primary, i.e., not redundant payload.
|
bool primary; // Primary, i.e., not redundant payload.
|
||||||
int waiting_time;
|
int waiting_time;
|
||||||
bool sync_packet;
|
bool sync_packet;
|
||||||
|
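With Packet::payload_length as size_t, the RED bound check a few hunks below (payload_ptr + payload_length > red_packet->payload + red_packet->payload_length) still works, since adding an unsigned length to a uint8_t* is plain pointer arithmetic. A reduced sketch of that check with hypothetical names:

#include <cstddef>
#include <cstdint>

// Reduced form of the RED block-length validation: both lengths are size_t
// and the comparison is between pointers, so no signed/unsigned mixing.
bool BlockFitsInPacket(const uint8_t* packet_payload, size_t packet_length,
                       const uint8_t* block_ptr, size_t block_length) {
  return block_ptr + block_length <= packet_payload + packet_length;
}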
@ -46,7 +46,7 @@ int PayloadSplitter::SplitRed(PacketList* packet_list) {
|
|||||||
// +-+-+-+-+-+-+-+-+
|
// +-+-+-+-+-+-+-+-+
|
||||||
|
|
||||||
bool last_block = false;
|
bool last_block = false;
|
||||||
int sum_length = 0;
|
size_t sum_length = 0;
|
||||||
while (!last_block) {
|
while (!last_block) {
|
||||||
Packet* new_packet = new Packet;
|
Packet* new_packet = new Packet;
|
||||||
new_packet->header = red_packet->header;
|
new_packet->header = red_packet->header;
|
||||||
@ -82,7 +82,7 @@ int PayloadSplitter::SplitRed(PacketList* packet_list) {
|
|||||||
// |payload_ptr| now points at the first payload byte.
|
// |payload_ptr| now points at the first payload byte.
|
||||||
PacketList::iterator new_it;
|
PacketList::iterator new_it;
|
||||||
for (new_it = new_packets.begin(); new_it != new_packets.end(); ++new_it) {
|
for (new_it = new_packets.begin(); new_it != new_packets.end(); ++new_it) {
|
||||||
int payload_length = (*new_it)->payload_length;
|
size_t payload_length = (*new_it)->payload_length;
|
||||||
if (payload_ptr + payload_length >
|
if (payload_ptr + payload_length >
|
||||||
red_packet->payload + red_packet->payload_length) {
|
red_packet->payload + red_packet->payload_length) {
|
||||||
// The block lengths in the RED headers do not match the overall packet
|
// The block lengths in the RED headers do not match the overall packet
|
||||||
@ -291,11 +291,12 @@ int PayloadSplitter::SplitAudio(PacketList* packet_list,
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case kDecoderILBC: {
|
case kDecoderILBC: {
|
||||||
int bytes_per_frame;
|
size_t bytes_per_frame;
|
||||||
int timestamps_per_frame;
|
int timestamps_per_frame;
|
||||||
if (packet->payload_length >= 950) {
|
if (packet->payload_length >= 950) {
|
||||||
return kTooLargePayload;
|
return kTooLargePayload;
|
||||||
} else if (packet->payload_length % 38 == 0) {
|
}
|
||||||
|
if (packet->payload_length % 38 == 0) {
|
||||||
// 20 ms frames.
|
// 20 ms frames.
|
||||||
bytes_per_frame = 38;
|
bytes_per_frame = 38;
|
||||||
timestamps_per_frame = 160;
|
timestamps_per_frame = 160;
|
||||||
@ -345,28 +346,28 @@ int PayloadSplitter::SplitAudio(PacketList* packet_list,
|
|||||||
}
|
}
|
||||||
|
|
||||||
void PayloadSplitter::SplitBySamples(const Packet* packet,
|
void PayloadSplitter::SplitBySamples(const Packet* packet,
|
||||||
int bytes_per_ms,
|
size_t bytes_per_ms,
|
||||||
int timestamps_per_ms,
|
uint32_t timestamps_per_ms,
|
||||||
PacketList* new_packets) {
|
PacketList* new_packets) {
|
||||||
assert(packet);
|
assert(packet);
|
||||||
assert(new_packets);
|
assert(new_packets);
|
||||||
|
|
||||||
int split_size_bytes = packet->payload_length;
|
size_t split_size_bytes = packet->payload_length;
|
||||||
|
|
||||||
// Find a "chunk size" >= 20 ms and < 40 ms.
|
// Find a "chunk size" >= 20 ms and < 40 ms.
|
||||||
int min_chunk_size = bytes_per_ms * 20;
|
size_t min_chunk_size = bytes_per_ms * 20;
|
||||||
// Reduce the split size by half as long as |split_size_bytes| is at least
|
// Reduce the split size by half as long as |split_size_bytes| is at least
|
||||||
// twice the minimum chunk size (so that the resulting size is at least as
|
// twice the minimum chunk size (so that the resulting size is at least as
|
||||||
// large as the minimum chunk size).
|
// large as the minimum chunk size).
|
||||||
while (split_size_bytes >= 2 * min_chunk_size) {
|
while (split_size_bytes >= 2 * min_chunk_size) {
|
||||||
split_size_bytes >>= 1;
|
split_size_bytes >>= 1;
|
||||||
}
|
}
|
||||||
int timestamps_per_chunk =
|
uint32_t timestamps_per_chunk = static_cast<uint32_t>(
|
||||||
split_size_bytes * timestamps_per_ms / bytes_per_ms;
|
split_size_bytes * timestamps_per_ms / bytes_per_ms);
|
||||||
uint32_t timestamp = packet->header.timestamp;
|
uint32_t timestamp = packet->header.timestamp;
|
||||||
|
|
||||||
uint8_t* payload_ptr = packet->payload;
|
uint8_t* payload_ptr = packet->payload;
|
||||||
int len = packet->payload_length;
|
size_t len = packet->payload_length;
|
||||||
while (len >= (2 * split_size_bytes)) {
|
while (len >= (2 * split_size_bytes)) {
|
||||||
Packet* new_packet = new Packet;
|
Packet* new_packet = new Packet;
|
||||||
new_packet->payload_length = split_size_bytes;
|
new_packet->payload_length = split_size_bytes;
|
||||||
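To make the chunk-size loop above concrete: for a sample-based codec at 8 bytes/ms, a 60 ms payload starts at split_size_bytes = 480; min_chunk_size is 160, so 480 >= 320 halves it to 240 (30 ms), and 240 < 320 stops the loop, i.e. the packet is split into 30 ms chunks. A standalone sketch of just that computation (the 8 bytes/ms figure is an assumed example, not from the CL):

#include <cstddef>

// Chunk size selection in the style of SplitBySamples: halve the payload
// until the result is at least 20 ms and below 40 ms worth of bytes.
size_t FindChunkSizeBytes(size_t payload_length, size_t bytes_per_ms) {
  size_t split_size_bytes = payload_length;
  const size_t min_chunk_size = bytes_per_ms * 20;
  while (split_size_bytes >= 2 * min_chunk_size) {
    split_size_bytes >>= 1;
  }
  return split_size_bytes;  // e.g. 480 bytes at 8 bytes/ms -> 240 (30 ms)
}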
@ -394,22 +395,21 @@ void PayloadSplitter::SplitBySamples(const Packet* packet,
|
|||||||
}
|
}
|
||||||
|
|
||||||
int PayloadSplitter::SplitByFrames(const Packet* packet,
|
int PayloadSplitter::SplitByFrames(const Packet* packet,
|
||||||
int bytes_per_frame,
|
size_t bytes_per_frame,
|
||||||
int timestamps_per_frame,
|
uint32_t timestamps_per_frame,
|
||||||
PacketList* new_packets) {
|
PacketList* new_packets) {
|
||||||
if (packet->payload_length % bytes_per_frame != 0) {
|
if (packet->payload_length % bytes_per_frame != 0) {
|
||||||
return kFrameSplitError;
|
return kFrameSplitError;
|
||||||
}
|
}
|
||||||
|
|
||||||
int num_frames = packet->payload_length / bytes_per_frame;
|
if (packet->payload_length == bytes_per_frame) {
|
||||||
if (num_frames == 1) {
|
|
||||||
// Special case. Do not split the payload.
|
// Special case. Do not split the payload.
|
||||||
return kNoSplit;
|
return kNoSplit;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t timestamp = packet->header.timestamp;
|
uint32_t timestamp = packet->header.timestamp;
|
||||||
uint8_t* payload_ptr = packet->payload;
|
uint8_t* payload_ptr = packet->payload;
|
||||||
int len = packet->payload_length;
|
size_t len = packet->payload_length;
|
||||||
while (len > 0) {
|
while (len > 0) {
|
||||||
assert(len >= bytes_per_frame);
|
assert(len >= bytes_per_frame);
|
||||||
Packet* new_packet = new Packet;
|
Packet* new_packet = new Packet;
|
||||||
|
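The rewrite above drops the intermediate num_frames: payload_length == bytes_per_frame is the same single-frame test without a division, and the while (len > 0) loop stays safe with an unsigned len because len only ever shrinks by bytes_per_frame after the assert. A condensed sketch of that loop shape (frame construction elided, names hypothetical):

#include <cassert>
#include <cstddef>

// Condensed frame-splitting loop: with size_t, len must be decremented in
// exact multiples of bytes_per_frame so it can never wrap below zero.
size_t CountFrames(size_t payload_length, size_t bytes_per_frame) {
  assert(bytes_per_frame > 0);
  assert(payload_length % bytes_per_frame == 0);  // checked upstream
  size_t frames = 0;
  for (size_t len = payload_length; len > 0; len -= bytes_per_frame) {
    ++frames;
  }
  return frames;
}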
@ -71,16 +71,16 @@ class PayloadSplitter {
|
|||||||
// Splits the payload in |packet|. The payload is assumed to be from a
|
// Splits the payload in |packet|. The payload is assumed to be from a
|
||||||
// sample-based codec.
|
// sample-based codec.
|
||||||
virtual void SplitBySamples(const Packet* packet,
|
virtual void SplitBySamples(const Packet* packet,
|
||||||
int bytes_per_ms,
|
size_t bytes_per_ms,
|
||||||
int timestamps_per_ms,
|
uint32_t timestamps_per_ms,
|
||||||
PacketList* new_packets);
|
PacketList* new_packets);
|
||||||
|
|
||||||
// Splits the payload in |packet|. The payload will be split into chunks of
|
// Splits the payload in |packet|. The payload will be split into chunks of
|
||||||
// size |bytes_per_frame|, corresponding to a |timestamps_per_frame|
|
// size |bytes_per_frame|, corresponding to a |timestamps_per_frame|
|
||||||
// RTP timestamps.
|
// RTP timestamps.
|
||||||
virtual int SplitByFrames(const Packet* packet,
|
virtual int SplitByFrames(const Packet* packet,
|
||||||
int bytes_per_frame,
|
size_t bytes_per_frame,
|
||||||
int timestamps_per_frame,
|
uint32_t timestamps_per_frame,
|
||||||
PacketList* new_packets);
|
PacketList* new_packets);
|
||||||
|
|
||||||
DISALLOW_COPY_AND_ASSIGN(PayloadSplitter);
|
DISALLOW_COPY_AND_ASSIGN(PayloadSplitter);
|
||||||
|
@ -27,8 +27,8 @@ using ::testing::ReturnNull;
|
|||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
static const int kRedPayloadType = 100;
|
static const int kRedPayloadType = 100;
|
||||||
static const int kPayloadLength = 10;
|
static const size_t kPayloadLength = 10;
|
||||||
static const int kRedHeaderLength = 4; // 4 bytes RED header.
|
static const size_t kRedHeaderLength = 4; // 4 bytes RED header.
|
||||||
static const uint16_t kSequenceNumber = 0;
|
static const uint16_t kSequenceNumber = 0;
|
||||||
static const uint32_t kBaseTimestamp = 0x12345678;
|
static const uint32_t kBaseTimestamp = 0x12345678;
|
||||||
|
|
||||||
@ -50,7 +50,7 @@ static const uint32_t kBaseTimestamp = 0x12345678;
|
|||||||
// by the values in array |payload_types| (which must be of length
|
// by the values in array |payload_types| (which must be of length
|
||||||
// |num_payloads|). Each redundant payload is |timestamp_offset| samples
|
// |num_payloads|). Each redundant payload is |timestamp_offset| samples
|
||||||
// "behind" the the previous payload.
|
// "behind" the the previous payload.
|
||||||
Packet* CreateRedPayload(int num_payloads,
|
Packet* CreateRedPayload(size_t num_payloads,
|
||||||
uint8_t* payload_types,
|
uint8_t* payload_types,
|
||||||
int timestamp_offset) {
|
int timestamp_offset) {
|
||||||
Packet* packet = new Packet;
|
Packet* packet = new Packet;
|
||||||
@ -61,7 +61,7 @@ Packet* CreateRedPayload(int num_payloads,
|
|||||||
(num_payloads - 1) * (kPayloadLength + kRedHeaderLength);
|
(num_payloads - 1) * (kPayloadLength + kRedHeaderLength);
|
||||||
uint8_t* payload = new uint8_t[packet->payload_length];
|
uint8_t* payload = new uint8_t[packet->payload_length];
|
||||||
uint8_t* payload_ptr = payload;
|
uint8_t* payload_ptr = payload;
|
||||||
for (int i = 0; i < num_payloads; ++i) {
|
for (size_t i = 0; i < num_payloads; ++i) {
|
||||||
// Write the RED headers.
|
// Write the RED headers.
|
||||||
if (i == num_payloads - 1) {
|
if (i == num_payloads - 1) {
|
||||||
// Special case for last payload.
|
// Special case for last payload.
|
||||||
@ -82,9 +82,9 @@ Packet* CreateRedPayload(int num_payloads,
|
|||||||
*payload_ptr = kPayloadLength & 0xFF;
|
*payload_ptr = kPayloadLength & 0xFF;
|
||||||
++payload_ptr;
|
++payload_ptr;
|
||||||
}
|
}
|
||||||
for (int i = 0; i < num_payloads; ++i) {
|
for (size_t i = 0; i < num_payloads; ++i) {
|
||||||
// Write |i| to all bytes in each payload.
|
// Write |i| to all bytes in each payload.
|
||||||
memset(payload_ptr, i, kPayloadLength);
|
memset(payload_ptr, static_cast<int>(i), kPayloadLength);
|
||||||
payload_ptr += kPayloadLength;
|
payload_ptr += kPayloadLength;
|
||||||
}
|
}
|
||||||
packet->payload = payload;
|
packet->payload = payload;
|
||||||
@ -104,7 +104,7 @@ Packet* CreateRedPayload(int num_payloads,
|
|||||||
// : |
|
// : |
|
||||||
// | |
|
// | |
|
||||||
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||||
Packet* CreateOpusFecPacket(uint8_t payload_type, int payload_length,
|
Packet* CreateOpusFecPacket(uint8_t payload_type, size_t payload_length,
|
||||||
uint8_t payload_value) {
|
uint8_t payload_value) {
|
||||||
Packet* packet = new Packet;
|
Packet* packet = new Packet;
|
||||||
packet->header.payloadType = payload_type;
|
packet->header.payloadType = payload_type;
|
||||||
@ -120,7 +120,7 @@ Packet* CreateOpusFecPacket(uint8_t payload_type, int payload_length,
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create a packet with all payload bytes set to |payload_value|.
|
// Create a packet with all payload bytes set to |payload_value|.
|
||||||
Packet* CreatePacket(uint8_t payload_type, int payload_length,
|
Packet* CreatePacket(uint8_t payload_type, size_t payload_length,
|
||||||
uint8_t payload_value) {
|
uint8_t payload_value) {
|
||||||
Packet* packet = new Packet;
|
Packet* packet = new Packet;
|
||||||
packet->header.payloadType = payload_type;
|
packet->header.payloadType = payload_type;
|
||||||
@ -135,7 +135,7 @@ Packet* CreatePacket(uint8_t payload_type, int payload_length,
|
|||||||
|
|
||||||
// Checks that |packet| has the attributes given in the remaining parameters.
|
// Checks that |packet| has the attributes given in the remaining parameters.
|
||||||
void VerifyPacket(const Packet* packet,
|
void VerifyPacket(const Packet* packet,
|
||||||
int payload_length,
|
size_t payload_length,
|
||||||
uint8_t payload_type,
|
uint8_t payload_type,
|
||||||
uint16_t sequence_number,
|
uint16_t sequence_number,
|
||||||
uint32_t timestamp,
|
uint32_t timestamp,
|
||||||
@ -147,7 +147,7 @@ void VerifyPacket(const Packet* packet,
|
|||||||
EXPECT_EQ(timestamp, packet->header.timestamp);
|
EXPECT_EQ(timestamp, packet->header.timestamp);
|
||||||
EXPECT_EQ(primary, packet->primary);
|
EXPECT_EQ(primary, packet->primary);
|
||||||
ASSERT_FALSE(packet->payload == NULL);
|
ASSERT_FALSE(packet->payload == NULL);
|
||||||
for (int i = 0; i < packet->payload_length; ++i) {
|
for (size_t i = 0; i < packet->payload_length; ++i) {
|
||||||
EXPECT_EQ(payload_value, packet->payload[i]);
|
EXPECT_EQ(payload_value, packet->payload[i]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -295,7 +295,7 @@ TEST(RedPayloadSplitter, TwoPacketsThreePayloads) {
|
|||||||
// found in the list (which is PCMu).
|
// found in the list (which is PCMu).
|
||||||
TEST(RedPayloadSplitter, CheckRedPayloads) {
|
TEST(RedPayloadSplitter, CheckRedPayloads) {
|
||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
for (int i = 0; i <= 3; ++i) {
|
for (uint8_t i = 0; i <= 3; ++i) {
|
||||||
// Create packet with payload type |i|, payload length 10 bytes, all 0.
|
// Create packet with payload type |i|, payload length 10 bytes, all 0.
|
||||||
Packet* packet = CreatePacket(i, 10, 0);
|
Packet* packet = CreatePacket(i, 10, 0);
|
||||||
packet_list.push_back(packet);
|
packet_list.push_back(packet);
|
||||||
@ -357,7 +357,7 @@ TEST(AudioPayloadSplitter, NonSplittable) {
|
|||||||
// Set up packets with different RTP payload types. The actual values do not
|
// Set up packets with different RTP payload types. The actual values do not
|
||||||
// matter, since we are mocking the decoder database anyway.
|
// matter, since we are mocking the decoder database anyway.
|
||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
for (int i = 0; i < 6; ++i) {
|
for (uint8_t i = 0; i < 6; ++i) {
|
||||||
// Let the payload type be |i|, and the payload value 10 * |i|.
|
// Let the payload type be |i|, and the payload value 10 * |i|.
|
||||||
packet_list.push_back(CreatePacket(i, kPayloadLength, 10 * i));
|
packet_list.push_back(CreatePacket(i, kPayloadLength, 10 * i));
|
||||||
}
|
}
|
||||||
@ -415,7 +415,7 @@ TEST(AudioPayloadSplitter, NonSplittable) {
|
|||||||
TEST(AudioPayloadSplitter, UnknownPayloadType) {
|
TEST(AudioPayloadSplitter, UnknownPayloadType) {
|
||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
static const uint8_t kPayloadType = 17; // Just a random number.
|
static const uint8_t kPayloadType = 17; // Just a random number.
|
||||||
int kPayloadLengthBytes = 4711; // Random number.
|
size_t kPayloadLengthBytes = 4711; // Random number.
|
||||||
packet_list.push_back(CreatePacket(kPayloadType, kPayloadLengthBytes, 0));
|
packet_list.push_back(CreatePacket(kPayloadType, kPayloadLengthBytes, 0));
|
||||||
|
|
||||||
MockDecoderDatabase decoder_database;
|
MockDecoderDatabase decoder_database;
|
||||||
@ -502,7 +502,7 @@ class SplitBySamplesTest : public ::testing::TestWithParam<NetEqDecoder> {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
int bytes_per_ms_;
|
size_t bytes_per_ms_;
|
||||||
int samples_per_ms_;
|
int samples_per_ms_;
|
||||||
NetEqDecoder decoder_type_;
|
NetEqDecoder decoder_type_;
|
||||||
};
|
};
|
||||||
@ -514,7 +514,7 @@ TEST_P(SplitBySamplesTest, PayloadSizes) {
|
|||||||
for (int payload_size_ms = 10; payload_size_ms <= 60; payload_size_ms += 10) {
|
for (int payload_size_ms = 10; payload_size_ms <= 60; payload_size_ms += 10) {
|
||||||
// The payload values are set to be the same as the payload_size, so that
|
// The payload values are set to be the same as the payload_size, so that
|
||||||
// one can distinguish from which packet the split payloads come from.
|
// one can distinguish from which packet the split payloads come from.
|
||||||
int payload_size_bytes = payload_size_ms * bytes_per_ms_;
|
size_t payload_size_bytes = payload_size_ms * bytes_per_ms_;
|
||||||
packet_list.push_back(CreatePacket(kPayloadType, payload_size_bytes,
|
packet_list.push_back(CreatePacket(kPayloadType, payload_size_bytes,
|
||||||
payload_size_ms));
|
payload_size_ms));
|
||||||
}
|
}
|
||||||
@ -548,7 +548,7 @@ TEST_P(SplitBySamplesTest, PayloadSizes) {
|
|||||||
PacketList::iterator it = packet_list.begin();
|
PacketList::iterator it = packet_list.begin();
|
||||||
int i = 0;
|
int i = 0;
|
||||||
while (it != packet_list.end()) {
|
while (it != packet_list.end()) {
|
||||||
int length_bytes = expected_size_ms[i] * bytes_per_ms_;
|
size_t length_bytes = expected_size_ms[i] * bytes_per_ms_;
|
||||||
uint32_t expected_timestamp = kBaseTimestamp +
|
uint32_t expected_timestamp = kBaseTimestamp +
|
||||||
expected_timestamp_offset_ms[i] * samples_per_ms_;
|
expected_timestamp_offset_ms[i] * samples_per_ms_;
|
||||||
VerifyPacket((*it), length_bytes, kPayloadType, kSequenceNumber,
|
VerifyPacket((*it), length_bytes, kPayloadType, kSequenceNumber,
|
||||||
@ -583,7 +583,7 @@ class SplitIlbcTest : public ::testing::TestWithParam<std::pair<int, int> > {
|
|||||||
}
|
}
|
||||||
size_t num_frames_;
|
size_t num_frames_;
|
||||||
int frame_length_ms_;
|
int frame_length_ms_;
|
||||||
int frame_length_bytes_;
|
size_t frame_length_bytes_;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Test splitting sample-based payloads.
|
// Test splitting sample-based payloads.
|
||||||
@ -591,10 +591,10 @@ TEST_P(SplitIlbcTest, NumFrames) {
|
|||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
static const uint8_t kPayloadType = 17; // Just a random number.
|
static const uint8_t kPayloadType = 17; // Just a random number.
|
||||||
const int frame_length_samples = frame_length_ms_ * 8;
|
const int frame_length_samples = frame_length_ms_ * 8;
|
||||||
int payload_length_bytes = frame_length_bytes_ * num_frames_;
|
size_t payload_length_bytes = frame_length_bytes_ * num_frames_;
|
||||||
Packet* packet = CreatePacket(kPayloadType, payload_length_bytes, 0);
|
Packet* packet = CreatePacket(kPayloadType, payload_length_bytes, 0);
|
||||||
// Fill payload with increasing integers {0, 1, 2, ...}.
|
// Fill payload with increasing integers {0, 1, 2, ...}.
|
||||||
for (int i = 0; i < packet->payload_length; ++i) {
|
for (size_t i = 0; i < packet->payload_length; ++i) {
|
||||||
packet->payload[i] = static_cast<uint8_t>(i);
|
packet->payload[i] = static_cast<uint8_t>(i);
|
||||||
}
|
}
|
||||||
packet_list.push_back(packet);
|
packet_list.push_back(packet);
|
||||||
@ -624,7 +624,7 @@ TEST_P(SplitIlbcTest, NumFrames) {
|
|||||||
EXPECT_EQ(kSequenceNumber, packet->header.sequenceNumber);
|
EXPECT_EQ(kSequenceNumber, packet->header.sequenceNumber);
|
||||||
EXPECT_EQ(true, packet->primary);
|
EXPECT_EQ(true, packet->primary);
|
||||||
ASSERT_FALSE(packet->payload == NULL);
|
ASSERT_FALSE(packet->payload == NULL);
|
||||||
for (int i = 0; i < packet->payload_length; ++i) {
|
for (size_t i = 0; i < packet->payload_length; ++i) {
|
||||||
EXPECT_EQ(payload_value, packet->payload[i]);
|
EXPECT_EQ(payload_value, packet->payload[i]);
|
||||||
++payload_value;
|
++payload_value;
|
||||||
}
|
}
|
||||||
@ -661,7 +661,7 @@ INSTANTIATE_TEST_CASE_P(
|
|||||||
TEST(IlbcPayloadSplitter, TooLargePayload) {
|
TEST(IlbcPayloadSplitter, TooLargePayload) {
|
||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
static const uint8_t kPayloadType = 17; // Just a random number.
|
static const uint8_t kPayloadType = 17; // Just a random number.
|
||||||
int kPayloadLengthBytes = 950;
|
size_t kPayloadLengthBytes = 950;
|
||||||
Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
|
Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
|
||||||
packet_list.push_back(packet);
|
packet_list.push_back(packet);
|
||||||
|
|
||||||
@ -692,7 +692,7 @@ TEST(IlbcPayloadSplitter, TooLargePayload) {
|
|||||||
TEST(IlbcPayloadSplitter, UnevenPayload) {
|
TEST(IlbcPayloadSplitter, UnevenPayload) {
|
||||||
PacketList packet_list;
|
PacketList packet_list;
|
||||||
static const uint8_t kPayloadType = 17; // Just a random number.
|
static const uint8_t kPayloadType = 17; // Just a random number.
|
||||||
int kPayloadLengthBytes = 39; // Not an even number of frames.
|
size_t kPayloadLengthBytes = 39; // Not an even number of frames.
|
||||||
Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
|
Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
|
||||||
packet_list.push_back(packet);
|
packet_list.push_back(packet);
|
||||||
|
|
||||||
@ -744,7 +744,7 @@ TEST(FecPayloadSplitter, MixedPayload) {
|
|||||||
packet = packet_list.front();
|
packet = packet_list.front();
|
||||||
EXPECT_EQ(0, packet->header.payloadType);
|
EXPECT_EQ(0, packet->header.payloadType);
|
||||||
EXPECT_EQ(kBaseTimestamp - 20 * 48, packet->header.timestamp);
|
EXPECT_EQ(kBaseTimestamp - 20 * 48, packet->header.timestamp);
|
||||||
EXPECT_EQ(10, packet->payload_length);
|
EXPECT_EQ(10U, packet->payload_length);
|
||||||
EXPECT_FALSE(packet->primary);
|
EXPECT_FALSE(packet->primary);
|
||||||
delete [] packet->payload;
|
delete [] packet->payload;
|
||||||
delete packet;
|
delete packet;
|
||||||
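The 10 -> 10U changes in this file exist because EXPECT_EQ compares its two arguments with ==; an int literal against the now-size_t payload_length would raise a signed/unsigned comparison warning. A minimal illustration, assuming gtest is on the include path (test and variable names are hypothetical):

#include <cstddef>
#include "gtest/gtest.h"

TEST(PayloadLengthSketch, UnsignedLiteral) {
  size_t payload_length = 10;
  // The unsigned literal keeps both sides of the comparison unsigned.
  EXPECT_EQ(10U, payload_length);
}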
@ -754,7 +754,7 @@ TEST(FecPayloadSplitter, MixedPayload) {
|
|||||||
packet = packet_list.front();
|
packet = packet_list.front();
|
||||||
EXPECT_EQ(0, packet->header.payloadType);
|
EXPECT_EQ(0, packet->header.payloadType);
|
||||||
EXPECT_EQ(kBaseTimestamp, packet->header.timestamp);
|
EXPECT_EQ(kBaseTimestamp, packet->header.timestamp);
|
||||||
EXPECT_EQ(10, packet->payload_length);
|
EXPECT_EQ(10U, packet->payload_length);
|
||||||
EXPECT_TRUE(packet->primary);
|
EXPECT_TRUE(packet->primary);
|
||||||
delete [] packet->payload;
|
delete [] packet->payload;
|
||||||
delete packet;
|
delete packet;
|
||||||
|
@ -329,7 +329,7 @@ uint8_t * NETEQTEST_RTPpacket::payload() const
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
int16_t NETEQTEST_RTPpacket::payloadLen()
|
size_t NETEQTEST_RTPpacket::payloadLen()
|
||||||
{
|
{
|
||||||
parseHeader();
|
parseHeader();
|
||||||
return _payloadLen;
|
return _payloadLen;
|
||||||
@ -752,7 +752,7 @@ void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket* slaveRtp,
|
|||||||
int stride)
|
int stride)
|
||||||
{
|
{
|
||||||
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
||||||
|| _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
|
|| _payloadLen == 0 || slaveRtp->_memSize < _memSize)
|
||||||
{
|
{
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -761,7 +761,7 @@ void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket* slaveRtp,
|
|||||||
uint8_t *writeDataPtr = _payloadPtr;
|
uint8_t *writeDataPtr = _payloadPtr;
|
||||||
uint8_t *slaveData = slaveRtp->_payloadPtr;
|
uint8_t *slaveData = slaveRtp->_payloadPtr;
|
||||||
|
|
||||||
while (readDataPtr - _payloadPtr < _payloadLen)
|
while (readDataPtr - _payloadPtr < static_cast<ptrdiff_t>(_payloadLen))
|
||||||
{
|
{
|
||||||
// master data
|
// master data
|
||||||
for (int ix = 0; ix < stride; ix++) {
|
for (int ix = 0; ix < stride; ix++) {
|
||||||
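Both edits in this file follow from _payloadLen turning unsigned: a size_t can never be negative, so the <= 0 guard collapses to == 0, and a pointer difference (signed ptrdiff_t) compared against it needs an explicit cast so both sides agree. A small sketch of the comparison pattern (names hypothetical):

#include <cstddef>
#include <cstdint>

// Walking a payload with a pointer cursor: the pointer difference is a
// signed ptrdiff_t, so the unsigned length is cast once for the comparison.
size_t CountBytes(const uint8_t* payload_ptr, size_t payload_len) {
  const uint8_t* read_ptr = payload_ptr;
  size_t count = 0;
  while (read_ptr - payload_ptr < static_cast<ptrdiff_t>(payload_len)) {
    ++count;
    ++read_ptr;
  }
  return count;
}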
@ -786,7 +786,7 @@ void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket* slaveRtp,
|
|||||||
void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp)
|
void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp)
|
||||||
{
|
{
|
||||||
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
||||||
|| _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
|
|| _payloadLen == 0 || slaveRtp->_memSize < _memSize)
|
||||||
{
|
{
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -799,7 +799,7 @@ void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp)
|
|||||||
void NETEQTEST_RTPpacket::splitStereoDouble(NETEQTEST_RTPpacket* slaveRtp)
|
void NETEQTEST_RTPpacket::splitStereoDouble(NETEQTEST_RTPpacket* slaveRtp)
|
||||||
{
|
{
|
||||||
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
|
||||||
|| _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
|
|| _payloadLen == 0 || slaveRtp->_memSize < _memSize)
|
||||||
{
|
{
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -868,7 +868,7 @@ void NETEQTEST_RTPpacket::scramblePayload(void)
|
|||||||
{
|
{
|
||||||
parseHeader();
|
parseHeader();
|
||||||
|
|
||||||
for (int i = 0; i < _payloadLen; ++i)
|
for (size_t i = 0; i < _payloadLen; ++i)
|
||||||
{
|
{
|
||||||
_payloadPtr[i] = static_cast<uint8_t>(rand());
|
_payloadPtr[i] = static_cast<uint8_t>(rand());
|
||||||
}
|
}
|
||||||
|
@ -42,7 +42,7 @@ public:
|
|||||||
const webrtc::WebRtcRTPHeader* RTPinfo() const;
|
const webrtc::WebRtcRTPHeader* RTPinfo() const;
|
||||||
uint8_t * datagram() const;
|
uint8_t * datagram() const;
|
||||||
uint8_t * payload() const;
|
uint8_t * payload() const;
|
||||||
int16_t payloadLen();
|
size_t payloadLen();
|
||||||
int16_t dataLen() const;
|
int16_t dataLen() const;
|
||||||
bool isParsed() const;
|
bool isParsed() const;
|
||||||
bool isLost() const;
|
bool isLost() const;
|
||||||
@ -73,7 +73,7 @@ public:
|
|||||||
uint8_t * _payloadPtr;
|
uint8_t * _payloadPtr;
|
||||||
int _memSize;
|
int _memSize;
|
||||||
int16_t _datagramLen;
|
int16_t _datagramLen;
|
||||||
int16_t _payloadLen;
|
size_t _payloadLen;
|
||||||
webrtc::WebRtcRTPHeader _rtpInfo;
|
webrtc::WebRtcRTPHeader _rtpInfo;
|
||||||
bool _rtpParsed;
|
bool _rtpParsed;
|
||||||
uint32_t _receiveTime;
|
uint32_t _receiveTime;
|
||||||
|
@ -64,9 +64,9 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
|
|||||||
const int16_t* input_samples = audio_loop.GetNextBlock();
|
const int16_t* input_samples = audio_loop.GetNextBlock();
|
||||||
if (!input_samples) exit(1);
|
if (!input_samples) exit(1);
|
||||||
uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)];
|
uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)];
|
||||||
int payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
|
size_t payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
|
||||||
kInputBlockSizeSamples,
|
kInputBlockSizeSamples,
|
||||||
input_payload);
|
input_payload);
|
||||||
assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
|
assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
|
||||||
|
|
||||||
// Main loop.
|
// Main loop.
|
||||||
|
@ -118,7 +118,7 @@ class NetEqQualityTest : public ::testing::Test {
|
|||||||
// Expected output number of samples per channel in a frame.
|
// Expected output number of samples per channel in a frame.
|
||||||
const int out_size_samples_;
|
const int out_size_samples_;
|
||||||
|
|
||||||
int payload_size_bytes_;
|
size_t payload_size_bytes_;
|
||||||
int max_payload_bytes_;
|
int max_payload_bytes_;
|
||||||
|
|
||||||
scoped_ptr<InputAudioFile> in_file_;
|
scoped_ptr<InputAudioFile> in_file_;
|
||||||
@ -134,7 +134,7 @@ class NetEqQualityTest : public ::testing::Test {
|
|||||||
scoped_ptr<int16_t[]> out_data_;
|
scoped_ptr<int16_t[]> out_data_;
|
||||||
WebRtcRTPHeader rtp_header_;
|
WebRtcRTPHeader rtp_header_;
|
||||||
|
|
||||||
long total_payload_size_bytes_;
|
size_t total_payload_size_bytes_;
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace test
|
} // namespace test
|
||||||
|
@ -286,7 +286,7 @@ int main(int argc, char* argv[]) {
|
|||||||
int error =
|
int error =
|
||||||
neteq->InsertPacket(rtp_header,
|
neteq->InsertPacket(rtp_header,
|
||||||
payload_ptr,
|
payload_ptr,
|
||||||
static_cast<int>(payload_len),
|
payload_len,
|
||||||
packet->time_ms() * sample_rate_hz / 1000);
|
packet->time_ms() * sample_rate_hz / 1000);
|
||||||
if (error != NetEq::kOK) {
|
if (error != NetEq::kOK) {
|
||||||
if (neteq->LastError() == NetEq::kUnknownRtpPayloadType) {
|
if (neteq->LastError() == NetEq::kUnknownRtpPayloadType) {
|
||||||
|
@ -136,10 +136,10 @@ class RTPFragmentationHeader {
|
|||||||
if (src.fragmentationVectorSize > 0) {
|
if (src.fragmentationVectorSize > 0) {
|
||||||
// allocate new
|
// allocate new
|
||||||
if (src.fragmentationOffset) {
|
if (src.fragmentationOffset) {
|
||||||
fragmentationOffset = new uint32_t[src.fragmentationVectorSize];
|
fragmentationOffset = new size_t[src.fragmentationVectorSize];
|
||||||
}
|
}
|
||||||
if (src.fragmentationLength) {
|
if (src.fragmentationLength) {
|
||||||
fragmentationLength = new uint32_t[src.fragmentationVectorSize];
|
fragmentationLength = new size_t[src.fragmentationVectorSize];
|
||||||
}
|
}
|
||||||
if (src.fragmentationTimeDiff) {
|
if (src.fragmentationTimeDiff) {
|
||||||
fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
|
fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
|
||||||
@ -156,11 +156,11 @@ class RTPFragmentationHeader {
|
|||||||
// copy values
|
// copy values
|
||||||
if (src.fragmentationOffset) {
|
if (src.fragmentationOffset) {
|
||||||
memcpy(fragmentationOffset, src.fragmentationOffset,
|
memcpy(fragmentationOffset, src.fragmentationOffset,
|
||||||
src.fragmentationVectorSize * sizeof(uint32_t));
|
src.fragmentationVectorSize * sizeof(size_t));
|
||||||
}
|
}
|
||||||
if (src.fragmentationLength) {
|
if (src.fragmentationLength) {
|
||||||
memcpy(fragmentationLength, src.fragmentationLength,
|
memcpy(fragmentationLength, src.fragmentationLength,
|
||||||
src.fragmentationVectorSize * sizeof(uint32_t));
|
src.fragmentationVectorSize * sizeof(size_t));
|
||||||
}
|
}
|
||||||
if (src.fragmentationTimeDiff) {
|
if (src.fragmentationTimeDiff) {
|
||||||
memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
|
memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
|
||||||
@ -178,23 +178,23 @@ class RTPFragmentationHeader {
|
|||||||
uint16_t oldVectorSize = fragmentationVectorSize;
|
uint16_t oldVectorSize = fragmentationVectorSize;
|
||||||
{
|
{
|
||||||
// offset
|
// offset
|
||||||
uint32_t* oldOffsets = fragmentationOffset;
|
size_t* oldOffsets = fragmentationOffset;
|
||||||
fragmentationOffset = new uint32_t[size];
|
fragmentationOffset = new size_t[size];
|
||||||
memset(fragmentationOffset + oldVectorSize, 0,
|
memset(fragmentationOffset + oldVectorSize, 0,
|
||||||
sizeof(uint32_t) * (size - oldVectorSize));
|
sizeof(size_t) * (size - oldVectorSize));
|
||||||
// copy old values
|
// copy old values
|
||||||
memcpy(fragmentationOffset, oldOffsets,
|
memcpy(fragmentationOffset, oldOffsets,
|
||||||
sizeof(uint32_t) * oldVectorSize);
|
sizeof(size_t) * oldVectorSize);
|
||||||
delete[] oldOffsets;
|
delete[] oldOffsets;
|
||||||
}
|
}
|
||||||
// length
|
// length
|
||||||
{
|
{
|
||||||
uint32_t* oldLengths = fragmentationLength;
|
size_t* oldLengths = fragmentationLength;
|
||||||
fragmentationLength = new uint32_t[size];
|
fragmentationLength = new size_t[size];
|
||||||
memset(fragmentationLength + oldVectorSize, 0,
|
memset(fragmentationLength + oldVectorSize, 0,
|
||||||
sizeof(uint32_t) * (size - oldVectorSize));
|
sizeof(size_t) * (size - oldVectorSize));
|
||||||
memcpy(fragmentationLength, oldLengths,
|
memcpy(fragmentationLength, oldLengths,
|
||||||
sizeof(uint32_t) * oldVectorSize);
|
sizeof(size_t) * oldVectorSize);
|
||||||
delete[] oldLengths;
|
delete[] oldLengths;
|
||||||
}
|
}
|
||||||
// time diff
|
// time diff
|
||||||
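Every memset/memcpy above has its sizeof(uint32_t) changed in lockstep with the element type; spelling the byte count as sizeof(*pointer) would make that coupling automatic. A hedged alternative sketch, not how the CL writes it, assuming new_count >= old_count as the original code does:

#include <cstddef>
#include <cstring>

// Grow an offsets array while preserving existing entries. sizeof(*offsets)
// keeps the byte counts correct even if the element type changes again.
size_t* GrowOffsets(size_t* old_offsets, size_t old_count, size_t new_count) {
  size_t* offsets = new size_t[new_count];
  std::memset(offsets + old_count, 0,
              sizeof(*offsets) * (new_count - old_count));
  std::memcpy(offsets, old_offsets, sizeof(*offsets) * old_count);
  delete[] old_offsets;
  return offsets;
}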
@ -222,11 +222,12 @@ class RTPFragmentationHeader {
|
|||||||
}
|
}
|
||||||
|
|
||||||
uint16_t fragmentationVectorSize; // Number of fragmentations
|
uint16_t fragmentationVectorSize; // Number of fragmentations
|
||||||
uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm.
|
size_t* fragmentationOffset; // Offset of pointer to data for each
|
||||||
uint32_t* fragmentationLength; // Data size for each fragmentation
|
// fragmentation
|
||||||
uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for
|
size_t* fragmentationLength; // Data size for each fragmentation
|
||||||
// each fragmentation
|
uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for
|
||||||
uint8_t* fragmentationPlType; // Payload type of each fragmentation
|
// each fragmentation
|
||||||
|
uint8_t* fragmentationPlType; // Payload type of each fragmentation
|
||||||
|
|
||||||
private:
|
private:
|
||||||
DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
|
DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
|
||||||
@ -348,7 +349,7 @@ class EncodedVideoData {
|
|||||||
}
|
}
|
||||||
return *this;
|
return *this;
|
||||||
};
|
};
|
||||||
void VerifyAndAllocate(const uint32_t size) {
|
void VerifyAndAllocate(const size_t size) {
|
||||||
if (bufferSize < size) {
|
if (bufferSize < size) {
|
||||||
uint8_t* oldPayload = payloadData;
|
uint8_t* oldPayload = payloadData;
|
||||||
payloadData = new uint8_t[size];
|
payloadData = new uint8_t[size];
|
||||||
@ -367,8 +368,8 @@ class EncodedVideoData {
|
|||||||
bool completeFrame;
|
bool completeFrame;
|
||||||
bool missingFrame;
|
bool missingFrame;
|
||||||
uint8_t* payloadData;
|
uint8_t* payloadData;
|
||||||
uint32_t payloadSize;
|
size_t payloadSize;
|
||||||
uint32_t bufferSize;
|
size_t bufferSize;
|
||||||
RTPFragmentationHeader fragmentationHeader;
|
RTPFragmentationHeader fragmentationHeader;
|
||||||
FrameType frameType;
|
FrameType frameType;
|
||||||
VideoCodecType codec;
|
VideoCodecType codec;
|
||||||
@ -414,17 +415,17 @@ class VideoFrame {
|
|||||||
* is copied to the new buffer.
|
* is copied to the new buffer.
|
||||||
* Buffer size is updated to minimumSize.
|
* Buffer size is updated to minimumSize.
|
||||||
*/
|
*/
|
||||||
int32_t VerifyAndAllocate(const uint32_t minimumSize);
|
int32_t VerifyAndAllocate(const size_t minimumSize);
|
||||||
/**
|
/**
|
||||||
* Update length of data buffer in frame. Function verifies that new length
|
* Update length of data buffer in frame. Function verifies that new length
|
||||||
* is less or
|
* is less or
|
||||||
* equal to allocated size.
|
* equal to allocated size.
|
||||||
*/
|
*/
|
||||||
int32_t SetLength(const uint32_t newLength);
|
int32_t SetLength(const size_t newLength);
|
||||||
/*
|
/*
|
||||||
* Swap buffer and size data
|
* Swap buffer and size data
|
||||||
*/
|
*/
|
||||||
int32_t Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize);
|
int32_t Swap(uint8_t*& newMemory, size_t& newLength, size_t& newSize);
|
||||||
/*
|
/*
|
||||||
* Swap buffer and size data
|
* Swap buffer and size data
|
||||||
*/
|
*/
|
||||||
@ -440,7 +441,7 @@ class VideoFrame {
|
|||||||
* size length
|
* size length
|
||||||
* is allocated.
|
* is allocated.
|
||||||
*/
|
*/
|
||||||
int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer);
|
int32_t CopyFrame(size_t length, const uint8_t* sourceBuffer);
|
||||||
/**
|
/**
|
||||||
* Delete VideoFrame and resets members to zero
|
* Delete VideoFrame and resets members to zero
|
||||||
*/
|
*/
|
||||||
@ -459,11 +460,11 @@ class VideoFrame {
|
|||||||
/**
|
/**
|
||||||
* Get allocated buffer size
|
* Get allocated buffer size
|
||||||
*/
|
*/
|
||||||
uint32_t Size() const { return _bufferSize; }
|
size_t Size() const { return _bufferSize; }
|
||||||
/**
|
/**
|
||||||
* Get frame length
|
* Get frame length
|
||||||
*/
|
*/
|
||||||
uint32_t Length() const { return _bufferLength; }
|
size_t Length() const { return _bufferLength; }
|
||||||
/**
|
/**
|
||||||
* Get frame timestamp (90kHz)
|
* Get frame timestamp (90kHz)
|
||||||
*/
|
*/
|
||||||
@ -498,10 +499,10 @@ class VideoFrame {
|
|||||||
private:
|
private:
|
||||||
void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp);
|
void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp);
|
||||||
|
|
||||||
uint8_t* _buffer; // Pointer to frame buffer
|
uint8_t* _buffer; // Pointer to frame buffer
|
||||||
uint32_t _bufferSize; // Allocated buffer size
|
size_t _bufferSize; // Allocated buffer size
|
||||||
uint32_t _bufferLength; // Length (in bytes) of buffer
|
size_t _bufferLength; // Length (in bytes) of buffer
|
||||||
uint32_t _timeStamp; // Timestamp of frame (90kHz)
|
uint32_t _timeStamp; // Timestamp of frame (90kHz)
|
||||||
uint32_t _width;
|
uint32_t _width;
|
||||||
uint32_t _height;
|
uint32_t _height;
|
||||||
int64_t _renderTimeMs;
|
int64_t _renderTimeMs;
|
||||||
@ -525,7 +526,7 @@ inline VideoFrame::~VideoFrame() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
inline int32_t VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) {
|
inline int32_t VideoFrame::VerifyAndAllocate(const size_t minimumSize) {
|
||||||
if (minimumSize < 1) {
|
if (minimumSize < 1) {
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
@ -545,7 +546,7 @@ inline int32_t VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
inline int32_t VideoFrame::SetLength(const uint32_t newLength) {
|
inline int32_t VideoFrame::SetLength(const size_t newLength) {
|
||||||
if (newLength > _bufferSize) { // can't accommodate new value
|
if (newLength > _bufferSize) { // can't accommodate new value
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
@ -573,21 +574,15 @@ inline int32_t VideoFrame::SwapFrame(VideoFrame& videoFrame) {
|
|||||||
videoFrame._bufferSize);
|
videoFrame._bufferSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
inline int32_t VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength,
|
inline int32_t VideoFrame::Swap(uint8_t*& newMemory, size_t& newLength,
|
||||||
uint32_t& newSize) {
|
size_t& newSize) {
|
||||||
uint8_t* tmpBuffer = _buffer;
|
std::swap(_buffer, newMemory);
|
||||||
uint32_t tmpLength = _bufferLength;
|
std::swap(_bufferLength, newLength);
|
||||||
uint32_t tmpSize = _bufferSize;
|
std::swap(_bufferSize, newSize);
|
||||||
_buffer = newMemory;
|
|
||||||
_bufferLength = newLength;
|
|
||||||
_bufferSize = newSize;
|
|
||||||
newMemory = tmpBuffer;
|
|
||||||
newLength = tmpLength;
|
|
||||||
newSize = tmpSize;
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
inline int32_t VideoFrame::CopyFrame(uint32_t length,
|
inline int32_t VideoFrame::CopyFrame(size_t length,
|
||||||
const uint8_t* sourceBuffer) {
|
const uint8_t* sourceBuffer) {
|
||||||
if (length > _bufferSize) {
|
if (length > _bufferSize) {
|
||||||
int32_t ret = VerifyAndAllocate(length);
|
int32_t ret = VerifyAndAllocate(length);
|
||||||
|
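Swapping the three members through std::swap (above) is tidier than the removed temporaries, but the header then needs the right include: std::swap lives in <algorithm> in C++98/03 and in <utility> from C++11 on. A self-contained sketch of the same swap, detached from the VideoFrame class:

#include <algorithm>  // std::swap before C++11; <utility> from C++11 onward
#include <cstddef>
#include <cstdint>

void SwapBuffer(uint8_t*& buffer, size_t& length, size_t& size,
                uint8_t*& new_memory, size_t& new_length, size_t& new_size) {
  std::swap(buffer, new_memory);
  std::swap(length, new_length);
  std::swap(size, new_size);
}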
@ -39,7 +39,7 @@ public:
|
|||||||
// mono).
|
// mono).
|
||||||
virtual int32_t PlayoutAudioData(
|
virtual int32_t PlayoutAudioData(
|
||||||
int8_t* audioBuffer,
|
int8_t* audioBuffer,
|
||||||
uint32_t& dataLengthInBytes) = 0;
|
size_t& dataLengthInBytes) = 0;
|
||||||
|
|
||||||
// Put one video frame into videoBuffer. dataLengthInBytes is both an input
|
// Put one video frame into videoBuffer. dataLengthInBytes is both an input
|
||||||
// and output parameter. As input parameter it indicates the size of
|
// and output parameter. As input parameter it indicates the size of
|
||||||
@ -47,7 +47,7 @@ public:
|
|||||||
// to videoBuffer.
|
// to videoBuffer.
|
||||||
virtual int32_t PlayoutAVIVideoData(
|
virtual int32_t PlayoutAVIVideoData(
|
||||||
int8_t* videoBuffer,
|
int8_t* videoBuffer,
|
||||||
uint32_t& dataLengthInBytes) = 0;
|
size_t& dataLengthInBytes) = 0;
|
||||||
|
|
||||||
// Put 10-60ms, depending on codec frame size, of audio data from file into
|
// Put 10-60ms, depending on codec frame size, of audio data from file into
|
||||||
// audioBufferLeft and audioBufferRight. The buffers contain the left and
|
// audioBufferLeft and audioBufferRight. The buffers contain the left and
|
||||||
@ -61,7 +61,7 @@ public:
|
|||||||
virtual int32_t PlayoutStereoData(
|
virtual int32_t PlayoutStereoData(
|
||||||
int8_t* audioBufferLeft,
|
int8_t* audioBufferLeft,
|
||||||
int8_t* audioBufferRight,
|
int8_t* audioBufferRight,
|
||||||
uint32_t& dataLengthInBytes) = 0;
|
size_t& dataLengthInBytes) = 0;
|
||||||
|
|
||||||
// Open the file specified by fileName (relative path is allowed) for
|
// Open the file specified by fileName (relative path is allowed) for
|
||||||
// reading. FileCallback::PlayNotification(..) will be called after
|
// reading. FileCallback::PlayNotification(..) will be called after
|
||||||
@ -130,8 +130,8 @@ public:
|
|||||||
// parameter of the last successful StartRecordingAudioFile(..) call.
|
// parameter of the last successful StartRecordingAudioFile(..) call.
|
||||||
// Note: bufferLength must be exactly one frame.
|
// Note: bufferLength must be exactly one frame.
|
||||||
virtual int32_t IncomingAudioData(
|
virtual int32_t IncomingAudioData(
|
||||||
const int8_t* audioBuffer,
|
const int8_t* audioBuffer,
|
||||||
const uint32_t bufferLength) = 0;
|
const size_t bufferLength) = 0;
|
||||||
|
|
||||||
// Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
|
// Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
|
||||||
// to file.
|
// to file.
|
||||||
@ -140,8 +140,8 @@ public:
|
|||||||
// StartRecordingVideoFile(..) call. The videoBuffer must contain exactly
|
// StartRecordingVideoFile(..) call. The videoBuffer must contain exactly
|
||||||
// one video frame.
|
// one video frame.
|
||||||
virtual int32_t IncomingAVIVideoData(
|
virtual int32_t IncomingAVIVideoData(
|
||||||
const int8_t* videoBuffer,
|
const int8_t* videoBuffer,
|
||||||
const uint32_t bufferLength) = 0;
|
const size_t bufferLength) = 0;
|
||||||
|
|
||||||
// Open/creates file specified by fileName for writing (relative path is
|
// Open/creates file specified by fileName for writing (relative path is
|
||||||
// allowed). FileCallback::RecordNotification(..) will be called after
|
// allowed). FileCallback::RecordNotification(..) will be called after
|
||||||
|
@@ -360,7 +360,7 @@ int32_t AviFile::GetAudioStreamInfo(WAVEFORMATEX& waveHeader)
 return 0;
 }

-int32_t AviFile::WriteAudio(const uint8_t* data, int32_t length)
+int32_t AviFile::WriteAudio(const uint8_t* data, size_t length)
 {
 _crit->Enter();
 size_t newBytesWritten = _bytesWritten;
@@ -410,7 +410,7 @@ int32_t AviFile::WriteAudio(const uint8_t* data, int32_t length)
 return static_cast<int32_t>(newBytesWritten);
 }

-int32_t AviFile::WriteVideo(const uint8_t* data, int32_t length)
+int32_t AviFile::WriteVideo(const uint8_t* data, size_t length)
 {
 _crit->Enter();
 size_t newBytesWritten = _bytesWritten;
@@ -482,7 +482,7 @@ int32_t AviFile::PrepareDataChunkHeaders()
 return 0;
 }

-int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
+int32_t AviFile::ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
 uint32_t tag2)
 {
 if (!_reading)
@@ -563,7 +563,7 @@ int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
 _bytesRead += size;
 }

-if (static_cast<int32_t>(size) > length)
+if (size > length)
 {
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
 "AviFile::ReadMoviSubChunk(): AVI read buffer too small!");
@@ -589,7 +589,7 @@ int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
 return 0;
 }

-int32_t AviFile::ReadAudio(uint8_t* data, int32_t& length)
+int32_t AviFile::ReadAudio(uint8_t* data, size_t& length)
 {
 _crit->Enter();
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadAudio()");
@@ -616,7 +616,7 @@ int32_t AviFile::ReadAudio(uint8_t* data, int32_t& length)
 return ret;
 }

-int32_t AviFile::ReadVideo(uint8_t* data, int32_t& length)
+int32_t AviFile::ReadVideo(uint8_t* data, size_t& length)
 {
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadVideo()");

@@ -104,8 +104,8 @@ public:
 const WAVEFORMATEX& waveFormatHeader);
 int32_t Create(const char* fileName);

-int32_t WriteAudio(const uint8_t* data, int32_t length);
-int32_t WriteVideo(const uint8_t* data, int32_t length);
+int32_t WriteAudio(const uint8_t* data, size_t length);
+int32_t WriteVideo(const uint8_t* data, size_t length);

 int32_t GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
 BITMAPINFOHEADER& bitmapInfo,
@@ -116,8 +116,8 @@ public:

 int32_t GetAudioStreamInfo(WAVEFORMATEX& waveHeader);

-int32_t ReadAudio(uint8_t* data, int32_t& length);
-int32_t ReadVideo(uint8_t* data, int32_t& length);
+int32_t ReadAudio(uint8_t* data, size_t& length);
+int32_t ReadVideo(uint8_t* data, size_t& length);

 int32_t Close();

@@ -145,7 +145,7 @@ private:

 int32_t PrepareDataChunkHeaders();

-int32_t ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
+int32_t ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
 uint32_t tag2 = 0);

 int32_t WriteRIFF();
@@ -10,6 +10,7 @@

 #include <assert.h>

+#include "webrtc/base/format_macros.h"
 #include "webrtc/modules/media_file/source/media_file_impl.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/file_wrapper.h"
@@ -109,25 +110,25 @@ int32_t MediaFileImpl::Process()

 int32_t MediaFileImpl::PlayoutAVIVideoData(
 int8_t* buffer,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 return PlayoutData( buffer, dataLengthInBytes, true);
 }

 int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 return PlayoutData( buffer, dataLengthInBytes, false);
 }

-int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,
+int32_t MediaFileImpl::PlayoutData(int8_t* buffer, size_t& dataLengthInBytes,
 bool video)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %ld)",
+"MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
 buffer, dataLengthInBytes);

-const uint32_t bufferLengthInBytes = dataLengthInBytes;
+const size_t bufferLengthInBytes = dataLengthInBytes;
 dataLengthInBytes = 0;

 if(buffer == NULL || bufferLengthInBytes == 0)
@@ -185,7 +186,7 @@ int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,
 bufferLengthInBytes);
 if(bytesRead > 0)
 {
-dataLengthInBytes = bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);
 return 0;
 }
 break;
@@ -216,7 +217,7 @@ int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,

 if( bytesRead > 0)
 {
-dataLengthInBytes =(uint32_t) bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);
 }
 }
 HandlePlayCallbacks(bytesRead);
@@ -266,16 +267,16 @@ void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead)
 int32_t MediaFileImpl::PlayoutStereoData(
 int8_t* bufferLeft,
 int8_t* bufferRight,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,\
-Len= %ld)",
+"MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,"
+" Len= %" PRIuS ")",
 bufferLeft,
 bufferRight,
 dataLengthInBytes);

-const uint32_t bufferLengthInBytes = dataLengthInBytes;
+const size_t bufferLengthInBytes = dataLengthInBytes;
 dataLengthInBytes = 0;

 if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
@@ -328,7 +329,7 @@ int32_t MediaFileImpl::PlayoutStereoData(

 if(bytesRead > 0)
 {
-dataLengthInBytes = bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);

 // Check if it's time for PlayNotification(..).
 _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
@@ -690,25 +691,25 @@ bool MediaFileImpl::IsPlaying()

 int32_t MediaFileImpl::IncomingAudioData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes)
+const size_t bufferLengthInBytes)
 {
 return IncomingAudioVideoData( buffer, bufferLengthInBytes, false);
 }

 int32_t MediaFileImpl::IncomingAVIVideoData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes)
+const size_t bufferLengthInBytes)
 {
 return IncomingAudioVideoData( buffer, bufferLengthInBytes, true);
 }

 int32_t MediaFileImpl::IncomingAudioVideoData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes,
+const size_t bufferLengthInBytes,
 const bool video)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFile::IncomingData(buffer= 0x%x, bufLen= %hd",
+"MediaFile::IncomingData(buffer= 0x%x, bufLen= %" PRIuS,
 buffer, bufferLengthInBytes);

 if(buffer == NULL || bufferLengthInBytes == 0)
@@ -803,7 +804,7 @@ int32_t MediaFileImpl::IncomingAudioVideoData(
 {
 if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
 {
-bytesWritten = bufferLengthInBytes;
+bytesWritten = static_cast<int32_t>(bufferLengthInBytes);
 }
 }
 }
@@ -32,12 +32,12 @@ public:

 // MediaFile functions
 virtual int32_t PlayoutAudioData(int8_t* audioBuffer,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t PlayoutAVIVideoData(int8_t* videoBuffer,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t PlayoutStereoData(int8_t* audioBufferLeft,
 int8_t* audioBufferRight,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t StartPlayingAudioFile(
 const char* fileName,
 const uint32_t notificationTimeMs = 0,
@@ -58,10 +58,10 @@ public:
 virtual int32_t StopPlaying() OVERRIDE;
 virtual bool IsPlaying() OVERRIDE;
 virtual int32_t PlayoutPositionMs(uint32_t& positionMs) const OVERRIDE;
 virtual int32_t IncomingAudioData(const int8_t* audioBuffer,
-const uint32_t bufferLength) OVERRIDE;
+const size_t bufferLength) OVERRIDE;
 virtual int32_t IncomingAVIVideoData(const int8_t* audioBuffer,
-const uint32_t bufferLength) OVERRIDE;
+const size_t bufferLength) OVERRIDE;
 virtual int32_t StartRecordingAudioFile(
 const char* fileName,
 const FileFormats format,
@@ -157,14 +157,14 @@ private:
 // audioBuffer. As output parameter it indicates the number of bytes
 // written to audioBuffer. If video is true the data written is a video
 // frame otherwise it is an audio frame.
-int32_t PlayoutData(int8_t* dataBuffer, uint32_t& dataLengthInBytes,
+int32_t PlayoutData(int8_t* dataBuffer, size_t& dataLengthInBytes,
 bool video);

 // Write one frame, i.e. the bufferLength first bytes of audioBuffer,
 // to file. The frame is an audio frame if video is true otherwise it is an
 // audio frame.
 int32_t IncomingAudioVideoData(const int8_t* buffer,
-const uint32_t bufferLength,
+const size_t bufferLength,
 const bool video);

 // Open/creates file specified by fileName for writing (relative path is
@@ -49,9 +49,11 @@ TEST_F(MediaFileTest, DISABLED_ON_ANDROID(StartPlayingAudioFileWithoutError)) {

 TEST_F(MediaFileTest, WriteWavFile) {
 // Write file.
-static const int kHeaderSize = 44;
-static const int kPayloadSize = 320;
-webrtc::CodecInst codec = {0, "L16", 16000, kPayloadSize, 1};
+static const size_t kHeaderSize = 44;
+static const size_t kPayloadSize = 320;
+webrtc::CodecInst codec = {
+0, "L16", 16000, static_cast<int>(kPayloadSize), 1
+};
 std::string outfile = webrtc::test::OutputPath() + "wavtest.wav";
 ASSERT_EQ(0,
 media_file_->StartRecordingAudioFile(
@@ -78,8 +80,7 @@ TEST_F(MediaFileTest, WriteWavFile) {
 };
 COMPILE_ASSERT(sizeof(kExpectedHeader) == kHeaderSize, header_size);

-EXPECT_EQ(size_t(kHeaderSize + kPayloadSize),
-webrtc::test::GetFileSize(outfile));
+EXPECT_EQ(kHeaderSize + kPayloadSize, webrtc::test::GetFileSize(outfile));
 FILE* f = fopen(outfile.c_str(), "rb");
 ASSERT_TRUE(f);

@@ -13,7 +13,9 @@
 #include <assert.h>
 #include <sys/stat.h>
 #include <sys/types.h>
+#include <limits>

+#include "webrtc/base/format_macros.h"
 #include "webrtc/common_audio/wav_header.h"
 #include "webrtc/common_types.h"
 #include "webrtc/engine_configurations.h"
@@ -234,7 +236,7 @@ int32_t ModuleFileUtility::InitAviWriting(

 int32_t ModuleFileUtility::WriteAviAudioData(
 const int8_t* buffer,
-uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if( _aviOutFile != 0)
 {
@@ -251,7 +253,7 @@ int32_t ModuleFileUtility::WriteAviAudioData(

 int32_t ModuleFileUtility::WriteAviVideoData(
 const int8_t* buffer,
-uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if( _aviOutFile != 0)
 {
@@ -370,7 +372,7 @@ int32_t ModuleFileUtility::InitAviReading(const char* filename, bool videoOnly,

 int32_t ModuleFileUtility::ReadAviAudioData(
 int8_t* outBuffer,
-const uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if(_aviAudioInFile == 0)
 {
@@ -378,22 +380,20 @@ int32_t ModuleFileUtility::ReadAviAudioData(
 return -1;
 }

-int32_t length = bufferLengthInBytes;
-if(_aviAudioInFile->ReadAudio(
-reinterpret_cast<uint8_t*>(outBuffer),
-length) != 0)
+if(_aviAudioInFile->ReadAudio(reinterpret_cast<uint8_t*>(outBuffer),
+bufferLengthInBytes) != 0)
 {
 return -1;
 }
 else
 {
-return length;
+return static_cast<int32_t>(bufferLengthInBytes);
 }
 }

 int32_t ModuleFileUtility::ReadAviVideoData(
 int8_t* outBuffer,
-const uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if(_aviVideoInFile == 0)
 {
@@ -401,14 +401,12 @@ int32_t ModuleFileUtility::ReadAviVideoData(
 return -1;
 }

-int32_t length = bufferLengthInBytes;
-if( _aviVideoInFile->ReadVideo(
-reinterpret_cast<uint8_t*>(outBuffer),
-length) != 0)
+if(_aviVideoInFile->ReadVideo(reinterpret_cast<uint8_t*>(outBuffer),
+bufferLengthInBytes) != 0)
 {
 return -1;
 } else {
-return length;
+return static_cast<int32_t>(bufferLengthInBytes);
 }
 }

@@ -774,14 +772,14 @@ int32_t ModuleFileUtility::InitWavReading(InStream& wav,
 int32_t ModuleFileUtility::ReadWavDataAsMono(
 InStream& wav,
 int8_t* outData,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d,\
-bufSize= %ld)",
+"ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d, "
+"bufSize= %" PRIuS ")",
 &wav,
 outData,
 bufferSize);
@@ -853,14 +851,14 @@ int32_t ModuleFileUtility::ReadWavDataAsStereo(
 InStream& wav,
 int8_t* outDataLeft,
 int8_t* outDataRight,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x,\
-outRight= 0x%x, bufSize= %ld)",
+"ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x, "
+"outRight= 0x%x, bufSize= %" PRIuS ")",
 &wav,
 outDataLeft,
 outDataRight,
@@ -1083,13 +1081,14 @@ int32_t ModuleFileUtility::InitWavWriting(OutStream& wav,

 int32_t ModuleFileUtility::WriteWavData(OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+"ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
+")",
 &out,
 buffer,
 dataLength);
@@ -1106,7 +1105,7 @@ int32_t ModuleFileUtility::WriteWavData(OutStream& out,
 return -1;
 }
 _bytesWritten += dataLength;
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }


@@ -1192,14 +1191,14 @@ int32_t ModuleFileUtility::InitPreEncodedReading(InStream& in,
 int32_t ModuleFileUtility::ReadPreEncodedData(
 InStream& in,
 int8_t* outData,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x,\
-bufferSize= %d)",
+"ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x, "
+"bufferSize= %" PRIuS ")",
 &in,
 outData,
 bufferSize);
@@ -1259,14 +1258,14 @@ int32_t ModuleFileUtility::InitPreEncodedWriting(
 int32_t ModuleFileUtility::WritePreEncodedData(
 OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x,\
-dataLen= %d)",
+"ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x, "
+"dataLen= %" PRIuS ")",
 &out,
 buffer,
 dataLength);
@@ -1276,11 +1275,12 @@ int32_t ModuleFileUtility::WritePreEncodedData(
 WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
 }

-int32_t bytesWritten = 0;
+size_t bytesWritten = 0;
 // The first two bytes is the size of the frame.
 int16_t lengthBuf;
 lengthBuf = (int16_t)dataLength;
-if(!out.Write(&lengthBuf, 2))
+if(dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max()) ||
+!out.Write(&lengthBuf, 2))
 {
 return -1;
 }
@@ -1291,7 +1291,7 @@ int32_t ModuleFileUtility::WritePreEncodedData(
 return -1;
 }
 bytesWritten += dataLength;
-return bytesWritten;
+return static_cast<int32_t>(bytesWritten);
 }

 int32_t ModuleFileUtility::InitCompressedReading(
@@ -1495,14 +1495,14 @@ int32_t ModuleFileUtility::InitCompressedReading(

 int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 int8_t* outData,
-uint32_t bufferSize)
+size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x,\
-bytes=%ld)",
+"ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x, bytes=%"
+PRIuS ")",
 &in,
 outData,
 bufferSize);
@@ -1554,7 +1554,7 @@ int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 }
 if(mode != 15)
 {
-if(bufferSize < AMRmode2bytes[mode] + 1)
+if(bufferSize < static_cast<size_t>(AMRmode2bytes[mode] + 1))
 {
 WEBRTC_TRACE(
 kTraceError,
@@ -1612,7 +1612,7 @@ int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 }
 if(mode != 15)
 {
-if(bufferSize < AMRWBmode2bytes[mode] + 1)
+if(bufferSize < static_cast<size_t>(AMRWBmode2bytes[mode] + 1))
 {
 WEBRTC_TRACE(kTraceError, kTraceFile, _id,
 "output buffer is too short to read AMRWB\
@@ -1770,14 +1770,14 @@ int32_t ModuleFileUtility::InitCompressedWriting(
 int32_t ModuleFileUtility::WriteCompressedData(
 OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x,\
-dataLen= %d)",
+"ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x, "
+"dataLen= %" PRIuS ")",
 &out,
 buffer,
 dataLength);
@@ -1791,7 +1791,7 @@ int32_t ModuleFileUtility::WriteCompressedData(
 {
 return -1;
 }
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }

 int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,
@@ -1872,13 +1872,14 @@ int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,

 int32_t ModuleFileUtility::ReadPCMData(InStream& pcm,
 int8_t* outData,
-uint32_t bufferSize)
+size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %d)",
+"ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %"
+PRIuS ")",
 &pcm,
 outData,
 bufferSize);
@@ -2006,13 +2007,14 @@ int32_t ModuleFileUtility::InitPCMWriting(OutStream& out, uint32_t freq)

 int32_t ModuleFileUtility::WritePCMData(OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+"ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
+")",
 &out,
 buffer,
 dataLength);
@@ -2028,7 +2030,7 @@ int32_t ModuleFileUtility::WritePCMData(OutStream& out,
 }

 _bytesWritten += dataLength;
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }

 int32_t ModuleFileUtility::codec_info(CodecInst& codecInst)
@@ -43,13 +43,13 @@ public:
 // audio with more channels (in which case the audio will be coverted to
 // mono).
 int32_t ReadAviAudioData(int8_t* outBuffer,
-const uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Put one video frame into outBuffer. bufferLengthInBytes indicates the
 // size of outBuffer.
 // The return value is the number of bytes written to videoBuffer.
 int32_t ReadAviVideoData(int8_t* videoBuffer,
-const uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Open/create the file specified by fileName for writing audio/video data
 // (relative path is allowed). codecInst specifies the encoding of the audio
@@ -66,7 +66,7 @@ public:
 // InitAviWriting(..) call.
 // Note: bufferLength must be exactly one frame.
 int32_t WriteAviAudioData(const int8_t* audioBuffer,
-uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);


 // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
@@ -76,7 +76,7 @@ public:
 // InitAviWriting(..) call. The videoBuffer must contain exactly
 // one video frame.
 int32_t WriteAviVideoData(const int8_t* videoBuffer,
-uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Stop recording to file or stream.
 int32_t CloseAviFile();
@@ -98,7 +98,7 @@ public:
 // audio with more channels (in which case the audio will be converted to
 // mono).
 int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Put 10-60ms, depending on codec frame size, of audio data from file into
 // audioBufferLeft and audioBufferRight. The buffers contain the left and
@@ -111,7 +111,7 @@ public:
 int32_t ReadWavDataAsStereo(InStream& wav,
 int8_t* audioBufferLeft,
 int8_t* audioBufferRight,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -125,7 +125,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t WriteWavData(OutStream& stream,
 const int8_t* audioBuffer,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Finalizes the WAV header so that it is correct if nothing more will be
 // written to stream.
@@ -148,7 +148,7 @@ public:
 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadPCMData(InStream& stream, int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // freqInHz is the PCM sampling frequency.
@@ -161,7 +161,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t WritePCMData(OutStream& stream,
 const int8_t* audioBuffer,
-uint32_t bufferLength);
+size_t bufferLength);

 // Prepare for playing audio from stream.
 // startPointMs and stopPointMs, unless zero, specify what part of the file
@@ -175,7 +175,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadCompressedData(InStream& stream,
 int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -189,7 +189,7 @@ public:
 // Note: bufferLength must be exactly one frame.
 int32_t WriteCompressedData(OutStream& stream,
 const int8_t* audioBuffer,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Prepare for playing audio from stream.
 // codecInst specifies the encoding of the audio data.
@@ -201,7 +201,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadPreEncodedData(InStream& stream,
 int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -215,7 +215,7 @@ public:
 // Note: bufferLength must be exactly one frame.
 int32_t WritePreEncodedData(OutStream& stream,
 const int8_t* inData,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Set durationMs to the size of the file (in ms) specified by fileName.
 // freqInHz specifies the sampling frequency of the file.
@@ -320,7 +320,7 @@ private:
 uint32_t _stopPointInMs;
 uint32_t _startPointInMs;
 uint32_t _playoutPositionMs;
-uint32_t _bytesWritten;
+size_t _bytesWritten;

 CodecInst codec_info_;
 MediaFileUtility_CodecType _codecId;
@@ -27,7 +27,7 @@ class MockPacedSender : public PacedSender {
 uint32_t ssrc,
 uint16_t sequence_number,
 int64_t capture_time_ms,
-int bytes,
+size_t bytes,
 bool retransmission));
 MOCK_CONST_METHOD0(QueueInMs, int());
 MOCK_CONST_METHOD0(QueueInPackets, int());
@@ -52,7 +52,7 @@ class PacedSender : public Module {
 bool retransmission) = 0;
 // Called when it's a good time to send a padding data.
 // Returns the number of bytes sent.
-virtual int TimeToSendPadding(int bytes) = 0;
+virtual size_t TimeToSendPadding(size_t bytes) = 0;

 protected:
 virtual ~Callback() {}
@@ -102,7 +102,7 @@ class PacedSender : public Module {
 uint32_t ssrc,
 uint16_t sequence_number,
 int64_t capture_time_ms,
-int bytes,
+size_t bytes,
 bool retransmission);

 // Returns the time since the oldest queued packet was enqueued.
@@ -131,7 +131,7 @@ class PacedSender : public Module {

 bool SendPacket(const paced_sender::Packet& packet)
 EXCLUSIVE_LOCKS_REQUIRED(critsect_);
-void SendPadding(int padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+void SendPadding(size_t padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);

 Clock* const clock_;
 Callback* const callback_;
@@ -42,7 +42,7 @@ struct Packet {
 uint16_t seq_number,
 int64_t capture_time_ms,
 int64_t enqueue_time_ms,
-int length_in_bytes,
+size_t length_in_bytes,
 bool retransmission,
 uint64_t enqueue_order)
 : priority(priority),
@@ -59,7 +59,7 @@ struct Packet {
 uint16_t sequence_number;
 int64_t capture_time_ms;
 int64_t enqueue_time_ms;
-int bytes;
+size_t bytes;
 bool retransmission;
 uint64_t enqueue_order;
 std::list<Packet>::iterator this_it;
@@ -189,8 +189,8 @@ class IntervalBudget {
 }
 }

-void UseBudget(int bytes) {
-bytes_remaining_ = std::max(bytes_remaining_ - bytes,
+void UseBudget(size_t bytes) {
+bytes_remaining_ = std::max(bytes_remaining_ - static_cast<int>(bytes),
 -500 * target_rate_kbps_ / 8);
 }

@@ -258,7 +258,7 @@ void PacedSender::UpdateBitrate(int bitrate_kbps,
 }

 bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
-uint16_t sequence_number, int64_t capture_time_ms, int bytes,
+uint16_t sequence_number, int64_t capture_time_ms, size_t bytes,
 bool retransmission) {
 CriticalSectionScoped cs(critsect_.get());

@@ -353,7 +353,7 @@ int32_t PacedSender::Process() {

 int padding_needed = padding_budget_->bytes_remaining();
 if (padding_needed > 0) {
-SendPadding(padding_needed);
+SendPadding(static_cast<size_t>(padding_needed));
 }
 }
 return 0;
@@ -377,9 +377,9 @@ bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
 return success;
 }

-void PacedSender::SendPadding(int padding_needed) {
+void PacedSender::SendPadding(size_t padding_needed) {
 critsect_->Leave();
-int bytes_sent = callback_->TimeToSendPadding(padding_needed);
+size_t bytes_sent = callback_->TimeToSendPadding(padding_needed);
 critsect_->Enter();

 // Update padding bytes sent.
@@ -32,7 +32,7 @@ class MockPacedSenderCallback : public PacedSender::Callback {
 int64_t capture_time_ms,
 bool retransmission));
 MOCK_METHOD1(TimeToSendPadding,
-int(int bytes));
+size_t(size_t bytes));
 };

 class PacedSenderPadding : public PacedSender::Callback {
@@ -46,17 +46,17 @@ class PacedSenderPadding : public PacedSender::Callback {
 return true;
 }

-int TimeToSendPadding(int bytes) {
-const int kPaddingPacketSize = 224;
-int num_packets = (bytes + kPaddingPacketSize - 1) / kPaddingPacketSize;
+size_t TimeToSendPadding(size_t bytes) {
+const size_t kPaddingPacketSize = 224;
+size_t num_packets = (bytes + kPaddingPacketSize - 1) / kPaddingPacketSize;
 padding_sent_ += kPaddingPacketSize * num_packets;
 return kPaddingPacketSize * num_packets;
 }

-int padding_sent() { return padding_sent_; }
+size_t padding_sent() { return padding_sent_; }

 private:
-int padding_sent_;
+size_t padding_sent_;
 };

 class PacedSenderProbing : public PacedSender::Callback {
@@ -84,7 +84,7 @@ class PacedSenderProbing : public PacedSender::Callback {
 return true;
 }

-int TimeToSendPadding(int bytes) {
+size_t TimeToSendPadding(size_t bytes) {
 EXPECT_TRUE(false);
 return bytes;
 }
@@ -114,7 +114,7 @@ class PacedSenderTest : public ::testing::Test {
 uint32_t ssrc,
 uint16_t sequence_number,
 int64_t capture_time_ms,
-int size,
+size_t size,
 bool retransmission) {
 EXPECT_FALSE(send_bucket_->SendPacket(priority, ssrc,
 sequence_number, capture_time_ms, size, retransmission));
@@ -421,9 +421,9 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
 send_bucket_->UpdateBitrate(
 kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
 int64_t start_time = clock_.TimeInMilliseconds();
-int media_bytes = 0;
+size_t media_bytes = 0;
 while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
-int media_payload = rand() % 100 + 200; // [200, 300] bytes.
+size_t media_payload = rand() % 100 + 200; // [200, 300] bytes.
 EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
 sequence_number++, capture_time_ms,
 media_payload, false));
@@ -431,8 +431,9 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
 clock_.AdvanceTimeMilliseconds(kTimeStep);
 send_bucket_->Process();
 }
-EXPECT_NEAR(kTargetBitrate, 8 * (media_bytes + callback.padding_sent()) /
-kBitrateWindow, 1);
+EXPECT_NEAR(kTargetBitrate,
+static_cast<int>(8 * (media_bytes + callback.padding_sent()) /
+kBitrateWindow), 1);
 }

 TEST_F(PacedSenderTest, Priority) {
@@ -642,19 +643,20 @@ TEST_F(PacedSenderTest, ResendPacket) {
 TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
 uint32_t ssrc = 12346;
 uint16_t sequence_number = 1234;
-const int32_t kNumPackets = 60;
-const int32_t kPacketSize = 1200;
+const size_t kNumPackets = 60;
+const size_t kPacketSize = 1200;
 const int32_t kMaxBitrate = kPaceMultiplier * 30;
 EXPECT_EQ(0, send_bucket_->ExpectedQueueTimeMs());

 send_bucket_->UpdateBitrate(30, kMaxBitrate, 0);
-for (int i = 0; i < kNumPackets; ++i) {
+for (size_t i = 0; i < kNumPackets; ++i) {
 SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
 clock_.TimeInMilliseconds(), kPacketSize, false);
 }

 // Queue in ms = 1000 * (bytes in queue) / (kbit per second * 1000 / 8)
-int64_t queue_in_ms = kNumPackets * kPacketSize * 8 / kMaxBitrate;
+int64_t queue_in_ms =
+static_cast<int64_t>(kNumPackets * kPacketSize * 8 / kMaxBitrate);
 EXPECT_EQ(queue_in_ms, send_bucket_->ExpectedQueueTimeMs());

 int64_t time_start = clock_.TimeInMilliseconds();
@@ -672,7 +674,8 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {

 // Allow for aliasing, duration should be in [expected(n - 1), expected(n)].
 EXPECT_LE(duration, queue_in_ms);
-EXPECT_GE(duration, queue_in_ms - (kPacketSize * 8 / kMaxBitrate));
+EXPECT_GE(duration,
+queue_in_ms - static_cast<int64_t>(kPacketSize * 8 / kMaxBitrate));
 }

 TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
@@ -713,7 +716,7 @@ class ProbingPacedSender : public PacedSender {
 TEST_F(PacedSenderTest, ProbingWithInitialFrame) {
 const int kNumPackets = 11;
 const int kNumDeltas = kNumPackets - 1;
-const int kPacketSize = 1200;
+const size_t kPacketSize = 1200;
 const int kInitialBitrateKbps = 300;
 uint32_t ssrc = 12346;
 uint16_t sequence_number = 1234;
@@ -749,7 +752,7 @@ TEST_F(PacedSenderTest, ProbingWithInitialFrame) {
 TEST_F(PacedSenderTest, PriorityInversion) {
 uint32_t ssrc = 12346;
 uint16_t sequence_number = 1234;
-const int32_t kPacketSize = 1200;
+const size_t kPacketSize = 1200;

 EXPECT_FALSE(send_bucket_->SendPacket(
 PacedSender::kHighPriority, ssrc, sequence_number + 3,
@@ -797,7 +800,7 @@ TEST_F(PacedSenderTest, PriorityInversion) {
 TEST_F(PacedSenderTest, PaddingOveruse) {
 uint32_t ssrc = 12346;
 uint16_t sequence_number = 1234;
-const int32_t kPacketSize = 1200;
+const size_t kPacketSize = 1200;

 // Min bitrate 0 => no padding, padding budget will stay at 0.
 send_bucket_->UpdateBitrate(60, 90, 0);
@ -73,7 +73,7 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
|
|||||||
// packet size excluding headers.
|
// packet size excluding headers.
|
||||||
// Note that |arrival_time_ms| can be of an arbitrary time base.
|
// Note that |arrival_time_ms| can be of an arbitrary time base.
|
||||||
virtual void IncomingPacket(int64_t arrival_time_ms,
|
virtual void IncomingPacket(int64_t arrival_time_ms,
|
||||||
int payload_size,
|
size_t payload_size,
|
||||||
const RTPHeader& header) = 0;
|
const RTPHeader& header) = 0;
|
||||||
|
|
||||||
// Removes all data for |ssrc|.
|
// Removes all data for |ssrc|.
|
||||||
|
@ -46,7 +46,7 @@ OveruseDetector::~OveruseDetector() {
|
|||||||
ts_delta_hist_.clear();
|
ts_delta_hist_.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
void OveruseDetector::Update(uint16_t packet_size,
|
void OveruseDetector::Update(size_t packet_size,
|
||||||
int64_t timestamp_ms,
|
int64_t timestamp_ms,
|
||||||
uint32_t timestamp,
|
uint32_t timestamp,
|
||||||
const int64_t arrival_time_ms) {
|
const int64_t arrival_time_ms) {
|
||||||
@ -157,8 +157,8 @@ double OveruseDetector::CurrentDrift() {
|
|||||||
|
|
||||||
void OveruseDetector::UpdateKalman(int64_t t_delta,
|
void OveruseDetector::UpdateKalman(int64_t t_delta,
|
||||||
double ts_delta,
|
double ts_delta,
|
||||||
uint32_t frame_size,
|
size_t frame_size,
|
||||||
uint32_t prev_frame_size) {
|
size_t prev_frame_size) {
|
||||||
const double min_frame_period = UpdateMinFramePeriod(ts_delta);
|
const double min_frame_period = UpdateMinFramePeriod(ts_delta);
|
||||||
const double drift = CurrentDrift();
|
const double drift = CurrentDrift();
|
||||||
// Compensate for drift
|
// Compensate for drift
|
||||||
|
@ -25,7 +25,7 @@ class OveruseDetector {
|
|||||||
public:
|
public:
|
||||||
explicit OveruseDetector(const OverUseDetectorOptions& options);
|
explicit OveruseDetector(const OverUseDetectorOptions& options);
|
||||||
~OveruseDetector();
|
~OveruseDetector();
|
||||||
void Update(uint16_t packet_size,
|
void Update(size_t packet_size,
|
||||||
int64_t timestamp_ms,
|
int64_t timestamp_ms,
|
||||||
uint32_t rtp_timestamp,
|
uint32_t rtp_timestamp,
|
||||||
int64_t arrival_time_ms);
|
int64_t arrival_time_ms);
|
||||||
@ -41,7 +41,7 @@ class OveruseDetector {
|
|||||||
timestamp(-1),
|
timestamp(-1),
|
||||||
timestamp_ms(-1) {}
|
timestamp_ms(-1) {}
|
||||||
|
|
||||||
uint32_t size;
|
size_t size;
|
||||||
int64_t complete_time_ms;
|
int64_t complete_time_ms;
|
||||||
int64_t timestamp;
|
int64_t timestamp;
|
||||||
int64_t timestamp_ms;
|
int64_t timestamp_ms;
|
||||||
@ -63,8 +63,8 @@ class OveruseDetector {
|
|||||||
double* ts_delta);
|
double* ts_delta);
|
||||||
void UpdateKalman(int64_t t_delta,
|
void UpdateKalman(int64_t t_delta,
|
||||||
double ts_elta,
|
double ts_elta,
|
||||||
uint32_t frame_size,
|
size_t frame_size,
|
||||||
uint32_t prev_frame_size);
|
size_t prev_frame_size);
|
||||||
double UpdateMinFramePeriod(double ts_delta);
|
double UpdateMinFramePeriod(double ts_delta);
|
||||||
void UpdateNoiseEstimate(double residual, double ts_delta, bool stable_state);
|
void UpdateNoiseEstimate(double residual, double ts_delta, bool stable_state);
|
||||||
BandwidthUsage Detect(double ts_delta);
|
BandwidthUsage Detect(double ts_delta);
|
||||||
|
@ -16,7 +16,7 @@ namespace webrtc {
|
|||||||
|
|
||||||
RateStatistics::RateStatistics(uint32_t window_size_ms, float scale)
|
RateStatistics::RateStatistics(uint32_t window_size_ms, float scale)
|
||||||
: num_buckets_(window_size_ms + 1), // N ms in (N+1) buckets.
|
: num_buckets_(window_size_ms + 1), // N ms in (N+1) buckets.
|
||||||
buckets_(new uint32_t[num_buckets_]()),
|
buckets_(new size_t[num_buckets_]()),
|
||||||
accumulated_count_(0),
|
accumulated_count_(0),
|
||||||
oldest_time_(0),
|
oldest_time_(0),
|
||||||
oldest_index_(0),
|
oldest_index_(0),
|
||||||
@ -35,7 +35,7 @@ void RateStatistics::Reset() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void RateStatistics::Update(uint32_t count, int64_t now_ms) {
|
void RateStatistics::Update(size_t count, int64_t now_ms) {
|
||||||
if (now_ms < oldest_time_) {
|
if (now_ms < oldest_time_) {
|
||||||
// Too old data is ignored.
|
// Too old data is ignored.
|
||||||
return;
|
return;
|
||||||
@ -65,7 +65,7 @@ void RateStatistics::EraseOld(int64_t now_ms) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
while (oldest_time_ < new_oldest_time) {
|
while (oldest_time_ < new_oldest_time) {
|
||||||
uint32_t count_in_oldest_bucket = buckets_[oldest_index_];
|
size_t count_in_oldest_bucket = buckets_[oldest_index_];
|
||||||
assert(accumulated_count_ >= count_in_oldest_bucket);
|
assert(accumulated_count_ >= count_in_oldest_bucket);
|
||||||
accumulated_count_ -= count_in_oldest_bucket;
|
accumulated_count_ -= count_in_oldest_bucket;
|
||||||
buckets_[oldest_index_] = 0;
|
buckets_[oldest_index_] = 0;
|
||||||
|
@ -25,7 +25,7 @@ class RateStatistics {
|
|||||||
~RateStatistics();
|
~RateStatistics();
|
||||||
|
|
||||||
void Reset();
|
void Reset();
|
||||||
void Update(uint32_t count, int64_t now_ms);
|
void Update(size_t count, int64_t now_ms);
|
||||||
uint32_t Rate(int64_t now_ms);
|
uint32_t Rate(int64_t now_ms);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
@ -34,10 +34,10 @@ class RateStatistics {
|
|||||||
// Counters are kept in buckets (circular buffer), with one bucket
|
// Counters are kept in buckets (circular buffer), with one bucket
|
||||||
// per millisecond.
|
// per millisecond.
|
||||||
const int num_buckets_;
|
const int num_buckets_;
|
||||||
scoped_ptr<uint32_t[]> buckets_;
|
scoped_ptr<size_t[]> buckets_;
|
||||||
|
|
||||||
// Total count recorded in buckets.
|
// Total count recorded in buckets.
|
||||||
uint32_t accumulated_count_;
|
size_t accumulated_count_;
|
||||||
|
|
||||||
// Oldest time recorded in buckets.
|
// Oldest time recorded in buckets.
|
||||||
int64_t oldest_time_;
|
int64_t oldest_time_;
|
||||||
|
@ -35,7 +35,7 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
   // remote bitrate estimate will be updated. Note that |payload_size| is the
   // packet size excluding headers.
   virtual void IncomingPacket(int64_t arrival_time_ms,
-                              int payload_size,
+                              size_t payload_size,
                               const RTPHeader& header) OVERRIDE;

   // Triggers a new estimate calculation.
@ -107,7 +107,7 @@ RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream(

 void RemoteBitrateEstimatorSingleStream::IncomingPacket(
     int64_t arrival_time_ms,
-    int payload_size,
+    size_t payload_size,
     const RTPHeader& header) {
   uint32_t ssrc = header.ssrc;
   uint32_t rtp_timestamp = header.timestamp +
@ -133,7 +133,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
   const BandwidthUsage prior_state = overuse_detector->State();
   overuse_detector->Update(payload_size, -1, rtp_timestamp, arrival_time_ms);
   if (overuse_detector->State() == kBwOverusing) {
-    unsigned int incoming_bitrate = incoming_bitrate_.Rate(now_ms);
+    uint32_t incoming_bitrate = incoming_bitrate_.Rate(now_ms);
     if (prior_state != kBwOverusing ||
         remote_rate_.TimeToReduceFurther(now_ms, incoming_bitrate)) {
       // The first overuse should immediately trigger a new estimate.
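The comment above is explicit that payload_size excludes the RTP header, and later in this diff the command-line tool passes packet.length - header.headerLength directly now that the parameter is size_t. With unsigned lengths that subtraction should only happen once the packet is known to be at least header-sized; a tiny, hypothetical helper (not WebRTC code) makes the guard explicit:

    #include <cstddef>

    // Hypothetical helper: derive the RTP payload size (excluding the header)
    // from the wire length, guarding against unsigned wrap-around.
    static bool RtpPayloadSize(size_t packet_length,
                               size_t header_length,
                               size_t* payload_size) {
      if (packet_length < header_length)
        return false;  // Truncated or malformed packet.
      *payload_size = packet_length - header_length;
      return true;
    }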
@ -14,7 +14,8 @@

 namespace webrtc {

-enum { kMtu = 1200, kAcceptedBitrateErrorBps = 50000u };
+const size_t kMtu = 1200;
+const unsigned int kAcceptedBitrateErrorBps = 50000;

 namespace testing {

@ -54,11 +55,11 @@ int64_t RtpStream::GenerateFrame(int64_t time_now_us, PacketList* packets) {
     return next_rtp_time_;
   }
   assert(packets != NULL);
-  int bits_per_frame = (bitrate_bps_ + fps_ / 2) / fps_;
-  int n_packets = std::max((bits_per_frame + 4 * kMtu) / (8 * kMtu), 1);
-  int packet_size = (bits_per_frame + 4 * n_packets) / (8 * n_packets);
-  assert(n_packets >= 0);
-  for (int i = 0; i < n_packets; ++i) {
+  size_t bits_per_frame = (bitrate_bps_ + fps_ / 2) / fps_;
+  size_t n_packets =
+      std::max<size_t>((bits_per_frame + 4 * kMtu) / (8 * kMtu), 1u);
+  size_t packet_size = (bits_per_frame + 4 * n_packets) / (8 * n_packets);
+  for (size_t i = 0; i < n_packets; ++i) {
     RtpPacket* packet = new RtpPacket;
     packet->send_time = time_now_us + kSendSideOffsetUs;
     packet->size = packet_size;
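The rewritten GenerateFrame() above splits each frame's bit budget across packets with rounded integer division against kMtu. A worked example with hypothetical stream parameters (192 kbps at 30 fps, and kMtu = 1200 as defined earlier in this file) makes the rounding visible; the static_asserts just restate the arithmetic of the lines above:

    #include <cstddef>

    // Hypothetical parameters; kMtuExample mirrors the kMtu = 1200 above.
    const size_t kMtuExample = 1200;
    const size_t kBitrateBps = 192000;
    const size_t kFps = 30;

    // (192000 + 15) / 30 = 6400 bits per frame (round to nearest).
    const size_t kBitsPerFrame = (kBitrateBps + kFps / 2) / kFps;
    // (6400 + 4800) / 9600 = 1, clamped to at least one packet per frame.
    const size_t kRawPackets =
        (kBitsPerFrame + 4 * kMtuExample) / (8 * kMtuExample);
    const size_t kNumPackets = kRawPackets > 1u ? kRawPackets : 1u;
    // (6400 + 4) / 8 = 800 simulated payload bytes for that single packet.
    const size_t kPacketSize =
        (kBitsPerFrame + 4 * kNumPackets) / (8 * kNumPackets);

    static_assert(kBitsPerFrame == 6400, "rounded bits per frame");
    static_assert(kNumPackets == 1, "one packet fits the MTU budget");
    static_assert(kPacketSize == 800, "800-byte simulated payload");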
@ -217,7 +218,7 @@ uint32_t RemoteBitrateEstimatorTest::AddAbsSendTime(uint32_t t1, uint32_t t2) {
 const unsigned int RemoteBitrateEstimatorTest::kDefaultSsrc = 1;

 void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
-                                                uint32_t payload_size,
+                                                size_t payload_size,
                                                 int64_t arrival_time,
                                                 uint32_t rtp_timestamp,
                                                 uint32_t absolute_send_time) {
@ -49,7 +49,7 @@ class RtpStream {
     int64_t send_time;
     int64_t arrival_time;
     uint32_t rtp_timestamp;
-    unsigned int size;
+    size_t size;
     unsigned int ssrc;
   };

@ -165,7 +165,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
   // estimator (all other fields are cleared) and call IncomingPacket on the
   // estimator.
   void IncomingPacket(uint32_t ssrc,
-                      uint32_t payload_size,
+                      size_t payload_size,
                       int64_t arrival_time,
                       uint32_t rtp_timestamp,
                       uint32_t absolute_send_time);
@ -127,7 +127,7 @@ Packet::Packet()
   memset(&header_, 0, sizeof(header_));
 }

-Packet::Packet(int flow_id, int64_t send_time_us, uint32_t payload_size,
+Packet::Packet(int flow_id, int64_t send_time_us, size_t payload_size,
                const RTPHeader& header)
     : flow_id_(flow_id),
       creation_time_us_(send_time_us),
@ -785,7 +785,7 @@ bool PacedVideoSender::TimeToSendPacket(uint32_t ssrc,
   return false;
 }

-int PacedVideoSender::TimeToSendPadding(int bytes) {
+size_t PacedVideoSender::TimeToSendPadding(size_t bytes) {
   return 0;
 }
 } // namespace bwe
@ -153,7 +153,7 @@ class Random {
 class Packet {
  public:
   Packet();
-  Packet(int flow_id, int64_t send_time_us, uint32_t payload_size,
+  Packet(int flow_id, int64_t send_time_us, size_t payload_size,
          const RTPHeader& header);
   Packet(int64_t send_time_us, uint32_t sequence_number);

@ -164,14 +164,14 @@ class Packet {
   void set_send_time_us(int64_t send_time_us);
   int64_t send_time_us() const { return send_time_us_; }
   void SetAbsSendTimeMs(int64_t abs_send_time_ms);
-  uint32_t payload_size() const { return payload_size_; }
+  size_t payload_size() const { return payload_size_; }
   const RTPHeader& header() const { return header_; }

  private:
   int flow_id_;
   int64_t creation_time_us_; // Time when the packet was created.
   int64_t send_time_us_; // Time the packet left last processor touching it.
-  uint32_t payload_size_; // Size of the (non-existent, simulated) payload.
+  size_t payload_size_; // Size of the (non-existent, simulated) payload.
   RTPHeader header_; // Actual contents.
 };

@ -474,7 +474,7 @@ class PacedVideoSender : public PacketSender, public PacedSender::Callback {
                                 uint16_t sequence_number,
                                 int64_t capture_time_ms,
                                 bool retransmission) OVERRIDE;
-  virtual int TimeToSendPadding(int bytes) OVERRIDE;
+  virtual size_t TimeToSendPadding(size_t bytes) OVERRIDE;

  private:
   class ProbingPacedSender : public PacedSender {
@ -83,7 +83,7 @@ int main(int argc, char** argv) {
     if (header.extension.transmissionTimeOffset != 0)
       ++non_zero_ts_offsets;
     rbe->IncomingPacket(clock.TimeInMilliseconds(),
-                        static_cast<int>(packet.length - header.headerLength),
+                        packet.length - header.headerLength,
                         header);
     ++packet_counter;
   }
@ -24,7 +24,7 @@ class FecReceiver {

   virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
                                        const uint8_t* incoming_rtp_packet,
-                                       int packet_length,
+                                       size_t packet_length,
                                        uint8_t ulpfec_payload_type) = 0;

   virtual int32_t ProcessReceivedFec() = 0;
@ -26,7 +26,7 @@ class StreamStatistician {
   virtual ~StreamStatistician();

   virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0;
-  virtual void GetDataCounters(uint32_t* bytes_received,
+  virtual void GetDataCounters(size_t* bytes_received,
                                uint32_t* packets_received) const = 0;
   virtual uint32_t BitrateReceived() const = 0;

@ -83,7 +83,7 @@ class RTPPayloadRegistry {

   bool RestoreOriginalPacket(uint8_t** restored_packet,
                              const uint8_t* packet,
-                             int* packet_length,
+                             size_t* packet_length,
                              uint32_t original_ssrc,
                              const RTPHeader& header) const;

@ -72,7 +72,7 @@ class RtpReceiver {
   // detected and acted upon.
   virtual bool IncomingRtpPacket(const RTPHeader& rtp_header,
                                  const uint8_t* payload,
-                                 int payload_length,
+                                 size_t payload_length,
                                  PayloadUnion payload_specific,
                                  bool in_order) = 0;

@ -83,7 +83,7 @@ class RtpRtcp : public Module {
   ***************************************************************************/

   virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
-                                     uint16_t incoming_packet_length) = 0;
+                                     size_t incoming_packet_length) = 0;

   virtual void SetRemoteSSRC(const uint32_t ssrc) = 0;

@ -328,7 +328,7 @@ class RtpRtcp : public Module {
       const uint32_t timeStamp,
       int64_t capture_time_ms,
       const uint8_t* payloadData,
-      const uint32_t payloadSize,
+      const size_t payloadSize,
       const RTPFragmentationHeader* fragmentation = NULL,
       const RTPVideoHeader* rtpVideoHdr = NULL) = 0;

@ -337,7 +337,7 @@ class RtpRtcp : public Module {
       int64_t capture_time_ms,
       bool retransmission) = 0;

-  virtual int TimeToSendPadding(int bytes) = 0;
+  virtual size_t TimeToSendPadding(size_t bytes) = 0;

   virtual bool GetSendSideDelay(int* avg_send_delay_ms,
                                 int* max_send_delay_ms) const = 0;
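TimeToSendPadding() now takes and returns size_t. As I read the pacer contract (an assumption worth flagging: the return value is the number of padding bytes actually sent, which the pacer uses to update its budget), a module that declines to pad simply returns 0, as the PacedVideoSender stub earlier in this diff does. A hypothetical implementation that caps padding at a configured maximum:

    #include <algorithm>
    #include <cstddef>

    // Hypothetical padding callback, not WebRTC code; assumes the contract
    // described above (return the number of padding bytes actually sent).
    class PaddingSender {
     public:
      explicit PaddingSender(size_t max_padding_bytes)
          : max_padding_bytes_(max_padding_bytes) {}

      size_t TimeToSendPadding(size_t bytes) {
        const size_t to_send = std::min(bytes, max_padding_bytes_);
        // A real module would build and send RTP padding packets here.
        return to_send;
      }

     private:
      const size_t max_padding_bytes_;
    };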
@ -465,7 +465,7 @@ class RtpRtcp : public Module {
   * return -1 on failure else 0
   */
   virtual int32_t DataCountersRTP(
-      uint32_t* bytesSent,
+      size_t* bytesSent,
       uint32_t* packetsSent) const = 0;
   /*
   * Get received RTCP sender info
@ -143,7 +143,7 @@ enum RtxMode {
                          // instead of padding.
 };

-const int kRtxHeaderSize = 2;
+const size_t kRtxHeaderSize = 2;

 struct RTCPSenderInfo
 {
@ -220,11 +220,11 @@ public:

   virtual int32_t OnReceivedPayloadData(
       const uint8_t* payloadData,
-      const uint16_t payloadSize,
+      const size_t payloadSize,
       const WebRtcRTPHeader* rtpHeader) = 0;

   virtual bool OnRecoveredPacket(const uint8_t* packet,
-                                 int packet_length) = 0;
+                                 size_t packet_length) = 0;
 };

 class RtpFeedback
@ -334,13 +334,13 @@ class NullRtpData : public RtpData {

   virtual int32_t OnReceivedPayloadData(
       const uint8_t* payloadData,
-      const uint16_t payloadSize,
+      const size_t payloadSize,
       const WebRtcRTPHeader* rtpHeader) OVERRIDE {
     return 0;
   }

   virtual bool OnRecoveredPacket(const uint8_t* packet,
-                                 int packet_length) OVERRIDE {
+                                 size_t packet_length) OVERRIDE {
     return true;
   }
 };
@ -23,11 +23,11 @@ class MockRtpData : public RtpData {
  public:
   MOCK_METHOD3(OnReceivedPayloadData,
       int32_t(const uint8_t* payloadData,
-              const uint16_t payloadSize,
+              const size_t payloadSize,
               const WebRtcRTPHeader* rtpHeader));

   MOCK_METHOD2(OnRecoveredPacket,
-      bool(const uint8_t* packet, int packet_length));
+      bool(const uint8_t* packet, size_t packet_length));
 };

 class MockRtpRtcp : public RtpRtcp {
@ -47,7 +47,7 @@ class MockRtpRtcp : public RtpRtcp {
   MOCK_METHOD0(DeRegisterSyncModule,
       int32_t());
   MOCK_METHOD2(IncomingRtcpPacket,
-      int32_t(const uint8_t* incomingPacket, uint16_t packetLength));
+      int32_t(const uint8_t* incomingPacket, size_t packetLength));
   MOCK_METHOD1(SetRemoteSSRC, void(const uint32_t ssrc));
   MOCK_METHOD4(IncomingAudioNTP,
       int32_t(const uint32_t audioReceivedNTPsecs,
@ -126,14 +126,14 @@ class MockRtpRtcp : public RtpRtcp {
       const uint32_t timeStamp,
       int64_t capture_time_ms,
       const uint8_t* payloadData,
-      const uint32_t payloadSize,
+      const size_t payloadSize,
       const RTPFragmentationHeader* fragmentation,
       const RTPVideoHeader* rtpVideoHdr));
   MOCK_METHOD4(TimeToSendPacket,
       bool(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms,
            bool retransmission));
   MOCK_METHOD1(TimeToSendPadding,
-      int(int bytes));
+      size_t(size_t bytes));
   MOCK_CONST_METHOD2(GetSendSideDelay,
       bool(int* avg_send_delay_ms, int* max_send_delay_ms));
   MOCK_METHOD2(RegisterRtcpObservers,
@ -172,7 +172,7 @@ class MockRtpRtcp : public RtpRtcp {
   MOCK_METHOD0(ResetSendDataCountersRTP,
       int32_t());
   MOCK_CONST_METHOD2(DataCountersRTP,
-      int32_t(uint32_t *bytesSent, uint32_t *packetsSent));
+      int32_t(size_t *bytesSent, uint32_t *packetsSent));
   MOCK_METHOD1(RemoteRTCPStat,
       int32_t(RTCPSenderInfo* senderInfo));
   MOCK_CONST_METHOD1(RemoteRTCPStat,
@ -32,7 +32,7 @@ Bitrate::Bitrate(Clock* clock, Observer* observer)

 Bitrate::~Bitrate() {}

-void Bitrate::Update(const int32_t bytes) {
+void Bitrate::Update(const size_t bytes) {
   CriticalSectionScoped cs(crit_.get());
   bytes_count_ += bytes;
   packet_count_++;
@ -35,7 +35,7 @@ class Bitrate {
   void Process();

   // Update with a packet.
-  void Update(const int32_t bytes);
+  void Update(const size_t bytes);

   // Packet rate last second, updated roughly every 100 ms.
   uint32_t PacketRate() const;
@ -68,7 +68,7 @@ class Bitrate {
   int64_t bitrate_array_[10];
   int64_t bitrate_diff_ms_[10];
   int64_t time_last_rate_update_;
-  uint32_t bytes_count_;
+  size_t bytes_count_;
   uint32_t packet_count_;
   Observer* const observer_;
 };
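Bitrate::Update() now accumulates bytes_count_ as a size_t while the reported rates stay uint32_t bits per second. One detail worth a sketch (a hypothetical helper, not the Bitrate class itself): on targets where size_t is 32 bits, converting bytes to bits per second can overflow unless the arithmetic is widened first.

    #include <cstddef>
    #include <cstdint>

    // Hypothetical conversion helper: bytes counted over |interval_ms|
    // expressed as bits per second. The uint64_t cast avoids overflow when
    // size_t is 32 bits and bytes * 8 * 1000 exceeds 2^32.
    static uint32_t BytesToBitsPerSecond(size_t bytes, int64_t interval_ms) {
      if (interval_ms <= 0)
        return 0;
      const uint64_t bits = static_cast<uint64_t>(bytes) * 8;
      return static_cast<uint32_t>(bits * 1000 /
                                   static_cast<uint64_t>(interval_ms));
    }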
@ -71,10 +71,10 @@ FecReceiverImpl::~FecReceiverImpl() {

 int32_t FecReceiverImpl::AddReceivedRedPacket(
     const RTPHeader& header, const uint8_t* incoming_rtp_packet,
-    int packet_length, uint8_t ulpfec_payload_type) {
+    size_t packet_length, uint8_t ulpfec_payload_type) {
   CriticalSectionScoped cs(crit_sect_.get());
   uint8_t REDHeaderLength = 1;
-  uint16_t payload_data_length = packet_length - header.headerLength;
+  size_t payload_data_length = packet_length - header.headerLength;

   // Add to list without RED header, aka a virtual RTP packet
   // we remove the RED header
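AddReceivedRedPacket() now computes payload_data_length in size_t, and the comment notes that the RED header is removed to form a "virtual" RTP packet. For a single-block RED payload (RFC 2198) that header is one byte: the F bit (zero for the final or only block) plus the 7-bit payload type of the encapsulated media. A minimal, hypothetical sketch of that stripping step, independent of FecReceiverImpl:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical helper: given an RTP packet carrying a single-block RED
    // payload, report the encapsulated payload type, offset and length.
    // Returns false on malformed or truncated input.
    static bool StripSingleBlockRed(const uint8_t* packet,
                                    size_t packet_length,
                                    size_t rtp_header_length,
                                    uint8_t* media_payload_type,
                                    size_t* media_offset,
                                    size_t* media_length) {
      const size_t kRedHeaderLength = 1;  // Final-block RED header is one byte.
      if (packet == NULL ||
          packet_length < rtp_header_length + kRedHeaderLength)
        return false;
      const uint8_t red_header = packet[rtp_header_length];
      if (red_header & 0x80)
        return false;  // F bit set: multi-block RED, not handled here.
      *media_payload_type = red_header & 0x7f;
      *media_offset = rtp_header_length + kRedHeaderLength;
      *media_length = packet_length - rtp_header_length - kRedHeaderLength;
      return true;
    }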
@ -30,7 +30,7 @@ class FecReceiverImpl : public FecReceiver {

   virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
                                        const uint8_t* incoming_rtp_packet,
-                                       int packet_length,
+                                       size_t packet_length,
                                        uint8_t ulpfec_payload_type) OVERRIDE;

   virtual int32_t ProcessReceivedFec() OVERRIDE;
@ -44,7 +44,7 @@ RtpPacket* FrameGenerator::NextPacket(int offset, size_t length) {

 // Creates a new RtpPacket with the RED header added to the packet.
 RtpPacket* FrameGenerator::BuildMediaRedPacket(const RtpPacket* packet) {
-  const int kHeaderLength = packet->header.header.headerLength;
+  const size_t kHeaderLength = packet->header.header.headerLength;
   RtpPacket* red_packet = new RtpPacket;
   red_packet->header = packet->header;
   red_packet->length = packet->length + 1; // 1 byte RED header.
@ -65,7 +65,7 @@ RtpPacket* FrameGenerator::BuildFecRedPacket(const Packet* packet) {
   ++num_packets_;
   RtpPacket* red_packet = NextPacket(0, packet->length + 1);
   red_packet->data[1] &= ~0x80; // Clear marker bit.
-  const int kHeaderLength = red_packet->header.header.headerLength;
+  const size_t kHeaderLength = red_packet->header.header.headerLength;
   SetRedHeader(red_packet, kFecPayloadType, kHeaderLength);
   memcpy(red_packet->data + kHeaderLength + 1, packet->data, packet->length);
   red_packet->length = kHeaderLength + 1 + packet->length;
@ -73,7 +73,7 @@ RtpPacket* FrameGenerator::BuildFecRedPacket(const Packet* packet) {
 }

 void FrameGenerator::SetRedHeader(Packet* red_packet, uint8_t payload_type,
-                                  int header_length) const {
+                                  size_t header_length) const {
   // Replace pltype.
   red_packet->data[1] &= 0x80; // Reset.
   red_packet->data[1] += kRedPayloadType; // Replace.
Some files were not shown because too many files have changed in this diff.