Replace scoped_array<T> with scoped_ptr<T[]>.

scoped_array is deprecated. The conversion was done with a Chromium clang tool:
http://src.chromium.org/viewvc/chrome/trunk/src/tools/clang/rewrite_scoped_ar...

The few files that are not built on Linux were updated manually.
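
The rewrite is mechanical. For illustration only (hypothetical names and sizes, not part of the commit):

  // Before (deprecated):
  scoped_array<float> buffer(new float[size]);

  // After:
  scoped_ptr<float[]> buffer(new float[size]);
  buffer[0] = 1.0f;           // operator[] works on both
  float* raw = buffer.get();  // so does get()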

TESTED=trybots
BUG=2515
R=niklas.enbom@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/12429004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5985 4adac7df-926f-26a2-2b94-8c16560cd09d
andrew@webrtc.org 2014-04-25 23:10:28 +00:00
parent 658a94595d
commit 8f69330310
52 changed files with 86 additions and 166 deletions

View File

@ -62,7 +62,7 @@ TEST(SincResamplerTest, ChunkedResample) {
static const int kChunks = 2;
int max_chunk_size = resampler.ChunkSize() * kChunks;
scoped_array<float> resampled_destination(new float[max_chunk_size]);
scoped_ptr<float[]> resampled_destination(new float[max_chunk_size]);
// Verify requesting ChunkSize() frames causes a single callback.
EXPECT_CALL(mock_source, Run(_, _))
@ -81,7 +81,7 @@ TEST(SincResamplerTest, Flush) {
MockSource mock_source;
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
&mock_source);
scoped_array<float> resampled_destination(new float[resampler.ChunkSize()]);
scoped_ptr<float[]> resampled_destination(new float[resampler.ChunkSize()]);
// Fill the resampler with junk data.
EXPECT_CALL(mock_source, Run(_, _))
@ -266,7 +266,7 @@ TEST_P(SincResamplerTest, Resample) {
// Force an update to the sample rate ratio to ensure dynamic sample rate
// changes are working correctly.
scoped_array<float> kernel(new float[SincResampler::kKernelStorageSize]);
scoped_ptr<float[]> kernel(new float[SincResampler::kKernelStorageSize]);
memcpy(kernel.get(), resampler.get_kernel_for_testing(),
SincResampler::kKernelStorageSize);
resampler.SetRatio(M_PI);
@ -278,8 +278,8 @@ TEST_P(SincResamplerTest, Resample) {
// TODO(dalecurtis): If we switch to AVX/SSE optimization, we'll need to
// allocate these on 32-byte boundaries and ensure they're sized % 32 bytes.
scoped_array<float> resampled_destination(new float[output_samples]);
scoped_array<float> pure_destination(new float[output_samples]);
scoped_ptr<float[]> resampled_destination(new float[output_samples]);
scoped_ptr<float[]> pure_destination(new float[output_samples]);
// Generate resampled signal.
resampler.Resample(output_samples, resampled_destination.get());

View File

@ -84,7 +84,7 @@ class TestLibYuv : public ::testing::Test {
FILE* source_file_;
I420VideoFrame orig_frame_;
scoped_array<uint8_t> orig_buffer_;
scoped_ptr<uint8_t[]> orig_buffer_;
const int width_;
const int height_;
const int size_y_;
@ -147,7 +147,7 @@ TEST_F(TestLibYuv, ConvertTest) {
(width_ + 1) / 2,
(width_ + 1) / 2));
printf("\nConvert #%d I420 <-> I420 \n", j);
scoped_array<uint8_t> out_i420_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
@ -162,7 +162,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> RGB24\n", j);
scoped_array<uint8_t> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]);
scoped_ptr<uint8_t[]> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]);
// Align the stride values for the output frame.
int stride_y = 0;
int stride_uv = 0;
@ -184,7 +184,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> UYVY\n", j);
scoped_array<uint8_t> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
scoped_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
@ -196,8 +196,8 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> YV12\n", j);
scoped_array<uint8_t> outYV120Buffer(new uint8_t[frame_length_]);
scoped_array<uint8_t> res_i420_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> outYV120Buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> res_i420_buffer(new uint8_t[frame_length_]);
I420VideoFrame yv12_frame;
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYV12, 0, outYV120Buffer.get()));
yv12_frame.CreateFrame(size_y_, outYV120Buffer.get(),
@ -218,7 +218,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> YUY2\n", j);
scoped_array<uint8_t> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
scoped_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
@ -231,7 +231,7 @@ TEST_F(TestLibYuv, ConvertTest) {
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
scoped_array<uint8_t> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]);
scoped_ptr<uint8_t[]> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
out_rgb565_buffer.get()));
@ -250,7 +250,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_GT(ceil(psnr), 40);
printf("\nConvert #%d I420 <-> ARGB8888\n", j);
scoped_array<uint8_t> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]);
scoped_ptr<uint8_t[]> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
out_argb8888_buffer.get()));
@ -283,7 +283,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_,
stride_y, stride_uv, stride_uv));
scoped_array<uint8_t> out_i420_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,

View File

@ -99,7 +99,7 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
kI420, kI420,
kScalePoint));
I420VideoFrame test_frame2;
scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> orig_buffer(new uint8_t[frame_length_]);
EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U);
test_frame_.CreateFrame(size_y_, orig_buffer.get(),
size_uv_, orig_buffer.get() + size_y_,
@ -442,7 +442,7 @@ void TestScaler::ScaleSequence(ScaleMethod method,
total_clock = 0;
int frame_count = 0;
int src_required_size = CalcBufferSize(kI420, src_width, src_height);
scoped_array<uint8_t> frame_buffer(new uint8_t[src_required_size]);
scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[src_required_size]);
int size_y = src_width * src_height;
int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2);

View File

@ -22,7 +22,7 @@ FakeAudioDeviceBuffer::FakeAudioDeviceBuffer()
next_available_buffer_(0),
record_channels_(0),
play_channels_(0) {
buf_.reset(new scoped_array<int8_t>[kNumBuffers]);
buf_.reset(new scoped_ptr<int8_t[]>[kNumBuffers]);
for (int i = 0; i < kNumBuffers; ++i) {
buf_[i].reset(new int8_t[buffer_size_bytes()]);
}

View File

@ -55,7 +55,7 @@ class FakeAudioDeviceBuffer : public AudioDeviceBuffer {
AudioManagerJni audio_manager_;
SingleRwFifo fifo_;
scoped_array<scoped_array<int8_t> > buf_;
scoped_ptr<scoped_ptr<int8_t[]>[]> buf_;
int next_available_buffer_;
uint8_t record_channels_;
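
As an aside on the two nested-buffer hunks above (a minimal sketch, not from the commit; kNumBuffers and buffer_size_bytes() stand in for the real members): scoped_ptr<scoped_ptr<int8_t[]>[]> is an owned array of owned arrays, so the outer reset() allocates the slots and each slot then owns its own buffer.

  scoped_ptr<scoped_ptr<int8_t[]>[]> buf;
  buf.reset(new scoped_ptr<int8_t[]>[kNumBuffers]);  // outer array of smart pointers
  for (int i = 0; i < kNumBuffers; ++i) {
    buf[i].reset(new int8_t[buffer_size_bytes()]);   // each inner array owned separately
  }
  // All levels are released with delete[] when |buf| goes out of scope.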

View File

@ -398,7 +398,7 @@ TEST(NackTest, ChangeOfListSizeAppliedAndOldElementsRemoved) {
// Packet lost more than NACK-list size limit.
uint16_t num_lost_packets = kNackThreshold + kNackListSize + 5;
scoped_array<uint16_t> seq_num_lost(new uint16_t[num_lost_packets]);
scoped_ptr<uint16_t[]> seq_num_lost(new uint16_t[num_lost_packets]);
for (int n = 0; n < num_lost_packets; ++n) {
seq_num_lost[n] = ++seq_num;
}

View File

@ -126,7 +126,7 @@ class BackgroundNoise {
int32_t residual_energy);
size_t num_channels_;
scoped_array<ChannelParameters> channel_parameters_;
scoped_ptr<ChannelParameters[]> channel_parameters_;
bool initialized_;
NetEqBackgroundNoiseMode mode_;

View File

@ -167,7 +167,7 @@ class Expand {
int lag_index_direction_;
int current_lag_index_;
bool stop_muting_;
scoped_array<ChannelParameters> channel_parameters_;
scoped_ptr<ChannelParameters[]> channel_parameters_;
DISALLOW_COPY_AND_ASSIGN(Expand);
};

View File

@ -77,7 +77,6 @@ NetEqImpl::NetEqImpl(int fs,
accelerate_factory_(accelerate_factory),
preemptive_expand_factory_(preemptive_expand_factory),
last_mode_(kModeNormal),
mute_factor_array_(NULL),
decoded_buffer_length_(kMaxFrameSize),
decoded_buffer_(new int16_t[decoded_buffer_length_]),
playout_timestamp_(0),
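
Worth noting in this hunk, and in the AudioLoop one below (an inference from the diff, not stated in the commit message): scoped_ptr<T[]> default-constructs to null, so the explicit NULL constructor-initializers for these members become redundant and were dropped. A hypothetical sketch:

  class Example {
   public:
    Example() {}  // no initializer needed: mute_factor_array_ starts out null
   private:
    scoped_ptr<int16_t[]> mute_factor_array_;
  };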

View File

@ -371,9 +371,9 @@ class NetEqImpl : public webrtc::NetEq {
int output_size_samples_ GUARDED_BY(crit_sect_);
int decoder_frame_length_ GUARDED_BY(crit_sect_);
Modes last_mode_ GUARDED_BY(crit_sect_);
scoped_array<int16_t> mute_factor_array_ GUARDED_BY(crit_sect_);
scoped_ptr<int16_t[]> mute_factor_array_ GUARDED_BY(crit_sect_);
size_t decoded_buffer_length_ GUARDED_BY(crit_sect_);
scoped_array<int16_t> decoded_buffer_ GUARDED_BY(crit_sect_);
scoped_ptr<int16_t[]> decoded_buffer_ GUARDED_BY(crit_sect_);
uint32_t playout_timestamp_ GUARDED_BY(crit_sect_);
bool new_codec_ GUARDED_BY(crit_sect_);
uint32_t timestamp_ GUARDED_BY(crit_sect_);

View File

@ -29,7 +29,7 @@ TimeStretch::ReturnCodes TimeStretch::Process(
int fs_mult_120 = fs_mult_ * 120; // Corresponds to 15 ms.
const int16_t* signal;
scoped_array<int16_t> signal_array;
scoped_ptr<int16_t[]> signal_array;
size_t signal_len;
if (num_channels_ == 1) {
signal = input;

View File

@ -27,8 +27,7 @@ class AudioLoop {
AudioLoop()
: next_index_(0),
loop_length_samples_(0),
block_length_samples_(0),
audio_array_(NULL) {
block_length_samples_(0) {
}
virtual ~AudioLoop() {}
@ -50,7 +49,7 @@ class AudioLoop {
size_t next_index_;
size_t loop_length_samples_;
size_t block_length_samples_;
scoped_array<int16_t> audio_array_;
scoped_ptr<int16_t[]> audio_array_;
DISALLOW_COPY_AND_ASSIGN(AudioLoop);
};

View File

@ -56,7 +56,7 @@ class FineAudioBuffer {
int bytes_per_10_ms_;
// Storage for samples that are not yet asked for.
scoped_array<int8_t> cache_buffer_;
scoped_ptr<int8_t[]> cache_buffer_;
int cached_buffer_start_; // Location of first unread sample.
int cached_bytes_; // Number of bytes stored in cache.
};

View File

@ -80,7 +80,7 @@ void RunFineBufferTest(int sample_rate, int frame_size_in_samples) {
FineAudioBuffer fine_buffer(&audio_device_buffer, kFrameSizeBytes,
sample_rate);
scoped_array<int8_t> out_buffer;
scoped_ptr<int8_t[]> out_buffer;
out_buffer.reset(
new int8_t[fine_buffer.RequiredBufferSizeBytes()]);
for (int i = 0; i < kNumberOfFrames; ++i) {

View File

@ -289,7 +289,7 @@ void OpenSlesInput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
rec_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
for (int i = 0; i < TotalBuffersUsed(); ++i) {
rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
}

View File

@ -205,7 +205,7 @@ class OpenSlesInput {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
// Holds all allocated memory such that it is deallocated properly.
scoped_array<scoped_array<int8_t> > rec_buf_;
scoped_ptr<scoped_ptr<int8_t[]>[]> rec_buf_;
// Index in |rec_buf_| pointing to the audio buffer that will be ready the
// next time RecorderSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer contains audio data from the device.

View File

@ -340,7 +340,7 @@ void OpenSlesOutput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
play_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
play_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
int required_buffer_size = fine_buffer_->RequiredBufferSizeBytes();
for (int i = 0; i < TotalBuffersUsed(); ++i) {
play_buf_[i].reset(new int8_t[required_buffer_size]);

View File

@ -223,7 +223,7 @@ class OpenSlesOutput : public PlayoutDelayProvider {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
scoped_ptr<FineAudioBuffer> fine_buffer_;
scoped_array<scoped_array<int8_t> > play_buf_;
scoped_ptr<scoped_ptr<int8_t[]>[]> play_buf_;
// Index in |play_buf_| pointing to the audio buffer that will be ready the
// next time PlayerSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer is ready to be played out to device.

View File

@ -35,7 +35,7 @@ class SingleRwFifo {
int capacity() const { return capacity_; }
private:
scoped_array<int8_t*> queue_;
scoped_ptr<int8_t*[]> queue_;
int capacity_;
Atomic32 size_;
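
A subtlety in the SingleRwFifo hunk above (an observational sketch with hypothetical names, not from the commit): scoped_ptr<int8_t*[]> owns only the array of raw pointers, not the buffers those pointers reference. The test fixture below keeps buffer_[kCapacity] around precisely so the pointees get deallocated by their real owner.

  scoped_ptr<int8_t*[]> queue(new int8_t*[capacity]);  // owns the pointer slots
  queue[0] = externally_owned_buffer;                  // pointee is NOT deleted by |queue|
  // Destruction runs delete[] on the int8_t* array only; the buffers it
  // points to must be freed elsewhere.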

View File

@ -90,7 +90,7 @@ class SingleRwFifoTest : public testing::Test {
protected:
SingleRwFifo fifo_;
// Memory area for proper de-allocation.
scoped_array<int8_t> buffer_[kCapacity];
scoped_ptr<int8_t[]> buffer_[kCapacity];
std::list<int8_t*> memory_queue_;
int pushed_;

View File

@ -518,7 +518,7 @@ void void_main(int argc, char* argv[]) {
const size_t path_size =
apm->echo_control_mobile()->echo_path_size_bytes();
scoped_array<char> echo_path(new char[path_size]);
scoped_ptr<char[]> echo_path(new char[path_size]);
ASSERT_EQ(path_size, fread(echo_path.get(),
sizeof(char),
path_size,
@ -1004,7 +1004,7 @@ void void_main(int argc, char* argv[]) {
if (aecm_echo_path_out_file != NULL) {
const size_t path_size =
apm->echo_control_mobile()->echo_path_size_bytes();
scoped_array<char> echo_path(new char[path_size]);
scoped_ptr<char[]> echo_path(new char[path_size]);
apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
ASSERT_EQ(path_size, fwrite(echo_path.get(),
sizeof(char),

View File

@ -61,8 +61,8 @@ static void RandomStressTest(int** data_ptr) {
srand(seed);
for (int i = 0; i < kNumTests; i++) {
const int buffer_size = std::max(rand() % kMaxBufferSize, 1);
scoped_array<int> write_data(new int[buffer_size]);
scoped_array<int> read_data(new int[buffer_size]);
scoped_ptr<int[]> write_data(new int[buffer_size]);
scoped_ptr<int[]> read_data(new int[buffer_size]);
scoped_ring_buffer buffer(WebRtc_CreateBuffer(buffer_size, sizeof(int)));
ASSERT_TRUE(buffer.get() != NULL);
ASSERT_EQ(0, WebRtc_InitBuffer(buffer.get()));

View File

@ -76,7 +76,7 @@ class Differ {
int bytes_per_row_;
// Diff information for each block in the image.
scoped_array<DiffInfo> diff_info_;
scoped_ptr<DiffInfo[]> diff_info_;
// Dimensions and total size of diff info array.
int diff_info_width_;

View File

@ -200,8 +200,8 @@ class DifferTest : public testing::Test {
int buffer_size_;
// Previous and current screen buffers.
scoped_array<uint8_t> prev_;
scoped_array<uint8_t> curr_;
scoped_ptr<uint8_t[]> prev_;
scoped_ptr<uint8_t[]> curr_;
private:
DISALLOW_COPY_AND_ASSIGN(DifferTest);

View File

@ -137,7 +137,7 @@ MouseCursor* CreateMouseCursorFromHCursor(HDC dc, HCURSOR cursor) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
scoped_array<uint32_t> mask_data(new uint32_t[width * height]);
scoped_ptr<uint32_t[]> mask_data(new uint32_t[width * height]);
// Get pixel data from |scoped_mask| converting it to 32bpp along the way.
// GetDIBits() sets the alpha component of every pixel to 0.

View File

@ -62,7 +62,7 @@ bool ConvertToMouseShapeAndCompare(unsigned left, unsigned right) {
// Get the pixels from |scoped_color|.
int size = width * height;
scoped_array<uint32_t> data(new uint32_t[size]);
scoped_ptr<uint32_t[]> data(new uint32_t[size]);
EXPECT_TRUE(GetBitmapBits(scoped_color, size * sizeof(uint32_t), data.get()));
// Compare the 32bpp image in |mouse_shape| with the one loaded from |right|.

View File

@ -34,7 +34,7 @@ class RateStatistics {
// Counters are kept in buckets (circular buffer), with one bucket
// per millisecond.
const int num_buckets_;
scoped_array<uint32_t> buckets_;
scoped_ptr<uint32_t[]> buckets_;
// Total count recorded in buckets.
uint32_t accumulated_count_;

View File

@ -198,7 +198,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int RecoveredMediaPackets(int num_media_packets,
int num_fec_packets,
uint8_t* state) {
scoped_array<uint8_t> state_tmp(
scoped_ptr<uint8_t[]> state_tmp(
new uint8_t[num_media_packets + num_fec_packets]);
memcpy(state_tmp.get(), state, num_media_packets + num_fec_packets);
int num_recovered_packets = 0;
@ -392,7 +392,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
// (which contains the code size parameters/protection length).
void ComputeMetricsForCode(CodeType code_type,
int code_index) {
scoped_array<double> prob_weight(new double[kNumLossModels]);
scoped_ptr<double[]> prob_weight(new double[kNumLossModels]);
memset(prob_weight.get() , 0, sizeof(double) * kNumLossModels);
MetricsFecCode metrics_code;
SetMetricsZero(&metrics_code);
@ -400,7 +400,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int num_media_packets = code_params_[code_index].num_media_packets;
int num_fec_packets = code_params_[code_index].num_fec_packets;
int tot_num_packets = num_media_packets + num_fec_packets;
scoped_array<uint8_t> state(new uint8_t[tot_num_packets]);
scoped_ptr<uint8_t[]> state(new uint8_t[tot_num_packets]);
memset(state.get() , 0, tot_num_packets);
int num_loss_configurations = static_cast<int>(pow(2.0f, tot_num_packets));

View File

@ -473,7 +473,7 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@ -558,7 +558,7 @@ TEST_F(VideoCaptureExternalTest , FrameRate) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@ -574,7 +574,7 @@ TEST_F(VideoCaptureExternalTest , FrameRate) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@ -592,7 +592,7 @@ TEST_F(VideoCaptureExternalTest, Rotation) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));

View File

@ -331,7 +331,7 @@ void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
}
// TODO(mikhal): Extracting the buffer for now - need to update test.
int length = CalcBufferSize(kI420, up_image.width(), up_image.height());
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> image_buffer(new uint8_t[length]);
length = ExtractBuffer(up_image, length, image_buffer.get());
// Update our copy of the last successful frame:
memcpy(last_successful_frame_buffer_, image_buffer.get(), length);
@ -344,7 +344,7 @@ void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
// Update our copy of the last successful frame:
// TODO(mikhal): Add as a member function, so won't be allocated per frame.
int length = CalcBufferSize(kI420, image.width(), image.height());
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> image_buffer(new uint8_t[length]);
length = ExtractBuffer(image, length, image_buffer.get());
assert(length > 0);
memcpy(last_successful_frame_buffer_, image_buffer.get(), length);

View File

@ -565,7 +565,7 @@ UnitTest::Perform()
frameLength = WaitForDecodedFrame();
}
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame,
@ -645,7 +645,7 @@ UnitTest::Perform()
// check that decoded frame matches with reference
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length,
_refDecFrame, _lengthSourceFrame) == true);

View File

@ -181,7 +181,7 @@ class TestVp8Impl : public ::testing::Test {
scoped_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_;
scoped_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
scoped_array<uint8_t> source_buffer_;
scoped_ptr<uint8_t[]> source_buffer_;
FILE* source_file_;
I420VideoFrame input_frame_;
scoped_ptr<VideoEncoder> encoder_;

View File

@ -142,7 +142,7 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::I420VideoFrame input_frame;
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
webrtc::scoped_array<uint8_t> frame_buffer(new uint8_t[length]);
webrtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
int half_width = (width + 1) / 2;
// Set and register callbacks.

View File

@ -61,7 +61,7 @@ class RawRtpPacket {
uint16_t seq_num() const { return seq_num_; }
private:
scoped_array<uint8_t> data_;
scoped_ptr<uint8_t[]> data_;
uint32_t length_;
int64_t resend_time_ms_;
uint32_t ssrc_;

View File

@ -19,7 +19,7 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
uint32_t frameNum = 0;
int32_t brightnessWarning = 0;
uint32_t warningCount = 0;
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_)
{

View File

@ -39,7 +39,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
uint32_t frameNum = 0;
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_)
{
@ -86,7 +86,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
width_, half_width_, half_width_);
// Compare frame-by-frame.
scoped_array<uint8_t> ref_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> ref_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, modFile) ==
frame_length_)
{
@ -114,7 +114,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
// Verify that all color pixels are enhanced, and no luminance values are
// altered.
scoped_array<uint8_t> testFrame(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> testFrame(new uint8_t[frame_length_]);
// Use value 128 as probe value, since we know that this will be changed
// in the enhancement.

View File

@ -23,7 +23,7 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis) {
ca__c.Initialize(width_,height_);
ca__sse.Initialize(width_,height_);
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_)
== frame_length_) {
// Using ConvertToI420 to add stride to the image.

View File

@ -43,7 +43,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
"Could not open output file: " << output_file << "\n";
printf("\nRun time [us / frame]:\n");
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++)
{
TickTime t0;

View File

@ -49,7 +49,7 @@ TEST_F(VideoProcessingModuleTest, DISABLED_ON_ANDROID(Denoising))
int32_t modifiedPixels = 0;
frameNum = 0;
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_)
{

View File

@ -89,7 +89,7 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer) {
TEST_F(VideoProcessingModuleTest, HandleBadStats) {
VideoProcessingModule::FrameStats stats;
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
@ -129,7 +129,7 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
I420VideoFrame video_frame2;
VideoProcessingModule::FrameStats stats;
// Only testing non-static functions here.
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
@ -172,7 +172,7 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
TEST_F(VideoProcessingModuleTest, FrameStats) {
VideoProcessingModule::FrameStats stats;
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
@ -242,7 +242,7 @@ TEST_F(VideoProcessingModuleTest, Resampler) {
vpm_->EnableTemporalDecimation(false);
// Reading test frame
scoped_array<uint8_t> video_buffer(new uint8_t[frame_length_]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
// Using ConvertToI420 to add stride to the image.

View File

@ -563,82 +563,4 @@ bool operator!=(T* p1, const webrtc::scoped_ptr<T, D>& p2) {
return p1 != p2.get();
}
namespace webrtc {
// DEPRECATED: Use scoped_ptr<T[]> instead.
// TODO(ajm): Remove scoped_array.
//
// scoped_array extends scoped_ptr to arrays. Deletion of the array pointed to
// is guaranteed, either on destruction of the scoped_array or via an explicit
// reset(). Use shared_array or std::vector if your needs are more complex.
template<typename T>
class scoped_array {
private:
T* ptr;
scoped_array(scoped_array const &);
scoped_array & operator=(scoped_array const &);
public:
typedef T element_type;
explicit scoped_array(T* p = NULL) : ptr(p) {}
~scoped_array() {
typedef char type_must_be_complete[sizeof(T)];
delete[] ptr;
}
void reset(T* p = NULL) {
typedef char type_must_be_complete[sizeof(T)];
if (ptr != p) {
T* arr = ptr;
ptr = p;
// Delete last, in case arr destructor indirectly results in ~scoped_array
delete [] arr;
}
}
T& operator[](ptrdiff_t i) const {
assert(ptr != NULL);
assert(i >= 0);
return ptr[i];
}
T* get() const {
return ptr;
}
void swap(scoped_array & b) {
T* tmp = b.ptr;
b.ptr = ptr;
ptr = tmp;
}
T* release() {
T* tmp = ptr;
ptr = NULL;
return tmp;
}
T** accept() {
if (ptr) {
delete [] ptr;
ptr = NULL;
}
return &ptr;
}
};
template<class T> inline
void swap(scoped_array<T>& a, scoped_array<T>& b) {
a.swap(b);
}
} // namespace webrtc
#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
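
For reference (a hedged sketch, not part of the diff, assuming WebRTC's scoped_ptr<T[]> follows the Chromium array specialization): the removed scoped_array API maps onto scoped_ptr<T[]> almost one-to-one, which is what made the clang rewrite mechanical; only accept() lacks a direct counterpart.

  scoped_ptr<int16_t[]> a(new int16_t[64]);  // was: scoped_array<int16_t> a(...)
  a[0] = 1;                                  // operator[], as before
  int16_t* p = a.get();                      // get(), as before
  a.reset(new int16_t[128]);                 // reset(): delete[] old, own new
  int16_t* owned = a.release();              // release(): caller takes ownership
  delete[] owned;
  // scoped_array::accept() (delete, then expose &ptr) has no scoped_ptr<T[]>
  // equivalent; any call sites using it would need rewriting by hand.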

View File

@ -47,7 +47,7 @@ class FakeNetworkPipeTest : public ::testing::Test {
}
void SendPackets(FakeNetworkPipe* pipe, int number_packets, int kPacketSize) {
scoped_array<uint8_t> packet(new uint8_t[kPacketSize]);
scoped_ptr<uint8_t[]> packet(new uint8_t[kPacketSize]);
for (int i = 0; i < number_packets; ++i) {
pipe->SendPacket(packet.get(), kPacketSize);
}

View File

@ -111,8 +111,8 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
const size_t frame_length = 3 * width * height >> 1;
I420VideoFrame ref_frame;
I420VideoFrame test_frame;
scoped_array<uint8_t> ref_buffer(new uint8_t[frame_length]);
scoped_array<uint8_t> test_buffer(new uint8_t[frame_length]);
scoped_ptr<uint8_t[]> ref_buffer(new uint8_t[frame_length]);
scoped_ptr<uint8_t[]> test_buffer(new uint8_t[frame_length]);
// Set decoded image parameters.
int half_width = (width + 1) / 2;

View File

@ -38,7 +38,7 @@ int EditFrames(const string& in_path, int width, int height,
// Frame size of I420.
int frame_length = CalcBufferSize(kI420, width, height);
webrtc::scoped_array<uint8_t> temp_buffer(new uint8_t[frame_length]);
webrtc::scoped_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);
FILE* out_fid = fopen(out_path.c_str(), "wb");

View File

@ -53,8 +53,8 @@ class FrameEditingTest : public ::testing::Test {
}
// Compares the frames in both streams to the end of one of the streams.
void CompareToTheEnd(FILE* test_video_fid, FILE* ref_video_fid,
scoped_array<int>* ref_buffer,
scoped_array<int>* test_buffer) {
scoped_ptr<int[]>* ref_buffer,
scoped_ptr<int[]>* test_buffer) {
while (!feof(test_video_fid) && !feof(ref_video_fid)) {
num_bytes_read_ = fread(ref_buffer->get(), 1, kFrameSize, ref_video_fid);
if (!feof(ref_video_fid)) {
@ -78,8 +78,8 @@ class FrameEditingTest : public ::testing::Test {
FILE* original_fid_;
FILE* edited_fid_;
int num_bytes_read_;
scoped_array<int> original_buffer_;
scoped_array<int> edited_buffer_;
scoped_ptr<int[]> original_buffer_;
scoped_ptr<int[]> edited_buffer_;
int num_frames_read_;
};

View File

@ -30,7 +30,7 @@ struct Frame {
memcpy(this->buffer.get(), buffer, buffer_size);
}
webrtc::scoped_array<unsigned char> buffer;
webrtc::scoped_ptr<unsigned char[]> buffer;
int buffer_size;
uint32_t timestamp;
int64_t render_time;

View File

@ -535,7 +535,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
unsigned int length = CalcBufferSize(kI420,
video_frame->width(),
video_frame->height());
scoped_array<uint8_t> video_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(*video_frame, length, video_buffer.get());
effect_filter_->Transform(length,
video_buffer.get(),

View File

@ -1401,7 +1401,7 @@ int32_t ViEChannel::FrameToRender(
unsigned int length = CalcBufferSize(kI420,
video_frame.width(),
video_frame.height());
scoped_array<uint8_t> video_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(video_frame, length, video_buffer.get());
effect_filter_->Transform(length,
video_buffer.get(),

View File

@ -503,7 +503,7 @@ void ViEEncoder::DeliverFrame(int id,
unsigned int length = CalcBufferSize(kI420,
video_frame->width(),
video_frame->height());
scoped_array<uint8_t> video_buffer(new uint8_t[length]);
scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(*video_frame, length, video_buffer.get());
effect_filter_->Transform(length,
video_buffer.get(),

View File

@ -4279,7 +4279,7 @@ Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
int32_t
Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
{
scoped_array<int16_t> fileBuffer(new int16_t[640]);
scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{
@ -4349,7 +4349,7 @@ Channel::MixAudioWithFile(AudioFrame& audioFrame,
{
assert(mixingFrequency <= 32000);
scoped_array<int16_t> fileBuffer(new int16_t[640]);
scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{

View File

@ -53,7 +53,7 @@ class FakeVoEExternalMedia : public VoEExternalMedia {
int samples_per_channel, int sample_rate_hz,
int num_channels) {
const int length = samples_per_channel * num_channels;
scoped_array<int16_t> data;
scoped_ptr<int16_t[]> data;
if (!audio) {
data.reset(new int16_t[length]);
memset(data.get(), 0, length * sizeof(data[0]));

View File

@ -1217,7 +1217,7 @@ int32_t TransmitMixer::RecordAudioToFile(
int32_t TransmitMixer::MixOrReplaceAudioWithFile(
int mixingFrequency)
{
scoped_array<int16_t> fileBuffer(new int16_t[640]);
scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{