Convert webrtc/video/ abort/assert to CHECK/DCHECK.

Also replace NULL with nullptr. CHECK/DCHECK give nicer error messages
than assert, and nullptr keeps the style consistent.

BUG=1756
R=magjed@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/42879004

Cr-Commit-Position: refs/heads/master@{#8831}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8831 4adac7df-926f-26a2-2b94-8c16560cd09d
pbos@webrtc.org 2015-03-23 13:12:24 +00:00
parent 41d2befe9f
commit 2b4ce3a501
13 changed files with 195 additions and 225 deletions
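
The checking macros used throughout this diff come from "webrtc/base/checks.h".
As a minimal sketch of the conversion pattern (RegisterEncoder() below is a
hypothetical stand-in, not code from this CL): bare asserts become DCHECKs that
log the failed condition with file and line, comparisons move to the
value-printing _EQ/_GE/_LE variants, "call-or-abort()" sequences become
CHECK_EQ (which stays enabled in release builds), and unreachable branches
become RTC_NOTREACHED().

#include "webrtc/base/checks.h"

namespace {

// Hypothetical helper used only to illustrate the CHECK_EQ pattern.
int RegisterEncoder(const void* encoder, int payload_type) { return 0; }

void ConfigureEncoder(const void* encoder, int payload_type) {
  // Before: assert(encoder != NULL);  (no message, compiled out in release)
  // After: DCHECK logs the failed condition together with file and line.
  DCHECK(encoder != nullptr);

  // The comparison variants also print both operand values on failure, which
  // is what makes these messages nicer than a bare assert.
  DCHECK_GE(payload_type, 0);
  DCHECK_LE(payload_type, 127);

  // Before: if (RegisterEncoder(encoder, payload_type) != 0) abort();
  // After: CHECK_EQ keeps the crash-on-failure behavior in all build
  // configurations, but with a readable message.
  CHECK_EQ(0, RegisterEncoder(encoder, payload_type));
}

void OnUnsupportedCodec() {
  // Before: assert(false);  After: states intent and can carry a reason.
  RTC_NOTREACHED() << "Codec not supported.";
}

}  // namespace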

View File

@ -13,6 +13,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
@ -48,7 +49,7 @@ class TraceObserver {
}
~TraceObserver() {
Trace::SetTraceCallback(NULL);
Trace::SetTraceCallback(nullptr);
Trace::ReturnTrace();
}
@ -143,7 +144,7 @@ class BitrateEstimatorTest : public test::CallTest {
send_config_ = VideoSendStream::Config();
send_config_.rtp.ssrcs.push_back(kSendSsrcs[0]);
// Encoders will be set separately per stream.
send_config_.encoder_settings.encoder = NULL;
send_config_.encoder_settings.encoder = nullptr;
send_config_.encoder_settings.payload_name = "FAKE";
send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
encoder_config_.streams = test::CreateVideoStreams(1);
@ -181,8 +182,8 @@ class BitrateEstimatorTest : public test::CallTest {
explicit Stream(BitrateEstimatorTest* test)
: test_(test),
is_sending_receiving_(false),
send_stream_(NULL),
receive_stream_(NULL),
send_stream_(nullptr),
receive_stream_(nullptr),
frame_generator_capturer_(),
fake_encoder_(Clock::GetRealTimeClock()),
fake_decoder_() {
@ -190,7 +191,7 @@ class BitrateEstimatorTest : public test::CallTest {
test_->send_config_.encoder_settings.encoder = &fake_encoder_;
send_stream_ = test_->sender_call_->CreateVideoSendStream(
test_->send_config_, test_->encoder_config_);
assert(test_->encoder_config_.streams.size() == 1);
DCHECK_EQ(1u, test_->encoder_config_.streams.size());
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
send_stream_->Input(),
test_->encoder_config_.streams[0].width,
@ -215,11 +216,11 @@ class BitrateEstimatorTest : public test::CallTest {
}
~Stream() {
frame_generator_capturer_.reset(NULL);
frame_generator_capturer_.reset(nullptr);
test_->sender_call_->DestroyVideoSendStream(send_stream_);
send_stream_ = NULL;
send_stream_ = nullptr;
test_->receiver_call_->DestroyVideoReceiveStream(receive_stream_);
receive_stream_ = NULL;
receive_stream_ = nullptr;
}
void StopSending() {

View File

@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include <string.h>
#include <map>
#include <vector>
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
@ -56,8 +56,8 @@ VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
case kVp9:
return VP9Encoder::Create();
}
assert(false);
return NULL;
RTC_NOTREACHED();
return nullptr;
}
VideoDecoder* VideoDecoder::Create(VideoDecoder::DecoderType codec_type) {
@ -67,8 +67,8 @@ VideoDecoder* VideoDecoder::Create(VideoDecoder::DecoderType codec_type) {
case kVp9:
return VP9Decoder::Create();
}
assert(false);
return NULL;
RTC_NOTREACHED();
return nullptr;
}
const int Call::Config::kDefaultStartBitrateBps = 300000;
@ -80,7 +80,7 @@ class CpuOveruseObserverProxy : public webrtc::CpuOveruseObserver {
explicit CpuOveruseObserverProxy(LoadObserver* overuse_callback)
: crit_(CriticalSectionWrapper::CreateCriticalSection()),
overuse_callback_(overuse_callback) {
assert(overuse_callback != NULL);
DCHECK(overuse_callback != nullptr);
}
virtual ~CpuOveruseObserverProxy() {}
@ -164,10 +164,10 @@ class Call : public webrtc::Call, public PacketReceiver {
} // namespace internal
Call* Call::Create(const Call::Config& config) {
VideoEngine* video_engine = config.webrtc_config != NULL
VideoEngine* video_engine = config.webrtc_config != nullptr
? VideoEngine::Create(*config.webrtc_config)
: VideoEngine::Create();
assert(video_engine != NULL);
DCHECK(video_engine != nullptr);
return new internal::Call(video_engine, config);
}
@ -183,15 +183,15 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
video_engine_(video_engine),
base_channel_id_(-1),
external_render_(
VideoRender::CreateVideoRender(42, NULL, false, kRenderExternal)) {
assert(video_engine != NULL);
assert(config.send_transport != NULL);
VideoRender::CreateVideoRender(42, nullptr, false, kRenderExternal)) {
DCHECK(video_engine != nullptr);
DCHECK(config.send_transport != nullptr);
assert(config.stream_bitrates.min_bitrate_bps >= 0);
assert(config.stream_bitrates.start_bitrate_bps >=
DCHECK_GE(config.stream_bitrates.min_bitrate_bps, 0);
DCHECK_GE(config.stream_bitrates.start_bitrate_bps,
config.stream_bitrates.min_bitrate_bps);
if (config.stream_bitrates.max_bitrate_bps != -1) {
assert(config.stream_bitrates.max_bitrate_bps >=
DCHECK_GE(config.stream_bitrates.max_bitrate_bps,
config.stream_bitrates.start_bitrate_bps);
}
@ -201,23 +201,23 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
}
render_ = ViERender::GetInterface(video_engine_);
assert(render_ != NULL);
DCHECK(render_ != nullptr);
render_->RegisterVideoRenderModule(*external_render_.get());
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine_);
assert(rtp_rtcp_ != NULL);
DCHECK(rtp_rtcp_ != nullptr);
codec_ = ViECodec::GetInterface(video_engine_);
assert(codec_ != NULL);
DCHECK(codec_ != nullptr);
// As a workaround for non-existing calls in the old API, create a base
// channel used as default channel when creating send and receive streams.
base_ = ViEBase::GetInterface(video_engine_);
assert(base_ != NULL);
DCHECK(base_ != nullptr);
base_->CreateChannel(base_channel_id_);
assert(base_channel_id_ != -1);
DCHECK(base_channel_id_ != -1);
}
Call::~Call() {
@ -239,7 +239,7 @@ VideoSendStream* Call::CreateVideoSendStream(
const VideoEncoderConfig& encoder_config) {
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
LOG(LS_INFO) << "CreateVideoSendStream: " << config.ToString();
assert(config.rtp.ssrcs.size() > 0);
DCHECK(!config.rtp.ssrcs.empty());
// TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if
// the call has already started.
@ -253,7 +253,7 @@ VideoSendStream* Call::CreateVideoSendStream(
CriticalSectionScoped lock(network_enabled_crit_.get());
WriteLockScoped write_lock(*send_crit_);
for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
assert(send_ssrcs_.find(config.rtp.ssrcs[i]) == send_ssrcs_.end());
DCHECK(send_ssrcs_.find(config.rtp.ssrcs[i]) == send_ssrcs_.end());
send_ssrcs_[config.rtp.ssrcs[i]] = send_stream;
}
if (!network_enabled_)
@ -263,11 +263,11 @@ VideoSendStream* Call::CreateVideoSendStream(
void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoSendStream");
assert(send_stream != NULL);
DCHECK(send_stream != nullptr);
send_stream->Stop();
VideoSendStream* send_stream_impl = NULL;
VideoSendStream* send_stream_impl = nullptr;
{
WriteLockScoped write_lock(*send_crit_);
std::map<uint32_t, VideoSendStream*>::iterator it = send_ssrcs_.begin();
@ -289,7 +289,7 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
suspended_send_ssrcs_[it->first] = it->second;
}
assert(send_stream_impl != NULL);
DCHECK(send_stream_impl != nullptr);
delete send_stream_impl;
}
@ -308,7 +308,7 @@ VideoReceiveStream* Call::CreateVideoReceiveStream(
// while changing network state.
CriticalSectionScoped lock(network_enabled_crit_.get());
WriteLockScoped write_lock(*receive_crit_);
assert(receive_ssrcs_.find(config.rtp.remote_ssrc) == receive_ssrcs_.end());
DCHECK(receive_ssrcs_.find(config.rtp.remote_ssrc) == receive_ssrcs_.end());
receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
// TODO(pbos): Configure different RTX payloads per receive payload.
VideoReceiveStream::Config::Rtp::RtxMap::const_iterator it =
@ -324,9 +324,9 @@ VideoReceiveStream* Call::CreateVideoReceiveStream(
void Call::DestroyVideoReceiveStream(
webrtc::VideoReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream");
assert(receive_stream != NULL);
DCHECK(receive_stream != nullptr);
VideoReceiveStream* receive_stream_impl = NULL;
VideoReceiveStream* receive_stream_impl = nullptr;
{
WriteLockScoped write_lock(*receive_crit_);
// Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
@ -335,8 +335,8 @@ void Call::DestroyVideoReceiveStream(
receive_ssrcs_.begin();
while (it != receive_ssrcs_.end()) {
if (it->second == static_cast<VideoReceiveStream*>(receive_stream)) {
assert(receive_stream_impl == NULL ||
receive_stream_impl == it->second);
if (receive_stream_impl != nullptr)
DCHECK(receive_stream_impl == it->second);
receive_stream_impl = it->second;
receive_ssrcs_.erase(it++);
} else {
@ -345,7 +345,7 @@ void Call::DestroyVideoReceiveStream(
}
}
assert(receive_stream_impl != NULL);
DCHECK(receive_stream_impl != nullptr);
delete receive_stream_impl;
}
@ -377,9 +377,9 @@ Call::Stats Call::GetStats() const {
void Call::SetBitrateConfig(
const webrtc::Call::Config::BitrateConfig& bitrate_config) {
TRACE_EVENT0("webrtc", "Call::SetBitrateConfig");
assert(bitrate_config.min_bitrate_bps >= 0);
assert(bitrate_config.max_bitrate_bps == -1 ||
bitrate_config.max_bitrate_bps > 0);
DCHECK_GE(bitrate_config.min_bitrate_bps, 0);
if (bitrate_config.max_bitrate_bps != -1)
DCHECK_GT(bitrate_config.max_bitrate_bps, 0);
if (config_.stream_bitrates.min_bitrate_bps ==
bitrate_config.min_bitrate_bps &&
(bitrate_config.start_bitrate_bps <= 0 ||

View File

@ -7,14 +7,13 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include <algorithm>
#include <sstream>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
@ -226,7 +225,7 @@ void CallPerfTest::TestAudioVideoSync(bool fec) {
ASSERT_STRNE("", audio_filename.c_str());
test::FakeAudioDevice fake_audio_device(Clock::GetRealTimeClock(),
audio_filename);
EXPECT_EQ(0, voe_base->Init(&fake_audio_device, NULL));
EXPECT_EQ(0, voe_base->Init(&fake_audio_device, nullptr));
int channel = voe_base->CreateChannel();
FakeNetworkPipe::Config net_config;
@ -330,7 +329,7 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
start_time_ms_(start_time_ms),
run_time_ms_(run_time_ms),
creation_time_ms_(clock_->TimeInMilliseconds()),
capturer_(NULL),
capturer_(nullptr),
rtp_start_timestamp_set_(false),
rtp_start_timestamp_(0) {}
@ -511,8 +510,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
public:
explicit BitrateObserver(bool using_min_transmit_bitrate)
: EndToEndTest(kLongTimeoutMs),
send_stream_(NULL),
send_transport_receiver_(NULL),
send_stream_(nullptr),
send_transport_receiver_(nullptr),
pad_to_min_bitrate_(using_min_transmit_bitrate),
num_bitrate_observations_in_range_(0) {}
@ -527,7 +526,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
size_t length) override {
VideoSendStream::Stats stats = send_stream_->GetStats();
if (stats.substreams.size() > 0) {
assert(stats.substreams.size() == 1);
DCHECK_EQ(1u, stats.substreams.size());
int bitrate_kbps =
stats.substreams.begin()->second.total_bitrate_bps / 1000;
if (bitrate_kbps > 0) {
@ -571,7 +570,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
if (pad_to_min_bitrate_) {
encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps;
} else {
assert(encoder_config->min_transmit_bitrate_bps == 0);
DCHECK_EQ(0, encoder_config->min_transmit_bitrate_bps);
}
}

View File

@ -10,6 +10,7 @@
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/base/checks.h"
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
namespace webrtc {
@ -25,7 +26,7 @@ int32_t EncodedFrameCallbackAdapter::Encoded(
const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation) {
assert(observer_ != NULL);
DCHECK(observer_ != nullptr);
FrameType frame_type =
VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
const EncodedFrame frame(encodedImage._buffer,

View File

@ -7,8 +7,6 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include <algorithm>
#include <map>
#include <sstream>
@ -16,6 +14,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/frame_callback.h"
@ -52,7 +51,7 @@ class EndToEndTest : public test::CallTest {
EndToEndTest() {}
virtual ~EndToEndTest() {
EXPECT_EQ(NULL, send_stream_);
EXPECT_EQ(nullptr, send_stream_);
EXPECT_TRUE(receive_streams_.empty());
}
@ -1326,8 +1325,8 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
public:
RtcpObserver()
: EndToEndTest(kDefaultTimeoutMs),
sender_call_(NULL),
receiver_call_(NULL),
sender_call_(nullptr),
receiver_call_(nullptr),
has_seen_pacer_delay_(false) {}
DeliveryStatus DeliverPacket(const uint8_t* packet,
@ -1611,7 +1610,7 @@ TEST_F(EndToEndTest, GetStats) {
public:
explicit StatsObserver(const FakeNetworkPipe::Config& config)
: EndToEndTest(kLongTimeoutMs, config),
send_stream_(NULL),
send_stream_(nullptr),
expected_send_ssrcs_(),
check_stats_event_(EventWrapper::Create()) {}
@ -1692,7 +1691,7 @@ TEST_F(EndToEndTest, GetStats) {
}
bool CheckSendStats() {
assert(send_stream_ != NULL);
DCHECK(send_stream_ != nullptr);
VideoSendStream::Stats stats = send_stream_->GetStats();
send_stats_filled_["NumStreams"] |=
@ -1871,7 +1870,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
public:
ReceivedRtpStatsObserver()
: EndToEndTest(kDefaultTimeoutMs),
receive_stream_(NULL),
receive_stream_(nullptr),
sent_rtp_(0) {}
private:
@ -2079,7 +2078,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
CreateCalls(Call::Config(observer.SendTransport()),
Call::Config(observer.ReceiveTransport()));
observer.SetReceivers(sender_call_->Receiver(), NULL);
observer.SetReceivers(sender_call_->Receiver(), nullptr);
CreateSendConfig(kNumSsrcs);

View File

@ -73,7 +73,7 @@ class VideoAnalyzer : public PacketReceiver,
int duration_frames)
: input_(input),
transport_(transport),
receiver_(NULL),
receiver_(nullptr),
test_label_(test_label),
frames_to_process_(duration_frames),
frames_recorded_(0),
@ -95,7 +95,7 @@ class VideoAnalyzer : public PacketReceiver,
// spare cores.
uint32_t num_cores = CpuInfo::DetectNumberOfCores();
assert(num_cores >= 1);
DCHECK_GE(num_cores, 1u);
static const uint32_t kMinCoresLeft = 4;
static const uint32_t kMaxComparisonThreads = 8;
@ -430,7 +430,7 @@ class VideoAnalyzer : public PacketReceiver,
void FullStackTest::RunTest(const FullStackTestParams& params) {
test::DirectTransport send_transport(params.link);
test::DirectTransport recv_transport(params.link);
VideoAnalyzer analyzer(NULL, &send_transport, params.test_label,
VideoAnalyzer analyzer(nullptr, &send_transport, params.test_label,
params.avg_psnr_threshold, params.avg_ssim_threshold,
params.test_durations_secs * params.clip.fps);
@ -505,7 +505,7 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
params.clip.width, params.clip.height, params.clip.fps,
Clock::GetRealTimeClock()));
ASSERT_TRUE(frame_generator_capturer_.get() != NULL)
ASSERT_TRUE(frame_generator_capturer_.get() != nullptr)
<< "Could not create capturer for " << params.clip.name
<< ".yuv. Is this resource file present?";
}

View File

@ -16,6 +16,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
@ -95,7 +96,7 @@ void Loopback::Run() {
encoder.reset(VideoEncoder::Create(VideoEncoder::kVp9));
} else {
// Codec not supported.
assert(false && "Codec not supported!");
RTC_NOTREACHED() << "Codec not supported!";
return;
}
send_config.encoder_settings.encoder = encoder.get();

View File

@ -9,6 +9,7 @@
*/
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/common.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
@ -92,7 +93,7 @@ void StreamObserver::set_start_bitrate_bps(unsigned int start_bitrate_bps) {
void StreamObserver::OnReceiveBitrateChanged(
const std::vector<unsigned int>& ssrcs, unsigned int bitrate) {
CriticalSectionScoped lock(crit_.get());
assert(expected_bitrate_bps_ > 0);
DCHECK_GT(expected_bitrate_bps_, 0u);
if (start_bitrate_bps_ != 0) {
// For tests with an explicitly set start bitrate, verify the first
// bitrate estimate is close to the start bitrate and lower than the
@ -197,7 +198,7 @@ LowRateStreamObserver::LowRateStreamObserver(
feedback_transport_(feedback_transport),
receive_stats_(ReceiveStatistics::Create(clock)),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
send_stream_(NULL),
send_stream_(nullptr),
test_state_(kFirstRampup),
state_start_ms_(clock_->TimeInMilliseconds()),
interval_start_ms_(state_start_ms_),
@ -298,7 +299,7 @@ std::string LowRateStreamObserver::GetModifierString() {
void LowRateStreamObserver::EvolveTestState(unsigned int bitrate_bps) {
int64_t now = clock_->TimeInMilliseconds();
CriticalSectionScoped lock(crit_.get());
assert(send_stream_ != NULL);
DCHECK(send_stream_ != nullptr);
switch (test_state_) {
case kFirstRampup: {
EXPECT_FALSE(suspended_in_stats_);

View File

@ -16,6 +16,7 @@
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@ -140,26 +141,26 @@ class FileRenderPassthrough : public VideoRenderer {
FileRenderPassthrough(const std::string& basename, VideoRenderer* renderer)
: basename_(basename),
renderer_(renderer),
file_(NULL),
file_(nullptr),
count_(0),
last_width_(0),
last_height_(0) {}
~FileRenderPassthrough() {
if (file_ != NULL)
if (file_ != nullptr)
fclose(file_);
}
private:
void RenderFrame(const I420VideoFrame& video_frame,
int time_to_render_ms) override {
if (renderer_ != NULL)
if (renderer_ != nullptr)
renderer_->RenderFrame(video_frame, time_to_render_ms);
if (basename_ == "")
return;
if (last_width_ != video_frame.width() ||
last_height_ != video_frame.height()) {
if (file_ != NULL)
if (file_ != nullptr)
fclose(file_);
std::stringstream filename;
filename << basename_;
@ -168,7 +169,7 @@ class FileRenderPassthrough : public VideoRenderer {
filename << '_' << video_frame.width() << 'x' << video_frame.height()
<< ".yuv";
file_ = fopen(filename.str().c_str(), "wb");
if (file_ == NULL) {
if (file_ == nullptr) {
fprintf(stderr,
"Couldn't open file for writing: %s\n",
filename.str().c_str());
@ -176,7 +177,7 @@ class FileRenderPassthrough : public VideoRenderer {
}
last_width_ = video_frame.width();
last_height_ = video_frame.height();
if (file_ == NULL)
if (file_ == nullptr)
return;
PrintI420VideoFrame(video_frame, file_);
}
@ -195,7 +196,7 @@ class DecoderBitstreamFileWriter : public EncodedFrameObserver {
public:
explicit DecoderBitstreamFileWriter(const char* filename)
: file_(fopen(filename, "wb")) {
assert(file_ != NULL);
DCHECK(file_ != nullptr);
}
~DecoderBitstreamFileWriter() { fclose(file_); }
@ -259,17 +260,17 @@ void RtpReplay() {
rtc::scoped_ptr<test::RtpFileReader> rtp_reader(test::RtpFileReader::Create(
test::RtpFileReader::kRtpDump, flags::InputFile()));
if (rtp_reader.get() == NULL) {
if (rtp_reader.get() == nullptr) {
rtp_reader.reset(test::RtpFileReader::Create(test::RtpFileReader::kPcap,
flags::InputFile()));
if (rtp_reader.get() == NULL) {
if (rtp_reader.get() == nullptr) {
fprintf(stderr,
"Couldn't open input file as either a rtpdump or .pcap. Note "
"that .pcapng is not supported.\nTrying to interpret the file as "
"length/packet interleaved.\n");
rtp_reader.reset(test::RtpFileReader::Create(
test::RtpFileReader::kLengthPacketInterleaved, flags::InputFile()));
if (rtp_reader.get() == NULL) {
if (rtp_reader.get() == nullptr) {
fprintf(stderr,
"Unable to open input file with any supported format\n");
return;

View File

@ -85,7 +85,7 @@ VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry(
std::find(config_.rtp.rtx.ssrcs.begin(),
config_.rtp.rtx.ssrcs.end(),
ssrc) == config_.rtp.rtx.ssrcs.end()) {
return NULL;
return nullptr;
}
return &stats_.substreams[ssrc]; // Insert new entry and return ptr.
@ -100,7 +100,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
const EncodedImage& encoded_image,
const RTPVideoHeader* rtp_video_header) {
size_t simulcast_idx =
rtp_video_header != NULL ? rtp_video_header->simulcastIdx : 0;
rtp_video_header != nullptr ? rtp_video_header->simulcastIdx : 0;
if (simulcast_idx >= config_.rtp.ssrcs.size()) {
LOG(LS_ERROR) << "Encoded image outside simulcast range (" << simulcast_idx
<< " >= " << config_.rtp.ssrcs.size() << ").";
@ -110,7 +110,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->width = encoded_image._encodedWidth;
@ -128,7 +128,7 @@ void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
const RtcpPacketTypeCounter& packet_counter) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->rtcp_packet_type_counts = packet_counter;
@ -138,7 +138,7 @@ void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->rtcp_stats = statistics;
@ -152,7 +152,7 @@ void SendStatisticsProxy::DataCountersUpdated(
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
DCHECK(stats != NULL) << "DataCountersUpdated reported for unknown ssrc: "
DCHECK(stats != nullptr) << "DataCountersUpdated reported for unknown ssrc: "
<< ssrc;
stats->rtp_stats = counters;
@ -163,7 +163,7 @@ void SendStatisticsProxy::Notify(const BitrateStatistics& total_stats,
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->total_bitrate_bps = total_stats.bitrate_bps;
@ -174,7 +174,7 @@ void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts,
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->frame_counts = frame_counts;
@ -185,7 +185,7 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
if (stats == NULL)
if (stats == nullptr)
return;
stats->avg_delay_ms = avg_delay_ms;
stats->max_delay_ms = max_delay_ms;

View File

@ -10,7 +10,6 @@
#include "webrtc/video/video_receive_stream.h"
#include <assert.h>
#include <stdlib.h>
#include <string>
@ -34,7 +33,7 @@
namespace webrtc {
std::string VideoReceiveStream::Decoder::ToString() const {
std::stringstream ss;
ss << "{decoder: " << (decoder != NULL ? "(VideoDecoder)" : "NULL");
ss << "{decoder: " << (decoder != nullptr ? "(VideoDecoder)" : "nullptr");
ss << ", payload_type: " << payload_type;
ss << ", payload_name: " << payload_name;
ss << ", is_renderer: " << (is_renderer ? "yes" : "no");
@ -54,13 +53,13 @@ std::string VideoReceiveStream::Config::ToString() const {
}
ss << ']';
ss << ", rtp: " << rtp.ToString();
ss << ", renderer: " << (renderer != NULL ? "(renderer)" : "NULL");
ss << ", renderer: " << (renderer != nullptr ? "(renderer)" : "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", audio_channel_id: " << audio_channel_id;
ss << ", pre_decode_callback: "
<< (pre_decode_callback != NULL ? "(EncodedFrameObserver)" : "NULL");
<< (pre_decode_callback != nullptr ? "(EncodedFrameObserver)" : "nullptr");
ss << ", pre_render_callback: "
<< (pre_render_callback != NULL ? "(I420FrameCallback)" : "NULL");
<< (pre_render_callback != nullptr ? "(I420FrameCallback)" : "nullptr");
ss << ", target_delay_ms: " << target_delay_ms;
ss << '}';
@ -144,27 +143,27 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
channel_(-1) {
video_engine_base_ = ViEBase::GetInterface(video_engine);
video_engine_base_->CreateReceiveChannel(channel_, base_channel);
assert(channel_ != -1);
DCHECK(channel_ != -1);
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
assert(rtp_rtcp_ != NULL);
DCHECK(rtp_rtcp_ != nullptr);
// TODO(pbos): This is not fine grained enough...
rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0);
rtp_rtcp_->SetKeyFrameRequestMethod(channel_, kViEKeyFrameRequestPliRtcp);
SetRtcpMode(config_.rtp.rtcp_mode);
assert(config_.rtp.remote_ssrc != 0);
DCHECK(config_.rtp.remote_ssrc != 0);
// TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
assert(config_.rtp.local_ssrc != 0);
assert(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
DCHECK(config_.rtp.local_ssrc != 0);
DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.local_ssrc);
// TODO(pbos): Support multiple RTX, per video payload.
Config::Rtp::RtxMap::const_iterator it = config_.rtp.rtx.begin();
if (it != config_.rtp.rtx.end()) {
assert(it->second.ssrc != 0);
assert(it->second.payload_type != 0);
DCHECK(it->second.ssrc != 0);
DCHECK(it->second.payload_type != 0);
rtp_rtcp_->SetRemoteSSRCType(channel_, kViEStreamTypeRtx, it->second.ssrc);
rtp_rtcp_->SetRtxReceivePayloadType(channel_, it->second.payload_type);
@ -176,21 +175,20 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
const std::string& extension = config_.rtp.extensions[i].name;
int id = config_.rtp.extensions[i].id;
if (extension == RtpExtension::kTOffset) {
if (rtp_rtcp_->SetReceiveTimestampOffsetStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0,
rtp_rtcp_->SetReceiveTimestampOffsetStatus(channel_, true, id));
} else if (extension == RtpExtension::kAbsSendTime) {
if (rtp_rtcp_->SetReceiveAbsoluteSendTimeStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0,
rtp_rtcp_->SetReceiveAbsoluteSendTimeStatus(channel_, true, id));
} else if (extension == RtpExtension::kVideoRotation) {
if (rtp_rtcp_->SetReceiveVideoRotationStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0, rtp_rtcp_->SetReceiveVideoRotationStatus(channel_, true, id));
} else {
abort(); // Unsupported extension.
RTC_NOTREACHED() << "Unsupported RTP extension.";
}
}
network_ = ViENetwork::GetInterface(video_engine);
assert(network_ != NULL);
DCHECK(network_ != nullptr);
network_->RegisterSendTransport(channel_, transport_adapter_);
@ -198,16 +196,13 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
if (config_.rtp.fec.ulpfec_payload_type != -1) {
// ULPFEC without RED doesn't make sense.
assert(config_.rtp.fec.red_payload_type != -1);
DCHECK(config_.rtp.fec.red_payload_type != -1);
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.codecType = kVideoCodecULPFEC;
strcpy(codec.plName, "ulpfec");
codec.plType = config_.rtp.fec.ulpfec_payload_type;
if (codec_->SetReceiveCodec(channel_, codec) != 0) {
LOG(LS_ERROR) << "Could not set ULPFEC codec. This shouldn't happen.";
abort();
}
CHECK_EQ(0, codec_->SetReceiveCodec(channel_, codec));
}
if (config_.rtp.fec.red_payload_type != -1) {
VideoCodec codec;
@ -215,59 +210,38 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
codec.codecType = kVideoCodecRED;
strcpy(codec.plName, "red");
codec.plType = config_.rtp.fec.red_payload_type;
if (codec_->SetReceiveCodec(channel_, codec) != 0) {
LOG(LS_ERROR) << "Could not set RED codec. This shouldn't happen.";
abort();
}
CHECK_EQ(0, codec_->SetReceiveCodec(channel_, codec));
}
stats_proxy_.reset(
new ReceiveStatisticsProxy(config_.rtp.remote_ssrc, clock_));
if (rtp_rtcp_->RegisterReceiveChannelRtcpStatisticsCallback(
channel_, stats_proxy_.get()) != 0) {
abort();
}
if (rtp_rtcp_->RegisterReceiveChannelRtpStatisticsCallback(
channel_, stats_proxy_.get()) != 0) {
abort();
}
if (rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(
channel_, stats_proxy_.get()) != 0) {
abort();
}
if (codec_->RegisterDecoderObserver(channel_, *stats_proxy_) != 0) {
abort();
}
CHECK_EQ(0, rtp_rtcp_->RegisterReceiveChannelRtcpStatisticsCallback(
channel_, stats_proxy_.get()));
CHECK_EQ(0, rtp_rtcp_->RegisterReceiveChannelRtpStatisticsCallback(
channel_, stats_proxy_.get()));
CHECK_EQ(0, rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(
channel_, stats_proxy_.get()));
CHECK_EQ(0, codec_->RegisterDecoderObserver(channel_, *stats_proxy_));
video_engine_base_->RegisterReceiveStatisticsProxy(channel_,
stats_proxy_.get());
external_codec_ = ViEExternalCodec::GetInterface(video_engine);
assert(!config_.decoders.empty());
DCHECK(!config_.decoders.empty());
for (size_t i = 0; i < config_.decoders.size(); ++i) {
const Decoder& decoder = config_.decoders[i];
if (external_codec_->RegisterExternalReceiveCodec(
CHECK_EQ(0, external_codec_->RegisterExternalReceiveCodec(
channel_, decoder.payload_type, decoder.decoder,
decoder.is_renderer, decoder.expected_delay_ms) != 0) {
// TODO(pbos): Abort gracefully? Can this be a runtime error?
abort();
}
decoder.is_renderer, decoder.expected_delay_ms));
VideoCodec codec = CreateDecoderVideoCodec(decoder);
if (codec_->SetReceiveCodec(channel_, codec) != 0) {
// TODO(pbos): Abort gracefully, this can be a runtime error.
// Factor out to an Init() method.
abort();
}
CHECK_EQ(0, codec_->SetReceiveCodec(channel_, codec));
}
render_ = ViERender::GetInterface(video_engine);
assert(render_ != NULL);
DCHECK(render_ != nullptr);
render_->AddRenderer(channel_, kVideoI420, this);
@ -301,7 +275,7 @@ VideoReceiveStream::~VideoReceiveStream() {
network_->DeregisterSendTransport(channel_);
video_engine_base_->SetVoiceEngine(NULL);
video_engine_base_->SetVoiceEngine(nullptr);
image_process_->Release();
external_codec_->Release();
codec_->DeregisterDecoderObserver(channel_);
@ -309,7 +283,7 @@ VideoReceiveStream::~VideoReceiveStream() {
stats_proxy_.get());
rtp_rtcp_->DeregisterReceiveChannelRtcpStatisticsCallback(channel_,
stats_proxy_.get());
rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(channel_, NULL);
rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(channel_, nullptr);
codec_->Release();
network_->Release();
render_->Release();
@ -320,17 +294,13 @@ VideoReceiveStream::~VideoReceiveStream() {
void VideoReceiveStream::Start() {
transport_adapter_.Enable();
if (render_->StartRender(channel_) != 0)
abort();
if (video_engine_base_->StartReceive(channel_) != 0)
abort();
CHECK_EQ(0, render_->StartRender(channel_));
CHECK_EQ(0, video_engine_base_->StartReceive(channel_));
}
void VideoReceiveStream::Stop() {
if (render_->StopRender(channel_) != 0)
abort();
if (video_engine_base_->StopReceive(channel_) != 0)
abort();
CHECK_EQ(0, render_->StopRender(channel_));
CHECK_EQ(0, video_engine_base_->StopReceive(channel_));
transport_adapter_.Disable();
}
@ -372,7 +342,7 @@ int VideoReceiveStream::DeliverFrame(unsigned char* buffer,
}
int VideoReceiveStream::DeliverI420Frame(const I420VideoFrame& video_frame) {
if (config_.renderer != NULL)
if (config_.renderer != nullptr)
config_.renderer->RenderFrame(
video_frame,
video_frame.render_time_ms() - clock_->TimeInMilliseconds());
@ -383,7 +353,7 @@ int VideoReceiveStream::DeliverI420Frame(const I420VideoFrame& video_frame) {
}
bool VideoReceiveStream::IsTextureSupported() {
if (config_.renderer == NULL)
if (config_.renderer == nullptr)
return false;
return config_.renderer->IsTextureSupported();
}

View File

@ -15,6 +15,7 @@
#include <string>
#include <vector>
#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
@ -34,7 +35,7 @@ VideoSendStream::Config::EncoderSettings::ToString() const {
std::stringstream ss;
ss << "{payload_name: " << payload_name;
ss << ", payload_type: " << payload_type;
ss << ", encoder: " << (encoder != NULL ? "(VideoEncoder)" : "NULL");
ss << ", encoder: " << (encoder != nullptr ? "(VideoEncoder)" : "nullptr");
ss << '}';
return ss.str();
}
@ -86,11 +87,12 @@ std::string VideoSendStream::Config::ToString() const {
ss << "{encoder_settings: " << encoder_settings.ToString();
ss << ", rtp: " << rtp.ToString();
ss << ", pre_encode_callback: "
<< (pre_encode_callback != NULL ? "(I420FrameCallback)" : "NULL");
ss << ", post_encode_callback: "
<< (post_encode_callback != NULL ? "(EncodedFrameObserver)" : "NULL");
ss << "local_renderer: " << (local_renderer != NULL ? "(VideoRenderer)"
: "NULL");
<< (pre_encode_callback != nullptr ? "(I420FrameCallback)" : "nullptr");
ss << ", post_encode_callback: " << (post_encode_callback != nullptr
? "(EncodedFrameObserver)"
: "nullptr");
ss << "local_renderer: " << (local_renderer != nullptr ? "(VideoRenderer)"
: "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", target_delay_ms: " << target_delay_ms;
ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on"
@ -114,42 +116,39 @@ VideoSendStream::VideoSendStream(
config_(config),
bitrate_config_(bitrate_config),
suspended_ssrcs_(suspended_ssrcs),
external_codec_(NULL),
external_codec_(nullptr),
channel_(-1),
use_config_bitrate_(true),
stats_proxy_(Clock::GetRealTimeClock(), config) {
// Duplicate assert checking of bitrate config. These should be checked in
// Duplicate checking of bitrate config. These should be checked in
// Call but are added here for verbosity.
assert(bitrate_config.min_bitrate_bps >= 0);
DCHECK_GE(bitrate_config.min_bitrate_bps, 0);
if (bitrate_config.start_bitrate_bps > 0)
assert(bitrate_config.start_bitrate_bps >= bitrate_config.min_bitrate_bps);
DCHECK_GE(bitrate_config.start_bitrate_bps, bitrate_config.min_bitrate_bps);
if (bitrate_config.max_bitrate_bps != -1)
assert(bitrate_config.max_bitrate_bps >= bitrate_config.start_bitrate_bps);
DCHECK_GE(bitrate_config.max_bitrate_bps, bitrate_config.start_bitrate_bps);
video_engine_base_ = ViEBase::GetInterface(video_engine);
video_engine_base_->CreateChannelWithoutDefaultEncoder(channel_,
base_channel);
assert(channel_ != -1);
DCHECK(channel_ != -1);
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
assert(rtp_rtcp_ != NULL);
DCHECK(rtp_rtcp_ != nullptr);
assert(config_.rtp.ssrcs.size() > 0);
DCHECK(!config_.rtp.ssrcs.empty());
for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
const std::string& extension = config_.rtp.extensions[i].name;
int id = config_.rtp.extensions[i].id;
if (extension == RtpExtension::kTOffset) {
if (rtp_rtcp_->SetSendTimestampOffsetStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0, rtp_rtcp_->SetSendTimestampOffsetStatus(channel_, true, id));
} else if (extension == RtpExtension::kAbsSendTime) {
if (rtp_rtcp_->SetSendAbsoluteSendTimeStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0, rtp_rtcp_->SetSendAbsoluteSendTimeStatus(channel_, true, id));
} else if (extension == RtpExtension::kVideoRotation) {
if (rtp_rtcp_->SetSendVideoRotationStatus(channel_, true, id) != 0)
abort();
CHECK_EQ(0, rtp_rtcp_->SetSendVideoRotationStatus(channel_, true, id));
} else {
abort(); // Unsupported extension.
RTC_NOTREACHED() << "Registering unsupported RTP extension.";
}
}
@ -157,7 +156,7 @@ VideoSendStream::VideoSendStream(
// Enable NACK, FEC or both.
if (config_.rtp.fec.red_payload_type != -1) {
assert(config_.rtp.fec.ulpfec_payload_type != -1);
DCHECK(config_.rtp.fec.ulpfec_payload_type != -1);
if (config_.rtp.nack.rtp_history_ms > 0) {
rtp_rtcp_->SetHybridNACKFECStatus(
channel_,
@ -178,7 +177,8 @@ VideoSendStream::VideoSendStream(
ConfigureSsrcs();
char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
assert(config_.rtp.c_name.length() < ViERTP_RTCP::KMaxRTCPCNameLength);
DCHECK_LT(config_.rtp.c_name.length(),
static_cast<size_t>(ViERTP_RTCP::KMaxRTCPCNameLength));
strncpy(rtcp_cname, config_.rtp.c_name.c_str(), sizeof(rtcp_cname) - 1);
rtcp_cname[sizeof(rtcp_cname) - 1] = '\0';
@ -189,28 +189,23 @@ VideoSendStream::VideoSendStream(
capture_->ConnectCaptureDevice(capture_id_, channel_);
network_ = ViENetwork::GetInterface(video_engine);
assert(network_ != NULL);
DCHECK(network_ != nullptr);
network_->RegisterSendTransport(channel_, transport_adapter_);
// 28 to match packet overhead in ModuleRtpRtcpImpl.
network_->SetMTU(channel_,
static_cast<unsigned int>(config_.rtp.max_packet_size + 28));
assert(config.encoder_settings.encoder != NULL);
assert(config.encoder_settings.payload_type >= 0);
assert(config.encoder_settings.payload_type <= 127);
DCHECK(config.encoder_settings.encoder != nullptr);
DCHECK_GE(config.encoder_settings.payload_type, 0);
DCHECK_LE(config.encoder_settings.payload_type, 127);
external_codec_ = ViEExternalCodec::GetInterface(video_engine);
if (external_codec_->RegisterExternalSendCodec(
channel_,
config.encoder_settings.payload_type,
config.encoder_settings.encoder,
false) != 0) {
abort();
}
CHECK_EQ(0, external_codec_->RegisterExternalSendCodec(
channel_, config.encoder_settings.payload_type,
config.encoder_settings.encoder, false));
codec_ = ViECodec::GetInterface(video_engine);
if (!ReconfigureVideoEncoder(encoder_config))
abort();
CHECK(ReconfigureVideoEncoder(encoder_config));
if (overuse_observer)
video_engine_base_->RegisterCpuOveruseObserver(channel_, overuse_observer);
@ -249,7 +244,7 @@ VideoSendStream::~VideoSendStream() {
rtp_rtcp_->DeregisterSendFrameCountObserver(channel_, &stats_proxy_);
rtp_rtcp_->DeregisterSendBitrateObserver(channel_, &stats_proxy_);
rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(channel_, NULL);
rtp_rtcp_->RegisterRtcpPacketTypeCounterObserver(channel_, nullptr);
rtp_rtcp_->DeregisterSendChannelRtpStatisticsCallback(channel_,
&stats_proxy_);
rtp_rtcp_->DeregisterSendChannelRtcpStatisticsCallback(channel_,
@ -279,7 +274,7 @@ VideoSendStream::~VideoSendStream() {
void VideoSendStream::IncomingCapturedFrame(const I420VideoFrame& frame) {
// TODO(pbos): Local rendering should not be done on the capture thread.
if (config_.local_renderer != NULL)
if (config_.local_renderer != nullptr)
config_.local_renderer->RenderFrame(frame, 0);
stats_proxy_.OnIncomingFrame();
@ -305,8 +300,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder");
LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString();
const std::vector<VideoStream>& streams = config.streams;
assert(!streams.empty());
assert(config_.rtp.ssrcs.size() >= streams.size());
DCHECK(!streams.empty());
DCHECK_GE(config_.rtp.ssrcs.size(), streams.size());
VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
@ -343,7 +338,7 @@ bool VideoSendStream::ReconfigureVideoEncoder(
}
if (video_codec.codecType == kVideoCodecVP8) {
if (config.encoder_specific_settings != NULL) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
config.encoder_specific_settings);
}
@ -351,7 +346,7 @@ bool VideoSendStream::ReconfigureVideoEncoder(
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
} else if (video_codec.codecType == kVideoCodecVP9) {
if (config.encoder_specific_settings != NULL) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
}
@ -359,13 +354,14 @@ bool VideoSendStream::ReconfigureVideoEncoder(
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
} else if (video_codec.codecType == kVideoCodecH264) {
if (config.encoder_specific_settings != NULL) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.H264 = *reinterpret_cast<const VideoCodecH264*>(
config.encoder_specific_settings);
}
} else {
// TODO(pbos): Support encoder_settings codec-agnostically.
assert(config.encoder_specific_settings == NULL);
DCHECK(config.encoder_specific_settings == nullptr)
<< "Encoder-specific settings for codec type not wired up.";
}
strncpy(video_codec.plName,
@ -376,18 +372,18 @@ bool VideoSendStream::ReconfigureVideoEncoder(
video_codec.numberOfSimulcastStreams =
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
assert(streams.size() <= kMaxSimulcastStreams);
DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
assert(streams[i].width > 0);
assert(streams[i].height > 0);
assert(streams[i].max_framerate > 0);
DCHECK_GT(streams[i].width, 0u);
DCHECK_GT(streams[i].height, 0u);
DCHECK_GT(streams[i].max_framerate, 0);
// Different framerates not supported per stream at the moment.
assert(streams[i].max_framerate == streams[0].max_framerate);
assert(streams[i].min_bitrate_bps >= 0);
assert(streams[i].target_bitrate_bps >= streams[i].min_bitrate_bps);
assert(streams[i].max_bitrate_bps >= streams[i].target_bitrate_bps);
assert(streams[i].max_qp >= 0);
DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
DCHECK_GE(streams[i].min_bitrate_bps, 0);
DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
DCHECK_GE(streams[i].max_qp, 0);
sim_stream->width = static_cast<unsigned short>(streams[i].width);
sim_stream->height = static_cast<unsigned short>(streams[i].height);
@ -435,13 +431,13 @@ bool VideoSendStream::ReconfigureVideoEncoder(
if (video_codec.startBitrate > video_codec.maxBitrate)
video_codec.startBitrate = video_codec.maxBitrate;
assert(streams[0].max_framerate > 0);
DCHECK_GT(streams[0].max_framerate, 0);
video_codec.maxFramerate = streams[0].max_framerate;
if (codec_->SetSendCodec(channel_, video_codec) != 0)
return false;
assert(config.min_transmit_bitrate_bps >= 0);
DCHECK_GE(config.min_transmit_bitrate_bps, 0);
rtp_rtcp_->SetMinTransmitBitrate(channel_,
config.min_transmit_bitrate_bps / 1000);
@ -474,7 +470,7 @@ void VideoSendStream::ConfigureSsrcs() {
}
// Set up RTX.
assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size());
DCHECK_EQ(config_.rtp.rtx.ssrcs.size(), config_.rtp.ssrcs.size());
for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
rtp_rtcp_->SetLocalSSRC(channel_,
@ -486,7 +482,7 @@ void VideoSendStream::ConfigureSsrcs() {
rtp_rtcp_->SetRtpStateForSsrc(channel_, ssrc, it->second);
}
assert(config_.rtp.rtx.payload_type >= 0);
DCHECK_GE(config_.rtp.rtx.payload_type, 0);
rtp_rtcp_->SetRtxSendPayloadType(channel_, config_.rtp.rtx.payload_type);
}

View File

@ -11,6 +11,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
@ -53,7 +54,7 @@ class FakeNativeHandle : public NativeHandle {
public:
FakeNativeHandle() {}
virtual ~FakeNativeHandle() {}
virtual void* GetHandle() { return NULL; }
virtual void* GetHandle() { return nullptr; }
};
class VideoSendStreamTest : public test::CallTest {
@ -463,7 +464,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
current_size_frame_(static_cast<int32_t>(start_size)) {
// Fragmentation required, this test doesn't make sense without it.
encoder_.SetFrameSize(start_size);
assert(stop_size > max_packet_size);
DCHECK_GT(stop_size, max_packet_size);
transport_adapter_.Enable();
}
@ -810,7 +811,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
transport_adapter_(ReceiveTransport()),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
last_packet_time_ms_(-1),
capturer_(NULL) {
capturer_(nullptr) {
transport_adapter_.Enable();
}
@ -914,7 +915,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
RTPHeader header;
if (!parser_->Parse(packet, length, &header))
return DELIVERY_PACKET_ERROR;
assert(stream_ != NULL);
DCHECK(stream_ != nullptr);
VideoSendStream::Stats stats = stream_->GetStats();
if (!stats.substreams.empty()) {
EXPECT_EQ(1u, stats.substreams.size());
@ -1100,7 +1101,7 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndI420VideoFrames) {
void ExpectEqualFrames(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
if (frame1.native_handle() != nullptr || frame2.native_handle() != nullptr)
ExpectEqualTextureFrames(frame1, frame2);
else
ExpectEqualBufferFrames(frame1, frame2);
@ -1645,8 +1646,8 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
encoded._frameType = (*frame_types)[i];
encoded._encodedWidth = kEncodedResolution[i].width;
encoded._encodedHeight = kEncodedResolution[i].height;
assert(callback_ != NULL);
if (callback_->Encoded(encoded, &specifics, NULL) != 0)
DCHECK(callback_ != nullptr);
if (callback_->Encoded(encoded, &specifics, nullptr) != 0)
return -1;
}