Revert "Changed argument occurences of const I420VideoFrame* to const I420VideoFrame& and non-const I420VideoFrame& to I420VideoFrame*."

This reverts commit r8731.

Reason for revert: Breaks Chromium FYI bots.

TBR=hbos, tommi

Review URL: https://webrtc-codereview.appspot.com/40359004

Cr-Commit-Position: refs/heads/master@{#8733}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8733 4adac7df-926f-26a2-2b94-8c16560cd09d
magjed@webrtc.org 2015-03-16 13:46:52 +00:00
parent 93d9d6503e
commit 2056ee3e3c
72 changed files with 290 additions and 275 deletions
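
For orientation before the per-file hunks: r8731 had changed mutable I420VideoFrame parameters from references to pointers and const parameters from pointers to references; this revert puts both back, so mutable frames travel by non-const reference again (and, e.g., I420PSNR takes const pointers again). A minimal sketch of the restored shape, using stand-in types rather than the real WebRTC headers:

#include <cstdint>

struct I420VideoFrame { /* stand-in for the real frame type */ };

struct DecodedImageCallback {
  virtual ~DecodedImageCallback() = default;
  // Restored form; the reverted r8731 form was:
  //   virtual int32_t Decoded(I420VideoFrame* decodedImage) = 0;
  virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
};

void DeliverDecoded(DecodedImageCallback* cb, I420VideoFrame& frame) {
  cb->Decoded(frame);  // was: cb->Decoded(&frame);
}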

View File

@ -657,11 +657,11 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
I420VideoFrame texture_image(
&native_handle_, width, height, output_timestamp_, 0);
texture_image.set_ntp_time_ms(output_ntp_time_ms_);
callback_status = callback_->Decoded(&texture_image);
callback_status = callback_->Decoded(texture_image);
} else {
decoded_image_.set_timestamp(output_timestamp_);
decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
callback_status = callback_->Decoded(&decoded_image_);
callback_status = callback_->Decoded(decoded_image_);
}
if (callback_status > 0) {
ALOGE("callback error");

View File

@ -115,7 +115,7 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
return false;
}
if (callback_) {
callback_->OnIncomingCapturedFrame(id_, &sample);
callback_->OnIncomingCapturedFrame(id_, sample);
}
return true;
}

View File

@ -44,7 +44,7 @@ class PassthroughStream: public webrtc::VideoRenderCallback {
virtual ~PassthroughStream() {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
webrtc::I420VideoFrame* videoFrame) {
webrtc::I420VideoFrame& videoFrame) {
rtc::CritScope cs(&stream_critical_);
// Send frame for rendering directly
if (running_ && renderer_) {

View File

@ -44,7 +44,7 @@ class WebRtcPassthroughRenderTest : public testing::Test {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
webrtc::I420VideoFrame* videoFrame) {
webrtc::I420VideoFrame& videoFrame) {
++frame_num_;
LOG(INFO) << "RenderFrame stream_id: " << stream_id
<< " frame_num: " << frame_num_;
@ -143,21 +143,21 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) {
int test_frame_num = 10;
// RenderFrame without starting the render
for (int i = 0; i < test_frame_num; ++i) {
stream1->RenderFrame(stream_id1, &frame);
stream1->RenderFrame(stream_id1, frame);
}
EXPECT_EQ(0, renderer1.frame_num());
// Start the render and test again.
EXPECT_FALSE(StartRender(stream_id3));
EXPECT_TRUE(StartRender(stream_id1));
for (int i = 0; i < test_frame_num; ++i) {
stream1->RenderFrame(stream_id1, &frame);
stream1->RenderFrame(stream_id1, frame);
}
EXPECT_EQ(test_frame_num, renderer1.frame_num());
// Stop the render and test again.
EXPECT_FALSE(StopRender(stream_id3));
EXPECT_TRUE(StopRender(stream_id1));
for (int i = 0; i < test_frame_num; ++i) {
stream1->RenderFrame(stream_id1, &frame);
stream1->RenderFrame(stream_id1, frame);
}
// The frame number should not have changed.
EXPECT_EQ(test_frame_num, renderer1.frame_num());
@ -166,7 +166,7 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) {
EXPECT_TRUE(StartRender(stream_id2));
test_frame_num = 30;
for (int i = 0; i < test_frame_num; ++i) {
stream2->RenderFrame(stream_id2, &frame);
stream2->RenderFrame(stream_id2, frame);
}
EXPECT_EQ(test_frame_num, renderer2.frame_num());
}

View File

@ -354,7 +354,7 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(
}
void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
webrtc::I420VideoFrame* sample) {
webrtc::I420VideoFrame& sample) {
// This would be a normal CritScope, except that it's possible that:
// (1) whatever system component producing this frame has taken a lock, and
// (2) Stop() probably calls back into that system component, which may take
@ -371,12 +371,12 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
// Log the size and pixel aspect ratio of the first captured frame.
if (1 == captured_frames_) {
LOG(LS_INFO) << "Captured frame size "
<< sample->width() << "x" << sample->height()
<< sample.width() << "x" << sample.height()
<< ". Expected format " << GetCaptureFormat()->ToString();
}
if (start_thread_->IsCurrent()) {
SignalFrameCapturedOnStartThread(sample);
SignalFrameCapturedOnStartThread(&sample);
} else {
// This currently happens with at least VideoCaptureModuleV4L2 and
// possibly other implementations of WebRTC's VideoCaptureModule.
@ -385,7 +385,7 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
// thread hop.
start_thread_->Invoke<void>(
rtc::Bind(&WebRtcVideoCapturer::SignalFrameCapturedOnStartThread,
this, sample));
this, &sample));
}
}
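
The hunk above restores the synchronous hop back to the start thread when the capture callback fires elsewhere; the rtc::Bind/Invoke calls are the ones shown in the diff, while everything else in this condensed sketch is a stand-in:

#include <functional>
#include <thread>

struct Frame { int width = 0; };

class CapturerSketch {
 public:
  explicit CapturerSketch(std::thread::id start_thread)
      : start_thread_(start_thread) {}

  void OnIncomingCapturedFrame(Frame& sample) {
    if (std::this_thread::get_id() == start_thread_) {
      SignalFrameCaptured(&sample);  // already on the start thread
    } else {
      // The real code does a synchronous cross-thread call:
      //   start_thread_->Invoke<void>(rtc::Bind(
      //       &WebRtcVideoCapturer::SignalFrameCapturedOnStartThread,
      //       this, &sample));
      // Because Invoke blocks, &sample stays valid for the duration.
      InvokeOnStartThread([this, &sample] { SignalFrameCaptured(&sample); });
    }
  }

 private:
  void SignalFrameCaptured(Frame* /*frame*/) {}
  // Placeholder: a real implementation would post the closure to the start
  // thread's queue and block until it has run there.
  void InvokeOnStartThread(const std::function<void()>& fn) { fn(); }

  std::thread::id start_thread_;
};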

View File

@ -81,7 +81,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
private:
// Callback when a frame is captured by camera.
virtual void OnIncomingCapturedFrame(const int32_t id,
webrtc::I420VideoFrame* frame);
webrtc::I420VideoFrame& frame);
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay);

View File

@ -148,11 +148,11 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
// Compute PSNR for an I420 frame (all planes).
// Returns the PSNR in decibels, to a maximum of kInfinitePSNR.
double I420PSNR(const I420VideoFrame& ref_frame,
const I420VideoFrame& test_frame);
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
// Compute SSIM for an I420 frame (all planes).
double I420SSIM(const I420VideoFrame& ref_frame,
const I420VideoFrame& test_frame);
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
}
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
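
A hedged usage sketch for the pointer overloads restored above (frame construction omitted; per the implementation later in this diff, both functions return -1 for NULL inputs or mismatched dimensions):

double psnr = I420PSNR(&ref_frame, &test_frame);  // decibels, capped
double ssim = I420SSIM(&ref_frame, &test_frame);
if (psnr < 0) {
  // NULL input or ref/test dimension mismatch.
}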

View File

@ -38,16 +38,18 @@ int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) {
}
int PrintFrame(const I420VideoFrame& frame, const char* str) {
printf("%s %dx%d \n", str, frame.width(), frame.height());
int PrintFrame(const I420VideoFrame* frame, const char* str) {
if (frame == NULL)
return -1;
printf("%s %dx%d \n", str, frame->width(), frame->height());
int ret = 0;
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
PlaneType plane_type = static_cast<PlaneType>(plane_num);
int width = (plane_num ? (frame.width() + 1) / 2 : frame.width());
int height = (plane_num ? (frame.height() + 1) / 2 : frame.height());
ret += PrintBuffer(frame.buffer(plane_type), width, height,
frame.stride(plane_type));
int width = (plane_num ? (frame->width() + 1) / 2 : frame->width());
int height = (plane_num ? (frame->height() + 1) / 2 : frame->height());
ret += PrintBuffer(frame->buffer(plane_type), width, height,
frame->stride(plane_type));
}
return ret;
}
@ -154,7 +156,7 @@ TEST_F(TestLibYuv, ConvertTest) {
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
j++;
@ -174,7 +176,7 @@ TEST_F(TestLibYuv, ConvertTest) {
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
// Optimization speed-quality trade-off => 45 dB only (platform dependent).
EXPECT_GT(ceil(psnr), 44);
@ -185,7 +187,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@ -210,7 +212,7 @@ TEST_F(TestLibYuv, ConvertTest) {
ConvertToI420(kI420, res_i420_buffer.get(), 0, 0, width_, height_, 0,
kVideoRotation_0, &res_i420_frame);
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
j++;
@ -225,7 +227,7 @@ TEST_F(TestLibYuv, ConvertTest) {
return;
}
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
rtc::scoped_ptr<uint8_t[]> out_rgb565_buffer(
@ -241,7 +243,7 @@ TEST_F(TestLibYuv, ConvertTest) {
}
j++;
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
// TODO(leozwang) Investigate what the right psnr should be for I420ToRGB565.
// Another example is I420ToRGB24, where the psnr is 44.
// TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@ -260,7 +262,7 @@ TEST_F(TestLibYuv, ConvertTest) {
return;
}
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
// TODO(leozwang) Investigate what the right psnr should be for I420ToARGB8888.
EXPECT_GT(ceil(psnr), 42);
@ -291,7 +293,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(orig_frame_, res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
}

View File

@ -312,7 +312,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
required_size, kVideoRotation_0, &in_frame));
EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height,
required_size, kVideoRotation_0, &out_frame));
double psnr = I420PSNR(in_frame, out_frame);
double psnr = I420PSNR(&in_frame, &out_frame);
avg_psnr += psnr;
}
avg_psnr = avg_psnr / frame_count;

View File

@ -291,47 +291,57 @@ int ConvertFromYV12(const I420VideoFrame& src_frame,
}
// Compute PSNR for an I420 frame (all planes)
double I420PSNR(const I420VideoFrame& ref_frame,
const I420VideoFrame& test_frame) {
if (ref_frame.width() != test_frame.width() ||
ref_frame.height() != test_frame.height())
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
double psnr = libyuv::I420Psnr(ref_frame.buffer(kYPlane),
ref_frame.stride(kYPlane),
ref_frame.buffer(kUPlane),
ref_frame.stride(kUPlane),
ref_frame.buffer(kVPlane),
ref_frame.stride(kVPlane),
test_frame.buffer(kYPlane),
test_frame.stride(kYPlane),
test_frame.buffer(kUPlane),
test_frame.stride(kUPlane),
test_frame.buffer(kVPlane),
test_frame.stride(kVPlane),
test_frame.width(), test_frame.height());
else if ((ref_frame->width() != test_frame->width()) ||
(ref_frame->height() != test_frame->height()))
return -1;
else if (ref_frame->width() < 0 || ref_frame->height() < 0)
return -1;
double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane),
ref_frame->stride(kYPlane),
ref_frame->buffer(kUPlane),
ref_frame->stride(kUPlane),
ref_frame->buffer(kVPlane),
ref_frame->stride(kVPlane),
test_frame->buffer(kYPlane),
test_frame->stride(kYPlane),
test_frame->buffer(kUPlane),
test_frame->stride(kUPlane),
test_frame->buffer(kVPlane),
test_frame->stride(kVPlane),
test_frame->width(), test_frame->height());
// LibYuv sets the max psnr value to 128, we restrict it here.
// In case of 0 mse in one frame, 128 can skew the results significantly.
return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
}
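
For orientation, the quantity being clamped: PSNR = 10 * log10(255^2 / MSE), computed over all three planes, so identical frames give MSE = 0 and an unbounded result. libyuv reports that case as 128 dB, and the clamp above caps it at kPerfectPSNR, which, judging by the conversion tests earlier in this diff that expect exactly 48.0, is 48 dB.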
// Compute SSIM for an I420 frame (all planes)
double I420SSIM(const I420VideoFrame& ref_frame,
const I420VideoFrame& test_frame) {
if (ref_frame.width() != test_frame.width() ||
ref_frame.height() != test_frame.height())
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
return libyuv::I420Ssim(ref_frame.buffer(kYPlane),
ref_frame.stride(kYPlane),
ref_frame.buffer(kUPlane),
ref_frame.stride(kUPlane),
ref_frame.buffer(kVPlane),
ref_frame.stride(kVPlane),
test_frame.buffer(kYPlane),
test_frame.stride(kYPlane),
test_frame.buffer(kUPlane),
test_frame.stride(kUPlane),
test_frame.buffer(kVPlane),
test_frame.stride(kVPlane),
test_frame.width(), test_frame.height());
else if ((ref_frame->width() != test_frame->width()) ||
(ref_frame->height() != test_frame->height()))
return -1;
else if (ref_frame->width() < 0 || ref_frame->height() < 0)
return -1;
return libyuv::I420Ssim(ref_frame->buffer(kYPlane),
ref_frame->stride(kYPlane),
ref_frame->buffer(kUPlane),
ref_frame->stride(kUPlane),
ref_frame->buffer(kVPlane),
ref_frame->stride(kVPlane),
test_frame->buffer(kYPlane),
test_frame->stride(kYPlane),
test_frame->buffer(kUPlane),
test_frame->stride(kUPlane),
test_frame->buffer(kVPlane),
test_frame->stride(kVPlane),
test_frame->width(), test_frame->height());
}
} // namespace webrtc

View File

@ -93,12 +93,12 @@ public:
virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const
{return -1;}
virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/)
virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/)
{ return -1;}
// Same as GetVideoFromFile(). videoFrame will have the resolution specified
// by the width outWidth and height outHeight in pixels.
virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/,
virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/,
const uint32_t /*outWidth*/,
const uint32_t /*outHeight*/)
{return -1;}

View File

@ -98,7 +98,7 @@ class VideoCaptureDataCallback
{
public:
virtual void OnIncomingCapturedFrame(const int32_t id,
I420VideoFrame* videoFrame) = 0;
I420VideoFrame& videoFrame) = 0;
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay) = 0;
protected:

View File

@ -105,10 +105,10 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
}
virtual void OnIncomingCapturedFrame(const int32_t id,
webrtc::I420VideoFrame* videoFrame) {
webrtc::I420VideoFrame& videoFrame) {
CriticalSectionScoped cs(capture_cs_.get());
int height = videoFrame->height();
int width = videoFrame->width();
int height = videoFrame.height();
int width = videoFrame.width();
#if ANDROID
// Android camera frames may be rotated depending on test device
// orientation.
@ -126,21 +126,21 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
#endif
// RenderTimestamp should be the time now.
EXPECT_TRUE(
videoFrame->render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
videoFrame->render_time_ms() <= TickTime::MillisecondTimestamp());
videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
if ((videoFrame->render_time_ms() >
if ((videoFrame.render_time_ms() >
last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
last_render_time_ms_ > 0) ||
(videoFrame->render_time_ms() <
(videoFrame.render_time_ms() <
last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
last_render_time_ms_ > 0)) {
timing_warnings_++;
}
incoming_frames_++;
last_render_time_ms_ = videoFrame->render_time_ms();
last_frame_.CopyFrame(*videoFrame);
last_render_time_ms_ = videoFrame.render_time_ms();
last_frame_.CopyFrame(videoFrame);
}
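
To make the timing window above concrete (maxFPS assumed to be 30 for illustration): the nominal spacing is 1000/30 ≈ 33.3 ms per frame, so a timing warning is counted when a frame's render time lands more than 1.1 * 33.3 ≈ 36.7 ms after the previous frame, or less than 0.9 * 33.3 = 30 ms after it; the render timestamp itself must also fall within the last 30 ms of wall-clock time.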
virtual void OnCaptureDelayChanged(const int32_t id,

View File

@ -215,7 +215,7 @@ int32_t VideoCaptureImpl::CaptureDelay()
return _setCaptureDelay;
}
int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame* captureFrame,
int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
int64_t capture_time) {
UpdateFrameCount(); // frame count used for local frame rate callback.
@ -227,16 +227,16 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame* captureFrame,
// Set the capture time
if (capture_time != 0) {
captureFrame->set_render_time_ms(capture_time - delta_ntp_internal_ms_);
captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_);
} else {
captureFrame->set_render_time_ms(TickTime::MillisecondTimestamp());
captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
}
if (captureFrame->render_time_ms() == last_capture_time_) {
if (captureFrame.render_time_ms() == last_capture_time_) {
// We don't allow the same capture time for two frames; drop this one.
return -1;
}
last_capture_time_ = captureFrame->render_time_ms();
last_capture_time_ = captureFrame.render_time_ms();
if (_dataCallBack) {
if (callOnCaptureDelayChanged) {
@ -322,7 +322,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
_captureFrame.set_rotation(kVideoRotation_0);
}
DeliverCapturedFrame(&_captureFrame, captureTime);
DeliverCapturedFrame(_captureFrame, captureTime);
}
else // Encoded format
{
@ -338,7 +338,7 @@ int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
DeliverCapturedFrame(video_frame, captureTime);
DeliverCapturedFrame(*video_frame, captureTime);
return 0;
}
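
A note on the render-time stamping in DeliverCapturedFrame above: delta_ntp_internal_ms_ is, per its name, the offset between the NTP wall clock and the internal TickTime clock, so subtracting it converts an NTP-based capture_time into the internal timebase; when no capture time is supplied, the current TickTime is used directly. With hypothetical numbers, capture_time = 3,600,000,123 ms NTP and delta_ntp_internal_ms_ = 3,599,000,000 stamp the frame with render_time_ms = 1,000,123.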

View File

@ -107,7 +107,7 @@ public:
protected:
VideoCaptureImpl(const int32_t id);
virtual ~VideoCaptureImpl();
int32_t DeliverCapturedFrame(I420VideoFrame* captureFrame,
int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame,
int64_t capture_time);
int32_t _id; // Module ID

View File

@ -215,7 +215,7 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
}
_decodedImage.set_timestamp(inputImage._timeStamp);
_decodeCompleteCallback->Decoded(&_decodedImage);
_decodeCompleteCallback->Decoded(_decodedImage);
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -49,7 +49,7 @@ class MockVideoEncoder : public VideoEncoder {
class MockDecodedImageCallback : public DecodedImageCallback {
public:
MOCK_METHOD1(Decoded,
int32_t(I420VideoFrame* decodedImage));
int32_t(I420VideoFrame& decodedImage));
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
int32_t(const uint64_t pictureId));
MOCK_METHOD1(ReceivedDecodedFrame,

View File

@ -413,8 +413,8 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
}
int32_t
VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
I420VideoFrame* image) {
video_processor_->FrameDecoded(*image); // forward to parent class
I420VideoFrame& image) {
video_processor_->FrameDecoded(image); // forward to parent class
return 0;
}

View File

@ -241,7 +241,7 @@ class VideoProcessorImpl : public VideoProcessor {
explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {
}
int32_t Decoded(webrtc::I420VideoFrame* image) override;
int32_t Decoded(webrtc::I420VideoFrame& image) override;
private:
VideoProcessorImpl* video_processor_;

View File

@ -124,16 +124,16 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
Vp8TestDecodedImageCallback()
: decoded_frames_(0) {
}
virtual int32_t Decoded(I420VideoFrame* decoded_image) {
last_decoded_frame_.CopyFrame(*decoded_image);
for (int i = 0; i < decoded_image->width(); ++i) {
EXPECT_NEAR(kColorY, decoded_image->buffer(kYPlane)[i], 1);
virtual int32_t Decoded(I420VideoFrame& decoded_image) {
last_decoded_frame_.CopyFrame(decoded_image);
for (int i = 0; i < decoded_image.width(); ++i) {
EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
}
// TODO(mikhal): Verify the difference between U,V and the original.
for (int i = 0; i < ((decoded_image->width() + 1) / 2); ++i) {
EXPECT_NEAR(kColorU, decoded_image->buffer(kUPlane)[i], 4);
EXPECT_NEAR(kColorV, decoded_image->buffer(kVPlane)[i], 4);
for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4);
}
decoded_frames_++;
return 0;

View File

@ -78,7 +78,7 @@ class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8UnitTestDecodeCompleteCallback(I420VideoFrame* frame)
: decoded_frame_(frame), decode_complete(false) {}
int Decoded(webrtc::I420VideoFrame* frame);
int Decoded(webrtc::I420VideoFrame& frame);
bool DecodeComplete();
private:
@ -94,8 +94,8 @@ bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
return false;
}
int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame* image) {
decoded_frame_->CopyFrame(*image);
int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) {
decoded_frame_->CopyFrame(image);
decode_complete = true;
return 0;
}
@ -227,7 +227,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(WaitForDecodedFrame(), 0u);
// Compute PSNR on all planes (faster than SSIM).
EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36);
EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
}
@ -249,7 +249,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) {
encoded_frame_._frameType = kKeyFrame;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36);
EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
}
TEST_F(TestVp8Impl, TestReset) {

View File

@ -1343,7 +1343,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
decoded_image_.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(&decoded_image_);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;

View File

@ -68,15 +68,15 @@ class Vp8SequenceCoderDecodeCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8SequenceCoderDecodeCallback(FILE* decoded_file)
: decoded_file_(decoded_file) {}
int Decoded(webrtc::I420VideoFrame* frame);
int Decoded(webrtc::I420VideoFrame& frame);
bool DecodeComplete();
private:
FILE* decoded_file_;
};
int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame* image) {
EXPECT_EQ(0, webrtc::PrintI420VideoFrame(*image, decoded_file_));
int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame& image) {
EXPECT_EQ(0, webrtc::PrintI420VideoFrame(image, decoded_file_));
return 0;
}

View File

@ -480,7 +480,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
img->stride[VPX_PLANE_U],
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
int ret = decode_complete_callback_->Decoded(&decoded_image_);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
return WEBRTC_VIDEO_CODEC_OK;

View File

@ -81,7 +81,7 @@ class VCMPacketizationCallback {
// Callback class used for passing decoded frames which are ready to be rendered.
class VCMReceiveCallback {
public:
virtual int32_t FrameToRender(I420VideoFrame* videoFrame) = 0;
virtual int32_t FrameToRender(I420VideoFrame& videoFrame) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(
const uint64_t pictureId) {
return -1;

View File

@ -46,7 +46,7 @@ VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback()
return _receiveCallback;
}
int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
{
// TODO(holmer): We should improve this so that we can handle multiple
// callbacks from one call to Decode().
@ -55,7 +55,7 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
{
CriticalSectionScoped cs(_critSect);
frameInfo = static_cast<VCMFrameInformation*>(
_timestampMap.Pop(decodedImage->timestamp()));
_timestampMap.Pop(decodedImage.timestamp()));
callback = _receiveCallback;
}
@ -66,14 +66,14 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
}
_timing.StopDecodeTimer(
decodedImage->timestamp(),
decodedImage.timestamp(),
frameInfo->decodeStartTimeMs,
_clock->TimeInMilliseconds(),
frameInfo->renderTimeMs);
if (callback != NULL)
{
decodedImage->set_render_time_ms(frameInfo->renderTimeMs);
decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
callback->FrameToRender(decodedImage);
}
return WEBRTC_VIDEO_CODEC_OK;

View File

@ -39,7 +39,7 @@ public:
void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
VCMReceiveCallback* UserReceiveCallback();
virtual int32_t Decoded(I420VideoFrame* decodedImage);
virtual int32_t Decoded(I420VideoFrame& decodedImage);
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId);
virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId);

View File

@ -147,13 +147,13 @@ VCMNTDecodeCompleteCallback::~VCMNTDecodeCompleteCallback()
fclose(_decodedFile);
}
int32_t
VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame* videoFrame)
VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame)
{
if (videoFrame->width() != _currentWidth ||
videoFrame->height() != _currentHeight)
if (videoFrame.width() != _currentWidth ||
videoFrame.height() != _currentHeight)
{
_currentWidth = videoFrame->width();
_currentHeight = videoFrame->height();
_currentWidth = videoFrame.width();
_currentHeight = videoFrame.height();
if (_decodedFile != NULL)
{
fclose(_decodedFile);
@ -161,11 +161,11 @@ VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame* videoFrame)
}
_decodedFile = fopen(_outname.c_str(), "wb");
}
if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame->width(),
videoFrame->height());
_decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame.width(),
videoFrame.height());
return VCM_OK;
}

View File

@ -68,7 +68,7 @@ public:
void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
// will write decoded frame into file
int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override;
int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override;
size_t DecodedBytes();
private:

View File

@ -483,18 +483,18 @@ VCMQMDecodeCompleteCallback::~VCMQMDecodeCompleteCallback()
}
int32_t
VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame)
{
++frames_cnt_since_drop_;
// When receiving the first coded frame, the last_frame variable is not set.
if (last_frame_.IsZeroSize()) {
last_frame_.CopyFrame(*videoFrame);
last_frame_.CopyFrame(videoFrame);
}
// Check if there were frames skipped.
int num_frames_skipped = static_cast<int>( 0.5f +
(videoFrame->timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) /
(videoFrame.timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) /
(9e4 / frame_rate_));
// If so...put the last frames into the encoded stream to make up for the
@ -510,9 +510,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
DataLog::InsertCell(
feature_table_name_,"num frames since drop",frames_cnt_since_drop_);
if (_origWidth == videoFrame->width() && _origHeight == videoFrame->height())
if (_origWidth == videoFrame.width() && _origHeight == videoFrame.height())
{
if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_frameCnt++;
@ -531,9 +531,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
return -1;
}
_decodedBytes += CalcBufferSize(kI420, videoFrame->width(),
videoFrame->height());
videoFrame->SwapFrame(&last_frame_);
_decodedBytes += CalcBufferSize(kI420, videoFrame.width(),
videoFrame.height());
videoFrame.SwapFrame(&last_frame_);
return VCM_OK;
}
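
A worked pass through the num_frames_skipped computation above, with assumed numbers: RTP video timestamps tick at 90 kHz, so at frame_rate_ = 30 one frame interval is 9e4 / 30 = 3000 ticks. If last_frame_ carries timestamp 90000 and the incoming frame 99000, the expression is 0.5 + (99000 - (90000 + 3000)) / 3000 = 0.5 + 2.0, which truncates to 2: two frames were dropped in between, and the +0.5 guards the integer truncation against floating-point error.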

View File

@ -61,7 +61,7 @@ public:
virtual ~VCMQMDecodeCompleteCallback();
void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
// will write decoded frame into file
int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame);
int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame);
size_t DecodedBytes();
void SetOriginalFrameDimensions(int32_t width, int32_t height);
int32_t buildInterpolator();

View File

@ -185,13 +185,13 @@ VCMRTPEncodeCompleteCallback::EncodeComplete()
// Decoded Frame Callback Implementation
int32_t
VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame)
{
if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_decodedBytes += CalcBufferSize(kI420, videoFrame->width(),
videoFrame->height());
_decodedBytes += CalcBufferSize(kI420, videoFrame.width(),
videoFrame.height());
return VCM_OK;
}

View File

@ -136,7 +136,7 @@ public:
_decodedFile(decodedFile), _decodedBytes(0) {}
virtual ~VCMDecodeCompleteCallback() {}
// Write decoded frame into file
int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override;
int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override;
size_t DecodedBytes();
private:
FILE* _decodedFile;

View File

@ -113,7 +113,7 @@ FileOutputFrameReceiver::~FileOutputFrameReceiver() {
}
int32_t FileOutputFrameReceiver::FrameToRender(
webrtc::I420VideoFrame* video_frame) {
webrtc::I420VideoFrame& video_frame) {
if (timing_file_ == NULL) {
std::string basename;
std::string extension;
@ -123,14 +123,14 @@ int32_t FileOutputFrameReceiver::FrameToRender(
return -1;
}
}
if (out_file_ == NULL || video_frame->width() != width_ ||
video_frame->height() != height_) {
if (out_file_ == NULL || video_frame.width() != width_ ||
video_frame.height() != height_) {
if (out_file_) {
fclose(out_file_);
}
printf("New size: %dx%d\n", video_frame->width(), video_frame->height());
width_ = video_frame->width();
height_ = video_frame->height();
printf("New size: %dx%d\n", video_frame.width(), video_frame.height());
width_ = video_frame.width();
height_ = video_frame.height();
std::string filename_with_width_height = AppendWidthHeightCount(
out_filename_, width_, height_, count_);
++count_;
@ -139,9 +139,9 @@ int32_t FileOutputFrameReceiver::FrameToRender(
return -1;
}
}
fprintf(timing_file_, "%u, %u\n", video_frame->timestamp(),
webrtc::MaskWord64ToUWord32(video_frame->render_time_ms()));
if (PrintI420VideoFrame(*video_frame, out_file_) < 0) {
fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(),
webrtc::MaskWord64ToUWord32(video_frame.render_time_ms()));
if (PrintI420VideoFrame(video_frame, out_file_) < 0) {
return -1;
}
return 0;

View File

@ -87,7 +87,7 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
virtual ~FileOutputFrameReceiver();
// VCMReceiveCallback
virtual int32_t FrameToRender(webrtc::I420VideoFrame* video_frame);
virtual int32_t FrameToRender(webrtc::I420VideoFrame& video_frame);
private:
std::string out_filename_;

View File

@ -358,7 +358,7 @@ void TestSize(const I420VideoFrame& source_frame,
WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
// Compute PSNR against the cropped source frame and check expectation.
double psnr = I420PSNR(cropped_source_frame, *out_frame);
double psnr = I420PSNR(&cropped_source_frame, out_frame);
EXPECT_GT(psnr, expected_psnr);
printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
"source which is scaled down/up to: %d %d, and back to source size \n",

View File

@ -383,10 +383,10 @@ int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
int32_t AndroidNativeOpenGl2Channel::RenderFrame(
const uint32_t /*streamId*/,
I420VideoFrame* videoFrame) {
I420VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_bufferToRender.SwapFrame(&videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;

View File

@ -35,7 +35,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
//Implement VideoRenderCallback
virtual int32_t RenderFrame(
const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);

View File

@ -412,10 +412,10 @@ int32_t AndroidSurfaceViewChannel::Init(
int32_t AndroidSurfaceViewChannel::RenderFrame(
const uint32_t /*streamId*/,
I420VideoFrame* videoFrame) {
I420VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_bufferToRender.SwapFrame(&videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;

View File

@ -33,7 +33,7 @@ class AndroidSurfaceViewChannel : public AndroidStream {
//Implement VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);

View File

@ -188,8 +188,9 @@ int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
}
// VideoRenderCallback
int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame)
int32_t VideoRenderExternalImpl::RenderFrame(
const uint32_t streamId,
I420VideoFrame& videoFrame)
{
return 0;
}

View File

@ -115,7 +115,7 @@ public:
// VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
private:
CriticalSectionWrapper& _critSect;

View File

@ -49,7 +49,7 @@ class VideoRenderCallback
{
public:
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame) = 0;
I420VideoFrame& videoFrame) = 0;
protected:
virtual ~VideoRenderCallback()

View File

@ -85,11 +85,11 @@ VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
}
int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame) {
I420VideoFrame& video_frame) {
CriticalSectionScoped csS(&stream_critsect_);
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
video_frame->render_time_ms());
video_frame.render_time_ms());
if (!running_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
@ -110,7 +110,7 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
// Insert frame.
CriticalSectionScoped csB(&buffer_critsect_);
if (render_buffers_.AddFrame(*video_frame) == 1)
if (render_buffers_.AddFrame(video_frame) == 1)
deliver_buffer_event_.Set();
return 0;
@ -285,13 +285,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
// We have not rendered anything and have a start image.
temp_frame_.CopyFrame(start_image_);
render_callback_->RenderFrame(stream_id_, &temp_frame_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
} else if (!timeout_image_.IsZeroSize() &&
last_render_time_ms_ + timeout_time_ <
TickTime::MillisecondTimestamp()) {
// Render a timeout image.
temp_frame_.CopyFrame(timeout_image_);
render_callback_->RenderFrame(stream_id_, &temp_frame_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
}
}
@ -305,13 +305,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: executing external renderer callback to deliver frame",
__FUNCTION__, frame_to_render.render_time_ms());
external_callback_->RenderFrame(stream_id_, &frame_to_render);
external_callback_->RenderFrame(stream_id_, frame_to_render);
} else {
if (render_callback_) {
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
"%s: Render frame, time: ", __FUNCTION__,
frame_to_render.render_time_ms());
render_callback_->RenderFrame(stream_id_, &frame_to_render);
render_callback_->RenderFrame(stream_id_, frame_to_render);
}
}

View File

@ -31,7 +31,7 @@ class IncomingVideoStream : public VideoRenderCallback {
// Get callback to deliver frames to the module.
VideoRenderCallback* ModuleCallback();
virtual int32_t RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame);
I420VideoFrame& video_frame);
// Set callback to the platform dependent code.
int32_t SetRenderCallback(VideoRenderCallback* render_callback);

View File

@ -25,7 +25,7 @@ class VideoRenderIosChannel : public VideoRenderCallback {
// Implementation of VideoRenderCallback.
int32_t RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame) override;
I420VideoFrame& video_frame) override;
int SetStreamSettings(const float z_order,
const float left,

View File

@ -24,10 +24,10 @@ VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame) {
video_frame->set_render_time_ms(0);
I420VideoFrame& video_frame) {
video_frame.set_render_time_ms(0);
current_frame_->CopyFrame(*video_frame);
current_frame_->CopyFrame(video_frame);
buffer_is_updated_ = true;
return 0;

View File

@ -44,14 +44,15 @@ VideoX11Channel::~VideoX11Channel()
}
int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame) {
I420VideoFrame& videoFrame) {
CriticalSectionScoped cs(&_crit);
if (_width != videoFrame->width() || _height != videoFrame->height()) {
if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) {
if (_width != videoFrame.width() || _height
!= videoFrame.height()) {
if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
return -1;
}
}
return DeliverFrame(*videoFrame);
return DeliverFrame(videoFrame);
}
int32_t VideoX11Channel::FrameSizeChange(int32_t width,

View File

@ -34,7 +34,7 @@ public:
virtual ~VideoX11Channel();
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
int32_t FrameSizeChange(int32_t width, int32_t height,
int32_t numberOfStreams);

View File

@ -81,7 +81,7 @@ VideoChannelAGL::~VideoChannelAGL()
}
int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame) {
I420VideoFrame& videoFrame) {
_owner->LockAGLCntx();
if (_width != videoFrame.width() ||
_height != videoFrame.height()) {
@ -94,7 +94,7 @@ int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
}
_owner->UnlockAGLCntx();
return DeliverFrame(*videoFrame);
return DeliverFrame(videoFrame);
}
int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)

View File

@ -52,7 +52,7 @@ class VideoChannelAGL : public VideoRenderCallback {
int IsUpdated(bool& isUpdated);
virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
private:

View File

@ -66,7 +66,7 @@ public:
// ********** new module functions ************ //
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
// ********** new module helper functions ***** //
int ChangeContext(NSOpenGLContext *nsglContext);

View File

@ -90,17 +90,18 @@ int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
}
int32_t VideoChannelNSOpenGL::RenderFrame(
const uint32_t /*streamId*/, I420VideoFrame* videoFrame) {
const uint32_t /*streamId*/, I420VideoFrame& videoFrame) {
_owner->LockAGLCntx();
if(_width != videoFrame->width() || _height != videoFrame->height()) {
if(FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) {
if(_width != videoFrame.width() ||
_height != videoFrame.height()) {
if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
_owner->UnlockAGLCntx();
return -1;
}
}
int ret = DeliverFrame(*videoFrame);
int ret = DeliverFrame(videoFrame);
_owner->UnlockAGLCntx();
return ret;

View File

@ -262,7 +262,7 @@ public:
}
;
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame)
I420VideoFrame& videoFrame)
{
_cnt++;
if (_cnt % 100 == 0)
@ -318,7 +318,7 @@ int TestSingleStream(VideoRender* renderModule) {
// Render this frame with the specified delay
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
+ renderDelayMs);
renderCallback0->RenderFrame(streamId0, &videoFrame0);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(1000/TEST_FRAME_RATE);
}
@ -392,7 +392,7 @@ int TestBitmapText(VideoRender* renderModule) {
// Render this frame with the specified delay
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, &videoFrame0);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(1000/TEST_FRAME_RATE);
}
// Sleep and let all frames be rendered before closing
@ -477,22 +477,22 @@ int TestMultipleStreams(VideoRender* renderModule) {
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, &videoFrame0);
renderCallback0->RenderFrame(streamId0, videoFrame0);
GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback1->RenderFrame(streamId1, &videoFrame1);
renderCallback1->RenderFrame(streamId1, videoFrame1);
GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback2->RenderFrame(streamId2, &videoFrame2);
renderCallback2->RenderFrame(streamId2, videoFrame2);
GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback3->RenderFrame(streamId3, &videoFrame3);
renderCallback3->RenderFrame(streamId3, videoFrame3);
SleepMs(1000/TEST_FRAME_RATE);
}
@ -550,7 +550,7 @@ int TestExternalRender(VideoRender* renderModule) {
for (int i=0; i<frameCount; i++) {
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, &videoFrame0);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(33);
}

View File

@ -143,17 +143,17 @@ int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
}
int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame)
I420VideoFrame& videoFrame)
{
CriticalSectionScoped cs(_critSect);
if (_width != videoFrame->width() || _height != videoFrame->height())
if (_width != videoFrame.width() || _height != videoFrame.height())
{
if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1)
if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
{
return -1;
}
}
return DeliverFrame(*videoFrame);
return DeliverFrame(videoFrame);
}
// Called from video engine when a new frame should be rendered.

View File

@ -46,7 +46,7 @@ public:
// A new frame is delivered.
virtual int DeliverFrame(const I420VideoFrame& videoFrame);
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame* videoFrame);
I420VideoFrame& videoFrame);
// Called to check if the video frame is updated.
int IsUpdated(bool& isUpdated);

View File

@ -39,7 +39,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input,
frame_.set_ntp_time_ms(input.ntp_time_ms_);
frame_.set_render_time_ms(render_time_ms);
callback_->Decoded(&frame_);
callback_->Decoded(frame_);
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -34,8 +34,8 @@ enum VideoMetricsType { kPSNR, kSSIM, kBoth };
// Calculates metrics for a frame and adds statistics to the result for it.
void CalculateFrame(VideoMetricsType video_metrics_type,
const I420VideoFrame& ref,
const I420VideoFrame& test,
const I420VideoFrame* ref,
const I420VideoFrame* test,
int frame_number,
QualityMetricsResult* result) {
FrameResult frame_result = {0, 0};
@ -129,17 +129,17 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
kVideoRotation_0, &test_frame);
switch (video_metrics_type) {
case kPSNR:
CalculateFrame(kPSNR, ref_frame, test_frame, frame_number,
CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,
psnr_result);
break;
case kSSIM:
CalculateFrame(kSSIM, ref_frame, test_frame, frame_number,
CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number,
ssim_result);
break;
case kBoth:
CalculateFrame(kPSNR, ref_frame, test_frame, frame_number,
CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,
psnr_result);
CalculateFrame(kSSIM, ref_frame, test_frame, frame_number,
CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number,
ssim_result);
break;
}

View File

@ -87,9 +87,9 @@ void VcmCapturer::Destroy() {
VcmCapturer::~VcmCapturer() { Destroy(); }
void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
I420VideoFrame* frame) {
I420VideoFrame& frame) {
if (started_)
input_->SwapFrame(frame);
input_->SwapFrame(&frame);
}
void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {

View File

@ -28,7 +28,7 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
void Stop() override;
void OnIncomingCapturedFrame(const int32_t id,
I420VideoFrame* frame) override; // NOLINT
I420VideoFrame& frame) override; // NOLINT
void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
private:

View File

@ -201,7 +201,7 @@ class VideoAnalyzer : public PacketReceiver,
CriticalSectionScoped lock(crit_.get());
while (frames_.front()->timestamp() < send_timestamp) {
AddFrameComparison(
*frames_.front(), last_rendered_frame_, true, render_time_ms);
frames_.front(), &last_rendered_frame_, true, render_time_ms);
frame_pool_.push_back(frames_.front());
frames_.pop_front();
}
@ -212,7 +212,7 @@ class VideoAnalyzer : public PacketReceiver,
EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
assert(reference_frame->timestamp() == send_timestamp);
AddFrameComparison(*reference_frame, video_frame, false, render_time_ms);
AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
frame_pool_.push_back(reference_frame);
last_rendered_frame_.CopyFrame(video_frame);
@ -253,8 +253,8 @@ class VideoAnalyzer : public PacketReceiver,
FrameComparison()
: dropped(false), send_time_ms(0), recv_time_ms(0), render_time_ms(0) {}
FrameComparison(const I420VideoFrame& reference,
const I420VideoFrame& render,
FrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t send_time_ms,
int64_t recv_time_ms,
@ -263,8 +263,8 @@ class VideoAnalyzer : public PacketReceiver,
send_time_ms(send_time_ms),
recv_time_ms(recv_time_ms),
render_time_ms(render_time_ms) {
this->reference.CopyFrame(reference);
this->render.CopyFrame(render);
this->reference.CopyFrame(*reference);
this->render.CopyFrame(*render);
}
FrameComparison(const FrameComparison& compare)
@ -295,15 +295,15 @@ class VideoAnalyzer : public PacketReceiver,
int64_t render_time_ms;
};
void AddFrameComparison(const I420VideoFrame& reference,
const I420VideoFrame& render,
void AddFrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t render_time_ms)
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
int64_t send_time_ms = send_times_[reference.timestamp()];
send_times_.erase(reference.timestamp());
int64_t recv_time_ms = recv_times_[reference.timestamp()];
recv_times_.erase(reference.timestamp());
int64_t send_time_ms = send_times_[reference->timestamp()];
send_times_.erase(reference->timestamp());
int64_t recv_time_ms = recv_times_[reference->timestamp()];
recv_times_.erase(reference->timestamp());
CriticalSectionScoped crit(comparison_lock_.get());
comparisons_.push_back(FrameComparison(reference,
@ -405,8 +405,8 @@ class VideoAnalyzer : public PacketReceiver,
void PerformFrameComparison(const FrameComparison& comparison) {
// Perform expensive psnr and ssim calculations while not holding lock.
double psnr = I420PSNR(comparison.reference, comparison.render);
double ssim = I420SSIM(comparison.reference, comparison.render);
double psnr = I420PSNR(&comparison.reference, &comparison.render);
double ssim = I420SSIM(&comparison.reference, &comparison.render);
CriticalSectionScoped crit(comparison_lock_.get());
psnr_.AddSample(psnr);
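
The analyzer's pattern in miniature (stand-in types, not the real classes): frames are copied into the FrameComparison while the capture lock is held, the expensive PSNR/SSIM math runs with no lock held, and a second lock is taken only to record the samples.

#include <mutex>
#include <vector>

struct Frame { std::vector<unsigned char> planes; };

double StubPsnr(const Frame&, const Frame&) { return 48.0; }  // stand-in

class AnalyzerSketch {
 public:
  void OnRendered(const Frame& reference, const Frame& render) {
    Frame ref_copy, ren_copy;
    {
      std::lock_guard<std::mutex> lock(frames_lock_);
      ref_copy = reference;  // snapshot under the lock, like CopyFrame above
      ren_copy = render;
    }
    double psnr = StubPsnr(ref_copy, ren_copy);  // expensive work, lock-free
    std::lock_guard<std::mutex> lock(stats_lock_);
    psnr_samples_.push_back(psnr);
  }

 private:
  std::mutex frames_lock_;
  std::mutex stats_lock_;
  std::vector<double> psnr_samples_;
};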

View File

@ -28,7 +28,7 @@ class DecodedImageCallback {
public:
virtual ~DecodedImageCallback() {}
virtual int32_t Decoded(I420VideoFrame* decodedImage) = 0;
virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
return -1;
}

View File

@ -246,7 +246,7 @@ int32_t TbI420Decoder::Decode(
_decodedImage.set_timestamp(inputImage._timeStamp);
_decodeCompleteCallback->Decoded(&_decodedImage);
_decodeCompleteCallback->Decoded(_decodedImage);
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -339,21 +339,21 @@ void ViECapturer::SwapFrame(I420VideoFrame* frame) {
}
void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
I420VideoFrame* video_frame) {
I420VideoFrame& video_frame) {
CriticalSectionScoped cs(capture_cs_.get());
// Make sure we render this frame earlier, since we know the render time is
// slightly off: it is set when the frame is received from the camera, not
// when the camera actually captured the frame.
video_frame->set_render_time_ms(video_frame->render_time_ms() - FrameDelay());
video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
overuse_detector_->FrameCaptured(video_frame->width(),
video_frame->height(),
video_frame->render_time_ms());
overuse_detector_->FrameCaptured(video_frame.width(),
video_frame.height(),
video_frame.render_time_ms());
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame->render_time_ms(),
"render_time", video_frame->render_time_ms());
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
captured_frame_ = *video_frame;
captured_frame_ = video_frame;
capture_event_.Set();
}

View File

@ -123,7 +123,7 @@ class ViECapturer
// Implements VideoCaptureDataCallback.
virtual void OnIncomingCapturedFrame(const int32_t id,
I420VideoFrame* video_frame);
I420VideoFrame& video_frame);
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay);

View File

@ -91,7 +91,7 @@ class ViECapturerTest : public ::testing::Test {
}
void AddInputFrame(I420VideoFrame* frame) {
data_callback_->OnIncomingCapturedFrame(0, frame);
data_callback_->OnIncomingCapturedFrame(0, *frame);
}
void AddOutputFrame(const I420VideoFrame* frame) {

View File

@ -1620,7 +1620,7 @@ CallStatsObserver* ViEChannel::GetStatsObserver() {
// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
// the same lock in the path of decode callback can deadlock.
int32_t ViEChannel::FrameToRender(
I420VideoFrame* video_frame) { // NOLINT
I420VideoFrame& video_frame) { // NOLINT
CriticalSectionScoped cs(callback_cs_.get());
if (decoder_reset_) {
@ -1628,30 +1628,30 @@ int32_t ViEChannel::FrameToRender(
if (codec_observer_) {
// The codec set by RegisterReceiveCodec might not be the size we're
// actually decoding.
receive_codec_.width = static_cast<uint16_t>(video_frame->width());
receive_codec_.height = static_cast<uint16_t>(video_frame->height());
receive_codec_.width = static_cast<uint16_t>(video_frame.width());
receive_codec_.height = static_cast<uint16_t>(video_frame.height());
codec_observer_->IncomingCodecChanged(channel_id_, receive_codec_);
}
decoder_reset_ = false;
}
// Post processing is not supported if the frame is backed by a texture.
if (video_frame->native_handle() == NULL) {
if (video_frame.native_handle() == NULL) {
if (pre_render_callback_ != NULL)
pre_render_callback_->FrameCallback(video_frame);
pre_render_callback_->FrameCallback(&video_frame);
if (effect_filter_) {
size_t length =
CalcBufferSize(kI420, video_frame->width(), video_frame->height());
CalcBufferSize(kI420, video_frame.width(), video_frame.height());
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(*video_frame, length, video_buffer.get());
ExtractBuffer(video_frame, length, video_buffer.get());
effect_filter_->Transform(length,
video_buffer.get(),
video_frame->ntp_time_ms(),
video_frame->timestamp(),
video_frame->width(),
video_frame->height());
video_frame.ntp_time_ms(),
video_frame.timestamp(),
video_frame.width(),
video_frame.height());
}
if (color_enhancement_) {
VideoProcessingModule::ColorEnhancement(video_frame);
VideoProcessingModule::ColorEnhancement(&video_frame);
}
}
@ -1662,7 +1662,7 @@ int32_t ViEChannel::FrameToRender(
no_of_csrcs = 1;
}
std::vector<uint32_t> csrcs(arr_ofCSRC, arr_ofCSRC + no_of_csrcs);
DeliverFrame(video_frame, csrcs);
DeliverFrame(&video_frame, csrcs);
return 0;
}
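
On the buffer math in the effect-filter branch above: CalcBufferSize(kI420, w, h) is, as I read it, the packed I420 size, w*h luma bytes plus two chroma planes of ((w+1)/2) * ((h+1)/2) bytes each, i.e. 1.5 bytes per pixel for even dimensions (a 640x480 frame needs 460,800 bytes). ExtractBuffer flattens the three planes into that contiguous buffer before the effect filter's Transform sees them.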

View File

@ -313,7 +313,7 @@ class ViEChannel
CallStatsObserver* GetStatsObserver();
// Implements VCMReceiveCallback.
virtual int32_t FrameToRender(I420VideoFrame* video_frame); // NOLINT
virtual int32_t FrameToRender(I420VideoFrame& video_frame); // NOLINT
// Implements VCMReceiveCallback.
virtual int32_t ReceivedDecodedReferenceFrame(

View File

@ -124,7 +124,7 @@ int32_t ViERenderer::Init(const uint32_t z_order,
void ViERenderer::DeliverFrame(int id,
I420VideoFrame* video_frame,
const std::vector<uint32_t>& csrcs) {
render_callback_->RenderFrame(render_id_, video_frame);
render_callback_->RenderFrame(render_id_, *video_frame);
}
void ViERenderer::DelayChanged(int id, int frame_delay) {}
@ -156,15 +156,15 @@ int ViEExternalRendererImpl::SetViEExternalRenderer(
}
int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame) {
I420VideoFrame& video_frame) {
if (external_renderer_format_ != kVideoI420)
return ConvertAndRenderFrame(stream_id, video_frame);
// Fast path for I420 without frame copy.
NotifyFrameSizeChange(stream_id, video_frame);
if (video_frame->native_handle() == NULL ||
if (video_frame.native_handle() == NULL ||
external_renderer_->IsTextureSupported()) {
external_renderer_->DeliverI420Frame(*video_frame);
external_renderer_->DeliverI420Frame(video_frame);
} else {
// TODO(wuchengli): readback the pixels and deliver the frame.
}
@ -173,17 +173,17 @@ int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id,
int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
uint32_t stream_id,
I420VideoFrame* video_frame) {
if (video_frame->native_handle() != NULL) {
I420VideoFrame& video_frame) {
if (video_frame.native_handle() != NULL) {
NotifyFrameSizeChange(stream_id, video_frame);
if (external_renderer_->IsTextureSupported()) {
external_renderer_->DeliverFrame(NULL,
0,
video_frame->timestamp(),
video_frame->ntp_time_ms(),
video_frame->render_time_ms(),
video_frame->native_handle());
video_frame.timestamp(),
video_frame.ntp_time_ms(),
video_frame.render_time_ms(),
video_frame.native_handle());
} else {
// TODO(wuchengli): readback the pixels and deliver the frame.
}
@ -193,8 +193,8 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
// Convert to requested format.
VideoType type =
RawVideoTypeToCommonVideoVideoType(external_renderer_format_);
size_t buffer_size = CalcBufferSize(type, video_frame->width(),
video_frame->height());
size_t buffer_size = CalcBufferSize(type, video_frame.width(),
video_frame.height());
if (buffer_size == 0) {
// Unsupported video format.
assert(false);
@ -212,7 +212,7 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
case kVideoRGB565:
case kVideoARGB4444:
case kVideoARGB1555:
if (ConvertFromI420(*video_frame, type, 0, out_frame) < 0)
if (ConvertFromI420(video_frame, type, 0, out_frame) < 0)
return -1;
break;
case kVideoIYUV:
@ -229,9 +229,9 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
if (out_frame) {
external_renderer_->DeliverFrame(out_frame,
converted_frame_.size(),
video_frame->timestamp(),
video_frame->ntp_time_ms(),
video_frame->render_time_ms(),
video_frame.timestamp(),
video_frame.ntp_time_ms(),
video_frame.render_time_ms(),
NULL);
}
return 0;
@ -239,11 +239,11 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
void ViEExternalRendererImpl::NotifyFrameSizeChange(
const uint32_t stream_id,
I420VideoFrame* video_frame) {
if (external_renderer_width_ != video_frame->width() ||
external_renderer_height_ != video_frame->height()) {
external_renderer_width_ = video_frame->width();
external_renderer_height_ = video_frame->height();
I420VideoFrame& video_frame) {
if (external_renderer_width_ != video_frame.width() ||
external_renderer_height_ != video_frame.height()) {
external_renderer_width_ = video_frame.width();
external_renderer_height_ = video_frame.height();
external_renderer_->FrameSizeChange(
external_renderer_width_, external_renderer_height_, stream_id);
}

View File

@ -33,13 +33,13 @@ class ViEExternalRendererImpl : public VideoRenderCallback {
// Implements VideoRenderCallback.
virtual int32_t RenderFrame(const uint32_t stream_id,
I420VideoFrame* video_frame);
I420VideoFrame& video_frame);
private:
void NotifyFrameSizeChange(const uint32_t stream_id,
I420VideoFrame* video_frame);
I420VideoFrame& video_frame);
int32_t ConvertAndRenderFrame(uint32_t stream_id,
I420VideoFrame* video_frame);
I420VideoFrame& video_frame);
ExternalRenderer* external_renderer_;
RawVideoType external_renderer_format_;
int external_renderer_width_;