Revert "Changed argument occurences of const I420VideoFrame* to const I420VideoFrame& and non-const I420VideoFrame& to I420VideoFrame*."
This reverts commit r8731. Reason for revert: Breakes Chromium FYI bots. TBR=hbos, tommi Review URL: https://webrtc-codereview.appspot.com/40359004 Cr-Commit-Position: refs/heads/master@{#8733} git-svn-id: http://webrtc.googlecode.com/svn/trunk@8733 4adac7df-926f-26a2-2b94-8c16560cd09d
Parent: 93d9d6503e
Commit: 2056ee3e3c
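For context, the interface shape this revert restores can be sketched as follows. This is a hypothetical, condensed C++ illustration distilled from signatures visible in the diff below (e.g. DecodedImageCallback::Decoded and VideoRenderCallback::RenderFrame), not the actual WebRTC headers:

    #include <cstdint>

    class I420VideoFrame;  // stand-in; the real class carries plane buffers and timestamps

    // Post-revert (restored) shape: a mutable frame travels by non-const
    // reference, so implementers override Decoded(I420VideoFrame&) and call
    // sites write Decoded(frame) rather than Decoded(&frame).
    class DecodedImageCallback {
     public:
      virtual ~DecodedImageCallback() {}
      virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
    };

    // The reverted commit (r8731) had flipped the parameter to a pointer:
    //   virtual int32_t Decoded(I420VideoFrame* decodedImage) = 0;
    // A downstream override still written against the reference form would no
    // longer override the virtual, which is presumably what broke the
    // Chromium FYI bots.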
@@ -657,11 +657,11 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
       I420VideoFrame texture_image(
           &native_handle_, width, height, output_timestamp_, 0);
       texture_image.set_ntp_time_ms(output_ntp_time_ms_);
-      callback_status = callback_->Decoded(&texture_image);
+      callback_status = callback_->Decoded(texture_image);
     } else {
       decoded_image_.set_timestamp(output_timestamp_);
       decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
-      callback_status = callback_->Decoded(&decoded_image_);
+      callback_status = callback_->Decoded(decoded_image_);
     }
     if (callback_status > 0) {
       ALOGE("callback error");
@@ -115,7 +115,7 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
       return false;
     }
     if (callback_) {
-      callback_->OnIncomingCapturedFrame(id_, &sample);
+      callback_->OnIncomingCapturedFrame(id_, sample);
     }
     return true;
   }
@@ -44,7 +44,7 @@ class PassthroughStream: public webrtc::VideoRenderCallback {
   virtual ~PassthroughStream() {
   }
   virtual int32_t RenderFrame(const uint32_t stream_id,
-                              webrtc::I420VideoFrame* videoFrame) {
+                              webrtc::I420VideoFrame& videoFrame) {
     rtc::CritScope cs(&stream_critical_);
     // Send frame for rendering directly
     if (running_ && renderer_) {
@@ -44,7 +44,7 @@ class WebRtcPassthroughRenderTest : public testing::Test {
   }
 
   virtual int32_t RenderFrame(const uint32_t stream_id,
-                              webrtc::I420VideoFrame* videoFrame) {
+                              webrtc::I420VideoFrame& videoFrame) {
     ++frame_num_;
     LOG(INFO) << "RenderFrame stream_id: " << stream_id
               << " frame_num: " << frame_num_;
@@ -143,21 +143,21 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) {
   int test_frame_num = 10;
   // RenderFrame without starting the render
   for (int i = 0; i < test_frame_num; ++i) {
-    stream1->RenderFrame(stream_id1, &frame);
+    stream1->RenderFrame(stream_id1, frame);
   }
   EXPECT_EQ(0, renderer1.frame_num());
   // Start the render and test again.
   EXPECT_FALSE(StartRender(stream_id3));
   EXPECT_TRUE(StartRender(stream_id1));
   for (int i = 0; i < test_frame_num; ++i) {
-    stream1->RenderFrame(stream_id1, &frame);
+    stream1->RenderFrame(stream_id1, frame);
   }
   EXPECT_EQ(test_frame_num, renderer1.frame_num());
   // Stop the render and test again.
   EXPECT_FALSE(StopRender(stream_id3));
   EXPECT_TRUE(StopRender(stream_id1));
   for (int i = 0; i < test_frame_num; ++i) {
-    stream1->RenderFrame(stream_id1, &frame);
+    stream1->RenderFrame(stream_id1, frame);
   }
   // The frame number should not have changed.
   EXPECT_EQ(test_frame_num, renderer1.frame_num());
@@ -166,7 +166,7 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) {
   EXPECT_TRUE(StartRender(stream_id2));
   test_frame_num = 30;
   for (int i = 0; i < test_frame_num; ++i) {
-    stream2->RenderFrame(stream_id2, &frame);
+    stream2->RenderFrame(stream_id2, frame);
   }
   EXPECT_EQ(test_frame_num, renderer2.frame_num());
 }
@@ -354,7 +354,7 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(
 }
 
 void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
-                                                  webrtc::I420VideoFrame* sample) {
+                                                  webrtc::I420VideoFrame& sample) {
   // This would be a normal CritScope, except that it's possible that:
   // (1) whatever system component producing this frame has taken a lock, and
   // (2) Stop() probably calls back into that system component, which may take
@@ -371,12 +371,12 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
   // Log the size and pixel aspect ratio of the first captured frame.
   if (1 == captured_frames_) {
     LOG(LS_INFO) << "Captured frame size "
-                 << sample->width() << "x" << sample->height()
+                 << sample.width() << "x" << sample.height()
                  << ". Expected format " << GetCaptureFormat()->ToString();
   }
 
   if (start_thread_->IsCurrent()) {
-    SignalFrameCapturedOnStartThread(sample);
+    SignalFrameCapturedOnStartThread(&sample);
   } else {
     // This currently happens on with at least VideoCaptureModuleV4L2 and
     // possibly other implementations of WebRTC's VideoCaptureModule.
@@ -385,7 +385,7 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
     // thread hop.
     start_thread_->Invoke<void>(
         rtc::Bind(&WebRtcVideoCapturer::SignalFrameCapturedOnStartThread,
-                  this, sample));
+                  this, &sample));
   }
 }
 
@@ -81,7 +81,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
  private:
   // Callback when a frame is captured by camera.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame* frame);
+                                       webrtc::I420VideoFrame& frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
 
@@ -305,7 +305,7 @@ TEST(TestI420VideoFrame, TextureInitialValues) {
 TEST(TestI420VideoFrame, RefCount) {
   NativeHandleImpl handle;
   EXPECT_EQ(0, handle.ref_count());
-  I420VideoFrame* frame = new I420VideoFrame(&handle, 640, 480, 100, 200);
+  I420VideoFrame *frame = new I420VideoFrame(&handle, 640, 480, 100, 200);
   EXPECT_EQ(1, handle.ref_count());
   delete frame;
   EXPECT_EQ(0, handle.ref_count());
@@ -148,11 +148,11 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
 
 // Compute PSNR for an I420 frame (all planes).
 // Returns the PSNR in decibel, to a maximum of kInfinitePSNR.
-double I420PSNR(const I420VideoFrame& ref_frame,
-                const I420VideoFrame& test_frame);
+double I420PSNR(const I420VideoFrame* ref_frame,
+                const I420VideoFrame* test_frame);
 // Compute SSIM for an I420 frame (all planes).
-double I420SSIM(const I420VideoFrame& ref_frame,
-                const I420VideoFrame& test_frame);
+double I420SSIM(const I420VideoFrame* ref_frame,
+                const I420VideoFrame* test_frame);
 }
 
 #endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
@@ -38,16 +38,18 @@ int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) {
 }
 
 
-int PrintFrame(const I420VideoFrame& frame, const char* str) {
-  printf("%s %dx%d \n", str, frame.width(), frame.height());
+int PrintFrame(const I420VideoFrame* frame, const char* str) {
+  if (frame == NULL)
+    return -1;
+  printf("%s %dx%d \n", str, frame->width(), frame->height());
 
   int ret = 0;
   for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
     PlaneType plane_type = static_cast<PlaneType>(plane_num);
-    int width = (plane_num ? (frame.width() + 1) / 2 : frame.width());
-    int height = (plane_num ? (frame.height() + 1) / 2 : frame.height());
-    ret += PrintBuffer(frame.buffer(plane_type), width, height,
-                       frame.stride(plane_type));
+    int width = (plane_num ? (frame->width() + 1) / 2 : frame->width());
+    int height = (plane_num ? (frame->height() + 1) / 2 : frame->height());
+    ret += PrintBuffer(frame->buffer(plane_type), width, height,
+                       frame->stride(plane_type));
   }
   return ret;
 }
@@ -154,7 +156,7 @@ TEST_F(TestLibYuv, ConvertTest) {
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   j++;
 
@@ -174,7 +176,7 @@ TEST_F(TestLibYuv, ConvertTest) {
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
 
   // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
   EXPECT_GT(ceil(psnr), 44);
@@ -185,7 +187,7 @@ TEST_F(TestLibYuv, ConvertTest) {
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
                              height_, 0, kVideoRotation_0, &res_i420_frame));
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -210,7 +212,7 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   ConvertToI420(kI420, res_i420_buffer.get(), 0, 0, width_, height_, 0,
                 kVideoRotation_0, &res_i420_frame);
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   j++;
 
@@ -225,7 +227,7 @@ TEST_F(TestLibYuv, ConvertTest) {
     return;
   }
 
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   printf("\nConvert #%d I420 <-> RGB565\n", j);
   rtc::scoped_ptr<uint8_t[]> out_rgb565_buffer(
@@ -241,7 +243,7 @@ TEST_F(TestLibYuv, ConvertTest) {
   }
   j++;
 
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
   // Another example is I420ToRGB24, the psnr is 44
   // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@@ -260,7 +262,7 @@ TEST_F(TestLibYuv, ConvertTest) {
     return;
   }
 
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
   EXPECT_GT(ceil(psnr), 42);
 
@@ -291,7 +293,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(orig_frame_, res_i420_frame);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
 }
 
@@ -312,7 +312,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
                                required_size, kVideoRotation_0, &in_frame));
     EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height,
                                required_size, kVideoRotation_0, &out_frame));
-    double psnr = I420PSNR(in_frame, out_frame);
+    double psnr = I420PSNR(&in_frame, &out_frame);
     avg_psnr += psnr;
   }
   avg_psnr = avg_psnr / frame_count;
@@ -291,47 +291,57 @@ int ConvertFromYV12(const I420VideoFrame& src_frame,
 }
 
 // Compute PSNR for an I420 frame (all planes)
-double I420PSNR(const I420VideoFrame& ref_frame,
-                const I420VideoFrame& test_frame) {
-  if (ref_frame.width() != test_frame.width() ||
-      ref_frame.height() != test_frame.height())
+double I420PSNR(const I420VideoFrame* ref_frame,
+                const I420VideoFrame* test_frame) {
+  if (!ref_frame || !test_frame)
     return -1;
-  double psnr = libyuv::I420Psnr(ref_frame.buffer(kYPlane),
-                                 ref_frame.stride(kYPlane),
-                                 ref_frame.buffer(kUPlane),
-                                 ref_frame.stride(kUPlane),
-                                 ref_frame.buffer(kVPlane),
-                                 ref_frame.stride(kVPlane),
-                                 test_frame.buffer(kYPlane),
-                                 test_frame.stride(kYPlane),
-                                 test_frame.buffer(kUPlane),
-                                 test_frame.stride(kUPlane),
-                                 test_frame.buffer(kVPlane),
-                                 test_frame.stride(kVPlane),
-                                 test_frame.width(), test_frame.height());
+  else if ((ref_frame->width() != test_frame->width()) ||
+           (ref_frame->height() != test_frame->height()))
+    return -1;
+  else if (ref_frame->width() < 0 || ref_frame->height() < 0)
+    return -1;
+
+  double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane),
+                                 ref_frame->stride(kYPlane),
+                                 ref_frame->buffer(kUPlane),
+                                 ref_frame->stride(kUPlane),
+                                 ref_frame->buffer(kVPlane),
+                                 ref_frame->stride(kVPlane),
+                                 test_frame->buffer(kYPlane),
+                                 test_frame->stride(kYPlane),
+                                 test_frame->buffer(kUPlane),
+                                 test_frame->stride(kUPlane),
+                                 test_frame->buffer(kVPlane),
+                                 test_frame->stride(kVPlane),
+                                 test_frame->width(), test_frame->height());
   // LibYuv sets the max psnr value to 128, we restrict it here.
   // In case of 0 mse in one frame, 128 can skew the results significantly.
   return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
 }
 
 // Compute SSIM for an I420 frame (all planes)
-double I420SSIM(const I420VideoFrame& ref_frame,
-                const I420VideoFrame& test_frame) {
-  if (ref_frame.width() != test_frame.width() ||
-      ref_frame.height() != test_frame.height())
+double I420SSIM(const I420VideoFrame* ref_frame,
+                const I420VideoFrame* test_frame) {
+  if (!ref_frame || !test_frame)
    return -1;
-  return libyuv::I420Ssim(ref_frame.buffer(kYPlane),
-                          ref_frame.stride(kYPlane),
-                          ref_frame.buffer(kUPlane),
-                          ref_frame.stride(kUPlane),
-                          ref_frame.buffer(kVPlane),
-                          ref_frame.stride(kVPlane),
-                          test_frame.buffer(kYPlane),
-                          test_frame.stride(kYPlane),
-                          test_frame.buffer(kUPlane),
-                          test_frame.stride(kUPlane),
-                          test_frame.buffer(kVPlane),
-                          test_frame.stride(kVPlane),
-                          test_frame.width(), test_frame.height());
+  else if ((ref_frame->width() != test_frame->width()) ||
+           (ref_frame->height() != test_frame->height()))
+    return -1;
+  else if (ref_frame->width() < 0 || ref_frame->height() < 0)
+    return -1;
+
+  return libyuv::I420Ssim(ref_frame->buffer(kYPlane),
+                          ref_frame->stride(kYPlane),
+                          ref_frame->buffer(kUPlane),
+                          ref_frame->stride(kUPlane),
+                          ref_frame->buffer(kVPlane),
+                          ref_frame->stride(kVPlane),
+                          test_frame->buffer(kYPlane),
+                          test_frame->stride(kYPlane),
+                          test_frame->buffer(kUPlane),
+                          test_frame->stride(kUPlane),
+                          test_frame->buffer(kVPlane),
+                          test_frame->stride(kVPlane),
+                          test_frame->width(), test_frame->height());
 }
 } // namespace webrtc
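Usage note (a hedged sketch, not part of the diff): with the pointer-based I420PSNR/I420SSIM signatures restored above, call sites pass addresses and the functions reject null or mismatched frames themselves, as the test hunks elsewhere in this diff also show:

    #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

    // Hypothetical caller mirroring the restored pointer-based API; the header
    // path is inferred from the include guard seen earlier in this diff.
    double CheckPsnr(const webrtc::I420VideoFrame& ref,
                     const webrtc::I420VideoFrame& test) {
      // Post-revert form: pass pointers. Returns -1 for null frames or
      // mismatched/invalid dimensions, otherwise PSNR capped at kPerfectPSNR.
      return webrtc::I420PSNR(&ref, &test);
      // Pre-revert (r8731) reference form would have been: I420PSNR(ref, test);
    }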
@@ -93,12 +93,12 @@ public:
     virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const
     {return -1;}
 
-    virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/)
+    virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/)
     { return -1;}
 
     // Same as GetVideoFromFile(). videoFrame will have the resolution specified
     // by the width outWidth and height outHeight in pixels.
-    virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/,
+    virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/,
                                      const uint32_t /*outWidth*/,
                                      const uint32_t /*outHeight*/)
     {return -1;}
@@ -98,7 +98,7 @@ class VideoCaptureDataCallback
 {
 public:
     virtual void OnIncomingCapturedFrame(const int32_t id,
-                                         I420VideoFrame* videoFrame) = 0;
+                                         I420VideoFrame& videoFrame) = 0;
     virtual void OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) = 0;
 protected:
@@ -105,10 +105,10 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
   }
 
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame* videoFrame) {
+                                       webrtc::I420VideoFrame& videoFrame) {
     CriticalSectionScoped cs(capture_cs_.get());
-    int height = videoFrame->height();
-    int width = videoFrame->width();
+    int height = videoFrame.height();
+    int width = videoFrame.width();
 #if ANDROID
     // Android camera frames may be rotated depending on test device
     // orientation.
@@ -126,21 +126,21 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
 #endif
     // RenderTimstamp should be the time now.
     EXPECT_TRUE(
-        videoFrame->render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
-        videoFrame->render_time_ms() <= TickTime::MillisecondTimestamp());
+        videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
+        videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
 
-    if ((videoFrame->render_time_ms() >
+    if ((videoFrame.render_time_ms() >
             last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
             last_render_time_ms_ > 0) ||
-        (videoFrame->render_time_ms() <
+        (videoFrame.render_time_ms() <
             last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
             last_render_time_ms_ > 0)) {
       timing_warnings_++;
     }
 
     incoming_frames_++;
-    last_render_time_ms_ = videoFrame->render_time_ms();
-    last_frame_.CopyFrame(*videoFrame);
+    last_render_time_ms_ = videoFrame.render_time_ms();
+    last_frame_.CopyFrame(videoFrame);
   }
 
   virtual void OnCaptureDelayChanged(const int32_t id,
@@ -215,7 +215,7 @@ int32_t VideoCaptureImpl::CaptureDelay()
     return _setCaptureDelay;
 }
 
-int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame* captureFrame,
+int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
                                                int64_t capture_time) {
   UpdateFrameCount();  // frame count used for local frame rate callback.
 
@@ -227,16 +227,16 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame* captureFrame,
 
   // Set the capture time
   if (capture_time != 0) {
-    captureFrame->set_render_time_ms(capture_time - delta_ntp_internal_ms_);
+    captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_);
   } else {
-    captureFrame->set_render_time_ms(TickTime::MillisecondTimestamp());
+    captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
   }
 
-  if (captureFrame->render_time_ms() == last_capture_time_) {
+  if (captureFrame.render_time_ms() == last_capture_time_) {
     // We don't allow the same capture time for two frames, drop this one.
     return -1;
   }
-  last_capture_time_ = captureFrame->render_time_ms();
+  last_capture_time_ = captureFrame.render_time_ms();
 
   if (_dataCallBack) {
     if (callOnCaptureDelayChanged) {
@@ -322,7 +322,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
         _captureFrame.set_rotation(kVideoRotation_0);
       }
 
-      DeliverCapturedFrame(&_captureFrame, captureTime);
+      DeliverCapturedFrame(_captureFrame, captureTime);
     }
     else // Encoded format
     {
@@ -338,7 +338,7 @@ int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
 
     CriticalSectionScoped cs(&_apiCs);
     CriticalSectionScoped cs2(&_callBackCs);
-    DeliverCapturedFrame(video_frame, captureTime);
+    DeliverCapturedFrame(*video_frame, captureTime);
 
     return 0;
 }
@@ -107,7 +107,7 @@ public:
 protected:
     VideoCaptureImpl(const int32_t id);
     virtual ~VideoCaptureImpl();
-    int32_t DeliverCapturedFrame(I420VideoFrame* captureFrame,
+    int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame,
                                  int64_t capture_time);
 
     int32_t _id; // Module ID
@@ -215,7 +215,7 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
   }
   _decodedImage.set_timestamp(inputImage._timeStamp);
 
-  _decodeCompleteCallback->Decoded(&_decodedImage);
+  _decodeCompleteCallback->Decoded(_decodedImage);
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -49,7 +49,7 @@ class MockVideoEncoder : public VideoEncoder {
 class MockDecodedImageCallback : public DecodedImageCallback {
  public:
   MOCK_METHOD1(Decoded,
-               int32_t(I420VideoFrame* decodedImage));
+               int32_t(I420VideoFrame& decodedImage));
   MOCK_METHOD1(ReceivedDecodedReferenceFrame,
                int32_t(const uint64_t pictureId));
   MOCK_METHOD1(ReceivedDecodedFrame,
@@ -413,8 +413,8 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
 }
 int32_t
 VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
-    I420VideoFrame* image) {
-  video_processor_->FrameDecoded(*image);  // forward to parent class
+    I420VideoFrame& image) {
+  video_processor_->FrameDecoded(image);  // forward to parent class
   return 0;
 }
 
@@ -241,7 +241,7 @@ class VideoProcessorImpl : public VideoProcessor {
     explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
         : video_processor_(vp) {
     }
-    int32_t Decoded(webrtc::I420VideoFrame* image) override;
+    int32_t Decoded(webrtc::I420VideoFrame& image) override;
 
    private:
     VideoProcessorImpl* video_processor_;
@@ -124,16 +124,16 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
   Vp8TestDecodedImageCallback()
       : decoded_frames_(0) {
   }
-  virtual int32_t Decoded(I420VideoFrame* decoded_image) {
-    last_decoded_frame_.CopyFrame(*decoded_image);
-    for (int i = 0; i < decoded_image->width(); ++i) {
-      EXPECT_NEAR(kColorY, decoded_image->buffer(kYPlane)[i], 1);
+  virtual int32_t Decoded(I420VideoFrame& decoded_image) {
+    last_decoded_frame_.CopyFrame(decoded_image);
+    for (int i = 0; i < decoded_image.width(); ++i) {
+      EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
     }
 
     // TODO(mikhal): Verify the difference between U,V and the original.
-    for (int i = 0; i < ((decoded_image->width() + 1) / 2); ++i) {
-      EXPECT_NEAR(kColorU, decoded_image->buffer(kUPlane)[i], 4);
-      EXPECT_NEAR(kColorV, decoded_image->buffer(kVPlane)[i], 4);
+    for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
+      EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
+      EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4);
     }
     decoded_frames_++;
     return 0;
@@ -78,7 +78,7 @@ class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
  public:
   explicit Vp8UnitTestDecodeCompleteCallback(I420VideoFrame* frame)
       : decoded_frame_(frame), decode_complete(false) {}
-  int Decoded(webrtc::I420VideoFrame* frame);
+  int Decoded(webrtc::I420VideoFrame& frame);
   bool DecodeComplete();
 
  private:
@@ -94,8 +94,8 @@ bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
   return false;
 }
 
-int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame* image) {
-  decoded_frame_->CopyFrame(*image);
+int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) {
+  decoded_frame_->CopyFrame(image);
   decode_complete = true;
   return 0;
 }
@@ -227,7 +227,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
             decoder_->Decode(encoded_frame_, false, NULL));
   EXPECT_GT(WaitForDecodedFrame(), 0u);
   // Compute PSNR on all planes (faster than SSIM).
-  EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
   EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
   EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
 }
@@ -249,7 +249,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) {
   encoded_frame_._frameType = kKeyFrame;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame_, false, NULL));
-  EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
 }
 
 TEST_F(TestVp8Impl, TestReset) {
@@ -1343,7 +1343,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
                        img->stride[VPX_PLANE_V]);
   decoded_image_.set_timestamp(timestamp);
   decoded_image_.set_ntp_time_ms(ntp_time_ms);
-  int ret = decode_complete_callback_->Decoded(&decoded_image_);
+  int ret = decode_complete_callback_->Decoded(decoded_image_);
   if (ret != 0)
     return ret;
 
@@ -68,15 +68,15 @@ class Vp8SequenceCoderDecodeCallback : public webrtc::DecodedImageCallback {
  public:
   explicit Vp8SequenceCoderDecodeCallback(FILE* decoded_file)
       : decoded_file_(decoded_file) {}
-  int Decoded(webrtc::I420VideoFrame* frame);
+  int Decoded(webrtc::I420VideoFrame& frame);
   bool DecodeComplete();
 
  private:
   FILE* decoded_file_;
 };
 
-int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame* image) {
-  EXPECT_EQ(0, webrtc::PrintI420VideoFrame(*image, decoded_file_));
+int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame& image) {
+  EXPECT_EQ(0, webrtc::PrintI420VideoFrame(image, decoded_file_));
   return 0;
 }
 
@@ -480,7 +480,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
                        img->stride[VPX_PLANE_U],
                        img->stride[VPX_PLANE_V]);
   decoded_image_.set_timestamp(timestamp);
-  int ret = decode_complete_callback_->Decoded(&decoded_image_);
+  int ret = decode_complete_callback_->Decoded(decoded_image_);
   if (ret != 0)
     return ret;
   return WEBRTC_VIDEO_CODEC_OK;
@@ -81,7 +81,7 @@ class VCMPacketizationCallback {
 // Callback class used for passing decoded frames which are ready to be rendered.
 class VCMReceiveCallback {
  public:
-  virtual int32_t FrameToRender(I420VideoFrame* videoFrame) = 0;
+  virtual int32_t FrameToRender(I420VideoFrame& videoFrame) = 0;
   virtual int32_t ReceivedDecodedReferenceFrame(
       const uint64_t pictureId) {
     return -1;
@@ -46,7 +46,7 @@ VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback()
     return _receiveCallback;
 }
 
-int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
+int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
 {
     // TODO(holmer): We should improve this so that we can handle multiple
     // callbacks from one call to Decode().
@@ -55,7 +55,7 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
     {
         CriticalSectionScoped cs(_critSect);
         frameInfo = static_cast<VCMFrameInformation*>(
-            _timestampMap.Pop(decodedImage->timestamp()));
+            _timestampMap.Pop(decodedImage.timestamp()));
         callback = _receiveCallback;
     }
 
@@ -66,14 +66,14 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage)
     }
 
     _timing.StopDecodeTimer(
-        decodedImage->timestamp(),
+        decodedImage.timestamp(),
         frameInfo->decodeStartTimeMs,
         _clock->TimeInMilliseconds(),
         frameInfo->renderTimeMs);
 
     if (callback != NULL)
     {
-        decodedImage->set_render_time_ms(frameInfo->renderTimeMs);
+        decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
         callback->FrameToRender(decodedImage);
     }
     return WEBRTC_VIDEO_CODEC_OK;
@@ -39,7 +39,7 @@ public:
     void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
     VCMReceiveCallback* UserReceiveCallback();
 
-    virtual int32_t Decoded(I420VideoFrame* decodedImage);
+    virtual int32_t Decoded(I420VideoFrame& decodedImage);
    virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId);
    virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId);
 
@@ -147,13 +147,13 @@ VCMNTDecodeCompleteCallback::~VCMNTDecodeCompleteCallback()
     fclose(_decodedFile);
 }
 int32_t
-VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame* videoFrame)
+VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame)
 {
-    if (videoFrame->width() != _currentWidth ||
-        videoFrame->height() != _currentHeight)
+    if (videoFrame.width() != _currentWidth ||
+        videoFrame.height() != _currentHeight)
     {
-        _currentWidth = videoFrame->width();
-        _currentHeight = videoFrame->height();
+        _currentWidth = videoFrame.width();
+        _currentHeight = videoFrame.height();
         if (_decodedFile != NULL)
         {
             fclose(_decodedFile);
@@ -161,11 +161,11 @@ VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame* videoFrame)
         }
         _decodedFile = fopen(_outname.c_str(), "wb");
     }
-    if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
+    if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
         return -1;
     }
-    _decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame->width(),
-                                            videoFrame->height());
+    _decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame.width(),
+                                            videoFrame.height());
     return VCM_OK;
 }
 
@@ -68,7 +68,7 @@ public:
     void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
 
     // will write decoded frame into file
-    int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override;
+    int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override;
 
     size_t DecodedBytes();
 private:
@@ -237,7 +237,7 @@ QualityModesTest::Perform(const CmdArgs& args)
     _vcm->EnableFrameDropper(false);
 
     I420VideoFrame sourceFrame;
-    I420VideoFrame* decimatedFrame = NULL;
+    I420VideoFrame *decimatedFrame = NULL;
     uint8_t* tmpBuffer = new uint8_t[_lengthSourceFrame];
     double startTime = clock()/(double)CLOCKS_PER_SEC;
     _vcm->SetChannelParameters(static_cast<uint32_t>(1000 * _bitRate), 0, 0);
@@ -483,18 +483,18 @@ VCMQMDecodeCompleteCallback::~VCMQMDecodeCompleteCallback()
 }
 
 int32_t
-VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
+VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame)
 {
     ++frames_cnt_since_drop_;
 
     // When receiving the first coded frame the last_frame variable is not set
     if (last_frame_.IsZeroSize()) {
-        last_frame_.CopyFrame(*videoFrame);
+        last_frame_.CopyFrame(videoFrame);
     }
 
     // Check if there were frames skipped.
     int num_frames_skipped = static_cast<int>( 0.5f +
-        (videoFrame->timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) /
+        (videoFrame.timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) /
         (9e4 / frame_rate_));
 
     // If so...put the last frames into the encoded stream to make up for the
@@ -510,9 +510,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
     DataLog::InsertCell(
         feature_table_name_,"num frames since drop",frames_cnt_since_drop_);
 
-    if (_origWidth == videoFrame->width() && _origHeight == videoFrame->height())
+    if (_origWidth == videoFrame.width() && _origHeight == videoFrame.height())
     {
-        if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
+        if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
             return -1;
         }
         _frameCnt++;
@@ -531,9 +531,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
             return -1;
         }
 
-    _decodedBytes += CalcBufferSize(kI420, videoFrame->width(),
-                                    videoFrame->height());
-    videoFrame->SwapFrame(&last_frame_);
+    _decodedBytes += CalcBufferSize(kI420, videoFrame.width(),
+                                    videoFrame.height());
+    videoFrame.SwapFrame(&last_frame_);
     return VCM_OK;
 }
 
@@ -61,7 +61,7 @@ public:
     virtual ~VCMQMDecodeCompleteCallback();
     void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
     // will write decoded frame into file
-    int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame);
+    int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame);
     size_t DecodedBytes();
     void SetOriginalFrameDimensions(int32_t width, int32_t height);
     int32_t buildInterpolator();
@@ -185,13 +185,13 @@ VCMRTPEncodeCompleteCallback::EncodeComplete()
 // Decoded Frame Callback Implementation
 
 int32_t
-VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame)
+VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame)
 {
-    if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) {
+    if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
         return -1;
     }
-    _decodedBytes += CalcBufferSize(kI420, videoFrame->width(),
-                                    videoFrame->height());
+    _decodedBytes += CalcBufferSize(kI420, videoFrame.width(),
+                                    videoFrame.height());
     return VCM_OK;
 }
 
@@ -136,7 +136,7 @@ public:
         _decodedFile(decodedFile), _decodedBytes(0) {}
     virtual ~VCMDecodeCompleteCallback() {}
     // Write decoded frame into file
-    int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override;
+    int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override;
     size_t DecodedBytes();
 private:
     FILE* _decodedFile;
@@ -113,7 +113,7 @@ FileOutputFrameReceiver::~FileOutputFrameReceiver() {
 }
 
 int32_t FileOutputFrameReceiver::FrameToRender(
-    webrtc::I420VideoFrame* video_frame) {
+    webrtc::I420VideoFrame& video_frame) {
   if (timing_file_ == NULL) {
     std::string basename;
     std::string extension;
@@ -123,14 +123,14 @@ int32_t FileOutputFrameReceiver::FrameToRender(
       return -1;
     }
   }
-  if (out_file_ == NULL || video_frame->width() != width_ ||
-      video_frame->height() != height_) {
+  if (out_file_ == NULL || video_frame.width() != width_ ||
+      video_frame.height() != height_) {
     if (out_file_) {
       fclose(out_file_);
     }
-    printf("New size: %dx%d\n", video_frame->width(), video_frame->height());
-    width_ = video_frame->width();
-    height_ = video_frame->height();
+    printf("New size: %dx%d\n", video_frame.width(), video_frame.height());
+    width_ = video_frame.width();
+    height_ = video_frame.height();
     std::string filename_with_width_height = AppendWidthHeightCount(
         out_filename_, width_, height_, count_);
     ++count_;
@@ -139,9 +139,9 @@ int32_t FileOutputFrameReceiver::FrameToRender(
       return -1;
     }
   }
-  fprintf(timing_file_, "%u, %u\n", video_frame->timestamp(),
-          webrtc::MaskWord64ToUWord32(video_frame->render_time_ms()));
-  if (PrintI420VideoFrame(*video_frame, out_file_) < 0) {
+  fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(),
+          webrtc::MaskWord64ToUWord32(video_frame.render_time_ms()));
+  if (PrintI420VideoFrame(video_frame, out_file_) < 0) {
     return -1;
   }
   return 0;
@@ -87,7 +87,7 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
   virtual ~FileOutputFrameReceiver();
 
   // VCMReceiveCallback
-  virtual int32_t FrameToRender(webrtc::I420VideoFrame* video_frame);
+  virtual int32_t FrameToRender(webrtc::I420VideoFrame& video_frame);
 
  private:
  std::string out_filename_;
@@ -169,7 +169,7 @@ uint32_t VideoProcessingModuleImpl::DecimatedHeight() const {
 
 int32_t VideoProcessingModuleImpl::PreprocessFrame(
     const I420VideoFrame& frame,
-    I420VideoFrame** processed_frame) {
+    I420VideoFrame **processed_frame) {
   CriticalSectionScoped mutex(&mutex_);
   return frame_pre_processor_.PreprocessFrame(frame, processed_frame);
 }
@@ -130,7 +130,7 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize) {
 
   EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0,0,0));
 
-  I420VideoFrame* out_frame = NULL;
+  I420VideoFrame *out_frame = NULL;
   EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->PreprocessFrame(bad_frame, &out_frame));
 }
 
@@ -358,7 +358,7 @@ void TestSize(const I420VideoFrame& source_frame,
   WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
 
   // Compute PSNR against the cropped source frame and check expectation.
-  double psnr = I420PSNR(cropped_source_frame, *out_frame);
+  double psnr = I420PSNR(&cropped_source_frame, out_frame);
   EXPECT_GT(psnr, expected_psnr);
   printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
          "source which is scaled down/up to: %d %d, and back to source size \n",
@@ -383,10 +383,10 @@ int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
 
 int32_t AndroidNativeOpenGl2Channel::RenderFrame(
     const uint32_t /*streamId*/,
-    I420VideoFrame* videoFrame) {
+    I420VideoFrame& videoFrame) {
   //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
   _renderCritSect.Enter();
-  _bufferToRender.SwapFrame(videoFrame);
+  _bufferToRender.SwapFrame(&videoFrame);
   _renderCritSect.Leave();
   _renderer.ReDraw();
   return 0;
@@ -35,7 +35,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
   //Implement VideoRenderCallback
   virtual int32_t RenderFrame(
       const uint32_t streamId,
-      I420VideoFrame* videoFrame);
+      I420VideoFrame& videoFrame);
 
   //Implements AndroidStream
   virtual void DeliverFrame(JNIEnv* jniEnv);
@@ -412,10 +412,10 @@ int32_t AndroidSurfaceViewChannel::Init(
 
 int32_t AndroidSurfaceViewChannel::RenderFrame(
     const uint32_t /*streamId*/,
-    I420VideoFrame* videoFrame) {
+    I420VideoFrame& videoFrame) {
   // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
   _renderCritSect.Enter();
-  _bufferToRender.SwapFrame(videoFrame);
+  _bufferToRender.SwapFrame(&videoFrame);
   _renderCritSect.Leave();
   _renderer.ReDraw();
   return 0;
@@ -33,7 +33,7 @@ class AndroidSurfaceViewChannel : public AndroidStream {
 
   //Implement VideoRenderCallback
   virtual int32_t RenderFrame(const uint32_t streamId,
-                              I420VideoFrame* videoFrame);
+                              I420VideoFrame& videoFrame);
 
   //Implements AndroidStream
   virtual void DeliverFrame(JNIEnv* jniEnv);
@@ -188,8 +188,9 @@ int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
 }
 
 // VideoRenderCallback
-int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
-                                             I420VideoFrame* videoFrame)
+int32_t VideoRenderExternalImpl::RenderFrame(
+    const uint32_t streamId,
+    I420VideoFrame& videoFrame)
 {
     return 0;
 }
|
@ -115,7 +115,7 @@ public:
|
|||||||
|
|
||||||
// VideoRenderCallback
|
// VideoRenderCallback
|
||||||
virtual int32_t RenderFrame(const uint32_t streamId,
|
virtual int32_t RenderFrame(const uint32_t streamId,
|
||||||
I420VideoFrame* videoFrame);
|
I420VideoFrame& videoFrame);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
CriticalSectionWrapper& _critSect;
|
CriticalSectionWrapper& _critSect;
|
||||||
|
@@ -49,7 +49,7 @@ class VideoRenderCallback
 {
 public:
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame) = 0;
+                                I420VideoFrame& videoFrame) = 0;
 
 protected:
     virtual ~VideoRenderCallback()
@@ -85,11 +85,11 @@ VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
 }
 
 int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
-                                         I420VideoFrame* video_frame) {
+                                         I420VideoFrame& video_frame) {
   CriticalSectionScoped csS(&stream_critsect_);
   WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                "%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
-               video_frame->render_time_ms());
+               video_frame.render_time_ms());
 
   if (!running_) {
     WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
@@ -110,7 +110,7 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
 
   // Insert frame.
   CriticalSectionScoped csB(&buffer_critsect_);
-  if (render_buffers_.AddFrame(*video_frame) == 1)
+  if (render_buffers_.AddFrame(video_frame) == 1)
     deliver_buffer_event_.Set();
 
   return 0;
@@ -285,13 +285,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
     if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
       // We have not rendered anything and have a start image.
       temp_frame_.CopyFrame(start_image_);
-      render_callback_->RenderFrame(stream_id_, &temp_frame_);
+      render_callback_->RenderFrame(stream_id_, temp_frame_);
     } else if (!timeout_image_.IsZeroSize() &&
               last_render_time_ms_ + timeout_time_ <
                   TickTime::MillisecondTimestamp()) {
       // Render a timeout image.
      temp_frame_.CopyFrame(timeout_image_);
-      render_callback_->RenderFrame(stream_id_, &temp_frame_);
+      render_callback_->RenderFrame(stream_id_, temp_frame_);
     }
   }
 
@ -305,13 +305,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
|
|||||||
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
|
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
|
||||||
"%s: executing external renderer callback to deliver frame",
|
"%s: executing external renderer callback to deliver frame",
|
||||||
__FUNCTION__, frame_to_render.render_time_ms());
|
__FUNCTION__, frame_to_render.render_time_ms());
|
||||||
external_callback_->RenderFrame(stream_id_, &frame_to_render);
|
external_callback_->RenderFrame(stream_id_, frame_to_render);
|
||||||
} else {
|
} else {
|
||||||
if (render_callback_) {
|
if (render_callback_) {
|
||||||
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
|
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
|
||||||
"%s: Render frame, time: ", __FUNCTION__,
|
"%s: Render frame, time: ", __FUNCTION__,
|
||||||
frame_to_render.render_time_ms());
|
frame_to_render.render_time_ms());
|
||||||
render_callback_->RenderFrame(stream_id_, &frame_to_render);
|
render_callback_->RenderFrame(stream_id_, frame_to_render);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -31,7 +31,7 @@ class IncomingVideoStream : public VideoRenderCallback {
   // Get callback to deliver frames to the module.
   VideoRenderCallback* ModuleCallback();
   virtual int32_t RenderFrame(const uint32_t stream_id,
-                              I420VideoFrame* video_frame);
+                              I420VideoFrame& video_frame);
 
   // Set callback to the platform dependent code.
   int32_t SetRenderCallback(VideoRenderCallback* render_callback);
@@ -25,7 +25,7 @@ class VideoRenderIosChannel : public VideoRenderCallback {
 
   // Implementation of VideoRenderCallback.
   int32_t RenderFrame(const uint32_t stream_id,
-                      I420VideoFrame* video_frame) override;
+                      I420VideoFrame& video_frame) override;
 
   int SetStreamSettings(const float z_order,
                         const float left,
@@ -24,10 +24,10 @@ VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
 VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
 
 int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
-                                           I420VideoFrame* video_frame) {
-  video_frame->set_render_time_ms(0);
+                                           I420VideoFrame& video_frame) {
+  video_frame.set_render_time_ms(0);
 
-  current_frame_->CopyFrame(*video_frame);
+  current_frame_->CopyFrame(video_frame);
   buffer_is_updated_ = true;
 
   return 0;
@@ -44,14 +44,15 @@ VideoX11Channel::~VideoX11Channel()
 }
 
 int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
-                                     I420VideoFrame* videoFrame) {
+                                     I420VideoFrame& videoFrame) {
   CriticalSectionScoped cs(&_crit);
-  if (_width != videoFrame->width() || _height != videoFrame->height()) {
-    if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) {
+  if (_width != videoFrame.width() || _height
+      != videoFrame.height()) {
+    if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
      return -1;
    }
  }
-  return DeliverFrame(*videoFrame);
+  return DeliverFrame(videoFrame);
 }
 
 int32_t VideoX11Channel::FrameSizeChange(int32_t width,
@@ -34,7 +34,7 @@ public:
     virtual ~VideoX11Channel();
 
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame);
+                                I420VideoFrame& videoFrame);
 
     int32_t FrameSizeChange(int32_t width, int32_t height,
                             int32_t numberOfStreams);
@@ -81,7 +81,7 @@ VideoChannelAGL::~VideoChannelAGL()
 }
 
 int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
-                                     I420VideoFrame* videoFrame) {
+                                     I420VideoFrame& videoFrame) {
   _owner->LockAGLCntx();
   if (_width != videoFrame.width() ||
       _height != videoFrame.height()) {
@@ -94,7 +94,7 @@ int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
   }
 
   _owner->UnlockAGLCntx();
-  return DeliverFrame(*videoFrame);
+  return DeliverFrame(videoFrame);
 }
 
 int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
@@ -52,7 +52,7 @@ class VideoChannelAGL : public VideoRenderCallback {
     int IsUpdated(bool& isUpdated);
     virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame);
+                                I420VideoFrame& videoFrame);
 
 private:
 
@@ -66,7 +66,7 @@ public:
 
     // ********** new module functions ************ //
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame);
+                                I420VideoFrame& videoFrame);
 
     // ********** new module helper functions ***** //
     int ChangeContext(NSOpenGLContext *nsglContext);
@@ -90,17 +90,18 @@ int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
 }
 
 int32_t VideoChannelNSOpenGL::RenderFrame(
-    const uint32_t /*streamId*/, I420VideoFrame* videoFrame) {
+    const uint32_t /*streamId*/, I420VideoFrame& videoFrame) {
 
     _owner->LockAGLCntx();
 
-    if(_width != videoFrame->width() || _height != videoFrame->height()) {
-        if(FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) {
+    if(_width != videoFrame.width() ||
+       _height != videoFrame.height()) {
+        if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
             _owner->UnlockAGLCntx();
             return -1;
         }
     }
-    int ret = DeliverFrame(*videoFrame);
+    int ret = DeliverFrame(videoFrame);
 
     _owner->UnlockAGLCntx();
     return ret;
@@ -262,7 +262,7 @@ public:
     }
 ;
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame)
+                                I420VideoFrame& videoFrame)
     {
         _cnt++;
         if (_cnt % 100 == 0)
@@ -318,7 +318,7 @@ int TestSingleStream(VideoRender* renderModule) {
     // Render this frame with the specified delay
     videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
                                    + renderDelayMs);
-    renderCallback0->RenderFrame(streamId0, &videoFrame0);
+    renderCallback0->RenderFrame(streamId0, videoFrame0);
     SleepMs(1000/TEST_FRAME_RATE);
   }
 
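Note: on the caller side the revert is mechanical — frames that were passed as &videoFrame0 are now passed directly. A self-contained sketch of the test-loop shape above, with local stubs standing in for the webrtc TickTime/SleepMs wrappers and an assumed frame-rate constant:

    #include <chrono>
    #include <cstdint>
    #include <thread>

    static const int TEST_FRAME_RATE = 30;  // assumed; the real constant lives in the test

    struct I420VideoFrame {
      void set_render_time_ms(int64_t t) { render_time_ms_ = t; }
      int64_t render_time_ms_ = 0;
    };

    struct VideoRenderCallback {
      virtual int32_t RenderFrame(const uint32_t streamId,
                                  I420VideoFrame& videoFrame) = 0;
      virtual ~VideoRenderCallback() {}
    };

    // Stubs for the webrtc system wrappers (illustrative).
    static int64_t MillisecondTimestamp() {
      using namespace std::chrono;
      return duration_cast<milliseconds>(
                 steady_clock::now().time_since_epoch()).count();
    }
    static void SleepMs(int ms) {
      std::this_thread::sleep_for(std::chrono::milliseconds(ms));
    }

    void DriveRenderer(VideoRenderCallback* renderCallback0,
                       uint32_t streamId0, int renderDelayMs, int frameCount) {
      I420VideoFrame videoFrame0;
      for (int i = 0; i < frameCount; i++) {
        videoFrame0.set_render_time_ms(MillisecondTimestamp() + renderDelayMs);
        renderCallback0->RenderFrame(streamId0, videoFrame0);  // no '&' now
        SleepMs(1000 / TEST_FRAME_RATE);
      }
    }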
@@ -392,7 +392,7 @@ int TestBitmapText(VideoRender* renderModule) {
     // Render this frame with the specified delay
     videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback0->RenderFrame(streamId0, &videoFrame0);
+    renderCallback0->RenderFrame(streamId0, videoFrame0);
     SleepMs(1000/TEST_FRAME_RATE);
   }
   // Sleep and let all frames be rendered before closing
@@ -477,22 +477,22 @@ int TestMultipleStreams(VideoRender* renderModule) {
 
     videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback0->RenderFrame(streamId0, &videoFrame0);
+    renderCallback0->RenderFrame(streamId0, videoFrame0);
 
     GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
     videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback1->RenderFrame(streamId1, &videoFrame1);
+    renderCallback1->RenderFrame(streamId1, videoFrame1);
 
     GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
     videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback2->RenderFrame(streamId2, &videoFrame2);
+    renderCallback2->RenderFrame(streamId2, videoFrame2);
 
     GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
     videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback3->RenderFrame(streamId3, &videoFrame3);
+    renderCallback3->RenderFrame(streamId3, videoFrame3);
 
     SleepMs(1000/TEST_FRAME_RATE);
   }
@@ -550,7 +550,7 @@ int TestExternalRender(VideoRender* renderModule) {
   for (int i=0; i<frameCount; i++) {
     videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
                                    renderDelayMs);
-    renderCallback0->RenderFrame(streamId0, &videoFrame0);
+    renderCallback0->RenderFrame(streamId0, videoFrame0);
     SleepMs(33);
   }
 
@@ -143,17 +143,17 @@ int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
 }
 
 int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
-                                 I420VideoFrame* videoFrame)
+                                 I420VideoFrame& videoFrame)
 {
     CriticalSectionScoped cs(_critSect);
-    if (_width != videoFrame->width() || _height != videoFrame->height())
+    if (_width != videoFrame.width() || _height != videoFrame.height())
     {
-        if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1)
+        if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
         {
             return -1;
         }
     }
-    return DeliverFrame(*videoFrame);
+    return DeliverFrame(videoFrame);
 }
 
 // Called from video engine when a new frame should be rendered.
@@ -46,7 +46,7 @@ public:
     // A new frame is delivered.
     virtual int DeliverFrame(const I420VideoFrame& videoFrame);
     virtual int32_t RenderFrame(const uint32_t streamId,
-                                I420VideoFrame* videoFrame);
+                                I420VideoFrame& videoFrame);
 
     // Called to check if the video frame is updated.
     int IsUpdated(bool& isUpdated);
@@ -39,7 +39,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input,
   frame_.set_ntp_time_ms(input.ntp_time_ms_);
   frame_.set_render_time_ms(render_time_ms);
 
-  callback_->Decoded(&frame_);
+  callback_->Decoded(frame_);
 
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -34,8 +34,8 @@ enum VideoMetricsType { kPSNR, kSSIM, kBoth };
 
 // Calculates metrics for a frame and adds statistics to the result for it.
 void CalculateFrame(VideoMetricsType video_metrics_type,
-                    const I420VideoFrame& ref,
-                    const I420VideoFrame& test,
+                    const I420VideoFrame* ref,
+                    const I420VideoFrame* test,
                     int frame_number,
                     QualityMetricsResult* result) {
   FrameResult frame_result = {0, 0};
@@ -129,17 +129,17 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
                      kVideoRotation_0, &test_frame);
     switch (video_metrics_type) {
       case kPSNR:
-        CalculateFrame(kPSNR, ref_frame, test_frame, frame_number,
+        CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,
                        psnr_result);
         break;
       case kSSIM:
-        CalculateFrame(kSSIM, ref_frame, test_frame, frame_number,
+        CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number,
                        ssim_result);
         break;
       case kBoth:
-        CalculateFrame(kPSNR, ref_frame, test_frame, frame_number,
+        CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,
                        psnr_result);
-        CalculateFrame(kSSIM, ref_frame, test_frame, frame_number,
+        CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number,
                        ssim_result);
         break;
     }
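Note: for read-only frame arguments the revert runs in the opposite direction — const references go back to const pointers — so call sites reacquire the '&', as in the hunk above. A compilable sketch of that convention; the metric body is elided and every type here is a local stand-in:

    struct I420VideoFrame {
      int width = 0;
      int height = 0;
    };
    struct QualityMetricsResult { double average = 0.0; };
    enum VideoMetricsType { kPSNR, kSSIM, kBoth };

    // Read-only inputs as const pointers, output via pointer (illustrative).
    void CalculateFrame(VideoMetricsType type,
                        const I420VideoFrame* ref,
                        const I420VideoFrame* test,
                        int frame_number,
                        QualityMetricsResult* result) {
      (void)type; (void)frame_number;
      // The real code computes PSNR/SSIM over the pixel planes; elided here.
      result->average = (ref->width == test->width &&
                         ref->height == test->height) ? 1.0 : 0.0;
    }

    int main() {
      I420VideoFrame ref_frame, test_frame;
      QualityMetricsResult psnr_result;
      CalculateFrame(kPSNR, &ref_frame, &test_frame, 0, &psnr_result);
      return 0;
    }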
@@ -87,9 +87,9 @@ void VcmCapturer::Destroy() {
 VcmCapturer::~VcmCapturer() { Destroy(); }
 
 void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
-                                          I420VideoFrame* frame) {
+                                          I420VideoFrame& frame) {
   if (started_)
-    input_->SwapFrame(frame);
+    input_->SwapFrame(&frame);
 }
 
 void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {
@@ -28,7 +28,7 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
   void Stop() override;
 
   void OnIncomingCapturedFrame(const int32_t id,
-                               I420VideoFrame* frame) override;  // NOLINT
+                               I420VideoFrame& frame) override;  // NOLINT
   void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
 
  private:
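Note: VideoCaptureDataCallback now hands the captured frame over by non-const reference; where a downstream helper such as SwapFrame still takes a pointer, the implementation simply takes the frame's address, as VcmCapturer does above. A minimal sketch with local stand-ins (SketchCapturer and FrameInput are hypothetical names, not tree code):

    #include <cstdint>

    class I420VideoFrame { /* planes elided */ };

    // Downstream input that still takes a pointer (illustrative).
    class FrameInput {
     public:
      virtual void SwapFrame(I420VideoFrame* frame) = 0;
      virtual ~FrameInput() {}
    };

    class VideoCaptureDataCallback {
     public:
      virtual void OnIncomingCapturedFrame(const int32_t id,
                                           I420VideoFrame& frame) = 0;
      virtual void OnCaptureDelayChanged(const int32_t id,
                                         const int32_t delay) = 0;
     protected:
      virtual ~VideoCaptureDataCallback() {}
    };

    // Hypothetical capturer mirroring the VcmCapturer pattern above.
    class SketchCapturer : public VideoCaptureDataCallback {
     public:
      explicit SketchCapturer(FrameInput* input) : input_(input) {}
      virtual void OnIncomingCapturedFrame(const int32_t /*id*/,
                                           I420VideoFrame& frame) {
        if (started_)
          input_->SwapFrame(&frame);  // reference back to pointer at the seam
      }
      virtual void OnCaptureDelayChanged(const int32_t /*id*/,
                                         const int32_t /*delay*/) {}
     private:
      FrameInput* input_;
      bool started_ = true;
    };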
@@ -201,7 +201,7 @@ class VideoAnalyzer : public PacketReceiver,
     CriticalSectionScoped lock(crit_.get());
     while (frames_.front()->timestamp() < send_timestamp) {
       AddFrameComparison(
-          *frames_.front(), last_rendered_frame_, true, render_time_ms);
+          frames_.front(), &last_rendered_frame_, true, render_time_ms);
       frame_pool_.push_back(frames_.front());
       frames_.pop_front();
     }
@@ -212,7 +212,7 @@ class VideoAnalyzer : public PacketReceiver,
     EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
     assert(reference_frame->timestamp() == send_timestamp);
 
-    AddFrameComparison(*reference_frame, video_frame, false, render_time_ms);
+    AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
     frame_pool_.push_back(reference_frame);
 
     last_rendered_frame_.CopyFrame(video_frame);
@@ -253,8 +253,8 @@ class VideoAnalyzer : public PacketReceiver,
     FrameComparison()
         : dropped(false), send_time_ms(0), recv_time_ms(0), render_time_ms(0) {}
 
-    FrameComparison(const I420VideoFrame& reference,
-                    const I420VideoFrame& render,
+    FrameComparison(const I420VideoFrame* reference,
+                    const I420VideoFrame* render,
                     bool dropped,
                     int64_t send_time_ms,
                     int64_t recv_time_ms,
@@ -263,8 +263,8 @@ class VideoAnalyzer : public PacketReceiver,
           send_time_ms(send_time_ms),
           recv_time_ms(recv_time_ms),
           render_time_ms(render_time_ms) {
-      this->reference.CopyFrame(reference);
-      this->render.CopyFrame(render);
+      this->reference.CopyFrame(*reference);
+      this->render.CopyFrame(*render);
     }
 
     FrameComparison(const FrameComparison& compare)
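Note: when a struct stores long-lived copies of frames received through const pointers, the pointers are dereferenced for CopyFrame, as in the constructor above; the comparison then owns its own pixel data and can outlive the originals. A compact sketch with a stand-in frame type (illustrative only):

    // Stand-in with a CopyFrame like webrtc::I420VideoFrame's.
    struct I420VideoFrame {
      int width = 0;
      int height = 0;
      int CopyFrame(const I420VideoFrame& src) { *this = src; return 0; }
    };

    struct FrameComparison {
      FrameComparison(const I420VideoFrame* reference_frame,
                      const I420VideoFrame* render_frame) {
        reference.CopyFrame(*reference_frame);  // deep copy via dereference
        render.CopyFrame(*render_frame);
      }
      I420VideoFrame reference;
      I420VideoFrame render;
    };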
@@ -295,15 +295,15 @@ class VideoAnalyzer : public PacketReceiver,
     int64_t render_time_ms;
   };
 
-  void AddFrameComparison(const I420VideoFrame& reference,
-                          const I420VideoFrame& render,
+  void AddFrameComparison(const I420VideoFrame* reference,
+                          const I420VideoFrame* render,
                           bool dropped,
                           int64_t render_time_ms)
       EXCLUSIVE_LOCKS_REQUIRED(crit_) {
-    int64_t send_time_ms = send_times_[reference.timestamp()];
-    send_times_.erase(reference.timestamp());
-    int64_t recv_time_ms = recv_times_[reference.timestamp()];
-    recv_times_.erase(reference.timestamp());
+    int64_t send_time_ms = send_times_[reference->timestamp()];
+    send_times_.erase(reference->timestamp());
+    int64_t recv_time_ms = recv_times_[reference->timestamp()];
+    recv_times_.erase(reference->timestamp());
 
     CriticalSectionScoped crit(comparison_lock_.get());
     comparisons_.push_back(FrameComparison(reference,
@@ -405,8 +405,8 @@ class VideoAnalyzer : public PacketReceiver,
 
   void PerformFrameComparison(const FrameComparison& comparison) {
     // Perform expensive psnr and ssim calculations while not holding lock.
-    double psnr = I420PSNR(comparison.reference, comparison.render);
-    double ssim = I420SSIM(comparison.reference, comparison.render);
+    double psnr = I420PSNR(&comparison.reference, &comparison.render);
+    double ssim = I420SSIM(&comparison.reference, &comparison.render);
 
     CriticalSectionScoped crit(comparison_lock_.get());
     psnr_.AddSample(psnr);
@@ -28,7 +28,7 @@ class DecodedImageCallback {
  public:
   virtual ~DecodedImageCallback() {}
 
-  virtual int32_t Decoded(I420VideoFrame* decodedImage) = 0;
+  virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
   virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
     return -1;
   }
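Note: with this revert a decoder's completion callback receives the decoded frame by non-const reference, so decoders call callback->Decoded(frame_) rather than callback->Decoded(&frame_). A minimal conforming sink; all types are local stand-ins and CountingSink is hypothetical:

    #include <cstdint>

    class I420VideoFrame {
     public:
      uint32_t timestamp() const { return timestamp_; }
      void set_timestamp(uint32_t ts) { timestamp_ = ts; }
     private:
      uint32_t timestamp_ = 0;
    };

    // The callback as it reads after the revert (see hunk above).
    class DecodedImageCallback {
     public:
      virtual ~DecodedImageCallback() {}
      virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
      virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t /*pictureId*/) {
        return -1;
      }
    };

    // Hypothetical sink that counts delivered frames.
    class CountingSink : public DecodedImageCallback {
     public:
      virtual int32_t Decoded(I420VideoFrame& decodedImage) {
        last_timestamp_ = decodedImage.timestamp();
        ++frames_;
        return 0;
      }
      int frames_ = 0;
      uint32_t last_timestamp_ = 0;
    };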
@@ -246,7 +246,7 @@ int32_t TbI420Decoder::Decode(
 
   _decodedImage.set_timestamp(inputImage._timeStamp);
 
-  _decodeCompleteCallback->Decoded(&_decodedImage);
+  _decodeCompleteCallback->Decoded(_decodedImage);
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -339,21 +339,21 @@ void ViECapturer::SwapFrame(I420VideoFrame* frame) {
 }
 
 void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
-                                          I420VideoFrame* video_frame) {
+                                          I420VideoFrame& video_frame) {
   CriticalSectionScoped cs(capture_cs_.get());
   // Make sure we render this frame earlier since we know the render time set
   // is slightly off since it's being set when the frame has been received from
   // the camera, and not when the camera actually captured the frame.
-  video_frame->set_render_time_ms(video_frame->render_time_ms() - FrameDelay());
+  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
 
-  overuse_detector_->FrameCaptured(video_frame->width(),
-                                   video_frame->height(),
-                                   video_frame->render_time_ms());
+  overuse_detector_->FrameCaptured(video_frame.width(),
+                                   video_frame.height(),
+                                   video_frame.render_time_ms());
 
-  TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame->render_time_ms(),
-                           "render_time", video_frame->render_time_ms());
+  TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
+                           "render_time", video_frame.render_time_ms());
 
-  captured_frame_ = *video_frame;
+  captured_frame_ = video_frame;
   capture_event_.Set();
 }
 
@@ -123,7 +123,7 @@ class ViECapturer
 
   // Implements VideoCaptureDataCallback.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       I420VideoFrame* video_frame);
+                                       I420VideoFrame& video_frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
 
@@ -91,7 +91,7 @@ class ViECapturerTest : public ::testing::Test {
   }
 
   void AddInputFrame(I420VideoFrame* frame) {
-    data_callback_->OnIncomingCapturedFrame(0, frame);
+    data_callback_->OnIncomingCapturedFrame(0, *frame);
   }
 
   void AddOutputFrame(const I420VideoFrame* frame) {
@@ -1620,7 +1620,7 @@ CallStatsObserver* ViEChannel::GetStatsObserver() {
 // held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
 // the same lock in the path of decode callback can deadlock.
 int32_t ViEChannel::FrameToRender(
-    I420VideoFrame* video_frame) {  // NOLINT
+    I420VideoFrame& video_frame) {  // NOLINT
   CriticalSectionScoped cs(callback_cs_.get());
 
   if (decoder_reset_) {
@@ -1628,30 +1628,30 @@ int32_t ViEChannel::FrameToRender(
     if (codec_observer_) {
       // The codec set by RegisterReceiveCodec might not be the size we're
       // actually decoding.
-      receive_codec_.width = static_cast<uint16_t>(video_frame->width());
-      receive_codec_.height = static_cast<uint16_t>(video_frame->height());
+      receive_codec_.width = static_cast<uint16_t>(video_frame.width());
+      receive_codec_.height = static_cast<uint16_t>(video_frame.height());
       codec_observer_->IncomingCodecChanged(channel_id_, receive_codec_);
     }
     decoder_reset_ = false;
   }
   // Post processing is not supported if the frame is backed by a texture.
-  if (video_frame->native_handle() == NULL) {
+  if (video_frame.native_handle() == NULL) {
     if (pre_render_callback_ != NULL)
-      pre_render_callback_->FrameCallback(video_frame);
+      pre_render_callback_->FrameCallback(&video_frame);
     if (effect_filter_) {
       size_t length =
-          CalcBufferSize(kI420, video_frame->width(), video_frame->height());
+          CalcBufferSize(kI420, video_frame.width(), video_frame.height());
       rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
-      ExtractBuffer(*video_frame, length, video_buffer.get());
+      ExtractBuffer(video_frame, length, video_buffer.get());
       effect_filter_->Transform(length,
                                 video_buffer.get(),
-                                video_frame->ntp_time_ms(),
-                                video_frame->timestamp(),
-                                video_frame->width(),
-                                video_frame->height());
+                                video_frame.ntp_time_ms(),
+                                video_frame.timestamp(),
+                                video_frame.width(),
+                                video_frame.height());
     }
     if (color_enhancement_) {
-      VideoProcessingModule::ColorEnhancement(video_frame);
+      VideoProcessingModule::ColorEnhancement(&video_frame);
     }
   }
 
@@ -1662,7 +1662,7 @@ int32_t ViEChannel::FrameToRender(
     no_of_csrcs = 1;
   }
   std::vector<uint32_t> csrcs(arr_ofCSRC, arr_ofCSRC + no_of_csrcs);
-  DeliverFrame(video_frame, csrcs);
+  DeliverFrame(&video_frame, csrcs);
 
   return 0;
 }
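Note: ViEChannel::FrameToRender above shows the seam pattern of this revert: the frame arrives by reference, while older helpers (FrameCallback, ColorEnhancement, DeliverFrame) still take pointers, so '&video_frame' appears exactly where those helpers are called. A self-contained sketch of that shape; every name below is a local stand-in, not a real ViE type:

    #include <cstdint>

    struct I420VideoFrame {
      void* native_handle() const { return handle_; }
      void* handle_ = nullptr;
    };

    struct PreRenderCallback {
      void FrameCallback(I420VideoFrame* frame) { (void)frame; }
    };

    class ChannelSketch {
     public:
      int32_t FrameToRender(I420VideoFrame& video_frame) {
        // Post processing only for CPU-backed frames, as in the hunk above.
        if (video_frame.native_handle() == nullptr &&
            pre_render_callback_ != nullptr) {
          pre_render_callback_->FrameCallback(&video_frame);  // ref -> pointer
        }
        return 0;
      }
      PreRenderCallback* pre_render_callback_ = nullptr;
    };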
@@ -313,7 +313,7 @@ class ViEChannel
   CallStatsObserver* GetStatsObserver();
 
   // Implements VCMReceiveCallback.
-  virtual int32_t FrameToRender(I420VideoFrame* video_frame);  // NOLINT
+  virtual int32_t FrameToRender(I420VideoFrame& video_frame);  // NOLINT
 
   // Implements VCMReceiveCallback.
   virtual int32_t ReceivedDecodedReferenceFrame(
@@ -124,7 +124,7 @@ int32_t ViERenderer::Init(const uint32_t z_order,
 void ViERenderer::DeliverFrame(int id,
                                I420VideoFrame* video_frame,
                                const std::vector<uint32_t>& csrcs) {
-  render_callback_->RenderFrame(render_id_, video_frame);
+  render_callback_->RenderFrame(render_id_, *video_frame);
 }
 
 void ViERenderer::DelayChanged(int id, int frame_delay) {}
@@ -156,15 +156,15 @@ int ViEExternalRendererImpl::SetViEExternalRenderer(
 }
 
 int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id,
-                                             I420VideoFrame* video_frame) {
+                                             I420VideoFrame& video_frame) {
   if (external_renderer_format_ != kVideoI420)
     return ConvertAndRenderFrame(stream_id, video_frame);
 
   // Fast path for I420 without frame copy.
   NotifyFrameSizeChange(stream_id, video_frame);
-  if (video_frame->native_handle() == NULL ||
+  if (video_frame.native_handle() == NULL ||
       external_renderer_->IsTextureSupported()) {
-    external_renderer_->DeliverI420Frame(*video_frame);
+    external_renderer_->DeliverI420Frame(video_frame);
   } else {
     // TODO(wuchengli): readback the pixels and deliver the frame.
   }
@@ -173,17 +173,17 @@ int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id,
 
 int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
     uint32_t stream_id,
-    I420VideoFrame* video_frame) {
-  if (video_frame->native_handle() != NULL) {
+    I420VideoFrame& video_frame) {
+  if (video_frame.native_handle() != NULL) {
     NotifyFrameSizeChange(stream_id, video_frame);
 
     if (external_renderer_->IsTextureSupported()) {
       external_renderer_->DeliverFrame(NULL,
                                        0,
-                                       video_frame->timestamp(),
-                                       video_frame->ntp_time_ms(),
-                                       video_frame->render_time_ms(),
-                                       video_frame->native_handle());
+                                       video_frame.timestamp(),
+                                       video_frame.ntp_time_ms(),
+                                       video_frame.render_time_ms(),
+                                       video_frame.native_handle());
     } else {
       // TODO(wuchengli): readback the pixels and deliver the frame.
     }
@@ -193,8 +193,8 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
   // Convert to requested format.
   VideoType type =
       RawVideoTypeToCommonVideoVideoType(external_renderer_format_);
-  size_t buffer_size = CalcBufferSize(type, video_frame->width(),
-                                      video_frame->height());
+  size_t buffer_size = CalcBufferSize(type, video_frame.width(),
+                                      video_frame.height());
   if (buffer_size == 0) {
     // Unsupported video format.
     assert(false);
@@ -212,7 +212,7 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
     case kVideoRGB565:
     case kVideoARGB4444:
     case kVideoARGB1555:
-      if (ConvertFromI420(*video_frame, type, 0, out_frame) < 0)
+      if (ConvertFromI420(video_frame, type, 0, out_frame) < 0)
         return -1;
       break;
     case kVideoIYUV:
@@ -229,9 +229,9 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
   if (out_frame) {
     external_renderer_->DeliverFrame(out_frame,
                                      converted_frame_.size(),
-                                     video_frame->timestamp(),
-                                     video_frame->ntp_time_ms(),
-                                     video_frame->render_time_ms(),
+                                     video_frame.timestamp(),
+                                     video_frame.ntp_time_ms(),
+                                     video_frame.render_time_ms(),
                                      NULL);
   }
   return 0;
@@ -239,11 +239,11 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
 
 void ViEExternalRendererImpl::NotifyFrameSizeChange(
     const uint32_t stream_id,
-    I420VideoFrame* video_frame) {
-  if (external_renderer_width_ != video_frame->width() ||
-      external_renderer_height_ != video_frame->height()) {
-    external_renderer_width_ = video_frame->width();
-    external_renderer_height_ = video_frame->height();
+    I420VideoFrame& video_frame) {
+  if (external_renderer_width_ != video_frame.width() ||
+      external_renderer_height_ != video_frame.height()) {
+    external_renderer_width_ = video_frame.width();
+    external_renderer_height_ = video_frame.height();
     external_renderer_->FrameSizeChange(
         external_renderer_width_, external_renderer_height_, stream_id);
   }
@@ -33,13 +33,13 @@ class ViEExternalRendererImpl : public VideoRenderCallback {
 
   // Implements VideoRenderCallback.
   virtual int32_t RenderFrame(const uint32_t stream_id,
-                              I420VideoFrame* video_frame);
+                              I420VideoFrame& video_frame);
 
  private:
   void NotifyFrameSizeChange(const uint32_t stream_id,
-                             I420VideoFrame* video_frame);
+                             I420VideoFrame& video_frame);
   int32_t ConvertAndRenderFrame(uint32_t stream_id,
-                                I420VideoFrame* video_frame);
+                                I420VideoFrame& video_frame);
   ExternalRenderer* external_renderer_;
   RawVideoType external_renderer_format_;
   int external_renderer_width_;