Updating ConvertFromI420 to use VideoFrame

Related calls to DeliverFrame were also updated, and some refactoring
(style-wise) was done on the way.

Review URL: https://webrtc-codereview.appspot.com/838006
git-svn-id: http://webrtc.googlecode.com/svn/trunk@2858 4adac7df-926f-26a2-2b94-8c16560cd09d
parent cc82cff82e
commit 1e033e1594
@@ -100,13 +100,11 @@ int ConvertToI420(VideoType src_video_type,
 // - src_stride      : Number of bytes in a row of the src Y plane.
 // - dst_video_type  : Type of output video.
 // - dst_sample_size : Required only for the parsing of MJPG.
-// - width           : Width in pixels.
-// - height          : Height in pixels.
 // - dst_frame       : Pointer to a destination frame.
 // Return value: 0 if OK, < 0 otherwise.
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+// It is assumed that source and destination have equal height.
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
                     VideoType dst_video_type, int dst_sample_size,
-                    int width, int height,
                     uint8_t* dst_frame);
 // ConvertFrom YV12.
 // Interface - same as above.
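The practical effect of this interface change: callers hand ConvertFromI420 a VideoFrame and drop the explicit width/height arguments, which are now read from the frame itself. A minimal call-site sketch (the frame, stride, and destination buffer names are illustrative, not taken from this diff):

    // Before: raw pointer plus explicit dimensions.
    // ConvertFromI420(buffer, stride, kARGB, 0, width, height, dst);

    // After: the dimensions travel with the frame; stride is still explicit.
    ConvertFromI420(frame, frame.Width(), kARGB, 0, dst);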
@@ -135,8 +135,12 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   double psnr = 0;
 
-  uint8_t* orig_buffer = new uint8_t[frame_length_];
-  EXPECT_GT(fread(orig_buffer, 1, frame_length_, source_file_), 0U);
+  VideoFrame orig_frame;
+  orig_frame.VerifyAndAllocate(frame_length_);
+  orig_frame.SetWidth(width_);
+  orig_frame.SetHeight(height_);
+  EXPECT_GT(fread(orig_frame.Buffer(), 1, frame_length_, source_file_), 0U);
+  orig_frame.SetLength(frame_length_);
 
   // printf("\nConvert #%d I420 <-> RGB24\n", j);
   uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3];
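Gathered in one place, the test's new frame setup follows a fixed order: allocate, set dimensions, fill the buffer, then record the payload length. A condensed sketch of that sequence, assuming the same test members as in the hunk above:

    VideoFrame frame;
    frame.VerifyAndAllocate(frame_length_);  // reserve frame_length_ bytes
    frame.SetWidth(width_);
    frame.SetHeight(height_);
    // ... read I420 data into frame.Buffer() ...
    frame.SetLength(frame_length_);          // mark how many bytes are valid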
@@ -144,8 +148,8 @@ TEST_F(TestLibYuv, ConvertTest) {
   res_i420_frame.VerifyAndAllocate(frame_length_);
   res_i420_frame.SetHeight(height_);
   res_i420_frame.SetWidth(width_);
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kRGB24, 0,
-                               width_, height_, res_rgb_buffer2));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kRGB24, 0,
+                               res_rgb_buffer2));
 
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -154,7 +158,8 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
   EXPECT_GT(ceil(psnr), 44);
   j++;
@@ -162,11 +167,12 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   // printf("\nConvert #%d I420 <-> UYVY\n", j);
   uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kUYVY, 0, width_, height_, out_uyvy_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kUYVY, 0, out_uyvy_buffer));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
   if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
              output_file) != static_cast<unsigned int>(frame_length_)) {
@@ -178,15 +184,15 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   // printf("\nConvert #%d I420 <-> I420 \n", j);
   uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_frame.Buffer(), 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
-  EXPECT_EQ(0, ConvertFromI420(res_i420_frame.Buffer(), width_, kI420, 0,
-                               width_, height_, out_i420_buffer));
+  EXPECT_EQ(0, ConvertFromI420(res_i420_frame, width_, kI420, 0,
+                               out_i420_buffer));
   if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, out_i420_buffer, width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), out_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
   j++;
   delete [] out_i420_buffer;
@@ -194,8 +200,8 @@ TEST_F(TestLibYuv, ConvertTest) {
   // printf("\nConvert #%d I420 <-> YV12\n", j);
   uint8_t* outYV120Buffer = new uint8_t[frame_length_];
 
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kYV12, 0,
-                               width_, height_, outYV120Buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kYV12, 0,
+                               outYV120Buffer));
   EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
                                kI420, 0,
                                width_, height_,
@@ -205,15 +211,16 @@ TEST_F(TestLibYuv, ConvertTest) {
     return;
   }
 
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
   j++;
   delete [] outYV120Buffer;
 
   // printf("\nConvert #%d I420 <-> YUY2\n", j);
   uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kYUY2, 0, width_, height_, out_yuy2_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kYUY2, 0, out_yuy2_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -222,13 +229,14 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
 
   // printf("\nConvert #%d I420 <-> RGB565\n", j);
   uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kRGB565, 0, width_, height_, out_rgb565_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kRGB565, 0, out_rgb565_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -237,15 +245,16 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
   // Another example is I420ToRGB24, the psnr is 44
   EXPECT_GT(ceil(psnr), 40);
 
   // printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kARGB, 0, width_, height_, out_argb8888_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kARGB, 0, out_argb8888_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -254,17 +263,18 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
   EXPECT_GT(ceil(psnr), 42);
 
   ASSERT_EQ(0, fclose(output_file));
 
   res_i420_frame.Free();
+  orig_frame.Free();
   delete [] out_argb8888_buffer;
   delete [] out_rgb565_buffer;
   delete [] out_yuy2_buffer;
-  delete [] orig_buffer;
 }
 
 // TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory
@@ -194,14 +194,15 @@ int ConvertToI420(VideoType src_video_type,
                              ConvertVideoType(src_video_type));
 }
 
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
                     VideoType dst_video_type, int dst_sample_size,
-                    int width, int height,
                     uint8_t* dst_frame) {
+  int height = src_frame.Height();
+  int width = src_frame.Width();
   int abs_height = (height < 0) ? -height : height;
   int half_width = (width + 1) >> 1;
   int half_height = (abs_height + 1) >> 1;
-  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_yplane = src_frame.Buffer();
   const uint8_t* src_uplane = src_yplane + width * abs_height;
   const uint8_t* src_vplane = src_uplane + half_width * half_height;
   return libyuv::ConvertFromI420(src_yplane, src_stride,
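The implementation now derives everything from the frame: dimensions come from Height()/Width(), and the three I420 planes are located by offset arithmetic inside the frame's single contiguous buffer, exactly as in the hunk above. A sketch of that layout math:

    // I420 packs Y, then U, then V into one buffer; chroma planes are
    // quarter size (half width * half height, rounded up).
    const uint8_t* y = src_frame.Buffer();
    const uint8_t* u = y + width * abs_height;
    const uint8_t* v = u + ((width + 1) / 2) * ((abs_height + 1) / 2);
    // Total payload: roughly width * abs_height * 3 / 2 bytes.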
@@ -459,8 +459,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
 
   if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
     const int conversionResult =
-      ConvertFromI420((unsigned char* )_bufferToRender.Buffer(), _bitmapWidth,
-                      kRGB565, 0, _bitmapWidth, _bitmapHeight, _directBuffer);
+      ConvertFromI420(_bufferToRender, _bitmapWidth,
+                      kRGB565, 0, _directBuffer);
 
   if (conversionResult < 0) {
     WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
@@ -44,19 +44,15 @@ VideoX11Channel::~VideoX11Channel()
 }
 
 WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
-                                           VideoFrame& videoFrame)
-{
-  CriticalSectionScoped cs(&_crit);
-  if (_width != (WebRtc_Word32) videoFrame.Width() || _height
-      != (WebRtc_Word32) videoFrame.Height())
-  {
-    if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-    {
-      return -1;
-    }
-  }
-  return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
-                      videoFrame.TimeStamp());
+                                           VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (_width != (WebRtc_Word32) videoFrame.Width() || _height
+      != (WebRtc_Word32) videoFrame.Height()) {
+    if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      return -1;
+    }
+  }
+  return DeliverFrame(videoFrame);
 }
 
 WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
@@ -76,33 +72,26 @@ WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
   return 0;
 }
 
-WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
-                                            WebRtc_Word32 bufferSize,
-                                            unsigned WebRtc_Word32 /*timeStamp90kHz*/)
-{
-  CriticalSectionScoped cs(&_crit);
-  if (!_prepared)
-  {
-    return 0;
-  }
-
-  if (!dispArray[_dispCount])
-  {
-    return -1;
-  }
-
-  unsigned char *pBuf = buffer;
-  // convert to RGB32, setting stride = width.
-  ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
-
-  // put image in window
-  XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
-               _height, True);
-
-  // very important for the image to update properly!
-  XSync(_display, False);
-  return 0;
-}
+WebRtc_Word32 VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (!_prepared) {
+    return 0;
+  }
+
+  if (!dispArray[_dispCount]) {
+    return -1;
+  }
+
+  // convert to RGB32, setting stride = width.
+  ConvertFromI420(videoFrame, _width, kARGB, 0, _buffer);
+
+  // Put image in window.
+  XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+               _height, True);
+
+  // Very important for the image to update properly!
+  XSync(_display, False);
+  return 0;
+}
 
 WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
@@ -38,8 +38,7 @@ public:
 
   WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
                                 WebRtc_Word32 numberOfStreams);
-  WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
-                             unsigned WebRtc_Word32 /*timeStamp90kHz*/);
+  WebRtc_Word32 DeliverFrame(const VideoFrame& videoFrame);
   WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
   WebRtc_Word32 Init(Window window, float left, float top, float right,
                      float bottom);
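This header change is the template for the rest of the CL: every platform channel (X11, AGL, NSOpenGL, D3D9) swaps the three-argument DeliverFrame for a single const reference to the frame. Schematically, as a sketch rather than any one file verbatim:

    // Old shape, repeated per renderer:
    // int DeliverFrame(unsigned char* buffer, int bufferSize,
    //                  unsigned int timeStamp90kHz);

    // New shape: buffer, length, and timestamp travel inside the frame.
    int DeliverFrame(const VideoFrame& videoFrame);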
@@ -80,21 +80,21 @@ VideoChannelAGL::~VideoChannelAGL()
     }
 }
 
-WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame)
-{
-    _owner->LockAGLCntx();
-    if(_width != videoFrame.Width() ||
-        _height != videoFrame.Height())
-    {
-        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-        { //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
-            _owner->UnlockAGLCntx();
-            return -1;
-        }
-    }
-
-    _owner->UnlockAGLCntx();
-    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId,
+                                           VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+  if (_width != videoFrame.Width() ||
+      _height != videoFrame.Height()) {
+    if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+
+  _owner->UnlockAGLCntx();
+  return DeliverFrame(videoFrame);
 }
 
 int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
@@ -220,63 +220,58 @@ int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
 }
 
 // Called from video engine when a new frame should be rendered.
-int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
-    _owner->LockAGLCntx();
-
-    if (_texture == 0)
-    {
-        _owner->UnlockAGLCntx();
-        return 0;
-    }
-
-    if (bufferSize != _incommingBufferSize)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    // Setting stride = width.
-    int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
-                                 _buffer);
-    if (rgbret < 0)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    aglSetCurrentContext(_aglContext);
-
-    // Put the new frame into the graphic card texture.
-    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
-    GLenum glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    // Copy buffer to texture
-    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
-                    0, // Level, not use
-                    0, // start point x, (low left of pic)
-                    0, // start point y,
-                    _width, // width
-                    _height, // height
-                    _pixelFormat, // pictue format for _buffer
-                    _pixelDataType, // data type of _buffer
-                    (const GLvoid*) _buffer); // the pixel data
-
-    if (glGetError() != GL_NO_ERROR)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
-    _owner->UnlockAGLCntx();
-
-    return 0;
-}
+int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (bufferSize != _incommingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Setting stride = width.
+  int rgbret = ConvertFromYV12(videoFrame.Buffer(), _width, kBGRA, 0, _width,
+                               _height, _buffer);
+  if (rgbret < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  aglSetCurrentContext(_aglContext);
+
+  // Put the new frame into the graphic card texture.
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Copy buffer to texture
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // Level, not use
+                  0, // start point x, (low left of pic)
+                  0, // start point y,
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // pictue format for _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  if (glGetError() != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+  _owner->UnlockAGLCntx();
+
+  return 0;
+}
 
 int VideoChannelAGL::RenderOffScreenBuffer()
@@ -45,7 +45,7 @@ public:
     VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
     virtual ~VideoChannelAGL();
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
    virtual int UpdateSize(int width, int height);
    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
@@ -44,7 +44,7 @@ public:
     virtual ~VideoChannelNSOpenGL();
 
     // A new frame is delivered
-    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
 
     // Called when the incomming frame size and/or number of streams in mix changes
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
@@ -91,25 +91,22 @@ WebRtc_Word32 VideoChannelNSOpenGL::GetChannelProperties(float& left,
     return 0;
 }
 
-WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
-{
-
-    _owner->LockAGLCntx();
-
-    if(_width != (int)videoFrame.Width() ||
-        _height != (int)videoFrame.Height())
-    {
-        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-        {
-            _owner->UnlockAGLCntx();
-            return -1;
-        }
-    }
-
-    int ret = DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
-
-    _owner->UnlockAGLCntx();
-    return ret;
+WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(
+    const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame) {
+
+  _owner->LockAGLCntx();
+
+  if(_width != (int)videoFrame.Width() ||
+     _height != (int)videoFrame.Height()) {
+    if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+  int ret = DeliverFrame(videoFrame);
+
+  _owner->UnlockAGLCntx();
+  return ret;
 }
 
 int VideoChannelNSOpenGL::UpdateSize(int width, int height)
@@ -156,7 +153,7 @@ int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStr
     }
 
     _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
-    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);
     _buffer = new unsigned char [_bufferSize];
     memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
 
@@ -211,66 +208,61 @@ int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStr
     return 0;
 }
 
-int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
-
-    _owner->LockAGLCntx();
-
-    if (_texture == 0)
-    {
-        _owner->UnlockAGLCntx();
-        return 0;
-    }
-
-    if (bufferSize != _incommingBufferSize)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    int rgbRet = ConvertFromYV12(buffer, _width,
-                                 kBGRA, 0, _width, _height,
-                                 _buffer);
-    if (rgbRet < 0)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    [_nsglContext makeCurrentContext];
-
-    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
-    GLenum glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glBindTexture", glErr);
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
-                    0, // Level, not use
-                    0, // start point x, (low left of pic)
-                    0, // start point y,
-                    _width, // width
-                    _height, // height
-                    _pixelFormat, // pictue format for _buffer
-                    _pixelDataType, // data type of _buffer
-                    (const GLvoid*) _buffer); // the pixel data
-
-    glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glTexSubImage2d", glErr);
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
-
-    _owner->UnlockAGLCntx();
-    return 0;
-}
+int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
+
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  int rgbRet = ConvertFromYV12(videoFrame.Buffer(), _width,
+                               kBGRA, 0, _width, _height, _buffer);
+  if (rgbRet < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  [_nsglContext makeCurrentContext];
+
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glBindTexture", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // Level, not use
+                  0, // start point x, (low left of pic)
+                  0, // start point y,
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // pictue format for _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glTexSubImage2d", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+
+  _owner->UnlockAGLCntx();
+  return 0;
+}
 
 int VideoChannelNSOpenGL::RenderOffScreenBuffer()
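With the raw bufferSize parameter gone, the NSOpenGL channel validates the incoming frame against the expected I420 size through VideoFrame::Length() instead. Reduced to its core (member names as in the hunk above), the guard is:

    // Drop frames whose payload doesn't match the expected I420 size.
    if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) {
      _owner->UnlockAGLCntx();
      return -1;
    }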
@@ -153,65 +153,55 @@ WebRtc_Word32 D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId,
             return -1;
         }
     }
-    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
-                        videoFrame.TimeStamp());
+    return DeliverFrame(videoFrame);
 }
 
 // Called from video engine when a new frame should be rendered.
-int D3D9Channel::DeliverFrame(unsigned char* buffer,
-                              int bufferSize,
-                              unsigned int timeStamp90kHz)
-{
-    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
-                 "DeliverFrame to D3D9Channel");
-
-    CriticalSectionScoped cs(_critSect);
-
-    //FIXME if _bufferIsUpdated is still true (not be renderred), do we what to update the texture?)
-    //probably not
-    if (_bufferIsUpdated)
-    {
-        WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
-                     "Last frame hasn't been rendered yet. Drop this frame.");
-        return -1;
-    }
-
-    if (!_pd3dDevice)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "D3D for rendering not initialized.");
-        return -1;
-    }
-
-    if (!_pTexture)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Texture for rendering not initialized.");
-        return -1;
-    }
-
-    D3DLOCKED_RECT lr;
-
-    if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0)))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Failed to lock a texture in D3D9 Channel.");
-        return -1;
-    }
-    UCHAR* pRect = (UCHAR*) lr.pBits;
-
-    ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
-
-    if (FAILED(_pTexture->UnlockRect(0)))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Failed to unlock a texture in D3D9 Channel.");
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
-
-    return 0;
-}
+int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+               "DeliverFrame to D3D9Channel");
+
+  CriticalSectionScoped cs(_critSect);
+
+  // FIXME if _bufferIsUpdated is still true (not be renderred), do we want to
+  // update the texture? probably not
+  if (_bufferIsUpdated) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Last frame hasn't been rendered yet. Drop this frame.");
+    return -1;
+  }
+
+  if (!_pd3dDevice) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "D3D for rendering not initialized.");
+    return -1;
+  }
+
+  if (!_pTexture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Texture for rendering not initialized.");
+    return -1;
+  }
+
+  D3DLOCKED_RECT lr;
+
+  if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to lock a texture in D3D9 Channel.");
+    return -1;
+  }
+  UCHAR* pRect = (UCHAR*) lr.pBits;
+
+  ConvertFromI420(videoFrame, _width, kARGB, 0, pRect);
+
+  if (FAILED(_pTexture->UnlockRect(0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to unlock a texture in D3D9 Channel.");
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+  return 0;
+}
 
 // Called by d3d channel owner to indicate the frame/texture has been rendered off
@@ -43,10 +43,8 @@ public:
     // Called when the incomming frame size and/or number of streams in mix changes
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
 
-    // A new frame is delivered
-    virtual int DeliverFrame(unsigned char* buffer,
-                             int bufferSize,
-                             unsigned int timeStame90kHz);
+    // A new frame is delivered.
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
     virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
                                       VideoFrame& videoFrame);
 
@@ -18,6 +18,7 @@
 
 #include "video_render.h"
 
+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "tb_interfaces.h"
 #include "tb_video_channel.h"
 #include "tb_capture_device.h"
@@ -57,15 +58,13 @@ public:
 
     virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
                              uint32_t time_stamp,
-                             int64_t render_time)
-    {
-        if (bufferSize != _width * _height * 3 / 2)
-        {
-            ViETest::Log("incorrect render buffer received, of length = %d\n",
-                         bufferSize);
-            return 0;
-        }
-        return 0;
+                             int64_t render_time) {
+      if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {
+        ViETest::Log("Incorrect render buffer received, of length = %d\n",
+                     bufferSize);
+        return 0;
+      }
+      return 0;
     }
 
 public:
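CalcBufferSize(webrtc::kI420, w, h) encapsulates the w * h * 3 / 2 arithmetic the check previously spelled out by hand. Worked through for CIF dimensions (illustrative numbers, not from this diff):

    // 352 * 288 = 101376 luma bytes, plus two 25344-byte chroma planes:
    // 101376 + 2 * 25344 = 152064 = 352 * 288 * 3 / 2.
    int expected = CalcBufferSize(webrtc::kI420, 352, 288);  // 152064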
@@ -42,7 +42,8 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
   int FrameSizeChange(unsigned int width, unsigned int height,
                       unsigned int number_of_streams);
 
-  int DeliverFrame(unsigned char* buffer, int buffer_size,
+  int DeliverFrame(unsigned char* buffer,
+                   int buffer_size,
                    uint32_t time_stamp,
                    int64_t render_time);
 
@@ -195,8 +195,7 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(
     case kVideoARGB4444:
     case kVideoARGB1555 :
     {
-      ConvertFromI420(video_frame.Buffer(), video_frame.Width(), type, 0,
-                      video_frame.Width(), video_frame.Height(),
+      ConvertFromI420(video_frame, video_frame.Width(), type, 0,
                       converted_frame_->Buffer());
     }
       break;