Updating ConvertFromI420 to use VideoFrame. Related calls to DeliverFrame were also updated, and some style refactoring was done along the way.

Review URL: https://webrtc-codereview.appspot.com/838006
git-svn-id: http://webrtc.googlecode.com/svn/trunk@2858 4adac7df-926f-26a2-2b94-8c16560cd09d

commit 1e033e1594
parent cc82cff82e
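The API change at the heart of this commit: ConvertFromI420 now takes a const VideoFrame& instead of a raw buffer, so width and height travel with the frame and drop out of the argument list. A minimal call-site sketch (the RenderToRgb565 helper and its arguments are hypothetical, not part of the commit; the include path is the one added to the test harness below):

#include <stdint.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"

// Hypothetical caller showing the before/after shape of the call.
void RenderToRgb565(const webrtc::VideoFrame& frame, uint8_t* dst_rgb565) {
  // Before: ConvertFromI420(frame.Buffer(), frame.Width(), kRGB565, 0,
  //                         frame.Width(), frame.Height(), dst_rgb565);
  // After: the frame carries its own dimensions.
  webrtc::ConvertFromI420(frame, frame.Width(), webrtc::kRGB565, 0,
                          dst_rgb565);
}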
@@ -100,13 +100,11 @@ int ConvertToI420(VideoType src_video_type,
 // - src_stride : Number of bytes in a row of the src Y plane.
 // - dst_video_type : Type of output video.
 // - dst_sample_size : Required only for the parsing of MJPG.
-// - width : Width in pixels.
-// - height : Height in pixels.
 // - dst_frame : Pointer to a destination frame.
 // Return value: 0 if OK, < 0 otherwise.
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+// It is assumed that source and destination have equal height.
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
                     VideoType dst_video_type, int dst_sample_size,
-                    int width, int height,
                     uint8_t* dst_frame);
 // ConvertFrom YV12.
 // Interface - same as above.
@@ -135,8 +135,12 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   double psnr = 0;
 
-  uint8_t* orig_buffer = new uint8_t[frame_length_];
-  EXPECT_GT(fread(orig_buffer, 1, frame_length_, source_file_), 0U);
+  VideoFrame orig_frame;
+  orig_frame.VerifyAndAllocate(frame_length_);
+  orig_frame.SetWidth(width_);
+  orig_frame.SetHeight(height_);
+  EXPECT_GT(fread(orig_frame.Buffer(), 1, frame_length_, source_file_), 0U);
+  orig_frame.SetLength(frame_length_);
 
   // printf("\nConvert #%d I420 <-> RGB24\n", j);
   uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3];
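The test now wraps the raw I420 file data in a VideoFrame before converting. A sketch of that setup pattern, using only the VideoFrame methods visible in the hunk above (the WrapI420 helper and its parameters are stand-ins, not part of the commit):

#include <stdint.h>
#include <string.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"

// Wrap an existing I420 buffer in a VideoFrame so it can be passed to the
// new ConvertFromI420(const VideoFrame&, ...) overload.
void WrapI420(webrtc::VideoFrame* frame, const uint8_t* i420_data,
              int width, int height) {
  // One I420 frame: a full-size Y plane plus two half-by-half chroma planes.
  const int frame_length = width * height * 3 / 2;
  frame->VerifyAndAllocate(frame_length);  // ensure the buffer has capacity
  frame->SetWidth(width);
  frame->SetHeight(height);
  memcpy(frame->Buffer(), i420_data, frame_length);
  frame->SetLength(frame_length);          // mark how many bytes are valid
}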
@@ -144,8 +148,8 @@ TEST_F(TestLibYuv, ConvertTest) {
   res_i420_frame.VerifyAndAllocate(frame_length_);
   res_i420_frame.SetHeight(height_);
   res_i420_frame.SetWidth(width_);
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kRGB24, 0,
-                               width_, height_, res_rgb_buffer2));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kRGB24, 0,
+                               res_rgb_buffer2));
 
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -154,7 +158,8 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
   EXPECT_GT(ceil(psnr), 44);
   j++;
@@ -162,11 +167,12 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   // printf("\nConvert #%d I420 <-> UYVY\n", j);
   uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kUYVY, 0, width_, height_, out_uyvy_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kUYVY, 0, out_uyvy_buffer));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
   if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
              output_file) != static_cast<unsigned int>(frame_length_)) {
@@ -178,15 +184,15 @@ TEST_F(TestLibYuv, ConvertTest) {
 
   // printf("\nConvert #%d I420 <-> I420 \n", j);
   uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_frame.Buffer(), 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
-  EXPECT_EQ(0, ConvertFromI420(res_i420_frame.Buffer(), width_, kI420, 0,
-                               width_, height_, out_i420_buffer));
+  EXPECT_EQ(0, ConvertFromI420(res_i420_frame, width_, kI420, 0,
+                               out_i420_buffer));
   if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, out_i420_buffer, width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), out_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
   j++;
   delete [] out_i420_buffer;
@@ -194,8 +200,8 @@ TEST_F(TestLibYuv, ConvertTest) {
   // printf("\nConvert #%d I420 <-> YV12\n", j);
   uint8_t* outYV120Buffer = new uint8_t[frame_length_];
 
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kYV12, 0,
-                               width_, height_, outYV120Buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kYV12, 0,
+                               outYV120Buffer));
   EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
                                kI420, 0,
                                width_, height_,
@@ -205,15 +211,16 @@ TEST_F(TestLibYuv, ConvertTest) {
     return;
   }
 
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
   j++;
   delete [] outYV120Buffer;
 
   // printf("\nConvert #%d I420 <-> YUY2\n", j);
   uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kYUY2, 0, width_, height_, out_yuy2_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kYUY2, 0, out_yuy2_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -222,13 +229,14 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   EXPECT_EQ(48.0, psnr);
 
   // printf("\nConvert #%d I420 <-> RGB565\n", j);
   uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kRGB565, 0, width_, height_, out_rgb565_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kRGB565, 0, out_rgb565_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -237,15 +245,16 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
   // Another example is I420ToRGB24, the psnr is 44
   EXPECT_GT(ceil(psnr), 40);
 
   // printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4];
-  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
-                               kARGB, 0, width_, height_, out_argb8888_buffer));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
+                               kARGB, 0, out_argb8888_buffer));
 
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_,
                              0, kRotateNone, &res_i420_frame));
@@ -254,17 +263,18 @@ TEST_F(TestLibYuv, ConvertTest) {
              output_file) != static_cast<unsigned int>(frame_length_)) {
     return;
   }
-  psnr = I420PSNR(orig_buffer, res_i420_frame.Buffer(), width_, height_);
+  psnr = I420PSNR(orig_frame.Buffer(), res_i420_frame.Buffer(),
+                  width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
   EXPECT_GT(ceil(psnr), 42);
 
   ASSERT_EQ(0, fclose(output_file));
 
   res_i420_frame.Free();
+  orig_frame.Free();
   delete [] out_argb8888_buffer;
   delete [] out_rgb565_buffer;
   delete [] out_yuy2_buffer;
-  delete [] orig_buffer;
 }
 
 // TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory
@@ -194,14 +194,15 @@ int ConvertToI420(VideoType src_video_type,
                              ConvertVideoType(src_video_type));
 }
 
-int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
                     VideoType dst_video_type, int dst_sample_size,
-                    int width, int height,
                     uint8_t* dst_frame) {
+  int height = src_frame.Height();
+  int width = src_frame.Width();
   int abs_height = (height < 0) ? -height : height;
   int half_width = (width + 1) >> 1;
   int half_height = (abs_height + 1) >> 1;
-  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_yplane = src_frame.Buffer();
   const uint8_t* src_uplane = src_yplane + width * abs_height;
   const uint8_t* src_vplane = src_uplane + half_width * half_height;
   return libyuv::ConvertFromI420(src_yplane, src_stride,
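The implementation above finds the U and V planes by pointer arithmetic over one contiguous buffer. A standalone sketch of that I420 layout math, mirroring the offsets computed in the hunk (rounding the half dimensions up so odd sizes still work):

#include <stdint.h>
#include <stdlib.h>

struct I420Planes {
  const uint8_t* y;
  const uint8_t* u;
  const uint8_t* v;
};

// Split a packed I420 buffer into its three planes.
I420Planes SplitI420(const uint8_t* buffer, int width, int height) {
  const int abs_height = abs(height);          // height < 0 means flipped
  const int half_width = (width + 1) >> 1;     // round up for odd widths
  const int half_height = (abs_height + 1) >> 1;
  I420Planes p;
  p.y = buffer;                                // Y: width * abs_height bytes
  p.u = p.y + width * abs_height;              // U: quarter-size plane after Y
  p.v = p.u + half_width * half_height;        // V follows U
  return p;
}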
@@ -459,8 +459,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
 
   if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
     const int conversionResult =
-        ConvertFromI420((unsigned char* )_bufferToRender.Buffer(), _bitmapWidth,
-                        kRGB565, 0, _bitmapWidth, _bitmapHeight, _directBuffer);
+        ConvertFromI420(_bufferToRender, _bitmapWidth,
+                        kRGB565, 0, _directBuffer);
 
     if (conversionResult < 0) {
       WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
@@ -44,19 +44,15 @@ VideoX11Channel::~VideoX11Channel()
 }
 
 WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
-                                           VideoFrame& videoFrame)
-{
-    CriticalSectionScoped cs(&_crit);
-    if (_width != (WebRtc_Word32) videoFrame.Width() || _height
-        != (WebRtc_Word32) videoFrame.Height())
-    {
-        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-        {
-            return -1;
-        }
-    }
-    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
-                        videoFrame.TimeStamp());
+                                           VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (_width != (WebRtc_Word32) videoFrame.Width() || _height
+      != (WebRtc_Word32) videoFrame.Height()) {
+    if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      return -1;
+    }
+  }
+  return DeliverFrame(videoFrame);
 }
 
 WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
@@ -76,33 +72,26 @@ WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
     return 0;
 }
 
-WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
-                                            WebRtc_Word32 bufferSize,
-                                            unsigned WebRtc_Word32 /*timeStamp90kHz*/)
-{
-    CriticalSectionScoped cs(&_crit);
-    if (!_prepared)
-    {
-        return 0;
-    }
-
-    if (!dispArray[_dispCount])
-    {
-        return -1;
-    }
-
-    unsigned char *pBuf = buffer;
-    // convert to RGB32, setting stride = width.
-    ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
-
-    // put image in window
-    XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
-                 _height, True);
-
-    // very important for the image to update properly!
-    XSync(_display, False);
-
+WebRtc_Word32 VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (!_prepared) {
+    return 0;
+  }
+
+  if (!dispArray[_dispCount]) {
+    return -1;
+  }
+
+  // convert to RGB32, setting stride = width.
+  ConvertFromI420(videoFrame, _width, kARGB, 0, _buffer);
+
+  // Put image in window.
+  XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+               _height, True);
+
+  // Very important for the image to update properly!
+  XSync(_display, False);
   return 0;
 }
 
 WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
@@ -38,8 +38,7 @@ public:
 
     WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
                                   WebRtc_Word32 numberOfStreams);
-    WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
-                               unsigned WebRtc_Word32 /*timeStamp90kHz*/);
+    WebRtc_Word32 DeliverFrame(const VideoFrame& videoFrame);
     WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
     WebRtc_Word32 Init(Window window, float left, float top, float right,
                        float bottom);
@@ -80,21 +80,21 @@ VideoChannelAGL::~VideoChannelAGL()
     }
 }
 
-WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame)
-{
-    _owner->LockAGLCntx();
-    if(_width != videoFrame.Width() ||
-        _height != videoFrame.Height())
-    {
-        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-        { //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
-            _owner->UnlockAGLCntx();
-            return -1;
-        }
-    }
+WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId,
+                                           VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+  if (_width != videoFrame.Width() ||
+      _height != videoFrame.Height()) {
+    if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSize"
+                   "Change returned an error", __FUNCTION__, __LINE__);
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
 
-    _owner->UnlockAGLCntx();
-    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+  _owner->UnlockAGLCntx();
+  return DeliverFrame(videoFrame);
 }
 
 int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
@@ -220,63 +220,58 @@ int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
 }
 
 // Called from video engine when a new frame should be rendered.
-int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
-    _owner->LockAGLCntx();
-
-    if (_texture == 0)
-    {
-        _owner->UnlockAGLCntx();
-        return 0;
-    }
-
-    if (bufferSize != _incommingBufferSize)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    // Setting stride = width.
-    int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
-                                 _buffer);
-    if (rgbret < 0)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    aglSetCurrentContext(_aglContext);
-
-    // Put the new frame into the graphic card texture.
-    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
-    GLenum glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    // Copy buffer to texture
-    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
-                    0, // Level, not use
-                    0, // start point x, (low left of pic)
-                    0, // start point y,
-                    _width, // width
-                    _height, // height
-                    _pixelFormat, // pictue format for _buffer
-                    _pixelDataType, // data type of _buffer
-                    (const GLvoid*) _buffer); // the pixel data
-
-    if (glGetError() != GL_NO_ERROR)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
+int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (bufferSize != _incommingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Setting stride = width.
+  int rgbret = ConvertFromYV12(videoFrame.Buffer(), _width, kBGRA, 0, _width,
+                               _height, _buffer);
+  if (rgbret < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  aglSetCurrentContext(_aglContext);
+
+  // Put the new frame into the graphic card texture.
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Copy buffer to texture
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // Level, not use
+                  0, // start point x, (low left of pic)
+                  0, // start point y,
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // pictue format for _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  if (glGetError() != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
 
   _owner->UnlockAGLCntx();
 
   return 0;
 }
 
 int VideoChannelAGL::RenderOffScreenBuffer()
@@ -45,7 +45,7 @@ public:
     VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
     virtual ~VideoChannelAGL();
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
     virtual int UpdateSize(int width, int height);
     int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
     int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
@@ -44,7 +44,7 @@ public:
     virtual ~VideoChannelNSOpenGL();
 
     // A new frame is delivered
-    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
 
     // Called when the incomming frame size and/or number of streams in mix changes
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
@@ -91,25 +91,22 @@ WebRtc_Word32 VideoChannelNSOpenGL::GetChannelProperties(float& left,
     return 0;
 }
 
-WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
-{
-
-    _owner->LockAGLCntx();
-
-    if(_width != (int)videoFrame.Width() ||
-        _height != (int)videoFrame.Height())
-    {
-        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
-        {
-            _owner->UnlockAGLCntx();
-            return -1;
-        }
-    }
-
-    int ret = DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
-
-    _owner->UnlockAGLCntx();
-    return ret;
+WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(
+    const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame) {
+
+  _owner->LockAGLCntx();
+
+  if(_width != (int)videoFrame.Width() ||
+     _height != (int)videoFrame.Height()) {
+    if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) {
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+  int ret = DeliverFrame(videoFrame);
+
+  _owner->UnlockAGLCntx();
+  return ret;
 }
 
 int VideoChannelNSOpenGL::UpdateSize(int width, int height)
@@ -156,7 +153,7 @@ int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStr
     }
 
     _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
-    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);
     _buffer = new unsigned char [_bufferSize];
     memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
 
@@ -211,66 +208,61 @@ int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStr
     return 0;
 }
 
-int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
-{
-
-    _owner->LockAGLCntx();
-
-    if (_texture == 0)
-    {
-        _owner->UnlockAGLCntx();
-        return 0;
-    }
-
-    if (bufferSize != _incommingBufferSize)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    int rgbRet = ConvertFromYV12(buffer, _width,
-                                 kBGRA, 0, _width, _height,
-                                 _buffer);
-    if (rgbRet < 0)
-    {
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    [_nsglContext makeCurrentContext];
-
-
-    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
-    GLenum glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glBindTexture", glErr);
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
-                    0, // Level, not use
-                    0, // start point x, (low left of pic)
-                    0, // start point y,
-                    _width, // width
-                    _height, // height
-                    _pixelFormat, // pictue format for _buffer
-                    _pixelDataType, // data type of _buffer
-                    (const GLvoid*) _buffer); // the pixel data
-
-    glErr = glGetError();
-    if (glErr != GL_NO_ERROR)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glTexSubImage2d", glErr);
-        _owner->UnlockAGLCntx();
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
+int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
+
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  int rgbRet = ConvertFromYV12(videoFrame.Buffer(), _width,
+                               kBGRA, 0, _width, _height, _buffer);
+  if (rgbRet < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  [_nsglContext makeCurrentContext];
+
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glBindTexture", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // Level, not use
+                  0, // start point x, (low left of pic)
+                  0, // start point y,
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // pictue format for _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glTexSubImage2d", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
 
   _owner->UnlockAGLCntx();
 
   return 0;
 }
 
 int VideoChannelNSOpenGL::RenderOffScreenBuffer()
@@ -153,65 +153,55 @@ WebRtc_Word32 D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId,
             return -1;
         }
     }
-    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
-                        videoFrame.TimeStamp());
+    return DeliverFrame(videoFrame);
 }
 
 // Called from video engine when a new frame should be rendered.
-int D3D9Channel::DeliverFrame(unsigned char* buffer,
-                              int bufferSize,
-                              unsigned int timeStamp90kHz)
-{
-    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
-                 "DeliverFrame to D3D9Channel");
-
-    CriticalSectionScoped cs(_critSect);
-
-    //FIXME if _bufferIsUpdated is still true (not be renderred), do we what to update the texture?)
-    //probably not
-    if (_bufferIsUpdated)
-    {
-        WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
-                     "Last frame hasn't been rendered yet. Drop this frame.");
-        return -1;
-    }
-
-    if (!_pd3dDevice)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "D3D for rendering not initialized.");
-        return -1;
-    }
-
-    if (!_pTexture)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Texture for rendering not initialized.");
-        return -1;
-    }
-
-    D3DLOCKED_RECT lr;
-
-    if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0)))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Failed to lock a texture in D3D9 Channel.");
-        return -1;
-    }
-    UCHAR* pRect = (UCHAR*) lr.pBits;
-
-    ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
-
-    if (FAILED(_pTexture->UnlockRect(0)))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Failed to unlock a texture in D3D9 Channel.");
-        return -1;
-    }
-
-    _bufferIsUpdated = true;
-
-    return 0;
+int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+               "DeliverFrame to D3D9Channel");
+
+  CriticalSectionScoped cs(_critSect);
+
+  // FIXME if _bufferIsUpdated is still true (not be renderred), do we want to
+  // update the texture? probably not
+  if (_bufferIsUpdated) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Last frame hasn't been rendered yet. Drop this frame.");
+    return -1;
+  }
+
+  if (!_pd3dDevice) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "D3D for rendering not initialized.");
+    return -1;
+  }
+
+  if (!_pTexture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Texture for rendering not initialized.");
+    return -1;
+  }
+
+  D3DLOCKED_RECT lr;
+
+  if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to lock a texture in D3D9 Channel.");
+    return -1;
+  }
+  UCHAR* pRect = (UCHAR*) lr.pBits;
+
+  ConvertFromI420(videoFrame, _width, kARGB, 0, pRect);
+
+  if (FAILED(_pTexture->UnlockRect(0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to unlock a texture in D3D9 Channel.");
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+  return 0;
 }
 
 // Called by d3d channel owner to indicate the frame/texture has been rendered off
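The D3D9 path above is the classic lock-convert-unlock texture update: lock the texture, convert straight into the mapped memory, then unlock. A stripped-down sketch of just that sequence (a hypothetical helper with error logging elided; the texture and width are assumed to be set up as in D3D9Channel):

#include <d3d9.h>
#include <stdint.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"

// Sketch: convert one frame into a locked D3D9 texture, as the channel does.
int UpdateTexture(LPDIRECT3DTEXTURE9 texture, int width,
                  const webrtc::VideoFrame& frame) {
  D3DLOCKED_RECT lr;
  if (FAILED(texture->LockRect(0, &lr, NULL, 0)))
    return -1;
  // Convert the I420 frame directly into the texture memory, stride = width.
  webrtc::ConvertFromI420(frame, width, webrtc::kARGB, 0,
                          static_cast<uint8_t*>(lr.pBits));
  if (FAILED(texture->UnlockRect(0)))
    return -1;
  return 0;
}

Note that the channel passes the frame width as the destination stride; a fully general version would account for lr.Pitch instead.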
@@ -43,10 +43,8 @@ public:
     // Called when the incomming frame size and/or number of streams in mix changes
     virtual int FrameSizeChange(int width, int height, int numberOfStreams);
 
-    // A new frame is delivered
-    virtual int DeliverFrame(unsigned char* buffer,
-                             int bufferSize,
-                             unsigned int timeStame90kHz);
+    // A new frame is delivered.
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
     virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
                                       VideoFrame& videoFrame);
 
@@ -18,6 +18,7 @@
 
 #include "video_render.h"
 
+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "tb_interfaces.h"
 #include "tb_video_channel.h"
 #include "tb_capture_device.h"
@@ -57,15 +58,13 @@ public:
 
     virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
                              uint32_t time_stamp,
-                             int64_t render_time)
-    {
-        if (bufferSize != _width * _height * 3 / 2)
-        {
-            ViETest::Log("incorrect render buffer received, of length = %d\n",
-                         bufferSize);
-            return 0;
-        }
+                             int64_t render_time) {
+        if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {
+            ViETest::Log("Incorrect render buffer received, of length = %d\n",
+                         bufferSize);
+            return 0;
+        }
         return 0;
     }
 
 public:
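Replacing the hand-rolled _width * _height * 3 / 2 with CalcBufferSize(webrtc::kI420, ...) centralizes the per-format size math. For reference, these are the sizes implied by the allocations in the unittest hunks above (a sketch inferred from those allocations, not the library's implementation):

#include "common_video/libyuv/include/webrtc_libyuv.h"

// Bytes per frame for the formats exercised in this change.
int ApproxBufferSize(webrtc::VideoType type, int width, int height) {
  switch (type) {
    case webrtc::kI420:
    case webrtc::kYV12:
      return width * height * 3 / 2;  // planar 4:2:0
    case webrtc::kRGB565:
    case webrtc::kYUY2:
    case webrtc::kUYVY:
      return width * height * 2;      // 16 bits per pixel, packed
    case webrtc::kRGB24:
      return width * height * 3;
    case webrtc::kARGB:
      return width * height * 4;
    default:
      return -1;                      // not covered by this change
  }
}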
@@ -42,7 +42,8 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
   int FrameSizeChange(unsigned int width, unsigned int height,
                       unsigned int number_of_streams);
 
-  int DeliverFrame(unsigned char* buffer, int buffer_size,
+  int DeliverFrame(unsigned char* buffer,
+                   int buffer_size,
                    uint32_t time_stamp,
                    int64_t render_time);
 
@@ -195,8 +195,7 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(
     case kVideoARGB4444:
     case kVideoARGB1555 :
       {
-        ConvertFromI420(video_frame.Buffer(), video_frame.Width(), type, 0,
-                        video_frame.Width(), video_frame.Height(),
+        ConvertFromI420(video_frame, video_frame.Width(), type, 0,
                         converted_frame_->Buffer());
       }
       break;