Updating the render module to follow the latest libyuv API changes.
Review URL: http://webrtc-codereview.appspot.com/331019

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1337 4adac7df-926f-26a2-2b94-8c16560cd09d
@@ -365,92 +365,97 @@ WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*stre
 /*Implements AndroidStream
  * Calls the Java object and render the buffer in _bufferToRender
  */
-void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
-{
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
   _renderCritSect.Enter();
-  // TickTime timeNow=TickTime::Now();
 
 #ifdef ANDROID_NDK_8_OR_ABOVE
-  if(_bitmapWidth!=_bufferToRender.Width() || _bitmapHeight!=_bufferToRender.Height())
-  {
+  if (_bitmapWidth != _bufferToRender.Width() ||
+      _bitmapHeight != _bufferToRender.Height()) {
     // Create the bitmap to write to
-    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u %u", __FUNCTION__,_bufferToRender.Width(),_bufferToRender.Height());
-    if(_javaBitmapObj)
-    {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
+                 "%u", __FUNCTION__, _bufferToRender.Width(),
+                 _bufferToRender.Height());
+    if (_javaBitmapObj) {
       jniEnv->DeleteGlobalRef(_javaBitmapObj);
       _javaBitmapObj = NULL;
     }
-    jobject javaBitmap=jniEnv->CallObjectMethod(_javaRenderObj,_createBitmapCid,videoFrame.Width(),videoFrame.Height());
+    jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
+                                                  _createBitmapCid,
+                                                  videoFrame.Width(),
+                                                  videoFrame.Height());
     _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
-    if (!_javaBitmapObj)
-    {
-      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Bitmap object reference", __FUNCTION__);
+    if (!_javaBitmapObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+                   "create Java Bitmap object reference", __FUNCTION__);
       _renderCritSect.Leave();
       return;
-    }
-    else
-    {
+    } else {
       _bitmapWidth=_bufferToRender.Width();
       _bitmapHeight=_bufferToRender.Height();
     }
   }
   void* pixels;
-  if (_javaBitmapObj && AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0)
-  {
-    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", __FUNCTION__);
+  if (_javaBitmapObj &&
+      AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
+                 __FUNCTION__);
     // Convert I420 straight into the Java bitmap.
-    const int conversionResult=ConvertI420ToRGB565( (unsigned char* )_bufferToRender.Buffer(), (unsigned char* ) pixels, _bitmapWidth, _bitmapHeight);
-    if(conversionResult<0)
-    {
-      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
+    int ret = ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
+                                  (unsigned char* ) pixels,
+                                  _bitmapWidth, _bitmapHeight);
+    if (ret < 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion "
+                   "failed.", __FUNCTION__);
     }
     AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
-    //Draw the Surface
+    // Draw the Surface.
    jniEnv->CallVoidMethod(_javaRenderObj,_drawCid);
-  }
-  else
-  {
-    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock bitmap", __FUNCTION__);
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
+                 "bitmap", __FUNCTION__);
   }
   _renderCritSect.Leave();
 
 #else
-  if(_bitmapWidth!=_bufferToRender.Width() || _bitmapHeight!=_bufferToRender.Height())
-  {
-    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: New render size %d %d",__FUNCTION__, _bufferToRender.Width(), _bufferToRender.Height());
-    if(_javaByteBufferObj)
-    {
+  if (_bitmapWidth != _bufferToRender.Width() ||
+      _bitmapHeight != _bufferToRender.Height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d",__FUNCTION__,
+                 _bufferToRender.Width(), _bufferToRender.Height());
+    if (_javaByteBufferObj) {
       jniEnv->DeleteGlobalRef(_javaByteBufferObj);
       _javaByteBufferObj = NULL;
       _directBuffer = NULL;
     }
-    jobject javaByteBufferObj=jniEnv->CallObjectMethod(_javaRenderObj,_createByteBufferCid,_bufferToRender.Width(),_bufferToRender.Height());
+    jobject javaByteBufferObj =
+        jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+                                 _bufferToRender.Width(),
+                                 _bufferToRender.Height());
     _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
-    if (!_javaByteBufferObj)
-    {
-      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java ByteBuffer object reference", __FUNCTION__);
+    if (!_javaByteBufferObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+                   "create Java ByteBuffer object reference", __FUNCTION__);
       _renderCritSect.Leave();
       return;
-    }
-    else
-    {
-      _directBuffer=(unsigned char*) jniEnv->GetDirectBufferAddress(_javaByteBufferObj);
+    } else {
+      _directBuffer = static_cast<unsigned char*>
+          (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
       _bitmapWidth = _bufferToRender.Width();
       _bitmapHeight = _bufferToRender.Height();
     }
   }
 
-  if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight)
-  {
-    const int conversionResult=ConvertI420ToRGB565Android((unsigned char* )_bufferToRender.Buffer(), _directBuffer, _bitmapWidth, _bitmapHeight);
-    if(conversionResult<0)
-    {
-      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
+  if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    // Android requires a vertically flipped image compared to std convert.
+    // This is done by giving a negative height input.
+    const int conversionResult =
+        ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
+                            _directBuffer, _bitmapWidth, -_bitmapHeight);
+    if (conversionResult < 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+                   " failed.", __FUNCTION__);
       _renderCritSect.Leave();
       return;
     }
@@ -459,7 +464,6 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
   // Draw the Surface
   jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
 #endif
-  //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds());
 }
 
 } // namespace webrtc
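A note on the `#else` branch above: the special-purpose `ConvertI420ToRGB565Android()` is replaced by the plain `ConvertI420ToRGB565()` called with `-_bitmapHeight`. As the new in-code comment says, a negative height asks the converter to write the destination bottom-up, which produces the vertically flipped image Android expects. A minimal compilable sketch of that convention follows; the function below is an illustrative stand-in, not the real converter:

    #include <cstdint>
    #include <cstring>

    // Illustrative stand-in for a converter honoring the libyuv-style
    // negative-height convention: height < 0 means "write the destination
    // bottom-up". The real ConvertI420ToRGB565() also performs the color
    // conversion; this stub only demonstrates the row addressing.
    void ConvertRowsMaybeFlipped(const uint8_t* src, uint8_t* dst,
                                 int width, int height) {
      const bool flip = height < 0;
      if (flip) height = -height;
      const int row_bytes = width * 2;  // RGB565: 2 bytes per pixel.
      for (int y = 0; y < height; ++y) {
        const uint8_t* src_row = src + y * row_bytes;
        uint8_t* dst_row = flip ? dst + (height - 1 - y) * row_bytes
                                : dst + y * row_bytes;
        std::memcpy(dst_row, src_row, row_bytes);
      }
    }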
@@ -93,8 +93,8 @@ WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
   }
 
   unsigned char *pBuf = buffer;
-  // convert to RGB32
-  ConvertI420ToARGB(pBuf, _buffer, _width, _height, 0);
+  // convert to RGB32, setting stride = width.
+  ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
 
   // put image in window
   XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
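The X11 hunk above is the simplest instance of the migration: per-format helpers such as `ConvertI420ToARGB()` give way to a single generalized `ConvertFromI420()` that takes the source stride and a destination `VideoType`. The prototype below is inferred from the call sites in this commit, and the body is a naive BT.601 reference stand-in (the real wrapper dispatches to libyuv's optimized kernels):

    #include <algorithm>
    #include <cstdint>

    // Illustrative stand-in for WebRTC's VideoType enum (the real one has
    // more entries).
    enum VideoType { kARGB };

    static uint8_t Clamp8(int v) {
      return static_cast<uint8_t>(std::max(0, std::min(255, v)));
    }

    // Signature inferred from the call sites in this commit. Assumes a
    // tightly packed I420 source with the given luma stride and a packed
    // 4-byte-per-pixel destination. Returns 0 on success, -1 on error.
    int ConvertFromI420(const uint8_t* src, int src_stride,
                        VideoType dst_video_type, int /*dst_sample_size*/,
                        int width, int height, uint8_t* dst) {
      if (dst_video_type != kARGB || !src || !dst) return -1;
      const uint8_t* y_plane = src;
      const uint8_t* u_plane = src + src_stride * height;
      const uint8_t* v_plane = u_plane + (src_stride / 2) * (height / 2);
      for (int j = 0; j < height; ++j) {
        for (int i = 0; i < width; ++i) {
          const int y = y_plane[j * src_stride + i] - 16;
          const int u = u_plane[(j / 2) * (src_stride / 2) + i / 2] - 128;
          const int v = v_plane[(j / 2) * (src_stride / 2) + i / 2] - 128;
          uint8_t* px = dst + (j * width + i) * 4;  // dst stride == width
          px[0] = Clamp8((298 * y + 516 * u + 128) >> 8);            // B
          px[1] = Clamp8((298 * y - 100 * u - 208 * v + 128) >> 8);  // G
          px[2] = Clamp8((298 * y + 409 * v + 128) >> 8);            // R
          px[3] = 255;                                               // A
        }
      }
      return 0;
    }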
@@ -236,7 +236,9 @@ int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigne
     return -1;
   }
 
-  int rgbret = ConvertI420ToRGBAMac((WebRtc_UWord8*)buffer, (WebRtc_UWord8*)_buffer, (WebRtc_Word32)_width, (WebRtc_Word32)_height, 0);
+  // Setting stride = width.
+  int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
+                               _buffer);
   if (rgbret < 0)
   {
     _owner->UnlockAGLCntx();
@@ -232,8 +232,10 @@ int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, un
     return -1;
   }
 
-  int rgbLength = ConvertI420ToRGBAMac(buffer, _buffer, _width, _height, 0);
-  if (rgbLength == -1)
+  int rgbRet = ConvertFromYV12(buffer, _width,
+                               kBGRA, 0, _width, _height,
+                               _buffer);
+  if (rgbRet < 0)
   {
     _owner->UnlockAGLCntx();
     return -1;
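Both Mac renderers above move from `ConvertI420ToRGBAMac()` to `ConvertFromYV12(..., kBGRA, ...)`. Two details are worth noting: the source is treated as tightly packed (source stride passed as `_width`), and the failure check becomes `< 0` rather than `== -1`, since the wrapper may report different negative error codes. A hypothetical caller follows; the prototype is inferred from the two call sites, and `BlitFrame` plus the stub body are illustrative only:

    // Illustrative stand-in for WebRTC's VideoType enum.
    enum VideoType { kBGRA };

    // Prototype inferred from the two Mac call sites above; stubbed so the
    // sketch compiles. The real wrapper converts YV12 into the packed RGB
    // format named by dst_video_type.
    int ConvertFromYV12(const unsigned char* /*src_frame*/, int /*src_stride*/,
                        VideoType /*dst_video_type*/, int /*dst_sample_size*/,
                        int /*width*/, int /*height*/,
                        unsigned char* /*dst_frame*/) {
      return 0;  // stub
    }

    // Hypothetical caller mirroring VideoChannelNSOpenGL::DeliverFrame:
    // treat any negative return as failure instead of comparing against -1.
    bool BlitFrame(const unsigned char* yv12, unsigned char* gl_buffer,
                   int width, int height) {
      return ConvertFromYV12(yv12, width, kBGRA, 0, width, height,
                             gl_buffer) >= 0;
    }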
@@ -200,7 +200,7 @@ int D3D9Channel::DeliverFrame(unsigned char* buffer,
   }
   UCHAR* pRect = (UCHAR*) lr.pBits;
 
-  ConvertI420ToARGB(buffer, pRect, _width, _height, 0);
+  ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
 
   if (FAILED(_pTexture->UnlockRect(0)))
   {
@@ -996,26 +996,20 @@ int DirectDrawChannel::DeliverFrame(unsigned char* buffer, int bufferSize,
   switch (_blitVideoType)
   {
     case kYUY2:
-      ConvertI420ToYUY2(buffer, ptr, _width, _height,
-                        ddsd.lPitch);
-      break;
     case kUYVY:
-      ConvertI420ToUYVY(buffer, ptr, _width, _height,
-                        ddsd.lPitch);
-      break;
     case kIYUV: // same as kYV12
     case kYV12:
-      ConvertI420ToYV12(buffer, ptr, _width, _height,
-                        ddsd.lPitch);
+      ConvertFromI420(buffer, _width,
+                      _blitVideoType, 0,
+                      _width, _height,
+                      ptr);
       break;
     case kRGB24:
     {
       _tempRenderBuffer.VerifyAndAllocate(_width * _height * 3);
-      //unsigned char *ptrTempBuffer=_tempRenderBuffer.GetBuffer();
       unsigned char *ptrTempBuffer = _tempRenderBuffer.Buffer();
-      //ConvertI420ToRGB24(buffer ,(int*) ptrTempBuffer, _width, _height);
-      ConvertI420ToRGB24(buffer, ptrTempBuffer, _width,
-                         _height);
+      ConvertFromI420(buffer, _width, kRGB24, 0, _width, _height,
+                      ptrTempBuffer);
       for (int i = 0; i < _height; i++)
       {
         memcpy(ptr, ptrTempBuffer, _width * 3);
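In the DirectDraw hunk above, the kYUY2 and kUYVY cases lose their dedicated converters and fall through to one `ConvertFromI420()` call whose destination format is simply `_blitVideoType`; the format dispatch moves into the wrapper. Note also that the destination pitch (`ddsd.lPitch`) no longer appears in the call, so the new path appears to assume a tightly packed destination. A compilable sketch of the fall-through pattern, with illustrative enum values and a stubbed converter:

    // Illustrative stand-ins; the real enum and wrapper live in WebRTC.
    enum VideoType { kYUY2, kUYVY, kIYUV, kYV12 };

    int ConvertFromI420(const unsigned char* /*src*/, int /*src_stride*/,
                        VideoType /*dst_video_type*/, int /*dst_sample_size*/,
                        int /*width*/, int /*height*/,
                        unsigned char* /*dst*/) {
      return 0;  // stub: the real wrapper picks the packing from the enum
    }

    // One generic call replaces three format-specific converters: the switch
    // only groups the YUV packings, and the enum value selects the output.
    void BlitYuv(VideoType blit_type, const unsigned char* src,
                 unsigned char* dst, int width, int height) {
      switch (blit_type) {
        case kYUY2:
        case kUYVY:
        case kIYUV:  // same as kYV12
        case kYV12:
          ConvertFromI420(src, width, blit_type, 0, width, height, dst);
          break;
      }
    }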
@@ -1025,8 +1019,8 @@ int DirectDrawChannel::DeliverFrame(unsigned char* buffer, int bufferSize,
       break;
     }
     case kARGB:
-      ConvertI420ToARGB(buffer, ptr, _width, _height,
-                        (ddsd.lPitch >> 2) - _width);
+      ConvertFromI420(buffer, ddsd.lPitch, kARGB, 0,
+                      _width, _height, ptrTempBuffer);
       break;
     case kARGB4444:
       ConvertI420ToARGB4444(buffer, ptr, _width, _height,
@@ -1039,11 +1033,8 @@ int DirectDrawChannel::DeliverFrame(unsigned char* buffer, int bufferSize,
     case kRGB565:
     {
       _tempRenderBuffer.VerifyAndAllocate(_width * _height * 2);
-      //unsigned char *ptrTempBuffer=_tempRenderBuffer.GetBuffer();
       unsigned char *ptrTempBuffer = _tempRenderBuffer.Buffer();
-      //ConvertI420ToRGB565(buffer ,(int*) ptrTempBuffer, _width, _height);
-      ConvertI420ToRGB565(buffer, ptrTempBuffer, _width,
-                          _height);
+      ConvertI420ToRGB565(buffer, ptrTempBuffer, _width, _height);
       ptr += ddsd.lPitch * (_height - 1);
       for (int i = 0; i < _height; i++)
       {
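The kRGB565 case above keeps its two-step approach: convert into `_tempRenderBuffer`, then copy row by row into the surface starting at the last scanline (`ptr += ddsd.lPitch * (_height - 1)`), which flips the image vertically while honoring the surface pitch. The hunk is cut off after the loop opens, so the per-row pointer decrement in the sketch below is an assumption:

    #include <cstring>

    // Minimal sketch of the bottom-up row copy in the kRGB565 case. The
    // downward walk of |dst| (dst -= pitch) is assumed, since the hunk above
    // ends before the loop body does.
    void CopyBottomUp(const unsigned char* temp, unsigned char* surface,
                      int width, int height, int pitch) {
      unsigned char* dst = surface + pitch * (height - 1);  // last scanline
      const int row_bytes = width * 2;                      // RGB565
      for (int i = 0; i < height; ++i) {
        std::memcpy(dst, temp, row_bytes);
        temp += row_bytes;
        dst -= pitch;  // move up one scanline => vertical flip
      }
    }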