Hi Magnus, I added some of the changes that you suggested before. Let me know what you think.
Review URL: https://webrtc-codereview.appspot.com/507004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2101 4adac7df-926f-26a2-2b94-8c16560cd09d
parent 7401a1f393
commit 5f49dba1a1
@@ -279,9 +279,10 @@ int ConvertToI420(VideoType src_video_type,
                   VideoRotationMode rotation,
                   uint8_t* dst_frame) {
   // All sanity tests are conducted within LibYuv.
+  int abs_dst_height = (dst_height < 0) ? -dst_height : dst_height;
   uint8_t* dst_yplane = dst_frame;
-  uint8_t* dst_uplane = dst_yplane + dst_width * dst_height;
-  uint8_t* dst_vplane = dst_uplane + (dst_width * dst_height / 4);
+  uint8_t* dst_uplane = dst_yplane + dst_width * abs_dst_height;
+  uint8_t* dst_vplane = dst_uplane + (dst_width * abs_dst_height / 4);
   return libyuv::ConvertToI420(src_frame, sample_size,
                                dst_yplane, dst_stride,
                                dst_uplane, (dst_stride + 1) / 2,
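Note (not part of the patch): a minimal sketch of the layout assumption behind abs_dst_height. A negative destination height only flags a vertically flipped (bottom-up) frame; the contiguous I420 buffer still holds abs(height) rows, so the U and V plane pointers must be offset by the absolute height. The struct and function names below are illustrative, not code from the tree.

    #include <stdint.h>
    #include <stdlib.h>

    struct I420Planes {
        uint8_t* y;
        uint8_t* u;
        uint8_t* v;
    };

    I420Planes GetI420Planes(uint8_t* buffer, int width, int height) {
        const int abs_height = abs(height);              // strip the "flip" flag
        I420Planes planes;
        planes.y = buffer;                               // Y plane: width * abs_height bytes
        planes.u = planes.y + width * abs_height;        // U plane starts after Y
        planes.v = planes.u + (width * abs_height) / 4;  // V plane starts after U
        return planes;
    }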
@@ -187,7 +187,19 @@ CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
 
         // Store the incoming width and height
         _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+        // Store the incoming height,
+        // for RGB24 we assume the frame to be upside down
+        if(*SubType == MEDIASUBTYPE_RGB24
+           && pvi->bmiHeader.biHeight > 0)
+        {
+            _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+        }
+        else
+        {
         _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+        }
+
         WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
                      "CheckMediaType width:%d height:%d Compression:0x%x\n",
                      pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
@@ -247,7 +259,18 @@ CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
                      pvi->bmiHeader.biCompression);
 
         _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+        // Store the incoming height,
+        // for RGB24 we assume the frame to be upside down
+        if(*SubType == MEDIASUBTYPE_RGB24
+           && pvi->bmiHeader.biHeight > 0)
+        {
+            _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+        }
+        else
+        {
         _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+        }
 
         if(*SubType == MEDIASUBTYPE_MJPG
            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
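Background on the two CheckMediaType hunks above, with an illustrative helper (the name is hypothetical; the capture filter writes _resultingCapability.height inline): for uncompressed RGB DIBs, DirectShow stores rows bottom-up when biHeight is positive, so the patch records a negative capability height for RGB24 to tell the later conversion stage that the frame must be flipped, and stores the absolute value for everything else.

    // Hypothetical helper illustrating the sign convention used by the patch.
    int CapabilityHeightFromBiHeight(long bi_height, bool is_rgb24) {
        if (is_rgb24 && bi_height > 0) {
            // Bottom-up RGB24 DIB: flag it by storing a negative height.
            return -static_cast<int>(bi_height);
        }
        // Other formats (and already top-down RGB24): store the magnitude.
        return static_cast<int>(bi_height < 0 ? -bi_height : bi_height);
    }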
@@ -18,6 +18,8 @@
 #include "trace.h"
 #include "video_capture_config.h"
 
+#include <stdlib.h>
+
 #ifdef WEBRTC_ANDROID
 #include "video_capture_android.h" // Need inclusion here to set Java environment.
 #endif
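The new #include <stdlib.h> is presumably what declares abs() for the calls added further down in this file. A trivial standalone check of that usage pattern (illustrative only):

    #include <stdlib.h>   // declares ::abs(int)

    int main() {
        int height = -480;         // bottom-up frame flagged with a negative height
        int rows = abs(height);    // 480: the number of rows actually in the buffer
        return rows == 480 ? 0 : 1;
    }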
@@ -271,7 +273,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
         RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
 
     if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width, height) != videoFrameLength)
+        CalcBufferSize(commonVideoType, width,
+                       abs(height)) != videoFrameLength)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                      "Wrong incoming frame length.");
@@ -279,7 +282,7 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
     }
 
     // Allocate I420 buffer.
-    int requiredLength = CalcBufferSize(kI420, width, height);
+    int requiredLength = CalcBufferSize(kI420, width, abs(height));
     _captureFrame.VerifyAndAllocate(requiredLength);
    if (!_captureFrame.Buffer())
     {
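Both CalcBufferSize call sites above now receive abs(height). A rough sketch of why (assuming 4:2:0 subsampling with rounded-up chroma dimensions; this is not WebRTC's actual CalcBufferSize): the frame size is derived from width times the number of rows, so a negative bottom-up height would make the incoming-length check fail and the allocation size wrong.

    #include <stdlib.h>

    // Illustrative I420 size computation, not the library implementation.
    static int I420SizeBytes(int width, int height) {
        const int rows = abs(height);             // rows actually present in memory
        const int chroma_width = (width + 1) / 2;
        const int chroma_rows = (rows + 1) / 2;
        return width * rows + 2 * chroma_width * chroma_rows;  // Y + U + V
    }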
@@ -317,7 +320,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
         }
     }
 
-    DeliverCapturedFrame(_captureFrame, width, height, captureTime, frameInfo.codecType);
+    DeliverCapturedFrame(_captureFrame, width, abs(height), captureTime,
+                         frameInfo.codecType);
 
 
     const WebRtc_UWord32 processTime =
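Finally, DeliverCapturedFrame is handed abs(height): once the raw frame has been converted (and flipped if needed) into I420, the sign has served its purpose, so only positive dimensions travel downstream. A sketch of that convention, with hypothetical types and helpers not taken from the tree:

    // The negative height is only a transport for the "rows are stored
    // bottom-up" flag between capture and conversion; after conversion every
    // consumer sees a positive height.
    struct RawFrameGeometry {
        int width;
        int height;   // < 0 means the raw rows are stored bottom-up (need a flip)
    };

    inline bool NeedsVerticalFlip(const RawFrameGeometry& g) {
        return g.height < 0;
    }

    inline int DeliveredHeight(const RawFrameGeometry& g) {
        return g.height < 0 ? -g.height : g.height;   // always positive downstream
    }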