Fixed camera output format handling

Andrey Kamaev 2011-07-25 12:32:33 +00:00
parent 4ddc1a6477
commit ac7e16fdf6
13 changed files with 109 additions and 63 deletions

Binary file not shown.

Binary file not shown.

View File

@@ -398,7 +398,7 @@ double CameraHandler::getProperty(int propIdx)
     union {const char* str;double res;} u;
     memset(&u.res, 0, sizeof(u.res));
-    u.str = cameraPropertySupportedPreviewSizesString.c_str();
+    u.str = cameraPropertyPreviewFormatString.c_str();
     return u.res;
 }
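
Note on the hunk above: the camera property API can only return a double, so string-valued properties are type-punned through a union and the raw pointer bits travel inside the double; the fix points this (presumably preview-format) branch at cameraPropertyPreviewFormatString instead of the supported-preview-sizes string. A minimal decoding sketch, not part of the commit, with illustrative names (StrDouble, decodePropertyString):

    // Recover the const char* that CameraHandler::getProperty() packed into a double.
    // Assumes sizeof(double) >= sizeof(const char*), as the capture code itself does.
    union StrDouble { const char* str; double res; };

    const char* decodePropertyString(double prop)
    {
        StrDouble u;
        u.res = prop;   // copy the raw bits of the double back into the union
        return u.str;   // reinterpret them as the original pointer
    }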

View File

@@ -133,7 +133,7 @@ CameraActivity::ErrorCode CameraWrapperConnector::getProperty(void* camera, int
         LOGE("CameraWrapperConnector::getProperty error: wrong pointer to camera object");
         return CameraActivity::ERROR_WRONG_POINTER_CAMERA_WRAPPER;
     }
-    LOGE("calling (*pGetPropertyC)(%p, %d)", camera, propIdx);
     *value = (*pGetPropertyC)(camera, propIdx);
     return CameraActivity::NO_ERROR;
 }
@@ -260,10 +260,6 @@ std::string CameraWrapperConnector::getPathLibFolder()
         LOGD("Library name: %s", dl_info.dli_fname);
         LOGD("Library base address: %p", dl_info.dli_fbase);
-        char addrBuf[18];
-        sprintf(addrBuf, "%p-", dl_info.dli_fbase);
-        int addrLength = strlen(addrBuf);
         const char* libName=dl_info.dli_fname;
         while( ((*libName)=='/') || ((*libName)=='.') )
             libName++;

View File

@@ -349,7 +349,8 @@ enum
     CV_CAP_PROP_WHITE_BALANCE_RED_V =26,
     CV_CAP_PROP_MAX_DC1394 =27,
     CV_CAP_PROP_AUTOGRAB =1024, // property for highgui class CvCapture_Android only
-    CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING=1025, // tricky property, returns cpnst char* indeed
+    CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING=1025, // readonly, tricky property, returns cpnst char* indeed
+    CV_CAP_PROP_PREVIEW_FORMAT=1026, // readonly, tricky property, returns cpnst char* indeed
     // OpenNI map generators
     CV_CAP_OPENNI_DEPTH_GENERATOR = 0,
     CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 31,
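
Usage sketch for the new property (not part of the commit): through the highgui C API the value still arrives as a double, so the caller decodes it with the same union trick the capture backend uses internally. CV_CAP_ANDROID is assumed to select the Android camera backend; the format strings it can return ("yuv420sp", "yuv420i") come from the capture changes below.

    #include <cstdio>
    #include <opencv2/highgui/highgui_c.h>

    int main()
    {
        CvCapture* capture = cvCreateCameraCapture(CV_CAP_ANDROID); // Android camera backend (assumption)
        if (!capture)
            return 1;
        union { double prop; const char* name; } u;
        u.prop = cvGetCaptureProperty(capture, CV_CAP_PROP_PREVIEW_FORMAT);
        std::printf("preview format: %s\n", u.name ? u.name : "(unknown)");
        cvReleaseCapture(&capture);
        return 0;
    }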

View File

@@ -86,8 +86,18 @@ protected:
     //raw from camera
     int m_width;
     int m_height;
-    unsigned char *m_frameYUV420i;
-    unsigned char *m_frameYUV420inext;
+    unsigned char *m_frameYUV420;
+    unsigned char *m_frameYUV420next;
+    enum YUVformat
+    {
+        noformat = 0,
+        yuv420sp,
+        yuv420i,
+        yuvUnknown
+    };
+    YUVformat m_frameFormat;
     void setFrame(const void* buffer, int bufferSize);
@@ -117,9 +127,9 @@ private:
     volatile bool m_waitingNextFrame;
     volatile bool m_shouldAutoGrab;
-    void prepareCacheForYUV420i(int width, int height);
-    static bool convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
-    static bool convertYUV420i2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha);
+    void prepareCacheForYUV(int width, int height);
+    bool convertYUV2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
+    bool convertYUV2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha);
     friend class HighguiAndroidCameraActivity;
 };
@@ -179,8 +189,8 @@ CvCapture_Android::CvCapture_Android(int cameraId)
     m_height = 0;
     m_activity = 0;
     m_isOpened = false;
-    m_frameYUV420i = 0;
-    m_frameYUV420inext = 0;
+    m_frameYUV420 = 0;
+    m_frameYUV420next = 0;
     m_hasGray = false;
     m_hasColor = false;
     m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME;
@@ -188,6 +198,7 @@ CvCapture_Android::CvCapture_Android(int cameraId)
     m_shouldAutoGrab = false;
     m_framesGrabbed = 0;
     m_CameraParamsChanged = false;
+    m_frameFormat = noformat;
     //try connect to camera
     m_activity = new HighguiAndroidCameraActivity(this);
@@ -223,10 +234,10 @@ CvCapture_Android::~CvCapture_Android()
     pthread_mutex_lock(&m_nextFrameMutex);
-    unsigned char *tmp1=m_frameYUV420i;
-    unsigned char *tmp2=m_frameYUV420inext;
-    m_frameYUV420i = 0;
-    m_frameYUV420inext = 0;
+    unsigned char *tmp1=m_frameYUV420;
+    unsigned char *tmp2=m_frameYUV420next;
+    m_frameYUV420 = 0;
+    m_frameYUV420next = 0;
     delete tmp1;
     delete tmp2;
@@ -255,6 +266,8 @@ double CvCapture_Android::getProperty( int propIdx )
     case CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING:
         return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING);
+    case CV_CAP_PROP_PREVIEW_FORMAT:
+        return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING);
     default:
         CV_Error( CV_StsOutOfRange, "Failed attempt to GET unsupported camera property." );
         break;
@@ -318,9 +331,9 @@ bool CvCapture_Android::grabFrame()
     if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
         //LOGD("CvCapture_Android::grabFrame: get new frame");
         //swap current and new frames
-        unsigned char* tmp = m_frameYUV420i;
-        m_frameYUV420i = m_frameYUV420inext;
-        m_frameYUV420inext = tmp;
+        unsigned char* tmp = m_frameYUV420;
+        m_frameYUV420 = m_frameYUV420next;
+        m_frameYUV420next = tmp;
         //discard cached frames
         m_hasGray = false;
@@ -348,27 +361,39 @@ IplImage* CvCapture_Android::retrieveFrame( int outputType )
 {
     IplImage* image = NULL;
-    unsigned char *current_frameYUV420i=m_frameYUV420i;
+    unsigned char *current_frameYUV420=m_frameYUV420;
     //Attention! all the operations in this function below should occupy less time than the period between two frames from camera
-    if (NULL != current_frameYUV420i)
+    if (NULL != current_frameYUV420)
     {
+        if (m_frameFormat == noformat)
+        {
+            union {double prop; const char* name;} u;
+            u.prop = getProperty(CV_CAP_PROP_PREVIEW_FORMAT);
+            if (0 == strcmp(u.name, "yuv420sp"))
+                m_frameFormat = yuv420sp;
+            else if (0 == strcmp(u.name, "yuv420i"))
+                m_frameFormat = yuv420i;
+            else
+                m_frameFormat = yuvUnknown;
+        }
         switch(outputType)
        {
        case CV_CAP_ANDROID_GREY_FRAME:
            if (!m_hasGray)
-                if (!(m_hasGray = convertYUV420i2Grey(m_width, m_height, current_frameYUV420i, m_frameGray.mat)))
+                if (!(m_hasGray = convertYUV2Grey(m_width, m_height, current_frameYUV420, m_frameGray.mat)))
                    return NULL;
            image = m_frameGray.getIplImagePtr();
            break;
        case CV_CAP_ANDROID_COLOR_FRAME_BGR: case CV_CAP_ANDROID_COLOR_FRAME_RGB:
            if (!m_hasColor)
-                if (!(m_hasColor = convertYUV420i2BGR(m_width, m_height, current_frameYUV420i, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGB, false)))
+                if (!(m_hasColor = convertYUV2BGR(m_width, m_height, current_frameYUV420, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGB, false)))
                    return NULL;
            image = m_frameColor.getIplImagePtr();
            break;
        case CV_CAP_ANDROID_COLOR_FRAME_BGRA: case CV_CAP_ANDROID_COLOR_FRAME_RGBA:
            if (!m_hasColor)
-                if (!(m_hasColor = convertYUV420i2BGR(m_width, m_height, current_frameYUV420i, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGBA, true)))
+                if (!(m_hasColor = convertYUV2BGR(m_width, m_height, current_frameYUV420, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGBA, true)))
                    return NULL;
            image = m_frameColor.getIplImagePtr();
            break;
@@ -391,22 +416,22 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
     if ( expectedSize != bufferSize)
     {
-        LOGE("ERROR reading YUV420i buffer: width=%d, height=%d, size=%d, receivedSize=%d", width, height, expectedSize, bufferSize);
+        LOGE("ERROR reading YUV buffer: width=%d, height=%d, size=%d, receivedSize=%d", width, height, expectedSize, bufferSize);
         return;
     }
     //allocate memory if needed
-    prepareCacheForYUV420i(width, height);
+    prepareCacheForYUV(width, height);
     //copy data
-    memcpy(m_frameYUV420inext, buffer, bufferSize);
+    memcpy(m_frameYUV420next, buffer, bufferSize);
     //LOGD("CvCapture_Android::setFrame -- memcpy is done");
 #if 0 //moved this part of code into grabFrame
     //swap current and new frames
-    unsigned char* tmp = m_frameYUV420i;
-    m_frameYUV420i = m_frameYUV420inext;
-    m_frameYUV420inext = tmp;
+    unsigned char* tmp = m_frameYUV420;
+    m_frameYUV420 = m_frameYUV420next;
+    m_frameYUV420next = tmp;
     //discard cached frames
     m_hasGray = false;
@@ -418,30 +443,31 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
 }
 //Attention: this method should be called inside pthread_mutex_lock(m_nextFrameMutex) only
-void CvCapture_Android::prepareCacheForYUV420i(int width, int height)
+void CvCapture_Android::prepareCacheForYUV(int width, int height)
 {
     if (width != m_width || height != m_height)
     {
-        LOGD("CvCapture_Android::prepareCacheForYUV420i: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
+        LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
         m_width = width;
         m_height = height;
-        unsigned char *tmp = m_frameYUV420inext;
-        m_frameYUV420inext = new unsigned char [width * height * 3 / 2];
+        unsigned char *tmp = m_frameYUV420next;
+        m_frameYUV420next = new unsigned char [width * height * 3 / 2];
         if (tmp != NULL) {
             delete[] tmp;
         }
-        tmp = m_frameYUV420i;
-        m_frameYUV420i = new unsigned char [width * height * 3 / 2];
+        tmp = m_frameYUV420;
+        m_frameYUV420 = new unsigned char [width * height * 3 / 2];
         if (tmp != NULL) {
            delete[] tmp;
        }
    }
 }
-bool CvCapture_Android::convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
+bool CvCapture_Android::convertYUV2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
 {
     if (yuv == 0) return false;
+    if (m_frameFormat != yuv420sp && m_frameFormat != yuv420i) return false;
 #define ALWAYS_COPY_GRAY 0
 #if ALWAYS_COPY_GRAY
     resmat.create(height, width, CV_8UC1);
@@ -453,14 +479,19 @@ bool CvCapture_Android::convertYUV420i2Grey(int width, int height, const unsigne
     return !resmat.empty();
 }
-bool CvCapture_Android::convertYUV420i2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha)
+bool CvCapture_Android::convertYUV2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha)
 {
     if (yuv == 0) return false;
+    if (m_frameFormat != yuv420sp && m_frameFormat != yuv420i) return false;
     CV_Assert(width % 2 == 0 && height % 2 == 0);
     cv::Mat src(height*3/2, width, CV_8UC1, (void*)yuv);
-    cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420i2RGB : CV_YUV420i2BGR, withAlpha ? 4 : 3);
+    if (m_frameFormat == yuv420sp)
+        cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420sp2RGB : CV_YUV420sp2BGR, withAlpha ? 4 : 3);
+    else if (m_frameFormat == yuv420i)
+        cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420i2RGB : CV_YUV420i2BGR, withAlpha ? 4 : 3);
     return !resmat.empty();
 }
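
Background for this file's changes: "yuv420sp" is the usual Android preview layout (NV21: a full-resolution Y plane followed by a half-resolution interleaved V/U plane, hence the width * height * 3 / 2 buffer size used throughout), while "yuv420i" is an interleaved variant reported by some devices. A minimal sketch of the NV21 path that convertYUV2BGR() now takes; nv21ToBgr is an illustrative helper, not part of the commit:

    #include <opencv2/imgproc/imgproc.hpp>

    cv::Mat nv21ToBgr(const unsigned char* yuv, int width, int height)
    {
        CV_Assert(yuv != 0 && width % 2 == 0 && height % 2 == 0);
        // Y plane (width x height) followed by interleaved V/U (width x height/2).
        cv::Mat src(height * 3 / 2, width, CV_8UC1, (void*)yuv);
        cv::Mat bgr;
        cv::cvtColor(src, bgr, CV_YUV420sp2BGR, 3); // conversion code added by this commit
        return bgr;
    }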

View File

@@ -902,6 +902,8 @@ enum
     COLOR_YUV420i2RGB = 90,
     COLOR_YUV420i2BGR = 91,
+    COLOR_YUV420sp2RGB = 92,
+    COLOR_YUV420sp2BGR = 93,
     COLOR_COLORCVT_MAX =100
 };
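
The new C++-level constants mirror what the updated Java samples below do: convert the NV21 preview buffer straight to a 4-channel image by passing dcn = 4. A hedged sketch; previewToRgba and the Mat shape in the comment are assumptions matching the capture code:

    #include <opencv2/imgproc/imgproc.hpp>

    void previewToRgba(const cv::Mat& yuv /* (height*3/2) x width, CV_8UC1 */, cv::Mat& rgba)
    {
        // dcn = 4 requests an alpha channel; the 4-channel case is handled by
        // the YUV4202BGRA8888Invoker in imgproc/color.cpp further down.
        cv::cvtColor(yuv, rgba, cv::COLOR_YUV420sp2RGB, 4);
    }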

View File

@@ -228,6 +228,8 @@ enum
     CV_YUV420i2RGB = 90,
     CV_YUV420i2BGR = 91,
+    CV_YUV420sp2RGB = 92,
+    CV_YUV420sp2BGR = 93,
     CV_COLORCVT_MAX =100
 };

View File

@@ -2648,16 +2648,16 @@ static void Bayer2RGB_VNG_8u( const Mat& srcmat, Mat& dstmat, int code )
     }
 }
-///////////////////////////////////// YUV420i -> RGB /////////////////////////////////////
+///////////////////////////////////// YUV420 -> RGB /////////////////////////////////////
-template<int R>
-struct YUV420i2BGR888Invoker
+template<int R, int SPorI>
+struct YUV4202BGR888Invoker
 {
     Mat* dst;
     const uchar* my1, *muv;
     int width;
-    YUV420i2BGR888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
+    YUV4202BGR888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
         : dst(&_dst), my1(_y1), muv(_uv), width(_width) {}
     void operator()(const BlockedRange& range) const
@@ -2676,8 +2676,8 @@ struct YUV420i2BGR888Invoker
         for(int i = 0; i < width; i+=2,row1+=6,row2+=6)
         {
-            int cr = uv[i] - 128;
-            int cb = uv[i+1] - 128;
+            int cr = uv[i + SPorI + 0] - 128;
+            int cb = uv[i - SPorI + 1] - 128;
             int ruv = 409 * cr + 128;
             int guv = 128 - 100 * cb - 208 * cr;
@@ -2707,14 +2707,14 @@ struct YUV420i2BGR888Invoker
     }
 };
-template<int R>
-struct YUV420i2BGRA8888Invoker
+template<int R, int SPorI>
+struct YUV4202BGRA8888Invoker
 {
     Mat* dst;
     const uchar* my1, *muv;
     int width;
-    YUV420i2BGRA8888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
+    YUV4202BGRA8888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
         : dst(&_dst), my1(_y1), muv(_uv), width(_width) {}
     void operator()(const BlockedRange& range) const
@@ -2733,8 +2733,8 @@ struct YUV420i2BGRA8888Invoker
         for(int i = 0; i < width; i+=2,row1+=8,row2+=8)
         {
-            int cr = uv[i] - 128;
-            int cb = uv[i+1] - 128;
+            int cr = uv[i + SPorI + 0] - 128;
+            int cb = uv[i - SPorI + 1] - 128;
             int ruv = 409 * cr + 128;
             int guv = 128 - 100 * cb - 208 * cr;
@@ -3112,7 +3112,7 @@ void cv::cvtColor( InputArray _src, OutputArray _dst, int code, int dcn )
             Bayer2RGB_VNG_8u(src, dst, code);
         }
         break;
-    case CV_YUV420i2BGR: case CV_YUV420i2RGB:
+    case CV_YUV420sp2BGR: case CV_YUV420sp2RGB: case CV_YUV420i2BGR: case CV_YUV420i2RGB:
         {
            if(dcn <= 0) dcn = 3;
            CV_Assert( dcn == 3 || dcn == 4 );
@@ -3126,22 +3126,36 @@ void cv::cvtColor( InputArray _src, OutputArray _dst, int code, int dcn )
            const uchar* uv = y + dstSz.area();
 #ifdef HAVE_TEGRA_OPTIMIZATION
-            if (!tegra::YUV420i2BGR(y, uv, dst, CV_YUV420i2RGB == code))
+            if (!tegra::YUV420i2BGR(y, uv, dst, CV_YUV420sp2RGB == code))
 #endif
            {
-                if (CV_YUV420i2RGB == code)
+                if (CV_YUV420sp2RGB == code)
                {
                    if (dcn == 3)
-                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGR888Invoker<2>(dst, dstSz.width, y, uv));
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<2,0>(dst, dstSz.width, y, uv));
                    else
-                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGRA8888Invoker<2>(dst, dstSz.width, y, uv));
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<2,0>(dst, dstSz.width, y, uv));
                }
-                else
+                else if (CV_YUV420sp2BGR == code)
                {
                    if (dcn == 3)
-                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGR888Invoker<0>(dst, dstSz.width, y, uv));
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<0,0>(dst, dstSz.width, y, uv));
                    else
-                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGRA8888Invoker<0>(dst, dstSz.width, y, uv));
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<0,0>(dst, dstSz.width, y, uv));
+                }
+                else if (CV_YUV420i2RGB == code)
+                {
+                    if (dcn == 3)
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<2,1>(dst, dstSz.width, y, uv));
+                    else
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<2,1>(dst, dstSz.width, y, uv));
+                }
+                else if (CV_YUV420i2BGR == code)
+                {
+                    if (dcn == 3)
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<0,1>(dst, dstSz.width, y, uv));
+                    else
+                        parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<0,1>(dst, dstSz.width, y, uv));
                }
            }
        }
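
The new SPorI template parameter is what distinguishes the two layouts inside the shared invokers: the chroma plane is read in byte pairs, and SPorI only decides whether the two bytes of each pair are swapped, while R keeps selecting BGR versus RGB output order (0 or 2, as wired up in the dispatch above). An isolated sketch of the indexing, with readChromaPair as an illustrative name:

    // For each even i, SPorI == 0 reads the semiplanar ("yuv420sp"/NV21) order,
    // Cr then Cb; SPorI == 1 swaps the pair for the interleaved "yuv420i" layout.
    template<int SPorI>
    inline void readChromaPair(const unsigned char* uv, int i, int& cr, int& cb)
    {
        cr = uv[i + SPorI + 0] - 128;   // Cr (V) chroma sample
        cb = uv[i - SPorI + 1] - 128;   // Cb (U) chroma sample
    }

So YUV4202BGR888Invoker<2,0> serves CV_YUV420sp2RGB, YUV4202BGR888Invoker<0,1> serves CV_YUV420i2BGR, and so on, exactly as cvtColor dispatches them above.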

View File

@@ -45,7 +45,7 @@ class Sample1View extends SampleViewBase {
             Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
             break;
         case Sample1Java.VIEW_MODE_RGBA:
-            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
+            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
             Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
             break;
         case Sample1Java.VIEW_MODE_CANNY:

View File

@@ -42,14 +42,14 @@ class Sample4View extends SampleViewBase {
             Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
             break;
         case Sample4Mixed.VIEW_MODE_RGBA:
-            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
+            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
             break;
         case Sample4Mixed.VIEW_MODE_CANNY:
             Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
             Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
             break;
         case Sample4Mixed.VIEW_MODE_FEATURES:
-            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
+            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
             FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
             break;
         }