commit 6ac4a4ceb6
zenev, 2015-05-18 13:29:54 -07:00
84 changed files with 38771 additions and 166 deletions


@@ -45,6 +45,24 @@ endif(CMAKE_SYSTEM_NAME MATCHES WindowsPhone OR CMAKE_SYSTEM_NAME MATCHES Window
if(WINRT)
  add_definitions(-DWINRT -DNO_GETENV)
+
+  # Making definitions available to other configurations and
+  # to filter dependency restrictions at compile time.
+  if(CMAKE_SYSTEM_NAME MATCHES WindowsPhone)
+    set(WINRT_PHONE TRUE)
+    add_definitions(-DWINRT_PHONE)
+  elseif(CMAKE_SYSTEM_NAME MATCHES WindowsStore)
+    set(WINRT_STORE TRUE)
+    add_definitions(-DWINRT_STORE)
+  endif()
+
+  if(CMAKE_SYSTEM_VERSION MATCHES 8.1)
+    set(WINRT_8_1 TRUE)
+    add_definitions(-DWINRT_8_1)
+  elseif(CMAKE_SYSTEM_VERSION MATCHES 8.0)
+    set(WINRT_8_0 TRUE)
+    add_definitions(-DWINRT_8_0)
+  endif()
endif()

if(POLICY CMP0022)

@@ -488,7 +506,7 @@ include(cmake/OpenCVModule.cmake)
# Detect endianness of build platform
# ----------------------------------------------------------------------------
-if(CMAKE_SYSTEM_NAME STREQUAL iOS)
+if(IOS)
  # test_big_endian needs try_compile, which doesn't work for iOS
  # http://public.kitware.com/Bug/view.php?id=12288
  set(WORDS_BIGENDIAN 0)
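
The new WINRT_PHONE / WINRT_STORE / WINRT_8_0 / WINRT_8_1 flags exist both as CMake variables and as preprocessor definitions, so later scripts and the C++ sources can branch on the exact target. A small hypothetical consumer (not part of the commit) showing the intent:

// Hypothetical consumer of the definitions introduced above.
#if defined(WINRT_8_1)
    // Windows / Windows Phone 8.1: the Media Foundation capture path can be built.
#elif defined(WINRT_8_0)
    // Windows 8.0 app: capture dependencies are unavailable, so video support stays out.
#endif

#if defined(WINRT_PHONE)
    // Phone-specific restrictions (e.g. NO_GETENV-style workarounds) go here.
#endif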


@@ -82,6 +82,8 @@ if(MSVC)
    set(OpenCV_RUNTIME vc11)
  elseif(MSVC_VERSION EQUAL 1800)
    set(OpenCV_RUNTIME vc12)
+  elseif(MSVC_VERSION EQUAL 1900)
+    set(OpenCV_RUNTIME vc14)
  endif()
elseif(MINGW)
  set(OpenCV_RUNTIME mingw)


@@ -143,6 +143,8 @@ if(MSVC)
    set(OpenCV_RUNTIME vc11)
  elseif(MSVC_VERSION EQUAL 1800)
    set(OpenCV_RUNTIME vc12)
+  elseif(MSVC_VERSION EQUAL 1900)
+    set(OpenCV_RUNTIME vc14)
  endif()
elseif(MINGW)
  set(OpenCV_RUNTIME mingw)


@@ -37,7 +37,7 @@ function(find_python preferred_version min_version library_env include_dir_env
  # standard FindPythonInterp always prefers executable from system path
  # this is really important because we are using the interpreter for numpy search and for choosing the install location
  foreach(_CURRENT_VERSION ${Python_ADDITIONAL_VERSIONS} "${preferred_version}" "${min_version}")
-    find_host_program(executable
+    find_host_program(PYTHON_EXECUTABLE
      NAMES python${_CURRENT_VERSION} python
      PATHS
        [HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Python\\\\PythonCore\\\\${_CURRENT_VERSION}\\\\InstallPath]

New binary file: data/detect_blob.png (33 KiB, not shown)


@@ -4714,8 +4714,8 @@ SparseMat::Hdr::Hdr( int _dims, const int* _sizes, int _type )
    refcount = 1;
    dims = _dims;
-    valueOffset = (int)alignSize(sizeof(SparseMat::Node) +
-                                 sizeof(int)*std::max(dims - CV_MAX_DIM, 0), CV_ELEM_SIZE1(_type));
+    valueOffset = (int)alignSize(sizeof(SparseMat::Node) - MAX_DIM*sizeof(int) +
+                                 dims*sizeof(int), CV_ELEM_SIZE1(_type));
    nodeSize = alignSize(valueOffset +
        CV_ELEM_SIZE(_type), (int)sizeof(size_t));

@@ -4816,7 +4816,8 @@ void SparseMat::copyTo( SparseMat& m ) const
void SparseMat::copyTo( Mat& m ) const
{
    CV_Assert( hdr );
-    m.create( dims(), hdr->size, type() );
+    int ndims = dims();
+    m.create( ndims, hdr->size, type() );
    m = Scalar(0);
    SparseMatConstIterator from = begin();

@@ -4825,7 +4826,7 @@ void SparseMat::copyTo( Mat& m ) const
    for( i = 0; i < N; i++, ++from )
    {
        const Node* n = from.node();
-        copyElem( from.ptr, m.ptr(n->idx), esz);
+        copyElem( from.ptr, (ndims > 1 ? m.ptr(n->idx) : m.ptr(n->idx[0])), esz);
    }
}

@@ -5114,7 +5115,8 @@ uchar* SparseMat::newNode(const int* idx, size_t hashval)
    if( !hdr->freeList )
    {
        size_t i, nsz = hdr->nodeSize, psize = hdr->pool.size(),
-            newpsize = std::max(psize*2, 8*nsz);
+            newpsize = std::max(psize*3/2, 8*nsz);
+        newpsize = (newpsize/nsz)*nsz;
        hdr->pool.resize(newpsize);
        uchar* pool = &hdr->pool[0];
        hdr->freeList = std::max(psize, nsz);
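
The Hdr change stops reserving index slots for all CV_MAX_DIM dimensions in every node and keeps only `dims` of them, and newNode() now grows the pool by 1.5x rounded down to a whole number of nodes. A back-of-the-envelope sketch of the two formulas; the constants are illustrative assumptions, not the real struct layout:

#include <algorithm>
#include <cstdio>

int main()
{
    // Illustrative: 2-D CV_64F sparse matrix on a 64-bit build, assuming
    // Node = { size_t hashval; size_t next; int idx[CV_MAX_DIM]; } with CV_MAX_DIM = 32.
    const size_t MAX_DIM = 32, dims = 2, elem = 8, nodeBase = 2*8 + 4*MAX_DIM;

    size_t oldOffset = nodeBase;                        // old: always 32 index slots per node
    size_t newOffset = nodeBase - 4*MAX_DIM + 4*dims;   // new: only `dims` slots per node
    std::printf("per-node value offset: %zu -> %zu bytes\n", oldOffset, newOffset);

    // Pool growth: 1.5x instead of 2x, snapped down to a multiple of the node size.
    size_t nsz = newOffset + elem, psize = 1 << 20;
    size_t newpsize = std::max(psize*3/2, 8*nsz);
    newpsize = (newpsize/nsz)*nsz;
    std::printf("pool grows %zu -> %zu bytes\n", psize, newpsize);
    return 0;
}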


@@ -504,54 +504,21 @@ static int countNonZero_(const T* src, int len )
    return nz;
}

-#if CV_SSE2
-static const uchar * initPopcountTable()
-{
-    static uchar tab[256];
-    static volatile bool initialized = false;
-    if( !initialized )
-    {
-        // we compute inverse popcount table,
-        // since we pass (img[x] == 0) mask as index in the table.
-        unsigned int j = 0u;
-    #if CV_POPCNT
-        if (checkHardwareSupport(CV_CPU_POPCNT))
-        {
-            for( ; j < 256u; j++ )
-                tab[j] = (uchar)(8 - _mm_popcnt_u32(j));
-        }
-    #endif
-        for( ; j < 256u; j++ )
-        {
-            int val = 0;
-            for( int mask = 1; mask < 256; mask += mask )
-                val += (j & mask) == 0;
-            tab[j] = (uchar)val;
-        }
-        initialized = true;
-    }
-    return tab;
-}
-#endif
-
static int countNonZero8u( const uchar* src, int len )
{
    int i=0, nz = 0;
#if CV_SSE2
    if(USE_SSE2)//5x-6x
    {
-        __m128i pattern = _mm_setzero_si128 ();
-        static const uchar * tab = initPopcountTable();
+        __m128i v_zero = _mm_setzero_si128();
+        __m128i sum = _mm_setzero_si128();

        for (; i<=len-16; i+=16)
        {
            __m128i r0 = _mm_loadu_si128((const __m128i*)(src+i));
-            int val = _mm_movemask_epi8(_mm_cmpeq_epi8(r0, pattern));
-            nz += tab[val & 255] + tab[val >> 8];
+            sum = _mm_add_epi32(sum, _mm_sad_epu8(_mm_sub_epi8(v_zero, _mm_cmpeq_epi8(r0, v_zero)), v_zero));
        }
+        nz = i - _mm_cvtsi128_si32(_mm_add_epi32(sum, _mm_unpackhi_epi64(sum, sum)));
    }
#elif CV_NEON
    int len0 = len & -16, blockSize1 = (1 << 8) - 16, blockSize0 = blockSize1 << 6;

@@ -598,15 +565,15 @@ static int countNonZero16u( const ushort* src, int len )
    if (USE_SSE2)
    {
        __m128i v_zero = _mm_setzero_si128 ();
-        static const uchar * tab = initPopcountTable();
+        __m128i sum = _mm_setzero_si128();

        for ( ; i <= len - 8; i += 8)
        {
-            __m128i v_src = _mm_loadu_si128((const __m128i*)(src + i));
-            int val = _mm_movemask_epi8(_mm_packs_epi16(_mm_cmpeq_epi16(v_src, v_zero), v_zero));
-            nz += tab[val];
+            __m128i r0 = _mm_loadu_si128((const __m128i*)(src + i));
+            sum = _mm_add_epi32(sum, _mm_sad_epu8(_mm_sub_epi8(v_zero, _mm_cmpeq_epi16(r0, v_zero)), v_zero));
        }
+        nz = i - (_mm_cvtsi128_si32(_mm_add_epi32(sum, _mm_unpackhi_epi64(sum, sum))) >> 1);

        src += i;
    }
#elif CV_NEON

@@ -649,20 +616,15 @@ static int countNonZero32s( const int* src, int len )
    if (USE_SSE2)
    {
        __m128i v_zero = _mm_setzero_si128 ();
-        static const uchar * tab = initPopcountTable();
+        __m128i sum = _mm_setzero_si128();

-        for ( ; i <= len - 8; i += 8)
+        for ( ; i <= len - 4; i += 4)
        {
-            __m128i v_src = _mm_loadu_si128((const __m128i*)(src + i));
-            __m128i v_dst0 = _mm_cmpeq_epi32(v_src, v_zero);
-            v_src = _mm_loadu_si128((const __m128i*)(src + i + 4));
-            __m128i v_dst1 = _mm_cmpeq_epi32(v_src, v_zero);
-            int val = _mm_movemask_epi8(_mm_packs_epi16(_mm_packs_epi32(v_dst0, v_dst1), v_zero));
-            nz += tab[val];
+            __m128i r0 = _mm_loadu_si128((const __m128i*)(src + i));
+            sum = _mm_add_epi32(sum, _mm_sad_epu8(_mm_sub_epi8(v_zero, _mm_cmpeq_epi32(r0, v_zero)), v_zero));
        }
+        nz = i - (_mm_cvtsi128_si32(_mm_add_epi32(sum, _mm_unpackhi_epi64(sum, sum))) >> 2);

        src += i;
    }
#elif CV_NEON

@@ -706,19 +668,17 @@ static int countNonZero32f( const float* src, int len )
#if CV_SSE2
    if (USE_SSE2)
    {
-        __m128i v_zero_i = _mm_setzero_si128();
        __m128 v_zero_f = _mm_setzero_ps();
-        static const uchar * tab = initPopcountTable();
+        __m128i v_zero = _mm_setzero_si128 ();
+        __m128i sum = _mm_setzero_si128();

-        for ( ; i <= len - 8; i += 8)
+        for ( ; i <= len - 4; i += 4)
        {
-            __m128i v_dst0 = _mm_castps_si128(_mm_cmpeq_ps(_mm_loadu_ps(src + i), v_zero_f));
-            __m128i v_dst1 = _mm_castps_si128(_mm_cmpeq_ps(_mm_loadu_ps(src + i + 4), v_zero_f));
-            int val = _mm_movemask_epi8(_mm_packs_epi16(_mm_packs_epi32(v_dst0, v_dst1), v_zero_i));
-            nz += tab[val];
+            __m128 r0 = _mm_loadu_ps(src + i);
+            sum = _mm_add_epi32(sum, _mm_sad_epu8(_mm_sub_epi8(v_zero, _mm_castps_si128(_mm_cmpeq_ps(r0, v_zero_f))), v_zero));
        }
+        nz = i - (_mm_cvtsi128_si32(_mm_add_epi32(sum, _mm_unpackhi_epi64(sum, sum))) >> 2);

        src += i;
    }
#elif CV_NEON

@@ -758,32 +718,7 @@ static int countNonZero32f( const float* src, int len )
static int countNonZero64f( const double* src, int len )
{
-    int i = 0, nz = 0;
-#if CV_SSE2
-    if (USE_SSE2)
-    {
-        __m128i v_zero_i = _mm_setzero_si128();
-        __m128d v_zero_d = _mm_setzero_pd();
-        static const uchar * tab = initPopcountTable();
-
-        for ( ; i <= len - 8; i += 8)
-        {
-            __m128i v_dst0 = _mm_castpd_si128(_mm_cmpeq_pd(_mm_loadu_pd(src + i), v_zero_d));
-            __m128i v_dst1 = _mm_castpd_si128(_mm_cmpeq_pd(_mm_loadu_pd(src + i + 2), v_zero_d));
-            __m128i v_dst2 = _mm_castpd_si128(_mm_cmpeq_pd(_mm_loadu_pd(src + i + 4), v_zero_d));
-            __m128i v_dst3 = _mm_castpd_si128(_mm_cmpeq_pd(_mm_loadu_pd(src + i + 6), v_zero_d));
-            v_dst0 = _mm_packs_epi32(v_dst0, v_dst1);
-            v_dst1 = _mm_packs_epi32(v_dst2, v_dst3);
-            int val = _mm_movemask_epi8(_mm_packs_epi16(_mm_packs_epi32(v_dst0, v_dst1), v_zero_i));
-            nz += tab[val];
-        }
-        src += i;
-    }
-#endif
-    return nz + countNonZero_(src, len - i);
+    return countNonZero_(src, len);
}

typedef int (*CountNonZeroFunc)(const uchar*, int);
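
The rewritten SSE2 paths drop the 256-entry inverse-popcount table: each comparison mask is negated into per-lane 0/1 values, _mm_sad_epu8 accumulates them, and the number of zero elements is finally subtracted from the processed length. A scalar sketch of the same idea for 8-bit data (a simplification, not the library code):

#include <cstdint>

// Count nonzero bytes the way the new SSE2 path does, one byte at a time:
// accumulate the number of ZERO bytes, then subtract from the processed length.
static int countNonZero8uScalar(const uint8_t* src, int len)
{
    int zeros = 0, i = 0;
    for (; i < len; ++i)
        zeros += (src[i] == 0);   // the SIMD code derives this 0/1 from 0 - cmpeq mask
    return i - zeros;             // matches the "nz = i - ..." lines above
}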


@@ -1248,3 +1248,27 @@ TEST(Core_SVD, orthogonality)
        ASSERT_LT(norm(mat_U, Mat::eye(2, 2, type), NORM_INF), 1e-5);
    }
}
TEST(Core_SparseMat, footprint)
{
int n = 1000000;
int sz[] = { n, n };
SparseMat m(2, sz, CV_64F);
int nodeSize0 = (int)m.hdr->nodeSize;
double dataSize0 = ((double)m.hdr->pool.size() + (double)m.hdr->hashtab.size()*sizeof(size_t))*1e-6;
printf("before: node size=%d bytes, data size=%.0f Mbytes\n", nodeSize0, dataSize0);
for (int i = 0; i < n; i++)
{
m.ref<double>(i, i) = 1;
}
double dataSize1 = ((double)m.hdr->pool.size() + (double)m.hdr->hashtab.size()*sizeof(size_t))*1e-6;
double threshold = (n*nodeSize0*1.6 + n*2.*sizeof(size_t))*1e-6;
printf("after: data size=%.0f Mbytes, threshold=%.0f MBytes\n", dataSize1, threshold);
ASSERT_LE((int)m.hdr->nodeSize, 32);
ASSERT_LE(dataSize1, threshold);
}


@@ -587,6 +587,44 @@ CUDA_TEST_P(MinMaxLoc, WithoutMask)
    }
}
CUDA_TEST_P(MinMaxLoc, OneRowMat)
{
cv::Mat src = randomMat(cv::Size(size.width, 1), depth);
double minVal, maxVal;
cv::Point minLoc, maxLoc;
cv::cuda::minMaxLoc(loadMat(src, useRoi), &minVal, &maxVal, &minLoc, &maxLoc);
double minVal_gold, maxVal_gold;
cv::Point minLoc_gold, maxLoc_gold;
minMaxLocGold(src, &minVal_gold, &maxVal_gold, &minLoc_gold, &maxLoc_gold);
EXPECT_DOUBLE_EQ(minVal_gold, minVal);
EXPECT_DOUBLE_EQ(maxVal_gold, maxVal);
expectEqual(src, minLoc_gold, minLoc);
expectEqual(src, maxLoc_gold, maxLoc);
}
CUDA_TEST_P(MinMaxLoc, OneColumnMat)
{
cv::Mat src = randomMat(cv::Size(1, size.height), depth);
double minVal, maxVal;
cv::Point minLoc, maxLoc;
cv::cuda::minMaxLoc(loadMat(src, useRoi), &minVal, &maxVal, &minLoc, &maxLoc);
double minVal_gold, maxVal_gold;
cv::Point minLoc_gold, maxLoc_gold;
minMaxLocGold(src, &minVal_gold, &maxVal_gold, &minLoc_gold, &maxLoc_gold);
EXPECT_DOUBLE_EQ(minVal_gold, minVal);
EXPECT_DOUBLE_EQ(maxVal_gold, maxVal);
expectEqual(src, minLoc_gold, minLoc);
expectEqual(src, maxLoc_gold, maxLoc);
}
CUDA_TEST_P(MinMaxLoc, Async)
{
    cv::Mat src = randomMat(size, depth);


@@ -156,7 +156,7 @@ namespace grid_minmaxloc_detail
    __host__ void minMaxLoc(const SrcPtr& src, ResType* minVal, ResType* maxVal, int* minLoc, int* maxLoc, const MaskPtr& mask, int rows, int cols, cudaStream_t stream)
    {
        dim3 block, grid;
-        getLaunchCfg<Policy>(cols, rows, block, grid);
+        getLaunchCfg<Policy>(rows, cols, block, grid);

        const int patch_x = divUp(divUp(cols, grid.x), block.x);
        const int patch_y = divUp(divUp(rows, grid.y), block.y);


@@ -49,7 +49,7 @@ namespace cv { namespace hal {
\****************************************************************************************/

template<typename _Tp> static inline int
-LUImpl(_Tp* A, size_t astep, int m, _Tp* b, size_t bstep, int n)
+LUImpl(_Tp* A, size_t astep, int m, _Tp* b, size_t bstep, int n, _Tp eps)
{
    int i, j, k, p = 1;
    astep /= sizeof(A[0]);

@@ -63,7 +63,7 @@ LUImpl(_Tp* A, size_t astep, int m, _Tp* b, size_t bstep, int n)
            if( std::abs(A[j*astep + i]) > std::abs(A[k*astep + i]) )
                k = j;

-        if( std::abs(A[k*astep + i]) < std::numeric_limits<_Tp>::epsilon() )
+        if( std::abs(A[k*astep + i]) < eps )
            return 0;

        if( k != i )

@@ -111,13 +111,13 @@ LUImpl(_Tp* A, size_t astep, int m, _Tp* b, size_t bstep, int n)
int LU(float* A, size_t astep, int m, float* b, size_t bstep, int n)
{
-    return LUImpl(A, astep, m, b, bstep, n);
+    return LUImpl(A, astep, m, b, bstep, n, FLT_EPSILON*10);
}

int LU(double* A, size_t astep, int m, double* b, size_t bstep, int n)
{
-    return LUImpl(A, astep, m, b, bstep, n);
+    return LUImpl(A, astep, m, b, bstep, n, DBL_EPSILON*100);
}
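
The LU pivot threshold is now an explicit parameter scaled from the element type's machine epsilon (10*FLT_EPSILON for float, 100*DBL_EPSILON for double), so degenerate and near-degenerate systems are rejected instead of producing garbage. A quick way to observe the behaviour through the public API; a minimal sketch, assuming a recent OpenCV build:

#include <opencv2/core.hpp>
#include <cstdio>

int main()
{
    // Rank-1 matrix: the pivot falls below the eps threshold inside LUImpl,
    // so cv::solve() with DECOMP_LU reports failure.
    cv::Matx22f A(1.f, 2.f,
                  2.f, 4.f);
    cv::Vec2f   b(1.f, 1.f);
    cv::Mat     x;
    bool ok = cv::solve(cv::Mat(A), cv::Mat(b), x, cv::DECOMP_LU);
    std::printf("solvable: %s\n", ok ? "yes" : "no");
    return 0;
}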

New binary image file (1.4 KiB, not shown)


@@ -3741,7 +3741,8 @@ enum ColormapTypes
    COLORMAP_COOL = 8, //!< ![cool](pics/colormaps/colorscale_cool.jpg)
    COLORMAP_HSV = 9, //!< ![HSV](pics/colormaps/colorscale_hsv.jpg)
    COLORMAP_PINK = 10, //!< ![pink](pics/colormaps/colorscale_pink.jpg)
-    COLORMAP_HOT = 11 //!< ![hot](pics/colormaps/colorscale_hot.jpg)
+    COLORMAP_HOT = 11, //!< ![hot](pics/colormaps/colorscale_hot.jpg)
+    COLORMAP_PARULA = 12 //!< ![parula](pics/colormaps/colorscale_parula.jpg)
};

/** @brief Applies a GNU Octave/MATLAB equivalent colormap on a given image.


@@ -469,6 +469,30 @@ namespace colormap
        }
    };
// Colormap similar to MATLAB's "parula".
class Parula : public ColorMap {
public:
Parula() : ColorMap() {
init(256);
}
Parula(int n) : ColorMap() {
init(n);
}
void init(int n) {
float r[] = { 0.2078, 0.0118, 0.0784, 0.0235, 0.2196, 0.5725, 0.8510, 0.9882, 0.9765 };
float g[] = { 0.1647, 0.3882, 0.5216, 0.6549, 0.7255, 0.7490, 0.7294, 0.8078, 0.9843 };
float b[] = { 0.5294, 0.8824, 0.8314, 0.7765, 0.6196, 0.4510, 0.3373, 0.1804, 0.0549 };
Mat X = linspace(0, 1, 9);
this->_lut = ColorMap::linear_colormap(X,
Mat(9, 1, CV_32FC1, r).clone(), // red
Mat(9, 1, CV_32FC1, g).clone(), // green
Mat(9, 1, CV_32FC1, b).clone(), // blue
n); // number of sample points
}
};
    void ColorMap::operator()(InputArray _src, OutputArray _dst) const
    {
        if(_lut.total() != 256)

@@ -513,6 +537,7 @@ namespace colormap
            colormap == COLORMAP_HSV ? (colormap::ColorMap*)(new colormap::HSV) :
            colormap == COLORMAP_JET ? (colormap::ColorMap*)(new colormap::Jet) :
            colormap == COLORMAP_OCEAN ? (colormap::ColorMap*)(new colormap::Ocean) :
+            colormap == COLORMAP_PARULA ? (colormap::ColorMap*)(new colormap::Parula) :
            colormap == COLORMAP_PINK ? (colormap::ColorMap*)(new colormap::Pink) :
            colormap == COLORMAP_RAINBOW ? (colormap::ColorMap*)(new colormap::Rainbow) :
            colormap == COLORMAP_SPRING ? (colormap::ColorMap*)(new colormap::Spring) :
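
With the new entry wired into both the public enum and the factory above, the colormap is applied like any other. A minimal usage sketch (the file names are placeholders):

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // "input.png" is a placeholder path; any 8-bit grayscale or BGR image works.
    cv::Mat src = cv::imread("input.png", cv::IMREAD_GRAYSCALE), dst;
    if (src.empty())
        return 1;
    cv::applyColorMap(src, dst, cv::COLORMAP_PARULA);   // constant added by this commit
    cv::imwrite("parula.png", dst);
    return 0;
}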


@@ -3455,7 +3455,7 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
        bool symmetrical = (this->symmetryType & KERNEL_SYMMETRICAL) != 0;
        bool is_1_2_1 = ky[0] == 2 && ky[1] == 1;
        bool is_1_m2_1 = ky[0] == -2 && ky[1] == 1;
-        bool is_m1_0_1 = ky[1] == 1 || ky[1] == -1;
+        bool is_m1_0_1 = ky[0] == 0 && (ky[1] == 1 || ky[1] == -1);
        ST f0 = ky[0], f1 = ky[1];
        ST _delta = this->delta;
        CastOp castOp = this->castOp0;

@@ -3486,13 +3486,12 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
                    D[i+2] = castOp(s0);
                    D[i+3] = castOp(s1);
                }
-                #else
+                #endif
                for( ; i < width; i ++ )
                {
                    ST s0 = S0[i] + S1[i]*2 + S2[i] + _delta;
                    D[i] = castOp(s0);
                }
-                #endif
            }
            else if( is_1_m2_1 )
            {

@@ -3509,17 +3508,16 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
                    D[i+2] = castOp(s0);
                    D[i+3] = castOp(s1);
                }
-                #else
+                #endif
                for( ; i < width; i ++ )
                {
                    ST s0 = S0[i] - S1[i]*2 + S2[i] + _delta;
                    D[i] = castOp(s0);
                }
-                #endif
            }
            else
            {
                #if CV_ENABLE_UNROLLED
                for( ; i <= width - 4; i += 4 )
                {
                    ST s0 = (S0[i] + S2[i])*f1 + S1[i]*f0 + _delta;

@@ -3532,16 +3530,13 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
                    D[i+2] = castOp(s0);
                    D[i+3] = castOp(s1);
                }
-                #else
+                #endif
                for( ; i < width; i ++ )
                {
                    ST s0 = (S0[i] + S2[i])*f1 + S1[i]*f0 + _delta;
                    D[i] = castOp(s0);
                }
-                #endif
            }
-            for( ; i < width; i++ )
-                D[i] = castOp((S0[i] + S2[i])*f1 + S1[i]*f0 + _delta);
        }
        else
        {

@@ -3549,7 +3544,7 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
            {
                if( f1 < 0 )
                    std::swap(S0, S2);
                #if CV_ENABLE_UNROLLED
                for( ; i <= width - 4; i += 4 )
                {
                    ST s0 = S2[i] - S0[i] + _delta;

@@ -3562,19 +3557,18 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
                    D[i+2] = castOp(s0);
                    D[i+3] = castOp(s1);
                }
-                #else
+                #endif
                for( ; i < width; i ++ )
                {
                    ST s0 = S2[i] - S0[i] + _delta;
                    D[i] = castOp(s0);
                }
-                #endif
                if( f1 < 0 )
                    std::swap(S0, S2);
            }
            else
            {
                #if CV_ENABLE_UNROLLED
                for( ; i <= width - 4; i += 4 )
                {
                    ST s0 = (S2[i] - S0[i])*f1 + _delta;

@@ -3588,10 +3582,9 @@ struct SymmColumnSmallFilter : public SymmColumnFilter<CastOp, VecOp>
                    D[i+3] = castOp(s1);
                }
                #endif
+                for( ; i < width; i++ )
+                    D[i] = castOp((S2[i] - S0[i])*f1 + _delta);
            }
-            for( ; i < width; i++ )
-                D[i] = castOp((S2[i] - S0[i])*f1 + _delta);
        }
    }
}
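
The hunks above replace the #else-guarded fallback loops with an unconditional scalar tail after #endif, and drop the duplicated tail loops that used to sit outside the branches, so the remainder left by the 4-wide unrolled loop is always processed exactly once. A generic sketch of that structure, not tied to the filter code (names are illustrative):

// Process 4 elements per iteration, then finish the remainder with a scalar tail.
// This is the layout every branch of the filter now follows.
static void scaleBy2(const float* src, float* dst, int width)
{
    int i = 0;
    for (; i <= width - 4; i += 4)   // "unrolled" main loop
    {
        dst[i]   = src[i]   * 2.f;
        dst[i+1] = src[i+1] * 2.f;
        dst[i+2] = src[i+2] * 2.f;
        dst[i+3] = src[i+3] * 2.f;
    }
    for (; i < width; i++)           // scalar tail, always compiled and always run
        dst[i] = src[i] * 2.f;
}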


@@ -3805,20 +3805,20 @@ static void remapBilinear( const Mat& _src, Mat& _dst, const Mat& _xy,
    typedef typename CastOp::rtype T;
    typedef typename CastOp::type1 WT;
    Size ssize = _src.size(), dsize = _dst.size();
-    int cn = _src.channels();
+    int k, cn = _src.channels();
    const AT* wtab = (const AT*)_wtab;
    const T* S0 = _src.ptr<T>();
    size_t sstep = _src.step/sizeof(S0[0]);
-    Scalar_<T> cval(saturate_cast<T>(_borderValue[0]),
-                    saturate_cast<T>(_borderValue[1]),
-                    saturate_cast<T>(_borderValue[2]),
-                    saturate_cast<T>(_borderValue[3]));
+    T cval[CV_CN_MAX];
    int dx, dy;
    CastOp castOp;
    VecOp vecOp;

+    for( k = 0; k < cn; k++ )
+        cval[k] = saturate_cast<T>(_borderValue[k & 3]);
+
    unsigned width1 = std::max(ssize.width-1, 0), height1 = std::max(ssize.height-1, 0);
-    CV_Assert( cn <= 4 && ssize.area() > 0 );
+    CV_Assert( ssize.area() > 0 );
#if CV_SSE2
    if( _src.type() == CV_8UC3 )
        width1 = std::max(ssize.width-2, 0);

@@ -3882,7 +3882,7 @@ static void remapBilinear( const Mat& _src, Mat& _dst, const Mat& _xy,
                        WT t2 = S[2]*w[0] + S[5]*w[1] + S[sstep+2]*w[2] + S[sstep+5]*w[3];
                        D[0] = castOp(t0); D[1] = castOp(t1); D[2] = castOp(t2);
                    }
-                else
+                else if( cn == 4 )
                    for( ; dx < X1; dx++, D += 4 )
                    {
                        int sx = XY[dx*2], sy = XY[dx*2+1];

@@ -3895,6 +3895,18 @@ static void remapBilinear( const Mat& _src, Mat& _dst, const Mat& _xy,
                        t1 = S[3]*w[0] + S[7]*w[1] + S[sstep+3]*w[2] + S[sstep+7]*w[3];
                        D[2] = castOp(t0); D[3] = castOp(t1);
                    }
else
for( ; dx < X1; dx++, D += cn )
{
int sx = XY[dx*2], sy = XY[dx*2+1];
const AT* w = wtab + FXY[dx]*4;
const T* S = S0 + sy*sstep + sx*cn;
for( k = 0; k < cn; k++ )
{
WT t0 = S[k]*w[0] + S[k+cn]*w[1] + S[sstep+k]*w[2] + S[sstep+k+cn]*w[3];
D[k] = castOp(t0);
}
}
            }
            else
            {

@@ -3948,7 +3960,7 @@ static void remapBilinear( const Mat& _src, Mat& _dst, const Mat& _xy,
                else
                    for( ; dx < X1; dx++, D += cn )
                    {
-                        int sx = XY[dx*2], sy = XY[dx*2+1], k;
+                        int sx = XY[dx*2], sy = XY[dx*2+1];
                        if( borderType == BORDER_CONSTANT &&
                            (sx >= ssize.width || sx+1 < 0 ||
                             sy >= ssize.height || sy+1 < 0) )
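
remapBilinear no longer asserts cn <= 4: border values are filled per channel and a generic per-channel loop covers whatever the 4-channel fast path does not. A small check of the now-supported case; a sketch assuming a build that contains this change:

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

int main()
{
    // A 6-channel image used to trip the old CV_Assert(cn <= 4) inside remapBilinear.
    cv::Mat src(64, 64, CV_8UC(6)), dst;
    src.setTo(0.);
    cv::Mat mapx(src.size(), CV_32FC1), mapy(src.size(), CV_32FC1);
    for (int y = 0; y < src.rows; y++)
        for (int x = 0; x < src.cols; x++)
        {
            mapx.at<float>(y, x) = x + 0.5f;   // half-pixel shift forces the bilinear path
            mapy.at<float>(y, x) = (float)y;
        }
    cv::remap(src, dst, mapx, mapy, cv::INTER_LINEAR);
    return 0;
}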


@@ -40,6 +40,7 @@
//M*/

#include "test_precomp.hpp"
+#include "opencv2/highgui.hpp"

using namespace cv;
using namespace std;

@@ -429,4 +430,64 @@ TEST(Core_Drawing, polylines)
    int cnt = countNonZero(img);
    ASSERT_EQ(cnt, 21);
}
//rotate/flip a quadrant appropriately
static void rot(int n, int *x, int *y, int rx, int ry)
{
if (ry == 0) {
if (rx == 1) {
*x = n-1 - *x;
*y = n-1 - *y;
}
//Swap x and y
int t = *x;
*x = *y;
*y = t;
}
}
static void d2xy(int n, int d, int *x, int *y)
{
int rx, ry, s, t=d;
*x = *y = 0;
for (s=1; s<n; s*=2)
{
rx = 1 & (t/2);
ry = 1 & (t ^ rx);
rot(s, x, y, rx, ry);
*x += s * rx;
*y += s * ry;
t /= 4;
}
}
TEST(Imgproc_FindContours, hilbert)
{
int n = 64, n2 = n*n, scale = 10, w = (n + 2)*scale;
Point ofs(scale, scale);
Mat img(w, w, CV_8U);
img.setTo(Scalar::all(0));
Point p(0,0);
for( int i = 0; i < n2; i++ )
{
Point q(0,0);
d2xy(n2, i, &q.x, &q.y);
line(img, p*scale + ofs, q*scale + ofs, Scalar::all(255));
p = q;
}
dilate(img, img, Mat());
vector<vector<Point> > contours;
findContours(img, contours, noArray(), RETR_LIST, CHAIN_APPROX_SIMPLE);
printf("ncontours = %d, contour[0].npoints=%d\n", (int)contours.size(), (int)contours[0].size());
img.setTo(Scalar::all(0));
drawContours(img, contours, 0, Scalar::all(255), 1);
//imshow("hilbert", img);
//waitKey();
ASSERT_EQ(1, (int)contours.size());
ASSERT_EQ(9832, (int)contours[0].size());
}
/* End of file. */


@@ -1918,3 +1918,37 @@ TEST(Imgproc_Blur, borderTypes)
    EXPECT_EQ(expected_dst.size(), dst.size());
    EXPECT_DOUBLE_EQ(0.0, cvtest::norm(expected_dst, dst, NORM_INF));
}
TEST(Imgproc_Morphology, iterated)
{
RNG& rng = theRNG();
for( int iter = 0; iter < 30; iter++ )
{
int width = rng.uniform(5, 33);
int height = rng.uniform(5, 33);
int cn = rng.uniform(1, 5);
int iterations = rng.uniform(1, 11);
int op = rng.uniform(0, 2);
Mat src(height, width, CV_8UC(cn)), dst0, dst1, dst2;
randu(src, 0, 256);
if( op == 0 )
dilate(src, dst0, Mat(), Point(-1,-1), iterations);
else
erode(src, dst0, Mat(), Point(-1,-1), iterations);
for( int i = 0; i < iterations; i++ )
if( op == 0 )
dilate(i == 0 ? src : dst1, dst1, Mat(), Point(-1,-1), 1);
else
erode(i == 0 ? src : dst1, dst1, Mat(), Point(-1,-1), 1);
Mat kern = getStructuringElement(MORPH_RECT, Size(3,3));
if( op == 0 )
dilate(src, dst2, kern, Point(-1,-1), iterations);
else
erode(src, dst2, kern, Point(-1,-1), iterations);
ASSERT_EQ(0.0, norm(dst0, dst1, NORM_INF));
ASSERT_EQ(0.0, norm(dst0, dst2, NORM_INF));
}
}


@@ -1632,4 +1632,64 @@ TEST(Resize, Area_half)
    }
}
TEST(Imgproc_Warp, multichannel)
{
RNG& rng = theRNG();
for( int iter = 0; iter < 30; iter++ )
{
int width = rng.uniform(3, 333);
int height = rng.uniform(3, 333);
int cn = rng.uniform(1, 10);
Mat src(height, width, CV_8UC(cn)), dst;
//randu(src, 0, 256);
src.setTo(0.);
Mat rot = getRotationMatrix2D(Point2f(0.f, 0.f), 1, 1);
warpAffine(src, dst, rot, src.size());
ASSERT_EQ(0.0, norm(dst, NORM_INF));
Mat rot2 = Mat::eye(3, 3, rot.type());
rot.copyTo(rot2.rowRange(0, 2));
warpPerspective(src, dst, rot2, src.size());
ASSERT_EQ(0.0, norm(dst, NORM_INF));
}
}
TEST(Imgproc_GetAffineTransform, singularity)
{
Point2f A_sample[3];
A_sample[0] = Point2f(8.f, 9.f);
A_sample[1] = Point2f(40.f, 41.f);
A_sample[2] = Point2f(47.f, 48.f);
Point2f B_sample[3];
B_sample[0] = Point2f(7.37465f, 11.8295f);
B_sample[1] = Point2f(15.0113f, 12.8994f);
B_sample[2] = Point2f(38.9943f, 9.56297f);
Mat trans = getAffineTransform(A_sample, B_sample);
ASSERT_EQ(0.0, norm(trans, NORM_INF));
}
TEST(Imgproc_Remap, DISABLED_memleak)
{
Mat src;
const int N = 400;
src.create(N, N, CV_8U);
randu(src, 0, 256);
Mat map_x, map_y, dst;
dst.create( src.size(), src.type() );
map_x.create( src.size(), CV_32FC1 );
map_y.create( src.size(), CV_32FC1 );
randu(map_x, 0., N+0.);
randu(map_y, 0., N+0.);
for( int iter = 0; iter < 10000; iter++ )
{
if(iter % 100 == 0)
{
putchar('.');
fflush(stdout);
}
remap(src, dst, map_x, map_y, CV_INTER_LINEAR);
}
}
/* End of file. */


@@ -679,10 +679,6 @@ void HaarEvaluator::computeOptFeatures()
    copyVectorToUMat(*optfeatures_lbuf, ufbuf);
}

-bool HaarEvaluator::setImage(InputArray _image, const std::vector<float>& _scales){
-    return FeatureEvaluator::setImage(_image, _scales);
-}
-
bool HaarEvaluator::setWindow( Point pt, int scaleIdx )
{
    const ScaleData& s = getScaleData(scaleIdx);


@@ -347,7 +347,6 @@ public:
    virtual Ptr<FeatureEvaluator> clone() const;
    virtual int getFeatureType() const { return FeatureEvaluator::HAAR; }

-    virtual bool setImage(InputArray _image, const std::vector<float>& _scales);
    virtual bool setWindow(Point p, int scaleIdx);

    Rect getNormRect() const;
    int getSquaresOffset() const;


@@ -1459,6 +1459,7 @@ void HOGDescriptor::detect(const Mat& img,
    Size winStride, Size padding, const std::vector<Point>& locations) const
{
    hits.clear();
+    weights.clear();
    if( svmDetector.empty() )
        return;


@@ -90,7 +90,9 @@ public:
        for(int i = 0, x = step_x / 2; i < x_points; i++, x += step_x) {
            for(int j = 0, y = step_y / 2; j < y_points; j++, y += step_y) {
-                sample_points.push_back(Point(x, y));
+                if( 0 <= x && x < images[0].cols &&
+                    0 <= y && y < images[0].rows )
+                    sample_points.push_back(Point(x, y));
            }
        }
    }


@@ -50,42 +50,50 @@ static void fastNlMeansDenoising_( const Mat& src, Mat& dst, const std::vector<f
                                   int templateWindowSize, int searchWindowSize)
{
    int hn = (int)h.size();
+    double granularity = (double)std::max(1., (double)dst.total()/(1 << 17));

    switch (CV_MAT_CN(src.type())) {
        case 1:
            parallel_for_(cv::Range(0, src.rows),
                          FastNlMeansDenoisingInvoker<ST, IT, UIT, D, int>(
-                              src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                              src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                          granularity);
            break;
        case 2:
            if (hn == 1)
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 2>, IT, UIT, D, int>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 2>, IT, UIT, D, Vec2i>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        case 3:
            if (hn == 1)
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 3>, IT, UIT, D, int>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 3>, IT, UIT, D, Vec3i>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        case 4:
            if (hn == 1)
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 4>, IT, UIT, D, int>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, src.rows),
                              FastNlMeansDenoisingInvoker<Vec<ST, 4>, IT, UIT, D, Vec4i>(
-                                  src, dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  src, dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        default:
            CV_Error(Error::StsBadArg,

@@ -237,6 +245,7 @@ static void fastNlMeansDenoisingMulti_( const std::vector<Mat>& srcImgs, Mat& ds
                                        int templateWindowSize, int searchWindowSize)
{
    int hn = (int)h.size();
+    double granularity = (double)std::max(1., (double)dst.total()/(1 << 16));

    switch (srcImgs[0].type())
    {

@@ -244,43 +253,50 @@ static void fastNlMeansDenoisingMulti_( const std::vector<Mat>& srcImgs, Mat& ds
            parallel_for_(cv::Range(0, srcImgs[0].rows),
                          FastNlMeansMultiDenoisingInvoker<uchar, IT, UIT, D, int>(
                              srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                              dst, templateWindowSize, searchWindowSize, &h[0]));
+                              dst, templateWindowSize, searchWindowSize, &h[0]),
+                          granularity);
            break;
        case CV_8UC2:
            if (hn == 1)
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 2>, IT, UIT, D, int>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 2>, IT, UIT, D, Vec2i>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        case CV_8UC3:
            if (hn == 1)
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 3>, IT, UIT, D, int>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 3>, IT, UIT, D, Vec3i>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        case CV_8UC4:
            if (hn == 1)
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 4>, IT, UIT, D, int>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            else
                parallel_for_(cv::Range(0, srcImgs[0].rows),
                              FastNlMeansMultiDenoisingInvoker<Vec<ST, 4>, IT, UIT, D, Vec4i>(
                                  srcImgs, imgToDenoiseIndex, temporalWindowSize,
-                                  dst, templateWindowSize, searchWindowSize, &h[0]));
+                                  dst, templateWindowSize, searchWindowSize, &h[0]),
+                              granularity);
            break;
        default:
            CV_Error(Error::StsBadArg,
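
Every parallel_for_ call now passes an explicit nstripes value derived from the output size, so small images are not split into more work stripes than is worthwhile. A minimal sketch of how the third argument of cv::parallel_for_ is used; the loop body here is illustrative, not the denoising invoker:

#include <opencv2/core.hpp>
#include <algorithm>

struct FillRows : public cv::ParallelLoopBody
{
    cv::Mat& img;
    explicit FillRows(cv::Mat& m) : img(m) {}
    virtual void operator()(const cv::Range& range) const
    {
        for (int y = range.start; y < range.end; y++)
            img.row(y).setTo(cv::Scalar::all(y & 0xFF));
    }
};

int main()
{
    cv::Mat img(480, 640, CV_8UC1);
    // Fewer, larger stripes for small images; the denoising code derives this
    // from dst.total() in the same spirit.
    double nstripes = std::max(1., (double)img.total() / (1 << 17));
    cv::parallel_for_(cv::Range(0, img.rows), FillRows(img), nstripes);
    return 0;
}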


@@ -47,6 +47,12 @@
namespace cv
{

+inline void log_(const Mat& src, Mat& dst)
+{
+    max(src, Scalar::all(1e-4), dst);
+    log(dst, dst);
+}
+
class TonemapImpl : public Tonemap
{
public:

@@ -122,7 +128,7 @@ public:
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
-        log(gray_img, log_img);
+        log_(gray_img, log_img);
        float mean = expf(static_cast<float>(sum(log_img)[0]) / log_img.total());
        gray_img /= mean;
        log_img.release();

@@ -205,7 +211,7 @@ public:
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
-        log(gray_img, log_img);
+        log_(gray_img, log_img);
        Mat map_img;
        bilateralFilter(log_img, map_img, -1, sigma_color, sigma_space);

@@ -289,7 +295,7 @@ public:
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
-        log(gray_img, log_img);
+        log_(gray_img, log_img);
        float log_mean = static_cast<float>(sum(log_img)[0] / log_img.total());
        double log_min, log_max;

@@ -383,7 +389,7 @@ public:
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
-        log(gray_img, log_img);
+        log_(gray_img, log_img);
        std::vector<Mat> x_contrast, y_contrast;
        getContrast(log_img, x_contrast, y_contrast);
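
The tonemappers now route every log() call through log_, which clamps the luminance to 1e-4 first so black pixels cannot poison the mean with undefined log(0) values. A standalone illustration of the clamp, assuming only OpenCV core functions:

#include <opencv2/core.hpp>
#include <cstdio>

int main()
{
    cv::Mat gray = (cv::Mat_<float>(1, 3) << 0.f, 0.5f, 1.f), bad, good;

    cv::log(gray, bad);                           // log(0) is undefined: -inf or garbage depending on the build
    cv::max(gray, cv::Scalar::all(1e-4), good);   // the log_ helper's clamp
    cv::log(good, good);

    std::printf("unclamped sum: %f  clamped sum: %f\n", cv::sum(bad)[0], cv::sum(good)[0]);
    return 0;
}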


@@ -156,3 +156,14 @@ TEST(Photo_White, issue_2646)
    ASSERT_EQ(0, nonWhitePixelsCount);
}
TEST(Photo_Denoising, speed)
{
string imgname = string(cvtest::TS::ptr()->get_data_path()) + "shared/5MP.png";
Mat src = imread(imgname, 0), dst;
double t = (double)getTickCount();
fastNlMeansDenoising(src, dst, 5, 7, 21);
t = (double)getTickCount() - t;
printf("execution time: %gms\n", t*1000./getTickFrequency());
}


@@ -1,7 +1,3 @@
-if(WINRT)
-  ocv_module_disable(videoio)
-endif()
-
set(the_description "Media I/O")
ocv_add_module(videoio opencv_imgproc opencv_imgcodecs OPTIONAL opencv_androidcamera WRAP java python)

@@ -11,7 +7,7 @@ ocv_add_module(videoio opencv_imgproc opencv_imgcodecs OPTIONAL opencv_androidca
# Jose Luis Blanco, 2008
# ----------------------------------------------------------------------------

-if(HAVE_WINRT_CX AND NOT WINRT)
+if(WINRT_8_1)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /ZW")
endif()

@@ -36,6 +32,36 @@ file(GLOB videoio_ext_hdrs
     "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.hpp"
     "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.h")
# Removing WinRT API headers by default
status(" ${name}: Removing WinRT API headers by default")
list(REMOVE_ITEM videoio_ext_hdrs "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/cap_winrt.hpp")
# Dependencies used by the implementation referenced
# below are not available on WinRT 8.0.
# Enabling it for WinRT 8.1+ only.
if(DEFINED WINRT AND NOT DEFINED WINRT_8_0)
# WinRT detected. Adding WinRT API header
status(" ${name}: WinRT detected. Adding WinRT API header")
list(APPEND videoio_ext_hdrs "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/cap_winrt.hpp")
# Adding WinRT internal sources and headers
list(APPEND videoio_srcs
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_capture.cpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_bridge.cpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_video.cpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/CaptureFrameGrabber.cpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/MediaStreamSink.cpp)
list(APPEND videoio_hdrs
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_capture.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_bridge.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt_video.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/MFIncludes.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/CaptureFrameGrabber.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/MediaSink.hpp
${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/MediaStreamSink.hpp)
endif()
if(WIN32 AND NOT ARM)
  list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_cmu.cpp)
endif()


@@ -50,6 +50,7 @@
  @{
    @defgroup videoio_c C API
    @defgroup videoio_ios iOS glue
+    @defgroup videoio_winrt WinRT glue
  @}
*/

@@ -85,8 +86,9 @@ enum { CAP_ANY = 0, // autodetect
       CAP_AVFOUNDATION = 1200, // AVFoundation framework for iOS (OS X Lion will have the same API)
       CAP_GIGANETIX = 1300,    // Smartek Giganetix GigEVisionSDK
       CAP_MSMF = 1400,         // Microsoft Media Foundation (via videoInput)
-       CAP_INTELPERC = 1500,    // Intel Perceptual Computing SDK
-       CAP_OPENNI2 = 1600,      // OpenNI2 (for Kinect)
+       CAP_WINRT = 1410,        // Microsoft Windows Runtime using Media Foundation
+       CAP_INTELPERC = 1500,    // Intel Perceptual Computing SDK
+       CAP_OPENNI2 = 1600,      // OpenNI2 (for Kinect)
       CAP_OPENNI2_ASUS = 1610  // OpenNI2 (for Asus Xtion and Occipital Structure sensors)
     };
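
CAP_WINRT joins the capture-API enum, and cap.cpp later in the diff adds it to the set of backends probed by VideoCapture. A minimal sketch of opening a camera from a WinRT app, assuming a WinRT build with this commit and the XAML/message-loop setup shown in cap_winrt.hpp below:

#include <opencv2/videoio.hpp>

void grabOneFrame()
{
    cv::VideoCapture cam;
    cam.open(0);      // on a WinRT build the new CAP_WINRT backend is among the defaults probed
    if (!cam.isOpened())
        return;
    cv::Mat frame;
    cam >> frame;     // frames arrive asynchronously; see winrt_imshow() in cap_winrt.hpp
}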


@ -0,0 +1,130 @@
// Video support for Windows Runtime
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include <ppl.h>
#include <functional>
#include <concrt.h>
#include <agile.h>
using namespace Windows::UI::Xaml::Controls;
namespace cv
{
//! @addtogroup videoio_winrt
//! @{
enum {
OPEN_CAMERA = 300,
CLOSE_CAMERA,
UPDATE_IMAGE_ELEMENT,
SHOW_TRACKBAR
};
/********************************** WinRT API ************************************************/
template <typename ...Args>
CV_EXPORTS void winrt_startMessageLoop(std::function<void(Args...)>&& callback, Args... args);
template <typename ...Args>
CV_EXPORTS void winrt_startMessageLoop(void callback(Args...), Args... args);
/** @brief
@note
Sets the reporter method for the HighguiAssist singleton. Starts the main OpenCV as
an async thread in WinRT. See VideoCapture for the example of callback implementation.
Here is how the class can be used:
@code
void cvMain()
{
Mat frame;
VideoCapture cam;
cam.open(0);
while (1)
{
cam >> frame;
// don't reprocess the same frame again
if (!cam.grab()) continue;
// your processing logic goes here
// obligatory step to get XAML image component updated
winrt_imshow();
}
}
MainPage::MainPage()
{
InitializeComponent();
cv::winrt_setFrameContainer(cvImage);
cv::winrt_startMessageLoop(cvMain);
}
@endcode
*/
template
CV_EXPORTS void winrt_startMessageLoop(void callback(void));
/** @brief
@note
Must be called from WinRT specific callback to handle image grabber state.
Here is how the class can be used:
@code
MainPage::MainPage()
{
// ...
Window::Current->VisibilityChanged += ref new Windows::UI::Xaml::WindowVisibilityChangedEventHandler(this, &Application::MainPage::OnVisibilityChanged);
// ...
}
void Application::MainPage::OnVisibilityChanged(Platform::Object ^sender,
Windows::UI::Core::VisibilityChangedEventArgs ^e)
{
cv::winrt_onVisibilityChanged(e->Visible);
}
@endcode
*/
CV_EXPORTS void winrt_onVisibilityChanged(bool visible);
/** @brief
@note
Must be called to assign WinRT control holding image you're working with.
Code sample is available for winrt_startMessageLoop().
*/
CV_EXPORTS void winrt_setFrameContainer(::Windows::UI::Xaml::Controls::Image^ image);
/** @brief
@note
Must be called to update attached image source.
Code sample is available for winrt_startMessageLoop().
*/
CV_EXPORTS void winrt_imshow();
//! @} videoio_winrt
} // cv


@ -0,0 +1,568 @@
//
// Copyright (C) Microsoft Corporation
// All rights reserved.
// Modified for native C++ WRL support by Gregory Morse
//
// Code in Details namespace is for internal usage within the library code
//
#ifndef _PLATFORM_AGILE_H_
#define _PLATFORM_AGILE_H_
#ifdef _MSC_VER
#pragma once
#endif // _MSC_VER
#include <algorithm>
#include <wrl\client.h>
template <typename T, bool TIsNotAgile> class Agile;
template <typename T>
struct UnwrapAgile
{
static const bool _IsAgile = false;
};
template <typename T>
struct UnwrapAgile<Agile<T, false>>
{
static const bool _IsAgile = true;
};
template <typename T>
struct UnwrapAgile<Agile<T, true>>
{
static const bool _IsAgile = true;
};
#define IS_AGILE(T) UnwrapAgile<T>::_IsAgile
#define __is_winrt_agile(T) (std::is_same<T, HSTRING__>::value || std::is_base_of<Microsoft::WRL::FtmBase, T>::value || std::is_base_of<IAgileObject, T>::value) //derived from Microsoft::WRL::FtmBase or IAgileObject
#define __is_win_interface(T) (std::is_base_of<IUnknown, T>::value || std::is_base_of<IInspectable, T>::value) //derived from IUnknown or IInspectable
#define __is_win_class(T) (std::is_same<T, HSTRING__>::value || std::is_base_of<Microsoft::WRL::Details::RuntimeClassBase, T>::value) //derived from Microsoft::WRL::RuntimeClass or HSTRING
namespace Details
{
IUnknown* __stdcall GetObjectContext();
HRESULT __stdcall GetProxyImpl(IUnknown*, REFIID, IUnknown*, IUnknown**);
HRESULT __stdcall ReleaseInContextImpl(IUnknown*, IUnknown*);
template <typename T>
#if _MSC_VER >= 1800
__declspec(no_refcount) inline HRESULT GetProxy(T *ObjectIn, IUnknown *ContextCallBack, T **Proxy)
#else
inline HRESULT GetProxy(T *ObjectIn, IUnknown *ContextCallBack, T **Proxy)
#endif
{
#if _MSC_VER >= 1800
return GetProxyImpl(*reinterpret_cast<IUnknown**>(&ObjectIn), __uuidof(T*), ContextCallBack, reinterpret_cast<IUnknown**>(Proxy));
#else
return GetProxyImpl(*reinterpret_cast<IUnknown**>(&const_cast<T*>(ObjectIn)), __uuidof(T*), ContextCallBack, reinterpret_cast<IUnknown**>(Proxy));
#endif
}
template <typename T>
inline HRESULT ReleaseInContext(T *ObjectIn, IUnknown *ContextCallBack)
{
return ReleaseInContextImpl(ObjectIn, ContextCallBack);
}
template <typename T>
class AgileHelper
{
__abi_IUnknown* _p;
bool _release;
public:
AgileHelper(__abi_IUnknown* p, bool release = true) : _p(p), _release(release)
{
}
AgileHelper(AgileHelper&& other) : _p(other._p), _release(other._release)
{
_other._p = nullptr;
_other._release = true;
}
AgileHelper operator=(AgileHelper&& other)
{
_p = other._p;
_release = other._release;
_other._p = nullptr;
_other._release = true;
return *this;
}
~AgileHelper()
{
if (_release && _p)
{
_p->__abi_Release();
}
}
__declspec(no_refcount) __declspec(no_release_return)
T* operator->()
{
return reinterpret_cast<T*>(_p);
}
__declspec(no_refcount) __declspec(no_release_return)
operator T * ()
{
return reinterpret_cast<T*>(_p);
}
private:
AgileHelper(const AgileHelper&);
AgileHelper operator=(const AgileHelper&);
};
template <typename T>
struct __remove_hat
{
typedef T type;
};
template <typename T>
struct __remove_hat<T*>
{
typedef T type;
};
template <typename T>
struct AgileTypeHelper
{
typename typedef __remove_hat<T>::type type;
typename typedef __remove_hat<T>::type* agileMemberType;
};
} // namespace Details
#pragma warning(push)
#pragma warning(disable: 4451) // Usage of ref class inside this context can lead to invalid marshaling of object across contexts
template <
typename T,
bool TIsNotAgile = (__is_win_class(typename Details::AgileTypeHelper<T>::type) && !__is_winrt_agile(typename Details::AgileTypeHelper<T>::type)) ||
__is_win_interface(typename Details::AgileTypeHelper<T>::type)
>
class Agile
{
static_assert(__is_win_class(typename Details::AgileTypeHelper<T>::type) || __is_win_interface(typename Details::AgileTypeHelper<T>::type), "Agile can only be used with ref class or interface class types");
typename typedef Details::AgileTypeHelper<T>::agileMemberType TypeT;
TypeT _object;
::Microsoft::WRL::ComPtr<IUnknown> _contextCallback;
ULONG_PTR _contextToken;
#if _MSC_VER >= 1800
enum class AgileState
{
NonAgilePointer = 0,
AgilePointer = 1,
Unknown = 2
};
AgileState _agileState;
#endif
void CaptureContext()
{
_contextCallback = Details::GetObjectContext();
__abi_ThrowIfFailed(CoGetContextToken(&_contextToken));
}
void SetObject(TypeT object)
{
// Capture context before setting the pointer
// If context capture fails then nothing to cleanup
Release();
if (object != nullptr)
{
::Microsoft::WRL::ComPtr<IAgileObject> checkIfAgile;
HRESULT hr = reinterpret_cast<IUnknown*>(object)->QueryInterface(__uuidof(IAgileObject), &checkIfAgile);
// Don't Capture context if object is agile
if (hr != S_OK)
{
#if _MSC_VER >= 1800
_agileState = AgileState::NonAgilePointer;
#endif
CaptureContext();
}
#if _MSC_VER >= 1800
else
{
_agileState = AgileState::AgilePointer;
}
#endif
}
_object = object;
}
public:
Agile() throw() : _object(nullptr), _contextToken(0)
#if _MSC_VER >= 1800
, _agileState(AgileState::Unknown)
#endif
{
}
Agile(nullptr_t) throw() : _object(nullptr), _contextToken(0)
#if _MSC_VER >= 1800
, _agileState(AgileState::Unknown)
#endif
{
}
explicit Agile(TypeT object) throw() : _object(nullptr), _contextToken(0)
#if _MSC_VER >= 1800
, _agileState(AgileState::Unknown)
#endif
{
// Assumes that the source object is from the current context
SetObject(object);
}
Agile(const Agile& object) throw() : _object(nullptr), _contextToken(0)
#if _MSC_VER >= 1800
, _agileState(AgileState::Unknown)
#endif
{
// Get returns pointer valid for current context
SetObject(object.Get());
}
Agile(Agile&& object) throw() : _object(nullptr), _contextToken(0)
#if _MSC_VER >= 1800
, _agileState(AgileState::Unknown)
#endif
{
// Assumes that the source object is from the current context
Swap(object);
}
~Agile() throw()
{
Release();
}
TypeT Get() const
{
// Agile object, no proxy required
#if _MSC_VER >= 1800
if (_agileState == AgileState::AgilePointer || _object == nullptr)
#else
if (_contextToken == 0 || _contextCallback == nullptr || _object == nullptr)
#endif
{
return _object;
}
// Do the check for same context
ULONG_PTR currentContextToken;
__abi_ThrowIfFailed(CoGetContextToken(&currentContextToken));
if (currentContextToken == _contextToken)
{
return _object;
}
#if _MSC_VER >= 1800
// Different context and holding on to a non agile object
// Do the costly work of getting a proxy
TypeT localObject;
__abi_ThrowIfFailed(Details::GetProxy(_object, _contextCallback.Get(), &localObject));
if (_agileState == AgileState::Unknown)
#else
// Object is agile if it implements IAgileObject
// GetAddressOf captures the context with out knowing the type of object that it will hold
if (_object != nullptr)
#endif
{
#if _MSC_VER >= 1800
// Object is agile if it implements IAgileObject
// GetAddressOf captures the context with out knowing the type of object that it will hold
::Microsoft::WRL::ComPtr<IAgileObject> checkIfAgile;
HRESULT hr = reinterpret_cast<IUnknown*>(localObject)->QueryInterface(__uuidof(IAgileObject), &checkIfAgile);
#else
::Microsoft::WRL::ComPtr<IAgileObject> checkIfAgile;
HRESULT hr = reinterpret_cast<IUnknown*>(_object)->QueryInterface(__uuidof(IAgileObject), &checkIfAgile);
#endif
if (hr == S_OK)
{
auto pThis = const_cast<Agile*>(this);
#if _MSC_VER >= 1800
pThis->_agileState = AgileState::AgilePointer;
#endif
pThis->_contextToken = 0;
pThis->_contextCallback = nullptr;
return _object;
}
#if _MSC_VER >= 1800
else
{
auto pThis = const_cast<Agile*>(this);
pThis->_agileState = AgileState::NonAgilePointer;
}
#endif
}
#if _MSC_VER < 1800
// Different context and holding on to a non agile object
// Do the costly work of getting a proxy
TypeT localObject;
__abi_ThrowIfFailed(Details::GetProxy(_object, _contextCallback.Get(), &localObject));
#endif
return localObject;
}
TypeT* GetAddressOf() throw()
{
Release();
CaptureContext();
return &_object;
}
TypeT* GetAddressOfForInOut() throw()
{
CaptureContext();
return &_object;
}
TypeT operator->() const throw()
{
return Get();
}
Agile& operator=(nullptr_t) throw()
{
Release();
return *this;
}
Agile& operator=(TypeT object) throw()
{
Agile(object).Swap(*this);
return *this;
}
Agile& operator=(Agile object) throw()
{
// parameter is by copy which gets pointer valid for current context
object.Swap(*this);
return *this;
}
#if _MSC_VER < 1800
Agile& operator=(IUnknown* lp) throw()
{
// bump ref count
::Microsoft::WRL::ComPtr<IUnknown> spObject(lp);
// put it into Platform Object
Platform::Object object;
*(IUnknown**)(&object) = spObject.Detach();
SetObject(object);
return *this;
}
#endif
void Swap(Agile& object)
{
std::swap(_object, object._object);
std::swap(_contextCallback, object._contextCallback);
std::swap(_contextToken, object._contextToken);
#if _MSC_VER >= 1800
std::swap(_agileState, object._agileState);
#endif
}
// Release the interface and set to NULL
void Release() throw()
{
if (_object)
{
// Cast to IInspectable (no QI)
IUnknown* pObject = *(IUnknown**)(&_object);
// Set * to null without release
*(IUnknown**)(&_object) = nullptr;
ULONG_PTR currentContextToken;
__abi_ThrowIfFailed(CoGetContextToken(&currentContextToken));
if (_contextToken == 0 || _contextCallback == nullptr || _contextToken == currentContextToken)
{
pObject->Release();
}
else
{
Details::ReleaseInContext(pObject, _contextCallback.Get());
}
_contextCallback = nullptr;
_contextToken = 0;
#if _MSC_VER >= 1800
_agileState = AgileState::Unknown;
#endif
}
}
bool operator==(nullptr_t) const throw()
{
return _object == nullptr;
}
bool operator==(const Agile& other) const throw()
{
return _object == other._object && _contextToken == other._contextToken;
}
bool operator<(const Agile& other) const throw()
{
if (reinterpret_cast<void*>(_object) < reinterpret_cast<void*>(other._object))
{
return true;
}
return _object == other._object && _contextToken < other._contextToken;
}
};
template <typename T>
class Agile<T, false>
{
static_assert(__is_win_class(typename Details::AgileTypeHelper<T>::type) || __is_win_interface(typename Details::AgileTypeHelper<T>::type), "Agile can only be used with ref class or interface class types");
typename typedef Details::AgileTypeHelper<T>::agileMemberType TypeT;
TypeT _object;
public:
Agile() throw() : _object(nullptr)
{
}
Agile(nullptr_t) throw() : _object(nullptr)
{
}
explicit Agile(TypeT object) throw() : _object(object)
{
}
Agile(const Agile& object) throw() : _object(object._object)
{
}
Agile(Agile&& object) throw() : _object(nullptr)
{
Swap(object);
}
~Agile() throw()
{
Release();
}
TypeT Get() const
{
return _object;
}
TypeT* GetAddressOf() throw()
{
Release();
return &_object;
}
TypeT* GetAddressOfForInOut() throw()
{
return &_object;
}
TypeT operator->() const throw()
{
return Get();
}
Agile& operator=(nullptr_t) throw()
{
Release();
return *this;
}
Agile& operator=(TypeT object) throw()
{
if (_object != object)
{
_object = object;
}
return *this;
}
Agile& operator=(Agile object) throw()
{
object.Swap(*this);
return *this;
}
#if _MSC_VER < 1800
Agile& operator=(IUnknown* lp) throw()
{
Release();
// bump ref count
::Microsoft::WRL::ComPtr<IUnknown> spObject(lp);
// put it into Platform Object
Platform::Object object;
*(IUnknown**)(&object) = spObject.Detach();
_object = object;
return *this;
}
#endif
// Release the interface and set to NULL
void Release() throw()
{
_object = nullptr;
}
void Swap(Agile& object)
{
std::swap(_object, object._object);
}
bool operator==(nullptr_t) const throw()
{
return _object == nullptr;
}
bool operator==(const Agile& other) const throw()
{
return _object == other._object;
}
bool operator<(const Agile& other) const throw()
{
return reinterpret_cast<void*>(_object) < reinterpret_cast<void*>(other._object);
}
};
#pragma warning(pop)
template<class U>
bool operator==(nullptr_t, const Agile<U>& a) throw()
{
return a == nullptr;
}
template<class U>
bool operator!=(const Agile<U>& a, nullptr_t) throw()
{
return !(a == nullptr);
}
template<class U>
bool operator!=(nullptr_t, const Agile<U>& a) throw()
{
return !(a == nullptr);
}
template<class U>
bool operator!=(const Agile<U>& a, const Agile<U>& b) throw()
{
return !(a == b);
}
#endif // _PLATFORM_AGILE_H_
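// A minimal usage sketch (illustrative, not part of the header above): Agile<T>
// lets a non-agile WinRT object, such as a MediaCapture^ created on the UI
// thread, be stored as a member and used from other apartments. The class and
// method names below are hypothetical.
class CameraHolder
{
    Platform::Agile<Windows::Media::Capture::MediaCapture> m_capture;
public:
    void InitOnUIThread()
    {
        m_capture = ref new Windows::Media::Capture::MediaCapture();
    }
    Windows::Media::Capture::MediaCapture^ UseFromWorkerThread()
    {
        // Get() returns the object directly when it is agile, or resolves a
        // proxy marshalled for the calling context otherwise.
        return m_capture.Get();
    }
};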

View File

@ -43,6 +43,13 @@
#include "cap_intelperc.hpp" #include "cap_intelperc.hpp"
#include "cap_dshow.hpp" #include "cap_dshow.hpp"
// All WinRT versions newer than 8.0 provide the classes used for video support
#if defined(WINRT) && !defined(WINRT_8_0)
# include "cap_winrt_capture.hpp"
# include "cap_winrt_bridge.hpp"
# define WINRT_VIDEO
#endif
#if defined _M_X64 && defined _MSC_VER && !defined CV_ICC
#pragma optimize("",off)
#pragma warning(disable: 4748)
@ -508,6 +515,9 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
#endif
#ifdef HAVE_INTELPERC
CV_CAP_INTELPERC,
#endif
#ifdef WINRT_VIDEO
CAP_WINRT,
#endif
-1, -1
};
@ -526,6 +536,7 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
{
#if defined(HAVE_DSHOW) || \
defined(HAVE_INTELPERC) || \
defined(WINRT_VIDEO) || \
(0)
Ptr<IVideoCapture> capture;
@ -540,6 +551,13 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
case CV_CAP_INTELPERC:
capture = makePtr<VideoCapture_IntelPerC>();
break; // CV_CAP_INTEL_PERC
#endif
#ifdef WINRT_VIDEO
case CAP_WINRT:
capture = Ptr<IVideoCapture>(new cv::VideoCapture_WinRT(index));
if (capture)
return capture;
break; // CAP_WINRT
#endif
}
if (capture && capture->isOpened())
@ -664,7 +682,29 @@ bool VideoCapture::read(OutputArray image)
VideoCapture& VideoCapture::operator >> (Mat& image)
{
#ifdef WINRT_VIDEO
if (grab())
{
if (retrieve(image))
{
std::lock_guard<std::mutex> lock(VideoioBridge::getInstance().inputBufferMutex);
VideoioBridge& bridge = VideoioBridge::getInstance();
// double buffering
bridge.swapInputBuffers();
auto p = bridge.frontInputPtr;
bridge.bIsFrameNew = false;
// needed here because setting Mat 'image' is not allowed by OutputArray in read()
Mat m(bridge.height, bridge.width, CV_8UC3, p);
image = m;
}
}
#else
read(image);
#endif
return *this;
}
@ -760,4 +800,4 @@ int VideoWriter::fourcc(char c1, char c2, char c3, char c4)
return (c1 & 255) + ((c2 & 255) << 8) + ((c3 & 255) << 16) + ((c4 & 255) << 24);
}
}
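// A minimal consumer sketch (illustrative, not part of the patch): with the
// WinRT path of VideoCapture::operator>> above, an app-defined loop running on
// the background thread reads frames like this; 'frame' keeps its previous
// content until a new frame has been grabbed from the bridge.
void cvLoop()
{
    cv::VideoCapture cam(0);
    cv::Mat frame(480, 640, CV_8UC3); // the Mat size is used as the capture size hint
    for (;;)
    {
        cam >> frame; // non-blocking; copies the front input buffer when a frame is ready
        // ... process 'frame' here ...
    }
}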

View File

@ -540,11 +540,11 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
return true;
case CV_CAP_PROP_IOS_DEVICE_FOCUS:
if ([mCaptureDevice isFocusModeSupported:(int)value]){ if ([mCaptureDevice isFocusModeSupported:(AVCaptureFocusMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setFocusMode:(int)value]; [mCaptureDevice setFocusMode:(AVCaptureFocusMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Focus set");
return true;
@ -553,11 +553,11 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
}
case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
if ([mCaptureDevice isExposureModeSupported:(int)value]){ if ([mCaptureDevice isExposureModeSupported:(AVCaptureExposureMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setExposureMode:(int)value]; [mCaptureDevice setExposureMode:(AVCaptureExposureMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Exposure set");
return true;
@ -566,11 +566,11 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
}
case CV_CAP_PROP_IOS_DEVICE_FLASH:
if ( [mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(int)value]){ if ( [mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(AVCaptureFlashMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setFlashMode:(int)value]; [mCaptureDevice setFlashMode:(AVCaptureFlashMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Flash mode set");
return true;
@ -579,11 +579,11 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
}
case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
if ([mCaptureDevice isWhiteBalanceModeSupported:(int)value]){ if ([mCaptureDevice isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setWhiteBalanceMode:(int)value]; [mCaptureDevice setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"White balance set");
return true;
@ -592,11 +592,11 @@ bool CvCaptureCAM::setProperty(int property_id, double value) {
}
case CV_CAP_PROP_IOS_DEVICE_TORCH:
if ([mCaptureDevice hasFlash] && [mCaptureDevice isTorchModeSupported:(int)value]){ if ([mCaptureDevice hasFlash] && [mCaptureDevice isTorchModeSupported:(AVCaptureTorchMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setTorchMode:(int)value]; [mCaptureDevice setTorchMode:(AVCaptureTorchMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Torch mode set");
return true;

View File

@ -0,0 +1,173 @@
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#include "MediaStreamSink.hpp"
#include "MediaSink.hpp"
#include "CaptureFrameGrabber.hpp"
using namespace Media;
using namespace Platform;
using namespace Windows::Foundation;
using namespace Windows::Media;
using namespace Windows::Media::Capture;
using namespace Windows::Media::MediaProperties;
using namespace concurrency;
using namespace Microsoft::WRL::Details;
using namespace Microsoft::WRL;
task<Media::CaptureFrameGrabber^> Media::CaptureFrameGrabber::CreateAsync(_In_ MediaCapture^ capture, _In_ VideoEncodingProperties^ props, CaptureStreamType streamType)
{
auto reader = ref new Media::CaptureFrameGrabber(capture, props, streamType);
auto profile = ref new MediaEncodingProfile();
profile->Video = props;
task<void> task;
if (reader->_streamType == CaptureStreamType::Preview)
{
task = create_task(capture->StartPreviewToCustomSinkAsync(profile, reader->_mediaExtension));
}
else
{
task = create_task(capture->StartRecordToCustomSinkAsync(profile, reader->_mediaExtension));
}
return task.then([reader]()
{
reader->_state = State::Started;
return reader;
});
}
Media::CaptureFrameGrabber::CaptureFrameGrabber(_In_ MediaCapture^ capture, _In_ VideoEncodingProperties^ props, CaptureStreamType streamType)
: _state(State::Created)
, _streamType(streamType)
, _capture(capture)
{
auto videoSampleHandler = ref new MediaSampleHandler(this, &Media::CaptureFrameGrabber::ProcessSample);
_mediaSink = Make<MediaSink>(nullptr, props, nullptr, videoSampleHandler);
_mediaExtension = reinterpret_cast<IMediaExtension^>(static_cast<AWM::IMediaExtension*>(_mediaSink.Get()));
}
Media::CaptureFrameGrabber::~CaptureFrameGrabber()
{
if (_state == State::Started)
{
if (_streamType == CaptureStreamType::Preview)
{
(void)_capture->StopPreviewAsync();
}
else
{
(void)_capture->StopRecordAsync();
}
}
if (_mediaSink != nullptr)
{
(void)_mediaSink->Shutdown();
_mediaSink = nullptr;
}
_mediaExtension = nullptr;
_capture = nullptr;
}
void Media::CaptureFrameGrabber::ShowCameraSettings()
{
#if WINAPI_FAMILY!=WINAPI_FAMILY_PHONE_APP
if (_state == State::Started)
{
CameraOptionsUI::Show(_capture.Get());
}
#endif
}
task<void> Media::CaptureFrameGrabber::FinishAsync()
{
auto lock = _lock.LockExclusive();
if (_state != State::Started)
{
throw ref new COMException(E_UNEXPECTED, L"State");
}
_state = State::Closing;
if (_mediaSink != nullptr)
{
(void)_mediaSink->Shutdown();
_mediaSink = nullptr;
}
_mediaExtension = nullptr;
task<void> task;
if (_streamType == CaptureStreamType::Preview)
{
task = create_task(_capture->StopPreviewAsync());
}
else
{
task = create_task(_capture->StopRecordAsync());
}
return task.then([this]()
{
auto lock = _lock.LockExclusive();
_state = State::Closed;
_capture = nullptr;
});
}
task<ComPtr<IMF2DBuffer2>> Media::CaptureFrameGrabber::GetFrameAsync()
{
auto lock = _lock.LockExclusive();
if (_state != State::Started)
{
throw ref new COMException(E_UNEXPECTED, L"State");
}
_mediaSink->RequestVideoSample();
task_completion_event<ComPtr<IMF2DBuffer2>> taskEvent;
_videoSampleRequestQueue.push(taskEvent);
return create_task(taskEvent);
}
void Media::CaptureFrameGrabber::ProcessSample(_In_ MediaSample^ sample)
{
task_completion_event<ComPtr<IMF2DBuffer2>> t;
{
auto lock = _lock.LockExclusive();
t = _videoSampleRequestQueue.front();
_videoSampleRequestQueue.pop();
}
ComPtr<IMFMediaBuffer> buffer;
CHK(sample->Sample->ConvertToContiguousBuffer(&buffer));
// Dispatch without the lock taken to avoid deadlocks
t.set(As<IMF2DBuffer2>(buffer));
}

View File

@ -0,0 +1,85 @@
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include "MFIncludes.hpp"
namespace Media {
class MediaSink;
enum class CaptureStreamType
{
Preview = 0,
Record
};
ref class CaptureFrameGrabber sealed
{
public:
// IClosable
virtual ~CaptureFrameGrabber();
virtual void ShowCameraSettings();
internal:
static concurrency::task<CaptureFrameGrabber^> CreateAsync(_In_ WMC::MediaCapture^ capture, _In_ WMMp::VideoEncodingProperties^ props)
{
return CreateAsync(capture, props, CaptureStreamType::Preview);
}
static concurrency::task<CaptureFrameGrabber^> CreateAsync(_In_ WMC::MediaCapture^ capture, _In_ WMMp::VideoEncodingProperties^ props, CaptureStreamType streamType);
concurrency::task<MW::ComPtr<IMF2DBuffer2>> GetFrameAsync();
concurrency::task<void> FinishAsync();
private:
CaptureFrameGrabber(_In_ WMC::MediaCapture^ capture, _In_ WMMp::VideoEncodingProperties^ props, CaptureStreamType streamType);
void ProcessSample(_In_ MediaSample^ sample);
Platform::Agile<WMC::MediaCapture> _capture;
::Windows::Media::IMediaExtension^ _mediaExtension;
MW::ComPtr<MediaSink> _mediaSink;
CaptureStreamType _streamType;
enum class State
{
Created,
Started,
Closing,
Closed
} _state;
std::queue<concurrency::task_completion_event<MW::ComPtr<IMF2DBuffer2>>> _videoSampleRequestQueue;
AutoMF _mf;
MWW::SRWLock _lock;
};
}
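// A minimal usage sketch (illustrative, internal to this component): create a
// grabber on an initialized MediaCapture, pull one frame as an IMF2DBuffer2,
// then tear the pipeline down. The function name is hypothetical and error
// handling is omitted.
concurrency::task<void> GrabOneFrameAsync(_In_ WMC::MediaCapture^ capture,
                                          _In_ WMMp::VideoEncodingProperties^ props)
{
    return Media::CaptureFrameGrabber::CreateAsync(capture, props)
        .then([](Media::CaptureFrameGrabber^ grabber)
    {
        return grabber->GetFrameAsync()
            .then([grabber](MW::ComPtr<IMF2DBuffer2> buffer)
        {
            // Lock2DSize on 'buffer' would expose scanline0/pitch for copying pixels out.
            return grabber->FinishAsync();
        });
    });
}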

View File

@ -0,0 +1,172 @@
// Header for standard system include files.
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include <collection.h>
#include <ppltasks.h>
#include <wrl\implements.h>
#include <wrl\wrappers\corewrappers.h>
#include <Roerrorapi.h>
#include <queue>
#include <sstream>
#include <robuffer.h>
#include <mfapi.h>
#include <mfidl.h>
#include <Mferror.h>
#include <windows.media.h>
#include <windows.media.mediaproperties.h>
namespace AWM = ::ABI::Windows::Media;
namespace AWMMp = ::ABI::Windows::Media::MediaProperties;
namespace AWFC = ::ABI::Windows::Foundation::Collections;
namespace MW = ::Microsoft::WRL;
namespace MWD = ::Microsoft::WRL::Details;
namespace MWW = ::Microsoft::WRL::Wrappers;
namespace WMC = ::Windows::Media::Capture;
namespace WF = ::Windows::Foundation;
namespace WMMp = ::Windows::Media::MediaProperties;
namespace WSS = ::Windows::Storage::Streams;
// Exception-based error handling
#define CHK(statement) {HRESULT _hr = (statement); if (FAILED(_hr)) { throw ref new Platform::COMException(_hr); };}
#define CHKNULL(p) {if ((p) == nullptr) { throw ref new Platform::NullReferenceException(L#p); };}
// Exception-free error handling
#define CHK_RETURN(statement) {hr = (statement); if (FAILED(hr)) { return hr; };}
// Cast a C++/CX smartpointer to an ABI smartpointer
template<typename T, typename U>
MW::ComPtr<T> As(U^ in)
{
MW::ComPtr<T> out;
CHK(reinterpret_cast<IInspectable*>(in)->QueryInterface(IID_PPV_ARGS(&out)));
return out;
}
// Cast an ABI smartpointer
template<typename T, typename U>
Microsoft::WRL::ComPtr<T> As(const Microsoft::WRL::ComPtr<U>& in)
{
Microsoft::WRL::ComPtr<T> out;
CHK(in.As(&out));
return out;
}
// Cast an ABI smartpointer
template<typename T, typename U>
Microsoft::WRL::ComPtr<T> As(U* in)
{
Microsoft::WRL::ComPtr<T> out;
CHK(in->QueryInterface(IID_PPV_ARGS(&out)));
return out;
}
// Get access to bytes in IBuffer
inline unsigned char* GetData(_In_ WSS::IBuffer^ buffer)
{
unsigned char* bytes = nullptr;
CHK(As<WSS::IBufferByteAccess>(buffer)->Buffer(&bytes));
return bytes;
}
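// A minimal usage sketch (illustrative): GetData gives raw byte access to any
// IBuffer, for example a WriteableBitmap's PixelBuffer when copying a frame
// into a XAML image source. The function name is hypothetical.
inline void FillBuffer(_In_ WSS::IBuffer^ buffer, unsigned char value)
{
    unsigned char* bytes = GetData(buffer);
    for (unsigned int i = 0; i < buffer->Length; i++)
    {
        bytes[i] = value;
    }
}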
// Class to start and shutdown Media Foundation
class AutoMF
{
public:
AutoMF()
: _bInitialized(false)
{
CHK(MFStartup(MF_VERSION));
_bInitialized = true; // mark MF as started so the destructor calls MFShutdown
}
~AutoMF()
{
if (_bInitialized)
{
(void)MFShutdown();
}
}
private:
bool _bInitialized;
};
// Helper to track error origin
template <size_t N>
HRESULT OriginateError(__in HRESULT hr, __in wchar_t const (&str)[N])
{
if (FAILED(hr))
{
::RoOriginateErrorW(hr, N - 1, str);
}
return hr;
}
// Helper to track error origin
inline HRESULT OriginateError(__in HRESULT hr)
{
if (FAILED(hr))
{
::RoOriginateErrorW(hr, 0, nullptr);
}
return hr;
}
// Converts exceptions into HRESULTs
template <typename Lambda>
HRESULT ExceptionBoundary(Lambda&& lambda)
{
try
{
lambda();
return S_OK;
}
catch (Platform::Exception^ e)
{
return e->HResult;
}
catch (const std::bad_alloc&)
{
return E_OUTOFMEMORY;
}
catch (const std::exception&)
{
return E_FAIL;
}
}
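// A minimal usage sketch (illustrative): CHK/CHKNULL throw inside the body and
// ExceptionBoundary converts any failure back into an HRESULT at a COM-style
// boundary. The function name is hypothetical.
inline HRESULT GetFrameSize(_In_ IMFMediaType* mt, _Out_ UINT32* width, _Out_ UINT32* height)
{
    return ExceptionBoundary([=]()
    {
        CHKNULL(mt);
        CHKNULL(width);
        CHKNULL(height);
        CHK(MFGetAttributeSize(mt, MF_MT_FRAME_SIZE, width, height));
    });
}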
// Wraps an IMFSample in a C++/CX class to be able to define a callback delegate
ref class MediaSample sealed
{
internal:
MW::ComPtr<IMFSample> Sample;
};
delegate void MediaSampleHandler(MediaSample^ sample);

View File

@ -0,0 +1,396 @@
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include "MediaStreamSink.hpp"
#include "MFIncludes.hpp"
namespace Media {
const unsigned int c_audioStreamSinkId = 0;
const unsigned int c_videoStreamSinkId = 1;
class MediaSink WrlSealed
: public MW::RuntimeClass<
MW::RuntimeClassFlags<
MW::RuntimeClassType::WinRtClassicComMix>
, AWM::IMediaExtension
, IMFMediaSink
, IMFClockStateSink
, MW::FtmBase
>
{
InspectableClass(L"MediaSink", BaseTrust)
public:
MediaSink(
_In_opt_ WMMp::AudioEncodingProperties^ audioProps,
_In_opt_ WMMp::VideoEncodingProperties^ videoProps,
_In_opt_ MediaSampleHandler^ audioSampleHandler,
_In_opt_ MediaSampleHandler^ videoSampleHandler
)
: _shutdown(false)
{
MW::ComPtr<IMFMediaType> audioMT;
if (audioProps != nullptr)
{
CHK(MFCreateMediaTypeFromProperties(As<IUnknown>(audioProps).Get(), &audioMT));
_audioStreamSink = MW::Make<MediaStreamSink>(
this,
c_audioStreamSinkId,
audioMT,
audioSampleHandler
);
}
MW::ComPtr<IMFMediaType> videoMT;
if (videoProps != nullptr)
{
CHK(MFCreateMediaTypeFromProperties(As<IUnknown>(videoProps).Get(), &videoMT));
_videoStreamSink = MW::Make<MediaStreamSink>(
this,
c_videoStreamSinkId,
videoMT,
videoSampleHandler
);
}
}
void RequestAudioSample()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
_audioStreamSink->RequestSample();
}
void RequestVideoSample()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
_videoStreamSink->RequestSample();
}
void SetCurrentAudioMediaType(_In_ IMFMediaType* mt)
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
_audioStreamSink->InternalSetCurrentMediaType(mt);
}
void SetCurrentVideoMediaType(_In_ IMFMediaType* mt)
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
_videoStreamSink->InternalSetCurrentMediaType(mt);
}
//
// IMediaExtension
//
IFACEMETHODIMP SetProperties(_In_ AWFC::IPropertySet * /*configuration*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
//
// IMFMediaSink
//
IFACEMETHODIMP GetCharacteristics(_Out_ DWORD *characteristics)
{
return ExceptionBoundary([this, characteristics]()
{
_VerifyNotShutdown();
CHKNULL(characteristics);
*characteristics = MEDIASINK_RATELESS | MEDIASINK_FIXED_STREAMS;
});
}
IFACEMETHODIMP AddStreamSink(
DWORD /*streamSinkIdentifier*/,
_In_ IMFMediaType * /*mediaType*/,
_COM_Outptr_ IMFStreamSink **streamSink
)
{
return ExceptionBoundary([this, streamSink]()
{
_VerifyNotShutdown();
CHKNULL(streamSink);
*streamSink = nullptr;
CHK(MF_E_STREAMSINKS_FIXED);
});
}
IFACEMETHODIMP RemoveStreamSink(DWORD /*streamSinkIdentifier*/)
{
return ExceptionBoundary([this]()
{
_VerifyNotShutdown();
CHK(MF_E_STREAMSINKS_FIXED);
});
}
IFACEMETHODIMP GetStreamSinkCount(_Out_ DWORD *streamSinkCount)
{
return ExceptionBoundary([this, streamSinkCount]()
{
CHKNULL(streamSinkCount);
_VerifyNotShutdown();
*streamSinkCount = (_audioStreamSink != nullptr) + (_videoStreamSink != nullptr);
});
}
IFACEMETHODIMP GetStreamSinkByIndex(DWORD index, _COM_Outptr_ IMFStreamSink **streamSink)
{
return ExceptionBoundary([this, index, streamSink]()
{
auto lock = _lock.LockExclusive();
CHKNULL(streamSink);
*streamSink = nullptr;
_VerifyNotShutdown();
switch (index)
{
case 0:
if (_audioStreamSink != nullptr)
{
CHK(_audioStreamSink.CopyTo(streamSink));
}
else
{
CHK(_videoStreamSink.CopyTo(streamSink));
}
break;
case 1:
if ((_audioStreamSink != nullptr) && (_videoStreamSink != nullptr))
{
CHK(_videoStreamSink.CopyTo(streamSink));
}
else
{
CHK(E_INVALIDARG);
}
break;
default:
CHK(E_INVALIDARG);
}
});
}
IFACEMETHODIMP GetStreamSinkById(DWORD identifier, _COM_Outptr_ IMFStreamSink **streamSink)
{
return ExceptionBoundary([this, identifier, streamSink]()
{
auto lock = _lock.LockExclusive();
CHKNULL(streamSink);
*streamSink = nullptr;
_VerifyNotShutdown();
if ((identifier == 0) && (_audioStreamSink != nullptr))
{
CHK(_audioStreamSink.CopyTo(streamSink));
}
else if ((identifier == 1) && (_videoStreamSink != nullptr))
{
CHK(_videoStreamSink.CopyTo(streamSink));
}
else
{
CHK(E_INVALIDARG);
}
});
}
IFACEMETHODIMP SetPresentationClock(_In_ IMFPresentationClock *clock)
{
return ExceptionBoundary([this, clock]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
if (_clock != nullptr)
{
CHK(_clock->RemoveClockStateSink(this));
_clock = nullptr;
}
if (clock != nullptr)
{
CHK(clock->AddClockStateSink(this));
_clock = clock;
}
});
}
IFACEMETHODIMP GetPresentationClock(_COM_Outptr_ IMFPresentationClock **clock)
{
return ExceptionBoundary([this, clock]()
{
auto lock = _lock.LockExclusive();
CHKNULL(clock);
*clock = nullptr;
_VerifyNotShutdown();
if (_clock != nullptr)
{
CHK(_clock.CopyTo(clock))
}
});
}
IFACEMETHODIMP Shutdown()
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
if (_shutdown)
{
return;
}
_shutdown = true;
if (_audioStreamSink != nullptr)
{
_audioStreamSink->Shutdown();
_audioStreamSink = nullptr;
}
if (_videoStreamSink != nullptr)
{
_videoStreamSink->Shutdown();
_videoStreamSink = nullptr;
}
if (_clock != nullptr)
{
(void)_clock->RemoveClockStateSink(this);
_clock = nullptr;
}
});
}
//
// IMFClockStateSink methods
//
IFACEMETHODIMP OnClockStart(MFTIME /*hnsSystemTime*/, LONGLONG /*llClockStartOffset*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
IFACEMETHODIMP OnClockStop(MFTIME /*hnsSystemTime*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
IFACEMETHODIMP OnClockPause(MFTIME /*hnsSystemTime*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
IFACEMETHODIMP OnClockRestart(MFTIME /*hnsSystemTime*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
IFACEMETHODIMP OnClockSetRate(MFTIME /*hnsSystemTime*/, float /*flRate*/)
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
private:
bool _shutdown;
void _VerifyNotShutdown()
{
if (_shutdown)
{
CHK(MF_E_SHUTDOWN);
}
}
MW::ComPtr<MediaStreamSink> _audioStreamSink;
MW::ComPtr<MediaStreamSink> _videoStreamSink;
MW::ComPtr<IMFPresentationClock> _clock;
MWW::SRWLock _lock;
};
}
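// A minimal construction sketch (illustrative; it mirrors what
// CaptureFrameGrabber does in its constructor): build a video-only MediaSink
// and expose it to MediaCapture as an IMediaExtension. The helper name is
// hypothetical.
inline ::Windows::Media::IMediaExtension^ MakeVideoSinkExtension(
    _In_ WMMp::VideoEncodingProperties^ videoProps,
    _In_ MediaSampleHandler^ onVideoSample,
    MW::ComPtr<Media::MediaSink>& sink)
{
    sink = MW::Make<Media::MediaSink>(nullptr, videoProps, nullptr, onVideoSample);
    return reinterpret_cast<::Windows::Media::IMediaExtension^>(
        static_cast<AWM::IMediaExtension*>(sink.Get()));
}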

View File

@ -0,0 +1,386 @@
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#include "MediaStreamSink.hpp"
#include "MFIncludes.hpp"
using namespace Media;
using namespace Microsoft::WRL;
using namespace Platform;
using namespace Windows::Foundation;
MediaStreamSink::MediaStreamSink(
__in const MW::ComPtr<IMFMediaSink>& sink,
__in DWORD id,
__in const MW::ComPtr<IMFMediaType>& mt,
__in MediaSampleHandler^ sampleHandler
)
: _shutdown(false)
, _id(-1)
, _width(0)
, _height(0)
{
CHK(MFCreateEventQueue(&_eventQueue));
CHK(MFCreateMediaType(&_curMT));
_UpdateMediaType(mt);
_sink = sink;
_id = id;
_sampleHandler = sampleHandler;
}
HRESULT MediaStreamSink::GetMediaSink(__deref_out IMFMediaSink **sink)
{
return ExceptionBoundary([this, sink]()
{
auto lock = _lock.LockExclusive();
CHKNULL(sink);
*sink = nullptr;
_VerifyNotShutdown();
CHK(_sink.CopyTo(sink));
});
}
HRESULT MediaStreamSink::GetIdentifier(__out DWORD *identifier)
{
return ExceptionBoundary([this, identifier]()
{
auto lock = _lock.LockExclusive();
CHKNULL(identifier);
_VerifyNotShutdown();
*identifier = _id;
});
}
HRESULT MediaStreamSink::GetMediaTypeHandler(__deref_out IMFMediaTypeHandler **handler)
{
return ExceptionBoundary([this, handler]()
{
auto lock = _lock.LockExclusive();
CHKNULL(handler);
*handler = nullptr;
_VerifyNotShutdown();
*handler = this;
this->AddRef();
});
}
void MediaStreamSink::RequestSample()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
CHK(_eventQueue->QueueEventParamVar(MEStreamSinkRequestSample, GUID_NULL, S_OK, nullptr));
}
HRESULT MediaStreamSink::ProcessSample(__in_opt IMFSample *sample)
{
return ExceptionBoundary([this, sample]()
{
MediaSampleHandler^ sampleHandler;
auto mediaSample = ref new MediaSample();
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
if (sample == nullptr)
{
return;
}
mediaSample->Sample = sample;
sampleHandler = _sampleHandler;
}
// Call back without the lock taken to avoid deadlocks
sampleHandler(mediaSample);
});
}
HRESULT MediaStreamSink::PlaceMarker(__in MFSTREAMSINK_MARKER_TYPE /*markerType*/, __in const PROPVARIANT * /*markerValue*/, __in const PROPVARIANT * contextValue)
{
return ExceptionBoundary([this, contextValue]()
{
auto lock = _lock.LockExclusive();
CHKNULL(contextValue);
_VerifyNotShutdown();
CHK(_eventQueue->QueueEventParamVar(MEStreamSinkMarker, GUID_NULL, S_OK, contextValue));
});
}
HRESULT MediaStreamSink::Flush()
{
return ExceptionBoundary([this]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
});
}
HRESULT MediaStreamSink::GetEvent(__in DWORD flags, __deref_out IMFMediaEvent **event)
{
return ExceptionBoundary([this, flags, event]()
{
CHKNULL(event);
*event = nullptr;
ComPtr<IMFMediaEventQueue> eventQueue;
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
eventQueue = _eventQueue;
}
// May block for a while
CHK(eventQueue->GetEvent(flags, event));
});
}
HRESULT MediaStreamSink::BeginGetEvent(__in IMFAsyncCallback *callback, __in_opt IUnknown *state)
{
return ExceptionBoundary([this, callback, state]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
CHK(_eventQueue->BeginGetEvent(callback, state));
});
}
HRESULT MediaStreamSink::EndGetEvent(__in IMFAsyncResult *result, __deref_out IMFMediaEvent **event)
{
return ExceptionBoundary([this, result, event]()
{
auto lock = _lock.LockExclusive();
CHKNULL(event);
*event = nullptr;
_VerifyNotShutdown();
CHK(_eventQueue->EndGetEvent(result, event));
});
}
HRESULT MediaStreamSink::QueueEvent(
__in MediaEventType met,
__in REFGUID extendedType,
__in HRESULT status,
__in_opt const PROPVARIANT *value
)
{
return ExceptionBoundary([this, met, extendedType, status, value]()
{
auto lock = _lock.LockExclusive();
_VerifyNotShutdown();
CHK(_eventQueue->QueueEventParamVar(met, extendedType, status, value));
});
}
HRESULT MediaStreamSink::IsMediaTypeSupported(__in IMFMediaType *mediaType, __deref_out_opt IMFMediaType **closestMediaType)
{
bool supported = false;
HRESULT hr = ExceptionBoundary([this, mediaType, closestMediaType, &supported]()
{
auto lock = _lock.LockExclusive();
HRESULT hr = S_OK;
if (closestMediaType != nullptr)
{
*closestMediaType = nullptr;
}
CHKNULL(mediaType);
_VerifyNotShutdown();
supported = _IsMediaTypeSupported(mediaType);
});
// Avoid throwing an exception to return MF_E_INVALIDMEDIATYPE as this is not an exceptional case
return FAILED(hr) ? hr : supported ? S_OK : MF_E_INVALIDMEDIATYPE;
}
HRESULT MediaStreamSink::GetMediaTypeCount(__out DWORD *typeCount)
{
return ExceptionBoundary([this, typeCount]()
{
auto lock = _lock.LockExclusive();
CHKNULL(typeCount);
_VerifyNotShutdown();
// No media type provided by default (app needs to specify it)
*typeCount = 0;
});
}
HRESULT MediaStreamSink::GetMediaTypeByIndex(__in DWORD /*index*/, __deref_out IMFMediaType **mediaType)
{
HRESULT hr = ExceptionBoundary([this, mediaType]()
{
auto lock = _lock.LockExclusive();
CHKNULL(mediaType);
*mediaType = nullptr;
_VerifyNotShutdown();
});
// Avoid throwing an exception to return MF_E_NO_MORE_TYPES as this is not an exceptional case
return FAILED(hr) ? hr : MF_E_NO_MORE_TYPES;
}
HRESULT MediaStreamSink::SetCurrentMediaType(__in IMFMediaType *mediaType)
{
return ExceptionBoundary([this, mediaType]()
{
auto lock = _lock.LockExclusive();
HRESULT hr = S_OK;
CHKNULL(mediaType);
_VerifyNotShutdown();
if (!_IsMediaTypeSupported(mediaType))
{
CHK(MF_E_INVALIDMEDIATYPE);
}
_UpdateMediaType(mediaType);
});
}
HRESULT MediaStreamSink::GetCurrentMediaType(__deref_out_opt IMFMediaType **mediaType)
{
return ExceptionBoundary([this, mediaType]()
{
auto lock = _lock.LockExclusive();
CHKNULL(mediaType);
*mediaType = nullptr;
_VerifyNotShutdown();
ComPtr<IMFMediaType> mt;
CHK(MFCreateMediaType(&mt));
CHK(_curMT->CopyAllItems(mt.Get()));
*mediaType = mt.Detach();
});
}
HRESULT MediaStreamSink::GetMajorType(__out GUID *majorType)
{
return ExceptionBoundary([this, majorType]()
{
auto lock = _lock.LockExclusive();
CHKNULL(majorType);
_VerifyNotShutdown();
*majorType = _majorType;
});
}
void MediaStreamSink::InternalSetCurrentMediaType(__in const ComPtr<IMFMediaType>& mediaType)
{
auto lock = _lock.LockExclusive();
CHKNULL(mediaType);
_VerifyNotShutdown();
_UpdateMediaType(mediaType);
}
void MediaStreamSink::Shutdown()
{
auto lock = _lock.LockExclusive();
if (_shutdown)
{
return;
}
_shutdown = true;
(void)_eventQueue->Shutdown();
_eventQueue = nullptr;
_curMT = nullptr;
_sink = nullptr;
_sampleHandler = nullptr;
}
bool MediaStreamSink::_IsMediaTypeSupported(__in const ComPtr<IMFMediaType>& mt) const
{
GUID majorType;
GUID subType;
if (SUCCEEDED(mt->GetGUID(MF_MT_MAJOR_TYPE, &majorType)) &&
SUCCEEDED(mt->GetGUID(MF_MT_SUBTYPE, &subType)) &&
(majorType == _majorType) &&
(subType == _subType))
{
return true;
}
return false;
}
void MediaStreamSink::_UpdateMediaType(__in const ComPtr<IMFMediaType>& mt)
{
CHK(mt->GetGUID(MF_MT_MAJOR_TYPE, &_majorType));
CHK(mt->GetGUID(MF_MT_SUBTYPE, &_subType));
if (_majorType == MFMediaType_Video)
{
CHK(MFGetAttributeSize(mt.Get(), MF_MT_FRAME_SIZE, &_width, &_height));
}
CHK(mt->CopyAllItems(_curMT.Get()));
}

View File

@ -0,0 +1,114 @@
// Copyright (c) Microsoft. All rights reserved.
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include "MFIncludes.hpp"
namespace Media {
class MediaStreamSink WrlSealed :
public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
IMFStreamSink,
IMFMediaEventGenerator,
IMFMediaTypeHandler
>
{
public:
MediaStreamSink(
__in const MW::ComPtr<IMFMediaSink>& sink,
__in DWORD id,
__in const MW::ComPtr<IMFMediaType>& mt,
__in MediaSampleHandler^ sampleHandler
);
//
// IMFStreamSink
//
IFACEMETHODIMP GetMediaSink(__deref_out IMFMediaSink **sink);
IFACEMETHODIMP GetIdentifier(__out DWORD *identifier);
IFACEMETHODIMP GetMediaTypeHandler(__deref_out IMFMediaTypeHandler **handler);
IFACEMETHODIMP ProcessSample(__in_opt IMFSample *sample);
IFACEMETHODIMP PlaceMarker(__in MFSTREAMSINK_MARKER_TYPE markerType, __in const PROPVARIANT * markerValue, __in const PROPVARIANT * contextValue);
IFACEMETHODIMP Flush();
//
// IMFMediaEventGenerator
//
IFACEMETHODIMP GetEvent(__in DWORD flags, __deref_out IMFMediaEvent **event);
IFACEMETHODIMP BeginGetEvent(__in IMFAsyncCallback *callback, __in_opt IUnknown *state);
IFACEMETHODIMP EndGetEvent(__in IMFAsyncResult *result, __deref_out IMFMediaEvent **event);
IFACEMETHODIMP QueueEvent(__in MediaEventType met, __in REFGUID extendedType, __in HRESULT status, __in_opt const PROPVARIANT *value);
//
// IMFMediaTypeHandler
//
IFACEMETHODIMP IsMediaTypeSupported(__in IMFMediaType *mediaType, __deref_out_opt IMFMediaType **closestMediaType);
IFACEMETHODIMP GetMediaTypeCount(__out DWORD *typeCount);
IFACEMETHODIMP GetMediaTypeByIndex(__in DWORD index, __deref_out IMFMediaType **mediaType);
IFACEMETHODIMP SetCurrentMediaType(__in IMFMediaType *mediaType);
IFACEMETHODIMP GetCurrentMediaType(__deref_out_opt IMFMediaType **mediaType);
IFACEMETHODIMP GetMajorType(__out GUID *majorType);
//
// Misc
//
void InternalSetCurrentMediaType(__in const MW::ComPtr<IMFMediaType>& mediaType);
void RequestSample();
void Shutdown();
private:
bool _IsMediaTypeSupported(__in const MW::ComPtr<IMFMediaType>& mt) const;
void _UpdateMediaType(__in const MW::ComPtr<IMFMediaType>& mt);
void _VerifyNotShutdown()
{
if (_shutdown)
{
CHK(MF_E_SHUTDOWN);
}
}
MW::ComPtr<IMFMediaSink> _sink;
MW::ComPtr<IMFMediaEventQueue> _eventQueue;
MW::ComPtr<IMFMediaType> _curMT;
MediaSampleHandler^ _sampleHandler;
GUID _majorType;
GUID _subType;
unsigned int _width;
unsigned int _height;
DWORD _id;
bool _shutdown;
MWW::SRWLock _lock;
};
}
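// A minimal flow sketch (illustrative): MediaSink::RequestVideoSample() makes
// this stream sink queue a MEStreamSinkRequestSample event; the media pipeline
// answers by calling ProcessSample(), which forwards the wrapped IMFSample to
// the MediaSampleHandler supplied at construction, e.g. a handler like the
// hypothetical one below.
inline void OnVideoSample(MediaSample^ sample)
{
    MW::ComPtr<IMFMediaBuffer> buffer;
    CHK(sample->Sample->ConvertToContiguousBuffer(&buffer));
    // ... read the frame bytes out of 'buffer' ...
}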

View File

@ -0,0 +1,89 @@
// videoio to XAML bridge for OpenCV
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "opencv2\videoio\cap_winrt.hpp"
#include "cap_winrt_capture.hpp"
#include "cap_winrt_bridge.hpp"
#include "cap_winrt_video.hpp"
using namespace Windows::Foundation;
using namespace Windows::Media::Capture;
using namespace Windows::Media::MediaProperties;
using namespace Windows::Devices::Enumeration;
using namespace Windows::UI::Xaml::Media::Imaging;
using namespace Microsoft::WRL;
using namespace Platform;
using namespace ::Concurrency;
using namespace ::std;
/***************************** VideoioBridge class ******************************/
// non-blocking
void VideoioBridge::requestForUIthreadAsync(int action, int widthp, int heightp)
{
reporter.report(action);
}
VideoioBridge& VideoioBridge::getInstance()
{
static VideoioBridge instance;
return instance;
}
void VideoioBridge::swapInputBuffers()
{
// TODO: already locked, check validity
// lock_guard<mutex> lock(inputBufferMutex);
swap(backInputPtr, frontInputPtr);
//if (currentFrame != frameCounter)
//{
// currentFrame = frameCounter;
// swap(backInputPtr, frontInputPtr);
//}
}
void VideoioBridge::swapOutputBuffers()
{
lock_guard<mutex> lock(outputBufferMutex);
swap(frontOutputBuffer, backOutputBuffer);
}
void VideoioBridge::allocateOutputBuffers()
{
frontOutputBuffer = ref new WriteableBitmap(width, height);
backOutputBuffer = ref new WriteableBitmap(width, height);
}
void VideoioBridge::imshow()
{
VideoioBridge::getInstance().swapOutputBuffers();
VideoioBridge::getInstance().requestForUIthreadAsync(cv::UPDATE_IMAGE_ELEMENT);
}
// end

View File

@ -0,0 +1,96 @@
// videoio to XAML bridge for OpenCV
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
// this header is included in the XAML App, so it cannot include any
// OpenCV headers, or a static assert will be raised
#include <ppl.h>
#include <ppltasks.h>
#include <concrt.h>
#include <agile.h>
#include <opencv2\core.hpp>
#include <mutex>
#include <memory>
#include <atomic>
#include <functional>
// Class VideoioBridge (singleton) is needed because the interface for
// VideoCapture_WinRT in cap_winrt_capture.hpp is fixed by OpenCV.
class VideoioBridge
{
public:
static VideoioBridge& getInstance();
// call after initialization
void setReporter(Concurrency::progress_reporter<int> pr) { reporter = pr; }
// to be called from cvMain via cap_winrt on bg thread - non-blocking (async)
void requestForUIthreadAsync( int action, int width=0, int height=0 );
// TODO: modify in window.cpp: void cv::imshow( const String& winname, InputArray _img )
void imshow(/*cv::InputArray matToShow*/); // shows Mat in the cvImage element
void swapInputBuffers();
void allocateOutputBuffers();
void swapOutputBuffers();
int deviceIndex, width, height;
std::atomic<bool> bIsFrameNew;
std::mutex inputBufferMutex; // input is double buffered
unsigned char * frontInputPtr; // OpenCV reads this
unsigned char * backInputPtr; // Video grabber writes this
std::atomic<unsigned long> frameCounter;
unsigned long currentFrame;
std::mutex outputBufferMutex; // output is double buffered
Windows::UI::Xaml::Media::Imaging::WriteableBitmap^ frontOutputBuffer; // OpenCV writes this
Windows::UI::Xaml::Media::Imaging::WriteableBitmap^ backOutputBuffer; // XAML reads this
Windows::UI::Xaml::Controls::Image ^cvImage;
private:
VideoioBridge() {
deviceIndex = 0;
width = 640;
height = 480;
deviceReady = false;
bIsFrameNew = false;
currentFrame = 0;
frameCounter = 0;
};
// singleton
VideoioBridge(VideoioBridge const &);
void operator=(const VideoioBridge &);
std::atomic<bool> deviceReady;
Concurrency::progress_reporter<int> reporter;
};
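// A minimal producer sketch (illustrative, not part of the original header):
// the grabber thread copies a decoded frame into the back input buffer under
// inputBufferMutex and flags it, while the OpenCV side swaps and reads the
// front buffer in VideoCapture::operator>>. The function name is hypothetical.
inline void PublishFrame(const unsigned char* src, size_t bytes)
{
    VideoioBridge& bridge = VideoioBridge::getInstance();
    std::lock_guard<std::mutex> lock(bridge.inputBufferMutex);
    for (size_t i = 0; i < bytes; i++)
    {
        bridge.backInputPtr[i] = src[i];
    }
    bridge.bIsFrameNew = true;
    bridge.frameCounter++;
}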

View File

@ -0,0 +1,279 @@
// Capture support for WinRT
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "precomp.hpp"
#include "cap_winrt_capture.hpp"
#include "cap_winrt_bridge.hpp"
#include "cap_winrt_video.hpp"
#include <opencv2\videoio\cap_winrt.hpp>
using namespace Windows::Foundation;
using namespace Windows::Media::Capture;
using namespace Windows::Media::MediaProperties;
using namespace Windows::Devices::Enumeration;
using namespace Platform;
using namespace Windows::UI::Xaml::Media::Imaging;
using namespace Microsoft::WRL;
using namespace ::std;
// nb. VideoCapture_WinRT is not a singleton, so the Mats are made file statics
// we do not support more than one capture device simultaneously with the
// design at this time
// nb. inputBufferMutex was not able to guarantee that OpenCV Mats were
// ready to accept data in the UI thread (memory access exceptions were thrown
// even though buffer address was good).
// Therefore allocation of Mats is also done on the UI thread before the video
// device is initialized.
static cv::Mat frontInputMat;
static cv::Mat backInputMat;
namespace cv {
/***************************** exported control functions ******************************/
template <typename ...Args>
void winrt_startMessageLoop(std::function<void(Args...)>&& callback, Args... args)
{
auto asyncTask = ::concurrency::create_async([=](::concurrency::progress_reporter<int> reporter)
{
VideoioBridge::getInstance().setReporter(reporter);
// frame reading loop
callback(args...);
});
asyncTask->Progress = ref new AsyncActionProgressHandler<int>([=](IAsyncActionWithProgress<int>^ act, int progress)
{
int action = progress;
// these actions will be processed on the UI thread asynchronously
switch (action)
{
case OPEN_CAMERA:
winrt_openCamera();
break;
case CLOSE_CAMERA:
winrt_closeGrabber();
break;
case UPDATE_IMAGE_ELEMENT:
winrt_updateFrameContainer();
break;
}
});
}
template <typename ...Args>
void winrt_startMessageLoop(void callback(Args...), Args... args)
{
winrt_startMessageLoop(std::function<void(Args...)>(callback), args...);
}
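// A minimal start-up sketch (illustrative, not part of this file): a XAML app
// hands its OpenCV entry point to winrt_startMessageLoop, which runs it on a
// background thread while the progress handler above services OPEN_CAMERA,
// CLOSE_CAMERA and UPDATE_IMAGE_ELEMENT requests on the UI thread.
// 'cvMain' and 'startOpenCVLoop' are hypothetical names.
void cvMain(); // app-defined frame loop (e.g. VideoCapture cam(0); cam >> frame; ...)
void startOpenCVLoop()
{
    winrt_startMessageLoop(cvMain); // typically called from the XAML page's constructor
}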
void winrt_onVisibilityChanged(bool visible) {
if (visible)
{
VideoioBridge& bridge = VideoioBridge::getInstance();
// only start the grabber if the camera was opened in OpenCV
if (bridge.backInputPtr != nullptr)
{
if (Video::getInstance().isStarted()) return;
int device = bridge.deviceIndex;
int width = bridge.width;
int height = bridge.height;
winrt_initGrabber(device, width, height);
}
} else
{
//grabberStarted = false;
winrt_closeGrabber();
}
}
void winrt_imshow() {
VideoioBridge::getInstance().imshow();
}
void winrt_setFrameContainer(::Windows::UI::Xaml::Controls::Image^ image) {
VideoioBridge::getInstance().cvImage = image;
}
/********************************* Internal helpers ************************************/
void winrt_updateFrameContainer()
{
// copy output Mat to WBM
winrt_copyOutput();
// set XAML image element with image WBM
VideoioBridge::getInstance().cvImage->Source = VideoioBridge::getInstance().backOutputBuffer;
}
// performed on UI thread
bool winrt_openCamera()
{
VideoioBridge& bridge = VideoioBridge::getInstance();
int device = bridge.deviceIndex;
int width = bridge.width;
int height = bridge.height;
// buffers must alloc'd on UI thread
winrt_allocateBuffers(width, height);
// nb. video capture device init must be done on UI thread;
if (!Video::getInstance().isStarted())
{
winrt_initGrabber(device, width, height);
return true;
}
return false;
}
// performed on UI thread
void winrt_allocateBuffers(int width, int height)
{
VideoioBridge& bridge = VideoioBridge::getInstance();
// allocate input Mats (bgra8 = CV_8UC4, RGB24 = CV_8UC3)
frontInputMat.create(height, width, CV_8UC3);
backInputMat.create(height, width, CV_8UC3);
bridge.frontInputPtr = frontInputMat.ptr(0);
bridge.backInputPtr = backInputMat.ptr(0);
bridge.allocateOutputBuffers();
}
// non-blocking
bool winrt_initGrabber(int device, int w, int h) {
// nb. Video class is not exported outside of this DLL
// due to complexities in the CaptureFrameGrabber ref class
// as written in the header not mixing well with pure C++ classes
return Video::getInstance().initGrabber(device, w, h);
}
void winrt_closeGrabber() {
Video::getInstance().closeGrabber();
}
// nb on UI thread
void winrt_copyOutput() {
Video::getInstance().CopyOutput();
}
/********************************* VideoCapture_WinRT class ****************************/
VideoCapture_WinRT::VideoCapture_WinRT(int device) : started(false)
{
VideoioBridge::getInstance().deviceIndex = device;
}
bool VideoCapture_WinRT::isOpened() const
{
return true; // started;
}
// grab a frame:
// this will NOT block per spec
// should be called on the image processing thread, not the UI thread
bool VideoCapture_WinRT::grabFrame()
{
// if device is not started we must return true so retrieveFrame() is called to start device
// nb. we cannot start the device here because we do not know the size of the input Mat
if (!started) return true;
if (VideoioBridge::getInstance().bIsFrameNew)
{
return true;
}
// nb. if blocking is to be added:
// unique_lock<mutex> lock(VideoioBridge::getInstance().frameReadyMutex);
// VideoioBridge::getInstance().frameReadyEvent.wait(lock);
return false;
}
// should be called on the image processing thread after grabFrame
// see VideoCapture::read
bool VideoCapture_WinRT::retrieveFrame(int channel, cv::OutputArray outArray)
{
if (!started) {
int width, height;
width = outArray.size().width;
height = outArray.size().height;
if (width == 0) width = 640;
if (height == 0) height = 480;
VideoioBridge::getInstance().width = width;
VideoioBridge::getInstance().height = height;
// nb. Mats will be alloc'd on UI thread
// request device init on UI thread - this does not block, and is async
VideoioBridge::getInstance().requestForUIthreadAsync(OPEN_CAMERA,
outArray.size().width, outArray.size().height);
started = true;
return false;
}
if (!started) return false;
return VideoioBridge::getInstance().bIsFrameNew;
}
bool VideoCapture_WinRT::setProperty(int property_id, double value)
{
switch (property_id)
{
case CAP_PROP_FRAME_WIDTH:
size.width = (int)value;
break;
case CAP_PROP_FRAME_HEIGHT:
size.height = (int)value;
break;
default:
return false;
}
return true;
}
}
// end

View File

@ -0,0 +1,82 @@
// Capture support for WinRT
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include "precomp.hpp"
#include <mutex>
#include <memory>
#include <condition_variable>
#include <atomic>
#include <agile.h>
// nb. implemented the newer IVideoCapture C++ interface so that we can work
// directly with Mat, not the older C cv interface
// (which may have added overhead for IPL file conversion)
namespace cv {
/******************* Internal helpers **************************************/
void winrt_updateFrameContainer();
bool winrt_openCamera();
bool winrt_initGrabber(int device, int w, int h);
void winrt_closeGrabber();
void winrt_copyOutput();
void winrt_allocateBuffers(int width, int height);
/******************* VideoCapture_WinRT class ******************************/
class VideoCapture_WinRT : public IVideoCapture
{
public:
VideoCapture_WinRT() : started(false) {}
VideoCapture_WinRT(int device);
virtual ~VideoCapture_WinRT() {}
// from base class IVideoCapture
virtual double getProperty(int) { return 0; }
virtual bool setProperty(int, double);
virtual bool grabFrame();
virtual bool retrieveFrame(int channel, cv::OutputArray outArray);
// Return the type of the capture object
virtual int getCaptureDomain() { return CAP_WINRT; }
virtual bool isOpened() const;
protected:
bool started;
CvSize size;
int bytesPerPixel;
unsigned long frameCurrent;
std::atomic<bool> isFrameNew;
};
}

View File

@ -0,0 +1,322 @@
// Video support with XAML
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "cap_winrt_video.hpp"
#include <ppl.h>
#include <ppltasks.h>
#include <concrt.h>
#include <agile.h>
#include <atomic>
#include <future>
#include <vector>
using namespace ::concurrency;
using namespace ::Windows::Foundation;
using namespace ::std;
using namespace Microsoft::WRL;
using namespace Windows::Media::Devices;
using namespace Windows::Media::MediaProperties;
using namespace Windows::Media::Capture;
using namespace Windows::UI::Xaml::Media::Imaging;
using namespace Windows::Devices::Enumeration;
#include "cap_winrt/CaptureFrameGrabber.hpp"
// pull in Media Foundation libs
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mf")
#pragma comment(lib, "mfuuid")
#if (WINAPI_FAMILY!=WINAPI_FAMILY_PHONE_APP) && !defined(_M_ARM)
#pragma comment(lib, "Shlwapi")
#endif
#include "cap_winrt_bridge.hpp"
Video::Video() {}
Video &Video::getInstance() {
static Video v;
return v;
}
bool Video::isStarted() {
return bGrabberInited.load();
}
void Video::closeGrabber() {
// assigning nullptr causes deref of grabber and thus closes the device
m_frameGrabber = nullptr;
bGrabberInited = false;
bGrabberInitInProgress = false;
}
bool Video::initGrabber(int device, int w, int h) {
// already started?
if (bGrabberInited || bGrabberInitInProgress) return false;
width = w;
height = h;
bGrabberInited = false;
bGrabberInitInProgress = true;
m_deviceID = device;
create_task(DeviceInformation::FindAllAsync(DeviceClass::VideoCapture))
.then([this](task<DeviceInformationCollection^> findTask)
{
m_devices = findTask.get();
// got selected device?
if ((unsigned)m_deviceID >= m_devices.Get()->Size)
{
OutputDebugStringA("Video::initGrabber - no video device found\n");
return false;
}
auto devInfo = m_devices.Get()->GetAt(m_deviceID);
auto settings = ref new MediaCaptureInitializationSettings();
settings->StreamingCaptureMode = StreamingCaptureMode::Video; // Video-only capture
settings->VideoDeviceId = devInfo->Id;
auto location = devInfo->EnclosureLocation;
bFlipImageX = true;
if (location != nullptr && location->Panel == Windows::Devices::Enumeration::Panel::Back)
{
bFlipImageX = false;
}
m_capture = ref new MediaCapture();
create_task(m_capture->InitializeAsync(settings)).then([this](){
auto props = safe_cast<VideoEncodingProperties^>(m_capture->VideoDeviceController->GetMediaStreamProperties(MediaStreamType::VideoPreview));
// for 24 bpp
props->Subtype = MediaEncodingSubtypes::Rgb24; bytesPerPixel = 3;
// format used by XAML & WBM (for testing)
// props->Subtype = MediaEncodingSubtypes::Bgra8; bytesPerPixel = 4;
props->Width = width;
props->Height = height;
return ::Media::CaptureFrameGrabber::CreateAsync(m_capture.Get(), props);
}).then([this](::Media::CaptureFrameGrabber^ frameGrabber)
{
m_frameGrabber = frameGrabber;
bGrabberInited = true;
bGrabberInitInProgress = false;
//ready = true;
_GrabFrameAsync(frameGrabber);
});
return true;
});
// nb. cannot block here - this will lock the UI thread:
return true;
}
void Video::_GrabFrameAsync(::Media::CaptureFrameGrabber^ frameGrabber) {
// use rgb24 layout
create_task(frameGrabber->GetFrameAsync()).then([this, frameGrabber](const ComPtr<IMF2DBuffer2>& buffer)
{
// do the RGB swizzle while copying the pixels from the IMF2DBuffer2
BYTE *pbScanline;
LONG plPitch;
unsigned int colBytes = width * bytesPerPixel;
CHK(buffer->Lock2D(&pbScanline, &plPitch));
// flip
if (bFlipImageX)
{
std::lock_guard<std::mutex> lock(VideoioBridge::getInstance().inputBufferMutex);
// ptr to input Mat data array
auto buf = VideoioBridge::getInstance().backInputPtr;
for (unsigned int row = 0; row < height; row++)
{
unsigned int i = 0;
unsigned int j = colBytes - 1;
while (i < colBytes)
{
// reverse the scan line
// as a side effect this also swizzles R and B channels
buf[j--] = pbScanline[i++];
buf[j--] = pbScanline[i++];
buf[j--] = pbScanline[i++];
}
pbScanline += plPitch;
buf += colBytes;
}
VideoioBridge::getInstance().bIsFrameNew = true;
} else
{
std::lock_guard<std::mutex> lock(VideoioBridge::getInstance().inputBufferMutex);
// ptr to input Mat data array
auto buf = VideoioBridge::getInstance().backInputPtr;
for (unsigned int row = 0; row < height; row++)
{
// used for Bgr8:
//for (unsigned int i = 0; i < colBytes; i++ )
// buf[i] = pbScanline[i];
// used for RGB24:
for (unsigned int i = 0; i < colBytes; i += bytesPerPixel)
{
// swizzle the R and B values (BGR to RGB)
buf[i] = pbScanline[i + 2];
buf[i + 1] = pbScanline[i + 1];
buf[i + 2] = pbScanline[i];
// no swizzle
//buf[i] = pbScanline[i];
//buf[i + 1] = pbScanline[i + 1];
//buf[i + 2] = pbScanline[i + 2];
}
pbScanline += plPitch;
buf += colBytes;
}
VideoioBridge::getInstance().bIsFrameNew = true;
}
CHK(buffer->Unlock2D());
VideoioBridge::getInstance().frameCounter++;
if (bGrabberInited)
{
_GrabFrameAsync(frameGrabber);
}
}, task_continuation_context::use_current());
}
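// The flip branch above relies on a small trick: for tightly packed 3-byte pixels,
// copying a raw scan line back-to-front both mirrors the row horizontally and swaps
// the R and B channels in one pass. An isolated sketch of that step (illustrative only,
// not used by the capture path):
static void mirrorAndSwizzleRow(const unsigned char* src, unsigned char* dst, int widthPixels)
{
    const int colBytes = widthPixels * 3;
    int i = 0;
    int j = colBytes - 1;
    while (i < colBytes)
    {
        dst[j--] = src[i++];   // source R lands in the mirrored pixel's B slot
        dst[j--] = src[i++];   // G stays in the G slot
        dst[j--] = src[i++];   // source B lands in the mirrored pixel's R slot
    }
}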
// copy from input Mat to output WBM
// must be on UI thread
void Video::CopyOutput() {
{
std::lock_guard<std::mutex> lock(VideoioBridge::getInstance().outputBufferMutex);
auto inAr = VideoioBridge::getInstance().frontInputPtr;
auto outAr = GetData(VideoioBridge::getInstance().frontOutputBuffer->PixelBuffer);
const unsigned int bytesPerPixel = 3;
auto pbScanline = inAr;
auto plPitch = width * bytesPerPixel;
auto buf = outAr;
unsigned int colBytes = width * 4;
// copy RGB24 to bgra8
for (unsigned int row = 0; row < height; row++)
{
// used for Bgr8:
// nb. no alpha
// for (unsigned int i = 0; i < colBytes; i++ ) buf[i] = pbScanline[i];
// used for RGB24:
// nb. alpha is set to full opaque
for (unsigned int i = 0, j = 0; i < plPitch; i += bytesPerPixel, j += 4)
{
// swizzle the R and B values (RGB24 to BGRA8)
buf[j] = pbScanline[i + 2];
buf[j + 1] = pbScanline[i + 1];
buf[j + 2] = pbScanline[i];
buf[j + 3] = 0xff;
// if no swizzle is desired:
//buf[i] = pbScanline[i];
//buf[i + 1] = pbScanline[i + 1];
//buf[i + 2] = pbScanline[i + 2];
//buf[i + 3] = 0xff;
}
pbScanline += plPitch;
buf += colBytes;
}
VideoioBridge::getInstance().frontOutputBuffer->PixelBuffer->Length = width * height * 4;
}
}
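// Per-pixel view of the conversion above (illustrative only): one packed RGB24 source
// pixel expands into one BGRA8 destination pixel with the alpha byte forced to opaque.
static inline void rgb24ToBgra8(const unsigned char rgb[3], unsigned char bgra[4])
{
    bgra[0] = rgb[2];   // B
    bgra[1] = rgb[1];   // G
    bgra[2] = rgb[0];   // R
    bgra[3] = 0xff;     // full opacity
}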
bool Video::listDevicesTask() {
std::atomic<bool> ready(false);
auto settings = ref new MediaCaptureInitializationSettings();
//vector <int> devices;
create_task(DeviceInformation::FindAllAsync(DeviceClass::VideoCapture))
.then([this, &ready](task<DeviceInformationCollection^> findTask)
{
m_devices = findTask.get();
for (size_t i = 0; i < m_devices->Size; i++)
{
// ofVideoDevice deviceInfo;
auto d = m_devices->GetAt(i);
//deviceInfo.bAvailable = true;
//deviceInfo.deviceName = PlatformStringToString(d->Name);
//deviceInfo.hardwareName = deviceInfo.deviceName;
// devices.push_back(deviceInfo);
}
ready = true;
});
// spin-wait (busy loop) until the async enumeration task completes
int count = 0;
while (!ready)
{
count++;
}
return true;
}
bool Video::listDevices() {
// synchronous version of listing video devices on WinRT
std::future<bool> result = std::async(std::launch::async, &Video::listDevicesTask, this);
return result.get();
}
// end

View File

@ -0,0 +1,73 @@
// Video support with XAML
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include "cap_winrt/CaptureFrameGrabber.hpp"
#include <mutex>
#include <memory>
class Video {
public:
bool initGrabber(int device, int w, int h);
void closeGrabber();
bool isStarted();
// singleton
static Video &getInstance();
void CopyOutput();
private:
// singleton
Video();
void _GrabFrameAsync(::Media::CaptureFrameGrabber^ frameGrabber);
bool listDevices();
Platform::Agile<Windows::Media::Capture::MediaCapture> m_capture;
Platform::Agile<Windows::Devices::Enumeration::DeviceInformationCollection> m_devices;
::Media::CaptureFrameGrabber^ m_frameGrabber;
bool listDevicesTask();
bool bChooseDevice;
bool bVerbose;
bool bFlipImageX;
//std::atomic<bool> bGrabberInited;
int m_deviceID;
int attemptFramerate;
std::atomic<bool> bIsFrameNew;
std::atomic<bool> bGrabberInited;
std::atomic<bool> bGrabberInitInProgress;
unsigned int width, height;
int bytesPerPixel;
};
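// Minimal usage sketch (illustrative only; in this patch the singleton is driven
// from the UI thread through the winrt_* helpers rather than called directly):
inline void videoUsageSketch()
{
    Video& cam = Video::getInstance();
    if (cam.initGrabber(0, 640, 480))   // kicks off asynchronous initialization and returns immediately
    {
        // frame grabbing begins once isStarted() reports true
    }
}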

View File

@ -221,7 +221,7 @@ void CvCaptureCAM_XIMEA::resetCvImage()
xiGetParamInt( hmv, XI_PRM_HEIGHT, &height); xiGetParamInt( hmv, XI_PRM_HEIGHT, &height);
xiGetParamInt( hmv, XI_PRM_IMAGE_DATA_FORMAT, &format); xiGetParamInt( hmv, XI_PRM_IMAGE_DATA_FORMAT, &format);
if( (int)image.width != width || (int)image.height != height || image.frm != (XI_IMG_FORMAT)format) if( (int)image.width != frame->width || (int)image.height != frame->height || image.frm != (XI_IMG_FORMAT)format)
{ {
if(frame) cvReleaseImage(&frame); if(frame) cvReleaseImage(&frame);
frame = NULL; frame = NULL;

File diff suppressed because it is too large

View File

@ -4,5 +4,9 @@ if(HAVE_CUDA)
ocv_warnings_disable(CMAKE_CXX_FLAGS -Wundef -Wmissing-declarations -Wshadow -Wunused-parameter) ocv_warnings_disable(CMAKE_CXX_FLAGS -Wundef -Wmissing-declarations -Wshadow -Wunused-parameter)
endif() endif()
if(WINRT_8_1)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /ZW")
endif()
ocv_define_module(videostab opencv_imgproc opencv_features2d opencv_video opencv_photo opencv_calib3d ocv_define_module(videostab opencv_imgproc opencv_features2d opencv_video opencv_photo opencv_calib3d
OPTIONAL opencv_cudawarping opencv_cudaoptflow opencv_videoio WRAP python) OPTIONAL opencv_cudawarping opencv_cudaoptflow opencv_videoio WRAP python)

205
samples/cpp/detect_blob.cpp Normal file
View File

@ -0,0 +1,205 @@
#include <opencv2/opencv.hpp>
#include <vector>
#include <map>
#include <iostream>
using namespace std;
using namespace cv;
static void help()
{
cout << "\n This program demonstrates how to use SimpleBlobDetector to detect and filter blob regions \n"
"Usage: \n"
" ./detect_blob <image1(../data/detect_blob.png as default)>\n"
"Press a key when an image window is active to move to the next set of parameters";
}
static String Legende(SimpleBlobDetector::Params &pAct)
{
String s = "";
if (pAct.filterByArea)
{
String inf = static_cast<ostringstream*>(&(ostringstream() << pAct.minArea))->str();
String sup = static_cast<ostringstream*>(&(ostringstream() << pAct.maxArea))->str();
s = " Area range [" + inf + " to " + sup + "]";
}
if (pAct.filterByCircularity)
{
String inf = static_cast<ostringstream*>(&(ostringstream() << pAct.minCircularity))->str();
String sup = static_cast<ostringstream*>(&(ostringstream() << pAct.maxCircularity))->str();
if (s.length() == 0)
s = " Circularity range [" + inf + " to " + sup + "]";
else
s += " AND Circularity range [" + inf + " to " + sup + "]";
}
if (pAct.filterByColor)
{
String inf = static_cast<ostringstream*>(&(ostringstream() << (int)pAct.blobColor))->str();
if (s.length() == 0)
s = " Blob color " + inf;
else
s += " AND Blob color " + inf;
}
if (pAct.filterByConvexity)
{
String inf = static_cast<ostringstream*>(&(ostringstream() << pAct.minConvexity))->str();
String sup = static_cast<ostringstream*>(&(ostringstream() << pAct.maxConvexity))->str();
if (s.length() == 0)
s = " Convexity range[" + inf + " to " + sup + "]";
else
s += " AND Convexity range[" + inf + " to " + sup + "]";
}
if (pAct.filterByInertia)
{
String inf = static_cast<ostringstream*>(&(ostringstream() << pAct.minInertiaRatio))->str();
String sup = static_cast<ostringstream*>(&(ostringstream() << pAct.maxInertiaRatio))->str();
if (s.length() == 0)
s = " Inertia ratio range [" + inf + " to " + sup + "]";
else
s += " AND Inertia ratio range [" + inf + " to " + sup + "]";
}
return s;
}
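// The conversions above format numbers by streaming into a temporary ostringstream and
// casting it back; an equivalent named helper (hypothetical, not part of this sample) would be:
template <typename T>
static String toStr(const T& value)
{
    std::ostringstream os;
    os << value;
    return os.str();
}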
int main(int argc, char *argv[])
{
vector<String> fileName;
Mat img(600, 800, CV_8UC1);
if (argc == 1)
{
fileName.push_back("../data/detect_blob.png");
}
else if (argc == 2)
{
fileName.push_back(argv[1]);
}
else
{
help();
return(0);
}
img = imread(fileName[0], IMREAD_COLOR);
if (img.rows*img.cols <= 0)
{
cout << "Image " << fileName[0] << " is empty or cannot be found\n";
return(0);
}
SimpleBlobDetector::Params pDefaultBLOB;
// These are the default parameters for SimpleBlobDetector
pDefaultBLOB.thresholdStep = 10;
pDefaultBLOB.minThreshold = 10;
pDefaultBLOB.maxThreshold = 220;
pDefaultBLOB.minRepeatability = 2;
pDefaultBLOB.minDistBetweenBlobs = 10;
pDefaultBLOB.filterByColor = false;
pDefaultBLOB.blobColor = 0;
pDefaultBLOB.filterByArea = false;
pDefaultBLOB.minArea = 25;
pDefaultBLOB.maxArea = 5000;
pDefaultBLOB.filterByCircularity = false;
pDefaultBLOB.minCircularity = 0.9f;
pDefaultBLOB.maxCircularity = (float)1e37;
pDefaultBLOB.filterByInertia = false;
pDefaultBLOB.minInertiaRatio = 0.1f;
pDefaultBLOB.maxInertiaRatio = (float)1e37;
pDefaultBLOB.filterByConvexity = false;
pDefaultBLOB.minConvexity = 0.95f;
pDefaultBLOB.maxConvexity = (float)1e37;
// Descriptor array for BLOB
vector<String> typeDesc;
// Param array for BLOB
vector<SimpleBlobDetector::Params> pBLOB;
vector<SimpleBlobDetector::Params>::iterator itBLOB;
// Color palette
vector< Vec3b > palette;
for (int i = 0; i<65536; i++)
{
palette.push_back(Vec3b((uchar)rand(), (uchar)rand(), (uchar)rand()));
}
help();
// These detectors will detect and compute blobs with 6 different sets of params
// Params for the first BLOB detector: we want all blobs
typeDesc.push_back("BLOB"); // see http://docs.opencv.org/trunk/d0/d7a/classcv_1_1SimpleBlobDetector.html
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByArea = true;
pBLOB.back().minArea = 1;
pBLOB.back().maxArea = float(img.rows*img.cols);
// Params for the second BLOB detector: we want an area between 500 and 2900 pixels
typeDesc.push_back("BLOB");
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByArea = true;
pBLOB.back().minArea = 500;
pBLOB.back().maxArea = 2900;
// Params for the third BLOB detector: we want only circular objects
typeDesc.push_back("BLOB");
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByCircularity = true;
// Params for the fourth BLOB detector: we want to filter by inertia ratio
typeDesc.push_back("BLOB");
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByInertia = true;
pBLOB.back().minInertiaRatio = 0;
pBLOB.back().maxInertiaRatio = (float)0.2;
// Params for the fifth BLOB detector: we want to filter by convexity
typeDesc.push_back("BLOB");
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByConvexity = true;
pBLOB.back().minConvexity = 0.;
pBLOB.back().maxConvexity = (float)0.9;
// Params for the sixth BLOB detector: we want blobs whose gravity center color equals 0 (bug #4321 must be fixed)
typeDesc.push_back("BLOB");
pBLOB.push_back(pDefaultBLOB);
pBLOB.back().filterByColor = true;
pBLOB.back().blobColor = 0;
itBLOB = pBLOB.begin();
vector<double> desMethCmp;
Ptr<Feature2D> b;
String label;
// Descriptor loop
vector<String>::iterator itDesc;
for (itDesc = typeDesc.begin(); itDesc != typeDesc.end(); itDesc++)
{
vector<KeyPoint> keyImg1;
if (*itDesc == "BLOB")
{
b = SimpleBlobDetector::create(*itBLOB);
label = Legende(*itBLOB);
itBLOB++;
}
try
{
// We can detect keypoints with the detect method
vector<KeyPoint> keyImg;
vector<Rect> zone;
vector<vector <Point> > region;
Mat desc, result(img.rows, img.cols, CV_8UC3);
if (b.dynamicCast<SimpleBlobDetector>() != NULL)
{
Ptr<SimpleBlobDetector> sbd = b.dynamicCast<SimpleBlobDetector>();
sbd->detect(img, keyImg, Mat());
drawKeypoints(img, keyImg, result);
int i = 0;
for (vector<KeyPoint>::iterator k = keyImg.begin(); k != keyImg.end(); k++, i++)
circle(result, k->pt, (int)k->size, palette[i % 65536]);
}
namedWindow(*itDesc + label, WINDOW_AUTOSIZE);
imshow(*itDesc + label, result);
imshow("Original", img);
waitKey();
}
catch (Exception& e)
{
cout << "Feature : " << *itDesc << "\n";
cout << e.msg << endl;
}
}
return 0;
}

View File

@ -0,0 +1,84 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 2013
VisualStudioVersion = 12.0.31101.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "video_capture_xaml", "video_capture_xaml", "{D7F9BEB3-65C8-443A-82C6-9D6A5B2B00FC}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "video_capture_xaml.Shared", "video_capture_xaml\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems", "{6A274B7F-3982-499E-B55A-1F12EF2E3EC0}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "video_capture_xaml.Windows", "video_capture_xaml\video_capture_xaml.Windows\video_capture_xaml.Windows.vcxproj", "{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "video_capture_xaml.WindowsPhone", "video_capture_xaml\video_capture_xaml.WindowsPhone\video_capture_xaml.WindowsPhone.vcxproj", "{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}"
EndProject
Global
GlobalSection(SharedMSBuildProjectFiles) = preSolution
video_capture_xaml\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems*{d7a82e7f-1535-4eec-aba9-2c8447669d33}*SharedItemsImports = 4
video_capture_xaml\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems*{6a274b7f-3982-499e-b55a-1f12ef2e3ec0}*SharedItemsImports = 9
video_capture_xaml\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems*{32fdab94-f87e-4f0a-89a4-9ec10a3b1d3d}*SharedItemsImports = 4
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|ARM = Debug|ARM
Debug|Mixed Platforms = Debug|Mixed Platforms
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|ARM = Release|ARM
Release|Mixed Platforms = Release|Mixed Platforms
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|ARM.ActiveCfg = Debug|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|ARM.Build.0 = Debug|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|ARM.Deploy.0 = Debug|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Mixed Platforms.ActiveCfg = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Mixed Platforms.Build.0 = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Mixed Platforms.Deploy.0 = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Win32.ActiveCfg = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Win32.Build.0 = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|Win32.Deploy.0 = Debug|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|x64.ActiveCfg = Debug|x64
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|x64.Build.0 = Debug|x64
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Debug|x64.Deploy.0 = Debug|x64
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|ARM.ActiveCfg = Release|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|ARM.Build.0 = Release|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|ARM.Deploy.0 = Release|ARM
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Mixed Platforms.ActiveCfg = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Mixed Platforms.Build.0 = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Mixed Platforms.Deploy.0 = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Win32.ActiveCfg = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Win32.Build.0 = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|Win32.Deploy.0 = Release|Win32
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|x64.ActiveCfg = Release|x64
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|x64.Build.0 = Release|x64
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D}.Release|x64.Deploy.0 = Release|x64
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|ARM.ActiveCfg = Debug|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|ARM.Build.0 = Debug|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|ARM.Deploy.0 = Debug|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Mixed Platforms.ActiveCfg = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Mixed Platforms.Build.0 = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Mixed Platforms.Deploy.0 = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Win32.ActiveCfg = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Win32.Build.0 = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|Win32.Deploy.0 = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Debug|x64.ActiveCfg = Debug|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|ARM.ActiveCfg = Release|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|ARM.Build.0 = Release|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|ARM.Deploy.0 = Release|ARM
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Mixed Platforms.ActiveCfg = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Mixed Platforms.Build.0 = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Mixed Platforms.Deploy.0 = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Win32.ActiveCfg = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Win32.Build.0 = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|Win32.Deploy.0 = Release|Win32
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33}.Release|x64.ActiveCfg = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{6A274B7F-3982-499E-B55A-1F12EF2E3EC0} = {D7F9BEB3-65C8-443A-82C6-9D6A5B2B00FC}
{32FDAB94-F87E-4F0A-89A4-9EC10A3B1D3D} = {D7F9BEB3-65C8-443A-82C6-9D6A5B2B00FC}
{D7A82E7F-1535-4EEC-ABA9-2C8447669D33} = {D7F9BEB3-65C8-443A-82C6-9D6A5B2B00FC}
EndGlobalSection
EndGlobal

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros">
<Runtime Condition="'$(ApplicationType)'=='Windows Phone'">WP</Runtime>
<Runtime Condition="'$(ApplicationType)'=='Windows Store'">WS</Runtime>
<OpenCV_Bin>$(OPENCV_WINRT_INSTALL_DIR)$(Runtime)\8.1\$(PlatformTarget)\$(PlatformTarget)\vc12\bin\</OpenCV_Bin>
<OpenCV_Lib>$(OPENCV_WINRT_INSTALL_DIR)$(Runtime)\8.1\$(PlatformTarget)\$(PlatformTarget)\vc12\lib\</OpenCV_Lib>
<OpenCV_Include>$(OPENCV_WINRT_INSTALL_DIR)$(Runtime)\8.1\$(PlatformTarget)\include\</OpenCV_Include>
<!--debug suffix for OpenCV dlls and libs -->
<DebugSuffix Condition="'$(Configuration)'=='Debug'">d</DebugSuffix>
<DebugSuffix Condition="'$(Configuration)'!='Debug'"></DebugSuffix>
</PropertyGroup>
<ItemGroup>
<!-- Add required OpenCV dlls here-->
<!-- General-->
<None Include="$(OpenCV_Bin)opencv_core300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<None Include="$(OpenCV_Bin)opencv_imgproc300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<None Include="$(OpenCV_Bin)opencv_flann300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<None Include="$(OpenCV_Bin)opencv_features2d300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<None Include="$(OpenCV_Bin)opencv_imgcodecs300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<!-- Video processing -->
<None Include="$(OpenCV_Bin)opencv_videoio300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<!-- Face detection-->
<None Include="$(OpenCV_Bin)opencv_objdetect300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
<None Include="$(OpenCV_Bin)opencv_ml300$(DebugSuffix).dll">
<DeploymentContent>true</DeploymentContent>
</None>
</ItemGroup>
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>$(OpenCV_Include);%(AdditionalIncludeDirectories);</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<!--Add required OpenCV libs here-->
<AdditionalDependencies>opencv_core300$(DebugSuffix).lib;opencv_imgproc300$(DebugSuffix).lib;opencv_flann300$(DebugSuffix).lib;opencv_videoio300$(DebugSuffix).lib;opencv_features2d300$(DebugSuffix).lib;opencv_objdetect300$(DebugSuffix).lib;opencv_ml300$(DebugSuffix).lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>$(OpenCV_Lib);%(AdditionalLibraryDirectories);</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
</Project>

View File

@ -0,0 +1,4 @@
<Application x:Class="video_capture_xaml.App"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:local="using:video_capture_xaml" />

View File

@ -0,0 +1,173 @@
//
// App.xaml.cpp
// Implementation of the App class.
//
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "pch.h"
#include "MainPage.xaml.h"
#include "App.xaml.h"
using namespace video_capture_xaml;
using namespace Platform;
using namespace Windows::ApplicationModel;
using namespace Windows::ApplicationModel::Activation;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Xaml::Media::Animation;
using namespace Windows::UI::Xaml;
using namespace Windows::UI::Xaml::Controls;
using namespace Windows::UI::Xaml::Controls::Primitives;
using namespace Windows::UI::Xaml::Data;
using namespace Windows::UI::Xaml::Input;
using namespace Windows::UI::Xaml::Interop;
using namespace Windows::UI::Xaml::Media;
using namespace Windows::UI::Xaml::Navigation;
// The Blank Application template is documented at http://go.microsoft.com/fwlink/?LinkId=234227
/// <summary>
/// Initializes the singleton application object. This is the first line of authored code
/// executed, and as such is the logical equivalent of main() or WinMain().
/// </summary>
App::App()
{
InitializeComponent();
Suspending += ref new SuspendingEventHandler(this, &App::OnSuspending);
Resuming += ref new Windows::Foundation::EventHandler<Platform::Object ^>(this, &video_capture_xaml::App::OnResuming);
}
/// <summary>
/// Invoked when the application is launched normally by the end user. Other entry points
/// will be used when the application is launched to open a specific file, to display
/// search results, and so forth.
/// </summary>
/// <param name="e">Details about the launch request and process.</param>
void App::OnLaunched(LaunchActivatedEventArgs^ e)
{
#if _DEBUG
if (IsDebuggerPresent())
{
DebugSettings->EnableFrameRateCounter = true;
}
#endif
auto rootFrame = dynamic_cast<Frame^>(Window::Current->Content);
// Do not repeat app initialization when the Window already has content,
// just ensure that the window is active.
if (rootFrame == nullptr)
{
// Create a Frame to act as the navigation context and associate it with
// a SuspensionManager key
rootFrame = ref new Frame();
// TODO: Change this value to a cache size that is appropriate for your application.
rootFrame->CacheSize = 1;
if (e->PreviousExecutionState == ApplicationExecutionState::Terminated)
{
// TODO: Restore the saved session state only when appropriate, scheduling the
// final launch steps after the restore is complete.
}
// Place the frame in the current Window
Window::Current->Content = rootFrame;
}
if (rootFrame->Content == nullptr)
{
#if WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP
// Removes the turnstile navigation for startup.
if (rootFrame->ContentTransitions != nullptr)
{
_transitions = ref new TransitionCollection();
for (auto transition : rootFrame->ContentTransitions)
{
_transitions->Append(transition);
}
}
rootFrame->ContentTransitions = nullptr;
_firstNavigatedToken = rootFrame->Navigated += ref new NavigatedEventHandler(this, &App::RootFrame_FirstNavigated);
#endif
// When the navigation stack isn't restored navigate to the first page,
// configuring the new page by passing required information as a navigation
// parameter.
if (!rootFrame->Navigate(MainPage::typeid, e->Arguments))
{
throw ref new FailureException("Failed to create initial page");
}
}
// Ensure the current window is active
Window::Current->Activate();
}
#if WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP
/// <summary>
/// Restores the content transitions after the app has launched.
/// </summary>
void App::RootFrame_FirstNavigated(Object^ sender, NavigationEventArgs^ e)
{
auto rootFrame = safe_cast<Frame^>(sender);
TransitionCollection^ newTransitions;
if (_transitions == nullptr)
{
newTransitions = ref new TransitionCollection();
newTransitions->Append(ref new NavigationThemeTransition());
}
else
{
newTransitions = _transitions;
}
rootFrame->ContentTransitions = newTransitions;
rootFrame->Navigated -= _firstNavigatedToken;
}
#endif
/// <summary>
/// Invoked when application execution is being suspended. Application state is saved
/// without knowing whether the application will be terminated or resumed with the contents
/// of memory still intact.
/// </summary>
void App::OnSuspending(Object^ sender, SuspendingEventArgs^ e)
{
(void) sender; // Unused parameter
(void) e; // Unused parameter
// TODO: Save application state and stop any background activity
}
void video_capture_xaml::App::OnResuming(Platform::Object ^sender, Platform::Object ^args)
{
// throw ref new Platform::NotImplementedException();
}

View File

@ -0,0 +1,57 @@
//
// App.xaml.h
// Declaration of the App class.
//
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include "App.g.h"
namespace video_capture_xaml
{
/// <summary>
/// Provides application-specific behavior to supplement the default Application class.
/// </summary>
ref class App sealed
{
public:
App();
virtual void OnLaunched(Windows::ApplicationModel::Activation::LaunchActivatedEventArgs^ e) override;
private:
#if WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP
Windows::UI::Xaml::Media::Animation::TransitionCollection^ _transitions;
Windows::Foundation::EventRegistrationToken _firstNavigatedToken;
void RootFrame_FirstNavigated(Platform::Object^ sender, Windows::UI::Xaml::Navigation::NavigationEventArgs^ e);
#endif
void OnSuspending(Platform::Object^ sender, Windows::ApplicationModel::SuspendingEventArgs^ e);
void OnResuming(Platform::Object ^sender, Platform::Object ^args);
};
}

View File

@ -0,0 +1,87 @@
// main.cpp
// Copyright (c) Microsoft Open Technologies, Inc.
// All rights reserved.
//
// (3 - clause BSD License)
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "pch.h"
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/videoio/cap_winrt.hpp>
using namespace cv;
namespace video_capture_xaml {
void cvMain()
{
VideoCapture cam;
// open the default camera
cam.open(0);
Mat edges;
Mat frame;
// process frames
while (1)
{
// get a new frame from camera - this is non-blocking per spec
cam >> frame;
// don't reprocess the same frame again
// nb. if this check is commented out, flashing may occur
if (!cam.grab()) continue;
// image processing calculations here
// nb Mat frame is in RGB24 format (8UC3)
// select processing type 1 or 2
#if 0
// image manipulation example #1
// write a color bar over 100 rows starting at row 100
auto ar = frame.ptr(100);
int bytesPerPixel = 3;
int adjust = (int)(((float)30 / 100.0f) * 255.0);
for (int i = 0; i < 640 * 100 * bytesPerPixel;)
{
ar[i++] = adjust; // R
i++; // G
ar[i++] = 255 - adjust; // B
}
#else
// image processing example #2
// apply 'canny' filter
cvtColor(frame, edges, COLOR_RGB2GRAY);
GaussianBlur(edges, edges, Size(7, 7), 1.5, 1.5);
Canny(edges, edges, 0, 30, 3);
cvtColor(edges, frame, COLOR_GRAY2RGB);
#endif
// important step to get XAML image component updated
winrt_imshow();
}
}
}

View File

@ -0,0 +1,6 @@
//
// pch.cpp
// Include the standard header and generate the precompiled header.
//
#include "pch.h"

View File

@ -0,0 +1,11 @@
//
// pch.h
// Header for standard system include files.
//
#pragma once
#include <collection.h>
#include <ppltasks.h>
#include "App.xaml.h"

View File

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup Label="Globals">
<MSBuildAllProjects>$(MSBuildAllProjects);$(MSBuildThisFileFullPath)</MSBuildAllProjects>
<HasSharedItems>true</HasSharedItems>
<SharedGUID>98633655-f156-43bb-b452-37cd6a71e3f0</SharedGUID>
<ItemsProjectGuid>{6a274b7f-3982-499e-b55a-1f12ef2e3ec0}</ItemsProjectGuid>
<ItemsRootNamespace>highgui_xaml</ItemsRootNamespace>
<ItemsProjectName>video_capture_xaml.Shared</ItemsProjectName>
</PropertyGroup>
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>%(AdditionalIncludeDirectories);$(MSBuildThisFileDirectory)</AdditionalIncludeDirectories>
</ClCompile>
</ItemDefinitionGroup>
<ItemGroup>
<ApplicationDefinition Include="$(MSBuildThisFileDirectory)App.xaml">
<SubType>Designer</SubType>
</ApplicationDefinition>
<ClCompile Include="$(MSBuildThisFileDirectory)App.xaml.cpp">
<DependentUpon>$(MSBuildThisFileDirectory)App.xaml</DependentUpon>
</ClCompile>
<ClCompile Include="$(MSBuildThisFileDirectory)main.cpp" />
<ClInclude Include="$(MSBuildThisFileDirectory)App.xaml.h">
<DependentUpon>$(MSBuildThisFileDirectory)App.xaml</DependentUpon>
</ClInclude>
<ClCompile Include="$(MSBuildThisFileDirectory)pch.cpp">
<PrecompiledHeader>Create</PrecompiledHeader>
</ClCompile>
<ClInclude Include="$(MSBuildThisFileDirectory)pch.h" />
</ItemGroup>
<ItemGroup>
<ProjectCapability Include="SourceItemsFromImports" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<CLCompile Include="$(MSBuildThisFileDirectory)App.xaml.cpp" />
<ClInclude Include="$(MSBuildThisFileDirectory)App.xaml.h" />
<CLCompile Include="$(MSBuildThisFileDirectory)pch.cpp" />
<ClInclude Include="$(MSBuildThisFileDirectory)pch.h" />
<ClCompile Include="$(MSBuildThisFileDirectory)main.cpp" />
</ItemGroup>
<ItemGroup>
<ApplicationDefinition Include="$(MSBuildThisFileDirectory)App.xaml" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,25 @@
<Page x:Class="video_capture_xaml.MainPage"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:local="using:video_capture_xaml"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
mc:Ignorable="d">
<Grid Background="{ThemeResource ApplicationPageBackgroundThemeBrush}">
<TextBlock Margin="20,35,0,0"
HorizontalAlignment="Left"
VerticalAlignment="Top"
FontSize="24"
TextWrapping="Wrap">
<Run Text="OpenCV: videoio implementation using XAML and Universal App Framework" />
<Run />
</TextBlock>
<Image Name="cvImage"
Width="640"
Height="480"
Margin="20,100,0,0"
HorizontalAlignment="Left"
VerticalAlignment="Top" />
</Grid>
</Page>

View File

@ -0,0 +1,61 @@
//
// MainPage.xaml.cpp
// Implementation of the MainPage class.
//
#include "pch.h"
#include "MainPage.xaml.h"
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/videoio/cap_winrt.hpp>
using namespace video_capture_xaml;
using namespace Platform;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Xaml;
using namespace Windows::UI::Xaml::Controls;
using namespace Windows::UI::Xaml::Controls::Primitives;
using namespace Windows::UI::Xaml::Data;
using namespace Windows::UI::Xaml::Input;
using namespace Windows::UI::Xaml::Media;
using namespace Windows::UI::Xaml::Navigation;
// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
using namespace ::Windows::Foundation;
using namespace Windows::UI::Xaml::Media::Imaging;
namespace video_capture_xaml
{
// nb. implemented in main.cpp
void cvMain();
MainPage::MainPage()
{
InitializeComponent();
Window::Current->VisibilityChanged += ref new Windows::UI::Xaml::WindowVisibilityChangedEventHandler(this, &video_capture_xaml::MainPage::OnVisibilityChanged);
// attach XAML elements
cv::winrt_setFrameContainer(cvImage);
// start (1) frame-grabbing loop and (2) message loop
//
// 1. Function passed as an argument must implement common OCV reading frames
// pattern (see cv::VideoCapture documentation) AND call cv::winrt_imshow().
// 2. Message processing loop required to overcome WinRT container and type
// conversion restrictions. OCV provides default implementation
cv::winrt_startMessageLoop(cvMain);
}
}
void video_capture_xaml::MainPage::OnVisibilityChanged(Platform::Object ^sender,
Windows::UI::Core::VisibilityChangedEventArgs ^e)
{
cv::winrt_onVisibilityChanged(e->Visible);
}

View File

@ -0,0 +1,24 @@
//
// MainPage.xaml.h
// Declaration of the MainPage class.
//
#pragma once
#include "MainPage.g.h"
namespace video_capture_xaml
{
/// <summary>
/// An empty page that can be used on its own or navigated to within a Frame.
/// </summary>
public ref class MainPage sealed
{
public:
MainPage();
private:
void OnVisibilityChanged(Platform::Object ^sender, Windows::UI::Core::VisibilityChangedEventArgs ^e);
};
}

View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<Package xmlns="http://schemas.microsoft.com/appx/2010/manifest" xmlns:m2="http://schemas.microsoft.com/appx/2013/manifest">
<Identity Name="dd4947e8-f250-49d1-be17-4442dfea2737" Publisher="CN=daver_000" Version="1.0.0.0" />
<Properties>
<DisplayName>video_capture_xaml.Windows</DisplayName>
<PublisherDisplayName>daver_000</PublisherDisplayName>
<Logo>Assets\StoreLogo.png</Logo>
</Properties>
<Prerequisites>
<OSMinVersion>6.3.0</OSMinVersion>
<OSMaxVersionTested>6.3.0</OSMaxVersionTested>
</Prerequisites>
<Resources>
<Resource Language="x-generate" />
</Resources>
<Applications>
<Application Id="App" Executable="$targetnametoken$.exe" EntryPoint="video_capture_xaml_Windows.App">
<m2:VisualElements DisplayName="video_capture_xaml.Windows" Square150x150Logo="Assets\Logo.png" Square30x30Logo="Assets\SmallLogo.png" Description="video_capture_xaml.Windows" ForegroundText="light" BackgroundColor="#464646">
<m2:SplashScreen Image="Assets\SplashScreen.png" />
</m2:VisualElements>
</Application>
</Applications>
<Capabilities>
<Capability Name="internetClient" />
<DeviceCapability Name="webcam" />
</Capabilities>
</Package>

View File

@ -0,0 +1,30 @@
notes for OpenCV WinRT implementation:
cvMain() in main.cpp
implements the image processing and OpenCV app control
it is running on a background thread, started by XAML
see file main.cpp
in the Application project
class VideoCapture_WinRT:
implements the IVideoCapture interface from OpenCV
video is initialized and frames are grabbed on the UI thread
see files cap_winrt.hpp/cpp
class HighguiBridge, a singleton
implements the OpenCV Highgui functions for XAML (limited at this time),
and also bridges to the UI thread functions for XAML and video operations.
see files cap_winrt_highgui.hpp/cpp
class Video, a singleton
encapsulates the Media Foundation interface needed for video initialization and grabbing.
called through Highgui and XAML, only on the UI thread
see files cap_winrt_video.hpp/cpp
threading:
requests from the OpenCV bg thread to the Video/XAML UI thread
are made through HighguiBridge::requestForUIthreadAsync(), which uses
the "progress reporter" method provided by the WinRT class
IAsyncActionWithProgress. Also the bg thread is started by create_async().
see file MainPage.xaml.cpp
in the Application project
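A minimal sketch of that hand-off (the names and the int payload are illustrative, not the actual bridge code, which is not shown here): the background task reports "progress", and the progress handler, registered on the UI thread, runs the XAML/video work there.
#include <ppltasks.h>
void startBackgroundLoopSketch()
{
    using namespace Windows::Foundation;
    // create_async runs the OpenCV loop on a background thread
    auto action = concurrency::create_async(
        [](concurrency::progress_reporter<int> reporter)
    {
        for (;;)
        {
            // ... grab / process a frame (a cvMain-style loop) ...
            reporter.report(1);   // request one unit of work on the UI thread
        }
    });
    // assigned on the UI thread, so the handler is invoked on the UI thread
    action->Progress = ref new AsyncActionProgressHandler<int>(
        [](IAsyncActionWithProgress<int>^, int /*request*/)
    {
        // UI thread: update the XAML Image element / perform video operations
    });
}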

View File

@ -0,0 +1,200 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|ARM">
<Configuration>Debug</Configuration>
<Platform>ARM</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|ARM">
<Configuration>Release</Configuration>
<Platform>ARM</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{32fdab94-f87e-4f0a-89a4-9ec10a3b1d3d}</ProjectGuid>
<RootNamespace>video_capture_xaml</RootNamespace>
<DefaultLanguage>en-US</DefaultLanguage>
<MinimumVisualStudioVersion>12.0</MinimumVisualStudioVersion>
<AppContainerApplication>true</AppContainerApplication>
<ApplicationType>Windows Store</ApplicationType>
<ApplicationTypeRevision>8.1</ApplicationTypeRevision>
<ProjectName>video_capture_xaml.Windows</ProjectName>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<Import Project="..\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems" Label="Shared" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|ARM'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<PackageCertificateKeyFile>video_capture_xaml.Windows_TemporaryKey.pfx</PackageCertificateKeyFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<IgnoreImportLibrary>true</IgnoreImportLibrary>
<LibraryPath>$(OPENCV_WINRT_INSTALL_DIR)WS\8.1\x86\x86\vc12\lib;$(VC_LibraryPath_x86);$(WindowsSDK_LibraryPath_x86);</LibraryPath>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<IgnoreImportLibrary>false</IgnoreImportLibrary>
<IncludePath>$(VC_IncludePath);$(WindowsSDK_IncludePath);</IncludePath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
<AdditionalIncludeDirectories>%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<ProjectReference>
<UseLibraryDependencyInputs>true</UseLibraryDependencyInputs>
</ProjectReference>
<Link>
<AdditionalDependencies>%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
<AdditionalIncludeDirectories>%(AdditionalIncludeDirectories);$(MSBuildThisFileDirectory)</AdditionalIncludeDirectories>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
</ClCompile>
<ProjectReference>
<UseLibraryDependencyInputs>false</UseLibraryDependencyInputs>
</ProjectReference>
<Link>
<AdditionalDependencies>kernel32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="MainPage.xaml.h">
<DependentUpon>MainPage.xaml</DependentUpon>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Page Include="MainPage.xaml">
<SubType>Designer</SubType>
</Page>
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest">
<SubType>Designer</SubType>
</AppxManifest>
<None Include="video_capture_xaml.Windows_TemporaryKey.pfx" />
</ItemGroup>
<ItemGroup>
<Image Include="Assets\Logo.scale-100.png" />
<Image Include="Assets\SmallLogo.scale-100.png" />
<Image Include="Assets\StoreLogo.scale-100.png" />
<Image Include="Assets\SplashScreen.scale-100.png" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="MainPage.xaml.cpp">
<DependentUpon>MainPage.xaml</DependentUpon>
</ClCompile>
</ItemGroup>
<ItemGroup>
<Text Include="readme.txt" />
</ItemGroup>
<ItemGroup>
<Xml Include="Assets\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Assets">
<UniqueIdentifier>{ef9b8c45-f2b7-4eec-a8b4-a9b340be770b}</UniqueIdentifier>
<Extensions>bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png</Extensions>
</Filter>
</ItemGroup>
<ItemGroup>
<Page Include="MainPage.xaml" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="MainPage.xaml.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="MainPage.xaml.h" />
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest" />
</ItemGroup>
<ItemGroup>
<None Include="video_capture_xaml.Windows_TemporaryKey.pfx" />
</ItemGroup>
<ItemGroup>
<Image Include="Assets\Logo.scale-100.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\SmallLogo.scale-100.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\StoreLogo.scale-100.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\SplashScreen.scale-100.png">
<Filter>Assets</Filter>
</Image>
</ItemGroup>
<ItemGroup>
<Text Include="readme.txt" />
</ItemGroup>
<ItemGroup>
<Xml Include="Assets\haarcascade_frontalface_alt.xml">
<Filter>Assets</Filter>
</Xml>
</ItemGroup>
</Project>

View File

@ -0,0 +1,26 @@
<Page x:Class="video_capture_xaml.MainPage"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:local="using:video_capture_xaml"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
Background="{ThemeResource ApplicationPageBackgroundThemeBrush}"
mc:Ignorable="d">
<Grid Background="{ThemeResource ApplicationPageBackgroundThemeBrush}">
<TextBlock Margin="20,35,0,0"
HorizontalAlignment="Left"
VerticalAlignment="Top"
FontSize="24"
TextWrapping="Wrap">
<Run Text="OpenCV: videoio" />
<Run />
</TextBlock>
<Image Name="cvImage"
Width="640"
Height="480"
Margin="20,100,0,0"
HorizontalAlignment="Left"
VerticalAlignment="Top" />
</Grid>
</Page>

View File

@@ -0,0 +1,79 @@
//
// MainPage.xaml.cpp
// Implementation of the MainPage class.
//
#include "pch.h"
#include "MainPage.xaml.h"
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/videoio/cap_winrt.hpp>
using namespace video_capture_xaml;
using namespace Platform;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Xaml;
using namespace Windows::UI::Xaml::Controls;
using namespace Windows::UI::Xaml::Controls::Primitives;
using namespace Windows::UI::Xaml::Data;
using namespace Windows::UI::Xaml::Input;
using namespace Windows::UI::Xaml::Media;
using namespace Windows::UI::Xaml::Navigation;
// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
using namespace ::Windows::Foundation;
using namespace Windows::UI::Xaml::Media::Imaging;
namespace video_capture_xaml {
// nb. implemented in main.cpp
void cvMain();
MainPage::MainPage()
{
InitializeComponent();
Window::Current->VisibilityChanged += ref new Windows::UI::Xaml::WindowVisibilityChangedEventHandler(this, &video_capture_xaml::MainPage::OnVisibilityChanged);
// attach XAML elements
cv::winrt_setFrameContainer(cvImage);
// start (1) the frame-grabbing loop and (2) the message loop
//
// 1. The function passed as an argument must implement the common OpenCV
//    frame-reading pattern (see the cv::VideoCapture documentation) AND call
//    cv::winrt_imshow() after each frame.
// 2. A message-processing loop is required to overcome WinRT container and
//    type-conversion restrictions; OpenCV provides a default implementation.
cv::winrt_startMessageLoop(cvMain);
}
void video_capture_xaml::MainPage::OnVisibilityChanged(Platform::Object ^sender,
Windows::UI::Core::VisibilityChangedEventArgs ^e)
{
cv::winrt_onVisibilityChanged(e->Visible);
}
/// <summary>
/// Invoked when this page is about to be displayed in a Frame.
/// </summary>
/// <param name="e">Event data that describes how this page was reached. The Parameter
/// property is typically used to configure the page.</param>
void MainPage::OnNavigatedTo(NavigationEventArgs^ e)
{
(void)e; // Unused parameter
// TODO: Prepare page for display here.
// TODO: If your application contains multiple pages, ensure that you are
// handling the hardware Back button by registering for the
// Windows::Phone::UI::Input::HardwareButtons.BackPressed event.
// If you are using the NavigationHelper provided by some templates,
// this event is handled for you.
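// A hypothetical sketch (not part of the original sample): on Windows Phone the hardware
// Back button could be handled here by registering, e.g.
//   Windows::Phone::UI::Input::HardwareButtons::BackPressed +=
//       ref new Windows::Foundation::EventHandler<
//           Windows::Phone::UI::Input::BackPressedEventArgs^>(this, &MainPage::OnBackPressed);
// where OnBackPressed would be an additional private member of MainPage (not declared in
// the MainPage.xaml.h added by this commit).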
}
}
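
For reference, a minimal sketch of what cvMain() might look like. The sample implements it in main.cpp, which is not shown in this diff; the loop below is an assumption based on the comments above and on the helpers declared in opencv2/videoio/cap_winrt.hpp, not the code added by this commit.

// Hypothetical sketch of the frame-grabbing function handed to cv::winrt_startMessageLoop().
// It follows the usual cv::VideoCapture read loop and calls cv::winrt_imshow() so that the
// XAML Image element registered via cv::winrt_setFrameContainer() is refreshed.
void cvMain()
{
    cv::VideoCapture cam;
    cam.open(0);                // open the default camera

    cv::Mat frame;
    for (;;)
    {
        if (!cam.read(frame))   // reads may return no frame yet; retry until one arrives
            continue;

        // ... optional per-frame processing on 'frame' would go here ...

        cv::winrt_imshow();     // push the current frame to the attached XAML container
    }
}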

View File

@@ -0,0 +1,28 @@
//
// MainPage.xaml.h
// Declaration of the MainPage class.
//
#pragma once
#include "MainPage.g.h"
namespace video_capture_xaml
{
/// <summary>
/// An empty page that can be used on its own or navigated to within a Frame.
/// </summary>
public ref class MainPage sealed
{
public:
MainPage();
protected:
virtual void OnNavigatedTo(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e) override;
private:
void OnVisibilityChanged(Platform::Object ^sender, Windows::UI::Core::VisibilityChangedEventArgs ^e);
};
}

View File

@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<Package xmlns="http://schemas.microsoft.com/appx/2010/manifest" xmlns:m2="http://schemas.microsoft.com/appx/2013/manifest" xmlns:m3="http://schemas.microsoft.com/appx/2014/manifest" xmlns:mp="http://schemas.microsoft.com/appx/2014/phone/manifest">
<Identity Name="dfafcbf2-a525-4ed0-9b6c-4e8bd5a9c0ba"
Publisher="CN=daver_000"
Version="1.0.0.0" />
<mp:PhoneIdentity PhoneProductId="dfafcbf2-a525-4ed0-9b6c-4e8bd5a9c0ba" PhonePublisherId="00000000-0000-0000-0000-000000000000"/>
<Properties>
<DisplayName>video_capture_xaml.WindowsPhone</DisplayName>
<PublisherDisplayName>daver_000</PublisherDisplayName>
<Logo>Assets\StoreLogo.png</Logo>
</Properties>
<Prerequisites>
<OSMinVersion>6.3.1</OSMinVersion>
<OSMaxVersionTested>6.3.1</OSMaxVersionTested>
</Prerequisites>
<Resources>
<Resource Language="x-generate"/>
</Resources>
<Applications>
<Application Id="App"
Executable="$targetnametoken$.exe"
EntryPoint="video_capture_xaml_WindowsPhone.App">
<m3:VisualElements
DisplayName="video_capture_xaml.WindowsPhone"
Square150x150Logo="Assets\Logo.png"
Square44x44Logo="Assets\SmallLogo.png"
Description="video_capture_xaml.WindowsPhone"
ForegroundText="light"
BackgroundColor="transparent">
<m3:DefaultTile Wide310x150Logo="Assets\WideLogo.png" Square71x71Logo="Assets\Square71x71Logo.png"/>
<m3:SplashScreen Image="Assets\SplashScreen.png"/>
<m3:ApplicationView MinWidth="width320"/> <!--Used in XAML Designer. DO NOT REMOVE-->
</m3:VisualElements>
</Application>
</Applications>
<Capabilities>
<Capability Name="internetClientServer" />
<DeviceCapability Name="webcam" />
</Capabilities>
</Package>

View File

@@ -0,0 +1,147 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|ARM">
<Configuration>Debug</Configuration>
<Platform>ARM</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|ARM">
<Configuration>Release</Configuration>
<Platform>ARM</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{d7a82e7f-1535-4eec-aba9-2c8447669d33}</ProjectGuid>
<RootNamespace>video_capture_xaml</RootNamespace>
<DefaultLanguage>en-US</DefaultLanguage>
<MinimumVisualStudioVersion>12.0</MinimumVisualStudioVersion>
<AppContainerApplication>true</AppContainerApplication>
<ApplicationType>Windows Phone</ApplicationType>
<ApplicationTypeRevision>8.1</ApplicationTypeRevision>
<ProjectName>video_capture_xaml.WindowsPhone</ProjectName>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120_wp81</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120_wp81</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<PlatformToolset>v120_wp81</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<PlatformToolset>v120_wp81</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<Import Project="..\video_capture_xaml.Shared\video_capture_xaml.Shared.vcxitems" Label="Shared" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|ARM'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\opencv.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LibraryPath>$(OPENCV_WINRT_INSTALL_DIR)WP\8.1\x86\x86\vc12\lib;$(VC_LibraryPath_x86);$(WindowsPhoneSDK_LibraryPath_x86);</LibraryPath>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<LibraryPath>$(OPENCV_WINRT_INSTALL_DIR)WP\8.1\ARM\ARM\vc12\lib;$(VC_LibraryPath_ARM);$(WindowsPhoneSDK_LibraryPath_arm);</LibraryPath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
<AdditionalIncludeDirectories>../../../../../modules/core/include;../../../../../modules/flann/include;../../../../../modules/videoio/include;../../../../../modules/hal/include;../../../../../modules/imgproc/include;../../../../../modules/features2d/include;%(AdditionalIncludeDirectories);$(MSBuildThisFileDirectory)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<AdditionalDependencies>opencv_core300d.lib;opencv_imgproc300d.lib;opencv_videoio300d.lib;WindowsPhoneCore.lib;RuntimeObject.lib;PhoneAppModelHost.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
<AdditionalIncludeDirectories>../../../../../modules/core/include;../../../../../modules/flann/include;../../../../../modules/videoio/include;../../../../../modules/hal/include;../../../../../modules/imgproc/include;../../../../../modules/features2d/include;%(AdditionalIncludeDirectories);$(MSBuildThisFileDirectory)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<AdditionalDependencies>opencv_core300d.lib;opencv_imgproc300d.lib;opencv_videoio300d.lib;WindowsPhoneCore.lib;RuntimeObject.lib;PhoneAppModelHost.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4453;28204</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="MainPage.xaml.h">
<DependentUpon>MainPage.xaml</DependentUpon>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Page Include="MainPage.xaml">
<SubType>Designer</SubType>
</Page>
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest">
<SubType>Designer</SubType>
</AppxManifest>
</ItemGroup>
<ItemGroup>
<Image Include="Assets\Logo.scale-240.png" />
<Image Include="Assets\SmallLogo.scale-240.png" />
<Image Include="Assets\Square71x71Logo.scale-240.png" />
<Image Include="Assets\StoreLogo.scale-240.png" />
<Image Include="Assets\SplashScreen.scale-240.png" />
<Image Include="Assets\WideLogo.scale-240.png" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="MainPage.xaml.cpp">
<DependentUpon>MainPage.xaml</DependentUpon>
</ClCompile>
</ItemGroup>
<ItemGroup>
<None Include="video_capture_xaml.WindowsPhone_TemporaryKey.pfx" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Assets">
<UniqueIdentifier>{$guid3$}</UniqueIdentifier>
<Extensions>bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png</Extensions>
</Filter>
<Image Include="Assets\Logo.scale-240.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\SmallLogo.scale-240.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\Square71x71Logo.scale-240.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\StoreLogo.scale-240.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\SplashScreen.scale-240.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\WideLogo.scale-240.png">
<Filter>Assets</Filter>
</Image>
</ItemGroup>
<ItemGroup>
<ClCompile Include="MainPage.xaml.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="MainPage.xaml.h" />
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest" />
</ItemGroup>
<ItemGroup>
<Page Include="MainPage.xaml" />
</ItemGroup>
<ItemGroup>
<None Include="video_capture_xaml.WindowsPhone_TemporaryKey.pfx" />
</ItemGroup>
</Project>