Merge branch '2.4'

Andrey Kamaev 2013-03-11 18:50:19 +04:00
commit 9b7dfd677d
9 changed files with 605 additions and 212 deletions


@@ -5,8 +5,6 @@
Introduction to Java Development
********************************
-Last updated: 28 February, 2013.
As of OpenCV 2.4.4, OpenCV supports desktop Java development using nearly the same interface as for
Android development. This guide will help you to create your first Java (or Scala) application using OpenCV.
We will use either `Eclipse <http://eclipse.org/>`_, `Apache Ant <http://ant.apache.org/>`_ or the
@@ -15,7 +13,7 @@ We will use either `Eclipse <http://eclipse.org/>`_, `Apache Ant <http://ant.apa
For further reading after this guide, look at the :ref:`Android_Dev_Intro` tutorials.
What we'll do in this guide
-***************************
+===========================
In this guide, we will:
@@ -29,12 +27,12 @@ The same process was used to create the samples in the :file:`samples/java` fold
so consult those files if you get lost.
Get proper OpenCV
-*****************
+=================
Starting from version 2.4.4 OpenCV includes desktop Java bindings.
Download
-########
+--------
The most simple way to get it is downloading the appropriate package of **version 2.4.4 or higher** from the
`OpenCV SourceForge repository <http://sourceforge.net/projects/opencvlibrary/files/>`_.
@@ -50,30 +48,30 @@ In order to build OpenCV with Java bindings you need :abbr:`JDK (Java Developmen
`Apache Ant <http://ant.apache.org/>`_ and `Python` v2.6 or higher to be installed.
Build
-#####
+-----
Let's build OpenCV:
.. code-block:: bash
git clone git://github.com/Itseez/opencv.git
cd opencv
git checkout 2.4
mkdir build
cd build
Generate a Makefile or a MS Visual Studio* solution, or whatever you use for
building executables in your system:
.. code-block:: bash
cmake -DBUILD_SHARED_LIBS=OFF ..
or
.. code-block:: bat
cmake -DBUILD_SHARED_LIBS=OFF -G "Visual Studio 10" ..
.. note:: When OpenCV is built as a set of **static** libraries (``-DBUILD_SHARED_LIBS=OFF`` option)
the Java bindings dynamic library is all-sufficient,
@@ -83,9 +81,9 @@ Examine the output of CMake and ensure ``java`` is one of the modules "To be bui
If not, it's likely you're missing a dependency. You should troubleshoot by looking
through the CMake output for any Java-related tools that aren't found and installing them.
.. image:: images/cmake_output.png
:alt: CMake output
:align: center
.. note:: If ``CMake`` can't find Java in your system set the ``JAVA_HOME``
environment variable with the path to installed JDK
@@ -99,23 +97,23 @@ through the CMake output for any Java-related tools that aren't found and instal
Now start the build:
.. code-block:: bash
make -j8
or
.. code-block:: bat
msbuild /m OpenCV.sln /t:Build /p:Configuration=Release /v:m
Besides all this will create a ``jar`` containing the Java interface (:file:`bin/opencv-244.jar`)
and a native dynamic library containing Java bindings and all the OpenCV stuff
-(:file:`bin/Release/opencv_java244.dll` or :file:`lib/libopencv_java244.so` respectively).
+(:file:`lib/libopencv_java244.so` or :file:`bin/Release/opencv_java244.dll` respectively).
We'll use these files later.
Java sample with Ant
-********************
+====================
.. note::
The described sample is provided with OpenCV library in the :file:`opencv/samples/java/ant` folder.
@@ -124,8 +122,8 @@ Java sample with Ant
* In this folder create the :file:`build.xml` file with the following content using any text editor:
.. code-block:: xml
:linenos:
<project name="SimpleSample" basedir="." default="rebuild-run">
@@ -177,18 +175,18 @@ Java sample with Ant
</project>
.. note::
This XML file can be reused for building other Java applications.
It describes a common folder structure in the lines 3 - 12 and common targets
for compiling and running the application.
When reusing this XML don't forget to modify the project name in the line 1,
that is also the name of the `main` class (line 14).
The paths to OpenCV `jar` and `jni lib` are expected as parameters
(``"${ocvJarDir}"`` in line 5 and ``"${ocvLibDir}"`` in line 37), but
you can hardcode these paths for your convenience.
See `Ant documentation <http://ant.apache.org/manual/>`_ for detailed description
of its build file format.
* Create an :file:`src` folder next to the :file:`build.xml` file and a :file:`SimpleSample.java` file in it.
@@ -236,99 +234,99 @@ Java sample with Ant
:align: center
Java project in Eclipse
-***********************
+=======================
Now let's look at the possiblity of using OpenCV in Java when developing in Eclipse IDE.
* Create a new Eclipse workspace
* Create a new Java project via :guilabel:`File --> New --> Java Project`
.. image:: images/eclipse_new_java_prj.png
:alt: Eclipse: new Java project
:align: center
Call it say "HelloCV".
* Open :guilabel:`Java Build Path` tab on :guilabel:`Project Properties` dialog
and configure additional library (OpenCV) reference (jar and native library location):
.. image:: images/eclipse_user_lib.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib2.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib3.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib4.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib5.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib6.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib7.png
:alt: Eclipse: external JAR
:align: center
-` `
+|
.. image:: images/eclipse_user_lib8.png
:alt: Eclipse: external JAR
:align: center
` `
* Add a new Java class (say ``Main``) containing the application entry:
.. image:: images/eclipse_main_class.png
:alt: Eclipse: Main class
:align: center
* Put some simple OpenCV calls there, e.g.:
.. code-block:: java
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
public class Main {
public static void main(String[] args) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat m = Mat.eye(3, 3, CvType.CV_8UC1);
System.out.println("m = " + m.dump());
}
}
* Press :guilabel:`Run` button and find the identity matrix content in the Eclipse ``Console`` window.
.. image:: images/eclipse_run.png
:alt: Eclipse: run
:align: center
SBT project for Java and Scala
-******************************
+==============================
Now we'll create a simple Java application using SBT. This serves as a brief introduction to
those unfamiliar with this build tool. We're using SBT because it is particularly easy and powerful.
@@ -338,66 +336,66 @@ First, download and install `SBT <http://www.scala-sbt.org/>`_ using the instruc
Next, navigate to a new directory where you'd like the application source to live (outside :file:`opencv` dir).
Let's call it "JavaSample" and create a directory for it:
.. code-block:: bash
cd <somewhere outside opencv>
mkdir JavaSample
Now we will create the necessary folders and an SBT project:
.. code-block:: bash
cd JavaSample
mkdir -p src/main/java # This is where SBT expects to find Java sources
mkdir project # This is where the build definitions live
Now open :file:`project/build.scala` in your favorite editor and paste the following.
It defines your project:
.. code-block:: scala
import sbt._
import Keys._
object JavaSampleBuild extends Build {
def scalaSettings = Seq(
scalaVersion := "2.10.0",
scalacOptions ++= Seq(
"-optimize",
"-unchecked",
"-deprecation"
)
)
def buildSettings =
Project.defaultSettings ++
scalaSettings
lazy val root = {
val settings = buildSettings ++ Seq(name := "JavaSample")
Project(id = "JavaSample", base = file("."), settings = settings)
}
}
Now edit :file:`project/plugins.sbt` and paste the following.
This will enable auto-generation of an Eclipse project:
.. code-block:: scala
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.0")
Now run ``sbt`` from the :file:`JavaSample` root and from within SBT run ``eclipse`` to generate an eclipse project:
.. code-block:: bash
sbt # Starts the sbt console
> eclipse # Running "eclipse" from within the sbt console
You should see something like this:
.. image:: images/sbt_eclipse.png
:alt: SBT output
:align: center
You can now import the SBT project to Eclipse using :guilabel:`Import ... -> Existing projects into workspace`.
Whether you actually do this is optional for the guide;
@@ -406,28 +404,28 @@ we'll be using SBT to build the project, so if you choose to use Eclipse it will
To test that everything is working, create a simple "Hello OpenCV" application.
Do this by creating a file :file:`src/main/java/HelloOpenCV.java` with the following contents:
.. code-block:: java
public class HelloOpenCV {
public static void main(String[] args) {
System.out.println("Hello, OpenCV");
}
}
Now execute ``run`` from the sbt console, or more concisely, run ``sbt run`` from the command line:
.. code-block:: bash
sbt run
You should see something like this:
.. image:: images/sbt_run.png
:alt: SBT run
:align: center
Running SBT samples
-###################
+-------------------
Now we'll create a simple face detection application using OpenCV.
@@ -435,17 +433,17 @@ First, create a :file:`lib/` folder and copy the OpenCV jar into it.
By default, SBT adds jars in the lib folder to the Java library search path.
You can optionally rerun ``sbt eclipse`` to update your Eclipse project.
.. code-block:: bash
mkdir lib
cp <opencv_dir>/build/bin/opencv_<version>.jar lib/
sbt eclipse
Next, create the directory :file:`src/main/resources` and download this Lena image into it:
.. image:: images/lena.png
:alt: Lena
:align: center
Make sure it's called :file:`"lena.png"`.
Items in the resources directory are available to the Java application at runtime.
@@ -453,9 +451,9 @@ Items in the resources directory are available to the Java application at runtim
Next, copy :file:`lbpcascade_frontalface.xml` from :file:`opencv/data/lbpcascades/` into the :file:`resources`
directory:
.. code-block:: bash
cp <opencv_dir>/data/lbpcascades/lbpcascade_frontalface.xml src/main/resources/
Now modify src/main/java/HelloOpenCV.java so it contains the following Java code:
@@ -519,21 +517,21 @@ You will also get errors if you try to load OpenCV when it has already been load
Now run the face detection app using ``sbt run``:
.. code-block:: bash
sbt run
You should see something like this:
.. image:: images/sbt_run_face.png
:alt: SBT run
:align: center
It should also write the following image to :file:`faceDetection.png`:
.. image:: images/faceDetection.png
:alt: Detected face
:align: center
You're done!
Now you have a sample Java application working with OpenCV, so you can start the work on your own.


@@ -1044,18 +1044,32 @@ enum
COLOR_RGBA2mRGBA = 125,
COLOR_mRGBA2RGBA = 126,
COLOR_RGB2YUV_I420 = 127,
COLOR_BGR2YUV_I420 = 128,
COLOR_RGB2YUV_IYUV = COLOR_RGB2YUV_I420,
COLOR_BGR2YUV_IYUV = COLOR_BGR2YUV_I420,
COLOR_RGBA2YUV_I420 = 129,
COLOR_BGRA2YUV_I420 = 130,
COLOR_RGBA2YUV_IYUV = COLOR_RGBA2YUV_I420,
COLOR_BGRA2YUV_IYUV = COLOR_BGRA2YUV_I420,
COLOR_RGB2YUV_YV12 = 131,
COLOR_BGR2YUV_YV12 = 132,
COLOR_RGBA2YUV_YV12 = 133,
COLOR_BGRA2YUV_YV12 = 134,
// Edge-Aware Demosaicing
-COLOR_BayerBG2BGR_EA = 127,
-COLOR_BayerGB2BGR_EA = 128,
-COLOR_BayerRG2BGR_EA = 129,
-COLOR_BayerGR2BGR_EA = 130,
+COLOR_BayerBG2BGR_EA = 135,
+COLOR_BayerGB2BGR_EA = 136,
+COLOR_BayerRG2BGR_EA = 137,
+COLOR_BayerGR2BGR_EA = 138,
COLOR_BayerBG2RGB_EA = COLOR_BayerRG2BGR_EA,
COLOR_BayerGB2RGB_EA = COLOR_BayerGR2BGR_EA,
COLOR_BayerRG2RGB_EA = COLOR_BayerBG2BGR_EA,
COLOR_BayerGR2RGB_EA = COLOR_BayerGB2BGR_EA,
-COLOR_COLORCVT_MAX = 131
+COLOR_COLORCVT_MAX = 139
};
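
The new codes plug into ``cvtColor`` like the existing ones: the destination is a single-channel 8-bit planar image whose height is 1.5 times the source height (a full-resolution Y plane followed by the two subsampled chroma planes). A minimal usage sketch, not part of this commit (the image size and fill values are arbitrary):

    #include <opencv2/core/core.hpp>
    #include <opencv2/imgproc/imgproc.hpp>

    int main()
    {
        // Width and height must be even for the 4:2:0 conversions added above.
        cv::Mat bgr(480, 640, CV_8UC3, cv::Scalar(40, 80, 120));

        cv::Mat yuv;
        cv::cvtColor(bgr, yuv, cv::COLOR_BGR2YUV_I420); // alias: COLOR_BGR2YUV_IYUV

        // 480 rows of Y, then 240 rows holding the quarter-size U and V planes.
        CV_Assert(yuv.type() == CV_8UC1 && yuv.rows == bgr.rows * 3 / 2 && yuv.cols == bgr.cols);
        return 0;
    }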


@@ -310,18 +310,32 @@ enum
CV_RGBA2mRGBA = 125,
CV_mRGBA2RGBA = 126,
CV_RGB2YUV_I420 = 127,
CV_BGR2YUV_I420 = 128,
CV_RGB2YUV_IYUV = CV_RGB2YUV_I420,
CV_BGR2YUV_IYUV = CV_BGR2YUV_I420,
CV_RGBA2YUV_I420 = 129,
CV_BGRA2YUV_I420 = 130,
CV_RGBA2YUV_IYUV = CV_RGBA2YUV_I420,
CV_BGRA2YUV_IYUV = CV_BGRA2YUV_I420,
CV_RGB2YUV_YV12 = 131,
CV_BGR2YUV_YV12 = 132,
CV_RGBA2YUV_YV12 = 133,
CV_BGRA2YUV_YV12 = 134,
// Edge-Aware Demosaicing
-CV_BayerBG2BGR_EA = 127,
-CV_BayerGB2BGR_EA = 128,
-CV_BayerRG2BGR_EA = 129,
-CV_BayerGR2BGR_EA = 130,
+CV_BayerBG2BGR_EA = 135,
+CV_BayerGB2BGR_EA = 136,
+CV_BayerRG2BGR_EA = 137,
+CV_BayerGR2BGR_EA = 138,
CV_BayerBG2RGB_EA = CV_BayerRG2BGR_EA,
CV_BayerGB2RGB_EA = CV_BayerGR2BGR_EA,
CV_BayerRG2RGB_EA = CV_BayerBG2BGR_EA,
CV_BayerGR2RGB_EA = CV_BayerGB2BGR_EA,
-CV_COLORCVT_MAX = 131
+CV_COLORCVT_MAX = 139
};


@@ -115,6 +115,9 @@ CV_ENUM(CvtMode2, CV_YUV2BGR_NV12, CV_YUV2BGRA_NV12, CV_YUV2RGB_NV12, CV_YUV2RGB
COLOR_YUV2GRAY_420, CV_YUV2RGB_UYVY, CV_YUV2BGR_UYVY, CV_YUV2RGBA_UYVY, CV_YUV2BGRA_UYVY, CV_YUV2RGB_YUY2, CV_YUV2BGR_YUY2, CV_YUV2RGB_YVYU,
CV_YUV2BGR_YVYU, CV_YUV2RGBA_YUY2, CV_YUV2BGRA_YUY2, CV_YUV2RGBA_YVYU, CV_YUV2BGRA_YVYU)
CV_ENUM(CvtMode3, CV_RGB2YUV_IYUV, CV_BGR2YUV_IYUV, CV_RGBA2YUV_IYUV, CV_BGRA2YUV_IYUV,
CV_RGB2YUV_YV12, CV_BGR2YUV_YV12, CV_RGBA2YUV_YV12, CV_BGRA2YUV_YV12)
struct ChPair
{
ChPair(int _scn, int _dcn): scn(_scn), dcn(_dcn) {}
@@ -162,6 +165,8 @@ ChPair getConversionInfo(int cvtMode)
case CV_BGR5652BGRA: case CV_BGR5652RGBA:
return ChPair(2,4);
case CV_BGR2GRAY: case CV_RGB2GRAY:
case CV_RGB2YUV_IYUV: case CV_RGB2YUV_YV12:
case CV_BGR2YUV_IYUV: case CV_BGR2YUV_YV12:
return ChPair(3,1);
case CV_BGR2BGR555: case CV_BGR2BGR565:
case CV_RGB2BGR555: case CV_RGB2BGR565:
@@ -204,6 +209,8 @@ ChPair getConversionInfo(int cvtMode)
case CX_YUV2BGRA: case CX_YUV2RGBA:
return ChPair(3,4);
case CV_BGRA2GRAY: case CV_RGBA2GRAY:
case CV_RGBA2YUV_IYUV: case CV_RGBA2YUV_YV12:
case CV_BGRA2YUV_IYUV: case CV_BGRA2YUV_YV12:
return ChPair(4,1);
case CV_BGRA2BGR555: case CV_BGRA2BGR565:
case CV_RGBA2BGR555: case CV_RGBA2BGR565:
@@ -307,6 +314,31 @@ PERF_TEST_P(Size_CvtMode2, cvtColorYUV420,
SANITY_CHECK(dst, 1);
}
typedef std::tr1::tuple<Size, CvtMode3> Size_CvtMode3_t;
typedef perf::TestBaseWithParam<Size_CvtMode3_t> Size_CvtMode3;
PERF_TEST_P(Size_CvtMode3, cvtColorRGB2YUV420p,
testing::Combine(
testing::Values(szVGA, sz720p, sz1080p, Size(130, 60)),
testing::ValuesIn(CvtMode3::all())
)
)
{
Size sz = get<0>(GetParam());
int mode = get<1>(GetParam());
ChPair ch = getConversionInfo(mode);
Mat src(sz, CV_8UC(ch.scn));
Mat dst(sz.height + sz.height / 2, sz.width, CV_8UC(ch.dcn));
declare.time(100);
declare.in(src, WARMUP_RNG).out(dst);
TEST_CYCLE() cvtColor(src, dst, mode, ch.dcn);
SANITY_CHECK(dst, 1);
}
CV_ENUM(EdgeAwareBayerMode, COLOR_BayerBG2BGR_EA, COLOR_BayerGB2BGR_EA, COLOR_BayerRG2BGR_EA, COLOR_BayerGR2BGR_EA)
typedef std::tr1::tuple<Size, EdgeAwareBayerMode> EdgeAwareParams;


@@ -1802,6 +1802,16 @@ const int ITUR_BT_601_CVG = -852492;
const int ITUR_BT_601_CVR = 1673527;
const int ITUR_BT_601_SHIFT = 20;
// Coefficients for RGB to YUV420p conversion
const int ITUR_BT_601_CRY = 269484;
const int ITUR_BT_601_CGY = 528482;
const int ITUR_BT_601_CBY = 102760;
const int ITUR_BT_601_CRU = -155188;
const int ITUR_BT_601_CGU = -305135;
const int ITUR_BT_601_CBU = 460324;
const int ITUR_BT_601_CGV = -385875;
const int ITUR_BT_601_CBV = -74448;
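
These constants appear to be the usual BT.601 RGB-to-YUV weights scaled by 2^ITUR_BT_601_SHIFT (2^20 = 1048576): for instance 0.257 * 1048576 truncates to 269484 and 0.504 * 1048576 to 528482, matching the luma constants above, while the chroma constants follow the same scaling to within a count or two. A small sanity-check sketch, not part of the commit (the float weights are the ones used by the reference RGB2YUV_Converter in the tests further down):

    #include <cassert>

    int main()
    {
        const double scale = double(1 << 20);   // 2^ITUR_BT_601_SHIFT

        // Luma weights from the test-side reference converter.
        const double wRY = 0.257, wGY = 0.504, wBY = 0.098;

        // Truncating the scaled weights reproduces the fixed-point luma constants.
        assert(long(wRY * scale) == 269484);
        assert(long(wGY * scale) == 528482);
        assert(long(wBY * scale) == 102760);
        return 0;
    }
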
template<int bIdx, int uIdx>
struct YUV420sp2RGB888Invoker
{
@@ -2134,6 +2144,84 @@ inline void cvtYUV420p2RGBA(Mat& _dst, int _stride, const uchar* _y1, const ucha
converter(BlockedRange(0, _dst.rows/2));
}
///////////////////////////////////// RGB -> YUV420p /////////////////////////////////////
template<int bIdx>
struct RGB888toYUV420pInvoker: public ParallelLoopBody
{
RGB888toYUV420pInvoker( const Mat& src, Mat* dst, const int uIdx )
: src_(src),
dst_(dst),
uIdx_(uIdx) { }
void operator()(const Range& rowRange) const
{
const int w = src_.cols;
const int h = src_.rows;
const int cn = src_.channels();
for( int i = rowRange.start; i < rowRange.end; i++ )
{
const uchar* row0 = src_.ptr<uchar>(2 * i);
const uchar* row1 = src_.ptr<uchar>(2 * i + 1);
uchar* y = dst_->ptr<uchar>(2*i);
uchar* u = dst_->ptr<uchar>(h + i/2) + (i % 2) * (w/2);
uchar* v = dst_->ptr<uchar>(h + (i + h/2)/2) + ((i + h/2) % 2) * (w/2);
if( uIdx_ == 2 ) std::swap(u, v);
for( int j = 0, k = 0; j < w * cn; j += 2 * cn, k++ )
{
int r00 = row0[2-bIdx + j]; int g00 = row0[1 + j]; int b00 = row0[bIdx + j];
int r01 = row0[2-bIdx + cn + j]; int g01 = row0[1 + cn + j]; int b01 = row0[bIdx + cn + j];
int r10 = row1[2-bIdx + j]; int g10 = row1[1 + j]; int b10 = row1[bIdx + j];
int r11 = row1[2-bIdx + cn + j]; int g11 = row1[1 + cn + j]; int b11 = row1[bIdx + cn + j];
const int shifted16 = (16 << ITUR_BT_601_SHIFT);
const int halfShift = (1 << (ITUR_BT_601_SHIFT - 1));
int y00 = ITUR_BT_601_CRY * r00 + ITUR_BT_601_CGY * g00 + ITUR_BT_601_CBY * b00 + halfShift + shifted16;
int y01 = ITUR_BT_601_CRY * r01 + ITUR_BT_601_CGY * g01 + ITUR_BT_601_CBY * b01 + halfShift + shifted16;
int y10 = ITUR_BT_601_CRY * r10 + ITUR_BT_601_CGY * g10 + ITUR_BT_601_CBY * b10 + halfShift + shifted16;
int y11 = ITUR_BT_601_CRY * r11 + ITUR_BT_601_CGY * g11 + ITUR_BT_601_CBY * b11 + halfShift + shifted16;
y[2*k + 0] = saturate_cast<uchar>(y00 >> ITUR_BT_601_SHIFT);
y[2*k + 1] = saturate_cast<uchar>(y01 >> ITUR_BT_601_SHIFT);
y[2*k + dst_->step + 0] = saturate_cast<uchar>(y10 >> ITUR_BT_601_SHIFT);
y[2*k + dst_->step + 1] = saturate_cast<uchar>(y11 >> ITUR_BT_601_SHIFT);
const int shifted128 = (128 << ITUR_BT_601_SHIFT);
int u00 = ITUR_BT_601_CRU * r00 + ITUR_BT_601_CGU * g00 + ITUR_BT_601_CBU * b00 + halfShift + shifted128;
int v00 = ITUR_BT_601_CBU * r00 + ITUR_BT_601_CGV * g00 + ITUR_BT_601_CBV * b00 + halfShift + shifted128;
u[k] = saturate_cast<uchar>(u00 >> ITUR_BT_601_SHIFT);
v[k] = saturate_cast<uchar>(v00 >> ITUR_BT_601_SHIFT);
}
}
}
static bool isFit( const Mat& src )
{
return (src.total() >= 320*240);
}
private:
RGB888toYUV420pInvoker& operator=(const RGB888toYUV420pInvoker&);
const Mat& src_;
Mat* const dst_;
const int uIdx_;
};
template<int bIdx, int uIdx>
static void cvtRGBtoYUV420p(const Mat& src, Mat& dst)
{
RGB888toYUV420pInvoker<bIdx> colorConverter(src, &dst, uIdx);
if( RGB888toYUV420pInvoker<bIdx>::isFit(src) )
parallel_for_(Range(0, src.rows/2), colorConverter);
else
colorConverter(Range(0, src.rows/2));
}
///////////////////////////////////// YUV422 -> RGB /////////////////////////////////////
template<int bIdx, int uIdx, int yIdx>
@@ -2736,6 +2824,31 @@ void cv::cvtColor( InputArray _src, OutputArray _dst, int code, int dcn )
src(Range(0, dstSz.height), Range::all()).copyTo(dst);
}
break;
case CV_RGB2YUV_YV12: case CV_BGR2YUV_YV12: case CV_RGBA2YUV_YV12: case CV_BGRA2YUV_YV12:
case CV_RGB2YUV_IYUV: case CV_BGR2YUV_IYUV: case CV_RGBA2YUV_IYUV: case CV_BGRA2YUV_IYUV:
{
if (dcn <= 0) dcn = 1;
const int bIdx = (code == CV_BGR2YUV_IYUV || code == CV_BGRA2YUV_IYUV || code == CV_BGR2YUV_YV12 || code == CV_BGRA2YUV_YV12) ? 0 : 2;
const int uIdx = (code == CV_BGR2YUV_IYUV || code == CV_BGRA2YUV_IYUV || code == CV_RGB2YUV_IYUV || code == CV_RGBA2YUV_IYUV) ? 1 : 2;
CV_Assert( (scn == 3 || scn == 4) && depth == CV_8U );
CV_Assert( dcn == 1 );
CV_Assert( sz.width % 2 == 0 && sz.height % 2 == 0 );
Size dstSz(sz.width, sz.height / 2 * 3);
_dst.create(dstSz, CV_MAKETYPE(depth, dcn));
dst = _dst.getMat();
switch(bIdx + uIdx*10)
{
case 10: cvtRGBtoYUV420p<0, 1>(src, dst); break;
case 12: cvtRGBtoYUV420p<2, 1>(src, dst); break;
case 20: cvtRGBtoYUV420p<0, 2>(src, dst); break;
case 22: cvtRGBtoYUV420p<2, 2>(src, dst); break;
default: CV_Error( CV_StsBadFlag, "Unknown/unsupported color conversion code" ); break;
};
}
break;
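
Because matching YUV 4:2:0 to RGB conversions already exist, the quickest end-to-end check of this new branch is a forward/backward round trip. A rough sketch, not from the commit (the flat test image and the error bound are arbitrary choices):

    #include <opencv2/core/core.hpp>
    #include <opencv2/imgproc/imgproc.hpp>

    int main()
    {
        // A flat color round-trips almost exactly; chroma subsampling only hurts detailed images.
        cv::Mat bgr(480, 640, CV_8UC3, cv::Scalar(60, 100, 140));

        cv::Mat yuv, back;
        cv::cvtColor(bgr, yuv, cv::COLOR_BGR2YUV_YV12);  // forward conversion added in this hunk
        cv::cvtColor(yuv, back, cv::COLOR_YUV2BGR_YV12); // pre-existing inverse conversion

        // Expect only small fixed-point rounding error on a flat image (bound chosen loosely).
        double maxDiff = cv::norm(bgr, back, cv::NORM_INF);
        return maxDiff <= 8 ? 0 : 1;
    }
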
case CV_YUV2RGB_UYVY: case CV_YUV2BGR_UYVY: case CV_YUV2RGBA_UYVY: case CV_YUV2BGRA_UYVY:
case CV_YUV2RGB_YUY2: case CV_YUV2BGR_YUY2: case CV_YUV2RGB_YVYU: case CV_YUV2BGR_YVYU:
case CV_YUV2RGBA_YUY2: case CV_YUV2BGRA_YUY2: case CV_YUV2RGBA_YVYU: case CV_YUV2BGRA_YVYU:


@@ -30,6 +30,16 @@ public:
static YUVreader* getReader(int code);
};
class RGBreader
{
public:
virtual ~RGBreader() {}
virtual RGB read(const Mat& rgb, int row, int col) = 0;
virtual int channels() = 0;
static RGBreader* getReader(int code);
};
class RGBwriter
{
public:
@@ -56,6 +66,21 @@ public:
static GRAYwriter* getWriter(int code);
};
class YUVwriter
{
public:
virtual ~YUVwriter() {}
virtual void write(Mat& yuv, int row, int col, const YUV& val) = 0;
virtual int channels() = 0;
virtual Size size(Size imgSize) = 0;
virtual bool requiresEvenHeight() { return true; }
virtual bool requiresEvenWidth() { return true; }
static YUVwriter* getWriter(int code);
};
class RGB888Writer : public RGBwriter
{
void write(Mat& rgb, int row, int col, const RGB& val)
@@ -99,6 +124,42 @@ class BGRA8888Writer : public RGBwriter
int channels() { return 4; }
};
class YUV420pWriter: public YUVwriter
{
int channels() { return 1; }
Size size(Size imgSize) { return Size(imgSize.width, imgSize.height + imgSize.height/2); }
};
class YV12Writer: public YUV420pWriter
{
void write(Mat& yuv, int row, int col, const YUV& val)
{
int h = yuv.rows * 2 / 3;
yuv.ptr<uchar>(row)[col] = val[0];
if( row % 2 == 0 && col % 2 == 0 )
{
yuv.ptr<uchar>(h + row/4)[col/2 + ((row/2) % 2) * (yuv.cols/2)] = val[2];
yuv.ptr<uchar>(h + (row/2 + h/2)/2)[col/2 + ((row/2 + h/2) % 2) * (yuv.cols/2)] = val[1];
}
}
};
class I420Writer: public YUV420pWriter
{
void write(Mat& yuv, int row, int col, const YUV& val)
{
int h = yuv.rows * 2 / 3;
yuv.ptr<uchar>(row)[col] = val[0];
if( row % 2 == 0 && col % 2 == 0 )
{
yuv.ptr<uchar>(h + row/4)[col/2 + ((row/2) % 2) * (yuv.cols/2)] = val[1];
yuv.ptr<uchar>(h + (row/2 + h/2)/2)[col/2 + ((row/2 + h/2) % 2) * (yuv.cols/2)] = val[2];
}
}
};
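
Both writers lay out a full-resolution Y plane followed by two quarter-size chroma planes packed into rows of the original width; the only difference is the chroma order (I420 is Y-U-V, YV12 is Y-V-U). A hedged sketch of pulling the planes back out of such a buffer (the helper is illustrative, not something defined in this test, and assumes a continuous matrix with height divisible by 4):

    #include <opencv2/core/core.hpp>

    // Split a w x (h*3/2) single-channel 4:2:0 buffer into Y, U and V views.
    // vFirst = true selects the YV12 (Y, V, U) plane order instead of I420 (Y, U, V).
    static void splitYUV420p(const cv::Mat& yuv, cv::Mat& Y, cv::Mat& U, cv::Mat& V, bool vFirst)
    {
        const int h = yuv.rows * 2 / 3;
        Y = yuv.rowRange(0, h);

        // Each chroma plane occupies h/4 packed rows; reshaping yields an (h/2) x (w/2) view.
        cv::Mat c1 = yuv.rowRange(h, h + h / 4).reshape(1, h / 2);
        cv::Mat c2 = yuv.rowRange(h + h / 4, yuv.rows).reshape(1, h / 2);
        U = vFirst ? c2 : c1;
        V = vFirst ? c1 : c2;
    }

    int main()
    {
        cv::Mat yuv(480 * 3 / 2, 640, CV_8UC1, cv::Scalar(128));
        cv::Mat Y, U, V;
        splitYUV420p(yuv, Y, U, V, true);
        CV_Assert(Y.rows == 480 && U.size() == cv::Size(320, 240) && V.size() == cv::Size(320, 240));
        return 0;
    }
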
class YUV420Reader: public YUVreader
{
int channels() { return 1; }
@@ -212,6 +273,49 @@ class YUV888Reader : public YUVreader
bool requiresEvenWidth() { return false; }
};
class RGB888Reader : public RGBreader
{
RGB read(const Mat& rgb, int row, int col)
{
return rgb.at<RGB>(row, col);
}
int channels() { return 3; }
};
class BGR888Reader : public RGBreader
{
RGB read(const Mat& rgb, int row, int col)
{
RGB tmp = rgb.at<RGB>(row, col);
return RGB(tmp[2], tmp[1], tmp[0]);
}
int channels() { return 3; }
};
class RGBA8888Reader : public RGBreader
{
RGB read(const Mat& rgb, int row, int col)
{
Vec4b rgba = rgb.at<Vec4b>(row, col);
return RGB(rgba[0], rgba[1], rgba[2]);
}
int channels() { return 4; }
};
class BGRA8888Reader : public RGBreader
{
RGB read(const Mat& rgb, int row, int col)
{
Vec4b rgba = rgb.at<Vec4b>(row, col);
return RGB(rgba[2], rgba[1], rgba[0]);
}
int channels() { return 4; }
};
class YUV2RGB_Converter
{
public:
@@ -237,6 +341,23 @@ public:
}
};
class RGB2YUV_Converter
{
public:
YUV convert(RGB rgb)
{
int r = rgb[0];
int g = rgb[1];
int b = rgb[2];
uchar y = saturate_cast<uchar>((int)( 0.257f*r + 0.504f*g + 0.098f*b + 0.5f) + 16);
uchar u = saturate_cast<uchar>((int)(-0.148f*r - 0.291f*g + 0.439f*b + 0.5f) + 128);
uchar v = saturate_cast<uchar>((int)( 0.439f*r - 0.368f*g - 0.071f*b + 0.5f) + 128);
return YUV(y, u, v);
}
};
YUVreader* YUVreader::getReader(int code)
{
switch(code)
@@ -295,6 +416,27 @@ YUVreader* YUVreader::getReader(int code)
}
}
RGBreader* RGBreader::getReader(int code)
{
switch(code)
{
case CV_RGB2YUV_YV12:
case CV_RGB2YUV_I420:
return new RGB888Reader();
case CV_BGR2YUV_YV12:
case CV_BGR2YUV_I420:
return new BGR888Reader();
case CV_RGBA2YUV_I420:
case CV_RGBA2YUV_YV12:
return new RGBA8888Reader();
case CV_BGRA2YUV_YV12:
case CV_BGRA2YUV_I420:
return new BGRA8888Reader();
default:
return 0;
};
}
RGBwriter* RGBwriter::getWriter(int code)
{
switch(code)
@@ -355,6 +497,25 @@ GRAYwriter* GRAYwriter::getWriter(int code)
}
}
YUVwriter* YUVwriter::getWriter(int code)
{
switch(code)
{
case CV_RGB2YUV_YV12:
case CV_BGR2YUV_YV12:
case CV_RGBA2YUV_YV12:
case CV_BGRA2YUV_YV12:
return new YV12Writer();
case CV_RGB2YUV_I420:
case CV_BGR2YUV_I420:
case CV_RGBA2YUV_I420:
case CV_BGRA2YUV_I420:
return new I420Writer();
default:
return 0;
};
}
template<class convertor>
void referenceYUV2RGB(const Mat& yuv, Mat& rgb, YUVreader* yuvReader, RGBwriter* rgbWriter)
{
@@ -375,6 +536,64 @@ void referenceYUV2GRAY(const Mat& yuv, Mat& rgb, YUVreader* yuvReader, GRAYwrite
grayWriter->write(rgb, row, col, cvt.convert(yuvReader->read(yuv, row, col)));
}
template<class convertor>
void referenceRGB2YUV(const Mat& rgb, Mat& yuv, RGBreader* rgbReader, YUVwriter* yuvWriter)
{
convertor cvt;
for(int row = 0; row < rgb.rows; ++row)
for(int col = 0; col < rgb.cols; ++col)
yuvWriter->write(yuv, row, col, cvt.convert(rgbReader->read(rgb, row, col)));
}
struct ConversionYUV
{
ConversionYUV( const int code )
{
yuvReader_ = YUVreader :: getReader(code);
yuvWriter_ = YUVwriter :: getWriter(code);
rgbReader_ = RGBreader :: getReader(code);
rgbWriter_ = RGBwriter :: getWriter(code);
grayWriter_ = GRAYwriter:: getWriter(code);
}
int getDcn()
{
return (rgbWriter_ != 0) ? rgbWriter_->channels() : ((grayWriter_ != 0) ? grayWriter_->channels() : yuvWriter_->channels());
}
int getScn()
{
return (yuvReader_ != 0) ? yuvReader_->channels() : rgbReader_->channels();
}
Size getSrcSize( const Size& imgSize )
{
return (yuvReader_ != 0) ? yuvReader_->size(imgSize) : imgSize;
}
Size getDstSize( const Size& imgSize )
{
return (yuvWriter_ != 0) ? yuvWriter_->size(imgSize) : imgSize;
}
bool requiresEvenHeight()
{
return (yuvReader_ != 0) ? yuvReader_->requiresEvenHeight() : ((yuvWriter_ != 0) ? yuvWriter_->requiresEvenHeight() : false);
}
bool requiresEvenWidth()
{
return (yuvReader_ != 0) ? yuvReader_->requiresEvenWidth() : ((yuvWriter_ != 0) ? yuvWriter_->requiresEvenWidth() : false);
}
YUVreader* yuvReader_;
YUVwriter* yuvWriter_;
RGBreader* rgbReader_;
RGBwriter* rgbWriter_;
GRAYwriter* grayWriter_;
};
CV_ENUM(YUVCVTS, CV_YUV2RGB_NV12, CV_YUV2BGR_NV12, CV_YUV2RGB_NV21, CV_YUV2BGR_NV21,
CV_YUV2RGBA_NV12, CV_YUV2BGRA_NV12, CV_YUV2RGBA_NV21, CV_YUV2BGRA_NV21,
CV_YUV2RGB_YV12, CV_YUV2BGR_YV12, CV_YUV2RGB_IYUV, CV_YUV2BGR_IYUV,
@@ -383,7 +602,8 @@ CV_ENUM(YUVCVTS, CV_YUV2RGB_NV12, CV_YUV2BGR_NV12, CV_YUV2RGB_NV21, CV_YUV2BGR_N
CV_YUV2RGB_YUY2, CV_YUV2BGR_YUY2, CV_YUV2RGB_YVYU, CV_YUV2BGR_YVYU,
CV_YUV2RGBA_YUY2, CV_YUV2BGRA_YUY2, CV_YUV2RGBA_YVYU, CV_YUV2BGRA_YVYU,
CV_YUV2GRAY_420, CV_YUV2GRAY_UYVY, CV_YUV2GRAY_YUY2,
-CV_YUV2BGR, CV_YUV2RGB);
+CV_YUV2BGR, CV_YUV2RGB, CV_RGB2YUV_YV12, CV_BGR2YUV_YV12, CV_RGBA2YUV_YV12,
+CV_BGRA2YUV_YV12, CV_RGB2YUV_I420, CV_BGR2YUV_I420, CV_RGBA2YUV_I420, CV_BGRA2YUV_I420);
typedef ::testing::TestWithParam<YUVCVTS> Imgproc_ColorYUV;
@@ -392,31 +612,32 @@ TEST_P(Imgproc_ColorYUV, accuracy)
int code = GetParam();
RNG& random = theRNG();
-YUVreader* yuvReader = YUVreader::getReader(code);
-RGBwriter* rgbWriter = RGBwriter::getWriter(code);
-GRAYwriter* grayWriter = GRAYwriter::getWriter(code);
-int dcn = (rgbWriter == 0) ? grayWriter->channels() : rgbWriter->channels();
+ConversionYUV cvt(code);
+const int scn = cvt.getScn();
+const int dcn = cvt.getDcn();
for(int iter = 0; iter < 30; ++iter)
{
Size sz(random.uniform(1, 641), random.uniform(1, 481));
-if(yuvReader->requiresEvenWidth()) sz.width += sz.width % 2;
-if(yuvReader->requiresEvenHeight()) sz.height += sz.height % 2;
-Size ysz = yuvReader->size(sz);
-Mat src = Mat(ysz.height, ysz.width * yuvReader->channels(), CV_8UC1).reshape(yuvReader->channels());
-Mat dst = Mat(sz.height, sz.width * dcn, CV_8UC1).reshape(dcn);
-Mat gold(sz, CV_8UC(dcn));
+if(cvt.requiresEvenWidth()) sz.width += sz.width % 2;
+if(cvt.requiresEvenHeight()) sz.height += sz.height % 2;
+Size srcSize = cvt.getSrcSize(sz);
+Mat src = Mat(srcSize.height, srcSize.width * scn, CV_8UC1).reshape(scn);
+Size dstSize = cvt.getDstSize(sz);
+Mat dst = Mat(dstSize.height, dstSize.width * dcn, CV_8UC1).reshape(dcn);
+Mat gold(dstSize, CV_8UC(dcn));
random.fill(src, RNG::UNIFORM, 0, 256);
-if(rgbWriter)
-referenceYUV2RGB<YUV2RGB_Converter>(src, gold, yuvReader, rgbWriter);
-else
-referenceYUV2GRAY<YUV2GRAY_Converter>(src, gold, yuvReader, grayWriter);
+if(cvt.rgbWriter_)
+referenceYUV2RGB<YUV2RGB_Converter> (src, gold, cvt.yuvReader_, cvt.rgbWriter_);
+else if(cvt.grayWriter_)
+referenceYUV2GRAY<YUV2GRAY_Converter>(src, gold, cvt.yuvReader_, cvt.grayWriter_);
+else if(cvt.yuvWriter_)
+referenceRGB2YUV<RGB2YUV_Converter> (src, gold, cvt.rgbReader_, cvt.yuvWriter_);
cv::cvtColor(src, dst, code, -1);
@@ -429,40 +650,41 @@ TEST_P(Imgproc_ColorYUV, roi_accuracy)
int code = GetParam();
RNG& random = theRNG();
-YUVreader* yuvReader = YUVreader::getReader(code);
-RGBwriter* rgbWriter = RGBwriter::getWriter(code);
-GRAYwriter* grayWriter = GRAYwriter::getWriter(code);
-int dcn = (rgbWriter == 0) ? grayWriter->channels() : rgbWriter->channels();
+ConversionYUV cvt(code);
+const int scn = cvt.getScn();
+const int dcn = cvt.getDcn();
for(int iter = 0; iter < 30; ++iter)
{
Size sz(random.uniform(1, 641), random.uniform(1, 481));
-if(yuvReader->requiresEvenWidth()) sz.width += sz.width % 2;
-if(yuvReader->requiresEvenHeight()) sz.height += sz.height % 2;
+if(cvt.requiresEvenWidth()) sz.width += sz.width % 2;
+if(cvt.requiresEvenHeight()) sz.height += sz.height % 2;
int roi_offset_top = random.uniform(0, 6);
int roi_offset_bottom = random.uniform(0, 6);
int roi_offset_left = random.uniform(0, 6);
int roi_offset_right = random.uniform(0, 6);
-Size ysz = yuvReader->size(sz);
-Mat src_full(ysz.height + roi_offset_top + roi_offset_bottom, ysz.width + roi_offset_left + roi_offset_right, CV_8UC(yuvReader->channels()));
-Mat dst_full(sz.height + roi_offset_left + roi_offset_right, sz.width + roi_offset_top + roi_offset_bottom, CV_8UC(dcn), Scalar::all(0));
+Size srcSize = cvt.getSrcSize(sz);
+Mat src_full(srcSize.height + roi_offset_top + roi_offset_bottom, srcSize.width + roi_offset_left + roi_offset_right, CV_8UC(scn));
+Size dstSize = cvt.getDstSize(sz);
+Mat dst_full(dstSize.height + roi_offset_left + roi_offset_right, dstSize.width + roi_offset_top + roi_offset_bottom, CV_8UC(dcn), Scalar::all(0));
Mat gold_full(dst_full.size(), CV_8UC(dcn), Scalar::all(0));
random.fill(src_full, RNG::UNIFORM, 0, 256);
-Mat src = src_full(Range(roi_offset_top, roi_offset_top + ysz.height), Range(roi_offset_left, roi_offset_left + ysz.width));
-Mat dst = dst_full(Range(roi_offset_left, roi_offset_left + sz.height), Range(roi_offset_top, roi_offset_top + sz.width));
-Mat gold = gold_full(Range(roi_offset_left, roi_offset_left + sz.height), Range(roi_offset_top, roi_offset_top + sz.width));
+Mat src = src_full(Range(roi_offset_top, roi_offset_top + srcSize.height), Range(roi_offset_left, roi_offset_left + srcSize.width));
+Mat dst = dst_full(Range(roi_offset_left, roi_offset_left + dstSize.height), Range(roi_offset_top, roi_offset_top + dstSize.width));
+Mat gold = gold_full(Range(roi_offset_left, roi_offset_left + dstSize.height), Range(roi_offset_top, roi_offset_top + dstSize.width));
-if(rgbWriter)
-referenceYUV2RGB<YUV2RGB_Converter>(src, gold, yuvReader, rgbWriter);
-else
-referenceYUV2GRAY<YUV2GRAY_Converter>(src, gold, yuvReader, grayWriter);
+if(cvt.rgbWriter_)
+referenceYUV2RGB<YUV2RGB_Converter> (src, gold, cvt.yuvReader_, cvt.rgbWriter_);
+else if(cvt.grayWriter_)
+referenceYUV2GRAY<YUV2GRAY_Converter>(src, gold, cvt.yuvReader_, cvt.grayWriter_);
+else if(cvt.yuvWriter_)
+referenceRGB2YUV<RGB2YUV_Converter> (src, gold, cvt.rgbReader_, cvt.yuvWriter_);
cv::cvtColor(src, dst, code, -1);
@@ -475,7 +697,9 @@ INSTANTIATE_TEST_CASE_P(cvt420, Imgproc_ColorYUV,
(int)CV_YUV2RGBA_NV12, (int)CV_YUV2BGRA_NV12, (int)CV_YUV2RGBA_NV21, (int)CV_YUV2BGRA_NV21,
(int)CV_YUV2RGB_YV12, (int)CV_YUV2BGR_YV12, (int)CV_YUV2RGB_IYUV, (int)CV_YUV2BGR_IYUV,
(int)CV_YUV2RGBA_YV12, (int)CV_YUV2BGRA_YV12, (int)CV_YUV2RGBA_IYUV, (int)CV_YUV2BGRA_IYUV,
-(int)CV_YUV2GRAY_420));
+(int)CV_YUV2GRAY_420, (int)CV_RGB2YUV_YV12, (int)CV_BGR2YUV_YV12, (int)CV_RGBA2YUV_YV12,
+(int)CV_BGRA2YUV_YV12, (int)CV_RGB2YUV_I420, (int)CV_BGR2YUV_I420, (int)CV_RGBA2YUV_I420,
+(int)CV_BGRA2YUV_I420));
INSTANTIATE_TEST_CASE_P(cvt422, Imgproc_ColorYUV,
::testing::Values((int)CV_YUV2RGB_UYVY, (int)CV_YUV2BGR_UYVY, (int)CV_YUV2RGBA_UYVY, (int)CV_YUV2BGRA_UYVY,


@@ -301,16 +301,14 @@ endif()
# Additional target properties
set_target_properties(${the_module} PROPERTIES
OUTPUT_NAME "${the_module}${LIB_NAME_SUFIX}"
-#DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_PATH}
-LIBRARY_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_PATH}
RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}
INSTALL_NAME_DIR ${OPENCV_LIB_INSTALL_PATH}
LINK_INTERFACE_LIBRARIES ""
)
-if(ANDROID)
-set_target_properties(${the_module} PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${LIBRARY_OUTPUT_PATH})
-else()
+if(WIN32)
set_target_properties(${the_module} PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
endif()


@@ -514,7 +514,7 @@ static bool pyopencv_to(PyObject* obj, double& value, const char* name = "<unkno
(void)name;
if(!obj || obj == Py_None)
return true;
-if(PyInt_CheckExact(obj))
+if(!!PyInt_CheckExact(obj))
value = (double)PyInt_AS_LONG(obj);
else
value = PyFloat_AsDouble(obj);
@@ -531,7 +531,7 @@ static bool pyopencv_to(PyObject* obj, float& value, const char* name = "<unknow
(void)name;
if(!obj || obj == Py_None)
return true;
-if(PyInt_CheckExact(obj))
+if(!!PyInt_CheckExact(obj))
value = (float)PyInt_AS_LONG(obj);
else
value = (float)PyFloat_AsDouble(obj);
@@ -627,7 +627,7 @@ static inline bool pyopencv_to(PyObject* obj, Point& p, const char* name = "<unk
(void)name;
if(!obj || obj == Py_None)
return true;
-if(PyComplex_CheckExact(obj))
+if(!!PyComplex_CheckExact(obj))
{
Py_complex c = PyComplex_AsCComplex(obj);
p.x = saturate_cast<int>(c.real);
@@ -642,7 +642,7 @@ static inline bool pyopencv_to(PyObject* obj, Point2f& p, const char* name = "<u
(void)name;
if(!obj || obj == Py_None)
return true;
-if(PyComplex_CheckExact(obj))
+if(!!PyComplex_CheckExact(obj))
{
Py_complex c = PyComplex_AsCComplex(obj);
p.x = saturate_cast<float>(c.real);
@@ -993,7 +993,7 @@ static bool pyopencv_to(PyObject *o, cv::flann::IndexParams& p, const char *name
const char* value = PyString_AsString(item);
p.setString(k, value);
}
-else if( PyBool_Check(item) )
+else if( !!PyBool_Check(item) )
p.setBool(k, item == Py_True);
else if( PyInt_Check(item) )
{


@@ -1158,7 +1158,7 @@ static PyObject* cvseq_map_getitem(PyObject *o, PyObject *item)
if (i < 0)
i += (int)cvseq_seq_length(o);
return cvseq_seq_getitem(o, i);
-} else if (PySlice_Check(item)) {
+} else if (!!PySlice_Check(item)) {
Py_ssize_t start, stop, step, slicelength, cur, i;
PyObject* result;
@@ -1975,7 +1975,7 @@ struct dims
static int convert_to_dim(PyObject *item, int i, dims *dst, CvArr *cva, const char *name = "no_name")
{
-if (PySlice_Check(item)) {
+if (!!PySlice_Check(item)) {
Py_ssize_t start, stop, step, slicelength;
PySlice_GetIndicesEx((PySliceObject*)item, cvGetDimSize(cva, i), &start, &stop, &step, &slicelength);
dst->i[i] = (int)start;