Fixed Android samples; Fixed bugs in cmake (java module).
commit 930b2995eb
parent fd4fe3ef5a
@@ -1,5 +1,6 @@
.. |Author_AnaH| unicode:: Ana U+0020 Huam U+00E1 n
.. |Author_BernatG| unicode:: Bern U+00E1 t U+0020 G U+00E1 bor
.. |Author_AndreyK| unicode:: Andrey U+0020 Kamaev
@@ -0,0 +1,38 @@
.. _Android_Binary_Package:


Using Android binary package with Eclipse
*****************************************

.. include:: <isonum.txt>

This tutorial was tested using Ubuntu 10.04 and Windows 7 SP1 operating systems. Nevertheless, it should also work on any other **OS**\ es supported by the Android SDK (including Mac OS X). If you encounter errors after following the steps described here, feel free to contact us via the *android-opencv* discussion group https://groups.google.com/group/android-opencv/ and we will try to fix your problem.

.. _Android_Environment_Setup_Lite:

Setup environment to start Android Development
==============================================

You need the following tools to be installed:

1. **Sun JDK 6**

   Visit http://www.oracle.com/technetwork/java/javase/downloads/index.html and download the installer for your OS.

   Here is a detailed JDK installation guide for Ubuntu and Mac OS: http://source.android.com/source/initializing.html (only the JDK sections are applicable for OpenCV).

   .. note:: OpenJDK is not usable for Android development because the Android SDK supports only the Sun JDK.

#. **Android SDK**

   Get the latest Android SDK from http://developer.android.com/sdk/index.html

   Here is Google's install guide for the SDK: http://developer.android.com/sdk/installing.html

   .. note:: If you choose the SDK packed into a Windows installer, then you should have a 32-bit JRE installed. It is not needed for Android development, but the installer is an x86 application and requires a 32-bit Java runtime.

   .. note:: If you are running a 64-bit version of Ubuntu Linux, then you need the ia32 shared libraries (for amd64 and ia64 systems) installed. You can install them with the following command:

      .. code-block:: bash

         sudo apt-get install ia32-libs
Binary file not shown. (Added image, 8.3 KiB.)
@@ -97,9 +97,29 @@ Here you can read tutorials about how to set up your computer to work with the O
  =========== ======================================================

  .. |WinVSHowT| image:: images/visual-studio-2010-logo.jpg
     :height: 90pt
     :width: 90pt
  .. |WinVSHowT| image:: images/visual-studio-2010-logo.jpg
     :height: 90pt
     :width: 90pt

* **Android**

  .. tabularcolumns:: m{100pt} m{300pt}
  .. cssclass:: toctableopencv

  ================ ======================================================
  |AndroidBinPack| **Title:** :ref:`Android_Binary_Package`

                   *Compatibility:* > OpenCV 2.3.1

                   *Author:* |Author_AndreyK|

                   You will learn how to set up OpenCV for the Android platform!

  ================ ======================================================

  .. |AndroidBinPack| image:: images/android_logo.png
     :height: 90pt
     :width: 90pt

* **From where to start?**

@@ -148,5 +168,6 @@ Here you can read tutorials about how to set up your computer to work with the O
   ../linux_eclipse/linux_eclipse
   ../windows_install/windows_install
   ../windows_visual_studio_Opencv/windows_visual_studio_Opencv
   ../android_binary_package/android_binary_package
   ../display_image/display_image
   ../load_save_image/load_save_image
@@ -190,7 +190,9 @@ foreach(java_file ${documented_java_files})
)
LIST(APPEND additional_clean_files "${JAVA_OUTPUT_DIR}/${java_file_name}")
if(ANDROID)
install(FILES "${JAVA_OUTPUT_DIR}/${java_file_name}" DESTINATION src/org/opencv COMPONENT main)
get_filename_component(install_dir "${java_file_name}" PATH)
message("!!!${java_file_name}!!!!!src/org/opencv/${install_dir}!!!!!!!!!!!!")
install(FILES "${JAVA_OUTPUT_DIR}/${java_file_name}" DESTINATION src/org/opencv/${install_dir} COMPONENT main)
endif()
endforeach()
@@ -2,7 +2,9 @@ package org.opencv.samples.puzzle15;
import java.util.List;

import org.opencv.*;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;

import android.content.Context;
import android.graphics.Bitmap;
@@ -46,15 +48,15 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}

mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}

public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
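The recurring fix in these samples is the switch from the old lowercase wrapper names (highgui, core, imgproc) to the capitalized classes (Highgui, Core, Imgproc). For reference, here is a minimal, self-contained sketch of the corrected camera setup; it assumes the OpenCV 2.3.x Android Java API used by the samples above, and the CameraSketch class and openCamera method are illustrative names, not part of the commit.

.. code-block:: java

   import org.opencv.highgui.Highgui;
   import org.opencv.highgui.VideoCapture;

   public class CameraSketch {
       // Opens the native Android camera via the OpenCV Java wrapper and
       // requests a frame size, using the capitalized Highgui constants.
       public static VideoCapture openCamera(int width, int height) {
           VideoCapture camera = new VideoCapture(Highgui.CV_CAP_ANDROID);
           if (camera.isOpened()) {
               camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, width);   // requested frame width
               camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, height); // requested frame height
               return camera;
           }
           camera.release(); // could not connect to the camera
           return null;
       }
   }

The returned capture should be released by the caller when it is no longer needed.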
@@ -1,6 +1,13 @@
package org.opencv.samples.puzzle15;

import org.opencv.*;
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.content.Context;
import android.graphics.Bitmap;
@@ -30,7 +37,7 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
mTextWidths = new int[gridArea];
mTextHeights = new int[gridArea];
for (int i = 0; i < gridArea; i++) {
Size s = core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
@@ -106,7 +113,7 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {

@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
int cols = mRgba.cols();
int rows = mRgba.rows();

@@ -121,7 +128,7 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
else {
mCells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2,
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2,
(rows / gridSize + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
}
}
@@ -139,8 +146,8 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {

private void drawGrid(int cols, int rows) {
for (int i = 1; i < gridSize; i++) {
core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);
core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);
Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);
Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);
}
}
@@ -7,7 +7,14 @@ import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;

import org.opencv.*;
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.objdetect.CascadeClassifier;

import android.content.Context;
@@ -68,8 +75,8 @@ class FdView extends SampleCvViewBase {

@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

if (mCascade != null) {
int height = mGray.rows();
@@ -79,7 +86,7 @@ class FdView extends SampleCvViewBase {
, new Size(faceSize, faceSize));

for (Rect r : faces)
core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
}

Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
@@ -2,7 +2,7 @@ package org.opencv.samples.fd;
import java.text.DecimalFormat;

import org.opencv.core;
import org.opencv.core.Core;

import android.graphics.Canvas;
import android.graphics.Color;
@@ -22,8 +22,8 @@ public class FpsMeter {
public void init() {
step = 20;
framesCouner = 0;
freq = core.getTickFrequency();
prevFrameTime = core.getTickCount();
freq = Core.getTickFrequency();
prevFrameTime = Core.getTickCount();
strfps = "";

paint = new Paint();
@@ -34,7 +34,7 @@ public class FpsMeter {
public void measure() {
framesCouner++;
if (framesCouner % step == 0) {
long time = core.getTickCount();
long time = Core.getTickCount();
double fps = step * freq / (time - prevFrameTime);
prevFrameTime = time;
DecimalFormat twoPlaces = new DecimalFormat("0.00");
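The FpsMeter change above is the same rename applied to the timing helpers: getTickCount() and getTickFrequency() are static methods on org.opencv.core.Core. Below is a minimal sketch of that measurement pattern; it assumes only the Core class shown in the diff, and the FpsSketch name is illustrative.

.. code-block:: java

   import java.text.DecimalFormat;
   import org.opencv.core.Core;

   public class FpsSketch {
       private static final int STEP = 20;                   // report every 20 frames
       private final double freq = Core.getTickFrequency();  // ticks per second
       private long prevTime = Core.getTickCount();
       private int frames = 0;

       // Call once per processed frame; returns a formatted FPS string
       // every STEP frames, otherwise null.
       public String measure() {
           frames++;
           if (frames % STEP != 0)
               return null;
           long now = Core.getTickCount();
           double fps = STEP * freq / (now - prevTime); // frames divided by elapsed seconds
           prevTime = now;
           return new DecimalFormat("0.00").format(fps) + " FPS";
       }
   }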
@@ -2,7 +2,9 @@ package org.opencv.samples.fd;
import java.util.List;

import org.opencv.*;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;

import android.content.Context;
import android.graphics.Bitmap;
@@ -48,15 +50,15 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}

mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}

public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
@@ -2,7 +2,7 @@ package org.opencv.samples.imagemanipulations;
import java.text.DecimalFormat;

import org.opencv.core;
import org.opencv.core.Core;

import android.graphics.Canvas;
import android.graphics.Color;
@@ -22,8 +22,8 @@ public class FpsMeter {
public void init() {
step = 20;
framesCouner = 0;
freq = core.getTickFrequency();
prevFrameTime = core.getTickCount();
freq = Core.getTickFrequency();
prevFrameTime = Core.getTickCount();
strfps = "";

paint = new Paint();
@@ -34,7 +34,7 @@ public class FpsMeter {
public void measure() {
framesCouner++;
if (framesCouner % step == 0) {
long time = core.getTickCount();
long time = Core.getTickCount();
double fps = step * freq / (time - prevFrameTime);
prevFrameTime = time;
DecimalFormat twoPlaces = new DecimalFormat("0.00");
@@ -1,6 +1,15 @@
package org.opencv.samples.imagemanipulations;

import org.opencv.*;
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.content.Context;
import android.graphics.Bitmap;
@@ -75,52 +84,52 @@ class ImageManipulationsView extends SampleCvViewBase {
switch (ImageManipulationsActivity.viewMode) {

case ImageManipulationsActivity.VIEW_MODE_RGBA:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
break;

case ImageManipulationsActivity.VIEW_MODE_CANNY:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
CreateAuxiliaryMats();

imgproc.Canny(mGrayInnerWindow, mGrayInnerWindow, 80, 90);
imgproc.cvtColor(mGrayInnerWindow, mRgbaInnerWindow, imgproc.COLOR_GRAY2BGRA, 4);
Imgproc.Canny(mGrayInnerWindow, mGrayInnerWindow, 80, 90);
Imgproc.cvtColor(mGrayInnerWindow, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;

case ImageManipulationsActivity.VIEW_MODE_SOBEL:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
CreateAuxiliaryMats();

imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10);
imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, imgproc.COLOR_GRAY2BGRA, 4);
Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;

case ImageManipulationsActivity.VIEW_MODE_SEPIA:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
core.transform(mRgba, mRgba, mSepiaKernel);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Core.transform(mRgba, mRgba, mSepiaKernel);
break;

case ImageManipulationsActivity.VIEW_MODE_BLUR:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mBlurWindow == null)
CreateAuxiliaryMats();
imgproc.blur(mBlurWindow, mBlurWindow, new Size(15, 15));
Imgproc.blur(mBlurWindow, mBlurWindow, new Size(15, 15));
break;

case ImageManipulationsActivity.VIEW_MODE_ZOOM:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mZoomCorner == null || mZoomWindow == null)
CreateAuxiliaryMats();
imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());

Size wsize = mZoomWindow.size();
core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
Core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
break;
}

@@ -139,16 +148,16 @@ class ImageManipulationsView extends SampleCvViewBase {

synchronized (this) {
// Explicitly deallocate Mats
if (mZoomWindow != null)
mZoomWindow.dispose();
if (mZoomCorner != null)
mZoomCorner.dispose();
if (mBlurWindow != null)
mBlurWindow.dispose();
if (mGrayInnerWindow != null)
mGrayInnerWindow.dispose();
if (mRgbaInnerWindow != null)
mRgbaInnerWindow.dispose();
if (mZoomWindow != null)
mZoomWindow.dispose();
if (mZoomCorner != null)
mZoomCorner.dispose();
if (mBlurWindow != null)
mBlurWindow.dispose();
if (mGrayInnerWindow != null)
mGrayInnerWindow.dispose();
if (mRgbaInnerWindow != null)
mRgbaInnerWindow.dispose();
if (mRgba != null)
mRgba.dispose();
if (mGray != null)
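ImageManipulationsView routes every view mode through the renamed Imgproc, Core, and Highgui classes. As a compact reference, here is a sketch of just the Canny path with the corrected names; it assumes the OpenCV 2.3.x Android Java API used by the sample, the thresholds (80/90) are taken from the diff, and CannySketch is an illustrative class name.

.. code-block:: java

   import org.opencv.core.Mat;
   import org.opencv.highgui.Highgui;
   import org.opencv.highgui.VideoCapture;
   import org.opencv.imgproc.Imgproc;

   public class CannySketch {
       private final Mat gray = new Mat();
       private final Mat rgba = new Mat();

       // Grabs the grey camera frame, runs Canny in place (as the sample does),
       // and converts the edge map back to a 4-channel image for display.
       public Mat processFrame(VideoCapture capture) {
           capture.retrieve(gray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
           Imgproc.Canny(gray, gray, 80, 90);
           Imgproc.cvtColor(gray, rgba, Imgproc.COLOR_GRAY2BGRA, 4);
           return rgba;
       }
   }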
@@ -2,7 +2,9 @@ package org.opencv.samples.imagemanipulations;
import java.util.List;

import org.opencv.*;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;

import android.content.Context;
import android.graphics.Bitmap;
@@ -48,15 +50,15 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}

mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}

public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
@@ -3,7 +3,6 @@ package org.opencv.samples.tutorial1;
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
@@ -1,6 +1,13 @@
package org.opencv.samples.tutorial2;

import org.opencv.*;
import org.opencv.android;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.content.Context;
import android.graphics.Bitmap;
@@ -31,17 +38,17 @@ class Sample2View extends SampleCvViewBase {
protected Bitmap processFrame(VideoCapture capture) {
switch (Sample2NativeCamera.viewMode) {
case Sample2NativeCamera.VIEW_MODE_GRAY:
capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
imgproc.cvtColor(mGray, mRgba, imgproc.COLOR_GRAY2RGBA, 4);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample2NativeCamera.VIEW_MODE_RGBA:
capture.retrieve(mRgba, highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
capture.retrieve(mGray, highgui.CV_CAP_ANDROID_GREY_FRAME);
imgproc.Canny(mGray, mIntermediateMat, 80, 100);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.COLOR_GRAY2BGRA, 4);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
}
@@ -2,7 +2,9 @@ package org.opencv.samples.tutorial2;
import java.util.List;

import org.opencv.*;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;

import android.content.Context;
import android.graphics.Bitmap;
@@ -46,15 +48,15 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}

mCamera.set(highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}

public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(highgui.CV_CAP_ANDROID);
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
@@ -1,6 +1,9 @@
package org.opencv.samples.tutorial4;

import org.opencv.*;
import org.opencv.android;
import org.opencv.core.Mat;
import org.opencv.core.CvType;
import org.opencv.imgproc.Imgproc;

import android.content.Context;
import android.graphics.Bitmap;
@@ -36,17 +39,17 @@ class Sample4View extends SampleViewBase {

switch (Sample4Mixed.viewMode) {
case Sample4Mixed.VIEW_MODE_GRAY:
imgproc.cvtColor(mGraySubmat, mRgba, imgproc.COLOR_GRAY2RGBA, 4);
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample4Mixed.VIEW_MODE_RGBA:
imgproc.cvtColor(mYuv, mRgba, imgproc.COLOR_YUV420i2RGB, 4);
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
break;
case Sample4Mixed.VIEW_MODE_CANNY:
imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.COLOR_GRAY2BGRA, 4);
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
imgproc.cvtColor(mYuv, mRgba, imgproc.COLOR_YUV420i2RGB, 4);
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}