Merge branch '2.4'

Andrey Kamaev
2012-10-30 15:10:17 +04:00
48 changed files with 905 additions and 196 deletions

View File: Puzzle15Activity.java

@@ -4,8 +4,8 @@ import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import android.os.Bundle;
import android.app.Activity;
@@ -18,13 +18,13 @@ import android.view.View;
public class Puzzle15Activity extends Activity implements CvCameraViewListener, View.OnTouchListener {
private static final String TAG = "Sample::Puzzle15::Activity";
private JavaCameraView mOpenCvCameraView;
private Puzzle15Processor mPuzzle15;
private CameraBridgeViewBase mOpenCvCameraView;
private int mGameWidth;
private int mGameHeight;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@@ -54,7 +54,7 @@ public class Puzzle15Activity extends Activity implements CvCameraViewListener,
setContentView(R.layout.activity_puzzle15);
mOpenCvCameraView = (JavaCameraView) findViewById(R.id.puzzle_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.puzzle_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
mPuzzle15 = new Puzzle15Processor();
mPuzzle15.prepareNewGame();

View File: Puzzle15Processor.java

@@ -20,14 +20,13 @@ public class Puzzle15Processor {
private static final int GRID_AREA = GRID_SIZE * GRID_SIZE;
private static final int GRID_EMPTY_INDEX = GRID_AREA - 1;
private static final String TAG = "Puzzle15Processor";
private static final Scalar GRID_EMPTY_COLOR = new Scalar(0x33, 0x33, 0x33, 0xFF);
private int[] mIndexes;
private int[] mTextWidths;
private int[] mTextHeights;
private Mat mRgba15;
private Mat[] mCells;
private Mat[] mCells15;
private boolean mShowTileNumbers = true;
@@ -54,8 +53,6 @@ public class Puzzle15Processor {
*/
public synchronized void prepareGameSize(int width, int height) {
mRgba15 = new Mat(height, width, CvType.CV_8UC4);
mCells = new Mat[GRID_AREA];
mCells15 = new Mat[GRID_AREA];
for (int i = 0; i < GRID_SIZE; i++) {
@@ -76,6 +73,7 @@ public class Puzzle15Processor {
* the tiles as specified by mIndexes array
*/
public synchronized Mat puzzleFrame(Mat inputPicture) {
Mat[] cells = new Mat[GRID_AREA];
int rows = inputPicture.rows();
int cols = inputPicture.cols();
@@ -85,7 +83,7 @@ public class Puzzle15Processor {
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
mCells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
cells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
}
}
@@ -96,9 +94,9 @@ public class Puzzle15Processor {
for (int i = 0; i < GRID_AREA; i++) {
int idx = mIndexes[i];
if (idx == GRID_EMPTY_INDEX)
mCells15[i].setTo(new Scalar(0x33, 0x33, 0x33, 0xFF));
mCells15[i].setTo(GRID_EMPTY_COLOR);
else {
mCells[idx].copyTo(mCells15[i]);
cells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2,
(rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
@@ -106,6 +104,9 @@ public class Puzzle15Processor {
}
}
for (int i = 0; i < GRID_AREA; i++)
cells[i].release();
drawGrid(cols, rows, mRgba15);
return mRgba15;

View File: ColorBlobDetectionActivity.java

@@ -12,8 +12,8 @@ import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
@@ -26,9 +26,9 @@ import android.view.WindowManager;
import android.view.View.OnTouchListener;
public class ColorBlobDetectionActivity extends Activity implements OnTouchListener, CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
@@ -37,7 +37,7 @@ public class ColorBlobDetectionActivity extends Activity implements OnTouchListe
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -71,7 +71,7 @@ public class ColorBlobDetectionActivity extends Activity implements OnTouchListe
setContentView(R.layout.color_blob_detection_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.color_blob_detection_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: FdActivity.java

@@ -14,7 +14,7 @@ import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
@@ -53,7 +53,7 @@ public class FdActivity extends Activity implements CvCameraViewListener {
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -125,7 +125,7 @@ public class FdActivity extends Activity implements CvCameraViewListener {
setContentView(R.layout.face_detect_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: ImageManipulationsActivity.java

@@ -13,7 +13,7 @@ import org.opencv.core.MatOfInt;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
@@ -45,7 +45,7 @@ public class ImageManipulationsActivity extends Activity implements CvCameraView
private MenuItem mItemPreviewZoom;
private MenuItem mItemPreviewPixelize;
private MenuItem mItemPreviewPosterize;
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private Size mSize0;
private Size mSizeRgba;
@@ -106,7 +106,7 @@ public class ImageManipulationsActivity extends Activity implements CvCameraView
setContentView(R.layout.image_manipulations_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.image_manipulations_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.image_manipulations_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: tutorial1_surface_view.xml

@@ -6,6 +6,13 @@
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial1_activity_surface_view" />
android:visibility="gone"
android:id="@+id/tutorial1_activity_java_surface_view" />
<org.opencv.android.NativeCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:visibility="gone"
android:id="@+id/tutorial1_activity_native_surface_view" />
</LinearLayout>

View File: Sample1Java.java

@@ -4,19 +4,25 @@ import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
public class Sample1Java extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -49,7 +55,13 @@ public class Sample1Java extends Activity implements CvCameraViewListener {
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial1_activity_surface_view);
if (mIsJavaCamera)
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
else
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@@ -72,6 +84,40 @@ public class Sample1Java extends Activity implements CvCameraViewListener {
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemSwitchCamera = menu.add("Switch camera");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
String toastMessage = "";
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemSwitchCamera) {
mOpenCvCameraView.setVisibility(SurfaceView.GONE);
mIsJavaCamera = !mIsJavaCamera;
if (mIsJavaCamera) {
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
toastMesage = "Java Camera";
} else {
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);
toastMesage = "Native Camera";
}
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.enableView();
Toast toast = Toast.makeText(this, toastMessage, Toast.LENGTH_LONG);
toast.show();
}
return true;
}
public void onCameraViewStarted(int width, int height) {
}
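For reference, every Java sample touched by this commit converges on the same pattern: declare the field as the abstract CameraBridgeViewBase, cast the findViewById() result to it, register a CvCameraViewListener, and enable the view from a BaseLoaderCallback once the OpenCV manager reports success. Below is a minimal sketch of that wiring, not taken from this commit: the activity name, layout name, and view id are hypothetical, and OPENCV_VERSION_2_4_2 is assumed as the initAsync() version string.

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import android.app.Activity;
import android.os.Bundle;

public class MinimalCameraActivity extends Activity implements CvCameraViewListener {
    private CameraBridgeViewBase mOpenCvCameraView;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS)
                mOpenCvCameraView.enableView(); // start the preview only once OpenCV is loaded
            else
                super.onManagerConnected(status);
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.minimal_surface_view); // hypothetical layout containing a JavaCameraView
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.minimal_surface_view); // hypothetical id
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mLoaderCallback); // assumed version string
    }

    @Override
    public void onPause() {
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView(); // release the camera when leaving the foreground
        super.onPause();
    }

    public void onCameraViewStarted(int width, int height) { }

    public void onCameraViewStopped() { }

    public Mat onCameraFrame(Mat inputFrame) {
        return inputFrame; // pass the preview frame through unchanged
    }
}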

View File: Sample2NativeCamera.java

@@ -8,7 +8,7 @@ import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.android.NativeCameraView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
@@ -22,20 +22,20 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample2NativeCamera extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
private static int viewMode = VIEW_MODE_RGBA;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private Mat mRgba;
private Mat mIntermediateMat;
private NativeCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -68,7 +68,7 @@ public class Sample2NativeCamera extends Activity implements CvCameraViewListene
setContentView(R.layout.tutorial2_surface_view);
mOpenCvCameraView = (NativeCameraView)findViewById(R.id.tutorial2_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: Sample3Native.java

@@ -5,7 +5,7 @@ import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
@@ -20,7 +20,7 @@ public class Sample3Native extends Activity implements CvCameraViewListener {
private Mat mRgba;
private Mat mGrayMat;
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -57,7 +57,7 @@ public class Sample3Native extends Activity implements CvCameraViewListener {
setContentView(R.layout.tutorial3_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: Sample4Mixed.java

@@ -5,7 +5,7 @@ import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
@@ -19,7 +19,7 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample4Mixed extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private static final int VIEW_MODE_RGBA = 0;
private static final int VIEW_MODE_GRAY = 1;
@@ -36,7 +36,7 @@ public class Sample4Mixed extends Activity implements CvCameraViewListener {
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private JavaCameraView mOpenCvCameraView;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
@@ -73,7 +73,7 @@ public class Sample4Mixed extends Activity implements CvCameraViewListener {
setContentView(R.layout.tutorial4_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}

View File: retina_tutorial.cpp

@@ -0,0 +1,146 @@
//============================================================================
// Name : retina_tutorial.cpp
// Author : Alexandre Benoit, benoit.alexandre.vision@gmail.com
// Version : 0.1
// Copyright : LISTIC/GIPSA French Labs, july 2012
// Description : Gipsa/LISTIC Labs retina demo in C++, Ansi-style
//============================================================================
#include <iostream>
#include <cstring>
#include "opencv2/opencv.hpp"
static void help(std::string errorMessage)
{
std::cout<<"Program init error : "<<errorMessage<<std::endl;
std::cout<<"\nProgram call procedure : retinaDemo [processing mode] [Optional : media target] [Optional LAST parameter: \"log\" to activate retina log sampling]"<<std::endl;
std::cout<<"\t[processing mode] :"<<std::endl;
std::cout<<"\t -image : for still image processing"<<std::endl;
std::cout<<"\t -video : for video stream processing"<<std::endl;
std::cout<<"\t[Optional : media target] :"<<std::endl;
std::cout<<"\t if processing an image or video file, then, specify the path and filename of the target to process"<<std::endl;
std::cout<<"\t leave empty if processing video stream coming from a connected video device"<<std::endl;
std::cout<<"\t[Optional : activate retina log sampling] : an optional last parameter can be specified for retina spatial log sampling"<<std::endl;
std::cout<<"\t set \"log\" without quotes to activate this sampling, output frame size will be divided by 4"<<std::endl;
std::cout<<"\nExamples:"<<std::endl;
std::cout<<"\t-Image processing : ./retinaDemo -image lena.jpg"<<std::endl;
std::cout<<"\t-Image processing with log sampling : ./retinaDemo -image lena.jpg log"<<std::endl;
std::cout<<"\t-Video processing : ./retinaDemo -video myMovie.mp4"<<std::endl;
std::cout<<"\t-Live video processing : ./retinaDemo -video"<<std::endl;
std::cout<<"\nPlease start again with new parameters"<<std::endl;
std::cout<<"****************************************************"<<std::endl;
std::cout<<" NOTE : this program generates the default retina parameters file 'RetinaDefaultParameters.xml'"<<std::endl;
std::cout<<" => you can use this to fine tune parameters and load them if you save to file 'RetinaSpecificParameters.xml'"<<std::endl;
}
int main(int argc, char* argv[]) {
// welcome message
std::cout<<"****************************************************"<<std::endl;
std::cout<<"* Retina demonstration : demonstrates the use of is a wrapper class of the Gipsa/Listic Labs retina model."<<std::endl;
std::cout<<"* This demo will try to load the file 'RetinaSpecificParameters.xml' (if exists).\nTo create it, copy the autogenerated template 'RetinaDefaultParameters.xml'.\nThen twaek it with your own retina parameters."<<std::endl;
// basic input arguments checking
if (argc<2)
{
help("bad number of parameter");
return -1;
}
bool useLogSampling = !strcmp(argv[argc-1], "log"); // check if user wants retina log sampling processing
std::string inputMediaType=argv[1];
// declare the retina input buffer... it will be fed differently depending on the input media
cv::Mat inputFrame;
cv::VideoCapture videoCapture; // in case a video media is used, its manager is declared here
//////////////////////////////////////////////////////////////////////////////
// checking input media type (still image, video file, live video acquisition)
if (!strcmp(inputMediaType.c_str(), "-image") && argc >= 3)
{
std::cout<<"RetinaDemo: processing image "<<argv[2]<<std::endl;
// image processing case
inputFrame = cv::imread(std::string(argv[2]), 1); // load image in RGB mode
}else
if (!strcmp(inputMediaType.c_str(), "-video"))
{
if (argc == 2 || (argc == 3 && useLogSampling)) // attempt to grab images from a video capture device
{
videoCapture.open(0);
}else// attempt to grab images from a video filestream
{
std::cout<<"RetinaDemo: processing video stream "<<argv[2]<<std::endl;
videoCapture.open(argv[2]);
}
// grab a first frame to check if everything is ok
videoCapture>>inputFrame;
}else
{
// bad command parameter
help("bad command parameter");
return -1;
}
if (inputFrame.empty())
{
help("Input media could not be loaded, aborting");
return -1;
}
//////////////////////////////////////////////////////////////////////////////
// Program start in a try/catch safety context (Retina may throw errors)
try
{
// create a retina instance with default parameters setup, uncomment the initialisation you want to test
cv::Ptr<cv::Retina> myRetina;
// if the last parameter is 'log', activate log sampling (favours foveal vision and subsamples peripheral vision)
if (useLogSampling)
{
myRetina = new cv::Retina(inputFrame.size(), true, cv::RETINA_COLOR_BAYER, true, 2.0, 10.0);
}
else// -> else allocate "classical" retina :
myRetina = new cv::Retina(inputFrame.size());
// save the default retina parameters file so you can inspect it, modify it and reload it using the "setup" method
myRetina->write("RetinaDefaultParameters.xml");
// load parameters if file exists
myRetina->setup("RetinaSpecificParameters.xml");
// reset all retina buffers (imagine you close your eyes for a long time)
myRetina->clearBuffers();
// declare retina output buffers
cv::Mat retinaOutput_parvo;
cv::Mat retinaOutput_magno;
// processing loop with no stop condition
while(true)
{
// if using a video stream, grab a new frame; otherwise the input remains the same
if (videoCapture.isOpened())
videoCapture>>inputFrame;
// run retina filter on the loaded input frame
myRetina->run(inputFrame);
// Retrieve and display retina output
myRetina->getParvo(retinaOutput_parvo);
myRetina->getMagno(retinaOutput_magno);
cv::imshow("retina input", inputFrame);
cv::imshow("Retina Parvo", retinaOutput_parvo);
cv::imshow("Retina Magno", retinaOutput_magno);
cv::waitKey(10);
}
}catch(const cv::Exception &e)
{
std::cerr<<"Error using Retina : "<<e.what()<<std::endl;
}
// Program end message
std::cout<<"Retina demo end"<<std::endl;
return 0;
}