Add CMake-based Android build scripts and a reusable Android library. Samples to follow; not yet completely tested.

Ethan Rublee
2010-09-22 01:47:11 +00:00
parent bd829b8f7c
commit fbac20273c
66 changed files with 13419 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv" android:versionCode="1"
android:versionName="0.1">
<application android:debuggable="true">
<!-- The activity tag here is currently not used. The main project TicTacToeMain
must currently redefine the activities to be used from the libraries.
However later the tools will pick up the activities from here and merge them
automatically, so it's best to define your activities here like for any
regular Android project.
-->
<activity android:name="com.opencv.OpenCV" />
</application>
<!-- set the opengl version
<uses-feature android:glEsVersion="0x00020000" />-->
<uses-sdk android:minSdkVersion="7" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
</manifest>

View File

@@ -0,0 +1,75 @@
# The path to the NDK; requires the CrystaX r4 NDK for now, due to its support
# for the standard C++ library
# Load the environment from a local make file
LOCAL_ENV_MK=local.env.mk
ifneq "$(wildcard $(LOCAL_ENV_MK))" ""
include $(LOCAL_ENV_MK)
else
$(shell cp sample.$(LOCAL_ENV_MK) $(LOCAL_ENV_MK))
$(info ERROR: local environment not set up! try:)
$(info gedit $(LOCAL_ENV_MK))
$(info Please set up $(LOCAL_ENV_MK) - a default was just created)
include $(LOCAL_ENV_MK)
endif
ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
# The name of the native library
LIBNAME = libandroid-opencv.so
# Find all the C++ sources in the native folder
SOURCES = $(wildcard jni/*.cpp)
HEADERS = $(wildcard jni/*.h)
SWIG_IS = $(wildcard jni/*.i)
ANDROID_MKS = $(wildcard jni/*.mk)
SWIG_MAIN = jni/android-cv.i
SWIG_JAVA_DIR = src/com/opencv/jni
SWIG_JAVA_OUT = $(wildcard $(SWIG_JAVA_DIR)/*.java)
SWIG_C_DIR = jni/gen
SWIG_C_OUT = $(SWIG_C_DIR)/android_cv_wrap.cpp
# The real native library stripped of symbols
LIB = libs/armeabi-v7a/$(LIBNAME) libs/armeabi/$(LIBNAME)
all: $(LIB)

#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) V=$(V) $(NDK_FLAGS)

#this creates the swig wrappers
$(SWIG_C_OUT): $(SWIG_IS)
	make clean-swig &&\
	mkdir -p $(SWIG_C_DIR) &&\
	mkdir -p $(SWIG_JAVA_DIR) &&\
	swig -java -c++ -package "com.opencv.jni" \
		-outdir $(SWIG_JAVA_DIR) \
		-o $(SWIG_C_OUT) $(SWIG_MAIN)

#clean targets
.PHONY: clean clean-swig cleanall nogdb

nogdb: $(LIB)
	rm -f libs/armeabi*/gdb*

#this deletes the generated swig java and the generated c wrapper
clean-swig:
	rm -f $(SWIG_JAVA_OUT) $(SWIG_C_OUT)

#does clean-swig and then uses the ndk-build clean
clean: clean-swig
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) clean V=$(V) $(NDK_FLAGS)

View File

@@ -0,0 +1,18 @@
android-opencv

This is an example of an Android library project with reusable code that
exposes part of OpenCV to Android. In particular, it provides a native camera
interface for loading live video frames from the Android camera into native
OpenCV functions (as cv::Mat's).

To build, make sure you have swig and the CrystaX NDK in your path, then:

cp sample.local.env.mk local.env.mk
make

That should work. More later on how to build an actual Android project against
this library - see http://code.google.com/p/android-opencv for details.
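As a rough sketch of how an application might consume this library once the samples land (untested and illustrative only): preview frames arrive through the NativeProcessor.PoolCallback interface and can be handed to the swig-wrapped Calibration class by index. The class name ChessboardCallback below is invented for illustration; PoolCallback, image_pool and Calibration.detectAndDrawChessboard are the interfaces added by this commit.

// Hypothetical usage sketch (not part of this commit): feed preview frames,
// delivered via NativeProcessor.PoolCallback, to the native Calibration wrapper,
// which looks the frame up in the native image_pool by the same index.
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.Calibration;
import com.opencv.jni.image_pool;

public class ChessboardCallback implements PoolCallback {
	private final Calibration calibration = new Calibration();

	@Override
	public void process(int idx, image_pool pool, long timestamp,
			NativeProcessor nativeProcessor) {
		// idx/pool identify a cv::Mat held on the native side;
		// detectAndDrawChessboard reads it and draws any found corners into it.
		calibration.detectAndDrawChessboard(idx, pool);
	}
}

An activity would push such a callback onto the NativePreviewer's callback stack in onResume, the same way OpenCV.java below pushes the GL viewer's draw callback.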

View File

@@ -0,0 +1,12 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library=true
# Project target.
target=android-7

View File

@@ -0,0 +1,22 @@
# date: Summer, 2010
# author: Ethan Rublee
# contact: ethan.rublee@gmail.com
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#define OPENCV_INCLUDES and OPENCV_LIBS
include $(OPENCV_CONFIG)
LOCAL_LDLIBS += $(OPENCV_LIBS) -llog -lGLESv2
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
LOCAL_MODULE := android-opencv
LOCAL_SRC_FILES := gen/android_cv_wrap.cpp image_pool.cpp \
yuv420sp2rgb.c gl_code.cpp Calibration.cpp
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,4 @@
# ARMv7 is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
APP_PROJECT_PATH := $(PROJECT_PATH)

View File

@@ -0,0 +1,261 @@
/*
* Calibration.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Calibration.h"
#include <sys/stat.h>
using namespace cv;
Calibration::Calibration():patternsize(6,8)
{
}
Calibration::~Calibration() {
}
namespace
{
double computeReprojectionErrors(
const vector<vector<Point3f> >& objectPoints, const vector<vector<
Point2f> >& imagePoints, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const Mat& cameraMatrix,
const Mat& distCoeffs, vector<float>& perViewErrors) {
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int) objectPoints.size(); i++) {
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix,
distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1 );
int n = (int) objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
void calcChessboardCorners(Size boardSize, float squareSize, vector<
Point3f>& corners) {
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i
* squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
bool runCalibration(vector<vector<Point2f> > imagePoints,
Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr) {
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(5, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET ) && checkRange(
distCoeffs, CV_CHECK_QUIET );
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs,
tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize,
float squareSize, float aspectRatio, int flags,
const Mat& cameraMatrix, const Mat& distCoeffs,
const vector<Mat>& rvecs, const vector<Mat>& tvecs,
const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr) {
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int) std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0) {
sprintf(buf, "flags: %s%s%s%s",
flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess"
: "",
flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point"
: "",
flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty()) {
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++) {
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(
*fs,
"a set of 6-tuples (rotation vector + translation vector) for each view",
0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty()) {
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++) {
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
}//anon namespace
bool Calibration::detectAndDrawChessboard(int idx,image_pool* pool) {
Mat grey;
pool->getGrey(idx, grey);
if (grey.empty())
return false;
vector<Point2f> corners;
IplImage iplgrey = grey;
if (!cvCheckChessboard(&iplgrey, patternsize))
return false;
bool patternfound = findChessboardCorners(grey, patternsize, corners);
Mat * img = pool->getImage(idx);
if (corners.size() < 1)
return false;
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(
CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
if(patternfound)
imagepoints.push_back(corners);
drawChessboardCorners(*img, patternsize, Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Calibration::drawText(int i, image_pool* pool, const char* ctext){
// Use "y" to show that the baseLine is about
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = .5;
Mat img = *pool->getImage(i);
int baseline=0;
Size textSize = getTextSize(text, fontFace,
fontScale, thickness, &baseline);
baseline += thickness;
// center the text
Point textOrg((img.cols - textSize.width)/2,
(img.rows - textSize.height *2));
// draw the box
rectangle(img, textOrg + Point(0, baseline),
textOrg + Point(textSize.width, -textSize.height),
Scalar(0,0,255),CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness),
textOrg + Point(textSize.width, thickness),
Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale,
Scalar::all(255), thickness, 8);
}
void Calibration::resetChess() {
imagepoints.clear();
}
void Calibration::calibrate(const char* filename) {
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f,
flags, K, distortion, rvecs, tvecs, reprojErrs, totalAvgErr);
if (ok){
saveCameraParams(filename, imgsize, patternsize, 1.f,
1.f, flags, K, distortion, writeExtrinsics ? rvecs
: vector<Mat> (), writeExtrinsics ? tvecs
: vector<Mat> (), writeExtrinsics ? reprojErrs
: vector<float> (), writePoints ? imagepoints : vector<
vector<Point2f> > (), totalAvgErr);
}
}
int Calibration::getNumberDetectedChessboards() {
return imagepoints.size();
}

View File

@@ -0,0 +1,59 @@
/*
* Calibration.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Calibration {
std::vector<cv::KeyPoint> keypoints;
std::vector<std::vector<cv::Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
public:
cv::Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};
#endif /* PROCESSOR_H_ */

View File

@@ -0,0 +1,28 @@
/*
* include the headers required by the generated cpp code
*/
%{
#include "Calibration.h"
#include "image_pool.h"
using namespace cv;
%}
class Calibration {
public:
Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};

View File

@@ -0,0 +1,6 @@
%feature("director") Mat;
%feature("director") glcamera;
%feature("director") image_pool;
%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";

View File

@@ -0,0 +1,57 @@
/* File : android-cv.i
import this file, and make sure to call System.loadLibrary("android-opencv")
before loading any lib that depends on this.
*/
%module opencv
%{
#include "image_pool.h"
#include "glcamera.h"
using namespace cv;
%}
#ifndef SWIGIMPORTED
%include "various.i"
%include "typemaps.i"
%include "arrays_java.i"
#endif
/**
* Make all the swig pointers public, so that
* external libraries can refer to these, otherwise they default to
* protected...
*/
%typemap(javabody) SWIGTYPE %{
private long swigCPtr;
protected boolean swigCMemOwn;
public $javaclassname(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr($javaclassname obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
%include "cv.i"
%include "glcamera.i"
%include "image_pool.i"
%include "Calibration.i"

View File

@@ -0,0 +1,165 @@
/*
* These typemaps provide support for sharing data between JNI and JVM code
* using NIO direct buffers. It is the responsibility of the JVM code to
* allocate a direct buffer of the appropriate size.
*
* Example use:
* Wrapping:
* %include "buffers.i"
* %apply int* BUFF {int* buffer}
* int read_foo_int(int* buffer);
*
* Java:
* IntBuffer buffer = ByteBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
* Example.read_foo_int(buffer);
*
* The following typemaps are defined:
* void* BUFF <--> java.nio.Buffer
* char* BUFF <--> java.nio.ByteBuffer
* char* CBUFF <--> java.nio.CharBuffer
* unsigned char* INBUFF/OUTBUFF <--> java.nio.ShortBuffer
* short* BUFF <--> java.nio.ShortBuffer
* unsigned short* INBUFF/OUTBUFF <--> java.nio.IntBuffer
* int* BUFF <--> java.nio.IntBuffer
* unsigned int* INBUFF/OUTBUFF <--> java.nio.LongBuffer
* long* BUFF <--> java.nio.IntBuffer
* unsigned long* INBUFF/OUTBUFF <--> java.nio.LongBuffer
* long long* BUFF <--> java.nio.LongBuffer
* float* BUFF <--> java.nio.FloatBuffer
* double* BUFF <--> java.nio.DoubleBuffer
*
* Note the potential for data loss in the conversion from
* the C type 'unsigned long' to the signed Java long type.
* Hopefully, I can implement a workaround with BigNumber in the future.
*
* The use of ByteBuffer vs CharBuffer for the char* type should
* depend on the type of data. In general you'll probably
* want to use CharBuffer for actual text data.
*/
/*
* This macro is used to define the nio buffers for primitive types.
*/
%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
%typemap(jni) CTYPE* LABEL "jobject"
%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
%typemap(javain,
pre=" assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
%typemap(javaout) CTYPE* LABEL {
return $jnicall;
}
%typemap(in) CTYPE* LABEL {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* LABEL {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* LABEL ""
%enddef
NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
#undef NIO_BUFFER_TYPEMAP
%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
%typemap(jni) CTYPE* INBUFF "jobject"
%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
%typemap(javaout) CTYPE* INBUFF {
return $jnicall;
}
%typemap(in) CTYPE* INBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* INBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* INBUFF ""
%typemap(jni) CTYPE* OUTBUFF "jobject"
%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
post=" UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
%typemap(javaout) CTYPE* OUTBUFF {
return $jnicall;
}
%typemap(in) CTYPE* OUTBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* OUTBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* OUTBUFF ""
%enddef
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
/*
%typemap(jni) unsigned char* BUFF "jobject"
%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
post=" permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
%typemap(javaout) unsigned char* BUFF {
return $jnicall;
}
%typemap(in) unsigned char* BUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) unsigned char* BUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) unsigned char* BUFF ""
*/
#undef UNSIGNED_NIO_BUFFER_TYPEMAP

View File

@@ -0,0 +1,59 @@
%typemap(javaimports) Mat "
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/"
%typemap(javaimports) Size "
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/"
class Mat {
public:
%immutable;
int rows;
int cols;
};
class Size{
public:
Size(int width,int height);
int width;
int height;
};
template<class _Tp> class Ptr
{
public:
//! empty constructor
Ptr();
//! take ownership of the pointer. The associated reference counter is allocated and set to 1
Ptr(_Tp* _obj);
//! calls release()
~Ptr();
//! copy constructor. Copies the members and calls addref()
Ptr(const Ptr& ptr);
//! copy operator. Calls ptr.addref() and release() before copying the members
// Ptr& operator = (const Ptr& ptr);
//! increments the reference counter
void addref();
//! decrements the reference counter. If it reaches 0, delete_obj() is called
void release();
//! deletes the object. Override if needed
void delete_obj();
//! returns true iff obj==NULL
bool empty() const;
//! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
_Tp* operator -> ();
// const _Tp* operator -> () const;
// operator _Tp* ();
// operator const _Tp*() const;
protected:
_Tp* obj; //< the object pointer.
int* refcount; //< the associated reference counter
};
%template(PtrMat) Ptr<Mat>;

View File

@@ -0,0 +1,309 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// OpenGL ES 2.0 code
#include <jni.h>
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <stdint.h>
#include "glcamera.h"
#include "image_pool.h"
using namespace cv;
#define LOG_TAG "libandroid-opencv"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
static void printGLString(const char *name, GLenum s) {
const char *v = (const char *) glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char* op) {
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] = "attribute vec4 a_position; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
static const char gFragmentShader[] =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
const GLfloat gTriangleVertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
GLubyte testpixels[4 * 3] = { 255, 0, 0, // Red
0, 255, 0, // Green
0, 0, 255, // Blue
255, 255, 0 // Yellow
};
GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels,
int width, int height, int channels) {
// Bind the texture
glActiveTexture(GL_TEXTURE0);
checkGlError("glActiveTexture");
// Bind the texture object
glBindTexture(GL_TEXTURE_2D, _textureid);
checkGlError("glBindTexture");
GLenum format;
switch (channels) {
case 3:
format = GL_RGB;
break;
case 1:
format = GL_LUMINANCE;
break;
case 4:
format = GL_RGBA;
break;
}
// Load the texture
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format,
GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
// Set the filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST );
return _textureid;
}
GLuint glcamera::loadShader(GLenum shaderType, const char* pSource) {
GLuint shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char* buf = (char*) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint glcamera::createProgram(const char* pVertexSource,
const char* pFragmentSource) {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader) {
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader) {
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char* buf = (char*) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
//GLuint textureID;
bool glcamera::setupGraphics(int w, int h) {
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
LOGI("setupGraphics(%d, %d)", w, h);
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram) {
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
gvSamplerHandle = glGetUniformLocation(gProgram, "s_texture"); // the sampler is a uniform, not an attribute
// Use tightly packed data
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Generate a texture object
glGenTextures(1, &textureID);
textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3);
checkGlError("glGetAttribLocation");
LOGI("glGetAttribLocation(\"vPosition\") = %d\n",
gvPositionHandle);
glViewport(0, 0, w, h);
checkGlError("glViewport");
return true;
}
void glcamera::renderFrame() {
GLfloat vVertices[] = { -1.0f, 1.0f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
glUseProgram(gProgram);
checkGlError("glUseProgram");
// Load the vertex position
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride,
vVertices);
// Load the texture coordinate
glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride,
&vVertices[3]);
glEnableVertexAttribArray(gvPositionHandle);
glEnableVertexAttribArray(gvTexCoordHandle);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
// Set the sampler texture unit to 0
glUniform1i(gvSamplerHandle, 0);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
//checkGlError("glVertexAttribPointer");
//glEnableVertexAttribArray(gvPositionHandle);
//checkGlError("glEnableVertexAttribArray");
//glDrawArrays(GL_TRIANGLES, 0, 3);
//checkGlError("glDrawArrays");
}
void glcamera::init(int width, int height) {
newimage = false;
nimg = Mat();
setupGraphics(width, height);
}
void glcamera::step() {
if (newimage && !nimg.empty()) {
textureID = createSimpleTexture2D(textureID,
nimg.ptr<unsigned char> (0), nimg.rows, nimg.cols,
nimg.channels());
newimage = false;
}
renderFrame();
}
void glcamera::setTextureImage(Ptr<Mat> img) {
//int p2 = (int)(std::log(img->size().width)/0.69315);
int sz = 256;//std::pow(2,p2);
Size size(sz, sz);
resize(*img, nimg, size, 0, 0, cv::INTER_NEAREST);
newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool) {
Ptr<Mat> img = pool->getImage(idx);
if (img.empty())
return; //no image at input_idx!
setTextureImage(img);
}
glcamera::glcamera():newimage(false) {
LOGI("glcamera constructor");
}
glcamera::~glcamera() {
LOGI("glcamera destructor");
}

View File

@@ -0,0 +1,40 @@
#ifndef GLCAMERA_H_
#define GLCAMERA_H_
#include <opencv2/core/core.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include "image_pool.h"
class glcamera {
Mat nimg;
bool newimage;
GLuint textureID;
GLuint gProgram;
GLuint gvPositionHandle;
GLuint gvTexCoordHandle;
GLuint gvSamplerHandle;
public:
glcamera();
~glcamera();
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void setTextureImage(Ptr<Mat> img);
private:
GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width,
int height, int channels);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint
createProgram(const char* pVertexSource,
const char* pFragmentSource);
bool setupGraphics(int w, int h);
void renderFrame();
};
#endif

View File

@@ -0,0 +1,43 @@
%typemap(javaimports) glcamera "
/** a class for doing the native rendering of images
this class renders using GL2 es, the native ndk version
This class is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the ndk samples
*/"
%javamethodmodifiers glcamera::init"
/** should be called from onSurfaceChanged by the GLSurfaceView that is using this
* as the drawing engine
* @param width the width of the surface view that this will be drawing to
* @param height the height of the surface view that this will be drawing to
*
*/
public";
%javamethodmodifiers glcamera::step"
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public";
%javamethodmodifiers glcamera::drawMatToGL"
/** copies an image from a pool and queues it for drawing in opengl.
* this does transformation into power of two texture sizes
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public";
class glcamera {
public:
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
};
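The comments above describe the intended call sites; as a minimal, hypothetical sketch (GlcameraRenderer is an invented name, and GL2CameraViewer later in this commit is the real consumer), a GLSurfaceView.Renderer driving the swig-generated glcamera class might look like this:

// Illustrative sketch only; assumes the com.opencv.jni.glcamera proxy generated by swig.
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLSurfaceView;
import com.opencv.jni.glcamera;

class GlcameraRenderer implements GLSurfaceView.Renderer {
	private final glcamera mGlCamera = new glcamera();

	public void onSurfaceCreated(GL10 gl, EGLConfig config) {
		// nothing to do; setup happens once the surface size is known
	}

	public void onSurfaceChanged(GL10 gl, int width, int height) {
		mGlCamera.init(width, height); // compiles shaders, creates the texture, sets the viewport
	}

	public void onDrawFrame(GL10 gl) {
		mGlCamera.step(); // uploads any queued image and renders the textured quad
	}
}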

View File

@@ -0,0 +1,86 @@
#include "image_pool.h"
#include "yuv420sp2rgb.h"
#include <android/log.h>
#include <opencv2/imgproc/imgproc.hpp>
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env,
jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer,
jint jidx, jint jwidth, jint jheight, jboolean jgrey) {
image_pool *pool = (image_pool *) ppool;
Ptr<Mat> mat = pool->getYUV(jidx);
if (mat.empty() || mat->cols != jwidth || mat->rows != jheight * 2) {
//pool->deleteGrey(jidx);
mat = new Mat(jheight * 2, jwidth, CV_8UC1);
}
jsize sz = env->GetArrayLength(jbuffer);
uchar* buff = mat->ptr<uchar> (0);
env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff);
pool->addYUVMat(jidx, mat);
Ptr<Mat> color = pool->getImage(jidx);
if (color.empty() || color->cols != jwidth || color->rows != jheight) {
//pool->deleteImage(jidx);
color = new Mat(jheight, jwidth, CV_8UC3);
}
if (!jgrey) {
//doesn't work unfortunately..
//cvtColor(*mat,*color, CV_YCrCb2RGB);
color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight,
color->ptr<uchar> (0), false);
}
if (jgrey) {
Mat grey;
pool->getGrey(jidx, grey);
cvtColor(grey, *color, CV_GRAY2RGB);
}
pool->addImage(jidx, color);
}
image_pool::image_pool() {
}
image_pool::~image_pool() {
__android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
}
cv::Ptr<Mat> image_pool::getImage(int i) {
return imagesmap[i];
}
void image_pool::getGrey(int i, Mat & grey) {
cv::Ptr<Mat> tm = yuvImagesMap[i];
if (tm.empty())
return;
grey = (*tm)(Range(0, tm->rows / 2), Range::all());
}
cv::Ptr<Mat> image_pool::getYUV(int i) {
return yuvImagesMap[i];
}
void image_pool::addYUVMat(int i, cv::Ptr<Mat> mat) {
yuvImagesMap[i] = mat;
}
void image_pool::addImage(int i, cv::Ptr<Mat> mat) {
imagesmap[i] = mat;
}

View File

@@ -0,0 +1,62 @@
#ifndef IMAGE_POOL_H
#define IMAGE_POOL_H
#include <opencv2/core/core.hpp>
#include <jni.h>
#include <map>
using namespace cv;
#ifdef __cplusplus
extern "C" {
#endif
//
//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
// JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool
(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint, jint, jint, jboolean);
#ifdef __cplusplus
}
#endif
//bool yuv2mat2(char *data, int size, int width, int height, bool grey, Mat& mat);
class image_pool {
std::map<int, Ptr< Mat> > imagesmap;
std::map<int, Ptr< Mat> > yuvImagesMap;
//uchar * mbuffer;
//int length;
public:
image_pool();
~image_pool();
cv::Ptr<Mat> getImage(int i);
void getGrey(int i, Mat & grey);
cv::Ptr<Mat> getYUV(int i);
int getCount(){
return imagesmap.size();
}
void addImage(int i, Ptr< Mat> mat);
/** this function stores the given matrix in the yuvImagesMap. Also,
* after this call getGrey will work, as the grey image is just the top
* half of the YUV mat.
*
* \param i index to store yuv image at
* \param mat the yuv matrix to store
*/
void addYUVMat(int i, Ptr< Mat> mat);
int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
void getBitmap(int * outintarray, int size, int idx);
};
#endif

View File

@@ -0,0 +1,58 @@
%typemap(javaimports) image_pool "
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
%javamethodmodifiers image_pool::getImage"
/** gets a pointer to a stored image, by an index. If the index is new, returns a null pointer
* @param idx the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, null pointer if the given idx is novel
*/
public";
%javamethodmodifiers image_pool::deleteImage"
/** deletes the image from the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%javamethodmodifiers addYUVtoPool"
/** adds a YUV420sp frame to the pool at the given index
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%include "various.i"
%apply (char* BYTE) { (char *data)}; //byte[] to char*
%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
%feature("director") image_pool;
class image_pool {
public:
image_pool();
~image_pool();
Ptr<Mat> getImage(int i);
void addImage(int i, Ptr< Mat> mat);
};
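To make the index contract above concrete from the Java side, here is a short hedged sketch (PoolProbe is an invented name, and it assumes the PtrMat proxy that swig generates for Ptr<Mat> from cv.i):

// Illustrative only: the idx handed to a PoolCallback addresses a cv::Mat slot
// in the native image_pool; an empty Ptr means nothing is stored there yet.
import com.opencv.camera.NativeProcessor;
import com.opencv.jni.PtrMat;
import com.opencv.jni.image_pool;

class PoolProbe implements NativeProcessor.PoolCallback {
	public void process(int idx, image_pool pool, long timestamp,
			NativeProcessor nativeProcessor) {
		PtrMat img = pool.getImage(idx); // Ptr<Mat> proxy; empty for a new index
		if (img.empty()) {
			return; // no image has been decoded into this slot yet
		}
		// hand idx and pool on to whatever native code consumes the frame
	}
}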

View File

@@ -0,0 +1,36 @@
/*
* int *INTARRAY typemaps.
* These are input typemaps for mapping a Java int[] array to a C int array.
* Note that as a Java array is used and thus passed by reference, the C routine
* can return data to Java via the parameter.
*
* Example usage wrapping:
* void foo(int *INTARRAY, int INTARRAYSIZE);
*
* Java usage:
* int b[] = new int[20];
* modulename.foo(b);
*/
%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
$1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0);
jsize sz = JCALL1(GetArrayLength, jenv, $input);
$2 = (int)sz;
}
%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0);
}
/* Prevent default freearg typemap from being used */
%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"

View File

@@ -0,0 +1,98 @@
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
const int bytes_per_pixel = 2;
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey)
{
int i, j;
int nR, nG, nB;
int nY, nU, nV;
unsigned char *out = buffer;
int offset = 0;
if(grey){
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
unsigned char nB = *(pY + i * width + j);
out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
}
}
}else
// YUV 4:2:0
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
nY = *(pY + i * width + j);
nV = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
nU = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
// Yuv Convert
nY -= 16;
nU -= 128;
nV -= 128;
if (nY < 0)
nY = 0;
// nR = (int)(1.164 * nY + 2.018 * nU);
// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
// nB = (int)(1.164 * nY + 1.596 * nV);
nB = (int)(1192 * nY + 2066 * nU);
nG = (int)(1192 * nY - 833 * nV - 400 * nU);
nR = (int)(1192 * nY + 1634 * nV);
nR = min(262143, max(0, nR));
nG = min(262143, max(0, nG));
nB = min(262143, max(0, nB));
nR >>= 10; nR &= 0xff;
nG >>= 10; nG &= 0xff;
nB >>= 10; nB &= 0xff;
out[offset++] = (unsigned char)nR;
out[offset++] = (unsigned char)nG;
out[offset++] = (unsigned char)nB;
//out[offset++] = 0xff; //set alpha for ARGB 8888 format
}
//offset = i * width * 3; //non power of two
//offset = i * texture_size + j;//power of two
//offset *= 3; //3 byte per pixel
//out = buffer + offset;
}
}

View File

@@ -0,0 +1,18 @@
//yuv420sp2rgb.h
#ifndef YUV420SP2RGB_H
#define YUV420SP2RGB_H
#ifdef __cplusplus
extern "C" {
#endif
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,4 @@
#location of android-opencv port of OpenCV to android
OPENCV_CONFIG=../build/android-opencv.mk
ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax

View File

@@ -0,0 +1,157 @@
package com.opencv;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.opengl.GL2CameraViewer;
public class OpenCV extends Activity {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
return super.onKeyUp(keyCode, event);
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// menu.add("Sample");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// if(item.getTitle().equals("Sample")){
// //do stuff...
// }
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
// TODO Auto-generated method stub
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(getApplication());
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 400, 300);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay on top of the video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
frame.addView(glview);
setContentView(frame);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
mPreview.onResume();
}
}

View File

@@ -0,0 +1,334 @@
package com.opencv.camera;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;
public class NativePreviewer extends SurfaceView implements
SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
SurfaceHolder mHolder;
Camera mCamera;
private NativeProcessor processor;
private int preview_width, preview_height;
private int pixelformat;
private PixelFormat pixelinfo;
public NativePreviewer(Context context, int preview_width,
int preview_height) {
super(context);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
this.preview_width = preview_width;
this.preview_height = preview_height;
processor = new NativeProcessor();
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
mCamera = Camera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
// processor = null;
mCamera = null;
mAcb = null;
mPCWB = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> pvsizes = mCamera.getParameters().getSupportedPreviewSizes();
int best_width = 1000000;
int best_height = 1000000;
for(Size x: pvsizes){
if(x.width - preview_width >= 0 && x.width <= best_width){
best_width = x.width;
best_height = x.height;
}
}
preview_width = best_width;
preview_height = best_height;
parameters.setPreviewSize(preview_width, preview_height);
mCamera.setParameters(parameters);
pixelinfo = new PixelFormat();
pixelformat = mCamera.getParameters().getPreviewFormat();
PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
Size preview_size = mCamera.getParameters().getPreviewSize();
preview_width = preview_size.width;
preview_height = preview_size.height;
int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
/ 8;
// Must call this before calling addCallbackBuffer to get all the
// reflection variables setup
initForACB();
initForPCWB();
// Use only one buffer, so that we don't preview too many frames and bog
// down the system
byte[] buffer = new byte[bufSize];
addCallbackBuffer(buffer);
setPreviewCallbackWithBuffer();
mCamera.startPreview();
postautofocus(0);
}
public void postautofocus(int delay) {
handler.postDelayed(autofocusrunner, delay);
}
Runnable autofocusrunner = new Runnable() {
@Override
public void run() {
mCamera.autoFocus(autocallback);
}
};
Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if(!success)
postautofocus(1000);
else{
Parameters params = camera.getParameters();
params.setSceneMode(Parameters.SCENE_MODE_AUTO);
camera.setParameters(params);
}
}
};
Handler handler = new Handler();
/**
* This method will list all methods of the android.hardware.Camera class,
* even the hidden ones. With the information it provides, you can use the
* same approach I took below to expose methods that were written but hidden
* in eclair
*/
private void listAllCameraMethods() {
try {
Class<?> c = Class.forName("android.hardware.Camera");
Method[] m = c.getMethods();
for (int i = 0; i < m.length; i++) {
Log.d("NativePreviewer", " method:" + m[i].toString());
}
} catch (Exception e) {
// TODO Auto-generated catch block
Log.e("NativePreviewer", e.toString());
}
}
/**
* These variables are re-used over and over by addCallbackBuffer
*/
Method mAcb;
private void initForACB() {
try {
mAcb = Class.forName("android.hardware.Camera").getMethod(
"addCallbackBuffer", byte[].class);
} catch (Exception e) {
Log
.e("NativePreviewer",
"Problem setting up for addCallbackBuffer: "
+ e.toString());
}
}
/**
* This method allows you to add a byte buffer to the queue of buffers to be
* used by preview. See:
* http://android.git.kernel.org/?p=platform/frameworks
* /base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9d
* b3d07b9620b4269ab33f78604a36327e536ce1
*
* @param b
* The buffer to register. Size should be width * height *
* bitsPerPixel / 8.
*/
private void addCallbackBuffer(byte[] b) {
try {
mAcb.invoke(mCamera, b);
} catch (Exception e) {
Log.e("NativePreviewer", "invoking addCallbackBuffer failed: "
+ e.toString());
}
}
Method mPCWB;
private void initForPCWB() {
try {
mPCWB = Class.forName("android.hardware.Camera").getMethod(
"setPreviewCallbackWithBuffer", PreviewCallback.class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for setPreviewCallbackWithBuffer: "
+ e.toString());
}
}
/**
* Use this method instead of setPreviewCallback if you want to use manually
* allocated buffers. Assumes that "this" implements Camera.PreviewCallback
*/
private void setPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method
// of Camera,
// we can now invoke it on our Camera instance, setting 'this' to be
// the
// callback handler
mPCWB.invoke(mCamera, this);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
protected void clearPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method
// of Camera,
// we can now invoke it on our Camera instance, setting 'this' to be
// the
// callback handler
mPCWB.invoke(mCamera, (PreviewCallback) null);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
Date start;
int fcount = 0;
boolean processing = false;
/**
* Demonstration of how to use onPreviewFrame. In this case I'm not
* processing the data, I'm just adding the buffer back to the buffer queue
* for re-use
*/
public void onPreviewFrame(byte[] data, Camera camera) {
if (start == null) {
start = new Date();
}
processor.post(data, preview_width, preview_height, pixelformat, System.nanoTime(),
this);
fcount++;
if (fcount % 100 == 0) {
double ms = (new Date()).getTime() - start.getTime();
Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
start = new Date();
fcount = 0;
}
}
@Override
public void onDoneNativeProcessing(byte[] buffer) {
addCallbackBuffer(buffer);
}
public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
processor.addCallbackStack(callbackstack);
}
/**This must be called when the activity pauses, in Activity.onPause
* This has the side effect of clearing the callback stack.
*
*/
public void onPause() {
addCallbackStack(null);
processor.stop();
mCamera.stopPreview();
}
public void onResume() {
processor.start();
}
}

View File

@@ -0,0 +1,241 @@
package com.opencv.camera;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.graphics.PixelFormat;
import android.util.Log;
import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;
public class NativeProcessor {
private class ProcessorThread extends Thread {
private void process(NPPostObject pobj) throws Exception {
if (pobj.format == PixelFormat.YCbCr_420_SP) {
// add as color image, because we know how to decode this
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, false);
} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
// add as gray image, because this format is not coded
// for...//TODO figure out how to decode this
// format
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, true);
} else
throw new Exception("bad pixel format!");
for (PoolCallback x : stack) {
if (interrupted()) {
throw new InterruptedException(
"Native Processor interupted while processing");
}
x.process(0, pool, pobj.timestamp, NativeProcessor.this);
}
pobj.done(); // tell the postobject that we're done doing
// all the processing.
}
@Override
public void run() {
try {
while (true) {
yield();
while(!stacklock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if (nextStack != null) {
stack = nextStack;
nextStack = null;
}
} finally {
stacklock.unlock();
}
NPPostObject pobj = null;
while(!lock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if(postobjects.isEmpty()) continue;
pobj = postobjects.removeLast();
} finally {
lock.unlock();
}
if(interrupted())throw new InterruptedException();
if(stack != null && pobj != null)
process(pobj);
}
} catch (InterruptedException e) {
Log.i("NativeProcessor",
"native processor interupted, ending now");
} catch (Exception e) {
e.printStackTrace();
} finally {
}
}
}
ProcessorThread mthread;
static public interface PoolCallback {
void process(int idx, image_pool pool,long timestamp, NativeProcessor nativeProcessor);
}
Lock stacklock = new ReentrantLock();
LinkedList<PoolCallback> nextStack;
void addCallbackStack(LinkedList<PoolCallback> stack) {
try {
while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
}
try {
nextStack = stack;
} finally {
stacklock.unlock();
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* A callback that allows the NativeProcessor to pass back the buffer when
* it has completed processing a frame.
*
* @author ethan
*
*/
static public interface NativeProcessorCallback {
/**
* Called after processing, meant to be received by the NativePreviewer,
* which reuses the byte buffer for the camera preview...
*
* @param buffer
* the buffer passed to the NativeProcessor with post.
*/
void onDoneNativeProcessing(byte[] buffer);
}
/**
* Create a NativeProcessor. The processor will not start running until
* start is called, at which point it will operate in its own thread and
* sleep until a post is called. The processor should not be started until
* an onSurfaceChange event, and should be shut down when the surface is
* destroyed by calling interrupt.
*
*/
public NativeProcessor() {
}
/**
* post is used to notify the processor that a preview frame is ready; this
* will return almost immediately. If the processor is busy, it returns false
* and the call is essentially a no-op.
*
* @param buffer
* a preview frame from the Android Camera onPreviewFrame
* callback
* @param width
* of preview frame
* @param height
* of preview frame
* @param format
* of preview frame
* @return true if the processor wasn't busy and accepted the post, false if
* the processor is still processing.
*/
public boolean post(byte[] buffer, int width, int height, int format,long timestamp,
NativeProcessorCallback callback) {
lock.lock();
try {
NPPostObject pobj = new NPPostObject(buffer, width, height,
format,timestamp, callback);
postobjects.addFirst(pobj);
} finally {
lock.unlock();
}
return true;
}
static private class NPPostObject {
public NPPostObject(byte[] buffer, int width, int height, int format, long timestamp,
NativeProcessorCallback callback) {
this.buffer = buffer;
this.width = width;
this.height = height;
this.format = format;
this.timestamp = timestamp;
this.callback = callback;
}
public void done() {
callback.onDoneNativeProcessing(buffer);
}
int width, height;
byte[] buffer;
int format;
long timestamp;
NativeProcessorCallback callback;
}
private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
private image_pool pool = new image_pool();
private final Lock lock = new ReentrantLock();
private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
void stop() {
mthread.interrupt();
try {
mthread.join();
} catch (InterruptedException e) {
Log.w("NativeProcessor","interupted while stoping " + e.getMessage());
}
mthread = null;
}
void start() {
mthread = new ProcessorThread();
mthread.start();
}
}

View File

@@ -0,0 +1,396 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opencv.opengl;
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.glcamera;
import com.opencv.jni.image_pool;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.Log;
/**
* A simple GLSurfaceView subclass that demonstrates how to perform
* OpenGL ES 2.0 rendering into a GL Surface. Note the following important
* details:
*
* - The class must use a custom context factory to enable 2.0 rendering.
* See ContextFactory class definition below.
*
* - The class must use a custom EGLConfigChooser to be able to select
* an EGLConfig that supports 2.0. This is done by providing a config
* specification to eglChooseConfig() that has the attribute
* EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
* set. See ConfigChooser class definition below.
*
* - The class must select the surface's format, then choose an EGLConfig
* that matches it exactly (with regard to red/green/blue/alpha channel
* bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
*/
public class GL2CameraViewer extends GLSurfaceView{
private static String TAG = "GL2CameraViewer";
private static final boolean DEBUG = false;
private PoolCallback poolcallback = new PoolCallback() {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor){
drawMatToGL(idx, pool);
requestRender();
}
};
public GL2CameraViewer(Context context) {
super(context);
init(false, 0, 0);
}
public GL2CameraViewer(Context context, boolean translucent, int depth, int stencil) {
super(context);
init(translucent, depth, stencil);
}
private void init(boolean translucent, int depth, int stencil) {
/* By default, GLSurfaceView() creates an RGB_565 opaque surface.
 * If we want a translucent one, we should change the surface's
 * format here. For GL surfaces, PixelFormat.TRANSLUCENT is
 * interpreted by SurfaceFlinger as any 32-bit surface with alpha.
 */
if (translucent) {
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
/* Set up the context factory for 2.0 rendering.
* See ContextFactory class definition below
*/
setEGLContextFactory(new ContextFactory());
/* We need to choose an EGLConfig that matches the format of
* our surface exactly. This is going to be done in our
* custom config chooser. See ConfigChooser class definition
* below.
*/
setEGLConfigChooser( translucent ?
new ConfigChooser(8, 8, 8, 8, depth, stencil) :
new ConfigChooser(5, 6, 5, 0, depth, stencil) );
/* Set the renderer responsible for frame rendering */
setRenderer(new Renderer());
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Log.w(TAG, "creating OpenGL ES 2.0 context");
checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("After eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
}
}
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
/* This EGL config specification is used to specify 2.0 rendering.
* We use a minimum size of 4 bits for red/green/blue, but will
* perform actual matching in chooseConfig() below.
*/
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
/* Get the number of minimally matching EGL configurations
*/
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
/* Allocate then read the array of minimally matching EGL configs
*/
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
if (DEBUG) {
printConfigs(egl, display, configs);
}
/* Now return the "best" one
*/
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
private void printConfigs(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
int numConfigs = configs.length;
Log.w(TAG, String.format("%d configurations", numConfigs));
for (int i = 0; i < numConfigs; i++) {
Log.w(TAG, String.format("Configuration %d:\n", i));
printConfig(egl, display, configs[i]);
}
}
private void printConfig(EGL10 egl, EGLDisplay display,
EGLConfig config) {
int[] attributes = {
EGL10.EGL_BUFFER_SIZE,
EGL10.EGL_ALPHA_SIZE,
EGL10.EGL_BLUE_SIZE,
EGL10.EGL_GREEN_SIZE,
EGL10.EGL_RED_SIZE,
EGL10.EGL_DEPTH_SIZE,
EGL10.EGL_STENCIL_SIZE,
EGL10.EGL_CONFIG_CAVEAT,
EGL10.EGL_CONFIG_ID,
EGL10.EGL_LEVEL,
EGL10.EGL_MAX_PBUFFER_HEIGHT,
EGL10.EGL_MAX_PBUFFER_PIXELS,
EGL10.EGL_MAX_PBUFFER_WIDTH,
EGL10.EGL_NATIVE_RENDERABLE,
EGL10.EGL_NATIVE_VISUAL_ID,
EGL10.EGL_NATIVE_VISUAL_TYPE,
0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
EGL10.EGL_SAMPLES,
EGL10.EGL_SAMPLE_BUFFERS,
EGL10.EGL_SURFACE_TYPE,
EGL10.EGL_TRANSPARENT_TYPE,
EGL10.EGL_TRANSPARENT_RED_VALUE,
EGL10.EGL_TRANSPARENT_GREEN_VALUE,
EGL10.EGL_TRANSPARENT_BLUE_VALUE,
0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
EGL10.EGL_LUMINANCE_SIZE,
EGL10.EGL_ALPHA_MASK_SIZE,
EGL10.EGL_COLOR_BUFFER_TYPE,
EGL10.EGL_RENDERABLE_TYPE,
0x3042 // EGL10.EGL_CONFORMANT
};
String[] names = {
"EGL_BUFFER_SIZE",
"EGL_ALPHA_SIZE",
"EGL_BLUE_SIZE",
"EGL_GREEN_SIZE",
"EGL_RED_SIZE",
"EGL_DEPTH_SIZE",
"EGL_STENCIL_SIZE",
"EGL_CONFIG_CAVEAT",
"EGL_CONFIG_ID",
"EGL_LEVEL",
"EGL_MAX_PBUFFER_HEIGHT",
"EGL_MAX_PBUFFER_PIXELS",
"EGL_MAX_PBUFFER_WIDTH",
"EGL_NATIVE_RENDERABLE",
"EGL_NATIVE_VISUAL_ID",
"EGL_NATIVE_VISUAL_TYPE",
"EGL_PRESERVED_RESOURCES",
"EGL_SAMPLES",
"EGL_SAMPLE_BUFFERS",
"EGL_SURFACE_TYPE",
"EGL_TRANSPARENT_TYPE",
"EGL_TRANSPARENT_RED_VALUE",
"EGL_TRANSPARENT_GREEN_VALUE",
"EGL_TRANSPARENT_BLUE_VALUE",
"EGL_BIND_TO_TEXTURE_RGB",
"EGL_BIND_TO_TEXTURE_RGBA",
"EGL_MIN_SWAP_INTERVAL",
"EGL_MAX_SWAP_INTERVAL",
"EGL_LUMINANCE_SIZE",
"EGL_ALPHA_MASK_SIZE",
"EGL_COLOR_BUFFER_TYPE",
"EGL_RENDERABLE_TYPE",
"EGL_CONFORMANT"
};
int[] value = new int[1];
for (int i = 0; i < attributes.length; i++) {
int attribute = attributes[i];
String name = names[i];
if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
} else {
// Attribute query failed; drain any pending EGL errors.
while (egl.eglGetError() != EGL10.EGL_SUCCESS);
}
}
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
glcamera mglcamera;
public void drawMatToGL(int idx, image_pool pool){
if(mglcamera != null)
mglcamera.drawMatToGL(idx, pool);
else
Log.e("android-opencv", "null glcamera!!!!");
}
private class Renderer implements GLSurfaceView.Renderer {
public void onDrawFrame(GL10 gl) {
mglcamera.step();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
mglcamera.init(width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
}
@Override
public void onPause() {
// Release the native GL camera helper while the view is paused.
mglcamera = null;
super.onPause();
}
@Override
public void onResume() {
// Recreate the native GL camera helper when the view resumes.
mglcamera = new glcamera();
super.onResume();
}
public PoolCallback getDrawCallback() {
return poolcallback;
}
}
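/*
 * Minimal hosting sketch (an assumption, not shipped with this library): a
 * plain Activity that owns the viewer and forwards the lifecycle calls that
 * every GLSurfaceView needs.
 *
 *   public class ViewerActivity extends Activity {
 *       private GL2CameraViewer glView;
 *
 *       @Override
 *       protected void onCreate(Bundle savedInstanceState) {
 *           super.onCreate(savedInstanceState);
 *           glView = new GL2CameraViewer(this, false, 0, 0);
 *           setContentView(glView);
 *       }
 *
 *       @Override
 *       protected void onPause() {
 *           super.onPause();
 *           glView.onPause();
 *       }
 *
 *       @Override
 *       protected void onResume() {
 *           super.onResume();
 *           glView.onResume();
 *       }
 *   }
 */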