Refactor Java code in the Android video capture classes: add the WebRTC license headers, rename underscore-prefixed members to camelCase, and re-wrap long lines.

Review URL: http://webrtc-codereview.appspot.com/29011

git-svn-id: http://webrtc.googlecode.com/svn/trunk@55 4adac7df-926f-26a2-2b94-8c16560cd09d
leozwang@google.com 2011-06-07 17:58:15 +00:00
parent 7a60252e4f
commit 7f43de8dc9
3 changed files with 711 additions and 719 deletions

CaptureCapabilityAndroid.java

@@ -1,7 +1,17 @@
package org.webrtc.videoengine;
public class CaptureCapabilityAndroid {
public int width = 0;
public int height = 0;
public int maxFPS = 0;
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
public class CaptureCapabilityAndroid {
public int width = 0;
public int height = 0;
public int maxFPS = 0;
}
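CaptureCapabilityAndroid is a plain data holder: VideoCaptureDeviceInfoAndroid fills one instance per supported preview size in AddDeviceInfo(), and VideoCaptureAndroid reads the chosen values back when StartCapture() configures the camera. A minimal sketch of that round trip; the class name and the CIF/15 fps values below are illustrative, not taken from any particular device:

import org.webrtc.videoengine.CaptureCapabilityAndroid;

// Illustrative only: shows how the data holder is filled and read.
// Real values come from Camera.Parameters, not from constants like these.
public class CapabilityExample {
  public static void main(String[] args) {
    CaptureCapabilityAndroid capability = new CaptureCapabilityAndroid();
    capability.width = 352;   // e.g. a supported preview width (CIF)
    capability.height = 288;  // matching preview height
    capability.maxFPS = 15;   // highest supported preview frame rate
    // A caller such as StartCapture() would pass these values on to
    // Camera.Parameters.setPreviewSize() and setPreviewFrameRate().
    System.out.println(capability.width + "x" + capability.height
        + " @ " + capability.maxFPS + " fps");
  }
}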

VideoCaptureAndroid.java

@@ -1,266 +1,262 @@
/**
*
*/
package org.webrtc.videoengine;
import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.locks.ReentrantLock;
import org.webrtc.videoengine.CaptureCapabilityAndroid;
import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
public class VideoCaptureAndroid implements
PreviewCallback, Callback{
private Camera _camera;
private AndroidVideoCaptureDevice _currentDevice=null;
public ReentrantLock _previewBufferLock = new ReentrantLock();
private int PIXEL_FORMAT = ImageFormat.NV21;
PixelFormat _pixelFormat = new PixelFormat();
private boolean _isRunning=false; // True when the C++ layer has ordered the camera to be started.
private final int _numCaptureBuffers = 3;
private int _expectedFrameSize = 0;
private int _orientation = 0;
private int _id=0;
private long _context=0; // C++ callback context variable.
private SurfaceHolder _localPreview=null;
private boolean _ownsBuffers=false; // True if this class owns the preview video buffers.
//Logging
private static int LOGLEVEL = 0; // Set this to 2 for VERBOSE logging. 1 for DEBUG
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
CaptureCapabilityAndroid _currentCapability=null;
public static void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid)
{
if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid");
captureAndroid.StopCapture();
captureAndroid._camera.release();
captureAndroid._camera=null;
captureAndroid._context=0;
if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended");
}
public VideoCaptureAndroid(int id, long context,Camera camera,AndroidVideoCaptureDevice device)
{
_id=id;
_context=context;
_camera=camera;
_currentDevice=device;
}
public int StartCapture(int width, int height, int frameRate)
{
if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width + " height " + height +" frame rate " + frameRate);
try
{
if (_camera == null)
{
Log.e("*WEBRTC*",String.format(Locale.US,"Camera not initialized %d",_id));
return -1;
}
_currentCapability=new CaptureCapabilityAndroid();
_currentCapability.width=width;
_currentCapability.height=height;
_currentCapability.maxFPS=frameRate;
PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, _pixelFormat);
Camera.Parameters parameters = _camera.getParameters();
parameters.setPreviewSize(_currentCapability.width, _currentCapability.height);
parameters.setPreviewFormat(PIXEL_FORMAT );
parameters.setPreviewFrameRate(_currentCapability.maxFPS);
_camera.setParameters(parameters);
_localPreview=ViERenderer.GetLocalRenderer(); // Get the local preview SurfaceHolder from the static render class
if(_localPreview!=null)
{
_localPreview.addCallback(this);
}
int bufSize = width * height * _pixelFormat.bitsPerPixel / 8;
if(android.os.Build.VERSION.SDK_INT>=7)
{
//According to Doc addCallbackBuffer belongs to API level 8. But it seems like it works on Android 2.1 as well.
//At least SE X10 and Milestone
byte[] buffer = null;
for (int i = 0; i < _numCaptureBuffers; i++)
{
buffer = new byte[bufSize];
_camera.addCallbackBuffer(buffer);
}
_camera.setPreviewCallbackWithBuffer(this);
_ownsBuffers=true;
}
else
{
_camera.setPreviewCallback(this);
}
_camera.startPreview();
_previewBufferLock.lock();
_expectedFrameSize = bufSize;
_isRunning=true;
_previewBufferLock.unlock();
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to start camera");
return -1;
}
return 0;
}
public int StopCapture()
{
if(DEBUG) Log.d("*WEBRTC*", "StopCapture");
try
{
_previewBufferLock.lock();
_isRunning=false;
_previewBufferLock.unlock();
_camera.stopPreview();
if(android.os.Build.VERSION.SDK_INT>7)
{
_camera.setPreviewCallbackWithBuffer(null);
}
else
{
_camera.setPreviewCallback(null);
}
} catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to stop camera");
return -1;
}
if(DEBUG) Log.d("*WEBRTC*", "StopCapture ended");
return 0;
}
native void ProvideCameraFrame(byte[] data,int length, long captureObject);
public void onPreviewFrame(byte[] data, Camera camera) {
_previewBufferLock.lock();
if(VERBOSE) Log.v("*WEBRTC*",String.format(Locale.US,"preview frame length %d context %x",data.length,_context));
if(_isRunning)
{
// If StartCapture has been called but not StopCapture
// Call the C++ layer with the captured frame
if (data.length == _expectedFrameSize)
{
ProvideCameraFrame(data, _expectedFrameSize, _context);
if (VERBOSE) Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered"));
if(_ownsBuffers)
{
// Give the video buffer to the camera service again.
_camera.addCallbackBuffer(data);
}
}
}
_previewBufferLock.unlock();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
try {
if(_camera!=null)
{
_camera.setPreviewDisplay(_localPreview);
}
} catch (IOException e) {
Log.e("*WEBRTC*", String.format(Locale.US, "Failed to set Local preview. "+ e.getMessage()));
}
}
/*
* Sets the rotation of the preview render window.
* Does not affect the captured video image.
*/
public void SetPreviewRotation(int rotation)
{
if(_camera!=null)
{
_previewBufferLock.lock();
final boolean running=_isRunning;
int width=0;
int height=0;
int framerate=0;
if(running)
{
width=_currentCapability.width;
height=_currentCapability.height;
framerate=_currentCapability.maxFPS;
StopCapture();
}
int resultRotation=0;
if(_currentDevice._frontCameraType==VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23)
{
// this is a 2.3 or later front facing camera. SetDisplayOrientation will flip the image horizontally before doing the rotation.
resultRotation=(360-rotation) % 360; // compensate the mirror
}
else
{ // Back facing or 2.2 or previous front camera
resultRotation=rotation;
}
if(android.os.Build.VERSION.SDK_INT>7)
{
_camera.setDisplayOrientation(resultRotation);
}
else // Android 2.1 and previous
{
// This rotation unfortunately does not seems to work.
//http://code.google.com/p/android/issues/detail?id=1193
Camera.Parameters parameters = _camera.getParameters();
parameters.setRotation(resultRotation);
_camera.setParameters(parameters);
}
if(running)
{
StartCapture(width, height, framerate);
}
_previewBufferLock.unlock();
}
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) { //
}
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.locks.ReentrantLock;
import org.webrtc.videoengine.CaptureCapabilityAndroid;
import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
public class VideoCaptureAndroid implements PreviewCallback, Callback {
private Camera camera;
private AndroidVideoCaptureDevice currentDevice = null;
public ReentrantLock previewBufferLock = new ReentrantLock();
private int PIXEL_FORMAT = ImageFormat.NV21;
PixelFormat pixelFormat = new PixelFormat();
// True when the C++ layer has ordered the camera to be started.
private boolean isRunning=false;
private final int numCaptureBuffers = 3;
private int expectedFrameSize = 0;
private int orientation = 0;
private int id = 0;
// C++ callback context variable.
private long context = 0;
private SurfaceHolder localPreview = null;
// True if this class owns the preview video buffers.
private boolean ownsBuffers = false;
// Set this to 2 for VERBOSE logging. 1 for DEBUG
private static int LOGLEVEL = 0;
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
CaptureCapabilityAndroid currentCapability = null;
public static
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid");
captureAndroid.StopCapture();
captureAndroid.camera.release();
captureAndroid.camera = null;
captureAndroid.context = 0;
if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended");
}
public VideoCaptureAndroid(int in_id,
long in_context,
Camera in_camera,
AndroidVideoCaptureDevice in_device) {
id = in_id;
context = in_context;
camera = in_camera;
currentDevice = in_device;
}
public int StartCapture(int width, int height, int frameRate) {
if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width +
" height " + height +" frame rate " + frameRate);
try {
if (camera == null) {
Log.e("*WEBRTC*",
String.format(Locale.US,"Camera not initialized %d",id));
return -1;
}
currentCapability = new CaptureCapabilityAndroid();
currentCapability.width = width;
currentCapability.height = height;
currentCapability.maxFPS = frameRate;
PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(currentCapability.width,
currentCapability.height);
parameters.setPreviewFormat(PIXEL_FORMAT );
parameters.setPreviewFrameRate(currentCapability.maxFPS);
camera.setParameters(parameters);
// Get the local preview SurfaceHolder from the static render class
localPreview = ViERenderer.GetLocalRenderer();
if(localPreview != null) {
localPreview.addCallback(this);
}
int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
if(android.os.Build.VERSION.SDK_INT >= 7) {
// According to Doc addCallbackBuffer belongs to API level 8.
// But it seems like it works on Android 2.1 as well.
// At least SE X10 and Milestone
byte[] buffer = null;
for (int i = 0; i < numCaptureBuffers; i++) {
buffer = new byte[bufSize];
camera.addCallbackBuffer(buffer);
}
camera.setPreviewCallbackWithBuffer(this);
ownsBuffers = true;
}
else {
camera.setPreviewCallback(this);
}
camera.startPreview();
previewBufferLock.lock();
expectedFrameSize = bufSize;
isRunning = true;
previewBufferLock.unlock();
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to start camera");
return -1;
}
return 0;
}
public int StopCapture() {
if(DEBUG) Log.d("*WEBRTC*", "StopCapture");
try {
previewBufferLock.lock();
isRunning = false;
previewBufferLock.unlock();
camera.stopPreview();
if(android.os.Build.VERSION.SDK_INT > 7) {
camera.setPreviewCallbackWithBuffer(null);
}
else {
camera.setPreviewCallback(null);
}
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to stop camera");
return -1;
}
if(DEBUG) {
Log.d("*WEBRTC*", "StopCapture ended");
}
return 0;
}
native void ProvideCameraFrame(byte[] data,int length, long captureObject);
public void onPreviewFrame(byte[] data, Camera camera) {
previewBufferLock.lock();
if(VERBOSE) {
Log.v("*WEBRTC*",
String.format(Locale.US, "preview frame length %d context %x",
data.length, context));
}
if(isRunning) {
// If StartCapture has been called but not StopCapture
// Call the C++ layer with the captured frame
if (data.length == expectedFrameSize) {
ProvideCameraFrame(data, expectedFrameSize, context);
if (VERBOSE) {
Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered"));
}
if(ownsBuffers) {
// Give the video buffer to the camera service again.
camera.addCallbackBuffer(data);
}
}
}
previewBufferLock.unlock();
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width, int height) {
try {
if(camera != null) {
camera.setPreviewDisplay(localPreview);
}
} catch (IOException e) {
Log.e("*WEBRTC*",
String.format(Locale.US,
"Failed to set Local preview. " + e.getMessage()));
}
}
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
public void SetPreviewRotation(int rotation) {
if(camera != null) {
previewBufferLock.lock();
final boolean running = isRunning;
int width = 0;
int height = 0;
int framerate = 0;
if(running) {
width = currentCapability.width;
height = currentCapability.height;
framerate = currentCapability.maxFPS;
StopCapture();
}
int resultRotation = 0;
if(currentDevice.frontCameraType ==
VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
// this is a 2.3 or later front facing camera.
// SetDisplayOrientation will flip the image horizontally
// before doing the rotation.
resultRotation=(360-rotation) % 360; // compensate the mirror
}
else {
// Back facing or 2.2 or previous front camera
resultRotation=rotation;
}
if(android.os.Build.VERSION.SDK_INT>7) {
camera.setDisplayOrientation(resultRotation);
}
else {
// Android 2.1 and previous
// This rotation unfortunately does not seems to work.
// http://code.google.com/p/android/issues/detail?id=1193
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(resultRotation);
camera.setParameters(parameters);
}
if(running) {
StartCapture(width, height, framerate);
}
previewBufferLock.unlock();
}
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
}
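Two calculations in this file are worth spelling out. StartCapture() sizes each callback buffer as width * height * bitsPerPixel / 8, where the bits-per-pixel value comes from PixelFormat.getPixelFormatInfo() and is 12 for NV21, so a 640x480 preview needs 460800 bytes per buffer (and three such buffers are queued). SetPreviewRotation() compensates for the horizontal mirroring of Android 2.3+ front-facing cameras with (360 - rotation) % 360. A small self-contained sketch of both, with 640x480 and a 90 degree rotation as example inputs only:

// Sketch of the two calculations used above. The NV21 bits-per-pixel value
// is hard-coded here; the real code obtains it via PixelFormat.getPixelFormatInfo().
public class CaptureMathExample {
  private static final int NV21_BITS_PER_PIXEL = 12;

  // Mirrors the callback-buffer sizing in StartCapture().
  static int previewBufferSize(int width, int height) {
    return width * height * NV21_BITS_PER_PIXEL / 8;
  }

  // Mirrors the front-camera compensation in SetPreviewRotation().
  static int frontCameraDisplayRotation(int rotation) {
    return (360 - rotation) % 360; // undo the mirrored image before rotating
  }

  public static void main(String[] args) {
    // 640x480 NV21 -> 460800 bytes per callback buffer (three are queued).
    System.out.println(previewBufferSize(640, 480));
    // A 90 degree rotation becomes 270 for a mirrored front camera.
    System.out.println(frontCameraDisplayRotation(90));
  }
}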

VideoCaptureDeviceInfoAndroid.java

@@ -1,446 +1,432 @@
package org.webrtc.videoengine;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import dalvik.system.DexClassLoader;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
public class VideoCaptureDeviceInfoAndroid {
//Context
Context _context;
//Logging
private static int LOGLEVEL = 0; // Set this to 2 for VERBOSE logging. 1 for DEBUG
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
/* Private class with info about all available cameras and the capabilities*/
public class AndroidVideoCaptureDevice
{
AndroidVideoCaptureDevice()
{
_frontCameraType=FrontFacingCameraType.None;
_index=0;
}
public String _deviceUniqueName;
public CaptureCapabilityAndroid _captureCapabilies[];
public FrontFacingCameraType _frontCameraType;
public int _orientation; //Orientation of camera as described in android.hardware.Camera.CameraInfo.Orientation
public int _index; // Camera index used in Camera.Open on Android 2.3 and onwards
}
public enum FrontFacingCameraType
{
None, // This is not a front facing camera
GalaxyS, // Galaxy S front facing camera.
HTCEvo, // HTC Evo front facing camera
Android23 // Android 2.3 front facing camera.
}
String _currentDeviceUniqueId;
int _id;
List<AndroidVideoCaptureDevice> _deviceList;
public static VideoCaptureDeviceInfoAndroid CreateVideoCaptureDeviceInfoAndroid(int id, Context context)
{
if(DEBUG) Log.d("*WEBRTC*",String.format(Locale.US,"VideoCaptureDeviceInfoAndroid"));
VideoCaptureDeviceInfoAndroid self = new VideoCaptureDeviceInfoAndroid(id,context);
if(self!=null && self.Init()==0)
{
return self;
}
else
{
if(DEBUG) Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid.");
}
return null;
}
private VideoCaptureDeviceInfoAndroid(int id, Context context)
{
_id=id;
_context=context;
_deviceList= new ArrayList<AndroidVideoCaptureDevice>();
}
private int Init()
{
// Populate the _deviceList with available cameras and their capabilities.
Camera camera=null;
try{
if(android.os.Build.VERSION.SDK_INT>8) // From Android 2.3 and onwards
{
for(int i=0; i<Camera.getNumberOfCameras();++i)
{
AndroidVideoCaptureDevice newDevice=new AndroidVideoCaptureDevice();
Camera.CameraInfo info=new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
newDevice._index=i;
newDevice._orientation=info.orientation;
if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK)
{
newDevice._deviceUniqueName="Camera " + i +", Facing back, Orientation "+ info.orientation;
}
else
{
newDevice._deviceUniqueName="Camera " + i +", Facing front, Orientation "+ info.orientation;
newDevice._frontCameraType=FrontFacingCameraType.Android23;
}
camera=Camera.open(i);
Camera.Parameters parameters = camera.getParameters();
AddDeviceInfo(newDevice, parameters);
camera.release();
camera=null;
_deviceList.add(newDevice);
}
}
else // Prior to Android 2.3
{
AndroidVideoCaptureDevice newDevice;
Camera.Parameters parameters;
newDevice=new AndroidVideoCaptureDevice();
camera=Camera.open();
parameters = camera.getParameters();
newDevice._deviceUniqueName="Camera 1, Facing back";
newDevice._orientation=90;
AddDeviceInfo(newDevice, parameters);
_deviceList.add(newDevice);
camera.release();
camera=null;
newDevice=new AndroidVideoCaptureDevice();
newDevice._deviceUniqueName="Camera 2, Facing front";
parameters=SearchOldFrontFacingCameras(newDevice);
if(parameters!=null)
{
AddDeviceInfo(newDevice, parameters);
_deviceList.add(newDevice);
}
}
}catch (Exception ex) {
Log.e("*WEBRTC*", "VideoCaptureDeviceInfoAndroid:Init Failed to init VideoCaptureDeviceInfo ex " +ex.getLocalizedMessage());
return -1;
}
VerifyCapabilities();
return 0;
}
/*
* Adds the capture capabilities of the currently opened device
*/
private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,Camera.Parameters parameters)
{
List<Size> sizes=parameters.getSupportedPreviewSizes();
List<Integer> frameRates=parameters.getSupportedPreviewFrameRates();
int maxFPS=0;
for(Integer frameRate:frameRates)
{
if(VERBOSE) Log.v("*WEBRTC*", "VideoCaptureDeviceInfoAndroid:Init supports frameRate "+ frameRate);
if(frameRate>maxFPS)
{
maxFPS=frameRate;
}
}
newDevice._captureCapabilies= new CaptureCapabilityAndroid[sizes.size()];
for(int i=0;i<sizes.size();++i)
{
Size s=sizes.get(i);
newDevice._captureCapabilies[i]=new CaptureCapabilityAndroid();
newDevice._captureCapabilies[i].height=s.height;
newDevice._captureCapabilies[i].width=s.width;
newDevice._captureCapabilies[i].maxFPS=maxFPS;
}
}
/*
* Function that make sure device specific capabilities are in the capability list.
* Ie Galaxy S supports CIF but does not list CIF as a supported capability.
* Motorola Droid Camera does not work with frame rate above 15fps.
* http://code.google.com/p/android/issues/detail?id=5514#c0
*/
private void VerifyCapabilities()
{
// Nexus S or Galaxy S
if(android.os.Build.DEVICE.equals("GT-I9000") || android.os.Build.DEVICE.equals("crespo"))
{
CaptureCapabilityAndroid specificCapability=new CaptureCapabilityAndroid();
specificCapability.width=352;
specificCapability.height=288;
specificCapability.maxFPS=15;
AddDeviceSpecificCapability(specificCapability);
specificCapability=new CaptureCapabilityAndroid();
specificCapability.width=176;
specificCapability.height=144;
specificCapability.maxFPS=15;
AddDeviceSpecificCapability(specificCapability);
specificCapability=new CaptureCapabilityAndroid();
specificCapability.width=320;
specificCapability.height=240;
specificCapability.maxFPS=15;
AddDeviceSpecificCapability(specificCapability);
}
// Motorola Milestone Camera server does not work at 30fps even though it reports that it can
if(android.os.Build.MANUFACTURER.equals("motorola") && android.os.Build.DEVICE.equals("umts_sholes"))
{
for(AndroidVideoCaptureDevice device:_deviceList)
{
for(CaptureCapabilityAndroid capability:device._captureCapabilies)
{
capability.maxFPS=15;
}
}
}
}
private void AddDeviceSpecificCapability(CaptureCapabilityAndroid specificCapability)
{
for(AndroidVideoCaptureDevice device:_deviceList)
{
boolean foundCapability=false;
for(CaptureCapabilityAndroid capability:device._captureCapabilies)
{
if(capability.width==specificCapability.width && capability.height==specificCapability.height)
{
foundCapability=true;
break;
}
}
if(foundCapability==false)
{
CaptureCapabilityAndroid newCaptureCapabilies[]= new CaptureCapabilityAndroid[device._captureCapabilies.length+1];
for(int i=0;i<device._captureCapabilies.length;++i)
{
newCaptureCapabilies[i+1]=device._captureCapabilies[i];
}
newCaptureCapabilies[0]=specificCapability;
device._captureCapabilies=newCaptureCapabilies;
}
}
}
/*
* Returns the number of Capture devices that is supported
*/
public int NumberOfDevices()
{
return _deviceList.size();
}
public String GetDeviceUniqueName(int deviceNumber)
{
if(deviceNumber<0 || deviceNumber>=_deviceList.size())
{
return null;
}
return _deviceList.get(deviceNumber)._deviceUniqueName;
}
public CaptureCapabilityAndroid[]
GetCapabilityArray (String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: _deviceList)
{
if(device._deviceUniqueName.equals(deviceUniqueId))
{
return (CaptureCapabilityAndroid[]) device._captureCapabilies;
}
}
return null;
}
/* Returns the camera orientation as described by
* android.hardware.Camera.CameraInfo.orientation
*/
public int GetOrientation(String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: _deviceList)
{
if(device._deviceUniqueName.equals(deviceUniqueId))
{
return device._orientation;
}
}
return -1;
}
/*
* Returns an instance of VideoCaptureAndroid.
*/
public VideoCaptureAndroid AllocateCamera(int id, long context,String deviceUniqueId)
{
try
{
if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId);
Camera camera=null;
AndroidVideoCaptureDevice deviceToUse=null;
for (AndroidVideoCaptureDevice device: _deviceList)
{
if(device._deviceUniqueName.equals(deviceUniqueId)) // Found the wanted camera
{
deviceToUse=device;
switch(device._frontCameraType)
{
case GalaxyS:
camera= AllocateGalaxySFrontCamera();
break;
case HTCEvo:
camera= AllocateEVOFrontFacingCamera();
break;
default:
if(android.os.Build.VERSION.SDK_INT>8) // From Android 2.3 and onwards)
camera=Camera.open(device._index);
else
camera=Camera.open(); // Default camera
}
}
}
if(camera==null)
{
return null;
}
if(VERBOSE) Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid");
return new VideoCaptureAndroid(id,context,camera,deviceToUse);
}catch (Exception ex) {
Log.e("*WEBRTC*", "AllocateCamera Failed to open camera- ex " +ex.getLocalizedMessage());
}
return null;
}
/*
* Searches for a front facing camera device. This is device specific code.
*/
private Camera.Parameters SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice) throws SecurityException, IllegalArgumentException, NoSuchMethodException, ClassNotFoundException, IllegalAccessException, InvocationTargetException
{
//Check the id of the opened camera device (Returns null on X10 and 1 on Samsung Galaxy S.
Camera camera=Camera.open();
Camera.Parameters parameters=camera.getParameters();
String cameraId=parameters.get("camera-id");
if(cameraId!=null && cameraId.equals("1")) // This might be a Samsung Galaxy S with a front facing camera.
{
try
{
parameters.set("camera-id", 2);
camera.setParameters(parameters);
parameters = camera.getParameters();
newDevice._frontCameraType=FrontFacingCameraType.GalaxyS;
newDevice._orientation=0;
camera.release();
return parameters;
}
catch (Exception ex) {
//Nope - it did not work.
Log.e("*WEBRTC*", "VideoCaptureDeviceInfoAndroid:Init Failed to open front camera camera - ex " +ex.getLocalizedMessage());
}
}
camera.release();
//Check for Evo front facing camera.
File file = new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
boolean exists = file.exists();
if (!exists){
file = new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
exists = file.exists();
}
if(exists)
{
newDevice._frontCameraType=FrontFacingCameraType.HTCEvo;
newDevice._orientation=0;
Camera evCamera=AllocateEVOFrontFacingCamera();
parameters=evCamera.getParameters();
evCamera.release();
return parameters;
}
return null;
}
/*
* Returns a handle to HTC front facing camera.
* The caller is responsible to release it on completion.
*/
private Camera AllocateEVOFrontFacingCamera() throws SecurityException, NoSuchMethodException, ClassNotFoundException, IllegalArgumentException, IllegalAccessException, InvocationTargetException
{
String classPath=null;
File file = new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
boolean exists = file.exists();
if (!exists){
file = new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
exists = file.exists();
}
if(!exists)
{
return null;
}
String dexOutputDir="";
if(_context!=null){
dexOutputDir = _context.getFilesDir().getAbsolutePath();
File mFilesDir = new File(dexOutputDir, "dexfiles");
if(!mFilesDir.exists()){
//Log.e("*WEBRTCN*", "Directory doesn't exists");
if(!mFilesDir.mkdirs()) {
//Log.e("*WEBRTCN*", "Unable to create files directory");
}
}
}
dexOutputDir += "/dexfiles";
DexClassLoader loader = new DexClassLoader(
file.getAbsolutePath(),
dexOutputDir,
null,
ClassLoader.getSystemClassLoader()
);
Method method = loader.loadClass(classPath).getDeclaredMethod("getFrontFacingCamera", (Class[]) null);
Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
return camera;
}
/*
* Returns a handle to Galaxy S front camera.
* The caller is responsible to release it on completion.
*/
private Camera AllocateGalaxySFrontCamera()
{
Camera camera=Camera.open();
Camera.Parameters parameters = camera.getParameters();
parameters.set("camera-id",2);
camera.setParameters(parameters);
return camera;
}
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import dalvik.system.DexClassLoader;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
public class VideoCaptureDeviceInfoAndroid {
//Context
Context context;
// Set this to 2 for VERBOSE logging. 1 for DEBUG
private static int LOGLEVEL = 0;
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
// Private class with info about all available cameras and the capabilities
public class AndroidVideoCaptureDevice {
AndroidVideoCaptureDevice() {
frontCameraType = FrontFacingCameraType.None;
index = 0;
}
public String deviceUniqueName;
public CaptureCapabilityAndroid captureCapabilies[];
public FrontFacingCameraType frontCameraType;
// Orientation of camera as described in
// android.hardware.Camera.CameraInfo.Orientation
public int orientation;
// Camera index used in Camera.Open on Android 2.3 and onwards
public int index;
}
public enum FrontFacingCameraType {
None, // This is not a front facing camera
GalaxyS, // Galaxy S front facing camera.
HTCEvo, // HTC Evo front facing camera
Android23, // Android 2.3 front facing camera.
}
String currentDeviceUniqueId;
int id;
List<AndroidVideoCaptureDevice> deviceList;
public static VideoCaptureDeviceInfoAndroid
CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
if(DEBUG) {
Log.d("*WEBRTC*",
String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
}
VideoCaptureDeviceInfoAndroid self =
new VideoCaptureDeviceInfoAndroid(in_id, in_context);
if(self != null && self.Init() == 0) {
return self;
}
else {
if(DEBUG) {
Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid.");
}
}
return null;
}
private VideoCaptureDeviceInfoAndroid(int in_id,
Context in_context) {
id = in_id;
context = in_context;
deviceList = new ArrayList<AndroidVideoCaptureDevice>();
}
private int Init() {
// Populate the deviceList with available cameras and their capabilities.
Camera camera = null;
try{
if(android.os.Build.VERSION.SDK_INT > 8) {
// From Android 2.3 and onwards
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
newDevice.index = i;
newDevice.orientation=info.orientation;
if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
newDevice.deviceUniqueName =
"Camera " + i +", Facing back, Orientation "+ info.orientation;
}
else {
newDevice.deviceUniqueName =
"Camera " + i +", Facing front, Orientation "+ info.orientation;
newDevice.frontCameraType = FrontFacingCameraType.Android23;
}
camera = Camera.open(i);
Camera.Parameters parameters = camera.getParameters();
AddDeviceInfo(newDevice, parameters);
camera.release();
camera = null;
deviceList.add(newDevice);
}
}
else {
// Prior to Android 2.3
AndroidVideoCaptureDevice newDevice;
Camera.Parameters parameters;
newDevice = new AndroidVideoCaptureDevice();
camera = Camera.open();
parameters = camera.getParameters();
newDevice.deviceUniqueName = "Camera 1, Facing back";
newDevice.orientation = 90;
AddDeviceInfo(newDevice, parameters);
deviceList.add(newDevice);
camera.release();
camera=null;
newDevice = new AndroidVideoCaptureDevice();
newDevice.deviceUniqueName = "Camera 2, Facing front";
parameters = SearchOldFrontFacingCameras(newDevice);
if(parameters != null) {
AddDeviceInfo(newDevice, parameters);
deviceList.add(newDevice);
}
}
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to init VideoCaptureDeviceInfo ex" +
ex.getLocalizedMessage());
return -1;
}
VerifyCapabilities();
return 0;
}
// Adds the capture capabilities of the currently opened device
private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
int maxFPS=0;
for(Integer frameRate:frameRates) {
if(VERBOSE) {
Log.v("*WEBRTC*",
"VideoCaptureDeviceInfoAndroid:frameRate " + frameRate);
}
if(frameRate > maxFPS) {
maxFPS = frameRate;
}
}
newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
for(int i = 0; i < sizes.size(); ++i) {
Size s = sizes.get(i);
newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
newDevice.captureCapabilies[i].height = s.height;
newDevice.captureCapabilies[i].width = s.width;
newDevice.captureCapabilies[i].maxFPS = maxFPS;
}
}
// Function that make sure device specific capabilities are
// in the capability list.
// Ie Galaxy S supports CIF but does not list CIF as a supported capability.
// Motorola Droid Camera does not work with frame rate above 15fps.
// http://code.google.com/p/android/issues/detail?id=5514#c0
private void VerifyCapabilities() {
// Nexus S or Galaxy S
if(android.os.Build.DEVICE.equals("GT-I9000") ||
android.os.Build.DEVICE.equals("crespo")) {
CaptureCapabilityAndroid specificCapability =
new CaptureCapabilityAndroid();
specificCapability.width = 352;
specificCapability.height = 288;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 176;
specificCapability.height = 144;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 320;
specificCapability.height = 240;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
}
// Motorola Milestone Camera server does not work at 30fps
// even though it reports that it can
if(android.os.Build.MANUFACTURER.equals("motorola") &&
android.os.Build.DEVICE.equals("umts_sholes")) {
for(AndroidVideoCaptureDevice device:deviceList) {
for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
capability.maxFPS=15;
}
}
}
}
private void AddDeviceSpecificCapability(
CaptureCapabilityAndroid specificCapability) {
for(AndroidVideoCaptureDevice device:deviceList) {
boolean foundCapability = false;
for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
if(capability.width == specificCapability.width &&
capability.height == specificCapability.height) {
foundCapability = true;
break;
}
}
if(foundCapability==false) {
CaptureCapabilityAndroid newCaptureCapabilies[]=
new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
for(int i = 0; i < device.captureCapabilies.length; ++i) {
newCaptureCapabilies[i+1] = device.captureCapabilies[i];
}
newCaptureCapabilies[0] = specificCapability;
device.captureCapabilies = newCaptureCapabilies;
}
}
}
// Returns the number of Capture devices that is supported
public int NumberOfDevices() {
return deviceList.size();
}
public String GetDeviceUniqueName(int deviceNumber) {
if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
return null;
}
return deviceList.get(deviceNumber).deviceUniqueName;
}
public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return (CaptureCapabilityAndroid[]) device.captureCapabilies;
}
}
return null;
}
// Returns the camera orientation as described by
// android.hardware.Camera.CameraInfo.orientation
public int GetOrientation(String deviceUniqueId) {
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return device.orientation;
}
}
return -1;
}
// Returns an instance of VideoCaptureAndroid.
public VideoCaptureAndroid AllocateCamera(int id, long context,
String deviceUniqueId) {
try {
if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId);
Camera camera = null;
AndroidVideoCaptureDevice deviceToUse = null;
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
// Found the wanted camera
deviceToUse = device;
switch(device.frontCameraType) {
case GalaxyS:
camera = AllocateGalaxySFrontCamera();
break;
case HTCEvo:
camera = AllocateEVOFrontFacingCamera();
break;
default:
// From Android 2.3 and onwards)
if(android.os.Build.VERSION.SDK_INT>8)
camera=Camera.open(device.index);
else
camera=Camera.open(); // Default camera
}
}
}
if(camera == null) {
return null;
}
if(VERBOSE) {
Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid");
}
return new VideoCaptureAndroid(id,context,camera,deviceToUse);
}catch (Exception ex) {
Log.e("*WEBRTC*", "AllocateCamera Failed to open camera- ex " +
ex.getLocalizedMessage());
}
return null;
}
// Searches for a front facing camera device. This is device specific code.
private Camera.Parameters
SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
throws SecurityException, IllegalArgumentException,
NoSuchMethodException, ClassNotFoundException,
IllegalAccessException, InvocationTargetException {
// Check the id of the opened camera device
// Returns null on X10 and 1 on Samsung Galaxy S.
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
String cameraId = parameters.get("camera-id");
if(cameraId != null && cameraId.equals("1")) {
// This might be a Samsung Galaxy S with a front facing camera.
try {
parameters.set("camera-id", 2);
camera.setParameters(parameters);
parameters = camera.getParameters();
newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
newDevice.orientation = 0;
camera.release();
return parameters;
}
catch (Exception ex) {
//Nope - it did not work.
Log.e("*WEBRTC*", "Init Failed to open front camera camera - ex " +
ex.getLocalizedMessage());
}
}
camera.release();
//Check for Evo front facing camera
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
boolean exists = file.exists();
if (!exists){
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
exists = file.exists();
}
if(exists) {
newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
newDevice.orientation = 0;
Camera evCamera = AllocateEVOFrontFacingCamera();
parameters = evCamera.getParameters();
evCamera.release();
return parameters;
}
return null;
}
// Returns a handle to HTC front facing camera.
// The caller is responsible to release it on completion.
private Camera AllocateEVOFrontFacingCamera()
throws SecurityException, NoSuchMethodException,
ClassNotFoundException, IllegalArgumentException,
IllegalAccessException, InvocationTargetException {
String classPath = null;
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
boolean exists = file.exists();
if (!exists){
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
exists = file.exists();
}
if(!exists) {
return null;
}
String dexOutputDir = "";
if(context != null) {
dexOutputDir = context.getFilesDir().getAbsolutePath();
File mFilesDir = new File(dexOutputDir, "dexfiles");
if(!mFilesDir.exists()){
//Log.e("*WEBRTCN*", "Directory doesn't exists");
if(!mFilesDir.mkdirs()) {
//Log.e("*WEBRTCN*", "Unable to create files directory");
}
}
}
dexOutputDir += "/dexfiles";
DexClassLoader loader =
new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
null, ClassLoader.getSystemClassLoader());
Method method = loader.loadClass(classPath).getDeclaredMethod(
"getFrontFacingCamera", (Class[]) null);
Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
return camera;
}
// Returns a handle to Galaxy S front camera.
// The caller is responsible to release it on completion.
private Camera AllocateGalaxySFrontCamera()
{
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
parameters.set("camera-id",2);
camera.setParameters(parameters);
return camera;
}
}
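For context, these classes are normally driven from the C++ video capture module, which supplies the integer id and the long context (a pointer to the native capture object) and receives frames through the native ProvideCameraFrame() callback. The Java-only sketch below shows the call sequence exposed by this file, passing 0 for the native context and an arbitrary CIF/15 fps capability purely for illustration; without the native library loaded, frames arriving in onPreviewFrame() cannot be forwarded, so this demonstrates the API shape rather than a working capture path.

import android.content.Context;
import org.webrtc.videoengine.VideoCaptureAndroid;
import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid;

// Illustrative harness only; the ids, the 0 native context and the 352x288/15
// capability are assumptions made for this sketch, not values used by WebRTC.
public class CaptureUsageSketch {
  public static void startFirstCamera(Context appContext) {
    VideoCaptureDeviceInfoAndroid deviceInfo =
        VideoCaptureDeviceInfoAndroid.CreateVideoCaptureDeviceInfoAndroid(
            0, appContext);
    if (deviceInfo == null || deviceInfo.NumberOfDevices() == 0) {
      return;
    }
    String uniqueName = deviceInfo.GetDeviceUniqueName(0);
    // 0 stands in for the pointer the native layer would normally pass.
    VideoCaptureAndroid capture = deviceInfo.AllocateCamera(0, 0, uniqueName);
    if (capture == null) {
      return;
    }
    if (capture.StartCapture(352, 288, 15) == 0) {
      // Preview is running; frames would normally reach the C++ layer here.
      capture.StopCapture();
    }
    VideoCaptureAndroid.DeleteVideoCaptureAndroid(capture);
  }
}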