Reformat android related code

BUG=
TEST=
Review URL: https://webrtc-codereview.appspot.com/472004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1988 4adac7df-926f-26a2-2b94-8c16560cd09d
leozwang@webrtc.org 2012-04-04 17:15:42 +00:00
parent 851becd00c
commit f3dc22f7d1
20 changed files with 3205 additions and 3430 deletions


@ -18,70 +18,62 @@
namespace webrtc
{
namespace videocapturemodule
{
VideoCaptureModule::DeviceInfo*
VideoCaptureImpl::CreateDeviceInfo (const WebRtc_Word32 id) {
videocapturemodule::DeviceInfoAndroid *deviceInfo =
new videocapturemodule::DeviceInfoAndroid(id);
if (deviceInfo && deviceInfo->Init() != 0) {
delete deviceInfo;
deviceInfo = NULL;
}
return deviceInfo;
}
DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
DeviceInfoImpl(id) {
}
WebRtc_Word32 DeviceInfoAndroid::Init() {
return 0;
}
DeviceInfoAndroid::~DeviceInfoAndroid() {
}
WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
return 0;
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s GetMethodId", __FUNCTION__);
// get the method ID for the Android Java GetDeviceUniqueName name.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"NumberOfDevices",
"()I");
jint numberOfDevices = 0;
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s Calling Number of devices", __FUNCTION__);
numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
if (numberOfDevices > 0)
return numberOfDevices;
return 0;
}
WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
@ -93,86 +85,75 @@ WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
char* /*productUniqueIdUTF8*/,
WebRtc_UWord32 /*productUniqueIdUTF8Length*/) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
WebRtc_Word32 result = 0;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached)!= 0)
return -1;
// get the method ID for the Android Java GetDeviceUniqueName name.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
"(I)Ljava/lang/String;");
if (cid != NULL) {
jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, deviceNumber);
if (javaDeviceNameObj == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get device name for device %d.",
__FUNCTION__, (int) deviceNumber);
result = -1;
} else {
jboolean isCopy;
const char* javaDeviceNameChar = env->GetStringUTFChars(
(jstring) javaDeviceNameObj
,&isCopy);
const jsize javaDeviceNameCharLength =
env->GetStringUTFLength((jstring) javaDeviceNameObj);
if ((WebRtc_UWord32) javaDeviceNameCharLength <
deviceUniqueIdUTF8Length) {
memcpy(deviceUniqueIdUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id, "%s: deviceUniqueIdUTF8 to short.",
__FUNCTION__);
result = -1;
}
if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) {
memcpy(deviceNameUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
javaDeviceNameChar);
} // javaDeviceNameObj == NULL
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find GetDeviceUniqueName function id",
__FUNCTION__);
result = -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, (int) result);
return result;
}
WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) {
MapItem* item = NULL;
while ((item = _captureCapabilities.Last())) {
delete (VideoCaptureCapability*) item->GetItem();
@ -187,9 +168,8 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
return -1;
// Find the capability class
jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
@ -282,8 +262,7 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
@ -292,9 +271,8 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
return -1;
// get the method ID for the Android Java GetOrientation .
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
@ -321,7 +299,7 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
WebRtc_Word32 retValue = 0;
switch (jorientation) {
case -1: // Error
orientation = kCameraRotate0;
retValue = -1;
break;
@ -344,5 +322,5 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
return retValue;
}
} // namespace videocapturemodule
} // namespace webrtc
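All of the DeviceInfoAndroid methods above first obtain a JNIEnv through VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects and later hand it back with ReleaseAndroidDeviceInfoObjects. The sketch below shows only the standard JNI attach/release pattern such a helper relies on (Android NDK signatures); the real helper lives in video_capture_android.cc, which is not part of this diff, so the helper name and its exact behavior here are illustrative assumptions.

// Sketch only: obtaining a JNIEnv for the calling thread, attaching it to
// the VM when necessary. g_jvm is assumed to have been stored earlier
// (e.g. from SetAndroidObjects or JNI_OnLoad).
#include <jni.h>

static JavaVM* g_jvm = NULL;

// Returns 0 on success. 'attached' tells the caller whether this call
// attached the thread, i.e. whether a matching DetachCurrentThread is owed.
static int GetEnvForCurrentThread(JNIEnv*& env, bool& attached) {
  attached = false;
  jint res = g_jvm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4);
  if (res == JNI_EDETACHED) {
    // Native thread not yet known to the VM: attach it.
    if (g_jvm->AttachCurrentThread(&env, NULL) != JNI_OK) {
      return -1;
    }
    attached = true;
  } else if (res != JNI_OK) {
    return -1;
  }
  return 0;
}

When attached comes back true, the matching release call is expected to invoke g_jvm->DetachCurrentThread(), which is presumably what ReleaseAndroidDeviceInfoObjects(attached) does with its flag.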


@ -29,35 +29,37 @@ namespace videocapturemodule
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
class DeviceInfoAndroid : public DeviceInfoImpl {
public:
DeviceInfoAndroid(const WebRtc_Word32 id);
WebRtc_Word32 Init();
virtual ~DeviceInfoAndroid();
virtual WebRtc_UWord32 NumberOfDevices();
virtual WebRtc_Word32 GetDeviceName(
WebRtc_UWord32 deviceNumber,
char* deviceNameUTF8,
WebRtc_UWord32 deviceNameLength,
char* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8 = 0,
WebRtc_UWord32 productUniqueIdUTF8Length = 0);
virtual WebRtc_Word32 CreateCapabilityMap(const char* deviceUniqueIdUTF8);
virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
const char* /*deviceUniqueIdUTF8*/,
const char* /*dialogTitleUTF8*/,
void* /*parentWindow*/,
WebRtc_UWord32 /*positionX*/,
WebRtc_UWord32 /*positionY*/) { return -1; }
virtual WebRtc_Word32 GetOrientation(const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation);
private:
bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
enum {_expectedCaptureDelay = 190};
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_


@ -11,7 +11,7 @@
package org.webrtc.videoengine;
public class CaptureCapabilityAndroid {
public int width = 0;
public int height = 0;
public int maxFPS = 0;
}


@ -27,236 +27,236 @@ import android.view.SurfaceHolder.Callback;
public class VideoCaptureAndroid implements PreviewCallback, Callback {
private Camera camera;
private AndroidVideoCaptureDevice currentDevice = null;
public ReentrantLock previewBufferLock = new ReentrantLock();
private int PIXEL_FORMAT = ImageFormat.NV21;
PixelFormat pixelFormat = new PixelFormat();
// True when the C++ layer has ordered the camera to be started.
private boolean isRunning=false;
private final int numCaptureBuffers = 3;
private int expectedFrameSize = 0;
private int orientation = 0;
private int id = 0;
// C++ callback context variable.
private long context = 0;
private SurfaceHolder localPreview = null;
// True if this class owns the preview video buffers.
private boolean ownsBuffers = false;
// Set this to 2 for VERBOSE logging. 1 for DEBUG
private static int LOGLEVEL = 0;
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
CaptureCapabilityAndroid currentCapability = null;
public static
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid");
captureAndroid.StopCapture();
captureAndroid.camera.release();
captureAndroid.camera = null;
captureAndroid.context = 0;
if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended");
}
public VideoCaptureAndroid(int in_id,
long in_context,
Camera in_camera,
AndroidVideoCaptureDevice in_device) {
id = in_id;
context = in_context;
camera = in_camera;
currentDevice = in_device;
}
public int StartCapture(int width, int height, int frameRate) {
if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width +
" height " + height +" frame rate " + frameRate);
try {
if (camera == null) {
Log.e("*WEBRTC*",
String.format(Locale.US,"Camera not initialized %d",id));
return -1;
}
currentCapability = new CaptureCapabilityAndroid();
currentCapability.width = width;
currentCapability.height = height;
currentCapability.maxFPS = frameRate;
PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(currentCapability.width,
currentCapability.height);
parameters.setPreviewFormat(PIXEL_FORMAT );
parameters.setPreviewFrameRate(currentCapability.maxFPS);
camera.setParameters(parameters);
// Get the local preview SurfaceHolder from the static render class
localPreview = ViERenderer.GetLocalRenderer();
if(localPreview != null) {
localPreview.addCallback(this);
}
int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
if(android.os.Build.VERSION.SDK_INT >= 7) {
// According to Doc addCallbackBuffer belongs to API level 8.
// But it seems like it works on Android 2.1 as well.
// At least SE X10 and Milestone
byte[] buffer = null;
for (int i = 0; i < numCaptureBuffers; i++) {
buffer = new byte[bufSize];
camera.addCallbackBuffer(buffer);
}
camera.setPreviewCallbackWithBuffer(this);
ownsBuffers = true;
}
else {
camera.setPreviewCallback(this);
}
camera.startPreview();
previewBufferLock.lock();
expectedFrameSize = bufSize;
isRunning = true;
previewBufferLock.unlock();
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to start camera");
return -1;
}
return 0;
}
public int StopCapture() {
if(DEBUG) Log.d("*WEBRTC*", "StopCapture");
try {
previewBufferLock.lock();
isRunning = false;
previewBufferLock.unlock();
camera.stopPreview();
if(android.os.Build.VERSION.SDK_INT > 7) {
camera.setPreviewCallbackWithBuffer(null);
}
else {
camera.setPreviewCallback(null);
}
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to stop camera");
return -1;
}
if(DEBUG) {
Log.d("*WEBRTC*", "StopCapture ended");
}
return 0;
}
native void ProvideCameraFrame(byte[] data,int length, long captureObject);
public void onPreviewFrame(byte[] data, Camera camera) {
previewBufferLock.lock();
if(VERBOSE) {
Log.v("*WEBRTC*",
String.format(Locale.US, "preview frame length %d context %x",
data.length, context));
}
if(isRunning) {
// If StartCapture has been called but not StopCapture
// Call the C++ layer with the captured frame
if (data.length == expectedFrameSize) {
ProvideCameraFrame(data, expectedFrameSize, context);
if (VERBOSE) {
Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered"));
}
if(ownsBuffers) {
// Give the video buffer to the camera service again.
camera.addCallbackBuffer(data);
}
}
}
previewBufferLock.unlock();
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width, int height) {
try {
if(camera != null) {
camera.setPreviewDisplay(localPreview);
}
} catch (IOException e) {
Log.e("*WEBRTC*",
String.format(Locale.US,
"Failed to set Local preview. " + e.getMessage()));
}
}
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
public void SetPreviewRotation(int rotation) {
if(camera != null) {
previewBufferLock.lock();
final boolean running = isRunning;
int width = 0;
int height = 0;
int framerate = 0;
if(running) {
width = currentCapability.width;
height = currentCapability.height;
framerate = currentCapability.maxFPS;
StopCapture();
}
int resultRotation = 0;
if(currentDevice.frontCameraType ==
VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
// this is a 2.3 or later front facing camera.
// SetDisplayOrientation will flip the image horizontally
// before doing the rotation.
resultRotation=(360-rotation) % 360; // compensate the mirror
}
else {
// Back facing or 2.2 or previous front camera
resultRotation=rotation;
}
if(android.os.Build.VERSION.SDK_INT>7) {
camera.setDisplayOrientation(resultRotation);
}
else {
// Android 2.1 and previous
// This rotation unfortunately does not seems to work.
// http://code.google.com/p/android/issues/detail?id=1193
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(resultRotation);
camera.setParameters(parameters);
}
if(running) {
StartCapture(width, height, framerate);
}
previewBufferLock.unlock();
}
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
}


@ -26,407 +26,406 @@ import android.util.Log;
public class VideoCaptureDeviceInfoAndroid {
//Context
Context context;
// Set this to 2 for VERBOSE logging. 1 for DEBUG
private static int LOGLEVEL = 0;
private static boolean VERBOSE = LOGLEVEL > 2;
private static boolean DEBUG = LOGLEVEL > 1;
// Private class with info about all available cameras and the capabilities
public class AndroidVideoCaptureDevice {
AndroidVideoCaptureDevice() {
frontCameraType = FrontFacingCameraType.None;
index = 0;
}
public String deviceUniqueName;
public CaptureCapabilityAndroid captureCapabilies[];
public FrontFacingCameraType frontCameraType;
// Orientation of camera as described in
// android.hardware.Camera.CameraInfo.Orientation
public int orientation;
// Camera index used in Camera.Open on Android 2.3 and onwards
public int index;
}
public enum FrontFacingCameraType {
None, // This is not a front facing camera
GalaxyS, // Galaxy S front facing camera.
HTCEvo, // HTC Evo front facing camera
Android23, // Android 2.3 front facing camera.
}
String currentDeviceUniqueId;
int id;
List<AndroidVideoCaptureDevice> deviceList;
public static VideoCaptureDeviceInfoAndroid
CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
if(DEBUG) {
Log.d("*WEBRTC*",
String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
}
VideoCaptureDeviceInfoAndroid self =
new VideoCaptureDeviceInfoAndroid(in_id, in_context);
if(self != null && self.Init() == 0) {
return self;
}
else {
if(DEBUG) {
Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid.");
}
}
return null;
}
private VideoCaptureDeviceInfoAndroid(int in_id,
Context in_context) {
id = in_id;
context = in_context;
deviceList = new ArrayList<AndroidVideoCaptureDevice>();
}
private int Init() {
// Populate the deviceList with available cameras and their capabilities.
Camera camera = null;
try{
if(android.os.Build.VERSION.SDK_INT > 8) {
// From Android 2.3 and onwards
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
newDevice.index = i;
newDevice.orientation=info.orientation;
if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
newDevice.deviceUniqueName =
"Camera " + i +", Facing back, Orientation "+ info.orientation;
}
else {
newDevice.deviceUniqueName =
"Camera " + i +", Facing front, Orientation "+ info.orientation;
newDevice.frontCameraType = FrontFacingCameraType.Android23;
}
camera = Camera.open(i);
Camera.Parameters parameters = camera.getParameters();
AddDeviceInfo(newDevice, parameters);
camera.release();
camera = null;
deviceList.add(newDevice);
}
}
else {
// Prior to Android 2.3
AndroidVideoCaptureDevice newDevice;
Camera.Parameters parameters;
newDevice = new AndroidVideoCaptureDevice();
camera = Camera.open();
parameters = camera.getParameters();
newDevice.deviceUniqueName = "Camera 1, Facing back";
newDevice.orientation = 90;
AddDeviceInfo(newDevice, parameters);
deviceList.add(newDevice);
camera.release();
camera=null;
newDevice = new AndroidVideoCaptureDevice();
newDevice.deviceUniqueName = "Camera 2, Facing front";
parameters = SearchOldFrontFacingCameras(newDevice);
if(parameters != null) {
AddDeviceInfo(newDevice, parameters);
deviceList.add(newDevice);
}
}
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to init VideoCaptureDeviceInfo ex" +
ex.getLocalizedMessage());
return -1;
}
VerifyCapabilities();
return 0;
}
// Adds the capture capabilities of the currently opened device
private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
int maxFPS=0;
for(Integer frameRate:frameRates) {
if(VERBOSE) {
Log.v("*WEBRTC*",
"VideoCaptureDeviceInfoAndroid:frameRate " + frameRate);
}
if(frameRate > maxFPS) {
maxFPS = frameRate;
}
}
newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
for(int i = 0; i < sizes.size(); ++i) {
Size s = sizes.get(i);
newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
newDevice.captureCapabilies[i].height = s.height;
newDevice.captureCapabilies[i].width = s.width;
newDevice.captureCapabilies[i].maxFPS = maxFPS;
}
}
// Function that make sure device specific capabilities are
// in the capability list.
// Ie Galaxy S supports CIF but does not list CIF as a supported capability.
// Motorola Droid Camera does not work with frame rate above 15fps.
// http://code.google.com/p/android/issues/detail?id=5514#c0
private void VerifyCapabilities() {
// Nexus S or Galaxy S
if(android.os.Build.DEVICE.equals("GT-I9000") ||
android.os.Build.DEVICE.equals("crespo")) {
CaptureCapabilityAndroid specificCapability =
new CaptureCapabilityAndroid();
specificCapability.width = 352;
specificCapability.height = 288;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 176;
specificCapability.height = 144;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 320;
specificCapability.height = 240;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
}
// Motorola Milestone Camera server does not work at 30fps
// even though it reports that it can
if(android.os.Build.MANUFACTURER.equals("motorola") &&
android.os.Build.DEVICE.equals("umts_sholes")) {
for(AndroidVideoCaptureDevice device:deviceList) {
for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
capability.maxFPS=15;
}
}
}
}
private void AddDeviceSpecificCapability(
CaptureCapabilityAndroid specificCapability) {
for(AndroidVideoCaptureDevice device:deviceList) {
boolean foundCapability = false;
for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
if(capability.width == specificCapability.width &&
capability.height == specificCapability.height) {
foundCapability = true;
break;
}
}
if(foundCapability==false) {
CaptureCapabilityAndroid newCaptureCapabilies[]=
new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
for(int i = 0; i < device.captureCapabilies.length; ++i) {
newCaptureCapabilies[i+1] = device.captureCapabilies[i];
}
newCaptureCapabilies[0] = specificCapability;
device.captureCapabilies = newCaptureCapabilies;
}
}
}
// Returns the number of Capture devices that is supported
public int NumberOfDevices() {
return deviceList.size();
}
public String GetDeviceUniqueName(int deviceNumber) {
if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
return null;
}
return deviceList.get(deviceNumber).deviceUniqueName;
}
public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return (CaptureCapabilityAndroid[]) device.captureCapabilies;
}
}
return null;
}
// Returns the camera orientation as described by
// android.hardware.Camera.CameraInfo.orientation
public int GetOrientation(String deviceUniqueId) {
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return device.orientation;
}
}
return -1;
}
// Returns an instance of VideoCaptureAndroid.
public VideoCaptureAndroid AllocateCamera(int id, long context,
String deviceUniqueId) {
try {
if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId);
Camera camera = null;
AndroidVideoCaptureDevice deviceToUse = null;
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
// Found the wanted camera
deviceToUse = device;
switch(device.frontCameraType) {
case GalaxyS:
camera = AllocateGalaxySFrontCamera();
break;
case HTCEvo:
camera = AllocateEVOFrontFacingCamera();
break;
default:
// From Android 2.3 and onwards)
if(android.os.Build.VERSION.SDK_INT>8)
camera=Camera.open(device.index);
else
camera=Camera.open(); // Default camera
}
}
}
if(camera == null) {
return null;
}
if(VERBOSE) {
Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid");
}
return new VideoCaptureAndroid(id,context,camera,deviceToUse);
}catch (Exception ex) {
Log.e("*WEBRTC*", "AllocateCamera Failed to open camera- ex " +
ex.getLocalizedMessage());
}
return null;
}
// Searches for a front facing camera device. This is device specific code.
private Camera.Parameters
SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
throws SecurityException, IllegalArgumentException,
NoSuchMethodException, ClassNotFoundException,
IllegalAccessException, InvocationTargetException {
// Check the id of the opened camera device
// Returns null on X10 and 1 on Samsung Galaxy S.
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
String cameraId = parameters.get("camera-id");
if(cameraId != null && cameraId.equals("1")) {
// This might be a Samsung Galaxy S with a front facing camera.
try {
parameters.set("camera-id", 2);
camera.setParameters(parameters);
parameters = camera.getParameters();
newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
newDevice.orientation = 0;
camera.release();
return parameters;
}
catch (Exception ex) {
//Nope - it did not work.
Log.e("*WEBRTC*", "Init Failed to open front camera camera - ex " +
ex.getLocalizedMessage());
}
}
camera.release();
//Check for Evo front facing camera
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
boolean exists = file.exists();
if (!exists) {
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
exists = file.exists();
}
if(exists) {
newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
newDevice.orientation = 0;
Camera evCamera = AllocateEVOFrontFacingCamera();
parameters = evCamera.getParameters();
evCamera.release();
return parameters;
}
return null;
}
// Returns a handle to HTC front facing camera.
// The caller is responsible to release it on completion.
private Camera AllocateEVOFrontFacingCamera()
throws SecurityException, NoSuchMethodException,
ClassNotFoundException, IllegalArgumentException,
IllegalAccessException, InvocationTargetException {
String classPath = null;
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
boolean exists = file.exists();
if (!exists){
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
exists = file.exists();
}
if(!exists) {
return null;
}
String dexOutputDir = "";
if(context != null) {
dexOutputDir = context.getFilesDir().getAbsolutePath();
File mFilesDir = new File(dexOutputDir, "dexfiles");
if(!mFilesDir.exists()){
//Log.e("*WEBRTCN*", "Directory doesn't exists");
if(!mFilesDir.mkdirs()) {
//Log.e("*WEBRTCN*", "Unable to create files directory");
}
}
}
dexOutputDir += "/dexfiles";
DexClassLoader loader =
new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
null, ClassLoader.getSystemClassLoader());
Method method = loader.loadClass(classPath).getDeclaredMethod(
"getFrontFacingCamera", (Class[]) null);
Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
return camera;
}
// Returns a handle to Galaxy S front camera.
// The caller is responsible to release it on completion.
private Camera AllocateGalaxySFrontCamera() {
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
parameters.set("camera-id",2);
camera.setParameters(parameters);
return camera;
}
}


@ -17,50 +17,50 @@
#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
namespace webrtc {
namespace videocapturemodule {
class VideoCaptureAndroid : public VideoCaptureImpl {
public:
static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext);
static WebRtc_Word32 AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached);
static WebRtc_Word32 ReleaseAndroidDeviceInfoObjects(bool attached);
VideoCaptureAndroid(const WebRtc_Word32 id);
virtual WebRtc_Word32 Init(const WebRtc_Word32 id,
const char* deviceUniqueIdUTF8);
virtual WebRtc_Word32 StartCapture(
const VideoCaptureCapability& capability);
virtual WebRtc_Word32 StopCapture();
virtual bool CaptureStarted();
virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
protected:
virtual ~VideoCaptureAndroid();
static void JNICALL ProvideCameraFrame (JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length, jlong context);
DeviceInfoAndroid _capInfo;
jobject _javaCaptureObj; // Java Camera object.
VideoCaptureCapability _frameInfo;
bool _captureStarted;
static JavaVM* g_jvm;
static jclass g_javaCmClass;
static jclass g_javaCmDevInfoClass;
//Static java object implementing the needed device info functions;
static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
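The static JNICALL ProvideCameraFrame declared above backs the Java declaration native void ProvideCameraFrame(byte[] data, int length, long captureObject) in VideoCaptureAndroid.java. The sketch below shows how such a method is typically bound with the standard JNI RegisterNatives call; the helper itself is hypothetical, since the actual wiring is done in video_capture_android.cc, which is not part of this diff.

// Sketch only: binding the Java native method to a C++ callback whose
// signature matches the declaration above. The callback is passed in
// because ProvideCameraFrame is protected; in the real code the binding
// would naturally happen inside a static member such as SetAndroidObjects.
#include <jni.h>

typedef void (JNICALL *CameraFrameFn)(JNIEnv*, jobject, jbyteArray, jint, jlong);

static int RegisterCameraFrameCallback(JNIEnv* env,
                                       jclass javaCmClass,
                                       CameraFrameFn callback) {
  // (byte[] data, int length, long captureObject) -> void  ==>  "([BIJ)V"
  JNINativeMethod nativeFunctions[] = {
    { "ProvideCameraFrame", "([BIJ)V", reinterpret_cast<void*>(callback) },
  };
  if (env->RegisterNatives(javaCmClass, nativeFunctions, 1) != 0) {
    return -1;  // Binding failed; calls from Java would otherwise throw.
  }
  return 0;
}

SetAndroidObjects (or a JNI_OnLoad hook) would be the natural caller, passing g_javaCmClass and &VideoCaptureAndroid::ProvideCameraFrame.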


@ -25,239 +25,239 @@ import android.opengl.GLSurfaceView;
import android.util.Log;
public class ViEAndroidGLES20 extends GLSurfaceView
implements GLSurfaceView.Renderer {
// True if onSurfaceCreated has been called.
private boolean surfaceCreated = false;
private boolean openGLCreated = false;
// True if NativeFunctionsRegistered has been called.
private boolean nativeFunctionsRegisted = false;
private ReentrantLock nativeFunctionLock = new ReentrantLock();
// Address of Native object that will do the drawing.
private long nativeObject = 0;
private int viewWidth = 0;
private int viewHeight = 0;
implements GLSurfaceView.Renderer {
// True if onSurfaceCreated has been called.
private boolean surfaceCreated = false;
private boolean openGLCreated = false;
// True if NativeFunctionsRegistered has been called.
private boolean nativeFunctionsRegisted = false;
private ReentrantLock nativeFunctionLock = new ReentrantLock();
// Address of Native object that will do the drawing.
private long nativeObject = 0;
private int viewWidth = 0;
private int viewHeight = 0;
public static boolean UseOpenGL2(Object renderWindow) {
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
public ViEAndroidGLES20(Context context) {
super(context);
// Setup the context factory for 2.0 rendering.
// See ContextFactory class definition below
setEGLContextFactory(new ContextFactory());
// We need to choose an EGLConfig that matches the format of
// our surface exactly. This is going to be done in our
// custom config chooser. See ConfigChooser class definition below
// Use RGB 565 without an alpha channel.
setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) );
this.setRenderer(this);
this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
// IsSupported
// Return true if this device support Open GL ES 2.0 rendering.
public static boolean IsSupported(Context context) {
ActivityManager am =
(ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo();
if(info.reqGlEsVersion >= 0x20000) {
// Open GL ES 2.0 is supported.
return true;
}
return false;
}
public void onDrawFrame(GL10 gl) {
nativeFunctionLock.lock();
if(!nativeFunctionsRegisted || !surfaceCreated) {
nativeFunctionLock.unlock();
return;
public static boolean UseOpenGL2(Object renderWindow) {
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
if(!openGLCreated) {
if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
return; // Failed to create OpenGL
}
openGLCreated = true; // Created OpenGL successfully
}
DrawNative(nativeObject); // Draw the new frame
nativeFunctionLock.unlock();
}
public ViEAndroidGLES20(Context context) {
super(context);
public void onSurfaceChanged(GL10 gl, int width, int height) {
surfaceCreated = true;
viewWidth = width;
viewHeight = height;
// Setup the context factory for 2.0 rendering.
// See ContextFactory class definition below
setEGLContextFactory(new ContextFactory());
nativeFunctionLock.lock();
if(nativeFunctionsRegisted) {
if(CreateOpenGLNative(nativeObject,width,height) == 0)
openGLCreated = true;
}
nativeFunctionLock.unlock();
}
// We need to choose an EGLConfig that matches the format of
// our surface exactly. This is going to be done in our
// custom config chooser. See ConfigChooser class definition below
// Use RGB 565 without an alpha channel.
setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) );
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void RegisterNativeObject(long nativeObject) {
nativeFunctionLock.lock();
nativeObject = nativeObject;
nativeFunctionsRegisted = true;
nativeFunctionLock.unlock();
}
public void DeRegisterNativeObject() {
nativeFunctionLock.lock();
nativeFunctionsRegisted = false;
openGLCreated = false;
nativeObject = 0;
nativeFunctionLock.unlock();
}
public void ReDraw() {
if(surfaceCreated) {
// Request the renderer to redraw using the render thread context.
this.requestRender();
}
}
// EGL Context factory used for creating EGL 2.0 context
// on Android 2.1(and later,
// though there are simpler ways in 2.2)
// Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
private static class ContextFactory
implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl,
EGLDisplay display,
EGLConfig eglConfig) {
//checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
// Create an Open GL ES 2.0 context
EGLContext context = egl.eglCreateContext(display,
eglConfig,
EGL10.EGL_NO_CONTEXT,
attrib_list);
checkEglError("ContextFactory eglCreateContext", egl);
return context;
this.setRenderer(this);
this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void destroyContext(EGL10 egl, EGLDisplay display,
EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
}
}
// Code is from the NDK samples\hello-gl2\src\com\android\gl2jni
private static class ConfigChooser
implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
// IsSupported
// Return true if this device support Open GL ES 2.0 rendering.
public static boolean IsSupported(Context context) {
ActivityManager am =
(ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo();
if(info.reqGlEsVersion >= 0x20000) {
// Open GL ES 2.0 is supported.
return true;
}
return false;
}
// This EGL config specification is used to specify 2.0 rendering.
// We use a minimum size of 4 bits for red/green/blue, but will
// perform actual matching in chooseConfig() below.
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public void onDrawFrame(GL10 gl) {
nativeFunctionLock.lock();
if(!nativeFunctionsRegisted || !surfaceCreated) {
nativeFunctionLock.unlock();
return;
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
// Get the number of minimally matching EGL configurations
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
// Allocate then read the array of minimally matching EGL configs
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs,
numConfigs, num_config);
// Now return the "best" one
return chooseConfig(egl, display, configs);
if(!openGLCreated) {
if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
return; // Failed to create OpenGL
}
openGLCreated = true; // Created OpenGL successfully
}
DrawNative(nativeObject); // Draw the new frame
nativeFunctionLock.unlock();
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
public void onSurfaceChanged(GL10 gl, int width, int height) {
surfaceCreated = true;
viewWidth = width;
viewHeight = height;
nativeFunctionLock.lock();
if(nativeFunctionsRegisted) {
if(CreateOpenGLNative(nativeObject,width,height) == 0)
openGLCreated = true;
}
nativeFunctionLock.unlock();
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void RegisterNativeObject(long nativeObject) {
nativeFunctionLock.lock();
nativeObject = nativeObject;
nativeFunctionsRegisted = true;
nativeFunctionLock.unlock();
}
public void DeRegisterNativeObject() {
nativeFunctionLock.lock();
nativeFunctionsRegisted = false;
openGLCreated = false;
nativeObject = 0;
nativeFunctionLock.unlock();
}
public void ReDraw() {
if(surfaceCreated) {
// Request the renderer to redraw using the render thread context.
this.requestRender();
}
}
// EGL Context factory used for creating EGL 2.0 context
// on Android 2.1(and later,
// though there are simpler ways in 2.2)
// Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
private static class ContextFactory
implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl,
EGLDisplay display,
EGLConfig eglConfig) {
//checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
// Create an Open GL ES 2.0 context
EGLContext context = egl.eglCreateContext(display,
eglConfig,
EGL10.EGL_NO_CONTEXT,
attrib_list);
checkEglError("ContextFactory eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display,
EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
}
}
// Code is from the NDK samples\hello-gl2\src\com\android\gl2jni
private static class ConfigChooser
implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
// This EGL config specification is used to specify 2.0 rendering.
// We use a minimum size of 4 bits for red/green/blue, but will
// perform actual matching in chooseConfig() below.
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
// Get the number of minimally matching EGL configurations
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
// Allocate then read the array of minimally matching EGL configs
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs,
numConfigs, num_config);
// Now return the "best" one
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize &&
b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute,
int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
private native int CreateOpenGLNative(long nativeObject,
int width, int height);
private native void DrawNative(long nativeObject);
}
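Note (illustrative, not part of this change): the ContextFactory and ConfigChooser above are installed on the GLSurfaceView in the ViEAndroidGLES20 constructor, which falls outside this hunk; only its setRenderer()/setRenderMode() calls are visible in the diff. On Android 2.2+ the same effect can be obtained with GLSurfaceView.setEGLContextClientVersion(2) -- the "simpler ways in 2.2" the comment above alludes to. A minimal sketch with a hypothetical class name:

import android.content.Context;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

// Hypothetical example, not part of the WebRTC sources.
class Gles20ViewSketch extends GLSurfaceView implements GLSurfaceView.Renderer {
    public Gles20ViewSketch(Context context) {
        super(context);
        // On Android 2.1 a custom EGLContextFactory/EGLConfigChooser pair
        // (as above) selects an ES 2.0 context; from API level 8 this call suffices.
        setEGLContextClientVersion(2);
        // Render only when requestRender() is called, as ReDraw() does above.
        setRenderer(this);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) { }
    public void onSurfaceChanged(GL10 gl, int width, int height) { }
    public void onDrawFrame(GL10 gl) { }
}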


@ -16,43 +16,43 @@ import android.view.SurfaceView;
public class ViERenderer {
// View used for local rendering that Cameras can use for Video Overlay.
private static SurfaceHolder g_localRenderer;
public static SurfaceView CreateRenderer(Context context) {
return CreateRenderer(context,false);
}
public static SurfaceView CreateRenderer(Context context,
boolean useOpenGLES2) {
if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context))
return new ViEAndroidGLES20(context);
else
return new SurfaceView(context);
}
// Creates a SurfaceView to be used by Android Camera
// service to display a local preview.
// This needs to be used on Android prior to version 2.1
// in order to run the camera.
// Call this function before ViECapture::StartCapture.
// The created view needs to be added to a visible layout
// after a camera has been allocated
// (with the call ViECapture::AllocateCaptureDevice).
// I.e.
// CreateLocalRenderer
// ViECapture::AllocateCaptureDevice
// LinearLayout.addview
// ViECapture::StartCapture
public static SurfaceView CreateLocalRenderer(Context context) {
SurfaceView localRender = new SurfaceView(context);
g_localRenderer = localRender.getHolder();
g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
return localRender;
}
public static SurfaceHolder GetLocalRenderer() {
return g_localRenderer;
}
}
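Note (illustrative, not part of this change): the CreateLocalRenderer comment above prescribes a strict ordering -- create the local view, allocate the capture device, add the view to a visible layout, then start capture. A sketch of the Java side of that sequence, assuming ViERenderer lives in the org.webrtc.videoengine package and with the native ViECapture calls reduced to comments:

import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceView;
import android.widget.LinearLayout;
import org.webrtc.videoengine.ViERenderer;

// Hypothetical example, not part of the WebRTC sources.
public class LocalPreviewSketch extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        LinearLayout layout = new LinearLayout(this);
        setContentView(layout);

        // 1. Create the local render view before any capture calls.
        SurfaceView localView = ViERenderer.CreateLocalRenderer(this);
        // 2. ViECapture::AllocateCaptureDevice runs in the app's native layer.
        // 3. Only then add the view to a visible layout.
        layout.addView(localView);
        // 4. Finally ViECapture::StartCapture starts the camera (native).
    }
}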


@ -22,134 +22,134 @@ import android.view.SurfaceHolder.Callback;
public class ViESurfaceRenderer implements Callback {
// the bitmap used for drawing.
private Bitmap bitmap = null;
private ByteBuffer byteBuffer;
private SurfaceHolder surfaceHolder;
// Rect of the source bitmap to draw
private Rect srcRect = new Rect();
// Rect of the destination canvas to draw to
private Rect dstRect = new Rect();
private int dstHeight = 0;
private int dstWidth = 0;
private float dstTopScale = 0;
private float dstBottomScale = 1;
private float dstLeftScale = 0;
private float dstRightScale = 1;
public ViESurfaceRenderer(SurfaceView view) {
surfaceHolder = view.getHolder();
if(surfaceHolder == null)
return;
Canvas canvas = surfaceHolder.lockCanvas();
if(canvas != null) {
Rect dst =surfaceHolder.getSurfaceFrame();
if(dst != null) {
dstRect = dst;
dstHeight =dstRect.bottom-dstRect.top;
dstWidth = dstRect.right-dstRect.left;
}
surfaceHolder.unlockCanvasAndPost(canvas);
}
surfaceHolder.addCallback(this);
}
public void surfaceChanged(SurfaceHolder holder, int format,
int in_width, int in_height) {
dstHeight = in_height;
dstWidth = in_width;
dstRect.left = (int)(dstLeftScale*dstWidth);
dstRect.top = (int)(dstTopScale*dstHeight);
dstRect.bottom = (int)(dstBottomScale*dstHeight);
dstRect.right = (int) (dstRightScale*dstWidth);
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO(leozwang) Auto-generated method stub
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO(leozwang) Auto-generated method stub
}
public Bitmap CreateBitmap(int width, int height) {
if (bitmap == null) {
try {
android.os.Process.setThreadPriority(
android.os.Process.THREAD_PRIORITY_DISPLAY);
}
catch (Exception e) {
}
}
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = height;
srcRect.right = width;
return bitmap;
}
public ByteBuffer CreateByteBuffer(int width, int height) {
if (bitmap == null) {
try {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
}
catch (Exception e) {
}
}
try {
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
byteBuffer = ByteBuffer.allocateDirect(width*height*2);
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = height;
srcRect.right = width;
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
bitmap = null;
byteBuffer = null;
}
return byteBuffer;
}
public void SetCoordinates(float left, float top,
float right, float bottom) {
dstLeftScale = left;
dstTopScale = top;
dstRightScale = right;
dstBottomScale = bottom;
dstRect.left = (int)(dstLeftScale*dstWidth);
dstRect.top = (int)(dstTopScale*dstHeight);
dstRect.bottom = (int)(dstBottomScale*dstHeight);
dstRect.right = (int) (dstRightScale*dstWidth);
}
public void DrawByteBuffer() {
if(byteBuffer == null)
return;
byteBuffer.rewind();
bitmap.copyPixelsFromBuffer(byteBuffer);
DrawBitmap();
}
public void DrawBitmap() {
if(bitmap == null)
return;
Canvas canvas = surfaceHolder.lockCanvas();
if(canvas != null) {
canvas.drawBitmap(bitmap, srcRect, dstRect, null);
surfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
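Note (illustrative, not part of this change): a typical caller of the byte-buffer path above creates the renderer and buffer once, then fills the buffer and calls DrawByteBuffer() per frame. A sketch, assuming RGB565 input (2 bytes per pixel, matching the allocateDirect(width*height*2) call above) and the same package as ViESurfaceRenderer:

import android.view.SurfaceView;
import java.nio.ByteBuffer;

// Hypothetical helper, not part of the WebRTC sources.
class SurfaceRendererSketch {
    private final ViESurfaceRenderer renderer;
    private final ByteBuffer buffer;
    private final int frameSize;

    SurfaceRendererSketch(SurfaceView view, int width, int height) {
        renderer = new ViESurfaceRenderer(view);
        buffer = renderer.CreateByteBuffer(width, height);
        frameSize = width * height * 2; // RGB565: 2 bytes per pixel
    }

    void drawFrame(byte[] rgb565) {
        if (buffer == null || rgb565.length != frameSize)
            return;
        buffer.rewind();
        buffer.put(rgb565);        // fill the direct buffer
        renderer.DrawByteBuffer(); // copy into the bitmap and blit it
    }
}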


@ -28,14 +28,10 @@
namespace webrtc {
JavaVM* VideoRenderAndroid::g_jvm = NULL;
WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
g_jvm = (JavaVM*) javaVM;
return 0;
}
VideoRenderAndroid::VideoRenderAndroid(
@ -53,40 +49,34 @@ VideoRenderAndroid::VideoRenderAndroid(
_javaRenderEvent(*EventWrapper::Create()),
_lastJavaRenderEvent(0),
_javaRenderJniEnv(NULL),
_javaRenderThread(NULL) {
}
VideoRenderAndroid::~VideoRenderAndroid() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"VideoRenderAndroid dtor");
if (_javaRenderThread)
StopRender();
for (MapItem* item = _streamsMap.First(); item != NULL; item
= _streamsMap.Next(item)) { // Delete streams
delete static_cast<AndroidStream*> (item->GetItem());
}
delete &_javaShutdownEvent;
delete &_javaRenderEvent;
delete &_critSect;
}
WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id) {
CriticalSectionScoped cs(&_critSect);
_id = id;
return 0;
}
WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/) {
return -1;
}
VideoRenderCallback*
@ -94,56 +84,48 @@ VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right,
const float bottom) {
CriticalSectionScoped cs(&_critSect);
AndroidStream* renderStream = NULL;
MapItem* item = _streamsMap.Find(streamId);
if (item) {
renderStream = (AndroidStream*) (item->GetItem());
if (NULL != renderStream) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
"%s: Render stream already exists", __FUNCTION__);
return renderStream;
}
}
renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
right, bottom, *this);
if (renderStream) {
_streamsMap.Insert(streamId, renderStream);
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return NULL;
}
return renderStream;
}
WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId) {
CriticalSectionScoped cs(&_critSect);
MapItem* item = _streamsMap.Find(streamId);
if (item) {
delete (AndroidStream*) item->GetItem();
_streamsMap.Erase(streamId);
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
@ -178,10 +160,9 @@ WebRtc_Word32 VideoRenderAndroid::StartRender() {
}
unsigned int tId = 0;
if (_javaRenderThread->Start(tId))
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: thread started: %u", __FUNCTION__, tId);
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not start send thread", __FUNCTION__);
@ -190,36 +171,32 @@ WebRtc_Word32 VideoRenderAndroid::StartRender() {
return 0;
}
WebRtc_Word32 VideoRenderAndroid::StopRender() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
{
CriticalSectionScoped cs(&_critSect);
if (!_javaRenderThread)
{
return -1;
}
_javaShutDownFlag = true;
_javaRenderEvent.Set();
}
_javaShutdownEvent.Wait(3000);
CriticalSectionScoped cs(&_critSect);
_javaRenderThread->SetNotAlive();
if (_javaRenderThread->Stop()) {
delete _javaRenderThread;
_javaRenderThread = NULL;
}
else {
assert(false);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Not able to stop thread, leaking", __FUNCTION__);
_javaRenderThread = NULL;
}
return 0;
}
void VideoRenderAndroid::ReDraw() {
@ -237,65 +214,55 @@ bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
bool VideoRenderAndroid::JavaRenderThreadProcess()
{
_javaRenderEvent.Wait(1000);
CriticalSectionScoped cs(&_critSect);
if (!_javaRenderJniEnv) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
// Get the JNI env for this thread
if ((res < 0) || !_javaRenderJniEnv) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, _javaRenderJniEnv);
return false;
}
}
for (MapItem* item = _streamsMap.First(); item != NULL;
item = _streamsMap.Next(item)) {
static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
_javaRenderJniEnv);
}
if (_javaShutDownFlag) {
if (g_jvm->DetachCurrentThread() < 0)
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
else {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: Java thread detached", __FUNCTION__);
}
_javaRenderJniEnv = false;
_javaShutDownFlag = false;
_javaShutdownEvent.Set();
return false; // Do not run this thread again.
}
return true;
}
VideoRenderType VideoRenderAndroid::RenderType() {
return _renderType;
}
RawVideoType VideoRenderAndroid::PerferedVideoType() {
return kVideoI420;
}
bool VideoRenderAndroid::FullScreen() {
return false;
}
WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
@ -374,4 +341,4 @@ WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
return -1;
}
} // namespace webrtc


@ -20,32 +20,24 @@ namespace webrtc {
//#define ANDROID_LOG
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
// The object a module user uses to send new frames to the java renderer
// Base class for android render streams.
class AndroidStream : public VideoRenderCallback {
public:
// DeliverFrame is called from a thread connected to the Java VM.
// Used for Delivering frame for rendering.
virtual void DeliverFrame(JNIEnv* jniEnv)=0;
virtual ~AndroidStream() {};
};
class VideoRenderAndroid: IVideoRender {
public:
static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
VideoRenderAndroid(const WebRtc_Word32 id,
@ -82,11 +74,7 @@ public:
virtual void ReDraw();
// Properties
virtual VideoRenderType RenderType();
@ -165,4 +153,4 @@ public:
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_


@ -31,208 +31,182 @@ AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
const bool fullscreen) :
VideoRenderAndroid(id, videoRenderType, window, fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL) {
}
bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
if (!g_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"RendererAndroid():UseOpenGL No JVM set.");
return false;
}
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"RendererAndroid(): Could not attach thread to JVM (%d, %p)",
res, env);
return false;
}
isAttached = true;
}
// get the renderer class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not find ViEAndroidRenderer class",
__FUNCTION__);
return false;
}
// get the method ID for UseOpenGL
jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
"UseOpenGL2",
"(Ljava/lang/Object;)Z");
if (cidUseOpenGL == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not get UseOpenGL ID", __FUNCTION__);
return false;
}
jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
cidUseOpenGL, (jobject) window);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return res;
}
AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Renderer dtor");
if (g_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
}
else {
isAttached = true;
}
}
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s): Not a valid Java VM pointer.", __FUNCTION__);
return -1;
}
if (!_ptrWindow) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"(%s): No window have been provided.", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
// get the ViEAndroidGLES20 class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViEAndroidGLES20", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that
// we are referencing it after this function has returned)
_javaRenderClass =
reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not create Java SurfaceHolder class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(_ptrWindow);
if (!_javaRenderObj) {
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
__FUNCTION__);
return 0;
}
AndroidStream*
@ -243,20 +217,18 @@ AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
__FUNCTION__, streamId);
AndroidNativeOpenGl2Channel* stream =
new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
_javaRenderObj);
if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
return stream;
else {
delete stream;
}
return NULL;
}
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
@ -267,194 +239,170 @@ AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
_registerNativeCID(NULL), _deRegisterNativeCID(NULL),
_openGLRenderer(streamId) {
}
AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Channel dtor");
delete &_renderCritSect;
if (_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
} else {
isAttached = true;
}
}
if (env && _deRegisterNativeCID) {
env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
}
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
if (!_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClass) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
// get the method ID for the ReDraw function
_redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
if (_redrawCid == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get ReDraw ID", __FUNCTION__);
return -1;
}
_registerNativeCID = env->GetMethodID(javaRenderClass,
"RegisterNativeObject", "(J)V");
if (_registerNativeCID == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get RegisterNativeObject ID", __FUNCTION__);
return -1;
}
_deRegisterNativeCID = env->GetMethodID(javaRenderClass,
"DeRegisterNativeObject", "()V");
if (_deRegisterNativeCID == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get DeRegisterNativeObject ID",
__FUNCTION__);
return -1;
}
JNINativeMethod nativeFunctions[2] = {
{ "DrawNative",
"(J)V",
(void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
{ "CreateOpenGLNative",
"(JII)I",
(void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
};
if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
"%s: Registered native functions", __FUNCTION__);
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: Failed to register native functions", __FUNCTION__);
return -1;
}
env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
// Detach this thread if it was attached
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
return -1;
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
return 0;
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
const WebRtc_UWord32 /*streamId*/,
VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/*Implements AndroidStream
* Calls the Java object and render the buffer in _bufferToRender
*/
void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
//TickTime timeNow=TickTime::Now();
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
// "%s: time to deliver %lld" ,__FUNCTION__,
// (TickTime::Now()-timeNow).Milliseconds());
}
/*
@ -470,8 +418,7 @@ void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
renderChannel->DrawNative();
}
void AndroidNativeOpenGl2Channel::DrawNative() {
_openGLRenderer.Render(_bufferToRender);
}
@ -498,4 +445,4 @@ jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
return _openGLRenderer.Setup(width, height);
}
} //namespace webrtc


@ -16,7 +16,6 @@
#include <android/bitmap.h>
#endif
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
@ -29,15 +28,14 @@
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
VideoRenderAndroid(id,videoRenderType,window,fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL) {
}
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
@ -460,25 +458,25 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
#ifdef ANDROID_NDK_8_OR_ABOVE
if (_bitmapWidth != _bufferToRender.Width() ||
_bitmapHeight != _bufferToRender.Height()) {
// Create the bitmap to write to
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
"%u", __FUNCTION__, _bufferToRender.Width(),
_bufferToRender.Height());
if (_javaBitmapObj) {
jniEnv->DeleteGlobalRef(_javaBitmapObj);
_javaBitmapObj = NULL;
}
jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
_createBitmapCid,
videoFrame.Width(),
videoFrame.Height());
_javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
if (!_javaBitmapObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java Bitmap object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_bitmapWidth = _bufferToRender.Width();
_bitmapHeight = _bufferToRender.Height();
@ -518,14 +516,14 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
"%d",__FUNCTION__,
_bufferToRender.Width(), _bufferToRender.Height());
if (_javaByteBufferObj) {
jniEnv->DeleteGlobalRef(_javaByteBufferObj);
_javaByteBufferObj = NULL;
_directBuffer = NULL;
}
jobject javaByteBufferObj =
jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
_bufferToRender.Width(),
_bufferToRender.Height());
_javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
if (!_javaByteBufferObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
@ -544,8 +542,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
// Android requires a vertically flipped image compared to std convert.
// This is done by giving a negative height input.
const int conversionResult =
ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
_directBuffer, _bitmapWidth, -_bitmapHeight);
if (conversionResult < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
" failed.", __FUNCTION__);


@ -20,9 +20,8 @@ namespace webrtc {
class CriticalSectionWrapper;
class AndroidSurfaceViewChannel : public AndroidStream {
public:
AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
@ -67,9 +66,8 @@ public:
unsigned int _bitmapHeight;
};
class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
public:
AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
@ -89,6 +87,6 @@ public:
jclass _javaRenderClass;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_


@ -33,318 +33,287 @@ namespace webrtc {
const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
const char VideoRenderOpenGles20::g_vertextShader[] = {
    "attribute vec4 aPosition;\n"
    "attribute vec2 aTextureCoord;\n"
    "varying vec2 vTextureCoord;\n"
    "void main() {\n"
    " gl_Position = aPosition;\n"
    " vTextureCoord = aTextureCoord;\n"
    "}\n" };
// The fragment shader.
// Do YUV to RGB565 conversion.
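Note: in equation form, the fragment shader below applies the following video-range YUV-to-RGB conversion (coefficients copied from the shader source; Y, U, V are the sampled texture values in [0, 1]):

\[ y' = 1.1643\,(Y - 0.0625), \qquad u' = U - 0.5, \qquad v' = V - 0.5 \]
\[ R = y' + 1.5958\,v', \qquad G = y' - 0.39173\,u' - 0.81290\,v', \qquad B = y' + 2.017\,u' \]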
const char VideoRenderOpenGles20::g_fragmentShader[] = {
    "precision mediump float;\n"
    "uniform sampler2D Ytex;\n"
    "uniform sampler2D Utex,Vtex;\n"
    "varying vec2 vTextureCoord;\n"
    "void main(void) {\n"
    " float nx,ny,r,g,b,y,u,v;\n"
    " mediump vec4 txl,ux,vx;"
    " nx=vTextureCoord[0];\n"
    " ny=vTextureCoord[1];\n"
    " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
    " u=texture2D(Utex,vec2(nx,ny)).r;\n"
    " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
    //" y = v;\n"+
    " y=1.1643*(y-0.0625);\n"
    " u=u-0.5;\n"
    " v=v-0.5;\n"
    " r=y+1.5958*v;\n"
    " g=y-0.39173*u-0.81290*v;\n"
    " b=y+2.017*u;\n"
    " gl_FragColor=vec4(r,g,b,1.0);\n"
    "}\n" };
VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
_id(id),
_textureWidth(-1),
_textureHeight(-1) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
__FUNCTION__, (int) _id);
const GLfloat vertices[20] = {
// X, Y, Z, U, V
-1, -1, 0, 0, 1, // Bottom Left
1, -1, 0, 1, 1, //Bottom Right
1, 1, 0, 1, 0, //Top Right
-1, 1, 0, 0, 0 }; //Top Left
memcpy(_vertices, vertices, sizeof(_vertices));
}
VideoRenderOpenGles20::~VideoRenderOpenGles20() {
}
WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
WebRtc_Word32 height) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__, (int) width,
(int) height);
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
int maxTextureImageUnits[2];
int maxTextureSize[2];
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: number of textures %d, size %d", __FUNCTION__,
(int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
_program = createProgram(g_vertextShader, g_fragmentShader);
if (!_program) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not create program", __FUNCTION__);
return -1;
}
int positionHandle = glGetAttribLocation(_program, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (positionHandle == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aPosition handle", __FUNCTION__);
return -1;
}
int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (textureHandle == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aTextureCoord handle", __FUNCTION__);
return -1;
}
// set the vertices array in the shader
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
5 * sizeof(GLfloat), _vertices);
checkGlError("glVertexAttribPointer aPosition");
glEnableVertexAttribArray(positionHandle);
checkGlError("glEnableVertexAttribArray positionHandle");
// set the texture coordinate array in the shader
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
* sizeof(GLfloat), &_vertices[3]);
checkGlError("glVertexAttribPointer maTextureHandle");
glEnableVertexAttribArray(textureHandle);
checkGlError("glEnableVertexAttribArray textureHandle");
glUseProgram(_program);
int i = glGetUniformLocation(_program, "Ytex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
checkGlError("glUniform1i Ytex");
i = glGetUniformLocation(_program, "Utex");
checkGlError("glGetUniformLocation Utex");
glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
checkGlError("glUniform1i Utex");
i = glGetUniformLocation(_program, "Vtex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
checkGlError("glUniform1i");
glViewport(0, 0, width, height);
checkGlError("glViewport");
return 0;
}
// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
                                                    const float left,
                                                    const float top,
                                                    const float right,
                                                    const float bottom) {
  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Wrong coordinates", __FUNCTION__);
    return -1;
  }

  //  X,  Y, Z, U, V
  // -1, -1, 0, 0, 1,  // Bottom Left
  //  1, -1, 0, 1, 1,  // Bottom Right
  //  1,  1, 0, 1, 0,  // Top Right
  // -1,  1, 0, 0, 0   // Top Left

  // Bottom Left
  _vertices[0] = (left * 2) - 1;
  _vertices[1] = -1 * (2 * bottom) + 1;
  _vertices[2] = zOrder;

  // Bottom Right
  _vertices[5] = (right * 2) - 1;
  _vertices[6] = -1 * (2 * bottom) + 1;
  _vertices[7] = zOrder;

  // Top Right
  _vertices[10] = (right * 2) - 1;
  _vertices[11] = -1 * (2 * top) + 1;
  _vertices[12] = zOrder;

  // Top Left
  _vertices[15] = (left * 2) - 1;
  _vertices[16] = -1 * (2 * top) + 1;
  _vertices[17] = zOrder;

  return 0;
}
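// Worked example of the mapping above: with left = 0, top = 0, right = 1,
// bottom = 1 the formulas give the full-viewport quad from the commented
// table, e.g.
//   Bottom Left: x = (0 * 2) - 1 = -1,  y = -1 * (2 * 1) + 1 = -1
//   Top Right:   x = (1 * 2) - 1 =  1,  y = -1 * (2 * 0) + 1 =  1
// A quarter-surface render in the lower-right corner would be, for instance,
// SetCoordinates(0, 0.5f, 0.5f, 1.0f, 1.0f). The texture coordinates stored
// at _vertices[3], [4], [8], [9], ... are left untouched by this call.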
WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
               __FUNCTION__, (int) _id);

  if (frameToRender.Length() == 0) {
    return -1;
  }

  //glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
  //glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);

  glUseProgram(_program);
  checkGlError("glUseProgram");

  if (_textureWidth != (GLsizei) frameToRender.Width() ||
      _textureHeight != (GLsizei) frameToRender.Height()) {
    SetupTextures(frameToRender);
  }
  else {
    UpdateTextures(frameToRender);
  }

  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
  checkGlError("glDrawArrays");

  return 0;
}
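// A minimal usage sketch, assuming an EGL/GLES2 context is current on the
// calling thread, that the frame carries a packed I420 buffer, and that the
// surface owner swaps buffers after Render() returns. Names such as
// surfaceWidth, surfaceHeight and frame are placeholders, not part of this
// class:
//
//   VideoRenderOpenGles20 renderer(0);
//   renderer.Setup(surfaceWidth, surfaceHeight);          // build program, bind attributes
//   renderer.SetCoordinates(0, 0.0f, 0.0f, 1.0f, 1.0f);   // full-surface quad
//   // per decoded frame:
//   renderer.Render(frame);   // (re)uploads the Y/U/V planes and draws two triangles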
GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
                                         const char* pSource) {
  GLuint shader = glCreateShader(shaderType);
  if (shader) {
    glShaderSource(shader, 1, &pSource, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
      GLint infoLen = 0;
      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
      if (infoLen) {
        char* buf = (char*) malloc(infoLen);
        if (buf) {
          glGetShaderInfoLog(shader, infoLen, NULL, buf);
          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                       "%s: Could not compile shader %d: %s",
                       __FUNCTION__, shaderType, buf);
          free(buf);
        }
        glDeleteShader(shader);
        shader = 0;
      }
    }
  }
  return shader;
}
GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
                                            const char* pFragmentSource) {
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
  if (!vertexShader) {
    return 0;
  }

  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
  if (!pixelShader) {
    return 0;
  }

  GLuint program = glCreateProgram();
  if (program) {
    glAttachShader(program, vertexShader);
    checkGlError("glAttachShader");
    glAttachShader(program, pixelShader);
    checkGlError("glAttachShader");
    glLinkProgram(program);
    GLint linkStatus = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
    if (linkStatus != GL_TRUE) {
      GLint bufLength = 0;
      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
      if (bufLength) {
        char* buf = (char*) malloc(bufLength);
        if (buf) {
          glGetProgramInfoLog(program, bufLength, NULL, buf);
          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                       "%s: Could not link program: %s",
                       __FUNCTION__, buf);
          free(buf);
        }
      }
      glDeleteProgram(program);
      program = 0;
    }
  }
  return program;
}
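// This is the usual GLES2 compile-and-link helper pair. A sketch of how the
// program object would typically be built from the static shader sources
// declared in the header (the error handling shown here is illustrative, not
// taken from this file):
//   _program = createProgram(g_vertextShader, g_fragmentShader);
//   if (!_program) {
//     return -1;  // caller treats a zero program as a setup failure
//   }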
void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
  const char *v = (const char *) glGetString(s);
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
               name, v);
}
void VideoRenderOpenGles20::checkGlError(const char* op) {
@ -358,89 +327,87 @@ void VideoRenderOpenGles20::checkGlError(const char* op) {
#endif
}
void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: width %d, height %d length %u", __FUNCTION__,
               frameToRender.Width(), frameToRender.Height(),
               frameToRender.Length());

  const GLsizei width = frameToRender.Width();
  const GLsizei height = frameToRender.Height();

  glGenTextures(3, _textureIds); // Generate the Y, U and V texture
  GLuint currentTextureId = _textureIds[0]; // Y
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);

  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
               GL_LUMINANCE, GL_UNSIGNED_BYTE,
               (const GLvoid*) frameToRender.Buffer());

  currentTextureId = _textureIds[1]; // U
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);

  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
               GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);

  currentTextureId = _textureIds[2]; // V
  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);

  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
               GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
  checkGlError("SetupTextures");

  _textureWidth = width;
  _textureHeight = height;
}
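// Worked example of the plane offsets used above: for a 320x240 I420 frame
// the packed buffer holds
//   Y: 320 * 240         = 76800 bytes at offset 0
//   U: (320/2) * (240/2) = 19200 bytes at offset 76800
//   V: (320/2) * (240/2) = 19200 bytes at offset 76800 + 19200 = 96000
// which is exactly uComponent = Buffer() + width * height and
// vComponent = uComponent + (width * height) / 4.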
void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
  const GLsizei width = frameToRender.Width();
  const GLsizei height = frameToRender.Height();

  GLuint currentTextureId = _textureIds[0]; // Y
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
                  GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer());

  currentTextureId = _textureIds[1]; // U
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);
  const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
                  GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);

  currentTextureId = _textureIds[2]; // V
  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, currentTextureId);
  const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
                  GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
  checkGlError("UpdateTextures");
}
} // namespace webrtc


@ -19,44 +19,43 @@
namespace webrtc
{
class VideoRenderOpenGles20 {
 public:
  VideoRenderOpenGles20(WebRtc_Word32 id);
  ~VideoRenderOpenGles20();

  WebRtc_Word32 Setup(WebRtc_Word32 width, WebRtc_Word32 height);
  WebRtc_Word32 Render(const VideoFrame& frameToRender);
  WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
                               const float left,
                               const float top,
                               const float right,
                               const float bottom);

 private:
  void printGLString(const char *name, GLenum s);
  void checkGlError(const char* op);
  GLuint loadShader(GLenum shaderType, const char* pSource);
  GLuint createProgram(const char* pVertexSource,
                       const char* pFragmentSource);
  void SetupTextures(const VideoFrame& frameToRender);
  void UpdateTextures(const VideoFrame& frameToRender);

  WebRtc_Word32 _id;
  GLuint _textureIds[3]; // Texture ids of the Y, U and V textures.
  GLuint _program;
  GLuint _vPositionHandle;
  GLsizei _textureWidth;
  GLsizei _textureHeight;

  GLfloat _vertices[20];
  static const char g_indices[];

  static const char g_vertextShader[];
  static const char g_fragmentShader[];
};

} // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_


@ -1,5 +1,5 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -11,7 +11,7 @@
package org.webrtc.videoengineapp;
public interface IViEAndroidCallback {
    public int UpdateStats(int frameRateI, int bitRateI,
                           int packetLoss, int frameRateO,
                           int bitRateO);
}


@ -1,5 +1,5 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -18,121 +18,121 @@ import android.view.SurfaceView;
public class ViEAndroidJavaAPI {
    public ViEAndroidJavaAPI(Context context) {
        Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
        System.loadLibrary("webrtc-video-demo-jni");

        Log.d("*WEBRTCJ*", "Calling native init...");
        if (!NativeInit(context)) {
            Log.e("*WEBRTCJ*", "Native init failed");
            throw new RuntimeException("Native init failed");
        }
        else {
            Log.d("*WEBRTCJ*", "Native init successful");
        }
        String a = "";
        a.getBytes();
    }

    // API Native
    private native boolean NativeInit(Context context);

    // Video Engine API
    // Initialization and Termination functions
    public native int GetVideoEngine();
    public native int Init(boolean enableTrace);
    public native int Terminate();

    public native int StartSend(int channel);
    public native int StopRender(int channel);
    public native int StopSend(int channel);
    public native int StartReceive(int channel);
    public native int StopReceive(int channel);

    // Channel functions
    public native int CreateChannel(int voiceChannel);

    // Receiver & Destination functions
    public native int SetLocalReceiver(int channel, int port);
    public native int SetSendDestination(int channel, int port, byte ipadr[]);

    // Codec
    public native int SetReceiveCodec(int channel, int codecNum,
                                      int intbitRate, int width,
                                      int height, int frameRate);
    public native int SetSendCodec(int channel, int codecNum,
                                   int intbitRate, int width,
                                   int height, int frameRate);

    // Rendering
    public native int AddRemoteRenderer(int channel, Object glSurface);
    public native int RemoveRemoteRenderer(int channel);
    public native int StartRender(int channel);

    // Capture
    public native int StartCamera(int channel, int cameraNum);
    public native int StopCamera(int cameraId);
    public native int GetCameraOrientation(int cameraNum);
    public native int SetRotation(int cameraId, int degrees);

    // NACK
    public native int EnableNACK(int channel, boolean enable);

    // PLI for H.264
    public native int EnablePLI(int channel, boolean enable);

    // Enable stats callback
    public native int SetCallback(int channel, IViEAndroidCallback callback);

    // Voice Engine API
    // Create and Delete functions
    public native boolean VoE_Create(Activity context);
    public native boolean VoE_Delete();

    // Initialization and Termination functions
    public native int VoE_Authenticate(String key);
    public native int VoE_Init(boolean enableTrace);
    public native int VoE_Terminate();

    // Channel functions
    public native int VoE_CreateChannel();
    public native int VoE_DeleteChannel(int channel);

    // Receiver & Destination functions
    public native int VoE_SetLocalReceiver(int channel, int port);
    public native int VoE_SetSendDestination(int channel, int port,
                                             String ipaddr);

    // Media functions
    public native int VoE_StartListen(int channel);
    public native int VoE_StartPlayout(int channel);
    public native int VoE_StartSend(int channel);
    public native int VoE_StopListen(int channel);
    public native int VoE_StopPlayout(int channel);
    public native int VoE_StopSend(int channel);

    // Volume
    public native int VoE_SetSpeakerVolume(int volume);

    // Hardware
    public native int VoE_SetLoudspeakerStatus(boolean enable);

    // Playout file locally
    public native int VoE_StartPlayingFileLocally(int channel,
                                                  String fileName,
                                                  boolean loop);
    public native int VoE_StopPlayingFileLocally(int channel);

    // Play file as microphone
    public native int VoE_StartPlayingFileAsMicrophone(int channel,
                                                       String fileName,
                                                       boolean loop);
    public native int VoE_StopPlayingFileAsMicrophone(int channel);

    // Codec-setting functions
    public native int VoE_NumOfCodecs();
    public native int VoE_SetSendCodec(int channel, int index);

    // VE functions
    public native int VoE_SetECStatus(boolean enable, int mode,
                                      int AESmode, int AESattenuation);
    public native int VoE_SetAGCStatus(boolean enable, int mode);
    public native int VoE_SetNSStatus(boolean enable, int mode);
}