VideoCaptureAndroid: rewrote the (standalone) implementation of video capture on Android.

Besides being ~40% the size of the previous implementation, this makes it so
that VideoCaptureAndroid can stop and restart capture, which is necessary to
support onPause/onResume reasonably on Android.

BUG=1407
R=henrike@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2334004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4915 4adac7df-926f-26a2-2b94-8c16560cd09d
fischman@webrtc.org 2013-10-03 18:23:13 +00:00
parent ddc5a19ce9
commit 4e65e07e41
28 changed files with 745 additions and 1550 deletions

View File

@@ -1241,7 +1241,7 @@ JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
JNIEnv* jni, jclass, jobject context) {
CHECK(g_jvm, "JNI_OnLoad failed to run?");
bool failure = false;
failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm);
failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
return !failure;
}
@@ -1543,7 +1543,7 @@ JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
CHECK(device_manager->Init(), "DeviceManager::Init() failed");
cricket::Device device;
if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
LOG(LS_ERROR) << "GetVideoCaptureDevice failed";
LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
return 0;
}
talk_base::scoped_ptr<cricket::VideoCapturer> capturer(
@@ -1566,6 +1566,28 @@ JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
return (jlong)renderer.release();
}
JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
cricket::VideoCapturer* capturer =
reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
talk_base::scoped_ptr<cricket::VideoFormatPod> format(
new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
capturer->Stop();
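// Ownership of |format| passes to the Java caller (VideoSource), which must
// hand it back via VideoSource_restart() or VideoSource_freeNativeVideoFormat().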
return jlongFromPointer(format.release());
}
JOW(void, VideoSource_restart)(
JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
talk_base::scoped_ptr<cricket::VideoFormatPod> format(
reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
StartCapturing(cricket::VideoFormat(*format));
}
JOW(void, VideoSource_freeNativeVideoFormat)(
JNIEnv* jni, jclass, jlong j_p) {
delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
}
JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
return JavaStringFromStdString(
jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());

View File

@@ -28,9 +28,46 @@
package org.webrtc;
/** Java version of VideoSourceInterface. */
/**
* Java version of VideoSourceInterface, extended with stop/restart
* functionality to allow explicit control of the camera device on Android,
* where there is no support for multiple open capture devices and the cost of
* holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
* its output to the encoder) can be too high to bear.
*/
public class VideoSource extends MediaSource {
private long nativeVideoFormatAtStop;
public VideoSource(long nativeSource) {
super(nativeSource);
}
// Stop capture feeding this source.
public void stop() {
nativeVideoFormatAtStop = stop(nativeSource);
}
// Restart capture feeding this source. stop() must have been called since
// the last call to restart() (if any). Note that this isn't "start()";
// sources are started by default at birth.
public void restart() {
restart(nativeSource, nativeVideoFormatAtStop);
nativeVideoFormatAtStop = 0;
}
@Override
public void dispose() {
if (nativeVideoFormatAtStop != 0) {
freeNativeVideoFormat(nativeVideoFormatAtStop);
nativeVideoFormatAtStop = 0;
}
super.dispose();
}
// This stop() returns an owned C++ VideoFormat pointer for use in restart()
// and dispose().
private static native long stop(long nativeSource);
private static native void restart(
long nativeSource, long nativeVideoFormatAtStop);
private static native void freeNativeVideoFormat(long nativeVideoFormat);
}
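A minimal lifecycle sketch (hypothetical caller code, not part of this diff) of how the new API is meant to be driven; the AppRTCDemoActivity change below does exactly this from onPause()/onResume():
// Sketch only: assumes an Activity owning a VideoSource named videoSource.
videoSource.stop();     // onPause(): close the camera, retain the capture format.
videoSource.restart();  // onResume(): reopen the camera at the retained format.
videoSource.dispose();  // Teardown: frees any format still held from stop().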

View File

@@ -156,19 +156,18 @@ public class AppRTCDemoActivity extends Activity
public void onPause() {
super.onPause();
vsv.onPause();
// TODO(fischman): IWBN to support pause/resume, but the WebRTC codebase
// isn't ready for that yet; e.g.
// https://code.google.com/p/webrtc/issues/detail?id=1407
// Instead, simply exit instead of pausing (the alternative leads to
// system-borking with wedged cameras; e.g. b/8224551)
disconnectAndExit();
if (videoSource != null) {
videoSource.stop();
}
}
@Override
public void onResume() {
// The onResume() is a lie! See TODO(fischman) in onPause() above.
super.onResume();
vsv.onResume();
if (videoSource != null) {
videoSource.restart();
}
}
@Override
@@ -249,7 +248,8 @@ public class AppRTCDemoActivity extends Activity
}
@Override
public void onDestroy() {
protected void onDestroy() {
disconnectAndExit();
super.onDestroy();
}
@@ -524,7 +524,6 @@ public class AppRTCDemoActivity extends Activity
return;
}
quit[0] = true;
wakeLock.release();
if (pc != null) {
pc.dispose();
pc = null;
@@ -542,6 +541,7 @@ public class AppRTCDemoActivity extends Activity
factory.dispose();
factory = null;
}
wakeLock.release();
finish();
}
}

View File

@@ -75,6 +75,7 @@ public class VideoStreamsView
public VideoStreamsView(Context c, Point screenDimensions) {
super(c);
this.screenDimensions = screenDimensions;
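// Keep the EGL context (and its GL resources) alive across onPause()/onResume(),
// which now suspend and resume capture instead of tearing down the activity.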
setPreserveEGLContextOnPause(true);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(RENDERMODE_WHEN_DIRTY);

View File

@@ -107,7 +107,6 @@
'android_java_files': [
'<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java',
'<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java',
'<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java',
'<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java',
'<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java',
'<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java',

View File

@@ -12,39 +12,9 @@
#include <assert.h>
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace {
class AttachThreadScoped {
public:
explicit AttachThreadScoped(JavaVM* jvm)
: attached_(false), jvm_(jvm), env_(NULL) {
jint ret_val = jvm->GetEnv(reinterpret_cast<void**>(&env_),
REQUIRED_JNI_VERSION);
if (ret_val == JNI_EDETACHED) {
// Attach the thread to the Java VM.
ret_val = jvm_->AttachCurrentThread(&env_, NULL);
attached_ = ret_val == JNI_OK;
assert(attached_);
}
}
~AttachThreadScoped() {
if (attached_ && (jvm_->DetachCurrentThread() < 0)) {
assert(false);
}
}
JNIEnv* env() { return env_; }
private:
bool attached_;
JavaVM* jvm_;
JNIEnv* env_;
};
} // namespace
namespace webrtc {
static JavaVM* g_jvm_ = NULL;

View File

@@ -18,8 +18,6 @@
namespace webrtc {
#define REQUIRED_JNI_VERSION JNI_VERSION_1_4
class AudioManagerJni {
public:
AudioManagerJni();

View File

@@ -0,0 +1,34 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
#define WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
#include <jni.h>
namespace webrtc {
// Attach thread to JVM if necessary and detach at scope end if originally
// attached.
class AttachThreadScoped {
public:
explicit AttachThreadScoped(JavaVM* jvm);
~AttachThreadScoped();
JNIEnv* env();
private:
bool attached_;
JavaVM* jvm_;
JNIEnv* env_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_

View File

@@ -0,0 +1,37 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/utility/interface/helpers_android.h"
#include <assert.h>
#include <stddef.h>
namespace webrtc {
AttachThreadScoped::AttachThreadScoped(JavaVM* jvm)
: attached_(false), jvm_(jvm), env_(NULL) {
jint ret_val = jvm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
if (ret_val == JNI_EDETACHED) {
// Attach the thread to the Java VM.
ret_val = jvm_->AttachCurrentThread(&env_, NULL);
attached_ = ret_val == JNI_OK;
assert(attached_);
}
}
AttachThreadScoped::~AttachThreadScoped() {
if (attached_ && (jvm_->DetachCurrentThread() < 0)) {
assert(false);
}
}
JNIEnv* AttachThreadScoped::env() { return env_; }
} // namespace webrtc

View File

@@ -21,6 +21,7 @@
'../interface/audio_frame_operations.h',
'../interface/file_player.h',
'../interface/file_recorder.h',
'../interface/helpers_android.h',
'../interface/process_thread.h',
'../interface/rtp_dump.h',
'audio_frame_operations.cc',
@@ -30,6 +31,7 @@
'file_player_impl.h',
'file_recorder_impl.cc',
'file_recorder_impl.h',
'helpers_android.cc',
'process_thread_impl.cc',
'process_thread_impl.h',
'rtp_dump_impl.cc',

View File

@@ -10,9 +10,14 @@
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include <stdio.h>
#include <algorithm>
#include <sstream>
#include <vector>
#include "json/json.h"
#include "third_party/icu/source/common/unicode/unistr.h"
#include "webrtc/modules/video_capture/android/video_capture_android.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -22,65 +27,136 @@ namespace webrtc
namespace videocapturemodule
{
static jclass g_capabilityClass = NULL;
// static
void DeviceInfoAndroid::SetAndroidCaptureClasses(jclass capabilityClass) {
g_capabilityClass = capabilityClass;
static std::string ResolutionsToString(
const std::vector<std::pair<int, int> >& pairs) {
std::stringstream stream;
for (size_t i = 0; i < pairs.size(); ++i) {
if (i > 0)
stream << ", ";
stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
}
return stream.str();
}
VideoCaptureModule::DeviceInfo*
VideoCaptureImpl::CreateDeviceInfo (const int32_t id) {
videocapturemodule::DeviceInfoAndroid *deviceInfo =
new videocapturemodule::DeviceInfoAndroid(id);
if (deviceInfo && deviceInfo->Init() != 0) {
delete deviceInfo;
deviceInfo = NULL;
struct AndroidCameraInfo {
std::string name;
int min_mfps, max_mfps; // FPS*1000.
bool front_facing;
int orientation;
std::vector<std::pair<int, int> > resolutions; // Pairs are: (width,height).
std::string ToString() {
std::stringstream stream;
stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
<< "], front_facing: " << front_facing
<< ", orientation: " << orientation << ", resolutions: ["
<< ResolutionsToString(resolutions) << "]";
return stream.str();
}
return deviceInfo;
};
// Camera info; populated during DeviceInfoAndroid::Initialize() and immutable
// thereafter.
static std::vector<AndroidCameraInfo>* g_camera_info = NULL;
// Set |*index| to the index of |name| in g_camera_info or return false if no
// match found.
static bool FindCameraIndexByName(const std::string& name, size_t* index) {
for (size_t i = 0; i < g_camera_info->size(); ++i) {
if (g_camera_info->at(i).name == name) {
*index = i;
return true;
}
}
return false;
}
// Returns a pointer to the named member of g_camera_info, or NULL if no match
// is found.
static AndroidCameraInfo* FindCameraInfoByName(const std::string& name) {
size_t index = 0;
if (FindCameraIndexByName(name, &index))
return &g_camera_info->at(index);
return NULL;
}
// static
void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
// TODO(henrike): this "if" would make a lot more sense as an assert, but
// Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine() and
// Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate() conspire to
// prevent this. Once that code is made to only
// VideoEngine::SetAndroidObjects() once per process, this can turn into an
// assert.
if (g_camera_info)
return;
g_camera_info = new std::vector<AndroidCameraInfo>();
jclass j_info_class =
jni->FindClass("org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
assert(j_info_class);
jmethodID j_initialize = jni->GetStaticMethodID(
j_info_class, "getDeviceInfo", "()Ljava/lang/String;");
jstring j_json_info = static_cast<jstring>(
jni->CallStaticObjectMethod(j_info_class, j_initialize));
const jchar* jchars = jni->GetStringChars(j_json_info, NULL);
icu::UnicodeString ustr(jchars, jni->GetStringLength(j_json_info));
jni->ReleaseStringChars(j_json_info, jchars);
std::string json_info;
ustr.toUTF8String(json_info);
Json::Value cameras;
Json::Reader reader(Json::Features::strictMode());
bool parsed = reader.parse(json_info, cameras);
if (!parsed) {
std::stringstream stream;
stream << "Failed to parse configuration:\n"
<< reader.getFormattedErrorMessages();
assert(false);
return;
}
for (Json::ArrayIndex i = 0; i < cameras.size(); ++i) {
const Json::Value& camera = cameras[i];
AndroidCameraInfo info;
info.name = camera["name"].asString();
info.min_mfps = camera["min_mfps"].asInt();
info.max_mfps = camera["max_mfps"].asInt();
info.front_facing = camera["front_facing"].asBool();
info.orientation = camera["orientation"].asInt();
Json::Value sizes = camera["sizes"];
for (Json::ArrayIndex j = 0; j < sizes.size(); ++j) {
const Json::Value& size = sizes[j];
info.resolutions.push_back(std::make_pair(
size["width"].asInt(), size["height"].asInt()));
}
g_camera_info->push_back(info);
}
}
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
const int32_t id) {
return new videocapturemodule::DeviceInfoAndroid(id);
}
DeviceInfoAndroid::DeviceInfoAndroid(const int32_t id) :
DeviceInfoImpl(id) {
}
DeviceInfoAndroid::~DeviceInfoAndroid() {
}
bool DeviceInfoAndroid::FindCameraIndex(const char* deviceUniqueIdUTF8,
size_t* index) {
return FindCameraIndexByName(deviceUniqueIdUTF8, index);
}
int32_t DeviceInfoAndroid::Init() {
return 0;
}
DeviceInfoAndroid::~DeviceInfoAndroid() {
}
uint32_t DeviceInfoAndroid::NumberOfDevices() {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
return 0;
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s GetMethodId", __FUNCTION__);
// get the method ID for the Android Java GetDeviceUniqueName name.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"NumberOfDevices",
"()I");
jint numberOfDevices = 0;
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s Calling Number of devices", __FUNCTION__);
numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
if (numberOfDevices > 0)
return numberOfDevices;
return 0;
return g_camera_info->size();
}
int32_t DeviceInfoAndroid::GetDeviceName(
@@ -91,235 +167,56 @@ int32_t DeviceInfoAndroid::GetDeviceName(
uint32_t deviceUniqueIdUTF8Length,
char* /*productUniqueIdUTF8*/,
uint32_t /*productUniqueIdUTF8Length*/) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
int32_t result = 0;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached)!= 0)
if (deviceNumber >= g_camera_info->size())
return -1;
const AndroidCameraInfo& info = g_camera_info->at(deviceNumber);
if (info.name.length() + 1 > deviceNameLength ||
info.name.length() + 1 > deviceUniqueIdUTF8Length) {
return -1;
// get the method ID for the Android Java GetDeviceUniqueName name.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
"(I)Ljava/lang/String;");
if (cid != NULL) {
jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, deviceNumber);
if (javaDeviceNameObj == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get device name for device %d.",
__FUNCTION__, (int) deviceNumber);
result = -1;
} else {
jboolean isCopy;
const char* javaDeviceNameChar = env->GetStringUTFChars(
(jstring) javaDeviceNameObj
,&isCopy);
const jsize javaDeviceNameCharLength =
env->GetStringUTFLength((jstring) javaDeviceNameObj);
if ((uint32_t) javaDeviceNameCharLength <
deviceUniqueIdUTF8Length) {
memcpy(deviceUniqueIdUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id, "%s: deviceUniqueIdUTF8 to short.",
__FUNCTION__);
result = -1;
}
if ((uint32_t) javaDeviceNameCharLength < deviceNameLength) {
memcpy(deviceNameUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
javaDeviceNameChar);
} // javaDeviceNameObj == NULL
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find GetDeviceUniqueName function id",
__FUNCTION__);
result = -1;
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, (int) result);
return result;
memcpy(deviceNameUTF8, info.name.c_str(), info.name.length() + 1);
memcpy(deviceUniqueIdUTF8, info.name.c_str(), info.name.length() + 1);
return 0;
}
int32_t DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) {
_captureCapabilities.clear();
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL)
return -1;
// Find the capability class
jclass javaCapClass = g_capabilityClass;
if (javaCapClass == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: SetAndroidCaptureClasses must be called first!",
__FUNCTION__);
return -1;
}
// get the method ID for the Android Java GetCapabilityArray .
jmethodID cid = env->GetMethodID(
javaCmDevInfoClass,
"GetCapabilityArray",
"(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
if (cid == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetCapabilityArray.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniquName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get an array with capabilities back.
jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, capureIdString);
if (!javaCapabilitiesObj) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to call java GetCapabilityArray.",
__FUNCTION__);
return -1;
}
jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I");
jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get field Id.", __FUNCTION__);
return -1;
}
const jsize numberOfCapabilities =
env->GetArrayLength((jarray) javaCapabilitiesObj);
for (jsize i = 0; i < numberOfCapabilities; ++i) {
for (size_t i = 0; i < info->resolutions.size(); ++i) {
const std::pair<int, int>& size = info->resolutions[i];
VideoCaptureCapability cap;
jobject capabilityElement = env->GetObjectArrayElement(
(jobjectArray) javaCapabilitiesObj,
i);
cap.width = env->GetIntField(capabilityElement, widthField);
cap.height = env->GetIntField(capabilityElement, heigtField);
cap.expectedCaptureDelay = _expectedCaptureDelay;
cap.width = size.first;
cap.height = size.second;
cap.maxFPS = info->max_mfps / 1000;
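// max_mfps is in milliframes per second; e.g. 30000 mfps is 30 fps.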
cap.expectedCaptureDelay = kExpectedCaptureDelay;
cap.rawType = kVideoNV21;
cap.maxFPS = env->GetIntField(capabilityElement, maxFpsField);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"%s: Cap width %d, height %d, fps %d", __FUNCTION__,
cap.width, cap.height, cap.maxFPS);
_captureCapabilities.push_back(cap);
}
_lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
_lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
_lastUsedDeviceNameLength + 1);
memcpy(_lastUsedDeviceName,
deviceUniqueIdUTF8,
_lastUsedDeviceNameLength + 1);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"CreateCapabilityMap %d", _captureCapabilities.size());
return _captureCapabilities.size();
}
int32_t DeviceInfoAndroid::GetOrientation(
const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL ||
!VideoCaptureImpl::RotationFromDegrees(info->orientation, &orientation)) {
return -1;
}
return 0;
}
// get the method ID for the Android Java GetOrientation .
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
"(Ljava/lang/String;)I");
if (cid == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetOrientation.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniquName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get the orientation.
jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
capureIdString);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
int32_t retValue = 0;
switch (jorientation) {
case -1: // Error
orientation = kCameraRotate0;
retValue = -1;
break;
case 0:
orientation = kCameraRotate0;
break;
case 90:
orientation = kCameraRotate90;
break;
case 180:
orientation = kCameraRotate180;
break;
case 270:
orientation = kCameraRotate270;
break;
case 360:
orientation = kCameraRotate0;
break;
}
return retValue;
void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8,
int* min_mfps, int* max_mfps) {
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL)
return;
*min_mfps = info->min_mfps;
*max_mfps = info->max_mfps;
}
} // namespace videocapturemodule

View File

@@ -21,19 +21,18 @@ namespace webrtc
namespace videocapturemodule
{
// Android logging, uncomment to print trace to
// logcat instead of trace file/callback
// #include <android/log.h>
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
class DeviceInfoAndroid : public DeviceInfoImpl {
public:
static void SetAndroidCaptureClasses(jclass capabilityClass);
DeviceInfoAndroid(const int32_t id);
int32_t Init();
static void Initialize(JNIEnv* env);
DeviceInfoAndroid(int32_t id);
virtual ~DeviceInfoAndroid();
// Set |*index| to the index of the camera matching |deviceUniqueIdUTF8|, or
// return false if no match.
bool FindCameraIndex(const char* deviceUniqueIdUTF8, size_t* index);
virtual int32_t Init();
virtual uint32_t NumberOfDevices();
virtual int32_t GetDeviceName(
uint32_t deviceNumber,
@@ -53,9 +52,14 @@ class DeviceInfoAndroid : public DeviceInfoImpl {
uint32_t /*positionY*/) { return -1; }
virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation);
// Populate |min_mfps| and |max_mfps| with the supported range of the device.
void GetFpsRange(const char* deviceUniqueIdUTF8,
int* min_mfps,
int* max_mfps);
private:
bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
enum {_expectedCaptureDelay = 190};
enum { kExpectedCaptureDelay = 190};
};
} // namespace videocapturemodule

View File

@@ -1,17 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
public class CaptureCapabilityAndroid {
public int width = 0;
public int height = 0;
public int maxFPS = 0;
}

View File

@@ -14,9 +14,6 @@ import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.locks.ReentrantLock;
import org.webrtc.videoengine.CaptureCapabilityAndroid;
import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Rect;
@@ -28,240 +25,177 @@ import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
// Wrapper for android Camera, with support for direct local preview rendering.
// Threading notes: this class is called from ViE C++ code, and from Camera &
// SurfaceHolder Java callbacks. Since these calls happen on different threads,
// the entry points to this class are all synchronized. This shouldn't present
// a performance bottleneck because only onPreviewFrame() is called more than
// once (and is called serially on a single thread), so the lock should be
// uncontended.
public class VideoCaptureAndroid implements PreviewCallback, Callback {
private final static String TAG = "WEBRTC-JC";
private Camera camera; // Only non-null while capturing.
private final int id;
private final Camera.CameraInfo info;
private final long native_capturer; // |VideoCaptureAndroid*| in C++.
private SurfaceHolder localPreview;
private SurfaceTexture dummySurfaceTexture;
// Arbitrary queue depth. Higher number means more memory allocated & held,
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private final int numCaptureBuffers = 3;
private Camera camera;
private AndroidVideoCaptureDevice currentDevice = null;
public ReentrantLock previewBufferLock = new ReentrantLock();
// This lock takes sync with StartCapture and SurfaceChanged
private ReentrantLock captureLock = new ReentrantLock();
private int PIXEL_FORMAT = ImageFormat.NV21;
PixelFormat pixelFormat = new PixelFormat();
// True when the C++ layer has ordered the camera to be started.
private boolean isCaptureStarted = false;
private boolean isCaptureRunning = false;
private boolean isSurfaceReady = false;
public VideoCaptureAndroid(int id, long native_capturer) {
this.id = id;
this.native_capturer = native_capturer;
this.info = new Camera.CameraInfo();
Camera.getCameraInfo(id, info);
}
private final int numCaptureBuffers = 3;
private int expectedFrameSize = 0;
private int orientation = 0;
private int id = 0;
// C++ callback context variable.
private long context = 0;
private SurfaceHolder localPreview = null;
private SurfaceTexture dummySurfaceTexture = null;
// True if this class owns the preview video buffers.
private boolean ownsBuffers = false;
// Called by native code. Returns true if capturer is started.
//
// Note that this actually opens the camera, which can be a slow operation and
// thus might be done on a background thread, but ViE API needs a
// synchronous success return value so we can't do that.
private synchronized boolean startCapture(
int width, int height, int min_mfps, int max_mfps) {
Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
min_mfps + ":" + max_mfps);
Throwable error = null;
try {
camera = Camera.open(id);
private int mCaptureWidth = -1;
private int mCaptureHeight = -1;
private int mCaptureFPS = -1;
public static
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
Log.d(TAG, "DeleteVideoCaptureAndroid");
if (captureAndroid.camera == null) {
return;
localPreview = ViERenderer.GetLocalRenderer();
if (localPreview != null) {
localPreview.addCallback(this);
if (localPreview.getSurface() != null &&
localPreview.getSurface().isValid()) {
camera.setPreviewDisplay(localPreview);
}
captureAndroid.StopCapture();
captureAndroid.camera.release();
captureAndroid.camera = null;
captureAndroid.context = 0;
}
public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera,
AndroidVideoCaptureDevice in_device) {
id = in_id;
context = in_context;
camera = in_camera;
currentDevice = in_device;
}
private int tryStartCapture(int width, int height, int frameRate) {
if (camera == null) {
Log.e(TAG, "Camera not initialized %d" + id);
return -1;
}
Log.d(TAG, "tryStartCapture: " + width +
"x" + height +", frameRate: " + frameRate +
", isCaptureRunning: " + isCaptureRunning +
", isSurfaceReady: " + isSurfaceReady +
", isCaptureStarted: " + isCaptureStarted);
if (isCaptureRunning || !isCaptureStarted) {
return 0;
}
CaptureCapabilityAndroid currentCapability =
new CaptureCapabilityAndroid();
currentCapability.width = width;
currentCapability.height = height;
currentCapability.maxFPS = frameRate;
PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(currentCapability.width,
currentCapability.height);
parameters.setPreviewFormat(PIXEL_FORMAT);
parameters.setPreviewFrameRate(currentCapability.maxFPS);
} else {
// No local renderer (we only care about onPreviewFrame() buffers, not a
// directly-displayed UI element). Camera won't capture without
// setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
// hand it over to Camera, but never listen for frame-ready callbacks,
// and never call updateTexImage on it.
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e(TAG, "setParameters failed", e);
return -1;
}
int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
byte[] buffer = null;
for (int i = 0; i < numCaptureBuffers; i++) {
buffer = new byte[bufSize];
camera.addCallbackBuffer(buffer);
}
camera.setPreviewCallbackWithBuffer(this);
ownsBuffers = true;
camera.startPreview();
previewBufferLock.lock();
expectedFrameSize = bufSize;
isCaptureRunning = true;
previewBufferLock.unlock();
return 0;
}
public int StartCapture(int width, int height, int frameRate) {
Log.d(TAG, "StartCapture width " + width +
" height " + height +" frame rate " + frameRate);
// Get the local preview SurfaceHolder from the static render class
localPreview = ViERenderer.GetLocalRenderer();
if (localPreview != null) {
if (localPreview.getSurface() != null &&
localPreview.getSurface().isValid()) {
surfaceCreated(localPreview);
}
localPreview.addCallback(this);
} else {
// No local renderer. Camera won't capture without
// setPreview{Texture,Display}, so we create a dummy SurfaceTexture
// and hand it over to Camera, but never listen for frame-ready
// callbacks, and never call updateTexImage on it.
captureLock.lock();
try {
dummySurfaceTexture = new SurfaceTexture(42);
camera.setPreviewTexture(dummySurfaceTexture);
} catch (IOException e) {
throw new RuntimeException(e);
}
captureLock.unlock();
}
captureLock.lock();
isCaptureStarted = true;
mCaptureWidth = width;
mCaptureHeight = height;
mCaptureFPS = frameRate;
int res = tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
captureLock.unlock();
return res;
}
public int StopCapture() {
Log.d(TAG, "StopCapture");
try {
previewBufferLock.lock();
isCaptureRunning = false;
previewBufferLock.unlock();
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
} catch (RuntimeException e) {
Log.e(TAG, "Failed to stop camera", e);
return -1;
}
isCaptureStarted = false;
return 0;
}
native void ProvideCameraFrame(byte[] data, int length, long captureObject);
public void onPreviewFrame(byte[] data, Camera camera) {
previewBufferLock.lock();
// The following line is for debug only
// Log.v(TAG, "preview frame length " + data.length +
// " context" + context);
if (isCaptureRunning) {
// If StartCapture has been called but not StopCapture
// Call the C++ layer with the captured frame
if (data.length == expectedFrameSize) {
ProvideCameraFrame(data, expectedFrameSize, context);
if (ownsBuffers) {
// Give the video buffer to the camera service again.
camera.addCallbackBuffer(data);
}
}
}
previewBufferLock.unlock();
}
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
public void SetPreviewRotation(int rotation) {
Log.v(TAG, "SetPreviewRotation:" + rotation);
if (camera == null) {
return;
}
int resultRotation = 0;
if (currentDevice.frontCameraType ==
VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
// this is a 2.3 or later front facing camera.
// SetDisplayOrientation will flip the image horizontally
// before doing the rotation.
resultRotation = ( 360 - rotation ) % 360; // compensate the mirror
}
else {
// Back facing or 2.2 or previous front camera
resultRotation = rotation;
}
camera.setDisplayOrientation(resultRotation);
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width, int height) {
Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
captureLock.lock();
try {
if (camera != null) {
camera.setPreviewDisplay(holder);
}
// "42" because http://goo.gl/KaEn8
dummySurfaceTexture = new SurfaceTexture(42);
camera.setPreviewTexture(dummySurfaceTexture);
} catch (IOException e) {
Log.e(TAG, "Failed to set preview surface!", e);
throw new RuntimeException(e);
}
captureLock.unlock();
}
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(width, height);
parameters.setPreviewFpsRange(min_mfps, max_mfps);
int format = ImageFormat.NV21;
parameters.setPreviewFormat(format);
camera.setParameters(parameters);
int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
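// NV21 is 12 bits per pixel, so e.g. a 640x480 frame needs 640*480*12/8 = 460800-byte buffers.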
for (int i = 0; i < numCaptureBuffers; i++) {
camera.addCallbackBuffer(new byte[bufSize]);
}
camera.setPreviewCallbackWithBuffer(this);
camera.startPreview();
return true;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "startCapture failed", error);
if (camera != null) {
stopCapture();
}
return false;
}
// Called by native code. Returns true when camera is known to be stopped.
private synchronized boolean stopCapture() {
Log.d(TAG, "stopCapture");
if (camera == null) {
throw new RuntimeException("Camera is already stopped!");
}
Throwable error = null;
try {
if (localPreview != null) {
localPreview.removeCallback(this);
camera.setPreviewDisplay(null);
} else {
camera.setPreviewTexture(null);
}
camera.setPreviewCallbackWithBuffer(null);
camera.stopPreview();
camera.release();
camera = null;
return true;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "Failed to stop camera", error);
return false;
}
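// Implemented in video_capture_android.cc; hands the frame to the C++
// VideoCaptureAndroid instance identified by |captureObject|.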
private native void ProvideCameraFrame(
byte[] data, int length, long captureObject);
public synchronized void onPreviewFrame(byte[] data, Camera camera) {
ProvideCameraFrame(data, data.length, native_capturer);
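// The native call above consumes the frame synchronously (the C++ side copies
// it before returning), so the buffer can go straight back to the camera's queue.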
camera.addCallbackBuffer(data);
}
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
// Called by native code.
private synchronized void setPreviewRotation(int rotation) {
Log.v(TAG, "setPreviewRotation:" + rotation);
if (camera == null) {
return;
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
captureLock.lock();
try {
if (camera != null) {
camera.setPreviewDisplay(null);
}
} catch (IOException e) {
Log.e(TAG, "Failed to clear preview surface!", e);
}
captureLock.unlock();
int resultRotation = 0;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// This is a front facing camera. SetDisplayOrientation will flip
// the image horizontally before doing the rotation.
resultRotation = ( 360 - rotation ) % 360; // Compensate for the mirror.
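// e.g. a requested rotation of 90 becomes 270 once the mirroring is undone.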
} else {
// Back-facing camera.
resultRotation = rotation;
}
camera.setDisplayOrientation(resultRotation);
}
public synchronized void surfaceChanged(
SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
format + ": " + width + "x" + height);
}
public synchronized void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
try {
if (camera != null) {
camera.setPreviewDisplay(holder);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public synchronized void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
try {
if (camera != null) {
camera.setPreviewDisplay(null);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -17,377 +17,79 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import dalvik.system.DexClassLoader;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.hardware.Camera;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class VideoCaptureDeviceInfoAndroid {
private final static String TAG = "WEBRTC-JC";
//Context
Context context;
private static boolean isFrontFacing(CameraInfo info) {
return info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
}
// Set VERBOSE as the default logging level because camera device info
// is very useful information and doesn't degrade performance normally
private final static String TAG = "WEBRTC";
private static String deviceUniqueName(int index, CameraInfo info) {
return "Camera " + index +", Facing " +
(isFrontFacing(info) ? "front" : "back") +
", Orientation "+ info.orientation;
}
// Private class with info about all available cameras and the capabilities
public class AndroidVideoCaptureDevice {
AndroidVideoCaptureDevice() {
frontCameraType = FrontFacingCameraType.None;
index = 0;
}
public String deviceUniqueName;
public CaptureCapabilityAndroid captureCapabilies[];
public FrontFacingCameraType frontCameraType;
// Orientation of camera as described in
// android.hardware.Camera.CameraInfo.Orientation
public int orientation;
// Camera index used in Camera.Open on Android 2.3 and onwards
public int index;
}
public enum FrontFacingCameraType {
None, // This is not a front facing camera
GalaxyS, // Galaxy S front facing camera.
HTCEvo, // HTC Evo front facing camera
Android23, // Android 2.3 front facing camera.
}
String currentDeviceUniqueId;
int id;
List<AndroidVideoCaptureDevice> deviceList;
public static VideoCaptureDeviceInfoAndroid
CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
Log.d(TAG,
String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
VideoCaptureDeviceInfoAndroid self =
new VideoCaptureDeviceInfoAndroid(in_id, in_context);
if(self != null && self.Init() == 0) {
return self;
}
else {
Log.d(TAG, "Failed to create VideoCaptureDeviceInfoAndroid.");
}
return null;
}
private VideoCaptureDeviceInfoAndroid(int in_id,
Context in_context) {
id = in_id;
context = in_context;
deviceList = new ArrayList<AndroidVideoCaptureDevice>();
}
private int Init() {
// Populate the deviceList with available cameras and their capabilities.
Camera camera = null;
if(android.os.Build.VERSION.SDK_INT > 8) {
// From Android 2.3 and onwards
for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
newDevice.index = i;
newDevice.orientation=info.orientation;
if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
newDevice.deviceUniqueName =
"Camera " + i +", Facing back, Orientation "+ info.orientation;
Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
}
else {
newDevice.deviceUniqueName =
"Camera " + i +", Facing front, Orientation "+ info.orientation;
newDevice.frontCameraType = FrontFacingCameraType.Android23;
Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
}
camera = Camera.open(i);
Camera.Parameters parameters = camera.getParameters();
AddDeviceInfo(newDevice, parameters);
camera.release();
camera = null;
deviceList.add(newDevice);
}
}
VerifyCapabilities();
return 0;
}
// Adds the capture capabilities of the currently opened device
private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
int maxFPS = 0;
if (frameRates != null) {
for(Integer frameRate:frameRates) {
if(frameRate > maxFPS) {
maxFPS = frameRate;
}
}
}
newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
for(int i = 0; i < sizes.size(); ++i) {
Size s = sizes.get(i);
newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
newDevice.captureCapabilies[i].height = s.height;
newDevice.captureCapabilies[i].width = s.width;
newDevice.captureCapabilies[i].maxFPS = maxFPS;
Log.v(TAG, "VideoCaptureDeviceInfo " + ", maxFPS: " + maxFPS +
", width: " + s.width + ", height: " + s.height);
}
}
// Function that make sure device specific capabilities are
// in the capability list.
// Ie Galaxy S supports CIF but does not list CIF as a supported capability.
// Motorola Droid Camera does not work with frame rate above 15fps.
// http://code.google.com/p/android/issues/detail?id=5514#c0
private void VerifyCapabilities() {
// Nexus S or Galaxy S
if(android.os.Build.DEVICE.equals("GT-I9000") ||
android.os.Build.DEVICE.equals("crespo")) {
CaptureCapabilityAndroid specificCapability =
new CaptureCapabilityAndroid();
specificCapability.width = 352;
specificCapability.height = 288;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 176;
specificCapability.height = 144;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
specificCapability = new CaptureCapabilityAndroid();
specificCapability.width = 320;
specificCapability.height = 240;
specificCapability.maxFPS = 15;
AddDeviceSpecificCapability(specificCapability);
}
// Motorola Milestone Camera server does not work at 30fps
// even though it reports that it can
if(android.os.Build.MANUFACTURER.equals("motorola") &&
android.os.Build.DEVICE.equals("umts_sholes")) {
for (AndroidVideoCaptureDevice device : deviceList) {
for (CaptureCapabilityAndroid capability : device.captureCapabilies) {
capability.maxFPS = 15;
}
}
}
}
private void AddDeviceSpecificCapability(
CaptureCapabilityAndroid specificCapability) {
for(AndroidVideoCaptureDevice device:deviceList) {
boolean foundCapability = false;
for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
if(capability.width == specificCapability.width &&
capability.height == specificCapability.height) {
foundCapability = true;
break;
}
}
if(foundCapability==false) {
CaptureCapabilityAndroid newCaptureCapabilies[]=
new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
for(int i = 0; i < device.captureCapabilies.length; ++i) {
newCaptureCapabilies[i+1] = device.captureCapabilies[i];
}
newCaptureCapabilies[0] = specificCapability;
device.captureCapabilies = newCaptureCapabilies;
}
}
}
// Returns the number of Capture devices that is supported
public int NumberOfDevices() {
return deviceList.size();
}
public String GetDeviceUniqueName(int deviceNumber) {
if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
return null;
}
return deviceList.get(deviceNumber).deviceUniqueName;
}
public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return (CaptureCapabilityAndroid[]) device.captureCapabilies;
}
}
return null;
}
// Returns the camera orientation as described by
// android.hardware.Camera.CameraInfo.orientation
public int GetOrientation(String deviceUniqueId) {
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
return device.orientation;
}
}
return -1;
}
// Returns an instance of VideoCaptureAndroid.
public VideoCaptureAndroid AllocateCamera(int id, long context,
String deviceUniqueId) {
// Returns information about all cameras on the device as a serialized JSON
// array of dictionaries encoding information about a single device. Since
// this reflects static information about the hardware present, there is no
// need to call this function more than once in a single process. It is
// marked "private" as it is only called by native code.
private static String getDeviceInfo() {
try {
JSONArray devices = new JSONArray();
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
String uniqueName = deviceUniqueName(i, info);
JSONObject cameraDict = new JSONObject();
devices.put(cameraDict);
List<Size> supportedSizes;
List<int[]> supportedFpsRanges;
try {
Log.d(TAG, "AllocateCamera " + deviceUniqueId);
Camera camera = null;
AndroidVideoCaptureDevice deviceToUse = null;
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
// Found the wanted camera
deviceToUse = device;
switch(device.frontCameraType) {
case GalaxyS:
camera = AllocateGalaxySFrontCamera();
break;
case HTCEvo:
camera = AllocateEVOFrontFacingCamera();
break;
default:
// From Android 2.3 and onwards)
if(android.os.Build.VERSION.SDK_INT>8)
camera=Camera.open(device.index);
else
camera=Camera.open(); // Default camera
}
}
}
if(camera == null) {
return null;
}
Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
return new VideoCaptureAndroid(id, context, camera, deviceToUse);
} catch (NoSuchMethodException e) {
Log.e(TAG, "AllocateCamera Failed to open camera", e);
} catch (ClassNotFoundException e) {
Log.e(TAG, "AllocateCamera Failed to open camera", e);
} catch (InvocationTargetException e) {
Log.e(TAG, "AllocateCamera Failed to open camera", e);
} catch (IllegalAccessException e) {
Log.e(TAG, "AllocateCamera Failed to open camera", e);
Camera camera = Camera.open(i);
Parameters parameters = camera.getParameters();
supportedSizes = parameters.getSupportedPreviewSizes();
supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
camera.release();
Log.d(TAG, uniqueName);
} catch (RuntimeException e) {
Log.e(TAG, "Failed to open " + uniqueName + ", skipping");
continue;
}
return null;
JSONArray sizes = new JSONArray();
for (Size supportedSize : supportedSizes) {
JSONObject size = new JSONObject();
size.put("width", supportedSize.width);
size.put("height", supportedSize.height);
sizes.put(size);
}
// Android SDK deals in integral "milliframes per second"
// (i.e. fps*1000, instead of floating-point frames-per-second) so we
// preserve that through the Java->C++->Java round-trip.
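// e.g. a supported range of [15000, 30000] means 15-30 frames per second.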
int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
cameraDict.put("name", uniqueName);
cameraDict.put("front_facing", isFrontFacing(info))
.put("orientation", info.orientation)
.put("sizes", sizes)
.put("min_mfps", mfps[Parameters.PREVIEW_FPS_MIN_INDEX])
.put("max_mfps", mfps[Parameters.PREVIEW_FPS_MAX_INDEX]);
}
String ret = devices.toString(2);
return ret;
} catch (JSONException e) {
throw new RuntimeException(e);
}
// Searches for a front facing camera device. This is device specific code.
private Camera.Parameters
SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
throws SecurityException, IllegalArgumentException,
NoSuchMethodException, ClassNotFoundException,
IllegalAccessException, InvocationTargetException {
// Check the id of the opened camera device
// Returns null on X10 and 1 on Samsung Galaxy S.
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
String cameraId = parameters.get("camera-id");
if(cameraId != null && cameraId.equals("1")) {
// This might be a Samsung Galaxy S with a front facing camera.
parameters.set("camera-id", 2);
camera.setParameters(parameters);
parameters = camera.getParameters();
newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
newDevice.orientation = 0;
camera.release();
return parameters;
}
camera.release();
// Check for Evo front facing camera
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
boolean exists = file.exists();
if (!exists) {
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
exists = file.exists();
}
if(exists) {
newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
newDevice.orientation = 0;
Camera evCamera = AllocateEVOFrontFacingCamera();
parameters = evCamera.getParameters();
evCamera.release();
return parameters;
}
return null;
}
// Returns a handle to HTC front facing camera.
// The caller is responsible to release it on completion.
private Camera AllocateEVOFrontFacingCamera()
throws SecurityException, NoSuchMethodException,
ClassNotFoundException, IllegalArgumentException,
IllegalAccessException, InvocationTargetException {
String classPath = null;
File file =
new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
boolean exists = file.exists();
if (!exists){
file =
new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
exists = file.exists();
}
if(!exists) {
return null;
}
String dexOutputDir = "";
if(context != null) {
dexOutputDir = context.getFilesDir().getAbsolutePath();
File mFilesDir = new File(dexOutputDir, "dexfiles");
if(!mFilesDir.exists()){
// Log.e("*WEBRTCN*", "Directory doesn't exists");
if(!mFilesDir.mkdirs()) {
// Log.e("*WEBRTCN*", "Unable to create files directory");
}
}
}
dexOutputDir += "/dexfiles";
DexClassLoader loader =
new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
null, ClassLoader.getSystemClassLoader());
Method method = loader.loadClass(classPath).getDeclaredMethod(
"getFrontFacingCamera", (Class[]) null);
Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
return camera;
}
// Returns a handle to Galaxy S front camera.
// The caller is responsible to release it on completion.
private Camera AllocateGalaxySFrontCamera() {
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
parameters.set("camera-id",2);
camera.setParameters(parameters);
return camera;
}
}
}
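For reference, the string returned by getDeviceInfo() is a JSON array with one dictionary per camera; an illustrative (invented) example for a device with a single back camera:
[{
"name": "Camera 0, Facing back, Orientation 90",
"front_facing": false,
"orientation": 90,
"sizes": [{"width": 640, "height": 480}, {"width": 1280, "height": 720}],
"min_mfps": 15000,
"max_mfps": 30000
}]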

View File

@@ -10,599 +10,173 @@
#include "webrtc/modules/video_capture/android/video_capture_android.h"
#include <stdio.h>
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc
{
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only
// keep and reference java vm.
int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext) {
return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(
javaVM,
javaContext);
}
#endif
static JavaVM* g_jvm = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
namespace videocapturemodule
{
namespace webrtc {
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jlong context) {
webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
context);
jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
captureModule->OnIncomingFrame(
reinterpret_cast<uint8_t*>(cameraFrame), length, 0);
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
g_jvm = javaVM;
AttachThreadScoped ats(g_jvm);
videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
jclass j_capture_class =
ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
assert(j_capture_class);
g_java_capturer_class =
reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
assert(g_java_capturer_class);
JNINativeMethod native_method = {
"ProvideCameraFrame", "([BIJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)
};
if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0)
assert(false);
return 0;
}
namespace videocapturemodule {
VideoCaptureModule* VideoCaptureImpl::Create(
const int32_t id,
const char* deviceUniqueIdUTF8) {
RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) {
if (implementation->Init(id, deviceUniqueIdUTF8) != 0) {
delete implementation;
implementation = NULL;
}
return implementation;
}
// Android logging, uncomment to print trace to
// logcat instead of trace file/callback
// #include <android/log.h>
// #undef WEBRTC_TRACE
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
JavaVM* VideoCaptureAndroid::g_jvm = NULL;
//VideoCaptureAndroid.java
jclass VideoCaptureAndroid::g_javaCmClass = NULL;
//VideoCaptureDeviceInfoAndroid.java
jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
//static instance of VideoCaptureDeviceInfoAndroid.java
jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
jobject VideoCaptureAndroid::g_javaContext = NULL;
/*
* Register references to Java Capture class.
*/
int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
void* javaContext) {
g_jvm = static_cast<JavaVM*> (javaVM);
g_javaContext = static_cast<jobject> (javaContext);
if (javaVM) {
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get Java environment", __FUNCTION__);
return -1;
}
// get java capture class type (note path to class packet)
jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
if (!javaCmClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmClass = static_cast<jclass>
(env->NewGlobalRef(javaCmClassLocal));
if (!g_javaCmClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create"
" Java Camera class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmClassLocal);
JNINativeMethod nativeFunctions =
{ "ProvideCameraFrame", "([BIJ)V",
(void*) &VideoCaptureAndroid::ProvideCameraFrame };
if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Registered native functions", __FUNCTION__);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to register native functions",
__FUNCTION__);
return -1;
}
jclass capabilityClassLocal = env->FindClass(
"org/webrtc/videoengine/CaptureCapabilityAndroid");
if (!capabilityClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
capabilityClassLocal));
DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);
    // Get the Java device info class (note the package path).
jclass javaCmDevInfoClassLocal = env->FindClass(
"org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
if (!javaCmDevInfoClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmDevInfoClass = static_cast<jclass>
(env->NewGlobalRef(javaCmDevInfoClassLocal));
if (!g_javaCmDevInfoClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create Java "
"Camera Device info class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmDevInfoClassLocal);
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"VideoCaptureDeviceInfoAndroid get method id");
    // Get the method ID for the static CreateVideoCaptureDeviceInfoAndroid
    // factory method on the Java device info class.
jmethodID cid = env->GetStaticMethodID(
g_javaCmDevInfoClass,
"CreateVideoCaptureDeviceInfoAndroid",
"(ILandroid/content/Context;)"
"Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java"
"VideoCaptureDeviceInfoAndroid constructor ID",
__FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: construct static java device object", __FUNCTION__);
    // Construct the object by calling the static factory method.
jobject javaCameraDeviceInfoObjLocal =
env->CallStaticObjectMethod(g_javaCmDevInfoClass,
cid, (int) -1,
g_javaContext);
if (!javaCameraDeviceInfoObjLocal) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: could not create Java Capture Device info object",
__FUNCTION__);
return -1;
}
    // Create a global reference to the object (to tell JNI that we are
    // referencing it after this function has returned).
g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
if (!g_javaCmDevInfoObject) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceAudioDevice,
-1,
"%s: could not create Java"
"cameradevinceinfo object reference",
__FUNCTION__);
return -1;
}
// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);
return 0;
}
else {
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: JVM is NULL, assuming deinit", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
}
JNIEnv* env = NULL;
bool attached = false;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-1, "%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
attached = true;
}
env->DeleteGlobalRef(g_javaCmDevInfoObject);
env->DeleteGlobalRef(g_javaCmDevInfoClass);
env->DeleteGlobalRef(g_javaCmClass);
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
  }
  return 0;
}
int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
int32_t videoFrameLength,
int64_t captureTime) {
return IncomingFrame(
videoFrame, videoFrameLength, _captureCapability, captureTime);
}
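// OnIncomingFrame() forwards the camera buffer delivered through
// ProvideCameraFrame() to VideoCaptureImpl::IncomingFrame(), stamped with
// the capability negotiated in StartCapture().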
int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached) {
// get the JNI env for this thread
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
}
attached = false;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
attached = true;
}
javaCmDevInfoClass = g_javaCmDevInfoClass;
javaCmDevInfoObject = g_javaCmDevInfoObject;
return 0;
}
int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
bool attached) {
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
}
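// Hedged usage sketch of the (pre-rewrite) attach/release pair above; the
// names are taken from the declarations in this diff, but this exact caller
// does not exist in the tree:
static void QueryDeviceInfoExample() {
  JNIEnv* env = NULL;
  jclass dev_info_class = NULL;
  jobject dev_info = NULL;
  bool attached = false;
  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
          env, dev_info_class, dev_info, attached) == 0) {
    // ... invoke VideoCaptureDeviceInfoAndroid methods through |env| ...
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
  }
}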
/*
 * JNI callback from the Java VideoCaptureAndroid class, invoked when the
 * camera has a new frame to deliver.
 * Class:     org_webrtc_videoengine_VideoCaptureAndroid
 * Method:    ProvideCameraFrame
 * Signature: ([BIJ)V
 */
void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jlong context) {
VideoCaptureAndroid* captureModule =
reinterpret_cast<VideoCaptureAndroid*>(context);
  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
               -1, "%s: IncomingFrame %d", __FUNCTION__, length);
  jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
  captureModule->IncomingFrame((uint8_t*) cameraFrame,
                               length, captureModule->_frameInfo, 0);
  env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id)
: VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL),
: VideoCaptureImpl(id),
_deviceInfo(id),
_jCapturer(NULL),
_captureStarted(false) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: context %x", __FUNCTION__, (int) this);
}
// ----------------------------------------------------------------------------
// Init
//
// Initializes needed Java resources like the JNI interface to
// VideoCaptureAndroid.java
// ----------------------------------------------------------------------------
int32_t VideoCaptureAndroid::Init(const int32_t id,
const char* deviceUniqueIdUTF8) {
const int nameLength = strlen(deviceUniqueIdUTF8);
if (nameLength >= kVideoCaptureUniqueNameLength) {
if (nameLength >= kVideoCaptureUniqueNameLength)
return -1;
}
// Store the device name
_deviceUniqueId = new char[nameLength + 1];
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
if (_capInfo.Init() != 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCapture,
_id,
"%s: Failed to initialize CaptureDeviceInfo",
__FUNCTION__);
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
assert(ctor);
jlong j_this = reinterpret_cast<intptr_t>(this);
size_t camera_id = 0;
if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
return -1;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
__FUNCTION__);
// use the jvm that has been set
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
JNIEnv *env;
bool isAttached = false;
// get the JNI env for this thread
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"get method id");
// get the method ID for the Android Java
// CaptureDeviceInfoClass AllocateCamera factory method.
char signature[256];
sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
signature);
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: could not get constructor ID", __FUNCTION__);
return -1; /* exception thrown */
}
  jstring captureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
  // Construct the capture object by calling the AllocateCamera method.
  jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
                                                     cid, (jint) id,
                                                     (jlong) this,
                                                     captureIdString);
if (!javaCameraObjLocal) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
"%s: could not create Java Capture object", __FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
if (!_javaCaptureObj) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
"%s: could not create Java camera object reference",
__FUNCTION__);
return -1;
}
// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraObjLocal);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
_jCapturer = env->NewGlobalRef(
env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
assert(_jCapturer);
return 0;
}
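// The rewritten Init() resolves |deviceUniqueIdUTF8| to a camera index via
// DeviceInfoAndroid::FindCameraIndex(), then constructs a Java
// VideoCaptureAndroid(camera_id, j_this) and holds it in a global ref;
// |j_this| is the native pointer handed back as |context| in
// ProvideCameraFrame().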
VideoCaptureAndroid::~VideoCaptureAndroid() {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
__FUNCTION__);
if (_javaCaptureObj == NULL || g_jvm == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Nothing to clean", __FUNCTION__);
}
else {
bool isAttached = false;
// get the JNI env for this thread
JNIEnv *env;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
// get the method ID for the Android Java CaptureClass static
// DeleteVideoCaptureAndroid method. Call this to release the camera so
// another application can use it.
jmethodID cid = env->GetStaticMethodID(
g_javaCmClass,
"DeleteVideoCaptureAndroid",
"(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
// Close the camera by calling the static destruct function.
env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
// Delete global object ref to the camera.
env->DeleteGlobalRef(_javaCaptureObj);
_javaCaptureObj = NULL;
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find DeleteVideoCaptureAndroid id",
__FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
// Ensure Java camera is released even if our caller didn't explicitly Stop.
if (_captureStarted)
StopCapture();
AttachThreadScoped ats(g_jvm);
ats.env()->DeleteGlobalRef(_jCapturer);
}
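// The destructor stops capture if the caller forgot to, so the Java-side
// camera is always released, then drops the global ref to the capturer.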
int32_t VideoCaptureAndroid::StartCapture(
const VideoCaptureCapability& capability) {
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
bool isAttached = false;
int32_t result = 0;
// get the JNI env for this thread
JNIEnv *env;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
_frameInfo) < 0) {
if (_deviceInfo.GetBestMatchedCapability(
_deviceUniqueId, capability, _captureCapability) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: GetBestMatchedCapability failed. Req cap w%d h%d",
"%s: GetBestMatchedCapability failed: %dx%d",
__FUNCTION__, capability.width, capability.height);
return -1;
}
// Store the new expected capture delay
_captureDelay = _frameInfo.expectedCaptureDelay;
_captureDelay = _captureCapability.expectedCaptureDelay;
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
_frameInfo.height);
  // Get the method ID for the Java CaptureClass StartCapture method.
jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Call StartCapture", __FUNCTION__);
    // Start capture by calling the Java StartCapture method.
result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
_frameInfo.height, _frameInfo.maxFPS);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find StartCapture id", __FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
if (result == 0) {
jmethodID j_start =
env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
assert(j_start);
int min_mfps = 0;
int max_mfps = 0;
_deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
bool started = env->CallBooleanMethod(_jCapturer, j_start,
_captureCapability.width,
_captureCapability.height,
min_mfps, max_mfps);
if (started) {
_requestedCapability = capability;
_captureStarted = true;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, result);
return result;
return started ? 0 : -1;
}
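// The "(IIII)Z" signature above means startCapture takes four Java ints
// (width, height, and a min/max fps range, presumably in units of
// frames-per-second * 1000 to match Android's preview fps range API) and
// returns a boolean.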
int32_t VideoCaptureAndroid::StopCapture() {
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);
bool isAttached = false;
int32_t result = 0;
// get the JNI env for this thread
JNIEnv *env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
memset(&_requestedCapability, 0, sizeof(_requestedCapability));
memset(&_frameInfo, 0, sizeof(_frameInfo));
// get the method ID for the Android Java CaptureClass StopCapture method.
jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Call StopCapture", __FUNCTION__);
    // Stop the camera by calling the Java StopCapture method.
result = env->CallIntMethod(_javaCaptureObj, cid);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find StopCapture id", __FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
memset(&_captureCapability, 0, sizeof(_captureCapability));
_captureStarted = false;
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, result);
return result;
jmethodID j_stop =
env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
}
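// stopCapture() returns a Java boolean; true is mapped to 0 (success) and
// false to -1, matching the module's int32_t error convention.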
bool VideoCaptureAndroid::CaptureStarted() {
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);
return _captureStarted;
}
int32_t VideoCaptureAndroid::CaptureSettings(
VideoCaptureCapability& settings) {
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);
settings = _requestedCapability;
return 0;
}
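// Note that CaptureSettings() reports the capability the caller requested,
// not the best-matched capability the camera was actually started with.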
@ -610,64 +184,20 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
if (!g_jvm)
return -1;
if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
return 0;
// get the JNI env for this thread
JNIEnv *env;
bool isAttached = false;
// get the JNI env for this thread
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
"(I)V");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java SetPreviewRotation ID",
__FUNCTION__);
return -1;
}
jint rotateFrame = 0;
switch (rotation) {
case kCameraRotate0:
rotateFrame = 0;
break;
case kCameraRotate90:
rotateFrame = 90;
break;
case kCameraRotate180:
rotateFrame = 180;
break;
case kCameraRotate270:
rotateFrame = 270;
break;
}
env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
jmethodID j_spr =
env->GetMethodID(g_java_capturer_class, "setPreviewRotation", "(I)V");
assert(j_spr);
int rotation_degrees;
if (RotationInDegrees(rotation, &rotation_degrees) != 0) {
assert(false);
}
env->CallVoidMethod(_jCapturer, j_spr, rotation_degrees);
return 0;
}
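// SetCaptureRotation() now delegates the enum-to-degrees mapping to the
// RotationInDegrees() helper (defined in video_capture_impl.cc below)
// instead of a hand-rolled switch over the rotation enum.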

View File

@ -16,49 +16,31 @@
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
namespace webrtc {
namespace videocapturemodule {
class VideoCaptureAndroid : public VideoCaptureImpl {
public:
static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
static int32_t AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached);
static int32_t ReleaseAndroidDeviceInfoObjects(bool attached);
VideoCaptureAndroid(const int32_t id);
virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
virtual int32_t StartCapture(
const VideoCaptureCapability& capability);
virtual int32_t StartCapture(const VideoCaptureCapability& capability);
virtual int32_t StopCapture();
virtual bool CaptureStarted();
virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
int32_t OnIncomingFrame(uint8_t* videoFrame,
int32_t videoFrameLength,
int64_t captureTime = 0);
protected:
virtual ~VideoCaptureAndroid();
static void JNICALL ProvideCameraFrame (JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length, jlong context);
DeviceInfoAndroid _capInfo;
jobject _javaCaptureObj; // Java Camera object.
VideoCaptureCapability _frameInfo;
bool _captureStarted;
static JavaVM* g_jvm;
static jclass g_javaCmClass;
static jclass g_javaCmDevInfoClass;
// Static Java object implementing the needed device info functions.
static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
DeviceInfoAndroid _deviceInfo;
jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object.
VideoCaptureCapability _captureCapability;
bool _captureStarted;
};
} // namespace videocapturemodule

View File

@ -14,10 +14,14 @@
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/video_capture/include/video_capture_defines.h"
#ifdef ANDROID
#include <jni.h>
#endif
namespace webrtc {
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext);
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int32_t SetCaptureAndroidVM(JavaVM* javaVM);
#endif
class VideoCaptureModule: public RefCountedModule {

View File

@ -91,6 +91,10 @@
},
}], # win
['OS=="android"', {
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
],
'sources': [
'android/device_info_android.cc',
'android/device_info_android.h',

View File

@ -41,6 +41,47 @@ const char* VideoCaptureImpl::CurrentDeviceName() const
return _deviceUniqueId;
}
// static
int32_t VideoCaptureImpl::RotationFromDegrees(int degrees,
VideoCaptureRotation* rotation) {
switch (degrees) {
case 0:
*rotation = kCameraRotate0;
return 0;
case 90:
*rotation = kCameraRotate90;
return 0;
case 180:
*rotation = kCameraRotate180;
return 0;
case 270:
*rotation = kCameraRotate270;
return 0;
default:
      return -1;
}
}
// static
int32_t VideoCaptureImpl::RotationInDegrees(VideoCaptureRotation rotation,
int* degrees) {
switch (rotation) {
case kCameraRotate0:
*degrees = 0;
return 0;
case kCameraRotate90:
*degrees = 90;
return 0;
case kCameraRotate180:
*degrees = 180;
return 0;
case kCameraRotate270:
*degrees = 270;
return 0;
}
return -1;
}
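// Hedged round-trip example for the helpers above; this caller is
// illustrative only (not part of the tree) and assumes <assert.h>:
static void RotationRoundTripExample() {
  VideoCaptureRotation rotation;
  if (VideoCaptureImpl::RotationFromDegrees(90, &rotation) == 0) {
    int degrees = 0;
    VideoCaptureImpl::RotationInDegrees(rotation, &degrees);
    assert(degrees == 90);  // kCameraRotate90 maps back to 90.
  }
}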
int32_t VideoCaptureImpl::ChangeUniqueId(const int32_t id)
{
_id = id;
@ -358,6 +399,8 @@ int32_t VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) {
case kCameraRotate270:
_rotateFrame = kRotate270;
break;
default:
return -1;
}
return 0;
}

View File

@ -51,6 +51,13 @@ public:
static DeviceInfo* CreateDeviceInfo(const int32_t id);
// Helpers for converting between (integral) degrees and
// VideoCaptureRotation values. Return 0 on success.
static int32_t RotationFromDegrees(int degrees,
VideoCaptureRotation* rotation);
static int32_t RotationInDegrees(VideoCaptureRotation rotation,
int* degrees);
// Implements Module declared functions.
virtual int32_t ChangeUniqueId(const int32_t id);

View File

@ -142,6 +142,11 @@
'WEBRTC_CLOCK_TYPE_REALTIME',
],
'dependencies': [ 'cpu_features_android', ],
'link_settings': {
'libraries': [
'-llog',
],
},
}, { # OS!="android"
'sources!': [
'../interface/logcat_trace_context.h',

View File

@ -21,6 +21,10 @@
#include "webrtc/common_types.h"
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
#include <jni.h>
#endif
namespace webrtc {
class Config;
@ -61,10 +65,10 @@ class WEBRTC_DLLEXPORT VideoEngine {
// user receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback);
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Android specific.
// Provides VideoEngine with pointers to objects supplied by the Java
// applications JNI interface.
static int SetAndroidObjects(void* java_vm, void* java_context);
static int SetAndroidObjects(JavaVM* java_vm);
#endif
protected:
VideoEngine() {}

View File

@ -311,7 +311,7 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideo
return 0;
}
VideoEngine::SetAndroidObjects(webrtcGlobalVM, context);
VideoEngine::SetAndroidObjects(webrtcGlobalVM);
// Create
vieData.vie = VideoEngine::Create();

View File

@ -22,7 +22,6 @@ LOCAL_MODULE_TAGS := tests
LOCAL_SRC_FILES := \
src/org/webrtc/vieautotest/ViEAutotest.java \
$(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
$(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
$(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
$(MY_RENDER_PATH)/ViEAndroidGLES20.java \

View File

@ -11,16 +11,17 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
class ViEAutoTestAndroid
{
public:
static int RunAutotest(int testSelection,
int subTestSelection,
void* window1,
void* window2,
void* javaVM,
void* env,
void* context);
#include <jni.h>
class ViEAutoTestAndroid {
public:
static int RunAutotest(int testSelection,
int subTestSelection,
void* window1,
void* window2,
JavaVM* javaVM,
void* env,
void* context);
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_

View File

@ -18,10 +18,10 @@
int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
void* window1, void* window2,
void* javaVM, void* env, void* context) {
JavaVM* javaVM, void* env, void* context) {
ViEAutoTest vieAutoTest(window1, window2);
ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
webrtc::VideoEngine::SetAndroidObjects(javaVM);
#ifndef WEBRTC_ANDROID_OPENSLES
// voice engine calls into ADM directly
webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);

View File

@ -163,12 +163,12 @@ int VideoEngine::SetTraceCallback(TraceCallback* callback) {
return Trace::SetTraceCallback(callback);
}
int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int VideoEngine::SetAndroidObjects(JavaVM* javaVM) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
"SetAndroidObjects()");
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
if (SetCaptureAndroidVM(javaVM, javaContext) != 0) {
if (SetCaptureAndroidVM(javaVM) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
"Could not set capture Android VM");
return -1;
@ -179,11 +179,7 @@ int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
return -1;
}
return 0;
#else
WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
"WEBRTC_ANDROID not defined for VideoEngine::SetAndroidObjects");
return -1;
#endif
}
#endif
} // namespace webrtc