AppRTCDemo(android): support app (UI) & capture rotation.

Now app UI rotates as the device orientation changes, and the captured stream
tries to maintain real-world-up, matching Chrome/Android and Hangouts/Android
behavior.

BUG=2432
R=glaznev@webrtc.org, henrike@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/15689005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6354 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
fischman@webrtc.org 2014-06-06 18:40:44 +00:00
parent 42694c5937
commit 9512719569
12 changed files with 97 additions and 20 deletions

View File

@ -1897,7 +1897,7 @@ JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
CHECK(g_jvm, "JNI_OnLoad failed to run?");
bool failure = false;
if (initialize_video)
failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm);
failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
if (initialize_audio)
failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
return !failure;

View File

@ -21,7 +21,8 @@
android:allowBackup="false">
<activity android:name="AppRTCDemoActivity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:screenOrientation="fullUser"
android:configChanges="orientation|screenSize"
android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

View File

@ -31,6 +31,7 @@ import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.Point;
import android.media.AudioManager;
@ -226,6 +227,13 @@ public class AppRTCDemoActivity extends Activity
}
}
// Rotation handler: the manifest declares
// android:configChanges="orientation|screenSize", so on rotation the
// Activity is NOT recreated and instead receives this callback.
@Override
public void onConfigurationChanged (Configuration newConfig) {
// Re-query the display dimensions (width/height swap on rotation) and
// push the new size to the video view so it can re-lay-out its streams.
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getSize(displaySize);
vsv.updateDisplaySize(displaySize);
super.onConfigurationChanged(newConfig);
}
// Just for fun (and to regression-test bug 2302) make sure that DataChannels
// can be created, queried, and disposed.

View File

@ -84,11 +84,15 @@ public class VideoStreamsView
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
/**
 * Records the new screen dimensions after an orientation change.
 * Called from the UI thread (Activity.onConfigurationChanged).
 * NOTE(review): the field appears to be read later on the GL render
 * thread without synchronization — presumably benign since only the
 * reference is swapped, but confirm the reader tolerates a stale value.
 */
public void updateDisplaySize(Point screenDimensions) {
this.screenDimensions = screenDimensions;
}
/** Queue |frame| to be uploaded. */
public void queueFrame(final Endpoint stream, I420Frame frame) {
// Paying for the copy of the YUV data here allows CSC and painting time
// to get spent on the render thread instead of the UI thread.
abortUnless(framePool.validateDimensions(frame), "Frame too large!");
abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
boolean needToScheduleRender;
synchronized (framesToRender) {

View File

@ -38,7 +38,7 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
jobject context) {
webrtc_examples::SetVoeDeviceObjects(g_vm);
webrtc_examples::SetVieDeviceObjects(g_vm);
CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm) == 0,
CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm, context) == 0,
"Failed to register android objects to video engine");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, jni, context) == 0,
"Failed to register android objects to voice engine");
@ -47,7 +47,7 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
JOWW(void, NativeWebRtcContextRegistry_unRegister)(
JNIEnv* jni,
jclass) {
CHECK(webrtc::VideoEngine::SetAndroidObjects(NULL) == 0,
CHECK(webrtc::VideoEngine::SetAndroidObjects(NULL, NULL) == 0,
"Failed to unregister android objects from video engine");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL, NULL) == 0,
"Failed to unregister android objects from voice engine");

View File

@ -15,6 +15,7 @@ import java.util.Locale;
import java.util.concurrent.Exchanger;
import java.util.concurrent.locks.ReentrantLock;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Rect;
@ -25,6 +26,7 @@ import android.hardware.Camera;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceHolder;
@ -46,6 +48,7 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
private Handler cameraThreadHandler;
private final int id;
private final Camera.CameraInfo info;
private final OrientationEventListener orientationListener;
private final long native_capturer; // |VideoCaptureAndroid*| in C++.
private SurfaceTexture dummySurfaceTexture;
// Arbitrary queue depth. Higher number means more memory allocated & held,
@ -66,8 +69,30 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
this.native_capturer = native_capturer;
this.info = new Camera.CameraInfo();
Camera.getCameraInfo(id, info);
// Must be the last thing in the ctor since we pass a reference to |this|!
final VideoCaptureAndroid self = this;
orientationListener = new OrientationEventListener(GetContext()) {
@Override public void onOrientationChanged(int degrees) {
if (degrees == OrientationEventListener.ORIENTATION_UNKNOWN) {
return;
}
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
degrees = (info.orientation - degrees + 360) % 360;
} else { // back-facing
degrees = (info.orientation + degrees) % 360;
}
self.OnOrientationChanged(self.native_capturer, degrees);
}
};
// Don't add any code here; see the comment above |self| above!
}
// Return the global application context.
private static native Context GetContext();
// Request frame rotation post-capture.
private native void OnOrientationChanged(long captureObject, int degrees);
private class CameraThread extends Thread {
private Exchanger<Handler> handlerExchanger;
public CameraThread(Exchanger<Handler> handlerExchanger) {
@ -96,6 +121,7 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
orientationListener.enable();
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@ -186,6 +212,7 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
}
cameraThreadHandler = null;
cameraThread = null;
orientationListener.disable();
return status;
}

View File

@ -19,9 +19,16 @@
static JavaVM* g_jvm = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
static jobject g_context = NULL; // Owned android.content.Context.
namespace webrtc {
// JNI entry point: returns the application Context stashed in |g_context|
// by SetCaptureAndroidVM() (a NewGlobalRef, so the reference stays valid
// across JNI calls). Registered as the native backing of the Java method
// VideoCaptureAndroid.GetContext(). Asserts that registration has already
// happened — must not be called before SetCaptureAndroidVM().
jobject JNICALL GetContext(JNIEnv* env, jclass) {
assert(g_context);
return g_context;
}
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
@ -38,11 +45,31 @@ void JNICALL ProvideCameraFrame(
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
// JNI entry point, invoked from the Java OrientationEventListener when the
// device orientation changes. |context| is the native VideoCaptureAndroid*
// that the Java capturer holds in its |native_capturer| field; |degrees| is
// the camera-relative rotation already computed on the Java side.
void JNICALL OnOrientationChanged(
JNIEnv* env, jobject, jlong context, jint degrees) {
webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
context);
// Normalize into [0, 360). Adding 360 first guards against a slightly
// negative input; NOTE(review): an input <= -360 would stay negative and
// trip the assert below — the Java caller already reduces mod 360, so
// inputs should be in [0, 360), but confirm.
degrees = (360 + degrees) % 360;
assert(degrees >= 0 && degrees < 360);
// Snap the continuous angle to the nearest 90-degree capture rotation
// (each quadrant gets +/-45 degrees of slack around its center).
VideoCaptureRotation rotation =
(degrees <= 45 || degrees > 315) ? kCameraRotate0 :
(degrees > 45 && degrees <= 135) ? kCameraRotate90 :
(degrees > 135 && degrees <= 225) ? kCameraRotate180 :
(degrees > 225 && degrees <= 315) ? kCameraRotate270 :
kCameraRotate0; // Impossible.
// Explicitly-qualified call records the rotation on the base class.
// NOTE(review): |status| is only checked via assert(), so NDEBUG builds
// silently ignore failures and may emit an unused-variable warning.
int32_t status =
captureModule->VideoCaptureImpl::SetCaptureRotation(rotation);
assert(status == 0);
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) {
if (javaVM) {
assert(!g_jvm);
g_jvm = javaVM;
AttachThreadScoped ats(g_jvm);
g_context = ats.env()->NewGlobalRef(context);
videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
@ -53,12 +80,18 @@ int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
assert(g_java_capturer_class);
JNINativeMethod native_method = {
"ProvideCameraFrame", "([BIJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)
};
JNINativeMethod native_methods[] = {
{"GetContext",
"()Landroid/content/Context;",
reinterpret_cast<void*>(&GetContext)},
{"OnOrientationChanged",
"(JI)V",
reinterpret_cast<void*>(&OnOrientationChanged)},
{"ProvideCameraFrame",
"([BIJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)}};
if (ats.env()->RegisterNatives(g_java_capturer_class,
&native_method, 1) != 0)
native_methods, 3) != 0)
assert(false);
} else {
if (g_jvm) {
@ -66,6 +99,8 @@ int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
ats.env()->UnregisterNatives(g_java_capturer_class);
ats.env()->DeleteGlobalRef(g_java_capturer_class);
g_java_capturer_class = NULL;
ats.env()->DeleteGlobalRef(g_context);
g_context = NULL;
videocapturemodule::DeviceInfoAndroid::DeInitialize();
g_jvm = NULL;
}
@ -198,8 +233,9 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
return 0;
int32_t status = VideoCaptureImpl::SetCaptureRotation(rotation);
if (status != 0)
return status;
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();

View File

@ -36,7 +36,7 @@ void EnsureInitialized() {}
namespace webrtc {
// Declared in webrtc/modules/video_capture/include/video_capture.h.
int32_t SetCaptureAndroidVM(JavaVM* javaVM);
int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject g_context);
namespace videocapturemodule {
@ -44,10 +44,11 @@ static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
void EnsureInitializedOnce() {
JNIEnv* jni = ::base::android::AttachCurrentThread();
jobject context = ::base::android::GetApplicationContext();
JavaVM* jvm = NULL;
int status = jni->GetJavaVM(&jvm);
ASSERT(status == 0);
status = webrtc::SetCaptureAndroidVM(jvm) == 0;
status = webrtc::SetCaptureAndroidVM(jvm, context) == 0;
ASSERT(status);
}

View File

@ -21,7 +21,7 @@
namespace webrtc {
#if defined(ANDROID)
int32_t SetCaptureAndroidVM(JavaVM* javaVM);
int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
#endif
class VideoCaptureModule: public RefCountedModule {

View File

@ -150,7 +150,7 @@ class WEBRTC_DLLEXPORT VideoEngine {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Android specific.
static int SetAndroidObjects(JavaVM* java_vm);
static int SetAndroidObjects(JavaVM* java_vm, jobject context);
#endif
protected:

View File

@ -21,7 +21,7 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
JavaVM* javaVM, void* env, void* context) {
ViEAutoTest vieAutoTest(window1, window2);
ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
webrtc::VideoEngine::SetAndroidObjects(javaVM);
webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
#ifndef WEBRTC_ANDROID_OPENSLES
// voice engine calls into ADM directly
webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);

View File

@ -140,10 +140,10 @@ int VideoEngine::SetTraceCallback(TraceCallback* callback) {
}
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int VideoEngine::SetAndroidObjects(JavaVM* javaVM) {
int VideoEngine::SetAndroidObjects(JavaVM* javaVM, jobject context) {
LOG_F(LS_INFO);
if (SetCaptureAndroidVM(javaVM) != 0) {
if (SetCaptureAndroidVM(javaVM, context) != 0) {
LOG(LS_ERROR) << "Could not set capture Android VM";
return -1;
}