Stop AndroidVideoCapturer asynchronously.

The purpose is to avoid a deadlock between the C++ thread calling Stop and the Java thread that provides video frames.

BUG=4318
R=glaznev@webrtc.org, magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/35249004

Cr-Commit-Position: refs/heads/master@{#8425}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8425 4adac7df-926f-26a2-2b94-8c16560cd09d
perkj@webrtc.org 2015-02-19 08:43:38 +00:00
parent 254840692e
commit 3db042e2f0
8 changed files with 217 additions and 122 deletions
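
The core pattern the change adopts can be sketched as follows. This is an illustrative sketch only, not the actual VideoCapturerAndroid API, and every name in it is hypothetical: the thread requesting a stop posts the work to the capture thread and returns immediately, and the capture thread reports completion through an observer callback, so neither thread ever blocks on the other.

import android.os.Handler;
import android.os.HandlerThread;

// Illustrative sketch only -- hypothetical names, not the VideoCapturerAndroid API.
// The caller never blocks on the capture thread; completion is reported via a callback.
class AsyncStopSketch {
  interface StopObserver {
    // Invoked on the capture thread once capturing has stopped.
    void onCapturerStopped();
  }

  private final HandlerThread captureThread = new HandlerThread("CaptureThread");
  private final Handler captureHandler;

  AsyncStopSketch() {
    captureThread.start();
    captureHandler = new Handler(captureThread.getLooper());
  }

  // Returns immediately instead of waiting for the capture thread to finish.
  void stopCapture(final StopObserver observer) {
    captureHandler.post(new Runnable() {
      @Override public void run() {
        // Release the camera here, on the capture thread.
        observer.onCapturerStopped();
      }
    });
  }
}

A caller that does need to block until the camera has stopped can wait on a lock that the callback notifies, which is what the updated test below does with capturerStopLock and WaitForCapturerToStop().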

View File

@ -67,6 +67,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
private int frameSize = 0;
private Object frameLock = 0;
private Object capturerStartLock = 0;
private Object capturerStopLock = 0;
private boolean captureStartResult = false;
@Override
@ -77,6 +78,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
}
}
@Override
public void OnCapturerStopped() {
synchronized (capturerStopLock) {
capturerStopLock.notify();
}
}
@Override
public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) {
synchronized (frameLock) {
@ -93,6 +101,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
}
}
public void WaitForCapturerToStop() throws InterruptedException {
synchronized (capturerStopLock) {
capturerStopLock.wait();
}
}
public int WaitForNextCapturedFrame() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@ -124,6 +138,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
assertTrue(callbacks.WaitForNextFrameToRender() > 0);
track.dispose();
source.dispose();
factory.dispose();
}
@Override
@ -199,6 +214,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
assertTrue(callbacks.WaitForNextFrameToRender() > 0);
track.dispose();
source.dispose();
factory.dispose();
}
@SmallTest
@ -223,6 +239,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
assertEquals(MediaSource.State.LIVE, source.state());
track.dispose();
source.dispose();
factory.dispose();
}
@SmallTest
@ -242,9 +259,10 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
getInstrumentation().getContext(), observer);
assertTrue(observer.WaitForCapturerToStart());
observer.WaitForNextCapturedFrame();
// Check the frame size. NV21 is assumed.
// Check the frame size.
assertEquals((format.width*format.height*3)/2, observer.frameSize());
assertTrue(capturer.stopCapture());
capturer.stopCapture();
observer.WaitForCapturerToStop();
}
capturer.dispose();
}

View File

@ -27,7 +27,6 @@
#include "talk/app/webrtc/androidvideocapturer.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
#include "webrtc/base/timeutils.h"
@ -101,7 +100,7 @@ AndroidVideoCapturer::AndroidVideoCapturer(
delegate_(delegate.Pass()),
worker_thread_(NULL),
frame_factory_(NULL),
current_state_(cricket::CS_STOPPED){
current_state_(cricket::CS_STOPPED) {
std::string json_string = delegate_->GetSupportedFormats();
LOG(LS_INFO) << json_string;
@ -174,13 +173,6 @@ bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
}
void AndroidVideoCapturer::OnCapturerStarted(bool success) {
// This method is called from a Java thread.
DCHECK(!worker_thread_->IsCurrent());
worker_thread_->Invoke<void>(
rtc::Bind(&AndroidVideoCapturer::OnCapturerStarted_w, this, success));
}
void AndroidVideoCapturer::OnCapturerStarted_w(bool success) {
DCHECK(worker_thread_->IsCurrent());
cricket::CaptureState new_state =
success ? cricket::CS_RUNNING : cricket::CS_FAILED;
@ -194,21 +186,10 @@ void AndroidVideoCapturer::OnCapturerStarted_w(bool success) {
SignalStateChange(this, new_state);
}
void AndroidVideoCapturer::OnIncomingFrame(signed char* videoFrame,
void AndroidVideoCapturer::OnIncomingFrame(signed char* frame_data,
int length,
int rotation,
int64 time_stamp) {
// This method is called from a Java thread.
DCHECK(!worker_thread_->IsCurrent());
worker_thread_->Invoke<void>(
rtc::Bind(&AndroidVideoCapturer::OnIncomingFrame_w, this, videoFrame,
length, rotation, time_stamp));
}
void AndroidVideoCapturer::OnIncomingFrame_w(signed char* frame_data,
int length,
int rotation,
int64 time_stamp) {
DCHECK(worker_thread_->IsCurrent());
frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp);
SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());

View File

@ -44,9 +44,9 @@ class AndroidVideoCapturerDelegate {
virtual void Start(int width, int height, int framerate,
AndroidVideoCapturer* capturer) = 0;
// Stops capturing. The implementation must synchronously stop the capturer.
// Stops capturing.
// The delegate may not call into AndroidVideoCapturer after this call.
virtual bool Stop() = 0;
virtual void Stop() = 0;
// Must return a JSON string "{{width=xxx, height=xxx, framerate = xxx}}"
virtual std::string GetSupportedFormats() = 0;
@ -74,13 +74,6 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); }
private:
void OnCapturerStarted_w(bool success);
void OnIncomingFrame_w(signed char* frame_data,
int length,
int rotation,
int64 time_stamp);
// cricket::VideoCapturer implementation.
// Video frames will be delivered using
// cricket::VideoCapturer::SignalFrameCaptured on the thread that calls Start.

View File

@ -29,11 +29,70 @@
#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "webrtc/base/bind.h"
namespace webrtc_jni {
jobject AndroidVideoCapturerJni::application_context_ = nullptr;
// JavaCaptureProxy is responsible for marshaling calls from the
// Java VideoCapturerAndroid to the C++ class AndroidVideoCapturer.
// Calls from Java occur on a Java thread and are marshaled to
// AndroidVideoCapturer on the thread that creates an instance of this object.
//
// An instance is created when AndroidVideoCapturerJni::Start is called and
// ownership is passed to an instance of the Java class NativeObserver.
// JavaCaptureProxy is destroyed when NativeObserver has reported that the
// capturer has stopped, see
// VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped.
// Marshaling is done as long as JavaCaptureProxy has a pointer to the
// AndroidVideoCapturer.
class JavaCaptureProxy {
public:
JavaCaptureProxy() : thread_(rtc::Thread::Current()), capturer_(nullptr) {
}
~JavaCaptureProxy() {
}
void SetAndroidCapturer(webrtc::AndroidVideoCapturer* capturer) {
DCHECK(thread_->IsCurrent());
capturer_ = capturer;
}
void OnCapturerStarted(bool success) {
thread_->Invoke<void>(
rtc::Bind(&JavaCaptureProxy::OnCapturerStarted_w, this, success));
}
void OnIncomingFrame(signed char* video_frame,
int length,
int rotation,
int64 time_stamp) {
thread_->Invoke<void>(
rtc::Bind(&JavaCaptureProxy::OnIncomingFrame_w, this, video_frame,
length, rotation, time_stamp));
}
private:
void OnCapturerStarted_w(bool success) {
DCHECK(thread_->IsCurrent());
if (capturer_)
capturer_->OnCapturerStarted(success);
}
void OnIncomingFrame_w(signed char* video_frame,
int length,
int rotation,
int64 time_stamp) {
DCHECK(thread_->IsCurrent());
if (capturer_)
capturer_->OnIncomingFrame(video_frame, length, rotation, time_stamp);
}
rtc::Thread* thread_;
webrtc::AndroidVideoCapturer* capturer_;
};
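
Where JavaCaptureProxy uses rtc::Thread::Invoke for the hand-off, the same idea on the Java side would look roughly like the sketch below. All names are hypothetical, and it posts asynchronously rather than blocking the way Invoke does: callbacks arriving on the capture thread are forwarded to the thread that created the object and are dropped after detach(), mirroring SetAndroidCapturer(nullptr).

import android.os.Handler;
import android.os.Looper;

// Illustrative sketch only -- hypothetical names. Forwards callbacks from the
// capture thread to the thread that constructed this object. The owner thread
// must have a Looper. Unlike the C++ proxy's blocking Invoke, this posts
// asynchronously, so the frame data must stay valid until the runnable runs.
class CallbackMarshaller {
  interface FrameSink {
    void onFrame(byte[] data, int rotation, long timeStamp);
  }

  // Bound to the Looper of the thread that creates the marshaller.
  private final Handler ownerHandler = new Handler(Looper.myLooper());
  private FrameSink sink;

  CallbackMarshaller(FrameSink sink) {
    this.sink = sink;
  }

  // Must be called on the owner thread; mirrors proxy_->SetAndroidCapturer(nullptr).
  void detach() {
    sink = null;
  }

  // May be called from the capture thread.
  void onFrameCaptured(final byte[] data, final int rotation, final long timeStamp) {
    ownerHandler.post(new Runnable() {
      @Override public void run() {
        if (sink != null) {
          sink.onFrame(data, rotation, timeStamp);
        }
      }
    });
  }
}
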
// static
int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
jobject application_context) {
@ -50,24 +109,50 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
: j_capturer_global_(jni, j_video_capturer),
j_video_capturer_class_(
jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
j_frame_observer_class_(
j_observer_class_(
jni,
FindClass(jni,
"org/webrtc/VideoCapturerAndroid$NativeFrameObserver")) {
}
"org/webrtc/VideoCapturerAndroid$NativeObserver")),
proxy_(nullptr) {
thread_checker_.DetachFromThread();
}
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {}
bool AndroidVideoCapturerJni::Init(jstring device_name) {
const jmethodID m(GetMethodID(
jni(), *j_video_capturer_class_, "init", "(Ljava/lang/String;)Z"));
if (!jni()->CallBooleanMethod(*j_capturer_global_, m, device_name)) {
return false;
}
CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
return true;
}
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
DeInit();
}
void AndroidVideoCapturerJni::DeInit() {
DCHECK(proxy_ == nullptr);
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, "deInit", "()V");
jni()->CallVoidMethod(*j_capturer_global_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.DeInit";
}
void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(proxy_ == nullptr);
proxy_ = new JavaCaptureProxy();
proxy_->SetAndroidCapturer(capturer);
j_frame_observer_ = NewGlobalRef(
jni(),
jni()->NewObject(*j_frame_observer_class_,
jni()->NewObject(*j_observer_class_,
GetMethodID(jni(),
*j_frame_observer_class_,
*j_observer_class_,
"<init>",
"(J)V"),
jlongFromPointer(capturer)));
jlongFromPointer(proxy_)));
CHECK_EXCEPTION(jni()) << "error during NewObject";
jmethodID m = GetMethodID(
@ -82,13 +167,15 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
}
bool AndroidVideoCapturerJni::Stop() {
void AndroidVideoCapturerJni::Stop() {
DCHECK(thread_checker_.CalledOnValidThread());
proxy_->SetAndroidCapturer(nullptr);
proxy_ = nullptr;
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"stopCapture", "()Z");
jboolean result = jni()->CallBooleanMethod(*j_capturer_global_, m);
"stopCapture", "()V");
jni()->CallVoidMethod(*j_capturer_global_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
DeleteGlobalRef(jni(), j_frame_observer_);
return result;
}
std::string AndroidVideoCapturerJni::GetSupportedFormats() {
@ -103,20 +190,27 @@ std::string AndroidVideoCapturerJni::GetSupportedFormats() {
JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
JOW(void, VideoCapturerAndroid_00024NativeFrameObserver_nativeOnFrameCaptured)
(JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame,
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnFrameCaptured)
(JNIEnv* jni, jclass, jlong j_proxy, jbyteArray j_frame,
jint rotation, jlong ts) {
jbyte* bytes = jni->GetByteArrayElements(j_frame, NULL);
reinterpret_cast<webrtc::AndroidVideoCapturer*>(
j_capturer)->OnIncomingFrame(bytes, jni->GetArrayLength(j_frame),
rotation, ts);
reinterpret_cast<JavaCaptureProxy*>(
j_proxy)->OnIncomingFrame(bytes, jni->GetArrayLength(j_frame), rotation,
ts);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
}
JOW(void, VideoCapturerAndroid_00024NativeFrameObserver_nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
reinterpret_cast<webrtc::AndroidVideoCapturer*>(
j_capturer)->OnCapturerStarted(j_success);
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_proxy, jboolean j_success) {
JavaCaptureProxy* proxy = reinterpret_cast<JavaCaptureProxy*>(j_proxy);
proxy->OnCapturerStarted(j_success);
if (!j_success)
delete proxy;
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped)
(JNIEnv* jni, jclass, jlong j_proxy) {
delete reinterpret_cast<JavaCaptureProxy*>(j_proxy);
}
} // namespace webrtc_jni

View File

@ -33,9 +33,12 @@
#include "talk/app/webrtc/androidvideocapturer.h"
#include "talk/app/webrtc/java/jni/jni_helpers.h"
#include "webrtc/base/thread_checker.h"
namespace webrtc_jni {
class JavaCaptureProxy;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
// AndroidVideoCapturer.
@ -45,20 +48,29 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
~AndroidVideoCapturerJni();
bool Init(jstring device_name);
void Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) override;
bool Stop() override;
void Stop() override;
std::string GetSupportedFormats() override;
private:
JNIEnv* jni();
void DeInit();
const ScopedGlobalRef<jobject> j_capturer_global_;
const ScopedGlobalRef<jclass> j_video_capturer_class_;
const ScopedGlobalRef<jclass> j_frame_observer_class_;
const ScopedGlobalRef<jclass> j_observer_class_;
jobject j_frame_observer_;
rtc::ThreadChecker thread_checker_;
// The proxy is a valid pointer between calling Start and Stop.
// It destroys itself when Java VideoCapturerAndroid has been stopped.
JavaCaptureProxy* proxy_;
static jobject application_context_;
DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);

View File

@ -72,7 +72,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
LoadClass(jni, "android/graphics/SurfaceTexture");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeFrameObserver");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");

View File

@ -1385,15 +1385,10 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
j_videocapturer_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
const jmethodID m(GetMethodID(
jni, j_video_capturer_class, "Init", "(Ljava/lang/String;)Z"));
if (!jni->CallBooleanMethod(j_video_capturer, m, j_device_name)) {
return nullptr;
}
CHECK_EXCEPTION(jni) << "error during CallVoidMethod";
rtc::scoped_ptr<webrtc::AndroidVideoCapturerDelegate> delegate(
rtc::scoped_ptr<AndroidVideoCapturerJni> delegate(
new AndroidVideoCapturerJni(jni, j_video_capturer));
if (!delegate->Init(j_device_name))
return nullptr;
rtc::scoped_ptr<webrtc::AndroidVideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate.Pass()));

View File

@ -178,30 +178,50 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Called by native code.
// Enumerates resolution and frame rates for all cameras to be able to switch
// cameras. Initializes local variables for the camera named |deviceName|.
// cameras. Initializes local variables for the camera named |deviceName| and
// starts a thread to be used for capturing.
// If deviceName is empty, the first available device is used in order to be
// compatible with the generic VideoCapturer class.
boolean Init(String deviceName) {
Log.e(TAG, "Init " + deviceName);
if (!InitStatics())
boolean init(String deviceName) {
Log.d(TAG, "init " + deviceName);
if (!initStatics())
return false;
boolean foundDevice = false;
if (deviceName.isEmpty()) {
this.id = 0;
return true;
}
Boolean foundDevice = false;
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(getDeviceName(i))) {
this.id = i;
foundDevice = true;
foundDevice = true;
} else {
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(getDeviceName(i))) {
this.id = i;
foundDevice = true;
}
}
}
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
return foundDevice;
}
private static boolean InitStatics() {
// Called by native code. Frees the Java thread created in Init.
void deInit() throws InterruptedException {
Log.d(TAG, "deInit");
if (cameraThreadHandler != null) {
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
Log.d(TAG, "stop CameraThread");
Looper.myLooper().quit();
}
});
cameraThread.join();
cameraThreadHandler = null;
}
}
private static boolean initStatics() {
if (supportedFormats != null)
return true;
try {
@ -298,20 +318,15 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
final Context applicationContext, final CapturerObserver frameObserver) {
Log.d(TAG, "startCapture requested: " + width + "x" + height
+ "@" + framerate);
if (cameraThread != null || cameraThreadHandler != null) {
throw new RuntimeException("Camera thread already started!");
}
if (applicationContext == null) {
throw new RuntimeException("applicationContext not set.");
}
if (frameObserver == null) {
throw new RuntimeException("frameObserver not set.");
}
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
this.width = width;
this.height = height;
this.framerate = framerate;
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
@ -327,9 +342,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Throwable error = null;
this.applicationContext = applicationContext;
this.frameObserver = frameObserver;
this.width = width;
this.height = height;
this.framerate = framerate;
try {
this.camera = Camera.open(id);
this.info = new Camera.CameraInfo();
@ -406,8 +418,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
Log.e(TAG, "startCapture failed", error);
if (camera != null) {
Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
stopCaptureOnCameraThread(resultDropper);
stopCaptureOnCameraThread();
frameObserver.OnCapturerStarted(false);
}
frameObserver.OnCapturerStarted(false);
@ -415,37 +426,19 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
// Called by native code. Stops capturing asynchronously; the CapturerObserver
// is notified on the camera thread once the camera has stopped.
synchronized boolean stopCapture() {
synchronized void stopCapture() {
Log.d(TAG, "stopCapture");
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
stopCaptureOnCameraThread(result);
stopCaptureOnCameraThread();
}
});
boolean status = exchange(result, false); // |false| is a dummy value here.
Log.d(TAG, "stopCapture wait");
try {
cameraThread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
cameraThreadHandler = null;
cameraThread = null;
Log.d(TAG, "stopCapture done");
return status;
}
private void stopCaptureOnCameraThread(Exchanger<Boolean> result) {
private void stopCaptureOnCameraThread() {
Log.d(TAG, "stopCaptureOnCameraThread");
if (camera == null) {
throw new RuntimeException("Camera is already stopped!");
}
frameObserver = null;
doStopCaptureOnCamerathread();
exchange(result, true);
Looper.myLooper().quit();
frameObserver.OnCapturerStopped();
return;
}
@ -570,9 +563,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Interface used for providing callbacks to an observer.
interface CapturerObserver {
// Notify if the camera have beens started successfully or not.
// Notify if the camera has been started successfully or not.
// Called on a Java thread owned by VideoCapturerAndroid.
abstract void OnCapturerStarted(boolean success);
// Notify that the camera has been stopped.
// Called on a Java thread owned by VideoCapturerAndroid.
abstract void OnCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by
// VideoCapturerAndroid.
abstract void OnFrameCaptured(byte[] data, int rotation, long timeStamp);
@ -580,27 +577,32 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
public static class NativeFrameObserver implements CapturerObserver {
private final long nativeCapturer;
public static class NativeObserver implements CapturerObserver {
private final long nativeProxy;
public NativeFrameObserver(long nativeCapturer) {
this.nativeCapturer = nativeCapturer;
public NativeObserver(long nativeProxy) {
this.nativeProxy = nativeProxy;
}
@Override
public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) {
nativeOnFrameCaptured(nativeCapturer, data, rotation, timeStamp);
nativeOnFrameCaptured(nativeProxy, data, rotation, timeStamp);
}
private native void nativeOnFrameCaptured(
long captureObject, byte[] data, int rotation, long timeStamp);
@Override
public void OnCapturerStarted(boolean success) {
nativeCapturerStarted(nativeCapturer, success);
nativeCapturerStarted(nativeProxy, success);
}
private native void nativeCapturerStarted(long captureObject,
@Override
public void OnCapturerStopped() {
nativeCapturerStopped(nativeProxy);
}
private native void nativeCapturerStarted(long proxyObject,
boolean success);
private native void nativeCapturerStopped(long proxyObject);
private native void nativeOnFrameCaptured(
long proxyObject, byte[] data, int rotation, long timeStamp);
}
}