Refactor how VideoCapturerAndroid delivers frames and is stopped.

With this CL, video buffers are now allocated as direct buffers.
These buffers are guaranteed to live as long as the capturer is running.
We can now post frames in C++ from the Java thread to the C++ worker thread
and let C++ post the buffers back to Java when it has finished
processing them.

This CL also reverts to making Stop synchronous, so that it is guaranteed that the C++ worker thread is not used and no frames are delivered to VideoCapturerAndroid after Stop completes.

BUG=4318
TESTED=On an N5, N6, N9 and a Samsung device.
R=glaznev@webrtc.org, magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/43369004

Cr-Commit-Position: refs/heads/master@{#8493}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8493 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: perkj@webrtc.org
Date: 2015-02-25 09:20:07 +00:00
Parent: d4dfba8ea1
Commit: 112f127170
7 changed files with 232 additions and 185 deletions
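
To make the new contract concrete before the per-file diffs: below is a
self-contained toy model (illustrative names only, not the WebRTC API) of the
buffer round-trip this CL introduces. The capturer owns a small fixed pool of
direct ByteBuffers, hands each filled buffer to exactly one consumer keyed by
its capture timestamp, and may refill a buffer only after the consumer has
returned it.

import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.Map;

// Toy model of the buffer ownership protocol. A real capturer would fill
// |free| buffers from the camera; here we model only the bookkeeping.
public class BufferRoundTrip {
  private final ArrayDeque<ByteBuffer> free = new ArrayDeque<ByteBuffer>();
  private final Map<Long, ByteBuffer> inFlight = new HashMap<Long, ByteBuffer>();

  public BufferRoundTrip(int bufferCount, int bufferSize) {
    // A small, fixed depth (the diff uses 3) trades memory against the risk
    // of stalling the producer if consumers are slow to return buffers.
    for (int i = 0; i < bufferCount; ++i) {
      // Direct buffers have a stable native address (GetDirectBufferAddress),
      // which is why this CL switches to them.
      free.add(ByteBuffer.allocateDirect(bufferSize));
    }
  }

  // Producer side: hand a filled buffer to the consumer, keyed by timestamp.
  public ByteBuffer deliver(long timeStamp) {
    ByteBuffer buffer = free.poll();
    if (buffer == null) {
      throw new IllegalStateException("Out of capture buffers.");
    }
    inFlight.put(timeStamp, buffer);
    return buffer;
  }

  // Consumer side: give the buffer identified by |timeStamp| back for reuse.
  public void returnBuffer(long timeStamp) {
    ByteBuffer buffer = inFlight.remove(timeStamp);
    if (buffer == null) {
      throw new IllegalStateException("Unknown timestamp: " + timeStamp);
    }
    free.add(buffer);
  }
}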


@@ -33,6 +33,7 @@ import android.test.suitebuilder.annotation.SmallTest;
import org.webrtc.VideoCapturerAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@SuppressWarnings("deprecation")
@@ -67,7 +68,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
private int frameSize = 0;
private Object frameLock = 0;
private Object capturerStartLock = 0;
private Object capturerStopLock = 0;
private boolean captureStartResult = false;
@Override
@@ -79,17 +79,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
}
@Override
public void OnCapturerStopped() {
synchronized (capturerStopLock) {
capturerStopLock.notify();
}
}
@Override
public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) {
public void OnFrameCaptured(ByteBuffer frame, int rotation,
long timeStamp) {
assertTrue(frame.isDirect());
synchronized (frameLock) {
++framesCaptured;
frameSize = data.length;
frameSize = frame.capacity();
frameLock.notify();
}
}
@@ -101,12 +96,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
}
}
public void WaitForCapturerToStop() throws InterruptedException {
synchronized (capturerStopLock) {
capturerStopLock.wait();
}
}
public int WaitForNextCapturedFrame() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@@ -262,7 +251,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
// Check the frame size.
assertEquals((format.width*format.height*3)/2, observer.frameSize());
capturer.stopCapture();
observer.WaitForCapturerToStop();
}
capturer.dispose();
}


@@ -58,7 +58,7 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY);
}
void UpdateCapturedFrame(signed char* frame_data,
void UpdateCapturedFrame(void* frame_data,
int length,
int rotation,
int64 time_stamp_in_ms) {
@@ -186,13 +186,14 @@ void AndroidVideoCapturer::OnCapturerStarted(bool success) {
SignalStateChange(this, new_state);
}
void AndroidVideoCapturer::OnIncomingFrame(signed char* frame_data,
void AndroidVideoCapturer::OnIncomingFrame(void* frame_data,
int length,
int rotation,
int64 time_stamp) {
DCHECK(worker_thread_->IsCurrent());
frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp);
SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
delegate_->ReturnBuffer(time_stamp);
}
} // namespace webrtc
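
Note the ordering in OnIncomingFrame above: deliver the frame synchronously,
then return the buffer. A minimal Java rendering of that rule (hypothetical
Sink name; BufferRoundTrip is the toy pool sketched near the top of this page):

import java.nio.ByteBuffer;

// Because delivery blocks until every consumer is done with the bytes (or
// has copied them), the buffer can be recycled immediately afterwards.
class WorkerDelivery {
  interface Sink {
    void onFrame(ByteBuffer frame, int rotation, long timeStamp);
  }

  private final Sink sink;               // hypothetical synchronous consumer
  private final BufferRoundTrip buffers; // toy pool from the earlier sketch

  WorkerDelivery(Sink sink, BufferRoundTrip buffers) {
    this.sink = sink;
    this.buffers = buffers;
  }

  void onIncomingFrame(ByteBuffer frame, int rotation, long timeStamp) {
    sink.onFrame(frame, rotation, timeStamp); // returns only when sink is done
    buffers.returnBuffer(timeStamp);          // now safe to reuse the memory
  }
}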


@@ -48,6 +48,10 @@ class AndroidVideoCapturerDelegate {
// The delegate may not call into AndroidVideoCapturer after this call.
virtual void Stop() = 0;
// Notify that a frame received in OnIncomingFrame with |time_stamp| has been
// processed and can be returned.
virtual void ReturnBuffer(int64 time_stamp) = 0;
// Must return a JSON string "{{width=xxx, height=xxx, framerate = xxx}}"
virtual std::string GetSupportedFormats() = 0;
};
@@ -60,13 +64,11 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate);
virtual ~AndroidVideoCapturer();
// Called from JNI when the capturer has been started. Called from a Java
// thread.
// Called from JNI when the capturer has been started.
void OnCapturerStarted(bool success);
// Called from JNI when a new frame has been captured. Called from a Java
// thread.
void OnIncomingFrame(signed char* videoFrame,
// Called from JNI when a new frame has been captured.
void OnIncomingFrame(void* video_frame,
int length,
int rotation,
int64 time_stamp);


@@ -35,64 +35,6 @@ namespace webrtc_jni {
jobject AndroidVideoCapturerJni::application_context_ = nullptr;
// JavaCaptureProxy is responsible for marshaling calls from the
// Java VideoCapturerAndroid to the C++ class AndroidVideoCapturer.
// Calls from Java occur on a Java thread and are marshaled to
// AndroidVideoCapturer on the thread that creates an instance of this object.
//
// An instance is created when AndroidVideoCapturerJni::Start is called and
// ownership is passed to an instance of the Java class NativeObserver.
// JavaCaptureProxy is destroyed when NativeObserver has reported that the
// capturer has stopped, see
// VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped.
// Marshaling is done as long as JavaCaptureProxy has a pointer to the
// AndroidVideoCapturer.
class JavaCaptureProxy {
public:
JavaCaptureProxy() : thread_(rtc::Thread::Current()), capturer_(nullptr) {
}
~JavaCaptureProxy() {
}
void SetAndroidCapturer(webrtc::AndroidVideoCapturer* capturer) {
DCHECK(thread_->IsCurrent());
capturer_ = capturer;
}
void OnCapturerStarted(bool success) {
thread_->Invoke<void>(
rtc::Bind(&JavaCaptureProxy::OnCapturerStarted_w, this, success));
}
void OnIncomingFrame(signed char* video_frame,
int length,
int rotation,
int64 time_stamp) {
thread_->Invoke<void>(
rtc::Bind(&JavaCaptureProxy::OnIncomingFrame_w, this, video_frame,
length, rotation, time_stamp));
}
private:
void OnCapturerStarted_w(bool success) {
DCHECK(thread_->IsCurrent());
if (capturer_)
capturer_->OnCapturerStarted(success);
}
void OnIncomingFrame_w(signed char* video_frame,
int length,
int rotation,
int64 time_stamp) {
DCHECK(thread_->IsCurrent());
if (capturer_)
capturer_->OnIncomingFrame(video_frame, length, rotation, time_stamp);
}
rtc::Thread* thread_;
webrtc::AndroidVideoCapturer* capturer_;
};
// static
int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
jobject appliction_context) {
@@ -104,6 +46,20 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
return 0;
}
// static
rtc::scoped_ptr<AndroidVideoCapturerJni>
AndroidVideoCapturerJni::Create(JNIEnv* jni,
jobject j_video_capture,
jstring device_name) {
rtc::scoped_ptr<AndroidVideoCapturerJni> capturer(
new AndroidVideoCapturerJni(jni,
j_video_capture));
if (capturer->Init(device_name))
return capturer.Pass();
return nullptr;
}
AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
jobject j_video_capturer)
: j_capturer_global_(jni, j_video_capturer),
@@ -113,7 +69,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
jni,
FindClass(jni,
"org/webrtc/VideoCapturerAndroid$NativeObserver")),
proxy_(nullptr) {
capturer_(nullptr) {
thread_checker_.DetachFromThread();
}
@@ -128,22 +84,14 @@ bool AndroidVideoCapturerJni::Init(jstring device_name) {
}
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
DeInit();
}
void AndroidVideoCapturerJni::DeInit() {
DCHECK(proxy_ == nullptr);
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, "deInit", "()V");
jni()->CallVoidMethod(*j_capturer_global_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.DeInit";
}
void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(proxy_ == nullptr);
proxy_ = new JavaCaptureProxy();
proxy_->SetAndroidCapturer(capturer);
DCHECK(capturer_ == nullptr);
thread_ = rtc::Thread::Current();
capturer_ = capturer;
j_frame_observer_ = NewGlobalRef(
jni(),
@@ -152,7 +100,7 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
*j_observer_class_,
"<init>",
"(J)V"),
jlongFromPointer(proxy_)));
jlongFromPointer(this)));
CHECK_EXCEPTION(jni()) << "error during NewObject";
jmethodID m = GetMethodID(
@@ -169,13 +117,21 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
void AndroidVideoCapturerJni::Stop() {
DCHECK(thread_checker_.CalledOnValidThread());
proxy_->SetAndroidCapturer(nullptr);
proxy_ = nullptr;
capturer_ = nullptr;
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"stopCapture", "()V");
jni()->CallVoidMethod(*j_capturer_global_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
DeleteGlobalRef(jni(), j_frame_observer_);
// Do not process frames in flight after Stop has returned, since
// the memory buffers they point to have been deleted.
rtc::MessageQueueManager::Clear(&invoker_);
}
void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) {
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"returnBuffer", "(J)V");
jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
}
std::string AndroidVideoCapturerJni::GetSupportedFormats() {
@@ -188,29 +144,53 @@ std::string AndroidVideoCapturerJni::GetSupportedFormats() {
return JavaToStdString(jni(), j_json_caps);
}
void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
invoker_.AsyncInvoke<void>(
thread_,
rtc::Bind(&AndroidVideoCapturerJni::OnCapturerStarted_w, this, success));
}
void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame,
int length,
int rotation,
int64 time_stamp) {
invoker_.AsyncInvoke<void>(
thread_,
rtc::Bind(&AndroidVideoCapturerJni::OnIncomingFrame_w,
this, video_frame, length, rotation, time_stamp));
}
void AndroidVideoCapturerJni::OnCapturerStarted_w(bool success) {
DCHECK(thread_checker_.CalledOnValidThread());
if (capturer_)
capturer_->OnCapturerStarted(success);
}
void AndroidVideoCapturerJni::OnIncomingFrame_w(void* video_frame,
int length,
int rotation,
int64 time_stamp) {
DCHECK(thread_checker_.CalledOnValidThread());
if (capturer_)
capturer_->OnIncomingFrame(video_frame, length, rotation, time_stamp);
}
JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnFrameCaptured)
(JNIEnv* jni, jclass, jlong j_proxy, jbyteArray j_frame,
(JNIEnv* jni, jclass, jlong j_capturer, jobject j_frame,
jint rotation, jlong ts) {
jbyte* bytes = jni->GetByteArrayElements(j_frame, NULL);
reinterpret_cast<JavaCaptureProxy*>(
j_proxy)->OnIncomingFrame(bytes, jni->GetArrayLength(j_frame), rotation,
ts);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
void* bytes = jni->GetDirectBufferAddress(j_frame);
DCHECK(bytes != NULL);
jlong length = jni->GetDirectBufferCapacity(j_frame);
reinterpret_cast<AndroidVideoCapturerJni*>(
j_capturer)->OnIncomingFrame(bytes, length, rotation, ts);
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_proxy, jboolean j_success) {
JavaCaptureProxy* proxy = reinterpret_cast<JavaCaptureProxy*>(j_proxy);
proxy->OnCapturerStarted(j_success);
if (!j_success)
delete proxy;
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped)
(JNIEnv* jni, jclass, jlong j_proxy) {
delete reinterpret_cast<JavaCaptureProxy*>(j_proxy);
(JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
j_success);
}
} // namespace webrtc_jni
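
With JavaCaptureProxy gone, rtc::AsyncInvoker marshals the Java-thread
callbacks onto the C++ thread, and Stop() clears whatever is still queued so
that nothing runs after it returns. For readers more at home on the Java side,
here is the same pattern sketched with android.os.Handler (an analogue, not
the actual implementation):

import android.os.Handler;

// Calls arriving on an arbitrary thread are posted to the owner thread;
// stop() drops anything still queued, mirroring
// rtc::MessageQueueManager::Clear(&invoker_) in Stop() above.
class MarshalingSketch {
  interface Listener {
    void onFrame(long timeStamp);
  }

  private final Handler ownerHandler; // bound to the owning thread's Looper
  private volatile Listener listener;

  MarshalingSketch(Handler ownerHandler, Listener listener) {
    this.ownerHandler = ownerHandler;
    this.listener = listener;
  }

  // May be called from any thread, e.g. a camera callback thread.
  void onFrame(final long timeStamp) {
    ownerHandler.post(new Runnable() {
      @Override public void run() {
        Listener l = listener;
        if (l != null) {
          l.onFrame(timeStamp); // runs on the owner thread
        }
      }
    });
  }

  // Called on the owner thread; no callback fires after this returns.
  void stop() {
    listener = null;
    ownerHandler.removeCallbacksAndMessages(null); // drop in-flight posts
  }
}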


@@ -33,32 +33,53 @@
#include "talk/app/webrtc/androidvideocapturer.h"
#include "talk/app/webrtc/java/jni/jni_helpers.h"
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/thread_checker.h"
namespace webrtc_jni {
class JavaCaptureProxy;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
// AndroidVideoCapturer.
class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
public:
static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
~AndroidVideoCapturerJni();
bool Init(jstring device_name);
// Creates a new instance of AndroidVideoCapturerJni. Returns nullptr if
// it can't be created. This happens if |device_name| is invalid.
static rtc::scoped_ptr<AndroidVideoCapturerJni> Create(
JNIEnv* jni,
jobject j_video_capture, // Instance of VideoCapturerAndroid
jstring device_name); // Name of the camera to use.
void Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) override;
void Stop() override;
virtual void ReturnBuffer(int64 time_stamp) override;
std::string GetSupportedFormats() override;
private:
// Called from VideoCapturerAndroid::NativeObserver on a Java thread.
void OnCapturerStarted(bool success);
void OnIncomingFrame(void* video_frame,
int length,
int rotation,
int64 time_stamp);
private:
AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
bool Init(jstring device_name);
void OnCapturerStarted_w(bool success);
void OnCapturerStopped_w();
void OnIncomingFrame_w(void* video_frame,
int length,
int rotation,
int64 time_stamp);
JNIEnv* jni();
void DeInit();
const ScopedGlobalRef<jobject> j_capturer_global_;
const ScopedGlobalRef<jclass> j_video_capturer_class_;
@@ -67,9 +88,12 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
rtc::ThreadChecker thread_checker_;
// The proxy is a valid pointer between calling Start and Stop.
// It destroys itself when Java VideoCapturerAndroid has been stopped.
JavaCaptureProxy* proxy_;
rtc::Thread* thread_; // The thread that Start is called on.
// |capturer_| is guaranteed to be a valid pointer from a call to
// AndroidVideoCapturerDelegate::Start until
// AndroidVideoCapturerDelegate::Stop is called.
webrtc::AndroidVideoCapturer* capturer_;
rtc::AsyncInvoker invoker_;
static jobject application_context_;

View File

@@ -1385,9 +1385,9 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
j_videocapturer_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
rtc::scoped_ptr<AndroidVideoCapturerJni> delegate(
new AndroidVideoCapturerJni(jni, j_video_capturer));
if (!delegate->Init(j_device_name))
rtc::scoped_ptr<AndroidVideoCapturerJni> delegate =
AndroidVideoCapturerJni::Create(jni, j_video_capturer, j_device_name);
if (!delegate.get())
return nullptr;
rtc::scoped_ptr<webrtc::AndroidVideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate.Pass()));


@@ -48,6 +48,7 @@ import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Exchanger;
@@ -79,17 +80,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private Camera.CameraInfo info;
private SurfaceTexture cameraSurfaceTexture;
private int[] cameraGlTextures = null;
// Arbitrary queue depth. Higher number means more memory allocated & held,
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private final int numCaptureBuffers = 3;
private FramePool videoBuffers = null;
private int width;
private int height;
private int framerate;
private CapturerObserver frameObserver = null;
// List of formats supported by all cameras. This list is filled once in order
// to be able to switch cameras.
private static ArrayList<CaptureFormat>[] supportedFormats;
private static List<List<CaptureFormat>> supportedFormats;
// Returns device names that can be used to create a new VideoCapturerAndroid.
public static String[] getDeviceNames() {
@@ -152,7 +150,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
id = ++id % Camera.getNumberOfCameras();
CaptureFormat formatToUse = null;
for (CaptureFormat format : supportedFormats[id]) {
List<CaptureFormat> formats = supportedFormats.get(id);
for (CaptureFormat format : formats) {
if (format.width == width && format.height == height) {
formatToUse = format;
break;
@@ -199,35 +198,17 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
}
}
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
return foundDevice;
}
// Called by native code. Frees the Java thread created in Init.
void deInit() throws InterruptedException {
Log.d(TAG, "deInit");
if (cameraThreadHandler != null) {
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
Log.d(TAG, "stop CameraThread");
Looper.myLooper().quit();
}
});
cameraThread.join();
cameraThreadHandler = null;
}
}
private static boolean initStatics() {
if (supportedFormats != null)
return true;
try {
supportedFormats = new ArrayList[Camera.getNumberOfCameras()];
supportedFormats =
new ArrayList<List<CaptureFormat>>(Camera.getNumberOfCameras());
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
supportedFormats[i] = getSupportedFormats(i);
supportedFormats.add(getSupportedFormats(i));
}
return true;
} catch (Exception e) {
@@ -257,7 +238,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
private static String getSupportedFormatsAsJson(int id) throws JSONException {
ArrayList<CaptureFormat> formats = supportedFormats[id];
List<CaptureFormat> formats = supportedFormats.get(id);
JSONArray json_formats = new JSONArray();
for (CaptureFormat format : formats) {
JSONObject json_format = new JSONObject();
@@ -288,7 +269,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
parameters.getSupportedPreviewSizes();
for (Camera.Size size : supportedSizes) {
if (size.width % 16 != 0) {
// If the width is not a multiple of 16, The frames received from the
// If the width is not a multiple of 16, the frames received from the
// camera will have a stride != width when YV12 is used. Since we
// currently only support tightly packed images, we simply ignore those
// resolutions.
@@ -332,12 +313,20 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
throw new RuntimeException("frameObserver not set.");
}
if (width % 16 != 0) {
throw new RuntimeException("widht must be a multiple of 16." );
throw new RuntimeException("width must be a multiple of 16." );
}
this.width = width;
this.height = height;
this.framerate = framerate;
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
// We must guarantee that buffers sent to an observer are kept alive until
// stopCapture has completed. Therefore, create the buffers here and
// abandon them after the camera thread has been stopped.
videoBuffers = new FramePool(width, height, ImageFormat.YV12);
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
startCaptureOnCameraThread(width, height, framerate, frameObserver,
@@ -356,7 +345,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
this.camera = Camera.open(id);
this.info = new Camera.CameraInfo();
Camera.getCameraInfo(id, info);
// No local renderer (we only care about onPreviewFrame() buffers, not a
// directly-displayed UI element). Camera won't capture without
// setPreview{Texture,Display}, so we create a SurfaceTexture and hand
@@ -414,12 +402,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Note: setRecordingHint(true) actually decreases frame rate on N5.
// parameters.setRecordingHint(true);
int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
for (int i = 0; i < numCaptureBuffers; i++) {
camera.addCallbackBuffer(new byte[bufSize]);
}
videoBuffers.addBuffersAsCameraCallbackBuffers(camera);
camera.setPreviewCallbackWithBuffer(this);
camera.startPreview();
frameObserver.OnCapturerStarted(true);
return;
@@ -436,19 +420,22 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
// Called by native code. Returns when the camera is known to be stopped.
synchronized void stopCapture() {
synchronized void stopCapture() throws InterruptedException {
Log.d(TAG, "stopCapture");
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
stopCaptureOnCameraThread();
}
});
cameraThread.join();
cameraThreadHandler = null;
videoBuffers = null;
}
private void stopCaptureOnCameraThread() {
Log.d(TAG, "stopCaptureOnCameraThread");
doStopCaptureOnCamerathread();
frameObserver.OnCapturerStopped();
Looper.myLooper().quit();
return;
}
@@ -479,6 +466,16 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
applicationContext);
}
synchronized void returnBuffer(final long timeStamp) {
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
if (camera == null)
return;
videoBuffers.addBufferAsCameraCallbackBuffer(camera, timeStamp);
}
});
}
private int getDeviceOrientation() {
int orientation = 0;
@@ -540,8 +537,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
rotation = (info.orientation + rotation) % 360;
frameObserver.OnFrameCaptured(data, rotation, captureTimeMs);
camera.addCallbackBuffer(data);
frameObserver.OnFrameCaptured(
videoBuffers.reserveByteBuffer(data, captureTimeMs),
rotation,
captureTimeMs);
}
// runCameraThreadUntilIdle makes sure all posted messages to the cameraThread
@@ -571,48 +570,101 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
}
// Class used for allocating and bookkeeping video frames. All buffers are
// allocated as direct ByteBuffers so that they can be used directly from
// native code.
private static class FramePool {
// Arbitrary queue depth. Higher number means more memory allocated & held,
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private static int numCaptureBuffers = 3;
private final Frame cameraFrames[];
private static class Frame {
public final ByteBuffer buffer;
public long timeStamp = -1;
Frame(int width, int height, int format) {
int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
buffer = ByteBuffer.allocateDirect(bufSize);
}
byte[] data() {
return buffer.array();
}
}
FramePool(int width, int height, int format) {
cameraFrames = new Frame[numCaptureBuffers];
for (int i = 0; i < numCaptureBuffers; i++) {
cameraFrames[i] = new Frame(width, height, format);
}
}
// Add to |camera| all free buffers that are currently not sent to a client.
void addBuffersAsCameraCallbackBuffers(Camera camera) {
for (Frame frame : cameraFrames) {
if (frame.timeStamp < 0)
camera.addCallbackBuffer(frame.data());
}
}
ByteBuffer reserveByteBuffer(byte[] data, long timeStamp) {
for (Frame frame : cameraFrames) {
if (data == frame.data()) {
frame.timeStamp = timeStamp;
return frame.buffer;
}
}
throw new RuntimeException("unknown data buffer?!?");
}
// Add the buffer with |timeStamp| to |camera|.
void addBufferAsCameraCallbackBuffer(Camera camera, long timeStamp) {
for (Frame frame : cameraFrames) {
if (timeStamp == frame.timeStamp) {
frame.timeStamp = -1;
camera.addCallbackBuffer(frame.data());
return;
}
}
throw new RuntimeException("unknown data buffer returned?!?");
}
}
// Interface used for providing callbacks to an observer.
interface CapturerObserver {
// Notify whether the camera has been started successfully or not.
// Called on a Java thread owned by VideoCapturerAndroid.
abstract void OnCapturerStarted(boolean success);
// Notify that the camera has been stopped.
// Called on a Java thread owned by VideoCapturerAndroid.
abstract void OnCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by
// VideoCapturerAndroid.
abstract void OnFrameCaptured(byte[] data, int rotation, long timeStamp);
abstract void OnFrameCaptured(ByteBuffer buffer, int rotation, long timeStamp);
}
// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
public static class NativeObserver implements CapturerObserver {
private final long nativeProxy;
static class NativeObserver implements CapturerObserver {
private final long nativeCapturer;
public NativeObserver(long nativeProxy) {
this.nativeProxy = nativeProxy;
}
@Override
public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) {
nativeOnFrameCaptured(nativeProxy, data, rotation, timeStamp);
public NativeObserver(long nativeCapturer) {
this.nativeCapturer = nativeCapturer;
}
@Override
public void OnCapturerStarted(boolean success) {
nativeCapturerStarted(nativeProxy, success);
nativeCapturerStarted(nativeCapturer, success);
}
@Override
public void OnCapturerStopped() {
nativeCapturerStopped(nativeProxy);
public void OnFrameCaptured(ByteBuffer byteBuffer, int rotation,
long timeStamp) {
nativeOnFrameCaptured(nativeCapturer, byteBuffer, rotation, timeStamp);
}
private native void nativeCapturerStarted(long proxyObject,
private native void nativeCapturerStarted(long nativeCapturer,
boolean success);
private native void nativeCapturerStopped(long proxyObject);
private native void nativeOnFrameCaptured(
long proxyObject, byte[] data, int rotation, long timeStamp);
private native void nativeOnFrameCaptured(long nativeCapturer,
ByteBuffer byteBuffer, int rotation, long timeStamp);
}
}
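
One detail the diff only shows in passing: startCapture now creates the camera
thread and blocks on an Exchanger until that thread's Handler is ready. A
standalone sketch of the handoff (assumed to match the CameraThread
implementation, which is not part of this diff):

import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.Exchanger;

// A thread that prepares a Looper, hands a Handler bound to that Looper back
// to the starting thread through an Exchanger, then enters its message loop.
class LooperThreadSketch extends Thread {
  private final Exchanger<Handler> exchanger;

  LooperThreadSketch(Exchanger<Handler> exchanger) {
    this.exchanger = exchanger;
  }

  @Override public void run() {
    Looper.prepare();
    try {
      exchanger.exchange(new Handler()); // hand our Handler to the caller
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    Looper.loop(); // runs until a posted Runnable calls Looper.myLooper().quit()
  }

  static Handler startAndGetHandler() throws InterruptedException {
    Exchanger<Handler> exchanger = new Exchanger<Handler>();
    new LooperThreadSketch(exchanger).start();
    return exchanger.exchange(null); // blocks until the thread is ready
  }
}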