diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java index fec3b50fa..23cc2142a 100644 --- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java +++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java @@ -33,6 +33,7 @@ import android.test.suitebuilder.annotation.SmallTest; import org.webrtc.VideoCapturerAndroid.CaptureFormat; import org.webrtc.VideoRenderer.I420Frame; +import java.nio.ByteBuffer; import java.util.ArrayList; @SuppressWarnings("deprecation") @@ -67,7 +68,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase { private int frameSize = 0; private Object frameLock = 0; private Object capturerStartLock = 0; - private Object capturerStopLock = 0; private boolean captureStartResult = false; @Override @@ -79,17 +79,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase { } @Override - public void OnCapturerStopped() { - synchronized (capturerStopLock) { - capturerStopLock.notify(); - } - } - - @Override - public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) { + public void OnFrameCaptured(ByteBuffer frame, int rotation, + long timeStamp) { + assertTrue(frame.isDirect()); synchronized (frameLock) { ++framesCaptured; - frameSize = data.length; + frameSize = frame.capacity(); frameLock.notify(); } } @@ -101,12 +96,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase { } } - public void WaitForCapturerToStop() throws InterruptedException { - synchronized (capturerStopLock) { - capturerStopLock.wait(); - } - } - public int WaitForNextCapturedFrame() throws InterruptedException { synchronized (frameLock) { frameLock.wait(); @@ -262,7 +251,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase { // Check the frame size. assertEquals((format.width*format.height*3)/2, observer.frameSize()); capturer.stopCapture(); - observer.WaitForCapturerToStop(); } capturer.dispose(); } diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc index d1238b362..48b822b27 100644 --- a/talk/app/webrtc/androidvideocapturer.cc +++ b/talk/app/webrtc/androidvideocapturer.cc @@ -58,7 +58,7 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory { captured_frame_.fourcc = static_cast(cricket::FOURCC_ANY); } - void UpdateCapturedFrame(signed char* frame_data, + void UpdateCapturedFrame(void* frame_data, int length, int rotation, int64 time_stamp_in_ms) { @@ -186,13 +186,14 @@ void AndroidVideoCapturer::OnCapturerStarted(bool success) { SignalStateChange(this, new_state); } -void AndroidVideoCapturer::OnIncomingFrame(signed char* frame_data, +void AndroidVideoCapturer::OnIncomingFrame(void* frame_data, int length, int rotation, int64 time_stamp) { DCHECK(worker_thread_->IsCurrent()); frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp); SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); + delegate_->ReturnBuffer(time_stamp); } } // namespace webrtc diff --git a/talk/app/webrtc/androidvideocapturer.h b/talk/app/webrtc/androidvideocapturer.h index ed31cd2e1..380313bf7 100644 --- a/talk/app/webrtc/androidvideocapturer.h +++ b/talk/app/webrtc/androidvideocapturer.h @@ -48,6 +48,10 @@ class AndroidVideoCapturerDelegate { // The delegate may not call into AndroidVideoCapturer after this call. 
virtual void Stop() = 0; + // Notify that a frame received in OnIncomingFrame with |time_stamp| has been + // processed and can be returned. + virtual void ReturnBuffer(int64 time_stamp) = 0; + // Must returns a JSON string "{{width=xxx, height=xxx, framerate = xxx}}" virtual std::string GetSupportedFormats() = 0; }; @@ -60,13 +64,11 @@ class AndroidVideoCapturer : public cricket::VideoCapturer { rtc::scoped_ptr delegate); virtual ~AndroidVideoCapturer(); - // Called from JNI when the capturer has been started. Called from a Java - // thread. + // Called from JNI when the capturer has been started. void OnCapturerStarted(bool success); - // Called from JNI when a new frame has been captured. Called from a Java - // thread. - void OnIncomingFrame(signed char* videoFrame, + // Called from JNI when a new frame has been captured. + void OnIncomingFrame(void* video_frame, int length, int rotation, int64 time_stamp); diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc index 6e9710f24..b445be5aa 100644 --- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc +++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc @@ -35,64 +35,6 @@ namespace webrtc_jni { jobject AndroidVideoCapturerJni::application_context_ = nullptr; -// JavaCaptureProxy is responsible for marshaling calls from the -// Java VideoCapturerAndroid to the C++ class AndroidVideoCapturer. -// Calls from Java occur on a Java thread and are marshaled to -// AndroidVideoCapturer on the thread that creates an instance of this object. -// -// An instance is created when AndroidVideoCapturerJni::Start is called and -// ownership is passed to an instance of the Java class NativeObserver. -// JavaCaptureProxy is destroyed when NativeObserver has reported that the -// capturer has stopped, see -// VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped. -// Marshaling is done as long as JavaCaptureProxy has a pointer to the -// AndroidVideoCapturer. 
-class JavaCaptureProxy { - public: - JavaCaptureProxy() : thread_(rtc::Thread::Current()), capturer_(nullptr) { - } - - ~JavaCaptureProxy() { - } - - void SetAndroidCapturer(webrtc::AndroidVideoCapturer* capturer) { - DCHECK(thread_->IsCurrent()); - capturer_ = capturer; - } - - void OnCapturerStarted(bool success) { - thread_->Invoke( - rtc::Bind(&JavaCaptureProxy::OnCapturerStarted_w, this, success)); - } - - void OnIncomingFrame(signed char* video_frame, - int length, - int rotation, - int64 time_stamp) { - thread_->Invoke( - rtc::Bind(&JavaCaptureProxy::OnIncomingFrame_w, this, video_frame, - length, rotation, time_stamp)); - } - - private: - void OnCapturerStarted_w(bool success) { - DCHECK(thread_->IsCurrent()); - if (capturer_) - capturer_->OnCapturerStarted(success); - } - void OnIncomingFrame_w(signed char* video_frame, - int length, - int rotation, - int64 time_stamp) { - DCHECK(thread_->IsCurrent()); - if (capturer_) - capturer_->OnIncomingFrame(video_frame, length, rotation, time_stamp); - } - - rtc::Thread* thread_; - webrtc::AndroidVideoCapturer* capturer_; -}; - // static int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, jobject appliction_context) { @@ -104,6 +46,20 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, return 0; } +// static +rtc::scoped_ptr +AndroidVideoCapturerJni::Create(JNIEnv* jni, + jobject j_video_capture, + jstring device_name) { + rtc::scoped_ptr capturer( + new AndroidVideoCapturerJni(jni, + j_video_capture)); + + if (capturer->Init(device_name)) + return capturer.Pass(); + return nullptr; +} + AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer) : j_capturer_global_(jni, j_video_capturer), @@ -113,7 +69,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni, jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver")), - proxy_(nullptr) { + capturer_(nullptr) { thread_checker_.DetachFromThread(); } @@ -128,22 +84,14 @@ bool AndroidVideoCapturerJni::Init(jstring device_name) { } AndroidVideoCapturerJni::~AndroidVideoCapturerJni() { - DeInit(); -} - -void AndroidVideoCapturerJni::DeInit() { - DCHECK(proxy_ == nullptr); - jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, "deInit", "()V"); - jni()->CallVoidMethod(*j_capturer_global_, m); - CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.DeInit"; } void AndroidVideoCapturerJni::Start(int width, int height, int framerate, webrtc::AndroidVideoCapturer* capturer) { DCHECK(thread_checker_.CalledOnValidThread()); - DCHECK(proxy_ == nullptr); - proxy_ = new JavaCaptureProxy(); - proxy_->SetAndroidCapturer(capturer); + DCHECK(capturer_ == nullptr); + thread_ = rtc::Thread::Current(); + capturer_ = capturer; j_frame_observer_ = NewGlobalRef( jni(), @@ -152,7 +100,7 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate, *j_observer_class_, "", "(J)V"), - jlongFromPointer(proxy_))); + jlongFromPointer(this))); CHECK_EXCEPTION(jni()) << "error during NewObject"; jmethodID m = GetMethodID( @@ -169,13 +117,21 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate, void AndroidVideoCapturerJni::Stop() { DCHECK(thread_checker_.CalledOnValidThread()); - proxy_->SetAndroidCapturer(nullptr); - proxy_ = nullptr; + capturer_ = nullptr; jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, "stopCapture", "()V"); jni()->CallVoidMethod(*j_capturer_global_, m); CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture"; DeleteGlobalRef(jni(), j_frame_observer_); + // 
Do not process frames in flight after Stop has returned, since + // the memory buffers they point to have been deleted. + rtc::MessageQueueManager::Clear(&invoker_); +} + +void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) { + jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, + "returnBuffer", "(J)V"); + jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp); } std::string AndroidVideoCapturerJni::GetSupportedFormats() { @@ -188,29 +144,53 @@ std::string AndroidVideoCapturerJni::GetSupportedFormats() { return JavaToStdString(jni(), j_json_caps); } +void AndroidVideoCapturerJni::OnCapturerStarted(bool success) { + invoker_.AsyncInvoke<void>( + thread_, + rtc::Bind(&AndroidVideoCapturerJni::OnCapturerStarted_w, this, success)); +} + +void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame, + int length, + int rotation, + int64 time_stamp) { + invoker_.AsyncInvoke<void>( + thread_, + rtc::Bind(&AndroidVideoCapturerJni::OnIncomingFrame_w, + this, video_frame, length, rotation, time_stamp)); +} + +void AndroidVideoCapturerJni::OnCapturerStarted_w(bool success) { + DCHECK(thread_checker_.CalledOnValidThread()); + if (capturer_) + capturer_->OnCapturerStarted(success); +} + +void AndroidVideoCapturerJni::OnIncomingFrame_w(void* video_frame, + int length, + int rotation, + int64 time_stamp) { + DCHECK(thread_checker_.CalledOnValidThread()); + if (capturer_) + capturer_->OnIncomingFrame(video_frame, length, rotation, time_stamp); +} + JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); } JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnFrameCaptured) - (JNIEnv* jni, jclass, jlong j_proxy, jbyteArray j_frame, + (JNIEnv* jni, jclass, jlong j_capturer, jobject j_frame, jint rotation, jlong ts) { - jbyte* bytes = jni->GetByteArrayElements(j_frame, NULL); - reinterpret_cast<JavaCaptureProxy*>( - j_proxy)->OnIncomingFrame(bytes, jni->GetArrayLength(j_frame), rotation, - ts); - jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); + void* bytes = jni->GetDirectBufferAddress(j_frame); + DCHECK(bytes != NULL); + jlong length = jni->GetDirectBufferCapacity(j_frame); + reinterpret_cast<AndroidVideoCapturerJni*>( + j_capturer)->OnIncomingFrame(bytes, length, rotation, ts); } JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted) - (JNIEnv* jni, jclass, jlong j_proxy, jboolean j_success) { - JavaCaptureProxy* proxy = reinterpret_cast<JavaCaptureProxy*>(j_proxy); - proxy->OnCapturerStarted(j_success); - if (!j_success) - delete proxy; -} - -JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStopped) - (JNIEnv* jni, jclass, jlong j_proxy) { - delete reinterpret_cast<JavaCaptureProxy*>(j_proxy); + (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) { + reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted( + j_success); } } // namespace webrtc_jni diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h index 33c586fad..9149f2c38 100644 --- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h +++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h @@ -33,32 +33,53 @@ #include "talk/app/webrtc/androidvideocapturer.h" #include "talk/app/webrtc/java/jni/jni_helpers.h" +#include "webrtc/base/asyncinvoker.h" #include "webrtc/base/thread_checker.h" namespace webrtc_jni { -class JavaCaptureProxy; - // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate. // The purpose of the delegate is to hide the JNI specifics from the C++ only // AndroidVideoCapturer.
class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate { public: static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context); - AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer); ~AndroidVideoCapturerJni(); - bool Init(jstring device_name); + // Creates a new instance of AndroidVideoCapturerJni. Returns nullptr if + // it can't be created. This happens if |device_name| is invalid. + static rtc::scoped_ptr<AndroidVideoCapturerJni> Create( + JNIEnv* jni, + jobject j_video_capture, // Instance of VideoCapturerAndroid + jstring device_name); // Name of the camera to use. void Start(int width, int height, int framerate, webrtc::AndroidVideoCapturer* capturer) override; void Stop() override; + virtual void ReturnBuffer(int64 time_stamp) override; + std::string GetSupportedFormats() override; - private: + // Called from VideoCapturerAndroid::NativeObserver on a Java thread. + void OnCapturerStarted(bool success); + void OnIncomingFrame(void* video_frame, + int length, + int rotation, + int64 time_stamp); + +private: + AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer); + bool Init(jstring device_name); + + void OnCapturerStarted_w(bool success); + void OnCapturerStopped_w(); + void OnIncomingFrame_w(void* video_frame, + int length, + int rotation, + int64 time_stamp); + JNIEnv* jni(); - void DeInit(); const ScopedGlobalRef<jobject> j_capturer_global_; const ScopedGlobalRef<jclass> j_video_capturer_class_; @@ -67,9 +88,12 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate { rtc::ThreadChecker thread_checker_; - // The proxy is a valid pointer between calling Start and Stop. - // It destroys itself when Java VideoCapturerAndroid has been stopped. - JavaCaptureProxy* proxy_; + rtc::Thread* thread_; // The thread that Start is called on. + // |capturer_| is guaranteed to be a valid pointer from a call to + // AndroidVideoCapturerDelegate::Start + // until AndroidVideoCapturerDelegate::Stop.
+ webrtc::AndroidVideoCapturer* capturer_; + rtc::AsyncInvoker invoker_; static jobject application_context_; diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc index fedc33db2..8f5e7f2d1 100644 --- a/talk/app/webrtc/java/jni/peerconnection_jni.cc +++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc @@ -1385,9 +1385,9 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)( j_videocapturer_ctor); CHECK_EXCEPTION(jni) << "error during NewObject"; - rtc::scoped_ptr<AndroidVideoCapturerJni> delegate( - new AndroidVideoCapturerJni(jni, j_video_capturer)); - if (!delegate->Init(j_device_name)) + rtc::scoped_ptr<AndroidVideoCapturerJni> delegate = + AndroidVideoCapturerJni::Create(jni, j_video_capturer, j_device_name); + if (!delegate.get()) return nullptr; rtc::scoped_ptr<webrtc::AndroidVideoCapturer> capturer( new webrtc::AndroidVideoCapturer(delegate.Pass())); diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java index 446535485..8b4771d97 100644 --- a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java +++ b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java @@ -48,6 +48,7 @@ import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Exchanger; @@ -79,17 +80,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba private Camera.CameraInfo info; private SurfaceTexture cameraSurfaceTexture; private int[] cameraGlTextures = null; - // Arbitrary queue depth. Higher number means more memory allocated & held, - // lower number means more sensitivity to processing time in the client (and - // potentially stalling the capturer if it runs out of buffers to write to). - private final int numCaptureBuffers = 3; + private FramePool videoBuffers = null; private int width; private int height; private int framerate; private CapturerObserver frameObserver = null; // List of formats supported by all cameras. This list is filled once in order // to be able to switch cameras. - private static ArrayList<CaptureFormat>[] supportedFormats; + private static List<List<CaptureFormat>> supportedFormats; // Returns device names that can be used to create a new VideoCapturerAndroid. public static String[] getDeviceNames() { @@ -152,7 +150,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba id = ++id % Camera.getNumberOfCameras(); CaptureFormat formatToUse = null; - for (CaptureFormat format : supportedFormats[id]) { + List<CaptureFormat> formats = supportedFormats.get(id); + for (CaptureFormat format : formats) { if (format.width == width && format.height == height) { formatToUse = format; break; } } @@ -199,35 +198,17 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } } } - Exchanger<Handler> handlerExchanger = new Exchanger<Handler>(); - cameraThread = new CameraThread(handlerExchanger); - cameraThread.start(); - cameraThreadHandler = exchange(handlerExchanger, null); return foundDevice; } - // Called by native code. Frees the Java thread created in Init. 
- void deInit() throws InterruptedException { - Log.d(TAG, "deInit"); - if (cameraThreadHandler != null) { - cameraThreadHandler.post(new Runnable() { - @Override public void run() { - Log.d(TAG, "stop CameraThread"); - Looper.myLooper().quit(); - } - }); - cameraThread.join(); - cameraThreadHandler = null; - } - } - private static boolean initStatics() { if (supportedFormats != null) return true; try { - supportedFormats = new ArrayList[Camera.getNumberOfCameras()]; + supportedFormats = + new ArrayList<List<CaptureFormat>>(Camera.getNumberOfCameras()); for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { - supportedFormats[i] = getSupportedFormats(i); + supportedFormats.add(getSupportedFormats(i)); } return true; } catch (Exception e) { @@ -257,7 +238,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } private static String getSupportedFormatsAsJson(int id) throws JSONException { - ArrayList<CaptureFormat> formats = supportedFormats[id]; + List<CaptureFormat> formats = supportedFormats.get(id); JSONArray json_formats = new JSONArray(); for (CaptureFormat format : formats) { JSONObject json_format = new JSONObject(); @@ -288,7 +269,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba parameters.getSupportedPreviewSizes(); for (Camera.Size size : supportedSizes) { if (size.width % 16 != 0) { - // If the width is not a multiple of 16, The frames received from the + // If the width is not a multiple of 16, the frames received from the // camera will have a stride != width when YV12 is used. Since we // currently only support tightly packed images, we simply ignore those // resolutions. @@ -332,12 +313,20 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba throw new RuntimeException("frameObserver not set."); } if (width % 16 != 0) { - throw new RuntimeException("widht must be a multiple of 16." ); + throw new RuntimeException("width must be a multiple of 16." ); } this.width = width; this.height = height; this.framerate = framerate; + Exchanger<Handler> handlerExchanger = new Exchanger<Handler>(); + cameraThread = new CameraThread(handlerExchanger); + cameraThread.start(); + cameraThreadHandler = exchange(handlerExchanger, null); + // We must guarantee that buffers sent to an observer are kept alive until + // stopCapture has completed. Therefore, create the buffers here and + // abandon them after the camera thread has been stopped. + videoBuffers = new FramePool(width, height, ImageFormat.YV12); cameraThreadHandler.post(new Runnable() { @Override public void run() { startCaptureOnCameraThread(width, height, framerate, frameObserver, @@ -356,7 +345,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba this.camera = Camera.open(id); this.info = new Camera.CameraInfo(); Camera.getCameraInfo(id, info); - // No local renderer (we only care about onPreviewFrame() buffers, not a // directly-displayed UI element). Camera won't capture without // setPreview{Texture,Display}, so we create a SurfaceTexture and hand @@ -414,12 +402,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba // Note: setRecordingHint(true) actually decrease frame rate on N5. 
// parameters.setRecordingHint(true); - int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; - for (int i = 0; i < numCaptureBuffers; i++) { - camera.addCallbackBuffer(new byte[bufSize]); - } + videoBuffers.addBuffersAsCameraCallbackBuffers(camera); camera.setPreviewCallbackWithBuffer(this); - camera.startPreview(); frameObserver.OnCapturerStarted(true); return; @@ -436,19 +420,22 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } // Called by native code. Returns true when camera is known to be stopped. - synchronized void stopCapture() { + synchronized void stopCapture() throws InterruptedException { Log.d(TAG, "stopCapture"); cameraThreadHandler.post(new Runnable() { @Override public void run() { stopCaptureOnCameraThread(); } }); + cameraThread.join(); + cameraThreadHandler = null; + videoBuffers = null; } private void stopCaptureOnCameraThread() { Log.d(TAG, "stopCaptureOnCameraThread"); doStopCaptureOnCamerathread(); - frameObserver.OnCapturerStopped(); + Looper.myLooper().quit(); return; } @@ -479,6 +466,16 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba applicationContext); } + synchronized void returnBuffer(final long timeStamp) { + cameraThreadHandler.post(new Runnable() { + @Override public void run() { + if (camera == null) + return; + videoBuffers.addBufferAsCameraCallbackBuffer(camera, timeStamp); + } + }); + } + private int getDeviceOrientation() { int orientation = 0; @@ -540,8 +537,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } rotation = (info.orientation + rotation) % 360; - frameObserver.OnFrameCaptured(data, rotation, captureTimeMs); - camera.addCallbackBuffer(data); + frameObserver.OnFrameCaptured( + videoBuffers.reserveByteBuffer(data, captureTimeMs), + rotation, + captureTimeMs); } // runCameraThreadUntilIdle make sure all posted messages to the cameraThread @@ -571,48 +570,101 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } } + // Class used for allocating and bookkeeping video frames. All buffers are + // direct allocated so that they can be directly used from native code. + private static class FramePool { + // Arbitrary queue depth. Higher number means more memory allocated & held, + // lower number means more sensitivity to processing time in the client (and + // potentially stalling the capturer if it runs out of buffers to write to). + private static int numCaptureBuffers = 3; + private final Frame cameraFrames[]; + + private static class Frame { + public final ByteBuffer buffer; + public long timeStamp = -1; + + Frame(int width, int height, int format) { + int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; + buffer = ByteBuffer.allocateDirect(bufSize); + } + + byte[] data() { + return buffer.array(); + } + } + + FramePool(int width, int height, int format) { + cameraFrames = new Frame[numCaptureBuffers]; + for (int i = 0; i < numCaptureBuffers; i++) { + cameraFrames[i] = new Frame(width, height, format); + } + } + + // Add all free buffers to |camera| that are currently not sent to a client. 
+ void addBuffersAsCameraCallbackBuffers(Camera camera) { + for (Frame frame : cameraFrames) { + if (frame.timeStamp < 0) + camera.addCallbackBuffer(frame.data()); + } + } + + ByteBuffer reserveByteBuffer(byte[] data, long timeStamp) { + for (Frame frame : cameraFrames) { + if (data == frame.data()) { + frame.timeStamp = timeStamp; + return frame.buffer; + } + } + throw new RuntimeException("unknown data buffer?!?"); + } + + // Add the buffer with |timeStamp| to |camera|. + void addBufferAsCameraCallbackBuffer(Camera camera, long timeStamp) { + for (Frame frame : cameraFrames) { + if (timeStamp == frame.timeStamp) { + frame.timeStamp = -1; + camera.addCallbackBuffer(frame.data()); + return; + } + } + throw new RuntimeException("unknown data buffer returned?!?"); + } + } + // Interface used for providing callbacks to an observer. interface CapturerObserver { // Notify if the camera have been started successfully or not. // Called on a Java thread owned by VideoCapturerAndroid. abstract void OnCapturerStarted(boolean success); - // Notify that the camera have been stopped. - // Called on a Java thread owned by VideoCapturerAndroid. - abstract void OnCapturerStopped(); // Delivers a captured frame. Called on a Java thread owned by // VideoCapturerAndroid. - abstract void OnFrameCaptured(byte[] data, int rotation, long timeStamp); + abstract void OnFrameCaptured(ByteBuffer buffer, int rotation, long timeStamp); } // An implementation of CapturerObserver that forwards all calls from // Java to the C layer. - public static class NativeObserver implements CapturerObserver { - private final long nativeProxy; + static class NativeObserver implements CapturerObserver { + private final long nativeCapturer; - public NativeObserver(long nativeProxy) { - this.nativeProxy = nativeProxy; - } - - @Override - public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) { - nativeOnFrameCaptured(nativeProxy, data, rotation, timeStamp); + public NativeObserver(long nativeCapturer) { + this.nativeCapturer = nativeCapturer; } @Override public void OnCapturerStarted(boolean success) { - nativeCapturerStarted(nativeProxy, success); + nativeCapturerStarted(nativeCapturer, success); } @Override - public void OnCapturerStopped() { - nativeCapturerStopped(nativeProxy); + public void OnFrameCaptured(ByteBuffer byteBuffer, int rotation, + long timeStamp) { + nativeOnFrameCaptured(nativeCapturer, byteBuffer, rotation, timeStamp); } - private native void nativeCapturerStarted(long proxyObject, + private native void nativeCapturerStarted(long nativeCapturer, boolean success); - private native void nativeCapturerStopped(long proxyObject); - private native void nativeOnFrameCaptured( - long proxyObject, byte[] data, int rotation, long timeStamp); + private native void nativeOnFrameCaptured(long nativeCapturer, + ByteBuffer byteBuffer, int rotation, long timeStamp); } }
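
The native side now resolves each frame with JNI's GetDirectBufferAddress()/GetDirectBufferCapacity(), which only work for direct ByteBuffers; that is the invariant the new assertTrue(frame.isDirect()) in VideoCapturerAndroidTest checks. A Java-side guard can make the same assumption explicit before a buffer crosses into native code. This is a sketch; the helper class and its name are illustrative and not part of the change.

    import java.nio.ByteBuffer;

    // Hypothetical helper: verify that a frame buffer can be handed to
    // nativeOnFrameCaptured(), which looks up its address with JNI's
    // GetDirectBufferAddress(). A heap buffer would yield a null address
    // there, so it is rejected up front.
    final class FrameBufferChecks {
      private FrameBufferChecks() {}

      static ByteBuffer checkDirect(ByteBuffer buffer) {
        if (!buffer.isDirect()) {
          throw new IllegalArgumentException("Frame buffers must be direct ByteBuffers.");
        }
        return buffer;
      }
    }

NativeObserver.OnFrameCaptured() could run the buffer through such a check before calling nativeOnFrameCaptured(); the unit test expresses the same requirement from the observer's side.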
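FramePool.reserveByteBuffer() matches the byte[] that Camera.onPreviewFrame() hands back against Frame.data() by reference, and Frame.data() calls buffer.array() on a buffer obtained from ByteBuffer.allocateDirect(). That relies on two properties that hold on Android's runtime but are not promised by the general ByteBuffer contract: a direct buffer there has an accessible backing array, and array() returns the same array instance every time. A variant that caches the array once makes both assumptions explicit; this is a sketch under those assumptions, not the code in the patch, and the class name is illustrative.

    import java.nio.ByteBuffer;

    // Per-frame bookkeeping as used by the pool: one direct buffer, its cached
    // backing array for reference comparison against the array delivered to
    // onPreviewFrame(), and the timestamp of the frame currently checked out
    // (-1 while the buffer is free).
    class PooledFrame {
      final ByteBuffer buffer;
      private final byte[] backingArray;
      long timeStamp = -1;

      PooledFrame(int bufferSize) {
        buffer = ByteBuffer.allocateDirect(bufferSize);
        // Throws UnsupportedOperationException on runtimes whose direct
        // buffers do not expose a backing array.
        backingArray = buffer.array();
      }

      // Always returns the same instance, so "data == frame.data()" style
      // identity checks remain valid.
      byte[] data() {
        return backingArray;
      }
    }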
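Because OnFrameCaptured() now hands out the pool's own direct buffer instead of a fresh byte[], the buffer is recycled as soon as the native side calls returnBuffer() for its timestamp. Anything that needs the pixels for longer than the callback must copy them out first. The sketch below assumes the tightly packed YV12 layout the capturer produces and a caller that knows the configured width and height; the class is illustrative and not part of the change.

    import java.nio.ByteBuffer;

    // Copies the Y plane out of a tightly packed YV12 frame delivered through
    // CapturerObserver.OnFrameCaptured(). The source buffer belongs to the
    // capturer's frame pool and will be reused once it is returned, so no
    // reference to it may be kept after the callback finishes.
    final class YPlaneCopy {
      private final int width;
      private final int height;

      YPlaneCopy(int width, int height) {
        this.width = width;
        this.height = height;
      }

      byte[] copyFrom(ByteBuffer frame) {
        byte[] yPlane = new byte[width * height];  // The Y plane precedes the two chroma planes.
        frame.position(0);
        frame.get(yPlane, 0, yPlane.length);
        return yPlane;
      }
    }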