- Exit from the camera thread's looper loop() method only after all camera release calls have completed. This fixes camera exceptions observed from time to time when camera functions were called on a terminated looper.
- Allocate a real texture for the camera preview.
- Add FPS and camera frame duration logging.
- Get the camera frame timestamp in Java code and pass it to the JNI code so that the frame timestamp is assigned as early as possible. The JNI code will not use these timestamps until timestamp NTP correction and zeroing in webrtcvideoengine.cc are addressed.

R=fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/16729004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6513 4adac7df-926f-26a2-2b94-8c16560cd09d
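A minimal sketch of the looper fix described above, assuming the class fields and the exchange() helper that appear in the VideoCaptureAndroid.java diff below (the full texture/display teardown is elided): the camera-thread looper is quit only after Camera.release() has returned, so no camera call can land on a terminated looper.

    // Sketch only; see the full stopCaptureOnCameraThread() in the diff below.
    private void stopCaptureOnCameraThread(Exchanger<Boolean> result) {
      try {
        camera.stopPreview();
        camera.setPreviewCallbackWithBuffer(null);
        camera.setPreviewTexture(null);
        camera.release();
        camera = null;
        exchange(result, true);
        Looper.myLooper().quit();  // Quit last: every camera call has completed.
        return;
      } catch (IOException e) {
        exchange(result, false);
        Looper.myLooper().quit();  // Still quit so the camera thread can exit.
      }
    }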
parent 0d15159b04
commit a24d366e1c
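The timestamp and FPS-logging change, condensed (a sketch assuming the frameCount, averageDurationMs and lastCaptureTimeMs fields and the updated native signature shown in the diff): the timestamp is taken with SystemClock.elapsedRealtime() as soon as the preview callback fires, an exponential moving average of the frame duration drives a log line every 30 frames, and the timestamp is forwarded to the native ProvideCameraFrame().

    // Sketch of the per-frame bookkeeping added to onPreviewFrame().
    public void onPreviewFrame(byte[] data, Camera callbackCamera) {
      frameCount++;
      long captureTimeMs = SystemClock.elapsedRealtime();
      if (frameCount > 1) {
        double durationMs = captureTimeMs - lastCaptureTimeMs;
        averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
        if (frameCount % 30 == 0) {
          Log.d(TAG, "Duration: " + (int) durationMs + " ms. FPS: " +
              (int) (1000 / averageDurationMs + 0.5));
        }
      }
      lastCaptureTimeMs = captureTimeMs;
      ProvideCameraFrame(data, data.length, captureTimeMs, native_capturer);
      camera.addCallbackBuffer(data);
    }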
@@ -11,20 +11,18 @@
package org.webrtc.videoengine;

import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.Exchanger;
import java.util.concurrent.locks.ReentrantLock;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceHolder.Callback;
@@ -50,11 +48,15 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
  private final Camera.CameraInfo info;
  private final OrientationEventListener orientationListener;
  private final long native_capturer;  // |VideoCaptureAndroid*| in C++.
  private SurfaceTexture dummySurfaceTexture;
  private SurfaceTexture cameraSurfaceTexture;
  private int[] cameraGlTextures = null;
  // Arbitrary queue depth. Higher number means more memory allocated & held,
  // lower number means more sensitivity to processing time in the client (and
  // potentially stalling the capturer if it runs out of buffers to write to).
  private final int numCaptureBuffers = 3;
  private double averageDurationMs;
  private long lastCaptureTimeMs;
  private int frameCount;

  // Requests future capturers to send their frames to |localPreview| directly.
  public static void setLocalPreview(SurfaceHolder localPreview) {
@@ -114,6 +116,8 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
  private synchronized boolean startCapture(
      final int width, final int height,
      final int min_mfps, final int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
        min_mfps + ":" + max_mfps);
    if (cameraThread != null || cameraThreadHandler != null) {
      throw new RuntimeException("Camera thread already started!");
    }
@@ -121,7 +125,6 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    cameraThread = new CameraThread(handlerExchanger);
    cameraThread.start();
    cameraThreadHandler = exchange(handlerExchanger, null);
    orientationListener.enable();

    final Exchanger<Boolean> result = new Exchanger<Boolean>();
    cameraThreadHandler.post(new Runnable() {
@@ -129,14 +132,14 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
        startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
      }
    });
    return exchange(result, false);  // |false| is a dummy value here.
    boolean startResult = exchange(result, false);  // |false| is a dummy value.
    orientationListener.enable();
    return startResult;
  }

  private void startCaptureOnCameraThread(
      int width, int height, int min_mfps, int max_mfps,
      Exchanger<Boolean> result) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
        min_mfps + ":" + max_mfps);
    Throwable error = null;
    try {
      camera = Camera.open(id);
@@ -150,13 +153,27 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
      } else {
        // No local renderer (we only care about onPreviewFrame() buffers, not a
        // directly-displayed UI element). Camera won't capture without
        // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
        // hand it over to Camera, but never listen for frame-ready callbacks,
        // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
        // it over to Camera, but never listen for frame-ready callbacks,
        // and never call updateTexImage on it.
        try {
          // "42" because http://goo.gl/KaEn8
          dummySurfaceTexture = new SurfaceTexture(42);
          camera.setPreviewTexture(dummySurfaceTexture);
          cameraGlTextures = new int[1];
          // Generate one texture pointer and bind it as an external texture.
          GLES20.glGenTextures(1, cameraGlTextures, 0);
          GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              cameraGlTextures[0]);
          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

          cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
          cameraSurfaceTexture.setOnFrameAvailableListener(null);
          camera.setPreviewTexture(cameraSurfaceTexture);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
@@ -178,6 +195,8 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
        camera.addCallbackBuffer(new byte[bufSize]);
      }
      camera.setPreviewCallbackWithBuffer(this);
      frameCount = 0;
      averageDurationMs = 1000 / max_mfps;
      camera.startPreview();
      exchange(result, true);
      return;
@@ -198,6 +217,8 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {

  // Called by native code. Returns true when camera is known to be stopped.
  private synchronized boolean stopCapture() {
    Log.d(TAG, "stopCapture");
    orientationListener.disable();
    final Exchanger<Boolean> result = new Exchanger<Boolean>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
@@ -212,14 +233,12 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    }
    cameraThreadHandler = null;
    cameraThread = null;
    orientationListener.disable();
    Log.d(TAG, "stopCapture done");
    return status;
  }

  private void stopCaptureOnCameraThread(
      Exchanger<Boolean> result) {
    Log.d(TAG, "stopCapture");
    Looper.myLooper().quit();
    if (camera == null) {
      throw new RuntimeException("Camera is already stopped!");
    }
@@ -232,10 +251,16 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
        camera.setPreviewDisplay(null);
      } else {
        camera.setPreviewTexture(null);
        cameraSurfaceTexture = null;
        if (cameraGlTextures != null) {
          GLES20.glDeleteTextures(1, cameraGlTextures, 0);
          cameraGlTextures = null;
        }
      }
      camera.release();
      camera = null;
      exchange(result, true);
      Looper.myLooper().quit();
      return;
    } catch (IOException e) {
      error = e;
@@ -244,11 +269,12 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    }
    Log.e(TAG, "Failed to stop camera", error);
    exchange(result, false);
    Looper.myLooper().quit();
    return;
  }

  private native void ProvideCameraFrame(
      byte[] data, int length, long captureObject);
      byte[] data, int length, long timeStamp, long captureObject);

  // Called on cameraThread so must not "synchronized".
  @Override
@@ -262,7 +288,19 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    if (camera != callbackCamera) {
      throw new RuntimeException("Unexpected camera in callback!");
    }
    ProvideCameraFrame(data, data.length, native_capturer);
    frameCount++;
    long captureTimeMs = SystemClock.elapsedRealtime();
    if (frameCount > 1) {
      double durationMs = captureTimeMs - lastCaptureTimeMs;
      averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
      if ((frameCount % 30) == 0) {
        Log.d(TAG, "Camera TS " + captureTimeMs +
            ". Duration: " + (int)durationMs + " ms. FPS: " +
            (int) (1000 / averageDurationMs + 0.5));
      }
    }
    lastCaptureTimeMs = captureTimeMs;
    ProvideCameraFrame(data, data.length, captureTimeMs, native_capturer);
    camera.addCallbackBuffer(data);
  }

@@ -10,14 +10,8 @@

package org.webrtc.videoengine;

import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import android.content.Context;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
@@ -99,6 +93,7 @@ public class VideoCaptureDeviceInfoAndroid {
            .put("mfpsRanges", mfpsRanges);
      }
      String ret = devices.toString(2);
      Log.d(TAG, ret);
      return ret;
    } catch (JSONException e) {
      throw new RuntimeException(e);
@@ -15,6 +15,7 @@
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"

@@ -36,6 +37,7 @@ void JNICALL ProvideCameraFrame(
    jobject,
    jbyteArray javaCameraFrame,
    jint length,
    jlong timeStamp,
    jlong context) {
  webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
      reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
@@ -90,7 +92,7 @@ int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) {
       "(JI)V",
       reinterpret_cast<void*>(&OnOrientationChanged)},
      {"ProvideCameraFrame",
       "([BIJ)V",
       "([BIJJ)V",
       reinterpret_cast<void*>(&ProvideCameraFrame)}};
  if (ats.env()->RegisterNatives(g_java_capturer_class,
                                 native_methods, 3) != 0)
@@ -146,18 +148,18 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
    return -1;

  // Store the device name
  LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
  size_t camera_id = 0;
  if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
    return -1;
  _deviceUniqueId = new char[nameLength + 1];
  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);

  AttachThreadScoped ats(g_jvm);
  JNIEnv* env = ats.env();

  jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
  assert(ctor);
  jlong j_this = reinterpret_cast<intptr_t>(this);
  size_t camera_id = 0;
  if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
    return -1;
  _jCapturer = env->NewGlobalRef(
      env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
  assert(_jCapturer);
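One note on the C++ registration change above: the extra Java-side timestamp only requires widening the JNI signature string from "([BIJ)V" to "([BIJJ)V" ([B = byte[], I = int, J = long, V = void return), matching the updated Java declaration from the diff:

    private native void ProvideCameraFrame(
        byte[] data, int length, long timeStamp, long captureObject);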