Zero-copy AndroidVideoCapturer.

This CL uses the YV12 buffers from Java without a copy if no rotation is needed. Buffers are returned to the camera when the encoder and renderers no longer need them.

This adds a new frame type, WrappedI420Buffer, that allows for wrapping existing memory buffers and getting a notification when they are no longer used.

AndroidVideoCapturer::FrameFactory::CreateAliasedFrame wraps frames received from Java. For each wrapped frame, a new reference to the AndroidVideoCapturerDelegate is held to ensure that the delegate cannot be destroyed until all frames have been returned.

Some overlap exists in webrtcvideoframe.cc and webrtcvideoengine.cc with https://webrtc-codereview.appspot.com/47399004/, which is expected to land before this CL.

BUG=1128
R=glaznev@webrtc.org, magjed@webrtc.org
TBR=mflodman@webrtc.org // For changes in webrtc/common_video/video_frame_buffer

Review URL: https://webrtc-codereview.appspot.com/49459004

Cr-Commit-Position: refs/heads/master@{#8923}
This commit is contained in:
Per 2015-04-02 12:30:51 +02:00
parent 037bad7497
commit 3354419a2d
12 changed files with 386 additions and 70 deletions

View File

@ -34,6 +34,7 @@ import org.webrtc.VideoCapturerAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
import java.util.ArrayList;
import java.util.List;
@SuppressWarnings("deprecation")
public class VideoCapturerAndroidTest extends ActivityTestCase {
@ -41,9 +42,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
private int framesRendered = 0;
private Object frameLock = 0;
private void setSize(int width, int height) {
}
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
@ -73,6 +71,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
private Object frameLock = 0;
private Object capturerStartLock = 0;
private boolean captureStartResult = false;
private List<Long> timestamps = new ArrayList<Long>();
@Override
public void OnCapturerStarted(boolean success) {
@ -88,6 +87,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
synchronized (frameLock) {
++framesCaptured;
frameSize = length;
timestamps.add(timeStamp);
frameLock.notify();
}
}
@ -111,6 +111,14 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
return frameSize;
}
}
// Returns a copy of the time stamps of all frames captured since the last
// call, and clears the internal list. Guarded by |frameLock| so it can be
// called while frames are still being delivered.
List<Long> getCopyAndResetListOftimeStamps() {
synchronized (frameLock) {
ArrayList<Long> list = new ArrayList<Long>(timestamps);
timestamps.clear();
return list;
}
}
}
// Return true if the device under test have at least two cameras.
@ -257,4 +265,45 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
}
capturer.dispose();
}
@SmallTest
// Tests what happens if buffers are returned after the capturer has been
// stopped and restarted. It does not test or use the C++ layer.
public void testReturnBufferLate() throws Exception {
FakeCapturerObserver observer = new FakeCapturerObserver();
String deviceName = VideoCapturerAndroid.getDeviceName(0);
ArrayList<CaptureFormat> formats =
VideoCapturerAndroid.getSupportedFormats(0);
VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName);
VideoCapturerAndroid.CaptureFormat format = formats.get(0);
capturer.startCapture(format.width, format.height, format.maxFramerate,
getInstrumentation().getContext(), observer);
assertTrue(observer.WaitForCapturerToStart());
observer.WaitForNextCapturedFrame();
// Stop the capturer while still holding the time stamps of the first
// session's buffers, so they can only be returned "late".
capturer.stopCapture();
List<Long> listOftimestamps = observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
// Restart with a different format so the old buffers have the wrong size.
format = formats.get(1);
capturer.startCapture(format.width, format.height, format.maxFramerate,
getInstrumentation().getContext(), observer);
observer.WaitForCapturerToStart();
observer.WaitForNextCapturedFrame();
// Return buffers from the first session while the second one is running.
for (Long timeStamp : listOftimestamps) {
capturer.returnBuffer(timeStamp);
}
// Capturing must still work after the late returns.
observer.WaitForNextCapturedFrame();
capturer.stopCapture();
listOftimestamps = observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 2);
// Return the remaining buffers after the capturer has been stopped.
for (Long timeStamp : listOftimestamps) {
capturer.returnBuffer(timeStamp);
}
}
}

View File

@ -27,6 +27,8 @@
#include "talk/app/webrtc/androidvideocapturer.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/callback.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
#include "webrtc/base/timeutils.h"
@ -36,6 +38,7 @@ namespace webrtc {
using cricket::WebRtcVideoFrame;
using rtc::scoped_ptr;
using rtc::scoped_refptr;
// An implementation of cricket::VideoFrameFactory for frames that are not
// guaranteed to outlive the created cricket::VideoFrame.
@ -45,7 +48,10 @@ using rtc::scoped_ptr;
// CreateAliasedFrame for every frame.
class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
public:
FrameFactory(int width, int height) : start_time_(rtc::TimeNanos()) {
FrameFactory(int width,
int height,
const scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
: start_time_(rtc::TimeNanos()), delegate_(delegate) {
// Create a CapturedFrame that only contains header information, not the
// actual pixel data.
captured_frame_.width = width;
@ -61,12 +67,11 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
void UpdateCapturedFrame(void* frame_data,
int length,
int rotation,
int64 time_stamp_in_ms) {
int64 time_stamp_in_ns) {
captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_YV12);
captured_frame_.data = frame_data;
captured_frame_.elapsed_time = rtc::TimeNanos() - start_time_;
captured_frame_.time_stamp =
time_stamp_in_ms * rtc::kNumNanosecsPerMillisec;
captured_frame_.time_stamp = time_stamp_in_ns;
captured_frame_.rotation = rotation;
captured_frame_.data_size = length;
}
@ -84,23 +89,60 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
// of |AndroidVideoCapturer::OnIncomingFrame_w|.
// Check that captured_frame is actually our frame.
DCHECK(captured_frame == &captured_frame_);
if (!apply_rotation_ || captured_frame->rotation == kVideoRotation_0) {
DCHECK(captured_frame->fourcc == cricket::FOURCC_YV12);
const uint8_t* y_plane = static_cast<uint8_t*>(captured_frame_.data);
const int y_stride = captured_frame->width;
const uint8_t* v_plane = y_plane +
captured_frame->width * captured_frame->height;
const int uv_stride = (captured_frame->width + 1) / 2;
const int uv_height = (captured_frame->height + 1) / 2;
const uint8_t* u_plane = v_plane + uv_stride * uv_height;
// Create a WrappedI420Buffer and bind the |no_longer_used| callback
// to the static method ReturnFrame. The |delegate_| is bound as an
// argument which means that the callback will hold a reference to
// |delegate_|.
rtc::scoped_refptr<WrappedI420Buffer> buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
dst_width, dst_height, captured_frame->width,
captured_frame->height, y_plane, y_stride, u_plane, uv_stride,
v_plane, uv_stride,
rtc::Bind(&AndroidVideoCapturer::FrameFactory::ReturnFrame,
delegate_,
captured_frame->time_stamp)));
return new WebRtcVideoFrame(
buffer, captured_frame->elapsed_time,
captured_frame->time_stamp, captured_frame->GetRotation());
}
scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame());
frame->Init(captured_frame, dst_width, dst_height, apply_rotation_);
// frame->Init copies the data in |captured_frame| so it is safe to return
// the buffer immediately.
delegate_->ReturnBuffer(captured_frame->time_stamp);
return frame.release();
}
// Bound into each WrappedI420Buffer's |no_longer_used| callback. Taking
// |delegate| by scoped_refptr means the bound callback holds a reference,
// keeping the delegate alive until the wrapped frame is released. Per the
// delegate contract, ReturnBuffer may be called on an arbitrary thread.
static void ReturnFrame(scoped_refptr<AndroidVideoCapturerDelegate> delegate,
int64 time_stamp) {
delegate->ReturnBuffer(time_stamp);
}
private:
uint64 start_time_;
cricket::CapturedFrame captured_frame_;
scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
};
AndroidVideoCapturer::AndroidVideoCapturer(
rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate)
const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
: running_(false),
delegate_(delegate.Pass()),
worker_thread_(NULL),
delegate_(delegate),
frame_factory_(NULL),
current_state_(cricket::CS_STOPPED) {
thread_checker_.DetachFromThread();
std::string json_string = delegate_->GetSupportedFormats();
LOG(LS_INFO) << json_string;
@ -131,15 +173,13 @@ AndroidVideoCapturer::~AndroidVideoCapturer() {
cricket::CaptureState AndroidVideoCapturer::Start(
const cricket::VideoFormat& capture_format) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!running_);
DCHECK(worker_thread_ == nullptr || worker_thread_ == rtc::Thread::Current());
// TODO(perkj): Better way to get a handle to the worker thread?
worker_thread_ = rtc::Thread::Current();
LOG(LS_INFO) << " AndroidVideoCapturer::Start w = " << capture_format.width
<< " h = " << capture_format.height;
frame_factory_ = new AndroidVideoCapturer::FrameFactory(
capture_format.width, capture_format.height);
capture_format.width, capture_format.height, delegate_.get());
set_frame_factory(frame_factory_);
running_ = true;
@ -152,7 +192,7 @@ cricket::CaptureState AndroidVideoCapturer::Start(
}
void AndroidVideoCapturer::Stop() {
DCHECK(worker_thread_->IsCurrent());
DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << " AndroidVideoCapturer::Stop ";
DCHECK(running_);
running_ = false;
@ -164,16 +204,18 @@ void AndroidVideoCapturer::Stop() {
}
// cricket::VideoCapturer implementation. Must be called on the thread the
// capturer is bound to (enforced by |thread_checker_|).
bool AndroidVideoCapturer::IsRunning() {
DCHECK(thread_checker_.CalledOnValidThread());
return running_;
}
// Reports YV12 as the only preferred capture format; this matches the
// fourcc set on frames in FrameFactory::UpdateCapturedFrame.
bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
DCHECK(thread_checker_.CalledOnValidThread());
fourccs->push_back(cricket::FOURCC_YV12);
return true;
}
void AndroidVideoCapturer::OnCapturerStarted(bool success) {
DCHECK(worker_thread_->IsCurrent());
DCHECK(thread_checker_.CalledOnValidThread());
cricket::CaptureState new_state =
success ? cricket::CS_RUNNING : cricket::CS_FAILED;
if (new_state == current_state_)
@ -190,10 +232,9 @@ void AndroidVideoCapturer::OnIncomingFrame(void* frame_data,
int length,
int rotation,
int64 time_stamp) {
DCHECK(worker_thread_->IsCurrent());
DCHECK(thread_checker_.CalledOnValidThread());
frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp);
SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
delegate_->ReturnBuffer(time_stamp);
}
} // namespace webrtc

View File

@ -31,12 +31,13 @@
#include <vector>
#include "talk/media/base/videocapturer.h"
#include "webrtc/base/thread_checker.h"
namespace webrtc {
class AndroidVideoCapturer;
class AndroidVideoCapturerDelegate {
class AndroidVideoCapturerDelegate : public rtc::RefCountInterface {
public:
virtual ~AndroidVideoCapturerDelegate() {}
// Start capturing. The implementation of the delegate must call
@ -49,7 +50,7 @@ class AndroidVideoCapturerDelegate {
virtual void Stop() = 0;
// Notify that a frame received in OnIncomingFrame with |time_stamp| has been
// processed and can be returned.
// processed and can be returned. May be called on an arbitrary thread.
virtual void ReturnBuffer(int64 time_stamp) = 0;
// Must returns a JSON string "{{width=xxx, height=xxx, framerate = xxx}}"
@ -61,7 +62,7 @@ class AndroidVideoCapturerDelegate {
class AndroidVideoCapturer : public cricket::VideoCapturer {
public:
explicit AndroidVideoCapturer(
rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate);
const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate);
virtual ~AndroidVideoCapturer();
// Called from JNI when the capturer has been started.
@ -87,13 +88,9 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
bool GetPreferredFourccs(std::vector<uint32>* fourccs) override;
bool running_;
rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate_;
rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
// |worker_thread_| is the thread that calls Start and is used for
// communication with the Java capturer.
// Video frames are delivered to cricket::VideoCapturer::SignalFrameCaptured
// on this thread.
rtc::Thread* worker_thread_;
rtc::ThreadChecker thread_checker_;
class FrameFactory;
FrameFactory* frame_factory_; // Owned by cricket::VideoCapturer.

View File

@ -47,16 +47,15 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
}
// static
rtc::scoped_ptr<AndroidVideoCapturerJni>
rtc::scoped_refptr<AndroidVideoCapturerJni>
AndroidVideoCapturerJni::Create(JNIEnv* jni,
jobject j_video_capture,
jstring device_name) {
rtc::scoped_ptr<AndroidVideoCapturerJni> capturer(
new AndroidVideoCapturerJni(jni,
j_video_capture));
rtc::scoped_refptr<AndroidVideoCapturerJni> capturer(
new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capture));
if (capturer->Init(device_name))
return capturer.Pass();
return capturer;
return nullptr;
}
@ -123,15 +122,19 @@ void AndroidVideoCapturerJni::Stop() {
jni()->CallVoidMethod(*j_capturer_global_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
DeleteGlobalRef(jni(), j_frame_observer_);
// Do not process frames in flight after stop have returned since
// the memory buffers they point to have been deleted.
rtc::MessageQueueManager::Clear(&invoker_);
}
// AndroidVideoCapturerDelegate implementation. May be called on an
// arbitrary thread; the actual Java call is posted to |thread_| via
// |invoker_| (pending invokes are cleared in Stop so stale buffers are
// never handed to a stopped capturer).
void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) {
invoker_.AsyncInvoke<void>(
thread_,
rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer_w, this, time_stamp));
}
// Runs on |thread_|: hands the buffer identified by |time_stamp| back to
// the Java VideoCapturerAndroid via its returnBuffer(long) method.
void AndroidVideoCapturerJni::ReturnBuffer_w(int64 time_stamp) {
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"returnBuffer", "(J)V");
jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
}
std::string AndroidVideoCapturerJni::GetSupportedFormats() {

View File

@ -44,11 +44,10 @@ namespace webrtc_jni {
class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
public:
static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
~AndroidVideoCapturerJni();
// Creates a new instance of AndroidVideoCapturerJni. Returns a nullptr if
// it can't be created. This happens if |device_name| is invalid.
static rtc::scoped_ptr<AndroidVideoCapturerJni> Create(
static rtc::scoped_refptr<AndroidVideoCapturerJni> Create(
JNIEnv* jni,
jobject j_video_capture, // Instance of VideoCapturerAndroid
jstring device_name); // Name of the camera to use.
@ -67,9 +66,11 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
int length,
int rotation,
int64 time_stamp);
protected:
AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
~AndroidVideoCapturerJni();
private:
AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
bool Init(jstring device_name);
void OnCapturerStarted_w(bool success);
@ -78,6 +79,7 @@ private:
int length,
int rotation,
int64 time_stamp);
void ReturnBuffer_w(int64 time_stamp);
JNIEnv* jni();

View File

@ -1410,12 +1410,12 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
j_videocapturer_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
rtc::scoped_ptr<AndroidVideoCapturerJni> delegate =
rtc::scoped_refptr<AndroidVideoCapturerJni> delegate =
AndroidVideoCapturerJni::Create(jni, j_video_capturer, j_device_name);
if (!delegate.get())
return nullptr;
rtc::scoped_ptr<webrtc::AndroidVideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate.Pass()));
new webrtc::AndroidVideoCapturer(delegate));
#else
std::string device_name = JavaToStdString(jni, j_device_name);

View File

@ -52,6 +52,7 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Exchanger;
import java.util.concurrent.TimeUnit;
// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
@ -80,7 +81,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private Camera.CameraInfo info;
private SurfaceTexture cameraSurfaceTexture;
private int[] cameraGlTextures = null;
private FramePool videoBuffers = null;
private final FramePool videoBuffers = new FramePool();
private int width;
private int height;
private int framerate;
@ -360,10 +361,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
// We must guarantee that buffers sent to an observer are kept alive until
// stopCapture have completed. Therefore, create the buffers here and
// abandon them after the camera thread have been stopped.
videoBuffers = new FramePool(width, height, ImageFormat.YV12);
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
startCaptureOnCameraThread(width, height, framerate, frameObserver,
@ -440,7 +437,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Note: setRecordingHint(true) actually decrease frame rate on N5.
// parameters.setRecordingHint(true);
videoBuffers.addBuffersAsCameraCallbackBuffers(camera);
videoBuffers.queueCameraBuffers(width, height, format, camera);
camera.setPreviewCallbackWithBuffer(this);
camera.startPreview();
frameObserver.OnCapturerStarted(true);
@ -467,7 +464,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
});
cameraThread.join();
cameraThreadHandler = null;
videoBuffers = null;
Log.d(TAG, "stopCapture done");
}
private void stopCaptureOnCameraThread() {
@ -481,6 +478,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
try {
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
videoBuffers.stopReturnBuffersToCamera();
camera.setPreviewTexture(null);
cameraSurfaceTexture = null;
@ -505,11 +503,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
synchronized void returnBuffer(final long timeStamp) {
if (cameraThreadHandler == null) {
// The camera has been stopped.
videoBuffers.returnBuffer(timeStamp);
return;
}
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
if (camera == null)
return;
videoBuffers.addBufferAsCameraCallbackBuffer(camera, timeStamp);
videoBuffers.returnBuffer(timeStamp);
}
});
}
@ -583,7 +584,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
throw new RuntimeException("Unexpected camera in callback!");
}
long captureTimeMs = SystemClock.elapsedRealtime();
long captureTimeNs =
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
int rotation = getDeviceOrientation();
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
@ -593,9 +595,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Mark the frame owning |data| as used.
// Note that since data is directBuffer,
// data.length >= videoBuffers.frameSize.
videoBuffers.reserveByteBuffer(data, captureTimeMs);
videoBuffers.reserveByteBuffer(data, captureTimeNs);
frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, rotation,
captureTimeMs);
captureTimeNs);
}
// runCameraThreadUntilIdle make sure all posted messages to the cameraThread
@ -632,14 +634,17 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private static int numCaptureBuffers = 3;
private final Frame cameraFrames[];
public final int frameSize;
private final List<Frame> cameraFrames = new ArrayList<Frame>();
public int frameSize = 0;
private Camera camera;
private static class Frame {
private final ByteBuffer buffer;
public long timeStamp = -1;
public final int frameSize;
Frame(int frameSize) {
this.frameSize = frameSize;
buffer = ByteBuffer.allocateDirect(frameSize);
}
@ -648,25 +653,60 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
}
FramePool(int width, int height, int format) {
cameraFrames = new Frame[numCaptureBuffers];
frameSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
for (int i = 0; i < numCaptureBuffers; i++) {
cameraFrames[i] = new Frame(frameSize);
// Adds frames as callback buffers to |camera|. If the required frame size
// has changed, new buffers are allocated and added; otherwise all frames
// that have already been returned by clients are re-queued. Old-size
// frames still held by clients are disposed of when they are eventually
// returned (see returnBuffer). Throws if a camera is already set.
void queueCameraBuffers(int width, int height, int format, Camera camera) {
if (this.camera != null)
throw new RuntimeException("camera already set.");
this.camera = camera;
int newframeSize =
width * height * ImageFormat.getBitsPerPixel(format) / 8;
int numberOfEnquedCameraBuffers = 0;
if (newframeSize != frameSize) {
// Create new frames and add to the camera.
// The old frames will be released when frames are returned.
for (int i = 0; i < numCaptureBuffers; ++i) {
Frame frame = new Frame(newframeSize);
cameraFrames.add(frame);
this.camera.addCallbackBuffer(frame.data());
}
numberOfEnquedCameraBuffers = numCaptureBuffers;
} else {
// Add all frames that have been returned.
// A timeStamp < 0 marks a frame as free (not held by a client).
for (Frame frame : cameraFrames) {
if (frame.timeStamp < 0) {
camera.addCallbackBuffer(frame.data());
++numberOfEnquedCameraBuffers;
}
}
}
frameSize = newframeSize;
Log.d(TAG, "queueCameraBuffers enqued " + numberOfEnquedCameraBuffers
+ " buffers of size " + frameSize + ".");
}
// Add all free buffers to |camera| that are currently not sent to a client.
void addBuffersAsCameraCallbackBuffers(Camera camera) {
// Stops handing returned buffers back to the camera by clearing |camera|.
// Called when capture stops. Buffers still held by clients (timeStamp > -1)
// are logged as pending; they will be dropped or released when returned.
void stopReturnBuffersToCamera() {
  this.camera = null;
  // Use a StringBuilder instead of repeated String concatenation to avoid
  // allocating a new String per pending buffer.
  StringBuilder pendingTimeStamps = new StringBuilder();
  for (Frame frame : cameraFrames) {
    if (frame.timeStamp > -1) {
      // This frame is still in flight to a client.
      pendingTimeStamps.append(" ").append(frame.timeStamp);
    }
  }
  Log.d(TAG, "stopReturnBuffersToCamera called."
      + (pendingTimeStamps.length() == 0 ?
             " All buffers have been returned."
             : " Pending buffers " + pendingTimeStamps + "."));
}
void reserveByteBuffer(byte[] data, long timeStamp) {
for (Frame frame : cameraFrames) {
if (data == frame.data()) {
if (frame.timeStamp > 0) {
throw new RuntimeException("Frame already in use !");
}
frame.timeStamp = timeStamp;
return;
}
@ -674,16 +714,38 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
throw new RuntimeException("unknown data buffer?!?");
}
// Add the buffer with |timeStamp| to |camera|.
void addBufferAsCameraCallbackBuffer(Camera camera, long timeStamp) {
// Marks the buffer that was captured at |timeStamp| as free again. If the
// camera is running and the buffer has the current frame size, it is
// re-queued as a camera callback buffer; a buffer with an old size is
// removed from the pool instead. Throws if |timeStamp| does not match any
// known buffer.
void returnBuffer(long timeStamp) {
  Frame returnedFrame = null;
  for (Frame frame : cameraFrames) {
    if (timeStamp == frame.timeStamp) {
      // A timeStamp of -1 marks the frame as free.
      frame.timeStamp = -1;
      returnedFrame = frame;
      break;
    }
  }
  if (returnedFrame == null) {
    // Fixed: the message previously read "...<timestamp>returned?!?"
    // without a separating space.
    throw new RuntimeException("unknown data buffer with time stamp "
        + timeStamp + " returned?!?");
  }
  if (camera != null && returnedFrame.frameSize == frameSize) {
    // Camera still running and the buffer has the current size - recycle it.
    camera.addCallbackBuffer(returnedFrame.data());
    return;
  }
  if (returnedFrame.frameSize != frameSize) {
    Log.d(TAG, "returnBuffer with time stamp "+ timeStamp
        + " called with old frame size, " + returnedFrame.frameSize + ".");
    // Since this frame has the wrong size, remove it from the list. Frames
    // with the correct size are created in queueCameraBuffers so this must
    // be an old buffer.
    cameraFrames.remove(returnedFrame);
    return;
  }
  Log.d(TAG, "returnBuffer with time stamp "+ timeStamp
      + " called after camera has been stopped.");
}
}

View File

@ -10,6 +10,8 @@
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/logging.h"
namespace rtc {
AsyncInvoker::AsyncInvoker() : destroying_(false) {}

View File

@ -62,7 +62,6 @@
#ifndef WEBRTC_BASE_CALLBACK_H_
#define WEBRTC_BASE_CALLBACK_H_
#include "webrtc/base/logging.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"

View File

@ -54,6 +54,47 @@ class RefCountedObject : public T {
: T(p1, p2, p3, p4, p5), ref_count_(0) {
}
// Forwarding constructors for 6 through 11 arguments, in the pre-variadic-
// template style used by the rest of this class: each arity needs its own
// overload so RefCountedObject<T> can wrap types whose constructors take
// this many parameters (e.g. WrappedI420Buffer's 11-argument constructor).
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6>
RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6)
: T(p1, p2, p3, p4, p5, p6), ref_count_(0) {
}
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6, typename P7>
RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7)
: T(p1, p2, p3, p4, p5, p6, p7), ref_count_(0) {
}
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6, typename P7, typename P8>
RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8)
: T(p1, p2, p3, p4, p5, p6, p7, p8), ref_count_(0) {
}
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6, typename P7, typename P8, typename P9>
RefCountedObject(
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9)
: T(p1, p2, p3, p4, p5, p6, p7, p8, p9), ref_count_(0) {
}
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6, typename P7, typename P8, typename P9, typename P10>
RefCountedObject(
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10)
: T(p1, p2, p3, p4, p5, p6, p7, p8, p9, p10), ref_count_(0) {
}
template<typename P1, typename P2, typename P3, typename P4, typename P5,
typename P6, typename P7, typename P8, typename P9, typename P10,
typename P11>
RefCountedObject(
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10,
P11 p11)
: T(p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11), ref_count_(0) {
}
virtual int AddRef() {
return rtc::AtomicOps::Increment(&ref_count_);
}

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_VIDEO_FRAME_BUFFER_H_
#define WEBRTC_VIDEO_FRAME_BUFFER_H_
#include "webrtc/base/callback.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
@ -104,6 +105,43 @@ class TextureBuffer : public VideoFrameBuffer {
const int height_;
};
// An I420 VideoFrameBuffer that wraps externally owned plane memory
// without copying it. The buffer does not own the pixel data;
// |no_longer_used| is run from the destructor, i.e. when the last
// reference is released, signalling to the owner that the wrapped memory
// may be reused. The constructor center-crops from |width| x |height|
// down to |desired_width| x |desired_height| by offsetting the plane
// pointers (see the implementation).
class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
public:
WrappedI420Buffer(int desired_width,
int desired_height,
int width,
int height,
const uint8_t* y_plane,
int y_stride,
const uint8_t* u_plane,
int u_stride,
const uint8_t* v_plane,
int v_stride,
const rtc::Callback0<void>& no_longer_used);
int width() const override;
int height() const override;
const uint8_t* data(PlaneType type) const override;
uint8_t* data(PlaneType type) override;
int stride(PlaneType type) const override;
rtc::scoped_refptr<NativeHandle> native_handle() const override;
private:
// Only RefCountedObject may destroy this buffer (refcounted lifetime).
friend class rtc::RefCountedObject<WrappedI420Buffer>;
~WrappedI420Buffer() override;
// Cropped (desired) dimensions, not those of the wrapped memory.
int width_;
int height_;
// Non-owning pointers into the wrapped memory, offset for cropping.
const uint8_t* y_plane_;
const uint8_t* u_plane_;
const uint8_t* v_plane_;
const int y_stride_;
const int u_stride_;
const int v_stride_;
// Invoked from the destructor to notify the memory owner.
rtc::Callback0<void> no_longer_used_cb_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_FRAME_BUFFER_H_

View File

@ -133,4 +133,86 @@ rtc::scoped_refptr<NativeHandle> TextureBuffer::native_handle() const {
return native_handle_;
}
// Wraps the given I420 planes without copying. The wrapped memory must be
// at least |width| x |height|; the buffer exposes a centered
// |desired_width| x |desired_height| crop of it by offsetting the plane
// pointers. |no_longer_used| is stored and run from the destructor.
WrappedI420Buffer::WrappedI420Buffer(int desired_width,
int desired_height,
int width,
int height,
const uint8_t* y_plane,
int y_stride,
const uint8_t* u_plane,
int u_stride,
const uint8_t* v_plane,
int v_stride,
const rtc::Callback0<void>& no_longer_used)
: width_(desired_width),
height_(desired_height),
y_plane_(y_plane),
u_plane_(u_plane),
v_plane_(v_plane),
y_stride_(y_stride),
u_stride_(u_stride),
v_stride_(v_stride),
no_longer_used_cb_(no_longer_used) {
CHECK(width >= desired_width && height >= desired_height);
// Center crop to |desired_width| x |desired_height|.
// Make sure offset is even so that u/v plane becomes aligned.
const int offset_x = ((width - desired_width) / 2) & ~1;
const int offset_y = ((height - desired_height) / 2) & ~1;
y_plane_ += y_stride_ * offset_y + offset_x;
// Chroma planes are subsampled 2x2 in I420, hence the halved offsets.
u_plane_ += u_stride_ * (offset_y / 2) + (offset_x / 2);
v_plane_ += v_stride_ * (offset_y / 2) + (offset_x / 2);
}
// Runs when the last reference is released; notifies the owner of the
// wrapped memory that it may now be reused.
WrappedI420Buffer::~WrappedI420Buffer() {
no_longer_used_cb_();
}
// Returns the cropped (desired) width, not the width of the wrapped memory.
int WrappedI420Buffer::width() const {
return width_;
}
// Returns the cropped (desired) height, not the height of the wrapped memory.
int WrappedI420Buffer::height() const {
return height_;
}
// Returns the (crop-offset) pointer into the wrapped memory for |type|.
const uint8_t* WrappedI420Buffer::data(PlaneType type) const {
switch (type) {
case kYPlane:
return y_plane_;
case kUPlane:
return u_plane_;
case kVPlane:
return v_plane_;
default:
RTC_NOTREACHED();
return nullptr;
}
}
// Mutable access is not supported: the wrapped memory is treated as
// read-only (the planes are stored as const uint8_t*).
uint8_t* WrappedI420Buffer::data(PlaneType type) {
RTC_NOTREACHED();
return nullptr;
}
// Returns the stride of the wrapped memory for |type|; strides are those
// of the original (uncropped) planes.
int WrappedI420Buffer::stride(PlaneType type) const {
switch (type) {
case kYPlane:
return y_stride_;
case kUPlane:
return u_stride_;
case kVPlane:
return v_stride_;
default:
RTC_NOTREACHED();
return 0;
}
}
// This is a CPU-memory buffer, not a texture-backed one, so there is no
// native handle.
rtc::scoped_refptr<NativeHandle> WrappedI420Buffer::native_handle() const {
return nullptr;
}
} // namespace webrtc