Add VideoSource::Stop and Restart methods.

The purpose is to ensure that start and stop are called on the correct thread on Android. It also cleans up the Java VideoSource implementation.
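For context, a minimal usage sketch of the new Java API (assumption: the wrapper class and its method names below are illustrative only; VideoSource.stop()/restart() and the factory calls mirror the test added in this CL):

    // Illustrative sketch: pausing and resuming capture with the new
    // VideoSource.stop()/restart() API. The CaptureLifecycle class and its
    // method names are assumptions, not part of WebRTC.
    import org.webrtc.MediaConstraints;
    import org.webrtc.PeerConnectionFactory;
    import org.webrtc.VideoCapturerAndroid;
    import org.webrtc.VideoSource;

    public class CaptureLifecycle {
      private final PeerConnectionFactory factory = new PeerConnectionFactory();
      private VideoSource source;

      public void start() {
        VideoCapturerAndroid capturer = VideoCapturerAndroid.create("");
        // The source starts capturing as soon as it is created.
        source = factory.createVideoSource(capturer, new MediaConstraints());
      }

      // E.g. when the app goes to the background; delegates to the native
      // VideoSource::Stop() so the capturer is stopped on the correct thread.
      public void pause() {
        source.stop();
      }

      // stop() must have been called since the last restart().
      public void resume() {
        source.restart();
      }
    }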

BUG=4303
R=glaznev@webrtc.org, magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/39989004

Cr-Commit-Position: refs/heads/master@{#8389}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8389 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: perkj@webrtc.org
Date: 2015-02-17 13:53:56 +00:00
Parent: 959dac7498
Commit: 8f605e8911
12 changed files with 114 additions and 39 deletions

View File

@@ -122,6 +122,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     RendererCallbacks callbacks = new RendererCallbacks();
     track.addRenderer(new VideoRenderer(callbacks));
     assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
   }

   @Override
@@ -195,6 +197,32 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     RendererCallbacks callbacks = new RendererCallbacks();
     track.addRenderer(new VideoRenderer(callbacks));
     assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
+  }
+
+  @SmallTest
+  // This tests that the VideoSource that the VideoCapturer is connected to can
+  // be stopped and restarted. It tests both the Java and the C++ layer.
+  public void testStopRestartVideoSource() throws Exception {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("");
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+    RendererCallbacks callbacks = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(callbacks));
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+
+    source.stop();
+    assertEquals(MediaSource.State.ENDED, source.state());
+
+    source.restart();
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+    track.dispose();
+    source.dispose();
   }

   @SmallTest
@@ -218,5 +246,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
       assertEquals((format.width*format.height*3)/2, observer.frameSize());
       assertTrue(capturer.stopCapture());
     }
+    capturer.dispose();
   }
 }

View File

@@ -100,7 +100,8 @@ AndroidVideoCapturer::AndroidVideoCapturer(
     : running_(false),
       delegate_(delegate.Pass()),
       worker_thread_(NULL),
-      frame_factory_(NULL) {
+      frame_factory_(NULL),
+      current_state_(cricket::CS_STOPPED) {
   std::string json_string = delegate_->GetSupportedFormats();
   LOG(LS_INFO) << json_string;
@@ -132,7 +133,7 @@ AndroidVideoCapturer::~AndroidVideoCapturer() {
 cricket::CaptureState AndroidVideoCapturer::Start(
     const cricket::VideoFormat& capture_format) {
   DCHECK(!running_);
-  DCHECK(worker_thread_ == nullptr);
+  DCHECK(worker_thread_ == nullptr || worker_thread_ == rtc::Thread::Current());
   // TODO(perkj): Better way to get a handle to the worker thread?
   worker_thread_ = rtc::Thread::Current();
@@ -146,7 +147,9 @@ cricket::CaptureState AndroidVideoCapturer::Start(
   delegate_->Start(
       capture_format.width, capture_format.height,
       cricket::VideoFormat::IntervalToFps(capture_format.interval), this);
-  return cricket::CS_STARTING;
+  SetCaptureFormat(&capture_format);
+  current_state_ = cricket::CS_STARTING;
+  return current_state_;
 }

 void AndroidVideoCapturer::Stop() {
@@ -157,7 +160,8 @@ void AndroidVideoCapturer::Stop() {
   SetCaptureFormat(NULL);
   delegate_->Stop();
-  SignalStateChange(this, cricket::CS_STOPPED);
+  current_state_ = cricket::CS_STOPPED;
+  SignalStateChange(this, current_state_);
 }

 bool AndroidVideoCapturer::IsRunning() {
@@ -180,7 +184,14 @@ void AndroidVideoCapturer::OnCapturerStarted_w(bool success) {
   DCHECK(worker_thread_->IsCurrent());
   cricket::CaptureState new_state =
       success ? cricket::CS_RUNNING : cricket::CS_FAILED;
-  SetCaptureState(new_state);
+  if (new_state == current_state_)
+    return;
+  current_state_ = new_state;
+
+  // TODO(perkj): SetCaptureState can not be used since it posts to |thread_|.
+  // But |thread_| is currently just the thread that happened to create the
+  // cricket::VideoCapturer.
+  SignalStateChange(this, new_state);
 }

 void AndroidVideoCapturer::OnIncomingFrame(signed char* videoFrame,

View File

@@ -102,6 +102,8 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
   class FrameFactory;
   FrameFactory* frame_factory_;  // Owned by cricket::VideoCapturer.
+
+  cricket::CaptureState current_state_;
 };

 }  // namespace webrtc

View File

@@ -2950,28 +2950,13 @@ JOW(void, VideoRenderer_nativeCopyPlane)(
   }
 }

-JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
-  cricket::VideoCapturer* capturer =
-      reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
-  scoped_ptr<cricket::VideoFormatPod> format(
-      new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
-  capturer->Stop();
-  return jlongFromPointer(format.release());
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  reinterpret_cast<VideoSourceInterface*>(j_p)->Stop();
 }

 JOW(void, VideoSource_restart)(
     JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
-  CHECK(j_p_source);
-  CHECK(j_p_format);
-  scoped_ptr<cricket::VideoFormatPod> format(
-      reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
-  reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
-      StartCapturing(cricket::VideoFormat(*format));
-}
-
-JOW(void, VideoSource_freeNativeVideoFormat)(
-    JNIEnv* jni, jclass, jlong j_p) {
-  delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
 }

 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {

View File

@@ -422,8 +422,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback
       @Override public void run() {
         stopCaptureOnCameraThread(result);
       }
     });
     boolean status = exchange(result, false);  // |false| is a dummy value here.
+    Log.d(TAG, "stopCapture wait");
     try {
       cameraThread.join();
     } catch (InterruptedException e) {

View File

@@ -36,7 +36,6 @@ package org.webrtc;
  * its output to the encoder) can be too high to bear.
  */
 public class VideoSource extends MediaSource {
-  private long nativeVideoFormatAtStop;

   public VideoSource(long nativeSource) {
     super(nativeSource);
@@ -44,30 +43,21 @@ public class VideoSource extends MediaSource {
   // Stop capture feeding this source.
   public void stop() {
-    nativeVideoFormatAtStop = stop(nativeSource);
+    stop(nativeSource);
   }

   // Restart capture feeding this source. stop() must have been called since
   // the last call to restart() (if any). Note that this isn't "start()";
   // sources are started by default at birth.
   public void restart() {
-    restart(nativeSource, nativeVideoFormatAtStop);
-    nativeVideoFormatAtStop = 0;
+    restart(nativeSource);
   }

   @Override
   public void dispose() {
-    if (nativeVideoFormatAtStop != 0) {
-      freeNativeVideoFormat(nativeVideoFormatAtStop);
-      nativeVideoFormatAtStop = 0;
-    }
     super.dispose();
   }

-  // This stop() returns an owned C++ VideoFormat pointer for use in restart()
-  // and dispose().
-  private static native long stop(long nativeSource);
-  private static native void restart(
-      long nativeSource, long nativeVideoFormatAtStop);
-  private static native void freeNativeVideoFormat(long nativeVideoFormat);
+  private static native void stop(long nativeSource);
+  private static native void restart(long nativeSource);
 }

View File

@@ -85,6 +85,8 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
   virtual cricket::VideoCapturer* GetVideoCapturer() {
     return &fake_capturer_;
   }
+  virtual void Stop() {}
+  virtual void Restart() {}
   virtual void AddSink(cricket::VideoRenderer* output) {}
   virtual void RemoveSink(cricket::VideoRenderer* output) {}
   virtual SourceState state() const { return state_; }

View File

@@ -432,11 +432,27 @@ cricket::VideoRenderer* VideoSource::FrameInput() {
   return frame_input_.get();
 }

+void VideoSource::Stop() {
+  channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+}
+
+void VideoSource::Restart() {
+  if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+    SetState(kEnded);
+    return;
+  }
+  for (cricket::VideoRenderer* sink : sinks_) {
+    channel_manager_->AddVideoRenderer(video_capturer_.get(), sink);
+  }
+}
+
 void VideoSource::AddSink(cricket::VideoRenderer* output) {
+  sinks_.push_back(output);
   channel_manager_->AddVideoRenderer(video_capturer_.get(), output);
 }

 void VideoSource::RemoveSink(cricket::VideoRenderer* output) {
+  sinks_.remove(output);
   channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output);
 }

View File

@@ -28,6 +28,8 @@
 #ifndef TALK_APP_WEBRTC_VIDEOSOURCE_H_
 #define TALK_APP_WEBRTC_VIDEOSOURCE_H_

+#include <list>
+
 #include "talk/app/webrtc/mediastreaminterface.h"
 #include "talk/app/webrtc/notifier.h"
 #include "talk/app/webrtc/videosourceinterface.h"
@@ -73,6 +75,10 @@ class VideoSource : public Notifier<VideoSourceInterface>,
   virtual cricket::VideoCapturer* GetVideoCapturer() {
     return video_capturer_.get();
   }
+
+  void Stop() override;
+  void Restart() override;
+
   // |output| will be served video frames as long as the underlying capturer
   // is running video frames.
   virtual void AddSink(cricket::VideoRenderer* output);
@@ -93,6 +99,8 @@ class VideoSource : public Notifier<VideoSourceInterface>,
   rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
   rtc::scoped_ptr<cricket::VideoRenderer> frame_input_;
+  std::list<cricket::VideoRenderer*> sinks_;
+
   cricket::VideoFormat format_;
   cricket::VideoOptions options_;
   SourceState state_;

View File

@@ -169,7 +169,7 @@ class VideoSourceTest : public testing::Test {
 // Test that a VideoSource transition to kLive state when the capture
 // device have started and kEnded if it is stopped.
 // It also test that an output can receive video frames.
-TEST_F(VideoSourceTest, StartStop) {
+TEST_F(VideoSourceTest, CapturerStartStop) {
   // Initialize without constraints.
   CreateVideoSource();
   EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
@@ -183,6 +183,30 @@ TEST_F(VideoSourceTest, StartStop) {
                  kMaxWaitMs);
 }

+// Test that a VideoSource can be stopped and restarted.
+TEST_F(VideoSourceTest, StopRestart) {
+  // Initialize without constraints.
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+  source_->Stop();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+
+  source_->Restart();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(2, renderer_.num_rendered_frames());
+
+  source_->Stop();
+}
+
 // Test start stop with a remote VideoSource - the video source that has a
 // RemoteVideoCapturer and takes video frames from FrameInput.
 TEST_F(VideoSourceTest, StartStopRemote) {

View File

@@ -43,6 +43,11 @@ class VideoSourceInterface : public MediaSourceInterface {
   // This can be used for receiving frames and state notifications.
   // But it should not be used for starting or stopping capturing.
   virtual cricket::VideoCapturer* GetVideoCapturer() = 0;
+
+  // Stop the video capturer.
+  virtual void Stop() = 0;
+  virtual void Restart() = 0;
+
   // Adds |output| to the source to receive frames.
   virtual void AddSink(cricket::VideoRenderer* output) = 0;
   virtual void RemoveSink(cricket::VideoRenderer* output) = 0;

View File

@@ -39,6 +39,8 @@ namespace webrtc {
 BEGIN_PROXY_MAP(VideoSource)
   PROXY_CONSTMETHOD0(SourceState, state)
   PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
+  PROXY_METHOD0(void, Stop)
+  PROXY_METHOD0(void, Restart)
  PROXY_METHOD1(void, AddSink, cricket::VideoRenderer*)
  PROXY_METHOD1(void, RemoveSink, cricket::VideoRenderer*)
  PROXY_CONSTMETHOD0(const cricket::VideoOptions*, options)